comment
stringlengths
22
3.02k
method_body
stringlengths
46
368k
target_code
stringlengths
0
181
method_body_after
stringlengths
12
368k
context_before
stringlengths
11
634k
context_after
stringlengths
11
632k
For completeness, why don't you pass `stateHandle.getStateSize()` here?
/**
 * Creates a local (on-disk) incremental keyed state handle describing the state that was
 * downloaded to {@code downloadDestination}.
 *
 * <p>All meta information (backend id, checkpoint id, key-group range, metadata handle, shared
 * state) is carried over from the remote handle.
 *
 * @return a local incremental keyed state handle backed by the download directory.
 */
public IncrementalLocalKeyedStateHandle createLocalStateHandleForDownloadedState() {
    return new IncrementalLocalKeyedStateHandle(
            stateHandle.getBackendIdentifier(),
            stateHandle.getCheckpointId(),
            // Propagate the remote handle's state size; the previous hard-coded 0L
            // under-reported the size of the downloaded directory.
            new DirectoryStateHandle(downloadDestination, stateHandle.getStateSize()),
            stateHandle.getKeyGroupRange(),
            stateHandle.getMetaDataStateHandle(),
            stateHandle.getSharedState());
}
new DirectoryStateHandle(downloadDestination, 0L),
/**
 * Builds an {@link IncrementalLocalKeyedStateHandle} for the state content that has been
 * downloaded to {@code downloadDestination}, carrying over every piece of metadata from the
 * remote handle, including its reported state size.
 */
public IncrementalLocalKeyedStateHandle createLocalStateHandleForDownloadedState() {
    final DirectoryStateHandle directoryStateHandle =
            new DirectoryStateHandle(downloadDestination, stateHandle.getStateSize());
    return new IncrementalLocalKeyedStateHandle(
            stateHandle.getBackendIdentifier(),
            stateHandle.getCheckpointId(),
            directoryStateHandle,
            stateHandle.getKeyGroupRange(),
            stateHandle.getMetaDataStateHandle(),
            stateHandle.getSharedState());
}
/** Describes one download request: which remote state handle to fetch and where to put it. */
class StateHandleDownloadSpec {

    // Remote incremental state handle whose contents are to be fetched.
    private final IncrementalRemoteKeyedStateHandle stateHandle;

    // Local directory into which the handle's content is written.
    private final Path downloadDestination;

    public StateHandleDownloadSpec(
            IncrementalRemoteKeyedStateHandle stateHandle, Path downloadDestination) {
        this.stateHandle = stateHandle;
        this.downloadDestination = downloadDestination;
    }

    public IncrementalRemoteKeyedStateHandle getStateHandle() {
        return stateHandle;
    }

    public Path getDownloadDestination() {
        return downloadDestination;
    }
}
/** Pairs a remote incremental state handle with the local path it should be downloaded to. */
class StateHandleDownloadSpec {

    // The remote state handle that will be downloaded.
    private final IncrementalRemoteKeyedStateHandle stateHandle;

    // Target directory for the downloaded state files.
    private final Path downloadDestination;

    public StateHandleDownloadSpec(
            IncrementalRemoteKeyedStateHandle stateHandle, Path downloadDestination) {
        this.stateHandle = stateHandle;
        this.downloadDestination = downloadDestination;
    }

    public IncrementalRemoteKeyedStateHandle getStateHandle() {
        return stateHandle;
    }

    public Path getDownloadDestination() {
        return downloadDestination;
    }
}
Unfortunately, doing so would cause an NPE in the docker environment factory. The server's address descriptor is explicitly used there in order to pass to the docker invocation. I'll introduce a no-op or exception-throwing artifact retrieval service for now to make this explicit.
/**
 * Creates a {@code DockerJobBundleFactory}: allocates the control, logging, artifact-retrieval
 * and provisioning gRPC servers and wires them into a Docker environment factory.
 *
 * <p>NOTE(review): {@code artifactSource} is currently unused in this body — presumably it is
 * meant to back the retrieval server; verify against callers.
 */
public static DockerJobBundleFactory create(ArtifactSource artifactSource) throws Exception {
  // Docker CLI wrapper with a 60-second per-command timeout.
  DockerCommand dockerCommand = DockerCommand.forExecutable("docker", Duration.ofSeconds(60));
  ServerFactory serverFactory = getServerFactory();
  IdGenerator stageIdGenerator = IdGenerators.incrementingLongs();
  ControlClientPool clientPool = MapControlClientPool.create();
  GrpcFnServer<FnApiControlClientPoolService> controlServer =
      GrpcFnServer.allocatePortAndCreateFor(
          FnApiControlClientPoolService.offeringClientsToPool(
              clientPool.getSink(), GrpcContextHeaderAccessorProvider.getHeaderAccessor()),
          serverFactory);
  GrpcFnServer<GrpcLoggingService> loggingServer =
      GrpcFnServer.allocatePortAndCreateFor(
          GrpcLoggingService.forWriter(Slf4jLogWriter.getDefault()), serverFactory);
  // NOTE(review): passing null as the service is dangerous — the server's address descriptor
  // is handed to the Docker invocation, and any dispatched retrieval call will NPE. An
  // explicit no-op or exception-throwing ArtifactRetrievalService would make this intent
  // visible instead of hiding it behind a null.
  GrpcFnServer<ArtifactRetrievalService> retrievalServer =
      GrpcFnServer.allocatePortAndCreateFor(null, serverFactory);
  GrpcFnServer<StaticGrpcProvisionService> provisioningServer =
      GrpcFnServer.allocatePortAndCreateFor(
          StaticGrpcProvisionService.create(ProvisionInfo.newBuilder().build()), serverFactory);
  DockerEnvironmentFactory environmentFactory =
      DockerEnvironmentFactory.forServices(
          dockerCommand,
          controlServer,
          loggingServer,
          retrievalServer,
          provisioningServer,
          clientPool.getSource(),
          IdGenerators.incrementingLongs());
  return new DockerJobBundleFactory(
      environmentFactory,
      serverFactory,
      stageIdGenerator,
      controlServer,
      loggingServer,
      retrievalServer,
      provisioningServer);
}
/**
 * Builds a {@code DockerJobBundleFactory}: allocates the control, logging, artifact-retrieval
 * and provisioning gRPC servers, then wires them into a Docker environment factory. The
 * retrieval server is backed by {@code UnimplementedArtifactRetrievalService}, making the lack
 * of artifact support explicit rather than passing a null service.
 */
public static DockerJobBundleFactory create(ArtifactSource artifactSource) throws Exception {
  final ServerFactory factory = getServerFactory();
  final IdGenerator idGenerator = IdGenerators.incrementingLongs();
  final ControlClientPool controlClientPool = MapControlClientPool.create();
  final GrpcFnServer<FnApiControlClientPoolService> control =
      GrpcFnServer.allocatePortAndCreateFor(
          FnApiControlClientPoolService.offeringClientsToPool(
              controlClientPool.getSink(),
              GrpcContextHeaderAccessorProvider.getHeaderAccessor()),
          factory);
  final GrpcFnServer<GrpcLoggingService> logging =
      GrpcFnServer.allocatePortAndCreateFor(
          GrpcLoggingService.forWriter(Slf4jLogWriter.getDefault()), factory);
  final GrpcFnServer<ArtifactRetrievalService> retrieval =
      GrpcFnServer.allocatePortAndCreateFor(
          new UnimplementedArtifactRetrievalService(), factory);
  final GrpcFnServer<StaticGrpcProvisionService> provisioning =
      GrpcFnServer.allocatePortAndCreateFor(
          StaticGrpcProvisionService.create(ProvisionInfo.newBuilder().build()), factory);
  final DockerEnvironmentFactory dockerEnvironmentFactory =
      DockerEnvironmentFactory.forServices(
          control,
          logging,
          retrieval,
          provisioning,
          controlClientPool.getSource(),
          IdGenerators.incrementingLongs());
  return new DockerJobBundleFactory(
      dockerEnvironmentFactory, factory, idGenerator, control, logging, retrieval, provisioning);
}
/** A {@code JobBundleFactory} that runs SDK harnesses in Docker containers. */
class DockerJobBundleFactory implements JobBundleFactory {
  private static final Logger LOG = LoggerFactory.getLogger(DockerJobBundleFactory.class);

  // Hostname via which containers reach the host on Docker for Mac.
  private static final String DOCKER_FOR_MAC_HOST = "host.docker.internal";

  private final IdGenerator stageIdGenerator;
  private final GrpcFnServer<FnApiControlClientPoolService> controlServer;
  private final GrpcFnServer<GrpcLoggingService> loggingServer;
  private final GrpcFnServer<ArtifactRetrievalService> retrievalServer;
  private final GrpcFnServer<StaticGrpcProvisionService> provisioningServer;

  // Weak-valued cache: environments are cleaned up once their clients are unreachable.
  private final LoadingCache<Environment, WrappedSdkHarnessClient> environmentCache;

  private DockerJobBundleFactory(
      DockerEnvironmentFactory environmentFactory,
      ServerFactory serverFactory,
      IdGenerator stageIdGenerator,
      GrpcFnServer<FnApiControlClientPoolService> controlServer,
      GrpcFnServer<GrpcLoggingService> loggingServer,
      GrpcFnServer<ArtifactRetrievalService> retrievalServer,
      GrpcFnServer<StaticGrpcProvisionService> provisioningServer) {
    this.stageIdGenerator = stageIdGenerator;
    this.controlServer = controlServer;
    this.loggingServer = loggingServer;
    this.retrievalServer = retrievalServer;
    this.provisioningServer = provisioningServer;
    this.environmentCache =
        CacheBuilder.newBuilder()
            .weakValues()
            .removalListener(
                ((RemovalNotification<Environment, WrappedSdkHarnessClient> notification) -> {
                  LOG.debug("Cleaning up for environment {}", notification.getKey().getUrl());
                  try {
                    notification.getValue().close();
                  } catch (Exception e) {
                    // FIX: the previous call passed the key as a placeholder argument to a
                    // message that has no "{}" placeholder, so the key was silently dropped.
                    // Format the message explicitly and keep the throwable as the last arg.
                    LOG.warn(
                        String.format("Error cleaning up environment %s", notification.getKey()),
                        e);
                  }
                }))
            .build(
                new CacheLoader<Environment, WrappedSdkHarnessClient>() {
                  @Override
                  public WrappedSdkHarnessClient load(Environment environment) throws Exception {
                    RemoteEnvironment remoteEnvironment =
                        environmentFactory.createEnvironment(environment);
                    return WrappedSdkHarnessClient.wrapping(remoteEnvironment, serverFactory);
                  }
                });
  }

  /** Creates a stage bundle factory, reusing (or lazily creating) the stage's environment. */
  @Override
  public <T> StageBundleFactory<T> forStage(ExecutableStage executableStage) {
    WrappedSdkHarnessClient wrappedClient =
        environmentCache.getUnchecked(executableStage.getEnvironment());
    ExecutableProcessBundleDescriptor processBundleDescriptor;
    try {
      processBundleDescriptor =
          ProcessBundleDescriptors.fromExecutableStage(
              stageIdGenerator.getId(),
              executableStage,
              wrappedClient.getDataServer().getApiServiceDescriptor());
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return SimpleStageBundleFactory.create(wrappedClient, processBundleDescriptor);
  }

  @Override
  public void close() throws Exception {
    // Evict all cached environments first so their removal listeners run before the shared
    // servers are torn down.
    environmentCache.invalidateAll();
    environmentCache.cleanUp();
    controlServer.close();
    loggingServer.close();
    retrievalServer.close();
    provisioningServer.close();
  }

  private static ServerFactory getServerFactory() {
    switch (getPlatform()) {
      case LINUX:
        return ServerFactory.createDefault();
      case MAC:
        // Docker for Mac containers cannot reach the host via localhost; use the special name.
        return ServerFactory.createWithUrlFactory(
            (host, port) -> HostAndPort.fromParts(DOCKER_FOR_MAC_HOST, port).toString());
      default:
        LOG.warn("Unknown Docker platform. Falling back to default server factory");
        return ServerFactory.createDefault();
    }
  }

  private static Platform getPlatform() {
    String osName = System.getProperty("os.name").toLowerCase();
    if (osName.startsWith("mac")) {
      return Platform.MAC;
    } else if (osName.startsWith("linux")) {
      return Platform.LINUX;
    }
    return Platform.OTHER;
  }

  /** Bundle factory for a single executable stage, bound to one harness client. */
  private static class SimpleStageBundleFactory<InputT> implements StageBundleFactory<InputT> {
    private final BundleProcessor<InputT> processor;
    private final ExecutableProcessBundleDescriptor processBundleDescriptor;

    // Strong reference kept only to pin the environment in the weak-valued cache while in use.
    @SuppressFBWarnings private WrappedSdkHarnessClient wrappedClient;

    static <InputT> SimpleStageBundleFactory<InputT> create(
        WrappedSdkHarnessClient wrappedClient,
        ExecutableProcessBundleDescriptor processBundleDescriptor) {
      @SuppressWarnings("unchecked")
      BundleProcessor<InputT> processor =
          wrappedClient
              .getClient()
              .getProcessor(
                  processBundleDescriptor.getProcessBundleDescriptor(),
                  (RemoteInputDestination) processBundleDescriptor.getRemoteInputDestination(),
                  wrappedClient.getStateServer().getService());
      return new SimpleStageBundleFactory<>(processBundleDescriptor, processor, wrappedClient);
    }

    SimpleStageBundleFactory(
        ExecutableProcessBundleDescriptor processBundleDescriptor,
        BundleProcessor<InputT> processor,
        WrappedSdkHarnessClient wrappedClient) {
      this.processBundleDescriptor = processBundleDescriptor;
      this.processor = processor;
      this.wrappedClient = wrappedClient;
    }

    @Override
    public RemoteBundle<InputT> getBundle(
        OutputReceiverFactory outputReceiverFactory, StateRequestHandler stateRequestHandler)
        throws Exception {
      // Wire an output receiver for every output target of the bundle descriptor.
      ImmutableMap.Builder<Target, RemoteOutputReceiver<?>> outputReceivers =
          ImmutableMap.builder();
      for (Map.Entry<Target, Coder<WindowedValue<?>>> targetCoder :
          processBundleDescriptor.getOutputTargetCoders().entrySet()) {
        Target target = targetCoder.getKey();
        Coder<WindowedValue<?>> coder = targetCoder.getValue();
        String bundleOutputPCollection =
            Iterables.getOnlyElement(
                processBundleDescriptor
                    .getProcessBundleDescriptor()
                    .getTransformsOrThrow(target.getPrimitiveTransformReference())
                    .getInputsMap()
                    .values());
        FnDataReceiver<WindowedValue<?>> outputReceiver =
            outputReceiverFactory.create(bundleOutputPCollection);
        outputReceivers.put(target, RemoteOutputReceiver.of(coder, outputReceiver));
      }
      return processor.newBundle(outputReceivers.build(), stateRequestHandler);
    }

    @Override
    public void close() throws Exception {
      // Drop our strong reference so the weak-valued cache can evict and clean up.
      wrappedClient = null;
    }
  }

  /**
   * Holder for an {@link SdkHarnessClient} along with its associated state and data servers. As
   * of now, there is a 1:1 relationship between data services and harness clients. The servers
   * are packaged here to tie server lifetimes to harness client lifetimes.
   */
  private static class WrappedSdkHarnessClient implements AutoCloseable {
    private final RemoteEnvironment environment;
    private final ExecutorService executor;
    private final GrpcFnServer<GrpcDataService> dataServer;
    private final GrpcFnServer<GrpcStateService> stateServer;
    private final SdkHarnessClient client;

    static WrappedSdkHarnessClient wrapping(
        RemoteEnvironment environment, ServerFactory serverFactory) throws Exception {
      ExecutorService executor = Executors.newCachedThreadPool();
      GrpcFnServer<GrpcDataService> dataServer =
          GrpcFnServer.allocatePortAndCreateFor(GrpcDataService.create(executor), serverFactory);
      GrpcFnServer<GrpcStateService> stateServer =
          GrpcFnServer.allocatePortAndCreateFor(GrpcStateService.create(), serverFactory);
      SdkHarnessClient client =
          SdkHarnessClient.usingFnApiClient(
              environment.getInstructionRequestHandler(), dataServer.getService());
      return new WrappedSdkHarnessClient(environment, executor, dataServer, stateServer, client);
    }

    private WrappedSdkHarnessClient(
        RemoteEnvironment environment,
        ExecutorService executor,
        GrpcFnServer<GrpcDataService> dataServer,
        GrpcFnServer<GrpcStateService> stateServer,
        SdkHarnessClient client) {
      this.executor = executor;
      this.environment = environment;
      this.dataServer = dataServer;
      this.stateServer = stateServer;
      this.client = client;
    }

    SdkHarnessClient getClient() {
      return client;
    }

    GrpcFnServer<GrpcStateService> getStateServer() {
      return stateServer;
    }

    GrpcFnServer<GrpcDataService> getDataServer() {
      return dataServer;
    }

    @Override
    public void close() throws Exception {
      // FIX: the previous sequential close() calls skipped the remaining resources if one of
      // them threw, leaking servers. try-with-resources closes everything (in reverse
      // declaration order) and preserves suppressed exceptions.
      try (AutoCloseable stateServerCloser = stateServer;
          AutoCloseable dataServerCloser = dataServer;
          AutoCloseable envCloser = environment;
          AutoCloseable executorCloser = executor::shutdown) {
        // Intentionally empty: resources are closed by try-with-resources.
      }
    }
  }

  private enum Platform {
    MAC,
    LINUX,
    OTHER,
  }
}
/** A {@code JobBundleFactory} that runs SDK harnesses in Docker containers. */
class DockerJobBundleFactory implements JobBundleFactory {
  private static final Logger LOG = LoggerFactory.getLogger(DockerJobBundleFactory.class);

  // Hostname via which containers reach the host on Docker for Mac.
  private static final String DOCKER_FOR_MAC_HOST = "host.docker.internal";

  private final IdGenerator stageIdGenerator;
  private final GrpcFnServer<FnApiControlClientPoolService> controlServer;
  private final GrpcFnServer<GrpcLoggingService> loggingServer;
  private final GrpcFnServer<ArtifactRetrievalService> retrievalServer;
  private final GrpcFnServer<StaticGrpcProvisionService> provisioningServer;

  // Weak-valued cache: an environment is torn down by the removal listener once its client
  // becomes unreachable or the cache is invalidated.
  private final LoadingCache<Environment, WrappedSdkHarnessClient> environmentCache;

  @VisibleForTesting
  DockerJobBundleFactory(
      DockerEnvironmentFactory environmentFactory,
      ServerFactory serverFactory,
      IdGenerator stageIdGenerator,
      GrpcFnServer<FnApiControlClientPoolService> controlServer,
      GrpcFnServer<GrpcLoggingService> loggingServer,
      GrpcFnServer<ArtifactRetrievalService> retrievalServer,
      GrpcFnServer<StaticGrpcProvisionService> provisioningServer) {
    this.stageIdGenerator = stageIdGenerator;
    this.controlServer = controlServer;
    this.loggingServer = loggingServer;
    this.retrievalServer = retrievalServer;
    this.provisioningServer = provisioningServer;
    this.environmentCache =
        CacheBuilder.newBuilder()
            .weakValues()
            .removalListener(
                ((RemovalNotification<Environment, WrappedSdkHarnessClient> notification) -> {
                  LOG.debug("Cleaning up for environment {}", notification.getKey().getUrl());
                  try {
                    notification.getValue().close();
                  } catch (Exception e) {
                    LOG.warn(
                        String.format("Error cleaning up environment %s", notification.getKey()),
                        e);
                  }
                }))
            .build(
                new CacheLoader<Environment, WrappedSdkHarnessClient>() {
                  @Override
                  public WrappedSdkHarnessClient load(Environment environment) throws Exception {
                    RemoteEnvironment remoteEnvironment =
                        environmentFactory.createEnvironment(environment);
                    return WrappedSdkHarnessClient.wrapping(remoteEnvironment, serverFactory);
                  }
                });
  }

  /** Creates a stage bundle factory, reusing (or lazily creating) the stage's environment. */
  @Override
  public <T> StageBundleFactory<T> forStage(ExecutableStage executableStage) {
    WrappedSdkHarnessClient wrappedClient =
        environmentCache.getUnchecked(executableStage.getEnvironment());
    ExecutableProcessBundleDescriptor processBundleDescriptor;
    try {
      processBundleDescriptor =
          ProcessBundleDescriptors.fromExecutableStage(
              stageIdGenerator.getId(),
              executableStage,
              wrappedClient.getDataServer().getApiServiceDescriptor());
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return SimpleStageBundleFactory.create(wrappedClient, processBundleDescriptor);
  }

  @Override
  public void close() throws Exception {
    // Evict environments first so their removal listeners run before the shared servers close.
    environmentCache.invalidateAll();
    environmentCache.cleanUp();
    controlServer.close();
    loggingServer.close();
    retrievalServer.close();
    provisioningServer.close();
  }

  private static ServerFactory getServerFactory() {
    switch (getPlatform()) {
      case LINUX:
        return ServerFactory.createDefault();
      case MAC:
        // Docker for Mac containers cannot reach the host via localhost; use the special name.
        return ServerFactory.createWithUrlFactory(
            (host, port) -> HostAndPort.fromParts(DOCKER_FOR_MAC_HOST, port).toString());
      default:
        LOG.warn("Unknown Docker platform. Falling back to default server factory");
        return ServerFactory.createDefault();
    }
  }

  private static Platform getPlatform() {
    String osName = System.getProperty("os.name").toLowerCase();
    if (osName.startsWith("mac")) {
      return Platform.MAC;
    } else if (osName.startsWith("linux")) {
      return Platform.LINUX;
    }
    return Platform.OTHER;
  }

  /** Bundle factory for a single executable stage, bound to one harness client. */
  private static class SimpleStageBundleFactory<InputT> implements StageBundleFactory<InputT> {
    private final BundleProcessor<InputT> processor;
    private final ExecutableProcessBundleDescriptor processBundleDescriptor;

    // Strong reference kept only to pin the environment in the weak-valued cache while in use.
    @SuppressFBWarnings private WrappedSdkHarnessClient wrappedClient;

    static <InputT> SimpleStageBundleFactory<InputT> create(
        WrappedSdkHarnessClient wrappedClient,
        ExecutableProcessBundleDescriptor processBundleDescriptor) {
      @SuppressWarnings("unchecked")
      BundleProcessor<InputT> processor =
          wrappedClient
              .getClient()
              .getProcessor(
                  processBundleDescriptor.getProcessBundleDescriptor(),
                  (RemoteInputDestination) processBundleDescriptor.getRemoteInputDestination(),
                  wrappedClient.getStateServer().getService());
      return new SimpleStageBundleFactory<>(processBundleDescriptor, processor, wrappedClient);
    }

    SimpleStageBundleFactory(
        ExecutableProcessBundleDescriptor processBundleDescriptor,
        BundleProcessor<InputT> processor,
        WrappedSdkHarnessClient wrappedClient) {
      this.processBundleDescriptor = processBundleDescriptor;
      this.processor = processor;
      this.wrappedClient = wrappedClient;
    }

    @Override
    public RemoteBundle<InputT> getBundle(
        OutputReceiverFactory outputReceiverFactory, StateRequestHandler stateRequestHandler)
        throws Exception {
      // Wire an output receiver for every output target of the bundle descriptor.
      ImmutableMap.Builder<Target, RemoteOutputReceiver<?>> outputReceivers =
          ImmutableMap.builder();
      for (Map.Entry<Target, Coder<WindowedValue<?>>> targetCoder :
          processBundleDescriptor.getOutputTargetCoders().entrySet()) {
        Target target = targetCoder.getKey();
        Coder<WindowedValue<?>> coder = targetCoder.getValue();
        String bundleOutputPCollection =
            Iterables.getOnlyElement(
                processBundleDescriptor
                    .getProcessBundleDescriptor()
                    .getTransformsOrThrow(target.getPrimitiveTransformReference())
                    .getInputsMap()
                    .values());
        FnDataReceiver<WindowedValue<?>> outputReceiver =
            outputReceiverFactory.create(bundleOutputPCollection);
        outputReceivers.put(target, RemoteOutputReceiver.of(coder, outputReceiver));
      }
      return processor.newBundle(outputReceivers.build(), stateRequestHandler);
    }

    @Override
    public void close() throws Exception {
      // Drop our strong reference so the weak-valued cache can evict and clean up.
      wrappedClient = null;
    }
  }

  /**
   * Holder for an {@link SdkHarnessClient} along with its associated state and data servers. As of
   * now, there is a 1:1 relationship between data services and harness clients. The servers are
   * packaged here to tie server lifetimes to harness client lifetimes.
   */
  private static class WrappedSdkHarnessClient implements AutoCloseable {
    private final RemoteEnvironment environment;
    private final ExecutorService executor;
    private final GrpcFnServer<GrpcDataService> dataServer;
    private final GrpcFnServer<GrpcStateService> stateServer;
    private final SdkHarnessClient client;

    static WrappedSdkHarnessClient wrapping(
        RemoteEnvironment environment, ServerFactory serverFactory) throws Exception {
      ExecutorService executor = Executors.newCachedThreadPool();
      GrpcFnServer<GrpcDataService> dataServer =
          GrpcFnServer.allocatePortAndCreateFor(GrpcDataService.create(executor), serverFactory);
      GrpcFnServer<GrpcStateService> stateServer =
          GrpcFnServer.allocatePortAndCreateFor(GrpcStateService.create(), serverFactory);
      SdkHarnessClient client =
          SdkHarnessClient.usingFnApiClient(
              environment.getInstructionRequestHandler(), dataServer.getService());
      return new WrappedSdkHarnessClient(environment, executor, dataServer, stateServer, client);
    }

    private WrappedSdkHarnessClient(
        RemoteEnvironment environment,
        ExecutorService executor,
        GrpcFnServer<GrpcDataService> dataServer,
        GrpcFnServer<GrpcStateService> stateServer,
        SdkHarnessClient client) {
      this.executor = executor;
      this.environment = environment;
      this.dataServer = dataServer;
      this.stateServer = stateServer;
      this.client = client;
    }

    SdkHarnessClient getClient() {
      return client;
    }

    GrpcFnServer<GrpcStateService> getStateServer() {
      return stateServer;
    }

    GrpcFnServer<GrpcDataService> getDataServer() {
      return dataServer;
    }

    @Override
    public void close() throws Exception {
      // try-with-resources closes everything even if one close() throws (suppressed
      // exceptions preserved), in reverse declaration order.
      try (AutoCloseable stateServerCloser = stateServer;
          AutoCloseable dataServerCloser = dataServer;
          AutoCloseable envCloser = environment;
          AutoCloseable executorCloser = executor::shutdown) {
        // Intentionally empty: resources are closed by try-with-resources.
      }
    }
  }

  private enum Platform {
    MAC,
    LINUX,
    OTHER,
  }

  /**
   * Artifact retrieval service that registers no handlers: every RPC fails with gRPC's default
   * UNIMPLEMENTED status. Used to make the absence of artifact support explicit.
   */
  private static class UnimplementedArtifactRetrievalService
      extends ArtifactRetrievalServiceGrpc.ArtifactRetrievalServiceImplBase
      implements ArtifactRetrievalService {
    @Override
    public void close() throws Exception {}
  }
}
When users need to disable checkpointing during backlog, they can set checkpoint-interval-during-backlog to -1L. In the case of checkpoint-interval, users can disable checkpointing simply by not setting the option. However, according to FLIP-309, leaving checkpoint-interval-during-backlog unset means it takes the same value as checkpoint-interval, so we need an alternative special value for this purpose. According to [1] and [2], the checkpoint interval uses -1L to represent an interval of infinite length, i.e. disabled checkpointing [3]. Thus I adopted -1L as the special value, in accordance with the current semantics of the checkpoint interval. [1] https://github.com/apache/flink/blob/master/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/CheckpointConfig.java#L201 [2] https://github.com/apache/flink/blob/master/flink-streaming-java/src/main/java/org/apache/flink/streaming/api/graph/StreamingJobGraphGenerator.java#L1918 [3] https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/CheckpointCoordinator.java#L1946
/**
 * Sets the interval at which checkpoints are periodically scheduled while the job is processing
 * backlog data.
 *
 * @param checkpointInterval the checkpoint interval in milliseconds; 0 disables checkpointing
 *     during backlog.
 * @throws IllegalArgumentException if the interval is neither 0 nor at least
 *     MINIMAL_CHECKPOINT_TIME.
 */
public void setCheckpointIntervalDuringBacklog(long checkpointInterval) {
    // 0 is the "disabled" sentinel here. The previous sentinel -1L cannot work because the
    // value is stored via Duration.ofMillis, and a negative Duration is not a valid interval.
    if (checkpointInterval != 0 && checkpointInterval < MINIMAL_CHECKPOINT_TIME) {
        throw new IllegalArgumentException(
                String.format(
                        "Checkpoint interval must be zero or larger than or equal to %s ms",
                        MINIMAL_CHECKPOINT_TIME));
    }
    configuration.set(
            ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL_DURING_BACKLOG,
            Duration.ofMillis(checkpointInterval));
}
if (checkpointInterval != -1L && checkpointInterval < MINIMAL_CHECKPOINT_TIME) {
/**
 * Sets the periodic checkpoint interval to use while the job is processing backlog data.
 * A value of 0 disables checkpointing during backlog; any other value must be at least
 * MINIMAL_CHECKPOINT_TIME milliseconds.
 */
public void setCheckpointIntervalDuringBacklog(long checkpointInterval) {
    final boolean disabled = checkpointInterval == 0;
    final boolean belowMinimum = checkpointInterval < MINIMAL_CHECKPOINT_TIME;
    if (belowMinimum && !disabled) {
        throw new IllegalArgumentException(
                String.format(
                        "Checkpoint interval must be zero or larger than or equal to %s ms",
                        MINIMAL_CHECKPOINT_TIME));
    }
    configuration.set(
            ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL_DURING_BACKLOG,
            Duration.ofMillis(checkpointInterval));
}
class CheckpointConfig implements java.io.Serializable { private static final long serialVersionUID = -750378776078908147L; private static final Logger LOG = LoggerFactory.getLogger(CheckpointConfig.class); @Deprecated /** * The default checkpoint mode: exactly once. * * @deprecated This field is no longer used. Please use {@link * ExecutionCheckpointingOptions.CHECKPOINTING_MODE} instead. */ public static final CheckpointingMode DEFAULT_MODE = ExecutionCheckpointingOptions.CHECKPOINTING_MODE.defaultValue(); /** * The default timeout of a checkpoint attempt: 10 minutes. * * @deprecated This field is no longer used. Please use {@link * ExecutionCheckpointingOptions.CHECKPOINTING_TIMEOUT} instead. */ @Deprecated public static final long DEFAULT_TIMEOUT = ExecutionCheckpointingOptions.CHECKPOINTING_TIMEOUT.defaultValue().toMillis(); /** * The default minimum pause to be made between checkpoints: none. * * @deprecated This field is no longer used. Please use {@link * ExecutionCheckpointingOptions.MIN_PAUSE_BETWEEN_CHECKPOINTS} instead. */ @Deprecated public static final long DEFAULT_MIN_PAUSE_BETWEEN_CHECKPOINTS = ExecutionCheckpointingOptions.MIN_PAUSE_BETWEEN_CHECKPOINTS.defaultValue().toMillis(); /** * The default limit of concurrently happening checkpoints: one. * * @deprecated This field is no longer used. Please use {@link * ExecutionCheckpointingOptions.MAX_CONCURRENT_CHECKPOINTS} instead. */ @Deprecated public static final int DEFAULT_MAX_CONCURRENT_CHECKPOINTS = ExecutionCheckpointingOptions.MAX_CONCURRENT_CHECKPOINTS.defaultValue(); /** @deprecated This field is no longer used. */ @Deprecated public static final int UNDEFINED_TOLERABLE_CHECKPOINT_NUMBER = -1; /** * Default id of checkpoint for which in-flight data should be ignored on recovery. * * @deprecated This field is no longer used. Please use {@link * ExecutionCheckpointingOptions.CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA} instead. 
*/ @Deprecated public static final int DEFAULT_CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA = ExecutionCheckpointingOptions.CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA .defaultValue() .intValue(); /** * In the long run, this field should be somehow merged with the {@link Configuration} from * {@link StreamExecutionEnvironment}. */ private final Configuration configuration; /** * The checkpoint storage for this application. This field is marked as transient because it may * contain user-code. * * @deprecated this should be moved somehow to {@link */ @Deprecated private transient CheckpointStorage storage; /** * Creates a deep copy of the provided {@link CheckpointConfig}. * * @param checkpointConfig the config to copy. */ public CheckpointConfig(final CheckpointConfig checkpointConfig) { checkNotNull(checkpointConfig); this.configuration = new Configuration(checkpointConfig.configuration); this.storage = checkpointConfig.getCheckpointStorage(); } public CheckpointConfig() { configuration = new Configuration(); } /** Disables checkpointing. */ public void disableCheckpointing() { configuration.removeConfig(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL); } /** * Checks whether checkpointing is enabled. * * @return True if checkpointing is enables, false otherwise. */ public boolean isCheckpointingEnabled() { return getCheckpointInterval() > 0; } /** * Gets the checkpointing mode (exactly-once vs. at-least-once). * * @return The checkpointing mode. */ public CheckpointingMode getCheckpointingMode() { return configuration.get(ExecutionCheckpointingOptions.CHECKPOINTING_MODE); } /** * Sets the checkpointing mode (exactly-once vs. at-least-once). * * @param checkpointingMode The checkpointing mode. */ public void setCheckpointingMode(CheckpointingMode checkpointingMode) { configuration.set(ExecutionCheckpointingOptions.CHECKPOINTING_MODE, checkpointingMode); } /** * Gets the interval in which checkpoints are periodically scheduled. 
* * <p>This setting defines the base interval. Checkpoint triggering may be delayed by the * settings {@link * * @return The checkpoint interval, in milliseconds. */ public long getCheckpointInterval() { return configuration .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL) .map(Duration::toMillis) .orElse(-1L); } /** * Sets the interval in which checkpoints are periodically scheduled. * * <p>This setting defines the base interval. Checkpoint triggering may be delayed by the * settings {@link * * * @param checkpointInterval The checkpoint interval, in milliseconds. */ public void setCheckpointInterval(long checkpointInterval) { if (checkpointInterval < MINIMAL_CHECKPOINT_TIME) { throw new IllegalArgumentException( String.format( "Checkpoint interval must be larger than or equal to %s ms", MINIMAL_CHECKPOINT_TIME)); } configuration.set( ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL, Duration.ofMillis(checkpointInterval)); } public long getCheckpointIntervalDuringBacklog() { return configuration .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL_DURING_BACKLOG) .map(Duration::toMillis) .orElseGet(this::getCheckpointInterval); } /** * Gets the maximum time that a checkpoint may take before being discarded. * * @return The checkpoint timeout, in milliseconds. */ public long getCheckpointTimeout() { return configuration.get(ExecutionCheckpointingOptions.CHECKPOINTING_TIMEOUT).toMillis(); } /** * Sets the maximum time that a checkpoint may take before being discarded. * * @param checkpointTimeout The checkpoint timeout, in milliseconds. 
*/ public void setCheckpointTimeout(long checkpointTimeout) { if (checkpointTimeout < MINIMAL_CHECKPOINT_TIME) { throw new IllegalArgumentException( String.format( "Checkpoint timeout must be larger than or equal to %s ms", MINIMAL_CHECKPOINT_TIME)); } configuration.set( ExecutionCheckpointingOptions.CHECKPOINTING_TIMEOUT, Duration.ofMillis(checkpointTimeout)); } /** * Gets the minimal pause between checkpointing attempts. This setting defines how soon the * checkpoint coordinator may trigger another checkpoint after it becomes possible to trigger * another checkpoint with respect to the maximum number of concurrent checkpoints (see {@link * * * @return The minimal pause before the next checkpoint is triggered. */ public long getMinPauseBetweenCheckpoints() { return configuration .get(ExecutionCheckpointingOptions.MIN_PAUSE_BETWEEN_CHECKPOINTS) .toMillis(); } /** * Sets the minimal pause between checkpointing attempts. This setting defines how soon the * checkpoint coordinator may trigger another checkpoint after it becomes possible to trigger * another checkpoint with respect to the maximum number of concurrent checkpoints (see {@link * * * <p>If the maximum number of concurrent checkpoints is set to one, this setting makes * effectively sure that a minimum amount of time passes where no checkpoint is in progress at * all. * * @param minPauseBetweenCheckpoints The minimal pause before the next checkpoint is triggered. */ public void setMinPauseBetweenCheckpoints(long minPauseBetweenCheckpoints) { if (minPauseBetweenCheckpoints < 0) { throw new IllegalArgumentException("Pause value must be zero or positive"); } configuration.set( ExecutionCheckpointingOptions.MIN_PAUSE_BETWEEN_CHECKPOINTS, Duration.ofMillis(minPauseBetweenCheckpoints)); } /** * Gets the maximum number of checkpoint attempts that may be in progress at the same time. If * this value is <i>n</i>, then no checkpoints will be triggered while <i>n</i> checkpoint * attempts are currently in flight. 
For the next checkpoint to be triggered, one checkpoint * attempt would need to finish or expire. * * @return The maximum number of concurrent checkpoint attempts. */ public int getMaxConcurrentCheckpoints() { return configuration.get(ExecutionCheckpointingOptions.MAX_CONCURRENT_CHECKPOINTS); } /** * Sets the maximum number of checkpoint attempts that may be in progress at the same time. If * this value is <i>n</i>, then no checkpoints will be triggered while <i>n</i> checkpoint * attempts are currently in flight. For the next checkpoint to be triggered, one checkpoint * attempt would need to finish or expire. * * @param maxConcurrentCheckpoints The maximum number of concurrent checkpoint attempts. */ public void setMaxConcurrentCheckpoints(int maxConcurrentCheckpoints) { if (maxConcurrentCheckpoints < 1) { throw new IllegalArgumentException( "The maximum number of concurrent attempts must be at least one."); } configuration.set( ExecutionCheckpointingOptions.MAX_CONCURRENT_CHECKPOINTS, maxConcurrentCheckpoints); } /** * Checks whether checkpointing is forced, despite currently non-checkpointable iteration * feedback. * * @return True, if checkpointing is forced, false otherwise. * @deprecated This will be removed once iterations properly participate in checkpointing. */ @Deprecated @PublicEvolving public boolean isForceCheckpointing() { return configuration.get(ExecutionCheckpointingOptions.FORCE_CHECKPOINTING); } /** * Checks whether checkpointing is forced, despite currently non-checkpointable iteration * feedback. * * @param forceCheckpointing The flag to force checkpointing. * @deprecated This will be removed once iterations properly participate in checkpointing. */ @Deprecated @PublicEvolving public void setForceCheckpointing(boolean forceCheckpointing) { configuration.set(ExecutionCheckpointingOptions.FORCE_CHECKPOINTING, forceCheckpointing); } /** * Checks whether unaligned checkpoints are forced, despite iteration feedback. 
* * @return True, if unaligned checkpoints are forced, false otherwise. */ @PublicEvolving public boolean isForceUnalignedCheckpoints() { return configuration.get(ExecutionCheckpointingOptions.FORCE_UNALIGNED); } /** * Checks whether unaligned checkpoints are forced, despite currently non-checkpointable * iteration feedback or custom partitioners. * * @param forceUnalignedCheckpoints The flag to force unaligned checkpoints. */ @PublicEvolving public void setForceUnalignedCheckpoints(boolean forceUnalignedCheckpoints) { configuration.set(ExecutionCheckpointingOptions.FORCE_UNALIGNED, forceUnalignedCheckpoints); } /** * This determines the behaviour when meeting checkpoint errors. If this returns true, which is * equivalent to get tolerableCheckpointFailureNumber as zero, job manager would fail the whole * job once it received a decline checkpoint message. If this returns false, which is equivalent * to get tolerableCheckpointFailureNumber as the maximum of integer (means unlimited), job * manager would not fail the whole job no matter how many declined checkpoints it received. * * @deprecated Use {@link */ @Deprecated public boolean isFailOnCheckpointingErrors() { return getTolerableCheckpointFailureNumber() == 0; } /** * Sets the expected behaviour for tasks in case that they encounter an error when * checkpointing. If this is set as true, which is equivalent to set * tolerableCheckpointFailureNumber as zero, job manager would fail the whole job once it * received a decline checkpoint message. If this is set as false, which is equivalent to set * tolerableCheckpointFailureNumber as the maximum of integer (means unlimited), job manager * would not fail the whole job no matter how many declined checkpoints it received. * * <p>{@link * method if they have conflicts. 
* * @deprecated Use {@link */ @Deprecated public void setFailOnCheckpointingErrors(boolean failOnCheckpointingErrors) { if (configuration .getOptional(ExecutionCheckpointingOptions.TOLERABLE_FAILURE_NUMBER) .isPresent()) { LOG.warn( "Since ExecutionCheckpointingOptions.TOLERABLE_FAILURE_NUMBER has been configured as {}, deprecated " + " + " + "determine your expected behaviour when checkpoint errors on task side.", getTolerableCheckpointFailureNumber()); return; } if (failOnCheckpointingErrors) { setTolerableCheckpointFailureNumber(0); } else { setTolerableCheckpointFailureNumber(UNLIMITED_TOLERABLE_FAILURE_NUMBER); } } /** * Get the defined number of consecutive checkpoint failures that will be tolerated, before the * whole job is failed over. * * <p>If the {@link ExecutionCheckpointingOptions * configured, this method would return 0 which means the checkpoint failure manager would not * tolerate any declined checkpoint failure. */ public int getTolerableCheckpointFailureNumber() { return configuration .getOptional(ExecutionCheckpointingOptions.TOLERABLE_FAILURE_NUMBER) .orElse(0); } /** * This defines how many consecutive checkpoint failures will be tolerated, before the whole job * is failed over. The default value is `0`, which means no checkpoint failures will be * tolerated, and the job will fail on first reported checkpoint failure. */ public void setTolerableCheckpointFailureNumber(int tolerableCheckpointFailureNumber) { if (tolerableCheckpointFailureNumber < 0) { throw new IllegalArgumentException( "The tolerable failure checkpoint number must be non-negative."); } configuration.set( ExecutionCheckpointingOptions.TOLERABLE_FAILURE_NUMBER, tolerableCheckpointFailureNumber); } /** * Sets the mode for externalized checkpoint clean-up. 
Externalized checkpoints will be enabled * automatically unless the mode is set to {@link * ExternalizedCheckpointCleanup * * <p>Externalized checkpoints write their meta data out to persistent storage and are * <strong>not</strong> automatically cleaned up when the owning job fails or is suspended * (terminating with job status {@link JobStatus * this case, you have to manually clean up the checkpoint state, both the meta data and actual * program state. * * <p>The {@link ExternalizedCheckpointCleanup} mode defines how an externalized checkpoint * should be cleaned up on job cancellation. If you choose to retain externalized checkpoints on * cancellation you have to handle checkpoint clean-up manually when you cancel the job as well * (terminating with job status {@link JobStatus * * <p>The target directory for externalized checkpoints is configured via {@link * CheckpointingOptions * * @param cleanupMode Externalized checkpoint clean-up behaviour. */ @PublicEvolving public void setExternalizedCheckpointCleanup(ExternalizedCheckpointCleanup cleanupMode) { configuration.set(ExecutionCheckpointingOptions.EXTERNALIZED_CHECKPOINT, cleanupMode); } /** * Sets the mode for externalized checkpoint clean-up. Externalized checkpoints will be enabled * automatically unless the mode is set to {@link * ExternalizedCheckpointCleanup * * <p>Externalized checkpoints write their meta data out to persistent storage and are * <strong>not</strong> automatically cleaned up when the owning job fails or is suspended * (terminating with job status {@link JobStatus * this case, you have to manually clean up the checkpoint state, both the meta data and actual * program state. * * <p>The {@link ExternalizedCheckpointCleanup} mode defines how an externalized checkpoint * should be cleaned up on job cancellation. 
If you choose to retain externalized checkpoints on * cancellation you have to handle checkpoint clean-up manually when you cancel the job as well * (terminating with job status {@link JobStatus * * <p>The target directory for externalized checkpoints is configured via {@link * CheckpointingOptions * * @param cleanupMode Externalized checkpoint clean-up behaviour. * @deprecated use {@link * instead. */ @PublicEvolving @Deprecated public void enableExternalizedCheckpoints(ExternalizedCheckpointCleanup cleanupMode) { setExternalizedCheckpointCleanup(cleanupMode); } /** * Returns whether checkpoints should be persisted externally. * * @return <code>true</code> if checkpoints should be externalized. */ @PublicEvolving public boolean isExternalizedCheckpointsEnabled() { return getExternalizedCheckpointCleanup() != ExternalizedCheckpointCleanup.NO_EXTERNALIZED_CHECKPOINTS; } /** * Enables unaligned checkpoints, which greatly reduce checkpointing times under backpressure. * * <p>Unaligned checkpoints contain data stored in buffers as part of the checkpoint state, * which allows checkpoint barriers to overtake these buffers. Thus, the checkpoint duration * becomes independent of the current throughput as checkpoint barriers are effectively not * embedded into the stream of data anymore. * * <p>Unaligned checkpoints can only be enabled if {@link * ExecutionCheckpointingOptions * * @param enabled Flag to indicate whether unaligned are enabled. */ @PublicEvolving public void enableUnalignedCheckpoints(boolean enabled) { configuration.set(ExecutionCheckpointingOptions.ENABLE_UNALIGNED, enabled); } /** * Enables unaligned checkpoints, which greatly reduce checkpointing times under backpressure. * * <p>Unaligned checkpoints contain data stored in buffers as part of the checkpoint state, * which allows checkpoint barriers to overtake these buffers. 
Thus, the checkpoint duration * becomes independent of the current throughput as checkpoint barriers are effectively not * embedded into the stream of data anymore. * * <p>Unaligned checkpoints can only be enabled if {@link * ExecutionCheckpointingOptions */ @PublicEvolving public void enableUnalignedCheckpoints() { enableUnalignedCheckpoints(true); } /** * Returns whether unaligned checkpoints are enabled. * * @return <code>true</code> if unaligned checkpoints are enabled. */ @PublicEvolving public boolean isUnalignedCheckpointsEnabled() { return configuration.get(ExecutionCheckpointingOptions.ENABLE_UNALIGNED); } /** * Only relevant if {@link * * <p>If {@link ExecutionCheckpointingOptions * <code>0</code>, checkpoints will always start unaligned. * * <p>If {@link ExecutionCheckpointingOptions * <code>0</code>, checkpoints will start aligned. If during checkpointing, checkpoint start * delay exceeds this {@link ExecutionCheckpointingOptions * alignment will timeout and checkpoint will start working as unaligned checkpoint. * * @deprecated Use {@link */ @Deprecated @PublicEvolving public void setAlignmentTimeout(Duration alignmentTimeout) { setAlignedCheckpointTimeout(alignmentTimeout); } /** * @return value of alignment timeout, as configured via {@link * or {@link ExecutionCheckpointingOptions * @deprecated User {@link */ @Deprecated @PublicEvolving public Duration getAlignmentTimeout() { return getAlignedCheckpointTimeout(); } /** * @return value of alignment timeout, as configured via {@link * * ExecutionCheckpointingOptions */ @PublicEvolving public Duration getAlignedCheckpointTimeout() { return configuration.get(ExecutionCheckpointingOptions.ALIGNED_CHECKPOINT_TIMEOUT); } /** * Only relevant if {@link ExecutionCheckpointingOptions.ENABLE_UNALIGNED} is enabled. * * <p>If {@link ExecutionCheckpointingOptions * <code>0</code>, checkpoints will * * <p>always start unaligned. 
* * <p>If {@link ExecutionCheckpointingOptions * <code>0</code>, checkpoints will start aligned. If during checkpointing, checkpoint start * delay exceeds this {@link ExecutionCheckpointingOptions * alignment will timeout and checkpoint will start working as unaligned checkpoint. */ @PublicEvolving public void setAlignedCheckpointTimeout(Duration alignedCheckpointTimeout) { configuration.set( ExecutionCheckpointingOptions.ALIGNED_CHECKPOINT_TIMEOUT, alignedCheckpointTimeout); } /** * @return the number of subtasks to share the same channel state file, as configured via {@link * * ExecutionCheckpointingOptions */ @PublicEvolving public int getMaxSubtasksPerChannelStateFile() { return configuration.get( ExecutionCheckpointingOptions.UNALIGNED_MAX_SUBTASKS_PER_CHANNEL_STATE_FILE); } /** * The number of subtasks to share the same channel state file. If {@link * ExecutionCheckpointingOptions * to <code>1</code>, each subtask will create a new channel state file. */ @PublicEvolving public void setMaxSubtasksPerChannelStateFile(int maxSubtasksPerChannelStateFile) { configuration.set( ExecutionCheckpointingOptions.UNALIGNED_MAX_SUBTASKS_PER_CHANNEL_STATE_FILE, maxSubtasksPerChannelStateFile); } /** * Returns whether approximate local recovery is enabled. * * @return <code>true</code> if approximate local recovery is enabled. */ @Experimental public boolean isApproximateLocalRecoveryEnabled() { return configuration.get(ExecutionCheckpointingOptions.APPROXIMATE_LOCAL_RECOVERY); } /** * Enables the approximate local recovery mode. * * <p>In this recovery mode, when a task fails, the entire downstream of the tasks (including * the failed task) restart. * * <p>Notice that 1. Approximate recovery may lead to data loss. The amount of data which leads * the failed task from the state of the last completed checkpoint to the state when the task * fails is lost. 2. In the next version, we will support restarting the set of failed set of * tasks only. 
In this version, we only support downstream restarts when a task fails. 3. It is * only an internal feature for now. * * @param enabled Flag to indicate whether approximate local recovery is enabled . */ @Experimental public void enableApproximateLocalRecovery(boolean enabled) { configuration.set(ExecutionCheckpointingOptions.APPROXIMATE_LOCAL_RECOVERY, enabled); } /** * Returns the cleanup behaviour for externalized checkpoints. * * @return The cleanup behaviour for externalized checkpoints or <code>null</code> if none is * configured. */ @PublicEvolving public ExternalizedCheckpointCleanup getExternalizedCheckpointCleanup() { return configuration.get(ExecutionCheckpointingOptions.EXTERNALIZED_CHECKPOINT); } /** * CheckpointStorage defines how {@link StateBackend}'s checkpoint their state for fault * tolerance in streaming applications. Various implementations store their checkpoints in * different fashions and have different requirements and availability guarantees. * * <p>For example, {@link org.apache.flink.runtime.state.storage.JobManagerCheckpointStorage * JobManagerCheckpointStorage} stores checkpoints in the memory of the JobManager. It is * lightweight and without additional dependencies but is not highly available and only supports * small state sizes. This checkpoint storage policy is convenient for local testing and * development. * * <p>{@link org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage * FileSystemCheckpointStorage} stores checkpoints in a filesystem. For systems like HDFS, NFS * Drives, S3, and GCS, this storage policy supports large state size, in the magnitude of many * terabytes while providing a highly available foundation for stateful applications. This * checkpoint storage policy is recommended for most production deployments. * * @param storage The checkpoint storage policy. 
*/ @PublicEvolving public void setCheckpointStorage(CheckpointStorage storage) { Preconditions.checkNotNull(storage, "Checkpoint storage must not be null"); this.storage = storage; } /** * Configures the application to write out checkpoint snapshots to the configured directory. See * {@link FileSystemCheckpointStorage} for more details on checkpointing to a file system. * * @param checkpointDirectory The path to write checkpoint metadata to. * @see */ @PublicEvolving public void setCheckpointStorage(String checkpointDirectory) { Preconditions.checkNotNull(checkpointDirectory, "Checkpoint directory must not be null"); this.storage = new FileSystemCheckpointStorage(checkpointDirectory); } /** * Configures the application to write out checkpoint snapshots to the configured directory. See * {@link FileSystemCheckpointStorage} for more details on checkpointing to a file system. * * @param checkpointDirectory The path to write checkpoint metadata to. * @see */ @PublicEvolving public void setCheckpointStorage(URI checkpointDirectory) { Preconditions.checkNotNull(checkpointDirectory, "Checkpoint directory must not be null"); this.storage = new FileSystemCheckpointStorage(checkpointDirectory); } /** * Configures the application to write out checkpoint snapshots to the configured directory. See * {@link FileSystemCheckpointStorage} for more details on checkpointing to a file system. * * @param checkpointDirectory The path to write checkpoint metadata to. * @see */ @PublicEvolving public void setCheckpointStorage(Path checkpointDirectory) { Preconditions.checkNotNull(checkpointDirectory, "Checkpoint directory must not be null"); this.storage = new FileSystemCheckpointStorage(checkpointDirectory); } /** * @return The {@link CheckpointStorage} that has been configured for the job. Or {@code null} * if none has been set. 
* @see */ @Nullable @PublicEvolving public CheckpointStorage getCheckpointStorage() { return this.storage; } /** * Setup the checkpoint id for which the in-flight data will be ignored for all operators in * case of the recovery from this checkpoint. * * @param checkpointIdOfIgnoredInFlightData Checkpoint id for which in-flight data should be * ignored. * @see */ @PublicEvolving public void setCheckpointIdOfIgnoredInFlightData(long checkpointIdOfIgnoredInFlightData) { configuration.set( ExecutionCheckpointingOptions.CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA, checkpointIdOfIgnoredInFlightData); } /** * @return Checkpoint id for which in-flight data should be ignored. * @see */ @PublicEvolving public long getCheckpointIdOfIgnoredInFlightData() { return configuration.get( ExecutionCheckpointingOptions.CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA); } /** Cleanup behaviour for externalized checkpoints when the job is cancelled. */ @PublicEvolving public enum ExternalizedCheckpointCleanup implements DescribedEnum { /** * Delete externalized checkpoints on job cancellation. * * <p>All checkpoint state will be deleted when you cancel the owning job, both the meta * data and actual program state. Therefore, you cannot resume from externalized checkpoints * after the job has been cancelled. * * <p>Note that checkpoint state is always kept if the job terminates with state {@link * JobStatus */ DELETE_ON_CANCELLATION( text( "Checkpoint state is only kept when the owning job fails. It is deleted if " + "the job is cancelled.")), /** * Retain externalized checkpoints on job cancellation. * * <p>All checkpoint state is kept when you cancel the owning job. You have to manually * delete both the checkpoint meta data and actual program state after cancelling the job. 
* * <p>Note that checkpoint state is always kept if the job terminates with state {@link * JobStatus */ RETAIN_ON_CANCELLATION( text("Checkpoint state is kept when the owning job is cancelled or fails.")), /** Externalized checkpoints are disabled completely. */ NO_EXTERNALIZED_CHECKPOINTS(text("Externalized checkpoints are disabled.")); private final InlineElement description; ExternalizedCheckpointCleanup(InlineElement description) { this.description = description; } /** * Returns whether persistent checkpoints shall be discarded on cancellation of the job. * * @return <code>true</code> if persistent checkpoints shall be discarded on cancellation of * the job. */ public boolean deleteOnCancellation() { return this == DELETE_ON_CANCELLATION; } @Override @Internal public InlineElement getDescription() { return description; } } /** * Sets all relevant options contained in the {@link ReadableConfig} such as e.g. {@link * ExecutionCheckpointingOptions * * <p>It will change the value of a setting only if a corresponding option was set in the {@code * configuration}. If a key is not present, the current value of a field will remain untouched. 
     * @param configuration a configuration to read the values from
     */
    public void configure(ReadableConfig configuration) {
        // Each option is transferred only when explicitly present in the given
        // configuration; absent keys leave the current value of this config untouched.
        configuration
                .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_MODE)
                .ifPresent(this::setCheckpointingMode);
        configuration
                .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL)
                .ifPresent(i -> this.setCheckpointInterval(i.toMillis()));
        configuration
                .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL_DURING_BACKLOG)
                .ifPresent(i -> this.setCheckpointIntervalDuringBacklog(i.toMillis()));
        configuration
                .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_TIMEOUT)
                .ifPresent(t -> this.setCheckpointTimeout(t.toMillis()));
        configuration
                .getOptional(ExecutionCheckpointingOptions.MAX_CONCURRENT_CHECKPOINTS)
                .ifPresent(this::setMaxConcurrentCheckpoints);
        configuration
                .getOptional(ExecutionCheckpointingOptions.MIN_PAUSE_BETWEEN_CHECKPOINTS)
                .ifPresent(m -> this.setMinPauseBetweenCheckpoints(m.toMillis()));
        configuration
                .getOptional(ExecutionCheckpointingOptions.TOLERABLE_FAILURE_NUMBER)
                .ifPresent(this::setTolerableCheckpointFailureNumber);
        configuration
                .getOptional(ExecutionCheckpointingOptions.EXTERNALIZED_CHECKPOINT)
                .ifPresent(this::setExternalizedCheckpointCleanup);
        configuration
                .getOptional(ExecutionCheckpointingOptions.ENABLE_UNALIGNED)
                .ifPresent(this::enableUnalignedCheckpoints);
        configuration
                .getOptional(ExecutionCheckpointingOptions.CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA)
                .ifPresent(this::setCheckpointIdOfIgnoredInFlightData);
        configuration
                .getOptional(ExecutionCheckpointingOptions.ALIGNED_CHECKPOINT_TIMEOUT)
                .ifPresent(this::setAlignedCheckpointTimeout);
        configuration
                .getOptional(
                        ExecutionCheckpointingOptions.UNALIGNED_MAX_SUBTASKS_PER_CHANNEL_STATE_FILE)
                .ifPresent(this::setMaxSubtasksPerChannelStateFile);
        configuration
                .getOptional(ExecutionCheckpointingOptions.FORCE_UNALIGNED)
                .ifPresent(this::setForceUnalignedCheckpoints);
        configuration
                .getOptional(CheckpointingOptions.CHECKPOINTS_DIRECTORY)
                .ifPresent(this::setCheckpointStorage);
    }

    /**
     * @return A copy of the internal {@link Configuration}. NOTE(review): this presumably does not
     *     reflect settings stored as plain java fields in {@link CheckpointConfig} (for example the
     *     checkpoint storage field) — confirm against callers.
     */
    @Internal
    public Configuration toConfiguration() {
        return new Configuration(configuration);
    }
}
class CheckpointConfig implements java.io.Serializable {

    private static final long serialVersionUID = -750378776078908147L;

    private static final Logger LOG = LoggerFactory.getLogger(CheckpointConfig.class);

    /**
     * The default checkpoint mode: exactly once.
     *
     * @deprecated This field is no longer used. Please use {@link
     *     ExecutionCheckpointingOptions#CHECKPOINTING_MODE} instead.
     */
    @Deprecated
    public static final CheckpointingMode DEFAULT_MODE =
            ExecutionCheckpointingOptions.CHECKPOINTING_MODE.defaultValue();

    /**
     * The default timeout of a checkpoint attempt: 10 minutes.
     *
     * @deprecated This field is no longer used. Please use {@link
     *     ExecutionCheckpointingOptions#CHECKPOINTING_TIMEOUT} instead.
     */
    @Deprecated
    public static final long DEFAULT_TIMEOUT =
            ExecutionCheckpointingOptions.CHECKPOINTING_TIMEOUT.defaultValue().toMillis();

    /**
     * The default minimum pause to be made between checkpoints: none.
     *
     * @deprecated This field is no longer used. Please use {@link
     *     ExecutionCheckpointingOptions#MIN_PAUSE_BETWEEN_CHECKPOINTS} instead.
     */
    @Deprecated
    public static final long DEFAULT_MIN_PAUSE_BETWEEN_CHECKPOINTS =
            ExecutionCheckpointingOptions.MIN_PAUSE_BETWEEN_CHECKPOINTS.defaultValue().toMillis();

    /**
     * The default limit of concurrently happening checkpoints: one.
     *
     * @deprecated This field is no longer used. Please use {@link
     *     ExecutionCheckpointingOptions#MAX_CONCURRENT_CHECKPOINTS} instead.
     */
    @Deprecated
    public static final int DEFAULT_MAX_CONCURRENT_CHECKPOINTS =
            ExecutionCheckpointingOptions.MAX_CONCURRENT_CHECKPOINTS.defaultValue();

    /** @deprecated This field is no longer used. */
    @Deprecated public static final int UNDEFINED_TOLERABLE_CHECKPOINT_NUMBER = -1;

    /**
     * Default id of checkpoint for which in-flight data should be ignored on recovery.
     *
     * @deprecated This field is no longer used. Please use {@link
     *     ExecutionCheckpointingOptions#CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA} instead.
*/ @Deprecated public static final int DEFAULT_CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA = ExecutionCheckpointingOptions.CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA .defaultValue() .intValue(); /** * In the long run, this field should be somehow merged with the {@link Configuration} from * {@link StreamExecutionEnvironment}. */ private final Configuration configuration; /** * The checkpoint storage for this application. This field is marked as transient because it may * contain user-code. * * @deprecated this should be moved somehow to {@link */ @Deprecated private transient CheckpointStorage storage; /** * Creates a deep copy of the provided {@link CheckpointConfig}. * * @param checkpointConfig the config to copy. */ public CheckpointConfig(final CheckpointConfig checkpointConfig) { checkNotNull(checkpointConfig); this.configuration = new Configuration(checkpointConfig.configuration); this.storage = checkpointConfig.getCheckpointStorage(); } public CheckpointConfig() { configuration = new Configuration(); } /** Disables checkpointing. */ public void disableCheckpointing() { configuration.removeConfig(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL); } /** * Checks whether checkpointing is enabled. * * @return True if checkpointing is enables, false otherwise. */ public boolean isCheckpointingEnabled() { return getCheckpointInterval() > 0; } /** * Gets the checkpointing mode (exactly-once vs. at-least-once). * * @return The checkpointing mode. */ public CheckpointingMode getCheckpointingMode() { return configuration.get(ExecutionCheckpointingOptions.CHECKPOINTING_MODE); } /** * Sets the checkpointing mode (exactly-once vs. at-least-once). * * @param checkpointingMode The checkpointing mode. */ public void setCheckpointingMode(CheckpointingMode checkpointingMode) { configuration.set(ExecutionCheckpointingOptions.CHECKPOINTING_MODE, checkpointingMode); } /** * Gets the interval in which checkpoints are periodically scheduled. 
* * <p>This setting defines the base interval. Checkpoint triggering may be delayed by the * settings {@link * * @return The checkpoint interval, in milliseconds. */ public long getCheckpointInterval() { return configuration .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL) .map(Duration::toMillis) .orElse(-1L); } /** * Sets the interval in which checkpoints are periodically scheduled. * * <p>This setting defines the base interval. Checkpoint triggering may be delayed by the * settings {@link * * * @param checkpointInterval The checkpoint interval, in milliseconds. */ public void setCheckpointInterval(long checkpointInterval) { if (checkpointInterval < MINIMAL_CHECKPOINT_TIME) { throw new IllegalArgumentException( String.format( "Checkpoint interval must be larger than or equal to %s ms", MINIMAL_CHECKPOINT_TIME)); } configuration.set( ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL, Duration.ofMillis(checkpointInterval)); } /** * Gets the interval in which checkpoints are periodically scheduled during backlog. * * <p>This setting defines the base interval. Checkpoint triggering may be delayed by the * settings {@link * * <p>If not explicitly configured, checkpoint interval during backlog will be the same as that * in normal situation(see {@link * means that checkpoints would be disabled during backlog. * * @return The checkpoint interval, in milliseconds. 
*/ public long getCheckpointIntervalDuringBacklog() { long intervalDuringBacklog = configuration .getOptional( ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL_DURING_BACKLOG) .map(Duration::toMillis) .orElseGet(this::getCheckpointInterval); if (intervalDuringBacklog < MINIMAL_CHECKPOINT_TIME) { intervalDuringBacklog = CheckpointCoordinatorConfiguration.DISABLED_CHECKPOINT_INTERVAL; } long checkpointInterval = getCheckpointInterval(); if (checkpointInterval < MINIMAL_CHECKPOINT_TIME) { checkpointInterval = CheckpointCoordinatorConfiguration.DISABLED_CHECKPOINT_INTERVAL; } if (intervalDuringBacklog < checkpointInterval) { throw new IllegalArgumentException( "Checkpoint interval during backlog must " + "be larger than or equal to that in normal situation."); } return intervalDuringBacklog; } /** * Sets the interval in which checkpoints are periodically scheduled during backlog. * * <p>This setting defines the base interval. Checkpoint triggering may be delayed by the * settings {@link * * * <p>If not explicitly configured, checkpoint interval during backlog will be the same as that * in normal situation(see {@link * checkpoints would be disabled during backlog. * * @param checkpointInterval The checkpoint interval, in milliseconds. */ /** * Gets the maximum time that a checkpoint may take before being discarded. * * @return The checkpoint timeout, in milliseconds. */ public long getCheckpointTimeout() { return configuration.get(ExecutionCheckpointingOptions.CHECKPOINTING_TIMEOUT).toMillis(); } /** * Sets the maximum time that a checkpoint may take before being discarded. * * @param checkpointTimeout The checkpoint timeout, in milliseconds. 
*/ public void setCheckpointTimeout(long checkpointTimeout) { if (checkpointTimeout < MINIMAL_CHECKPOINT_TIME) { throw new IllegalArgumentException( String.format( "Checkpoint timeout must be larger than or equal to %s ms", MINIMAL_CHECKPOINT_TIME)); } configuration.set( ExecutionCheckpointingOptions.CHECKPOINTING_TIMEOUT, Duration.ofMillis(checkpointTimeout)); } /** * Gets the minimal pause between checkpointing attempts. This setting defines how soon the * checkpoint coordinator may trigger another checkpoint after it becomes possible to trigger * another checkpoint with respect to the maximum number of concurrent checkpoints (see {@link * * * @return The minimal pause before the next checkpoint is triggered. */ public long getMinPauseBetweenCheckpoints() { return configuration .get(ExecutionCheckpointingOptions.MIN_PAUSE_BETWEEN_CHECKPOINTS) .toMillis(); } /** * Sets the minimal pause between checkpointing attempts. This setting defines how soon the * checkpoint coordinator may trigger another checkpoint after it becomes possible to trigger * another checkpoint with respect to the maximum number of concurrent checkpoints (see {@link * * * <p>If the maximum number of concurrent checkpoints is set to one, this setting makes * effectively sure that a minimum amount of time passes where no checkpoint is in progress at * all. * * @param minPauseBetweenCheckpoints The minimal pause before the next checkpoint is triggered. */ public void setMinPauseBetweenCheckpoints(long minPauseBetweenCheckpoints) { if (minPauseBetweenCheckpoints < 0) { throw new IllegalArgumentException("Pause value must be zero or positive"); } configuration.set( ExecutionCheckpointingOptions.MIN_PAUSE_BETWEEN_CHECKPOINTS, Duration.ofMillis(minPauseBetweenCheckpoints)); } /** * Gets the maximum number of checkpoint attempts that may be in progress at the same time. If * this value is <i>n</i>, then no checkpoints will be triggered while <i>n</i> checkpoint * attempts are currently in flight. 
For the next checkpoint to be triggered, one checkpoint * attempt would need to finish or expire. * * @return The maximum number of concurrent checkpoint attempts. */ public int getMaxConcurrentCheckpoints() { return configuration.get(ExecutionCheckpointingOptions.MAX_CONCURRENT_CHECKPOINTS); } /** * Sets the maximum number of checkpoint attempts that may be in progress at the same time. If * this value is <i>n</i>, then no checkpoints will be triggered while <i>n</i> checkpoint * attempts are currently in flight. For the next checkpoint to be triggered, one checkpoint * attempt would need to finish or expire. * * @param maxConcurrentCheckpoints The maximum number of concurrent checkpoint attempts. */ public void setMaxConcurrentCheckpoints(int maxConcurrentCheckpoints) { if (maxConcurrentCheckpoints < 1) { throw new IllegalArgumentException( "The maximum number of concurrent attempts must be at least one."); } configuration.set( ExecutionCheckpointingOptions.MAX_CONCURRENT_CHECKPOINTS, maxConcurrentCheckpoints); } /** * Checks whether checkpointing is forced, despite currently non-checkpointable iteration * feedback. * * @return True, if checkpointing is forced, false otherwise. * @deprecated This will be removed once iterations properly participate in checkpointing. */ @Deprecated @PublicEvolving public boolean isForceCheckpointing() { return configuration.get(ExecutionCheckpointingOptions.FORCE_CHECKPOINTING); } /** * Checks whether checkpointing is forced, despite currently non-checkpointable iteration * feedback. * * @param forceCheckpointing The flag to force checkpointing. * @deprecated This will be removed once iterations properly participate in checkpointing. */ @Deprecated @PublicEvolving public void setForceCheckpointing(boolean forceCheckpointing) { configuration.set(ExecutionCheckpointingOptions.FORCE_CHECKPOINTING, forceCheckpointing); } /** * Checks whether unaligned checkpoints are forced, despite iteration feedback. 
* * @return True, if unaligned checkpoints are forced, false otherwise. */ @PublicEvolving public boolean isForceUnalignedCheckpoints() { return configuration.get(ExecutionCheckpointingOptions.FORCE_UNALIGNED); } /** * Checks whether unaligned checkpoints are forced, despite currently non-checkpointable * iteration feedback or custom partitioners. * * @param forceUnalignedCheckpoints The flag to force unaligned checkpoints. */ @PublicEvolving public void setForceUnalignedCheckpoints(boolean forceUnalignedCheckpoints) { configuration.set(ExecutionCheckpointingOptions.FORCE_UNALIGNED, forceUnalignedCheckpoints); } /** * This determines the behaviour when meeting checkpoint errors. If this returns true, which is * equivalent to get tolerableCheckpointFailureNumber as zero, job manager would fail the whole * job once it received a decline checkpoint message. If this returns false, which is equivalent * to get tolerableCheckpointFailureNumber as the maximum of integer (means unlimited), job * manager would not fail the whole job no matter how many declined checkpoints it received. * * @deprecated Use {@link */ @Deprecated public boolean isFailOnCheckpointingErrors() { return getTolerableCheckpointFailureNumber() == 0; } /** * Sets the expected behaviour for tasks in case that they encounter an error when * checkpointing. If this is set as true, which is equivalent to set * tolerableCheckpointFailureNumber as zero, job manager would fail the whole job once it * received a decline checkpoint message. If this is set as false, which is equivalent to set * tolerableCheckpointFailureNumber as the maximum of integer (means unlimited), job manager * would not fail the whole job no matter how many declined checkpoints it received. * * <p>{@link * method if they have conflicts. 
* * @deprecated Use {@link */ @Deprecated public void setFailOnCheckpointingErrors(boolean failOnCheckpointingErrors) { if (configuration .getOptional(ExecutionCheckpointingOptions.TOLERABLE_FAILURE_NUMBER) .isPresent()) { LOG.warn( "Since ExecutionCheckpointingOptions.TOLERABLE_FAILURE_NUMBER has been configured as {}, deprecated " + " + " + "determine your expected behaviour when checkpoint errors on task side.", getTolerableCheckpointFailureNumber()); return; } if (failOnCheckpointingErrors) { setTolerableCheckpointFailureNumber(0); } else { setTolerableCheckpointFailureNumber(UNLIMITED_TOLERABLE_FAILURE_NUMBER); } } /** * Get the defined number of consecutive checkpoint failures that will be tolerated, before the * whole job is failed over. * * <p>If the {@link ExecutionCheckpointingOptions * configured, this method would return 0 which means the checkpoint failure manager would not * tolerate any declined checkpoint failure. */ public int getTolerableCheckpointFailureNumber() { return configuration .getOptional(ExecutionCheckpointingOptions.TOLERABLE_FAILURE_NUMBER) .orElse(0); } /** * This defines how many consecutive checkpoint failures will be tolerated, before the whole job * is failed over. The default value is `0`, which means no checkpoint failures will be * tolerated, and the job will fail on first reported checkpoint failure. */ public void setTolerableCheckpointFailureNumber(int tolerableCheckpointFailureNumber) { if (tolerableCheckpointFailureNumber < 0) { throw new IllegalArgumentException( "The tolerable failure checkpoint number must be non-negative."); } configuration.set( ExecutionCheckpointingOptions.TOLERABLE_FAILURE_NUMBER, tolerableCheckpointFailureNumber); } /** * Sets the mode for externalized checkpoint clean-up. 
Externalized checkpoints will be enabled * automatically unless the mode is set to {@link * ExternalizedCheckpointCleanup * * <p>Externalized checkpoints write their meta data out to persistent storage and are * <strong>not</strong> automatically cleaned up when the owning job fails or is suspended * (terminating with job status {@link JobStatus * this case, you have to manually clean up the checkpoint state, both the meta data and actual * program state. * * <p>The {@link ExternalizedCheckpointCleanup} mode defines how an externalized checkpoint * should be cleaned up on job cancellation. If you choose to retain externalized checkpoints on * cancellation you have to handle checkpoint clean-up manually when you cancel the job as well * (terminating with job status {@link JobStatus * * <p>The target directory for externalized checkpoints is configured via {@link * CheckpointingOptions * * @param cleanupMode Externalized checkpoint clean-up behaviour. */ @PublicEvolving public void setExternalizedCheckpointCleanup(ExternalizedCheckpointCleanup cleanupMode) { configuration.set(ExecutionCheckpointingOptions.EXTERNALIZED_CHECKPOINT, cleanupMode); } /** * Sets the mode for externalized checkpoint clean-up. Externalized checkpoints will be enabled * automatically unless the mode is set to {@link * ExternalizedCheckpointCleanup * * <p>Externalized checkpoints write their meta data out to persistent storage and are * <strong>not</strong> automatically cleaned up when the owning job fails or is suspended * (terminating with job status {@link JobStatus * this case, you have to manually clean up the checkpoint state, both the meta data and actual * program state. * * <p>The {@link ExternalizedCheckpointCleanup} mode defines how an externalized checkpoint * should be cleaned up on job cancellation. 
If you choose to retain externalized checkpoints on * cancellation you have to handle checkpoint clean-up manually when you cancel the job as well * (terminating with job status {@link JobStatus * * <p>The target directory for externalized checkpoints is configured via {@link * CheckpointingOptions * * @param cleanupMode Externalized checkpoint clean-up behaviour. * @deprecated use {@link * instead. */ @PublicEvolving @Deprecated public void enableExternalizedCheckpoints(ExternalizedCheckpointCleanup cleanupMode) { setExternalizedCheckpointCleanup(cleanupMode); } /** * Returns whether checkpoints should be persisted externally. * * @return <code>true</code> if checkpoints should be externalized. */ @PublicEvolving public boolean isExternalizedCheckpointsEnabled() { return getExternalizedCheckpointCleanup() != ExternalizedCheckpointCleanup.NO_EXTERNALIZED_CHECKPOINTS; } /** * Enables unaligned checkpoints, which greatly reduce checkpointing times under backpressure. * * <p>Unaligned checkpoints contain data stored in buffers as part of the checkpoint state, * which allows checkpoint barriers to overtake these buffers. Thus, the checkpoint duration * becomes independent of the current throughput as checkpoint barriers are effectively not * embedded into the stream of data anymore. * * <p>Unaligned checkpoints can only be enabled if {@link * ExecutionCheckpointingOptions * * @param enabled Flag to indicate whether unaligned are enabled. */ @PublicEvolving public void enableUnalignedCheckpoints(boolean enabled) { configuration.set(ExecutionCheckpointingOptions.ENABLE_UNALIGNED, enabled); } /** * Enables unaligned checkpoints, which greatly reduce checkpointing times under backpressure. * * <p>Unaligned checkpoints contain data stored in buffers as part of the checkpoint state, * which allows checkpoint barriers to overtake these buffers. 
Thus, the checkpoint duration * becomes independent of the current throughput as checkpoint barriers are effectively not * embedded into the stream of data anymore. * * <p>Unaligned checkpoints can only be enabled if {@link * ExecutionCheckpointingOptions */ @PublicEvolving public void enableUnalignedCheckpoints() { enableUnalignedCheckpoints(true); } /** * Returns whether unaligned checkpoints are enabled. * * @return <code>true</code> if unaligned checkpoints are enabled. */ @PublicEvolving public boolean isUnalignedCheckpointsEnabled() { return configuration.get(ExecutionCheckpointingOptions.ENABLE_UNALIGNED); } /** * Only relevant if {@link * * <p>If {@link ExecutionCheckpointingOptions * <code>0</code>, checkpoints will always start unaligned. * * <p>If {@link ExecutionCheckpointingOptions * <code>0</code>, checkpoints will start aligned. If during checkpointing, checkpoint start * delay exceeds this {@link ExecutionCheckpointingOptions * alignment will timeout and checkpoint will start working as unaligned checkpoint. * * @deprecated Use {@link */ @Deprecated @PublicEvolving public void setAlignmentTimeout(Duration alignmentTimeout) { setAlignedCheckpointTimeout(alignmentTimeout); } /** * @return value of alignment timeout, as configured via {@link * or {@link ExecutionCheckpointingOptions * @deprecated User {@link */ @Deprecated @PublicEvolving public Duration getAlignmentTimeout() { return getAlignedCheckpointTimeout(); } /** * @return value of alignment timeout, as configured via {@link * * ExecutionCheckpointingOptions */ @PublicEvolving public Duration getAlignedCheckpointTimeout() { return configuration.get(ExecutionCheckpointingOptions.ALIGNED_CHECKPOINT_TIMEOUT); } /** * Only relevant if {@link ExecutionCheckpointingOptions.ENABLE_UNALIGNED} is enabled. * * <p>If {@link ExecutionCheckpointingOptions * <code>0</code>, checkpoints will * * <p>always start unaligned. 
* * <p>If {@link ExecutionCheckpointingOptions * <code>0</code>, checkpoints will start aligned. If during checkpointing, checkpoint start * delay exceeds this {@link ExecutionCheckpointingOptions * alignment will timeout and checkpoint will start working as unaligned checkpoint. */ @PublicEvolving public void setAlignedCheckpointTimeout(Duration alignedCheckpointTimeout) { configuration.set( ExecutionCheckpointingOptions.ALIGNED_CHECKPOINT_TIMEOUT, alignedCheckpointTimeout); } /** * @return the number of subtasks to share the same channel state file, as configured via {@link * * ExecutionCheckpointingOptions */ @PublicEvolving public int getMaxSubtasksPerChannelStateFile() { return configuration.get( ExecutionCheckpointingOptions.UNALIGNED_MAX_SUBTASKS_PER_CHANNEL_STATE_FILE); } /** * The number of subtasks to share the same channel state file. If {@link * ExecutionCheckpointingOptions * to <code>1</code>, each subtask will create a new channel state file. */ @PublicEvolving public void setMaxSubtasksPerChannelStateFile(int maxSubtasksPerChannelStateFile) { configuration.set( ExecutionCheckpointingOptions.UNALIGNED_MAX_SUBTASKS_PER_CHANNEL_STATE_FILE, maxSubtasksPerChannelStateFile); } /** * Returns whether approximate local recovery is enabled. * * @return <code>true</code> if approximate local recovery is enabled. */ @Experimental public boolean isApproximateLocalRecoveryEnabled() { return configuration.get(ExecutionCheckpointingOptions.APPROXIMATE_LOCAL_RECOVERY); } /** * Enables the approximate local recovery mode. * * <p>In this recovery mode, when a task fails, the entire downstream of the tasks (including * the failed task) restart. * * <p>Notice that 1. Approximate recovery may lead to data loss. The amount of data which leads * the failed task from the state of the last completed checkpoint to the state when the task * fails is lost. 2. In the next version, we will support restarting the set of failed set of * tasks only. 
In this version, we only support downstream restarts when a task fails. 3. It is * only an internal feature for now. * * @param enabled Flag to indicate whether approximate local recovery is enabled . */ @Experimental public void enableApproximateLocalRecovery(boolean enabled) { configuration.set(ExecutionCheckpointingOptions.APPROXIMATE_LOCAL_RECOVERY, enabled); } /** * Returns the cleanup behaviour for externalized checkpoints. * * @return The cleanup behaviour for externalized checkpoints or <code>null</code> if none is * configured. */ @PublicEvolving public ExternalizedCheckpointCleanup getExternalizedCheckpointCleanup() { return configuration.get(ExecutionCheckpointingOptions.EXTERNALIZED_CHECKPOINT); } /** * CheckpointStorage defines how {@link StateBackend}'s checkpoint their state for fault * tolerance in streaming applications. Various implementations store their checkpoints in * different fashions and have different requirements and availability guarantees. * * <p>For example, {@link org.apache.flink.runtime.state.storage.JobManagerCheckpointStorage * JobManagerCheckpointStorage} stores checkpoints in the memory of the JobManager. It is * lightweight and without additional dependencies but is not highly available and only supports * small state sizes. This checkpoint storage policy is convenient for local testing and * development. * * <p>{@link org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage * FileSystemCheckpointStorage} stores checkpoints in a filesystem. For systems like HDFS, NFS * Drives, S3, and GCS, this storage policy supports large state size, in the magnitude of many * terabytes while providing a highly available foundation for stateful applications. This * checkpoint storage policy is recommended for most production deployments. * * @param storage The checkpoint storage policy. 
*/ @PublicEvolving public void setCheckpointStorage(CheckpointStorage storage) { Preconditions.checkNotNull(storage, "Checkpoint storage must not be null"); this.storage = storage; } /** * Configures the application to write out checkpoint snapshots to the configured directory. See * {@link FileSystemCheckpointStorage} for more details on checkpointing to a file system. * * @param checkpointDirectory The path to write checkpoint metadata to. * @see */ @PublicEvolving public void setCheckpointStorage(String checkpointDirectory) { Preconditions.checkNotNull(checkpointDirectory, "Checkpoint directory must not be null"); this.storage = new FileSystemCheckpointStorage(checkpointDirectory); } /** * Configures the application to write out checkpoint snapshots to the configured directory. See * {@link FileSystemCheckpointStorage} for more details on checkpointing to a file system. * * @param checkpointDirectory The path to write checkpoint metadata to. * @see */ @PublicEvolving public void setCheckpointStorage(URI checkpointDirectory) { Preconditions.checkNotNull(checkpointDirectory, "Checkpoint directory must not be null"); this.storage = new FileSystemCheckpointStorage(checkpointDirectory); } /** * Configures the application to write out checkpoint snapshots to the configured directory. See * {@link FileSystemCheckpointStorage} for more details on checkpointing to a file system. * * @param checkpointDirectory The path to write checkpoint metadata to. * @see */ @PublicEvolving public void setCheckpointStorage(Path checkpointDirectory) { Preconditions.checkNotNull(checkpointDirectory, "Checkpoint directory must not be null"); this.storage = new FileSystemCheckpointStorage(checkpointDirectory); } /** * @return The {@link CheckpointStorage} that has been configured for the job. Or {@code null} * if none has been set. 
* @see */ @Nullable @PublicEvolving public CheckpointStorage getCheckpointStorage() { return this.storage; } /** * Setup the checkpoint id for which the in-flight data will be ignored for all operators in * case of the recovery from this checkpoint. * * @param checkpointIdOfIgnoredInFlightData Checkpoint id for which in-flight data should be * ignored. * @see */ @PublicEvolving public void setCheckpointIdOfIgnoredInFlightData(long checkpointIdOfIgnoredInFlightData) { configuration.set( ExecutionCheckpointingOptions.CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA, checkpointIdOfIgnoredInFlightData); } /** * @return Checkpoint id for which in-flight data should be ignored. * @see */ @PublicEvolving public long getCheckpointIdOfIgnoredInFlightData() { return configuration.get( ExecutionCheckpointingOptions.CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA); } /** Cleanup behaviour for externalized checkpoints when the job is cancelled. */ @PublicEvolving public enum ExternalizedCheckpointCleanup implements DescribedEnum { /** * Delete externalized checkpoints on job cancellation. * * <p>All checkpoint state will be deleted when you cancel the owning job, both the meta * data and actual program state. Therefore, you cannot resume from externalized checkpoints * after the job has been cancelled. * * <p>Note that checkpoint state is always kept if the job terminates with state {@link * JobStatus */ DELETE_ON_CANCELLATION( text( "Checkpoint state is only kept when the owning job fails. It is deleted if " + "the job is cancelled.")), /** * Retain externalized checkpoints on job cancellation. * * <p>All checkpoint state is kept when you cancel the owning job. You have to manually * delete both the checkpoint meta data and actual program state after cancelling the job. 
* * <p>Note that checkpoint state is always kept if the job terminates with state {@link * JobStatus */ RETAIN_ON_CANCELLATION( text("Checkpoint state is kept when the owning job is cancelled or fails.")), /** Externalized checkpoints are disabled completely. */ NO_EXTERNALIZED_CHECKPOINTS(text("Externalized checkpoints are disabled.")); private final InlineElement description; ExternalizedCheckpointCleanup(InlineElement description) { this.description = description; } /** * Returns whether persistent checkpoints shall be discarded on cancellation of the job. * * @return <code>true</code> if persistent checkpoints shall be discarded on cancellation of * the job. */ public boolean deleteOnCancellation() { return this == DELETE_ON_CANCELLATION; } @Override @Internal public InlineElement getDescription() { return description; } } /** * Sets all relevant options contained in the {@link ReadableConfig} such as e.g. {@link * ExecutionCheckpointingOptions * * <p>It will change the value of a setting only if a corresponding option was set in the {@code * configuration}. If a key is not present, the current value of a field will remain untouched. 
* * @param configuration a configuration to read the values from */ public void configure(ReadableConfig configuration) { configuration .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_MODE) .ifPresent(this::setCheckpointingMode); configuration .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL) .ifPresent(i -> this.setCheckpointInterval(i.toMillis())); configuration .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL_DURING_BACKLOG) .ifPresent(i -> this.setCheckpointIntervalDuringBacklog(i.toMillis())); configuration .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_TIMEOUT) .ifPresent(t -> this.setCheckpointTimeout(t.toMillis())); configuration .getOptional(ExecutionCheckpointingOptions.MAX_CONCURRENT_CHECKPOINTS) .ifPresent(this::setMaxConcurrentCheckpoints); configuration .getOptional(ExecutionCheckpointingOptions.MIN_PAUSE_BETWEEN_CHECKPOINTS) .ifPresent(m -> this.setMinPauseBetweenCheckpoints(m.toMillis())); configuration .getOptional(ExecutionCheckpointingOptions.TOLERABLE_FAILURE_NUMBER) .ifPresent(this::setTolerableCheckpointFailureNumber); configuration .getOptional(ExecutionCheckpointingOptions.EXTERNALIZED_CHECKPOINT) .ifPresent(this::setExternalizedCheckpointCleanup); configuration .getOptional(ExecutionCheckpointingOptions.ENABLE_UNALIGNED) .ifPresent(this::enableUnalignedCheckpoints); configuration .getOptional(ExecutionCheckpointingOptions.CHECKPOINT_ID_OF_IGNORED_IN_FLIGHT_DATA) .ifPresent(this::setCheckpointIdOfIgnoredInFlightData); configuration .getOptional(ExecutionCheckpointingOptions.ALIGNED_CHECKPOINT_TIMEOUT) .ifPresent(this::setAlignedCheckpointTimeout); configuration .getOptional( ExecutionCheckpointingOptions.UNALIGNED_MAX_SUBTASKS_PER_CHANNEL_STATE_FILE) .ifPresent(this::setMaxSubtasksPerChannelStateFile); configuration .getOptional(ExecutionCheckpointingOptions.FORCE_UNALIGNED) .ifPresent(this::setForceUnalignedCheckpoints); configuration 
.getOptional(CheckpointingOptions.CHECKPOINTS_DIRECTORY) .ifPresent(this::setCheckpointStorage); } /** * @return A copy of internal {@link * stored as plain java fields in {@link CheckpointConfig}, for example {@link */ @Internal public Configuration toConfiguration() { return new Configuration(configuration); } }
I can confirm what @gsmet mentions from my part as well. We have instructed contributors multiple times to avoid them. Of course some can sneak in 😁
private void dispatch(RoutingContext routingContext, InputStream is, VertxOutput output) { try { Context ctx = vertx.getOrCreateContext(); HttpServerRequest request = routingContext.request(); ResteasyUriInfo uriInfo = VertxUtil.extractUriInfo(request, rootPath); ResteasyHttpHeaders headers = VertxUtil.extractHttpHeaders(request); HttpServerResponse response = request.response(); VertxHttpResponse vertxResponse = new VertxHttpResponse(request, dispatcher.getProviderFactory(), request.method(), allocator, output); Supplier<String> hostNameProvider = () -> { SocketAddress socketAddress = request.remoteAddress(); String host = socketAddress != null ? socketAddress.host() : null; return host; }; VertxHttpRequest vertxRequest = new VertxHttpRequest(ctx, headers, uriInfo, request.rawMethod(), hostNameProvider, dispatcher.getDispatcher(), vertxResponse); vertxRequest.setInputStream(is); try { ResteasyContext.pushContext(SecurityContext.class, new QuarkusResteasySecurityContext(request)); ResteasyContext.pushContext(RoutingContext.class, routingContext); dispatcher.service(ctx, request, response, vertxRequest, vertxResponse, true); } catch (Failure e1) { vertxResponse.setStatus(e1.getErrorCode()); if (e1.isLoggable()) { log.error(e1); } } catch (Throwable ex) { routingContext.fail(ex); } if (!vertxRequest.getAsyncContext().isSuspended()) { try { vertxResponse.finish(); } catch (IOException e) { log.error("Unexpected failure", e); } } } catch (Throwable t) { routingContext.fail(t); } }
Supplier<String> hostNameProvider = () -> {
private void dispatch(RoutingContext routingContext, InputStream is, VertxOutput output) { try { Context ctx = vertx.getOrCreateContext(); HttpServerRequest request = routingContext.request(); ResteasyUriInfo uriInfo = VertxUtil.extractUriInfo(request, rootPath); ResteasyHttpHeaders headers = VertxUtil.extractHttpHeaders(request); HttpServerResponse response = request.response(); VertxHttpResponse vertxResponse = new VertxHttpResponse(request, dispatcher.getProviderFactory(), request.method(), allocator, output); Supplier<String> hostNameProvider = () -> { SocketAddress socketAddress = request.remoteAddress(); String host = socketAddress != null ? socketAddress.host() : null; return host; }; VertxHttpRequest vertxRequest = new VertxHttpRequest(ctx, headers, uriInfo, request.rawMethod(), hostNameProvider, dispatcher.getDispatcher(), vertxResponse); vertxRequest.setInputStream(is); try { ResteasyContext.pushContext(SecurityContext.class, new QuarkusResteasySecurityContext(request)); ResteasyContext.pushContext(RoutingContext.class, routingContext); dispatcher.service(ctx, request, response, vertxRequest, vertxResponse, true); } catch (Failure e1) { vertxResponse.setStatus(e1.getErrorCode()); if (e1.isLoggable()) { log.error(e1); } } catch (Throwable ex) { routingContext.fail(ex); } if (!vertxRequest.getAsyncContext().isSuspended()) { try { vertxResponse.finish(); } catch (IOException e) { log.error("Unexpected failure", e); } } } catch (Throwable t) { routingContext.fail(t); } }
class VertxRequestHandler implements Handler<RoutingContext> { private static final Logger log = Logger.getLogger("io.quarkus.resteasy"); protected final Vertx vertx; protected final RequestDispatcher dispatcher; protected final String rootPath; protected final BufferAllocator allocator; protected final BeanContainer beanContainer; protected final CurrentIdentityAssociation association; public VertxRequestHandler(Vertx vertx, BeanContainer beanContainer, ResteasyDeployment deployment, String rootPath, BufferAllocator allocator) { this.vertx = vertx; this.beanContainer = beanContainer; this.dispatcher = new RequestDispatcher((SynchronousDispatcher) deployment.getDispatcher(), deployment.getProviderFactory(), null, Thread.currentThread().getContextClassLoader()); this.rootPath = rootPath; this.allocator = allocator; Instance<CurrentIdentityAssociation> association = CDI.current().select(CurrentIdentityAssociation.class); this.association = association.isResolvable() ? association.get() : null; } @Override public void handle(RoutingContext request) { VertxInputStream is; try { is = new VertxInputStream(request.request()); } catch (IOException e) { request.fail(e); return; } vertx.executeBlocking(event -> { dispatchRequestContext(request, is, new VertxBlockingOutput(request.request())); }, false, event -> { }); } private void dispatchRequestContext(RoutingContext request, InputStream is, VertxOutput output) { ManagedContext requestContext = beanContainer.requestContext(); requestContext.activate(); QuarkusHttpUser user = (QuarkusHttpUser) request.user(); if (user != null && association != null) { association.setIdentity(user.getSecurityIdentity()); } try { dispatch(request, is, output); } finally { requestContext.terminate(); } } }
class VertxRequestHandler implements Handler<RoutingContext> { private static final Logger log = Logger.getLogger("io.quarkus.resteasy"); protected final Vertx vertx; protected final RequestDispatcher dispatcher; protected final String rootPath; protected final BufferAllocator allocator; protected final BeanContainer beanContainer; protected final CurrentIdentityAssociation association; public VertxRequestHandler(Vertx vertx, BeanContainer beanContainer, ResteasyDeployment deployment, String rootPath, BufferAllocator allocator) { this.vertx = vertx; this.beanContainer = beanContainer; this.dispatcher = new RequestDispatcher((SynchronousDispatcher) deployment.getDispatcher(), deployment.getProviderFactory(), null, Thread.currentThread().getContextClassLoader()); this.rootPath = rootPath; this.allocator = allocator; Instance<CurrentIdentityAssociation> association = CDI.current().select(CurrentIdentityAssociation.class); this.association = association.isResolvable() ? association.get() : null; } @Override public void handle(RoutingContext request) { VertxInputStream is; try { is = new VertxInputStream(request.request()); } catch (IOException e) { request.fail(e); return; } vertx.executeBlocking(event -> { dispatchRequestContext(request, is, new VertxBlockingOutput(request.request())); }, false, event -> { }); } private void dispatchRequestContext(RoutingContext request, InputStream is, VertxOutput output) { ManagedContext requestContext = beanContainer.requestContext(); requestContext.activate(); QuarkusHttpUser user = (QuarkusHttpUser) request.user(); if (user != null && association != null) { association.setIdentity(user.getSecurityIdentity()); } try { dispatch(request, is, output); } finally { requestContext.terminate(); } } }
Our Uid format is internal and if we change the format then this will break (similarly if Quarkus replaces the underlying transaction engine). Testing uid format does not provide any positive benefit? However the test that verifies that the `@Transactional` annotation correctly starts a transaction is valid and is sufficient.
public void test() { RestAssured.when().get("/uid").then().assertThat().body(MatchesPattern.matchesPattern("[:0-9a-f]+")); RestAssured.when().get("/status").then().body(is("0")); }
RestAssured.when().get("/uid").then().assertThat().body(MatchesPattern.matchesPattern("[:0-9a-f]+"));
public void test() { RestAssured.when().get("/status").then().body(is("0")); }
class TransactionalTestCase { @Test }
class TransactionalTestCase { @Test }
do we need add this operationtype to doris-2.0 branch?
public void readFields(DataInput in) throws IOException { opCode = in.readShort(); boolean isRead = false; LOG.debug("get opcode: {}", opCode); switch (opCode) { case OperationType.OP_LOCAL_EOF: { data = null; isRead = true; break; } case OperationType.OP_SAVE_NEXTID: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_SAVE_TRANSACTION_ID: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_CREATE_DB: { data = Database.read(in); isRead = true; break; } case OperationType.OP_DROP_DB: { data = DropDbInfo.read(in); isRead = true; break; } case OperationType.OP_ALTER_DB: case OperationType.OP_RENAME_DB: { data = new DatabaseInfo(); ((DatabaseInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_CREATE_TABLE: { data = new CreateTableInfo(); ((CreateTableInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_DROP_TABLE: { data = DropInfo.read(in); isRead = true; break; } case OperationType.OP_ALTER_EXTERNAL_TABLE_SCHEMA: { data = RefreshExternalTableInfo.read(in); isRead = true; break; } case OperationType.OP_ADD_PARTITION: { data = new PartitionPersistInfo(); ((PartitionPersistInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_DROP_PARTITION: { data = DropPartitionInfo.read(in); isRead = true; break; } case OperationType.OP_MODIFY_PARTITION: { data = ModifyPartitionInfo.read(in); isRead = true; break; } case OperationType.OP_BATCH_MODIFY_PARTITION: { data = BatchModifyPartitionsInfo.read(in); isRead = true; break; } case OperationType.OP_ERASE_DB: case OperationType.OP_ERASE_TABLE: case OperationType.OP_ERASE_PARTITION: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_RECOVER_DB: case OperationType.OP_RECOVER_TABLE: case OperationType.OP_RECOVER_PARTITION: { data = RecoverInfo.read(in); isRead = true; break; } case OperationType.OP_DROP_ROLLUP: { data = DropInfo.read(in); 
isRead = true; break; } case OperationType.OP_BATCH_DROP_ROLLUP: { data = BatchDropInfo.read(in); isRead = true; break; } case OperationType.OP_RENAME_TABLE: case OperationType.OP_RENAME_ROLLUP: case OperationType.OP_RENAME_PARTITION: { data = new TableInfo(); ((TableInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_RENAME_COLUMN: { data = TableRenameColumnInfo.read(in); isRead = true; break; } case OperationType.OP_MODIFY_VIEW_DEF: { data = AlterViewInfo.read(in); isRead = true; break; } case OperationType.OP_BACKUP_JOB: { data = BackupJob.read(in); isRead = true; break; } case OperationType.OP_RESTORE_JOB: { data = RestoreJob.read(in); isRead = true; break; } case OperationType.OP_FINISH_CONSISTENCY_CHECK: { data = new ConsistencyCheckInfo(); ((ConsistencyCheckInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_LOAD_START: case OperationType.OP_LOAD_ETL: case OperationType.OP_LOAD_LOADING: case OperationType.OP_LOAD_QUORUM: case OperationType.OP_LOAD_DONE: case OperationType.OP_LOAD_CANCEL: { data = new LoadJob(); ((LoadJob) data).readFields(in); isRead = true; break; } case OperationType.OP_EXPORT_CREATE: data = ExportJob.read(in); isRead = true; break; case OperationType.OP_EXPORT_UPDATE_STATE: data = ExportJobStateTransfer.read(in); isRead = true; break; case OperationType.OP_FINISH_DELETE: { data = DeleteInfo.read(in); isRead = true; break; } case OperationType.OP_ADD_REPLICA: case OperationType.OP_UPDATE_REPLICA: case OperationType.OP_DELETE_REPLICA: case OperationType.OP_CLEAR_ROLLUP_INFO: { data = ReplicaPersistInfo.read(in); isRead = true; break; } case OperationType.OP_ADD_BACKEND: case OperationType.OP_DROP_BACKEND: case OperationType.OP_MODIFY_BACKEND: case OperationType.OP_BACKEND_STATE_CHANGE: { data = Backend.read(in); isRead = true; break; } case OperationType.OP_ADD_FRONTEND: case OperationType.OP_ADD_FIRST_FRONTEND: case OperationType.OP_MODIFY_FRONTEND: case OperationType.OP_REMOVE_FRONTEND: { data = 
Frontend.read(in); isRead = true; break; } case OperationType.OP_SET_LOAD_ERROR_HUB: { data = new LoadErrorHub.Param(); ((LoadErrorHub.Param) data).readFields(in); isRead = true; break; } case OperationType.OP_NEW_DROP_USER: { data = UserIdentity.read(in); isRead = true; break; } case OperationType.OP_CREATE_USER: case OperationType.OP_GRANT_PRIV: case OperationType.OP_REVOKE_PRIV: case OperationType.OP_SET_PASSWORD: case OperationType.OP_CREATE_ROLE: case OperationType.OP_DROP_ROLE: { data = PrivInfo.read(in); isRead = true; break; } case OperationType.OP_SET_LDAP_PASSWORD: { data = LdapInfo.read(in); isRead = true; break; } case OperationType.OP_UPDATE_USER_PROPERTY: { data = UserPropertyInfo.read(in); isRead = true; break; } case OperationType.OP_MASTER_INFO_CHANGE: { data = MasterInfo.read(in); isRead = true; break; } case OperationType.OP_TIMESTAMP: { data = new Timestamp(); ((Timestamp) data).readFields(in); isRead = true; break; } case OperationType.OP_META_VERSION: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_CREATE_CLUSTER: { data = Cluster.read(in); isRead = true; break; } case OperationType.OP_ADD_BROKER: case OperationType.OP_DROP_BROKER: { data = new BrokerMgr.ModifyBrokerInfo(); ((BrokerMgr.ModifyBrokerInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_DROP_ALL_BROKER: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_UPSERT_TRANSACTION_STATE: case OperationType.OP_DELETE_TRANSACTION_STATE: { data = new TransactionState(); ((TransactionState) data).readFields(in); isRead = true; break; } case OperationType.OP_BATCH_REMOVE_TXNS: { data = BatchRemoveTransactionsOperation.read(in); isRead = true; break; } case OperationType.OP_BATCH_REMOVE_TXNS_V2: { data = BatchRemoveTransactionsOperationV2.read(in); isRead = true; break; } case OperationType.OP_SET_TABLE_STATUS: { data = SetTableStatusOperationLog.read(in); isRead = true; break; } 
case OperationType.OP_CREATE_REPOSITORY: { data = Repository.read(in); isRead = true; break; } case OperationType.OP_DROP_REPOSITORY: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_TRUNCATE_TABLE: { data = TruncateTableInfo.read(in); isRead = true; break; } case OperationType.OP_COLOCATE_ADD_TABLE: case OperationType.OP_COLOCATE_REMOVE_TABLE: case OperationType.OP_COLOCATE_BACKENDS_PER_BUCKETSEQ: case OperationType.OP_COLOCATE_MARK_UNSTABLE: case OperationType.OP_COLOCATE_MARK_STABLE: { data = ColocatePersistInfo.read(in); isRead = true; break; } case OperationType.OP_MODIFY_TABLE_COLOCATE: { data = TablePropertyInfo.read(in); isRead = true; break; } case OperationType.OP_HEARTBEAT: { data = HbPackage.read(in); isRead = true; break; } case OperationType.OP_ADD_FUNCTION: { data = Function.read(in); isRead = true; break; } case OperationType.OP_DROP_FUNCTION: { data = FunctionSearchDesc.read(in); isRead = true; break; } case OperationType.OP_ADD_GLOBAL_FUNCTION: { data = Function.read(in); isRead = true; break; } case OperationType.OP_DROP_GLOBAL_FUNCTION: { data = FunctionSearchDesc.read(in); isRead = true; break; } case OperationType.OP_CREATE_ENCRYPTKEY: { data = EncryptKey.read(in); isRead = true; break; } case OperationType.OP_DROP_ENCRYPTKEY: { data = EncryptKeySearchDesc.read(in); isRead = true; break; } case OperationType.OP_BACKEND_TABLETS_INFO: { data = BackendTabletsInfo.read(in); isRead = true; break; } case OperationType.OP_BACKEND_REPLICAS_INFO: { data = BackendReplicasInfo.read(in); isRead = true; break; } case OperationType.OP_CREATE_ROUTINE_LOAD_JOB: { data = RoutineLoadJob.read(in); isRead = true; break; } case OperationType.OP_CHANGE_ROUTINE_LOAD_JOB: case OperationType.OP_REMOVE_ROUTINE_LOAD_JOB: { data = RoutineLoadOperation.read(in); isRead = true; break; } case OperationType.OP_UPDATE_SCHEDULER_JOB: case OperationType.OP_DELETE_SCHEDULER_JOB: case OperationType.OP_CREATE_SCHEDULER_JOB: { Job job 
= Job.readFields(in); data = job; isRead = true; break; } case OperationType.OP_CREATE_SCHEDULER_TASK: case OperationType.OP_DELETE_SCHEDULER_TASK: { JobTask task = JobTask.readFields(in); data = task; isRead = true; break; } case OperationType.OP_CREATE_LOAD_JOB: { data = org.apache.doris.load.loadv2.LoadJob.read(in); isRead = true; break; } case OperationType.OP_END_LOAD_JOB: { data = LoadJobFinalOperation.read(in); isRead = true; break; } case OperationType.OP_UPDATE_LOAD_JOB: { data = LoadJobStateUpdateInfo.read(in); isRead = true; break; } case OperationType.OP_CREATE_SYNC_JOB: { data = SyncJob.read(in); isRead = true; break; } case OperationType.OP_UPDATE_SYNC_JOB_STATE: { data = SyncJob.SyncJobUpdateStateInfo.read(in); isRead = true; break; } case OperationType.OP_FETCH_STREAM_LOAD_RECORD: { data = FetchStreamLoadRecord.read(in); isRead = true; break; } case OperationType.OP_CREATE_RESOURCE: case OperationType.OP_ALTER_RESOURCE: { data = Resource.read(in); isRead = true; break; } case OperationType.OP_DROP_RESOURCE: { data = DropResourceOperationLog.read(in); isRead = true; break; } case OperationType.OP_CREATE_SMALL_FILE: case OperationType.OP_DROP_SMALL_FILE: { data = SmallFile.read(in); isRead = true; break; } case OperationType.OP_ALTER_JOB_V2: { data = AlterJobV2.read(in); isRead = true; break; } case OperationType.OP_UPDATE_COOLDOWN_CONF: { data = CooldownConfList.read(in); isRead = true; break; } case OperationType.OP_COOLDOWN_DELETE: { data = CooldownDelete.read(in); isRead = true; break; } case OperationType.OP_BATCH_ADD_ROLLUP: { data = BatchAlterJobPersistInfo.read(in); isRead = true; break; } case OperationType.OP_MODIFY_DISTRIBUTION_TYPE: { data = TableInfo.read(in); isRead = true; break; } case OperationType.OP_SET_REPLICA_STATUS: { data = SetReplicaStatusOperationLog.read(in); isRead = true; break; } case OperationType.OP_SET_REPLICA_VERSION: { data = SetReplicaVersionOperationLog.read(in); isRead = true; break; } case 
OperationType.OP_SET_PARTITION_VERSION: { data = SetPartitionVersionOperationLog.read(in); isRead = true; break; } case OperationType.OP_DYNAMIC_PARTITION: case OperationType.OP_MODIFY_IN_MEMORY: case OperationType.OP_MODIFY_REPLICATION_NUM: case OperationType.OP_UPDATE_BINLOG_CONFIG: { data = ModifyTablePropertyOperationLog.read(in); isRead = true; break; } case OperationType.OP_MODIFY_DISTRIBUTION_BUCKET_NUM: { data = ModifyTableDefaultDistributionBucketNumOperationLog.read(in); isRead = true; break; } case OperationType.OP_REPLACE_TEMP_PARTITION: { data = ReplacePartitionOperationLog.read(in); isRead = true; break; } case OperationType.OP_INSTALL_PLUGIN: { data = PluginInfo.read(in); isRead = true; break; } case OperationType.OP_UNINSTALL_PLUGIN: { data = PluginInfo.read(in); isRead = true; break; } case OperationType.OP_REMOVE_ALTER_JOB_V2: { data = RemoveAlterJobV2OperationLog.read(in); isRead = true; break; } case OperationType.OP_MODIFY_COMMENT: { data = ModifyCommentOperationLog.read(in); isRead = true; break; } case OperationType.OP_ALTER_ROUTINE_LOAD_JOB: { data = AlterRoutineLoadJobOperationLog.read(in); isRead = true; break; } case OperationType.OP_GLOBAL_VARIABLE_V2: { data = GlobalVarPersistInfo.read(in); isRead = true; break; } case OperationType.OP_REPLACE_TABLE: { data = ReplaceTableOperationLog.read(in); isRead = true; break; } case OperationType.OP_CREATE_SQL_BLOCK_RULE: { data = SqlBlockRule.read(in); isRead = true; break; } case OperationType.OP_ALTER_SQL_BLOCK_RULE: { data = SqlBlockRule.read(in); isRead = true; break; } case OperationType.OP_DROP_SQL_BLOCK_RULE: { data = DropSqlBlockRuleOperationLog.read(in); isRead = true; break; } case OperationType.OP_MODIFY_TABLE_ENGINE: { data = ModifyTableEngineOperationLog.read(in); isRead = true; break; } case OperationType.OP_CREATE_POLICY: { data = Policy.read(in); isRead = true; break; } case OperationType.OP_DROP_POLICY: { data = DropPolicyLog.read(in); isRead = true; break; } case 
OperationType.OP_ALTER_STORAGE_POLICY: { data = StoragePolicy.read(in); isRead = true; break; } case OperationType.OP_CREATE_CATALOG: case OperationType.OP_DROP_CATALOG: case OperationType.OP_ALTER_CATALOG_NAME: case OperationType.OP_ALTER_CATALOG_PROPS: case OperationType.OP_REFRESH_CATALOG: { data = CatalogLog.read(in); isRead = true; break; } case OperationType.OP_INIT_CATALOG: { data = InitCatalogLog.read(in); isRead = true; break; } case OperationType.OP_INIT_EXTERNAL_DB: { data = InitDatabaseLog.read(in); isRead = true; break; } case OperationType.OP_INIT_EXTERNAL_TABLE: { data = InitTableLog.read(in); isRead = true; break; } case OperationType.OP_REFRESH_EXTERNAL_DB: case OperationType.OP_DROP_EXTERNAL_TABLE: case OperationType.OP_CREATE_EXTERNAL_TABLE: case OperationType.OP_DROP_EXTERNAL_DB: case OperationType.OP_CREATE_EXTERNAL_DB: case OperationType.OP_ADD_EXTERNAL_PARTITIONS: case OperationType.OP_DROP_EXTERNAL_PARTITIONS: case OperationType.OP_REFRESH_EXTERNAL_PARTITIONS: case OperationType.OP_REFRESH_EXTERNAL_TABLE: { data = ExternalObjectLog.read(in); isRead = true; break; } case OperationType.OP_MODIFY_TABLE_LIGHT_SCHEMA_CHANGE: { data = TableAddOrDropColumnsInfo.read(in); isRead = true; break; } case OperationType.OP_MODIFY_TABLE_ADD_OR_DROP_INVERTED_INDICES: { data = TableAddOrDropInvertedIndicesInfo.read(in); isRead = true; break; } case OperationType.OP_INVERTED_INDEX_JOB: { data = IndexChangeJob.read(in); isRead = true; break; } case OperationType.OP_CLEAN_LABEL: { data = CleanLabelOperationLog.read(in); isRead = true; break; } case OperationType.OP_CREATE_MTMV_JOB: { data = MTMVJob.read(in); isRead = true; break; } case OperationType.OP_DROP_MTMV_JOB: { data = DropMTMVJob.read(in); isRead = true; break; } case OperationType.OP_CHANGE_MTMV_JOB: { data = ChangeMTMVJob.read(in); isRead = true; break; } case OperationType.OP_CREATE_MTMV_TASK: { data = MTMVTask.read(in); isRead = true; break; } case OperationType.OP_DROP_MTMV_TASK: { data = 
DropMTMVTask.read(in); isRead = true; break; } case OperationType.OP_CHANGE_MTMV_TASK: { Text.readString(in); isRead = true; break; } case OperationType.OP_ALTER_MTMV_STMT: { data = AlterMultiMaterializedView.read(in); isRead = true; break; } case OperationType.OP_ALTER_USER: { data = AlterUserOperationLog.read(in); isRead = true; break; } case OperationType.OP_CREATE_WORKLOAD_GROUP: case OperationType.OP_ALTER_WORKLOAD_GROUP: { data = WorkloadGroup.read(in); isRead = true; break; } case OperationType.OP_DROP_WORKLOAD_GROUP: { data = DropWorkloadGroupOperationLog.read(in); isRead = true; break; } case OperationType.OP_ALTER_LIGHT_SCHEMA_CHANGE: { data = AlterLightSchemaChangeInfo.read(in); isRead = true; break; } case OperationType.OP_CLEAN_QUERY_STATS: { data = CleanQueryStatsInfo.read(in); isRead = true; break; } case OperationType.OP_CREATE_ANALYSIS_JOB: { data = AnalysisInfo.read(in); isRead = true; break; } case OperationType.OP_CREATE_ANALYSIS_TASK: { data = AnalysisInfo.read(in); isRead = true; break; } case OperationType.OP_DELETE_ANALYSIS_JOB: { data = AnalyzeDeletionLog.read(in); isRead = true; break; } case OperationType.OP_DELETE_ANALYSIS_TASK: { data = AnalyzeDeletionLog.read(in); break; } case OperationType.OP_UPDATE_AUTO_INCREMENT_ID: { data = AutoIncrementIdUpdateLog.read(in); isRead = true; break; } case OperationType.OP_ALTER_DATABASE_PROPERTY: { data = AlterDatabasePropertyInfo.read(in); isRead = true; break; } case OperationType.OP_GC_BINLOG: { data = BinlogGcInfo.read(in); isRead = true; break; } case OperationType.OP_BARRIER: { data = BarrierLog.read(in); isRead = true; break; } case OperationType.OP_UPDATE_TABLE_STATS: { data = TableStats.read(in); isRead = true; break; } case OperationType.OP_PERSIST_AUTO_JOB: { data = AnalysisInfo.read(in); isRead = true; break; } case OperationType.OP_DELETE_TABLE_STATS: { data = TableStatsDeletionLog.read(in); isRead = true; break; } default: { IOException e = new IOException(); LOG.error("UNKNOWN 
Operation Type {}", opCode, e); throw e; } } Preconditions.checkState(isRead); }
case OperationType.OP_DELETE_TABLE_STATS: {
public void readFields(DataInput in) throws IOException { opCode = in.readShort(); boolean isRead = false; LOG.debug("get opcode: {}", opCode); switch (opCode) { case OperationType.OP_LOCAL_EOF: { data = null; isRead = true; break; } case OperationType.OP_SAVE_NEXTID: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_SAVE_TRANSACTION_ID: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_CREATE_DB: { data = Database.read(in); isRead = true; break; } case OperationType.OP_DROP_DB: { data = DropDbInfo.read(in); isRead = true; break; } case OperationType.OP_ALTER_DB: case OperationType.OP_RENAME_DB: { data = new DatabaseInfo(); ((DatabaseInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_CREATE_TABLE: { data = new CreateTableInfo(); ((CreateTableInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_DROP_TABLE: { data = DropInfo.read(in); isRead = true; break; } case OperationType.OP_ALTER_EXTERNAL_TABLE_SCHEMA: { data = RefreshExternalTableInfo.read(in); isRead = true; break; } case OperationType.OP_ADD_PARTITION: { data = new PartitionPersistInfo(); ((PartitionPersistInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_DROP_PARTITION: { data = DropPartitionInfo.read(in); isRead = true; break; } case OperationType.OP_MODIFY_PARTITION: { data = ModifyPartitionInfo.read(in); isRead = true; break; } case OperationType.OP_BATCH_MODIFY_PARTITION: { data = BatchModifyPartitionsInfo.read(in); isRead = true; break; } case OperationType.OP_ERASE_DB: case OperationType.OP_ERASE_TABLE: case OperationType.OP_ERASE_PARTITION: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_RECOVER_DB: case OperationType.OP_RECOVER_TABLE: case OperationType.OP_RECOVER_PARTITION: { data = RecoverInfo.read(in); isRead = true; break; } case OperationType.OP_DROP_ROLLUP: { data = DropInfo.read(in); 
isRead = true; break; } case OperationType.OP_BATCH_DROP_ROLLUP: { data = BatchDropInfo.read(in); isRead = true; break; } case OperationType.OP_RENAME_TABLE: case OperationType.OP_RENAME_ROLLUP: case OperationType.OP_RENAME_PARTITION: { data = new TableInfo(); ((TableInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_RENAME_COLUMN: { data = TableRenameColumnInfo.read(in); isRead = true; break; } case OperationType.OP_MODIFY_VIEW_DEF: { data = AlterViewInfo.read(in); isRead = true; break; } case OperationType.OP_BACKUP_JOB: { data = BackupJob.read(in); isRead = true; break; } case OperationType.OP_RESTORE_JOB: { data = RestoreJob.read(in); isRead = true; break; } case OperationType.OP_FINISH_CONSISTENCY_CHECK: { data = new ConsistencyCheckInfo(); ((ConsistencyCheckInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_LOAD_START: case OperationType.OP_LOAD_ETL: case OperationType.OP_LOAD_LOADING: case OperationType.OP_LOAD_QUORUM: case OperationType.OP_LOAD_DONE: case OperationType.OP_LOAD_CANCEL: { data = new LoadJob(); ((LoadJob) data).readFields(in); isRead = true; break; } case OperationType.OP_EXPORT_CREATE: data = ExportJob.read(in); isRead = true; break; case OperationType.OP_EXPORT_UPDATE_STATE: data = ExportJobStateTransfer.read(in); isRead = true; break; case OperationType.OP_FINISH_DELETE: { data = DeleteInfo.read(in); isRead = true; break; } case OperationType.OP_ADD_REPLICA: case OperationType.OP_UPDATE_REPLICA: case OperationType.OP_DELETE_REPLICA: case OperationType.OP_CLEAR_ROLLUP_INFO: { data = ReplicaPersistInfo.read(in); isRead = true; break; } case OperationType.OP_ADD_BACKEND: case OperationType.OP_DROP_BACKEND: case OperationType.OP_MODIFY_BACKEND: case OperationType.OP_BACKEND_STATE_CHANGE: { data = Backend.read(in); isRead = true; break; } case OperationType.OP_ADD_FRONTEND: case OperationType.OP_ADD_FIRST_FRONTEND: case OperationType.OP_MODIFY_FRONTEND: case OperationType.OP_REMOVE_FRONTEND: { data = 
Frontend.read(in); isRead = true; break; } case OperationType.OP_SET_LOAD_ERROR_HUB: { data = new LoadErrorHub.Param(); ((LoadErrorHub.Param) data).readFields(in); isRead = true; break; } case OperationType.OP_NEW_DROP_USER: { data = UserIdentity.read(in); isRead = true; break; } case OperationType.OP_CREATE_USER: case OperationType.OP_GRANT_PRIV: case OperationType.OP_REVOKE_PRIV: case OperationType.OP_SET_PASSWORD: case OperationType.OP_CREATE_ROLE: case OperationType.OP_DROP_ROLE: { data = PrivInfo.read(in); isRead = true; break; } case OperationType.OP_SET_LDAP_PASSWORD: { data = LdapInfo.read(in); isRead = true; break; } case OperationType.OP_UPDATE_USER_PROPERTY: { data = UserPropertyInfo.read(in); isRead = true; break; } case OperationType.OP_MASTER_INFO_CHANGE: { data = MasterInfo.read(in); isRead = true; break; } case OperationType.OP_TIMESTAMP: { data = new Timestamp(); ((Timestamp) data).readFields(in); isRead = true; break; } case OperationType.OP_META_VERSION: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_CREATE_CLUSTER: { data = Cluster.read(in); isRead = true; break; } case OperationType.OP_ADD_BROKER: case OperationType.OP_DROP_BROKER: { data = new BrokerMgr.ModifyBrokerInfo(); ((BrokerMgr.ModifyBrokerInfo) data).readFields(in); isRead = true; break; } case OperationType.OP_DROP_ALL_BROKER: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_UPSERT_TRANSACTION_STATE: case OperationType.OP_DELETE_TRANSACTION_STATE: { data = new TransactionState(); ((TransactionState) data).readFields(in); isRead = true; break; } case OperationType.OP_BATCH_REMOVE_TXNS: { data = BatchRemoveTransactionsOperation.read(in); isRead = true; break; } case OperationType.OP_BATCH_REMOVE_TXNS_V2: { data = BatchRemoveTransactionsOperationV2.read(in); isRead = true; break; } case OperationType.OP_SET_TABLE_STATUS: { data = SetTableStatusOperationLog.read(in); isRead = true; break; } 
case OperationType.OP_CREATE_REPOSITORY: { data = Repository.read(in); isRead = true; break; } case OperationType.OP_DROP_REPOSITORY: { data = new Text(); ((Text) data).readFields(in); isRead = true; break; } case OperationType.OP_TRUNCATE_TABLE: { data = TruncateTableInfo.read(in); isRead = true; break; } case OperationType.OP_COLOCATE_ADD_TABLE: case OperationType.OP_COLOCATE_REMOVE_TABLE: case OperationType.OP_COLOCATE_BACKENDS_PER_BUCKETSEQ: case OperationType.OP_COLOCATE_MARK_UNSTABLE: case OperationType.OP_COLOCATE_MARK_STABLE: { data = ColocatePersistInfo.read(in); isRead = true; break; } case OperationType.OP_MODIFY_TABLE_COLOCATE: { data = TablePropertyInfo.read(in); isRead = true; break; } case OperationType.OP_HEARTBEAT: { data = HbPackage.read(in); isRead = true; break; } case OperationType.OP_ADD_FUNCTION: { data = Function.read(in); isRead = true; break; } case OperationType.OP_DROP_FUNCTION: { data = FunctionSearchDesc.read(in); isRead = true; break; } case OperationType.OP_ADD_GLOBAL_FUNCTION: { data = Function.read(in); isRead = true; break; } case OperationType.OP_DROP_GLOBAL_FUNCTION: { data = FunctionSearchDesc.read(in); isRead = true; break; } case OperationType.OP_CREATE_ENCRYPTKEY: { data = EncryptKey.read(in); isRead = true; break; } case OperationType.OP_DROP_ENCRYPTKEY: { data = EncryptKeySearchDesc.read(in); isRead = true; break; } case OperationType.OP_BACKEND_TABLETS_INFO: { data = BackendTabletsInfo.read(in); isRead = true; break; } case OperationType.OP_BACKEND_REPLICAS_INFO: { data = BackendReplicasInfo.read(in); isRead = true; break; } case OperationType.OP_CREATE_ROUTINE_LOAD_JOB: { data = RoutineLoadJob.read(in); isRead = true; break; } case OperationType.OP_CHANGE_ROUTINE_LOAD_JOB: case OperationType.OP_REMOVE_ROUTINE_LOAD_JOB: { data = RoutineLoadOperation.read(in); isRead = true; break; } case OperationType.OP_UPDATE_SCHEDULER_JOB: case OperationType.OP_DELETE_SCHEDULER_JOB: case OperationType.OP_CREATE_SCHEDULER_JOB: { Job job 
= Job.readFields(in); data = job; isRead = true; break; } case OperationType.OP_CREATE_SCHEDULER_TASK: case OperationType.OP_DELETE_SCHEDULER_TASK: { JobTask task = JobTask.readFields(in); data = task; isRead = true; break; } case OperationType.OP_CREATE_LOAD_JOB: { data = org.apache.doris.load.loadv2.LoadJob.read(in); isRead = true; break; } case OperationType.OP_END_LOAD_JOB: { data = LoadJobFinalOperation.read(in); isRead = true; break; } case OperationType.OP_UPDATE_LOAD_JOB: { data = LoadJobStateUpdateInfo.read(in); isRead = true; break; } case OperationType.OP_CREATE_SYNC_JOB: { data = SyncJob.read(in); isRead = true; break; } case OperationType.OP_UPDATE_SYNC_JOB_STATE: { data = SyncJob.SyncJobUpdateStateInfo.read(in); isRead = true; break; } case OperationType.OP_FETCH_STREAM_LOAD_RECORD: { data = FetchStreamLoadRecord.read(in); isRead = true; break; } case OperationType.OP_CREATE_RESOURCE: case OperationType.OP_ALTER_RESOURCE: { data = Resource.read(in); isRead = true; break; } case OperationType.OP_DROP_RESOURCE: { data = DropResourceOperationLog.read(in); isRead = true; break; } case OperationType.OP_CREATE_SMALL_FILE: case OperationType.OP_DROP_SMALL_FILE: { data = SmallFile.read(in); isRead = true; break; } case OperationType.OP_ALTER_JOB_V2: { data = AlterJobV2.read(in); isRead = true; break; } case OperationType.OP_UPDATE_COOLDOWN_CONF: { data = CooldownConfList.read(in); isRead = true; break; } case OperationType.OP_COOLDOWN_DELETE: { data = CooldownDelete.read(in); isRead = true; break; } case OperationType.OP_BATCH_ADD_ROLLUP: { data = BatchAlterJobPersistInfo.read(in); isRead = true; break; } case OperationType.OP_MODIFY_DISTRIBUTION_TYPE: { data = TableInfo.read(in); isRead = true; break; } case OperationType.OP_SET_REPLICA_STATUS: { data = SetReplicaStatusOperationLog.read(in); isRead = true; break; } case OperationType.OP_SET_REPLICA_VERSION: { data = SetReplicaVersionOperationLog.read(in); isRead = true; break; } case 
OperationType.OP_SET_PARTITION_VERSION: { data = SetPartitionVersionOperationLog.read(in); isRead = true; break; } case OperationType.OP_DYNAMIC_PARTITION: case OperationType.OP_MODIFY_IN_MEMORY: case OperationType.OP_MODIFY_REPLICATION_NUM: case OperationType.OP_UPDATE_BINLOG_CONFIG: { data = ModifyTablePropertyOperationLog.read(in); isRead = true; break; } case OperationType.OP_MODIFY_DISTRIBUTION_BUCKET_NUM: { data = ModifyTableDefaultDistributionBucketNumOperationLog.read(in); isRead = true; break; } case OperationType.OP_REPLACE_TEMP_PARTITION: { data = ReplacePartitionOperationLog.read(in); isRead = true; break; } case OperationType.OP_INSTALL_PLUGIN: { data = PluginInfo.read(in); isRead = true; break; } case OperationType.OP_UNINSTALL_PLUGIN: { data = PluginInfo.read(in); isRead = true; break; } case OperationType.OP_REMOVE_ALTER_JOB_V2: { data = RemoveAlterJobV2OperationLog.read(in); isRead = true; break; } case OperationType.OP_MODIFY_COMMENT: { data = ModifyCommentOperationLog.read(in); isRead = true; break; } case OperationType.OP_ALTER_ROUTINE_LOAD_JOB: { data = AlterRoutineLoadJobOperationLog.read(in); isRead = true; break; } case OperationType.OP_GLOBAL_VARIABLE_V2: { data = GlobalVarPersistInfo.read(in); isRead = true; break; } case OperationType.OP_REPLACE_TABLE: { data = ReplaceTableOperationLog.read(in); isRead = true; break; } case OperationType.OP_CREATE_SQL_BLOCK_RULE: { data = SqlBlockRule.read(in); isRead = true; break; } case OperationType.OP_ALTER_SQL_BLOCK_RULE: { data = SqlBlockRule.read(in); isRead = true; break; } case OperationType.OP_DROP_SQL_BLOCK_RULE: { data = DropSqlBlockRuleOperationLog.read(in); isRead = true; break; } case OperationType.OP_MODIFY_TABLE_ENGINE: { data = ModifyTableEngineOperationLog.read(in); isRead = true; break; } case OperationType.OP_CREATE_POLICY: { data = Policy.read(in); isRead = true; break; } case OperationType.OP_DROP_POLICY: { data = DropPolicyLog.read(in); isRead = true; break; } case 
OperationType.OP_ALTER_STORAGE_POLICY: { data = StoragePolicy.read(in); isRead = true; break; } case OperationType.OP_CREATE_CATALOG: case OperationType.OP_DROP_CATALOG: case OperationType.OP_ALTER_CATALOG_NAME: case OperationType.OP_ALTER_CATALOG_PROPS: case OperationType.OP_REFRESH_CATALOG: { data = CatalogLog.read(in); isRead = true; break; } case OperationType.OP_INIT_CATALOG: { data = InitCatalogLog.read(in); isRead = true; break; } case OperationType.OP_INIT_EXTERNAL_DB: { data = InitDatabaseLog.read(in); isRead = true; break; } case OperationType.OP_INIT_EXTERNAL_TABLE: { data = InitTableLog.read(in); isRead = true; break; } case OperationType.OP_REFRESH_EXTERNAL_DB: case OperationType.OP_DROP_EXTERNAL_TABLE: case OperationType.OP_CREATE_EXTERNAL_TABLE: case OperationType.OP_DROP_EXTERNAL_DB: case OperationType.OP_CREATE_EXTERNAL_DB: case OperationType.OP_ADD_EXTERNAL_PARTITIONS: case OperationType.OP_DROP_EXTERNAL_PARTITIONS: case OperationType.OP_REFRESH_EXTERNAL_PARTITIONS: case OperationType.OP_REFRESH_EXTERNAL_TABLE: { data = ExternalObjectLog.read(in); isRead = true; break; } case OperationType.OP_MODIFY_TABLE_LIGHT_SCHEMA_CHANGE: { data = TableAddOrDropColumnsInfo.read(in); isRead = true; break; } case OperationType.OP_MODIFY_TABLE_ADD_OR_DROP_INVERTED_INDICES: { data = TableAddOrDropInvertedIndicesInfo.read(in); isRead = true; break; } case OperationType.OP_INVERTED_INDEX_JOB: { data = IndexChangeJob.read(in); isRead = true; break; } case OperationType.OP_CLEAN_LABEL: { data = CleanLabelOperationLog.read(in); isRead = true; break; } case OperationType.OP_CREATE_MTMV_JOB: { data = MTMVJob.read(in); isRead = true; break; } case OperationType.OP_DROP_MTMV_JOB: { data = DropMTMVJob.read(in); isRead = true; break; } case OperationType.OP_CHANGE_MTMV_JOB: { data = ChangeMTMVJob.read(in); isRead = true; break; } case OperationType.OP_CREATE_MTMV_TASK: { data = MTMVTask.read(in); isRead = true; break; } case OperationType.OP_DROP_MTMV_TASK: { data = 
DropMTMVTask.read(in); isRead = true; break; } case OperationType.OP_CHANGE_MTMV_TASK: { Text.readString(in); isRead = true; break; } case OperationType.OP_ALTER_MTMV_STMT: { data = AlterMultiMaterializedView.read(in); isRead = true; break; } case OperationType.OP_ALTER_USER: { data = AlterUserOperationLog.read(in); isRead = true; break; } case OperationType.OP_CREATE_WORKLOAD_GROUP: case OperationType.OP_ALTER_WORKLOAD_GROUP: { data = WorkloadGroup.read(in); isRead = true; break; } case OperationType.OP_DROP_WORKLOAD_GROUP: { data = DropWorkloadGroupOperationLog.read(in); isRead = true; break; } case OperationType.OP_ALTER_LIGHT_SCHEMA_CHANGE: { data = AlterLightSchemaChangeInfo.read(in); isRead = true; break; } case OperationType.OP_CLEAN_QUERY_STATS: { data = CleanQueryStatsInfo.read(in); isRead = true; break; } case OperationType.OP_CREATE_ANALYSIS_JOB: { data = AnalysisInfo.read(in); isRead = true; break; } case OperationType.OP_CREATE_ANALYSIS_TASK: { data = AnalysisInfo.read(in); isRead = true; break; } case OperationType.OP_DELETE_ANALYSIS_JOB: { data = AnalyzeDeletionLog.read(in); isRead = true; break; } case OperationType.OP_DELETE_ANALYSIS_TASK: { data = AnalyzeDeletionLog.read(in); break; } case OperationType.OP_UPDATE_AUTO_INCREMENT_ID: { data = AutoIncrementIdUpdateLog.read(in); isRead = true; break; } case OperationType.OP_ALTER_DATABASE_PROPERTY: { data = AlterDatabasePropertyInfo.read(in); isRead = true; break; } case OperationType.OP_GC_BINLOG: { data = BinlogGcInfo.read(in); isRead = true; break; } case OperationType.OP_BARRIER: { data = BarrierLog.read(in); isRead = true; break; } case OperationType.OP_UPDATE_TABLE_STATS: { data = TableStats.read(in); isRead = true; break; } case OperationType.OP_PERSIST_AUTO_JOB: { data = AnalysisInfo.read(in); isRead = true; break; } case OperationType.OP_DELETE_TABLE_STATS: { data = TableStatsDeletionLog.read(in); isRead = true; break; } default: { IOException e = new IOException(); LOG.error("UNKNOWN 
Operation Type {}", opCode, e); throw e; } } Preconditions.checkState(isRead); }
/**
 * A single entry of the edit-log journal: an operation code plus the deserialized
 * payload ({@link Writable}) that belongs to it.
 */
class JournalEntity implements Writable {
    public static final Logger LOG = LogManager.getLogger(JournalEntity.class);

    // Operation code identifying the payload type of this entry.
    private short opCode;
    // Deserialized payload; its concrete class is determined by opCode.
    private Writable data;
    // Size of the entry in bytes. NOTE(review): it is carried alongside the entry
    // but NOT serialized by write() below -- presumably bookkeeping only; confirm.
    private long dataSize;

    public short getOpCode() {
        return this.opCode;
    }

    public void setOpCode(short opCode) {
        this.opCode = opCode;
    }

    public Writable getData() {
        return this.data;
    }

    public void setData(Writable data) {
        this.data = data;
    }

    public String toString() {
        return " opCode=" + opCode + " " + data;
    }

    public void setDataSize(long dataSize) {
        this.dataSize = dataSize;
    }

    public long getDataSize() {
        return this.dataSize;
    }

    @Override
    public void write(DataOutput out) throws IOException {
        // Only the opcode and the payload are serialized; dataSize is not.
        out.writeShort(opCode);
        data.write(out);
    }
}
/**
 * One journal record of the edit log: an operation code together with the
 * payload object deserialized for that opcode.
 */
class JournalEntity implements Writable {
    public static final Logger LOG = LogManager.getLogger(JournalEntity.class);

    private short opCode;
    private Writable data;
    private long dataSize;

    /** Serializes the opcode followed by the payload; dataSize is not written. */
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeShort(opCode);
        data.write(out);
    }

    public short getOpCode() {
        return opCode;
    }

    public void setOpCode(short opCode) {
        this.opCode = opCode;
    }

    public Writable getData() {
        return data;
    }

    public void setData(Writable data) {
        this.data = data;
    }

    public long getDataSize() {
        return dataSize;
    }

    public void setDataSize(long dataSize) {
        this.dataSize = dataSize;
    }

    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(" opCode=").append(opCode).append(" ").append(data);
        return sb.toString();
    }
}
`finishBundle` contains a different, blocking version of flush that only waits on the records emitted by this bundle. It would actually be harmful to use `producer.flushSync()` in `finishBundle` instead: it would likely block that DoFn instance "forever", because the producer is shared and other instances keep writing to it — `flushSync` essentially loops until the producer has no pending records left at all.
/**
 * Decrements the usage count of the statically shared producer and, when it
 * reaches zero, drains and destroys the producer.
 *
 * <p>FIX: the original held the lock while calling the blocking
 * {@code flushSync()}/{@code destroy()}, which stalls any instance entering
 * {@code setupSharedProducer()} (it synchronizes on the same object) until the
 * drain completes. We now only swap the shared reference under the lock and do
 * the blocking work outside it; once the count is zero no instance writes to
 * the producer anymore.
 */
private void teardownSharedProducer() {
    IKinesisProducer obsolete = null;
    // Must use the same lock object as setupSharedProducer().
    synchronized (producerRefCount) {
        if (producerRefCount.decrementAndGet() == 0) {
            obsolete = producer;
            producer = null;
        }
    }
    if (obsolete == null) {
        return;
    }
    if (obsolete.getOutstandingRecordsCount() > 0) {
        obsolete.flushSync();
    }
    obsolete.destroy();
}
producer.flushSync();
/**
 * Drops one reference to the statically shared producer; the last caller
 * drains and destroys it. The blocking flushSync()/destroy() happen outside
 * the class lock so a concurrently starting instance is never stalled.
 */
private void teardownSharedProducer() {
    IKinesisProducer retired = null;
    synchronized (KinesisWriterFn.class) {
        producerRefCount--;
        if (producerRefCount == 0) {
            retired = producer;
            producer = null;
        }
    }
    if (retired == null) {
        return;
    }
    retired.flushSync();
    retired.destroy();
}
/**
 * DoFn that writes each input byte[] as a user record to a Kinesis stream via a
 * statically shared Amazon KPL producer. Records are added asynchronously in
 * processElement and flushed with retries in finishBundle.
 */
class KinesisWriterFn extends DoFn<byte[], Void> {
    // Cap on how many asynchronous write failures are retained per bundle.
    private static final int MAX_NUM_FAILURES = 10;

    /** Usage count of static, shared Kinesis producer. */
    private static final AtomicInteger producerRefCount = new AtomicInteger();

    /** Static, shared Kinesis producer. */
    private static IKinesisProducer producer;

    // Write transform configuration (stream name, partition key, retries, ...).
    private final KinesisIO.Write spec;
    // Optional user partitioner; null means spec.getPartitionKey() is used.
    private transient KinesisPartitioner partitioner;
    // Per-bundle failure queue, bounded to MAX_NUM_FAILURES.
    private transient LinkedBlockingDeque<KinesisWriteException> failures;
    // Per-bundle futures of all records handed to the producer.
    private transient List<Future<UserRecordResult>> putFutures;

    KinesisWriterFn(KinesisIO.Write spec) {
        this.spec = spec;
    }

    /**
     * Initialize statically shared Kinesis producer if required and count usage.
     *
     * <p>NOTE: If there is, for whatever reasons, another instance of a {@link KinesisWriterFn}
     * with different producer properties or even a different implementation of {@link
     * AWSClientsProvider}, these changes will be silently discarded in favor of an existing
     * producer instance.
     */
    @SuppressFBWarnings("JLM_JSR166_UTILCONCURRENT_MONITORENTER")
    private void setupSharedProducer() {
        synchronized (producerRefCount) {
            if (producer == null) {
                producer =
                    spec.getAWSClientsProvider()
                        .createKinesisProducer(spec.createProducerConfiguration());
                producerRefCount.set(0);
            }
        }
        producerRefCount.incrementAndGet();
    }

    /**
     * DoFn setup: acquires a reference to the shared producer and captures the
     * optional partitioner from the spec.
     * NOTE(review): the matching release happens in teardownSharedProducer(),
     * which is defined elsewhere in this class.
     */
    @SuppressFBWarnings("JLM_JSR166_UTILCONCURRENT_MONITORENTER")
    @Setup
    public void setup() {
        setupSharedProducer();
        if (spec.getPartitioner() != null) {
            partitioner = spec.getPartitioner();
        }
    }

    @StartBundle
    public void startBundle() {
        putFutures = Collections.synchronizedList(new ArrayList<>());
        // Keep only the first MAX_NUM_FAILURES occurred exceptions.
        failures = new LinkedBlockingDeque<>(MAX_NUM_FAILURES);
    }

    /**
     * It adds a record asynchronously which then should be delivered by Kinesis producer in
     * background (Kinesis producer forks native processes to do this job).
     *
     * <p>The records can be batched and then they will be sent in one HTTP request. Amazon KPL
     * supports two types of batching - aggregation and collection - and they can be configured by
     * producer properties.
     *
     * <p>More details: the AWS "KPL Key Concepts" and "Configuring the KPL" pages
     * on docs.aws.amazon.com (original links were garbled in this copy).
     */
    @ProcessElement
    public void processElement(ProcessContext c) {
        ByteBuffer data = ByteBuffer.wrap(c.element());
        String partitionKey = spec.getPartitionKey();
        String explicitHashKey = null;
        // A user-supplied partitioner overrides the static partition key.
        if (partitioner != null) {
            partitionKey = partitioner.getPartitionKey(c.element());
            explicitHashKey = partitioner.getExplicitHashKey(c.element());
        }
        ListenableFuture<UserRecordResult> f =
            producer.addUserRecord(spec.getStreamName(), partitionKey, explicitHashKey, data);
        putFutures.add(f);
    }

    @FinishBundle
    public void finishBundle() throws Exception {
        flushBundle();
    }

    /**
     * Flush outstanding records until the total number of failed records will be less than 0 or
     * the number of retries will be exhausted. The retry timeout starts from 1 second and it
     * doubles on every iteration.
     */
    private void flushBundle() throws InterruptedException, ExecutionException, IOException {
        int retries = spec.getRetries();
        int numFailedRecords;
        int retryTimeout = 1000;
        String message = "";

        do {
            numFailedRecords = 0;
            producer.flush();
            // f.get() blocks until the producer has resolved each record.
            for (Future<UserRecordResult> f : putFutures) {
                UserRecordResult result = f.get();
                if (!result.isSuccessful()) {
                    numFailedRecords++;
                }
            }
            // Exponential backoff between retry rounds.
            Thread.sleep(retryTimeout);
            retryTimeout *= 2;
        } while (numFailedRecords > 0 && retries-- > 0);

        if (numFailedRecords > 0) {
            // Collect (up to MAX_NUM_FAILURES) details of records that still failed.
            for (Future<UserRecordResult> f : putFutures) {
                UserRecordResult result = f.get();
                if (!result.isSuccessful()) {
                    failures.offer(
                        new KinesisWriteException(
                            "Put record was not successful.", new UserRecordFailedException(result)));
                }
            }

            message =
                String.format(
                    "After [%d] retries, number of failed records [%d] is still greater than 0",
                    spec.getRetries(), numFailedRecords);
            LOG.error(message);
        }

        checkForFailures(message);
    }

    /** If any write has asynchronously failed, fail the bundle with a useful error. */
    private void checkForFailures(String message) throws IOException {
        if (failures.isEmpty()) {
            return;
        }

        StringBuilder logEntry = new StringBuilder();
        logEntry.append(message).append(System.lineSeparator());
        int i = 0;
        // Drain the failure queue, accumulating all messages and per-attempt errors.
        while (!failures.isEmpty()) {
            i++;
            KinesisWriteException exc = failures.remove();

            logEntry.append(System.lineSeparator()).append(exc.getMessage());
            Throwable cause = exc.getCause();
            if (cause != null) {
                logEntry.append(": ").append(cause.getMessage());

                if (cause instanceof UserRecordFailedException) {
                    List<Attempt> attempts =
                        ((UserRecordFailedException) cause).getResult().getAttempts();
                    for (Attempt attempt : attempts) {
                        if (attempt.getErrorMessage() != null) {
                            logEntry.append(System.lineSeparator()).append(attempt.getErrorMessage());
                        }
                    }
                }
            }
        }

        String errorMessage =
            String.format(
                "Some errors occurred writing to Kinesis. First %d errors: %s",
                i, logEntry.toString());
        throw new IOException(errorMessage);
    }

    @Teardown
    public void teardown() throws Exception {
        teardownSharedProducer();
    }
}
class KinesisWriterFn extends DoFn<byte[], Void> { private static final int MAX_NUM_FAILURES = 10; /** Usage count of static, shared Kinesis producer. */ private static int producerRefCount = 0; /** Static, shared Kinesis producer. */ private static IKinesisProducer producer; private final KinesisIO.Write spec; private transient KinesisPartitioner partitioner; private transient LinkedBlockingDeque<KinesisWriteException> failures; private transient List<Future<UserRecordResult>> putFutures; KinesisWriterFn(KinesisIO.Write spec) { this.spec = spec; } /** * Initialize statically shared Kinesis producer if required and count usage. * * <p>NOTE: If there is, for whatever reasons, another instance of a {@link KinesisWriterFn} * with different producer properties or even a different implementation of {@link * AWSClientsProvider}, these changes will be silently discarded in favor of an existing * producer instance. */ private void setupSharedProducer() { synchronized (KinesisWriterFn.class) { if (producer == null) { producer = spec.getAWSClientsProvider() .createKinesisProducer(spec.createProducerConfiguration()); producerRefCount = 0; } producerRefCount++; } } /** * Discard statically shared producer if it is not used anymore according to the usage count. */ @Setup public void setup() { setupSharedProducer(); if (spec.getPartitioner() != null) { partitioner = spec.getPartitioner(); } } @StartBundle public void startBundle() { putFutures = Collections.synchronizedList(new ArrayList<>()); /** Keep only the first {@link MAX_NUM_FAILURES} occurred exceptions */ failures = new LinkedBlockingDeque<>(MAX_NUM_FAILURES); } /** * It adds a record asynchronously which then should be delivered by Kinesis producer in * background (Kinesis producer forks native processes to do this job). * * <p>The records can be batched and then they will be sent in one HTTP request. 
Amazon KPL * supports two types of batching - aggregation and collection - and they can be configured by * producer properties. * * <p>More details can be found here: <a * href="https: * Concepts</a> and <a * href="https: * the KPL</a> */ @ProcessElement public void processElement(ProcessContext c) { ByteBuffer data = ByteBuffer.wrap(c.element()); String partitionKey = spec.getPartitionKey(); String explicitHashKey = null; if (partitioner != null) { partitionKey = partitioner.getPartitionKey(c.element()); explicitHashKey = partitioner.getExplicitHashKey(c.element()); } ListenableFuture<UserRecordResult> f = producer.addUserRecord(spec.getStreamName(), partitionKey, explicitHashKey, data); putFutures.add(f); } @FinishBundle public void finishBundle() throws Exception { flushBundle(); } /** * Flush outstanding records until the total number of failed records will be less than 0 or * the number of retries will be exhausted. The retry timeout starts from 1 second and it * doubles on every iteration. 
*/ private void flushBundle() throws InterruptedException, ExecutionException, IOException { int retries = spec.getRetries(); int numFailedRecords; int retryTimeout = 1000; String message = ""; do { numFailedRecords = 0; producer.flush(); for (Future<UserRecordResult> f : putFutures) { UserRecordResult result = f.get(); if (!result.isSuccessful()) { numFailedRecords++; } } Thread.sleep(retryTimeout); retryTimeout *= 2; } while (numFailedRecords > 0 && retries-- > 0); if (numFailedRecords > 0) { for (Future<UserRecordResult> f : putFutures) { UserRecordResult result = f.get(); if (!result.isSuccessful()) { failures.offer( new KinesisWriteException( "Put record was not successful.", new UserRecordFailedException(result))); } } message = String.format( "After [%d] retries, number of failed records [%d] is still greater than 0", spec.getRetries(), numFailedRecords); LOG.error(message); } checkForFailures(message); } /** If any write has asynchronously failed, fail the bundle with a useful error. */ private void checkForFailures(String message) throws IOException { if (failures.isEmpty()) { return; } StringBuilder logEntry = new StringBuilder(); logEntry.append(message).append(System.lineSeparator()); int i = 0; while (!failures.isEmpty()) { i++; KinesisWriteException exc = failures.remove(); logEntry.append(System.lineSeparator()).append(exc.getMessage()); Throwable cause = exc.getCause(); if (cause != null) { logEntry.append(": ").append(cause.getMessage()); if (cause instanceof UserRecordFailedException) { List<Attempt> attempts = ((UserRecordFailedException) cause).getResult().getAttempts(); for (Attempt attempt : attempts) { if (attempt.getErrorMessage() != null) { logEntry.append(System.lineSeparator()).append(attempt.getErrorMessage()); } } } } } String errorMessage = String.format( "Some errors occurred writing to Kinesis. 
First %d errors: %s", i, logEntry.toString()); throw new IOException(errorMessage); } @Teardown public void teardown() throws Exception { teardownSharedProducer(); } }
The motivation is to set the default timeout (100) only for streaming jobs, by design. But in this procedure we cannot determine the `ResultPartitionType` needed to set the default value properly, so I removed that path earlier. The default value could also be set/read in the following procedure via `StreamConfig`, but I found that doing so affects some other code paths. I have now adjusted all the preceding default values to be `-1` when not explicitly set. Then, during job graph generation, we can determine whether to reset the default value to `100` for pipelined partitions only.
/**
 * Transforms one {@code Transformation}, returning the IDs of the stream-graph nodes it
 * produced. Results are memoized in {@code alreadyTransformed} so shared inputs are only
 * transformed once.
 *
 * @param transform the transformation to translate into the stream graph
 * @return the IDs of the nodes representing this transformation
 * @throws IllegalStateException for unknown transformation types, or when auto-generated
 *     UIDs are disabled and a physical transformation carries neither a uid nor a hash
 */
private Collection<Integer> transform(Transformation<?> transform) {

	if (alreadyTransformed.containsKey(transform)) {
		return alreadyTransformed.get(transform);
	}

	LOG.debug("Transforming " + transform);

	if (transform.getMaxParallelism() <= 0) {
		// If the max parallelism hasn't been set, use the global setting from the config.
		int globalMaxParallelismFromConfig = executionConfig.getMaxParallelism();
		if (globalMaxParallelismFromConfig > 0) {
			transform.setMaxParallelism(globalMaxParallelismFromConfig);
		}
	}

	// Trigger eager output-type resolution (may throw if the type could not be inferred).
	transform.getOutputType();

	Collection<Integer> transformedIds;
	if (transform instanceof OneInputTransformation<?, ?>) {
		transformedIds = transformOneInputTransform((OneInputTransformation<?, ?>) transform);
	} else if (transform instanceof TwoInputTransformation<?, ?, ?>) {
		transformedIds = transformTwoInputTransform((TwoInputTransformation<?, ?, ?>) transform);
	} else if (transform instanceof AbstractMultipleInputTransformation<?>) {
		transformedIds = transformMultipleInputTransform((AbstractMultipleInputTransformation<?>) transform);
	} else if (transform instanceof SourceTransformation) {
		transformedIds = transformSource((SourceTransformation<?>) transform);
	} else if (transform instanceof LegacySourceTransformation<?>) {
		transformedIds = transformLegacySource((LegacySourceTransformation<?>) transform);
	} else if (transform instanceof SinkTransformation<?>) {
		transformedIds = transformSink((SinkTransformation<?>) transform);
	} else if (transform instanceof UnionTransformation<?>) {
		transformedIds = transformUnion((UnionTransformation<?>) transform);
	} else if (transform instanceof SplitTransformation<?>) {
		transformedIds = transformSplit((SplitTransformation<?>) transform);
	} else if (transform instanceof SelectTransformation<?>) {
		transformedIds = transformSelect((SelectTransformation<?>) transform);
	} else if (transform instanceof FeedbackTransformation<?>) {
		transformedIds = transformFeedback((FeedbackTransformation<?>) transform);
	} else if (transform instanceof CoFeedbackTransformation<?>) {
		transformedIds = transformCoFeedback((CoFeedbackTransformation<?>) transform);
	} else if (transform instanceof PartitionTransformation<?>) {
		transformedIds = transformPartition((PartitionTransformation<?>) transform);
	} else if (transform instanceof SideOutputTransformation<?>) {
		transformedIds = transformSideOutput((SideOutputTransformation<?>) transform);
	} else {
		throw new IllegalStateException("Unknown transformation: " + transform);
	}

	// Feedback transformations may have registered themselves during recursion.
	if (!alreadyTransformed.containsKey(transform)) {
		alreadyTransformed.put(transform, transformedIds);
	}

	// FIX: only propagate an explicitly configured buffer timeout; a negative value means
	// "not set", so fall back to the generator-level default instead of blindly writing
	// the transformation's (possibly undefined) value into the stream graph.
	if (transform.getBufferTimeout() >= 0) {
		streamGraph.setBufferTimeout(transform.getId(), transform.getBufferTimeout());
	} else {
		streamGraph.setBufferTimeout(transform.getId(), defaultBufferTimeout);
	}

	if (transform.getUid() != null) {
		streamGraph.setTransformationUID(transform.getId(), transform.getUid());
	}
	if (transform.getUserProvidedNodeHash() != null) {
		streamGraph.setTransformationUserHash(transform.getId(), transform.getUserProvidedNodeHash());
	}

	if (!streamGraph.getExecutionConfig().hasAutoGeneratedUIDsEnabled()) {
		if (transform instanceof PhysicalTransformation &&
				transform.getUserProvidedNodeHash() == null &&
				transform.getUid() == null) {
			throw new IllegalStateException("Auto generated UIDs have been disabled " +
				"but no UID or hash has been assigned to operator " + transform.getName());
		}
	}

	if (transform.getMinResources() != null && transform.getPreferredResources() != null) {
		streamGraph.setResources(transform.getId(), transform.getMinResources(), transform.getPreferredResources());
	}

	streamGraph.setManagedMemoryWeight(transform.getId(), transform.getManagedMemoryWeight());

	return transformedIds;
}
// Propagate the user-specified uid (if any) for this transformation to its stream-graph node.
streamGraph.setTransformationUID(transform.getId(), transform.getUid());
/**
 * Transforms one {@code Transformation} into stream-graph nodes and returns their IDs.
 *
 * <p>Exits early if the transformation was already translated (results are memoized in
 * {@code alreadyTransformed}); otherwise dispatches on the concrete transformation type.
 */
private Collection<Integer> transform(Transformation<?> transform) {

	if (alreadyTransformed.containsKey(transform)) {
		return alreadyTransformed.get(transform);
	}

	LOG.debug("Transforming " + transform);

	if (transform.getMaxParallelism() <= 0) {
		// If the max parallelism hasn't been set, fall back to the global config value.
		int globalMaxParallelismFromConfig = executionConfig.getMaxParallelism();
		if (globalMaxParallelismFromConfig > 0) {
			transform.setMaxParallelism(globalMaxParallelismFromConfig);
		}
	}

	// Call getOutputType() eagerly so type-resolution errors surface here, not later.
	transform.getOutputType();

	// Dispatch on the concrete transformation type.
	Collection<Integer> transformedIds;
	if (transform instanceof OneInputTransformation<?, ?>) {
		transformedIds = transformOneInputTransform((OneInputTransformation<?, ?>) transform);
	} else if (transform instanceof TwoInputTransformation<?, ?, ?>) {
		transformedIds = transformTwoInputTransform((TwoInputTransformation<?, ?, ?>) transform);
	} else if (transform instanceof AbstractMultipleInputTransformation<?>) {
		transformedIds = transformMultipleInputTransform((AbstractMultipleInputTransformation<?>) transform);
	} else if (transform instanceof SourceTransformation) {
		transformedIds = transformSource((SourceTransformation<?>) transform);
	} else if (transform instanceof LegacySourceTransformation<?>) {
		transformedIds = transformLegacySource((LegacySourceTransformation<?>) transform);
	} else if (transform instanceof SinkTransformation<?>) {
		transformedIds = transformSink((SinkTransformation<?>) transform);
	} else if (transform instanceof UnionTransformation<?>) {
		transformedIds = transformUnion((UnionTransformation<?>) transform);
	} else if (transform instanceof SplitTransformation<?>) {
		transformedIds = transformSplit((SplitTransformation<?>) transform);
	} else if (transform instanceof SelectTransformation<?>) {
		transformedIds = transformSelect((SelectTransformation<?>) transform);
	} else if (transform instanceof FeedbackTransformation<?>) {
		transformedIds = transformFeedback((FeedbackTransformation<?>) transform);
	} else if (transform instanceof CoFeedbackTransformation<?>) {
		transformedIds = transformCoFeedback((CoFeedbackTransformation<?>) transform);
	} else if (transform instanceof PartitionTransformation<?>) {
		transformedIds = transformPartition((PartitionTransformation<?>) transform);
	} else if (transform instanceof SideOutputTransformation<?>) {
		transformedIds = transformSideOutput((SideOutputTransformation<?>) transform);
	} else {
		throw new IllegalStateException("Unknown transformation: " + transform);
	}

	// Recursive calls (e.g. via feedback edges) may have registered this transformation
	// already; only record it if it is still missing.
	if (!alreadyTransformed.containsKey(transform)) {
		alreadyTransformed.put(transform, transformedIds);
	}

	// A negative buffer timeout means "not explicitly set" — use the generator default.
	if (transform.getBufferTimeout() >= 0) {
		streamGraph.setBufferTimeout(transform.getId(), transform.getBufferTimeout());
	} else {
		streamGraph.setBufferTimeout(transform.getId(), defaultBufferTimeout);
	}

	if (transform.getUid() != null) {
		streamGraph.setTransformationUID(transform.getId(), transform.getUid());
	}
	if (transform.getUserProvidedNodeHash() != null) {
		streamGraph.setTransformationUserHash(transform.getId(), transform.getUserProvidedNodeHash());
	}

	// When auto-generated UIDs are disabled, every physical operator must carry an
	// explicit uid or user-provided hash, otherwise savepoint mapping is undefined.
	if (!streamGraph.getExecutionConfig().hasAutoGeneratedUIDsEnabled()) {
		if (transform instanceof PhysicalTransformation &&
				transform.getUserProvidedNodeHash() == null &&
				transform.getUid() == null) {
			throw new IllegalStateException("Auto generated UIDs have been disabled " +
				"but no UID or hash has been assigned to operator " + transform.getName());
		}
	}

	if (transform.getMinResources() != null && transform.getPreferredResources() != null) {
		streamGraph.setResources(transform.getId(), transform.getMinResources(), transform.getPreferredResources());
	}

	streamGraph.setManagedMemoryWeight(transform.getId(), transform.getManagedMemoryWeight());

	return transformedIds;
}
/**
 * Generates a {@code StreamGraph} from a list of {@code Transformation}s by recursively
 * translating each transformation into stream-graph nodes and virtual nodes.
 */
class StreamGraphGenerator {

	private static final Logger LOG = LoggerFactory.getLogger(StreamGraphGenerator.class);

	public static final int DEFAULT_LOWER_BOUND_MAX_PARALLELISM = KeyGroupRangeAssignment.DEFAULT_LOWER_BOUND_MAX_PARALLELISM;

	public static final ScheduleMode DEFAULT_SCHEDULE_MODE = ScheduleMode.EAGER;

	public static final TimeCharacteristic DEFAULT_TIME_CHARACTERISTIC = TimeCharacteristic.ProcessingTime;

	public static final String DEFAULT_JOB_NAME = "Flink Streaming Job";

	/** The default buffer timeout (max delay of records in the network stack). */
	// NOTE(review): per the attached review discussion, this eager default of 100ms is only
	// appropriate for pipelined (streaming) partitions; consider defaulting to -1 (undefined)
	// here and resolving the 100ms default during job graph generation instead — confirm.
	public static final long DEFAULT_NETWORK_BUFFER_TIMEOUT = 100L;

	public static final String DEFAULT_SLOT_SHARING_GROUP = "default";

	// Immutable inputs to the generation; set once in the constructor.
	private final List<Transformation<?>> transformations;

	private final ExecutionConfig executionConfig;

	private final CheckpointConfig checkpointConfig;

	private SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.none();

	// Builder-style configuration, applied to the StreamGraph in generate().
	private StateBackend stateBackend;

	private boolean chaining = true;

	private ScheduleMode scheduleMode = DEFAULT_SCHEDULE_MODE;

	private Collection<Tuple2<String, DistributedCache.DistributedCacheEntry>> userArtifacts;

	private TimeCharacteristic timeCharacteristic = DEFAULT_TIME_CHARACTERISTIC;

	private long defaultBufferTimeout = DEFAULT_NETWORK_BUFFER_TIMEOUT;

	private String jobName = DEFAULT_JOB_NAME;

	private GlobalDataExchangeMode globalDataExchangeMode = GlobalDataExchangeMode.ALL_EDGES_PIPELINED;

	// Counter for iteration source/sink node IDs; counts downward so iteration nodes get
	// negative IDs and never collide with regular transformation IDs.
	protected static Integer iterationIdCounter = 0;
	public static int getNewIterationNodeId() {
		iterationIdCounter--;
		return iterationIdCounter;
	}

	// Mutable generation state; only valid during a generate() call.
	private StreamGraph streamGraph;

	// Memoization of already-translated transformations -> produced node IDs.
	private Map<Transformation<?>, Collection<Integer>> alreadyTransformed;

	public StreamGraphGenerator(List<Transformation<?>> transformations, ExecutionConfig executionConfig, CheckpointConfig checkpointConfig) {
		this.transformations = checkNotNull(transformations);
		this.executionConfig = checkNotNull(executionConfig);
		this.checkpointConfig = checkNotNull(checkpointConfig);
	}

	public StreamGraphGenerator setStateBackend(StateBackend stateBackend) {
		this.stateBackend = stateBackend;
		return this;
	}

	public StreamGraphGenerator setChaining(boolean chaining) {
		this.chaining = chaining;
		return this;
	}

	public StreamGraphGenerator setScheduleMode(ScheduleMode scheduleMode) {
		this.scheduleMode = scheduleMode;
		return this;
	}

	public StreamGraphGenerator setUserArtifacts(Collection<Tuple2<String, DistributedCache.DistributedCacheEntry>> userArtifacts) {
		this.userArtifacts = userArtifacts;
		return this;
	}

	public StreamGraphGenerator setTimeCharacteristic(TimeCharacteristic timeCharacteristic) {
		this.timeCharacteristic = timeCharacteristic;
		return this;
	}

	public StreamGraphGenerator setDefaultBufferTimeout(long defaultBufferTimeout) {
		this.defaultBufferTimeout = defaultBufferTimeout;
		return this;
	}

	public StreamGraphGenerator setJobName(String jobName) {
		this.jobName = jobName;
		return this;
	}

	public StreamGraphGenerator setGlobalDataExchangeMode(GlobalDataExchangeMode globalDataExchangeMode) {
		this.globalDataExchangeMode = globalDataExchangeMode;
		return this;
	}

	public void setSavepointRestoreSettings(SavepointRestoreSettings savepointRestoreSettings) {
		this.savepointRestoreSettings = savepointRestoreSettings;
	}

	/**
	 * Builds and returns the {@code StreamGraph}, then clears the per-run state so the
	 * generator can be garbage-collected independently of the produced graph.
	 */
	public StreamGraph generate() {
		streamGraph = new StreamGraph(executionConfig, checkpointConfig, savepointRestoreSettings);
		streamGraph.setStateBackend(stateBackend);
		streamGraph.setChaining(chaining);
		streamGraph.setScheduleMode(scheduleMode);
		streamGraph.setUserArtifacts(userArtifacts);
		streamGraph.setTimeCharacteristic(timeCharacteristic);
		streamGraph.setJobName(jobName);
		streamGraph.setGlobalDataExchangeMode(globalDataExchangeMode);

		alreadyTransformed = new HashMap<>();

		for (Transformation<?> transformation: transformations) {
			transform(transformation);
		}

		final StreamGraph builtStreamGraph = streamGraph;

		alreadyTransformed.clear();
		alreadyTransformed = null;
		streamGraph = null;

		return builtStreamGraph;
	}

	/**
	 * Transforms one {@code Transformation}.
	 *
	 * <p>This checks whether we already transformed it and exits early in that case. If not it
	 * delegates to one of the transformation specific methods.
	 */
	// NOTE(review): the transform(Transformation<?>) implementation this javadoc describes is
	// elided from this excerpt; it dispatches to the transformXxx methods below.

	/**
	 * Transforms a {@code UnionTransformation}.
	 *
	 * <p>This is easy, we only have to transform the inputs and return all the IDs in a list so
	 * that downstream operations can connect to all upstream nodes.
	 */
	private <T> Collection<Integer> transformUnion(UnionTransformation<T> union) {
		List<Transformation<T>> inputs = union.getInputs();
		List<Integer> resultIds = new ArrayList<>();

		for (Transformation<T> input: inputs) {
			resultIds.addAll(transform(input));
		}

		return resultIds;
	}

	/**
	 * Transforms a {@code PartitionTransformation}.
	 *
	 * <p>For this we create a virtual node in the {@code StreamGraph} that holds the partition
	 * property. @see StreamGraphGenerator
	 */
	private <T> Collection<Integer> transformPartition(PartitionTransformation<T> partition) {
		Transformation<T> input = partition.getInput();
		List<Integer> resultIds = new ArrayList<>();

		Collection<Integer> transformedIds = transform(input);
		for (Integer transformedId: transformedIds) {
			// One virtual partition node per upstream node.
			int virtualId = Transformation.getNewNodeId();
			streamGraph.addVirtualPartitionNode(
					transformedId, virtualId, partition.getPartitioner(), partition.getShuffleMode());
			resultIds.add(virtualId);
		}

		return resultIds;
	}

	/**
	 * Transforms a {@code SplitTransformation}.
	 *
	 * <p>We add the output selector to previously transformed nodes.
	 */
	private <T> Collection<Integer> transformSplit(SplitTransformation<T> split) {

		Transformation<T> input = split.getInput();
		Collection<Integer> resultIds = transform(input);

		validateSplitTransformation(input);

		// The recursive transform above might have already handled us.
		if (alreadyTransformed.containsKey(split)) {
			return alreadyTransformed.get(split);
		}

		for (int inputId : resultIds) {
			streamGraph.addOutputSelector(inputId, split.getOutputSelector());
		}

		return resultIds;
	}

	/**
	 * Transforms a {@code SelectTransformation}.
	 *
	 * <p>For this we create a virtual node in the {@code StreamGraph} that holds the selected
	 * names.
	 *
	 * @see org.apache.flink.streaming.api.graph.StreamGraphGenerator
	 */
	private <T> Collection<Integer> transformSelect(SelectTransformation<T> select) {
		Transformation<T> input = select.getInput();
		Collection<Integer> resultIds = transform(input);

		// The recursive transform might have already handled us.
		if (alreadyTransformed.containsKey(select)) {
			return alreadyTransformed.get(select);
		}

		List<Integer> virtualResultIds = new ArrayList<>();

		for (int inputId : resultIds) {
			int virtualId = Transformation.getNewNodeId();
			streamGraph.addVirtualSelectNode(inputId, virtualId, select.getSelectedNames());
			virtualResultIds.add(virtualId);
		}
		return virtualResultIds;
	}

	/**
	 * Transforms a {@code SideOutputTransformation}.
	 *
	 * <p>For this we create a virtual node in the {@code StreamGraph} that holds the side-output
	 * {@link org.apache.flink.util.OutputTag}.
	 *
	 * @see org.apache.flink.streaming.api.graph.StreamGraphGenerator
	 */
	private <T> Collection<Integer> transformSideOutput(SideOutputTransformation<T> sideOutput) {
		Transformation<?> input = sideOutput.getInput();
		Collection<Integer> resultIds = transform(input);

		// The recursive transform might have already handled us.
		if (alreadyTransformed.containsKey(sideOutput)) {
			return alreadyTransformed.get(sideOutput);
		}

		List<Integer> virtualResultIds = new ArrayList<>();

		for (int inputId : resultIds) {
			int virtualId = Transformation.getNewNodeId();
			streamGraph.addVirtualSideOutputNode(inputId, virtualId, sideOutput.getOutputTag());
			virtualResultIds.add(virtualId);
		}
		return virtualResultIds;
	}

	/**
	 * Transforms a {@code FeedbackTransformation}.
	 *
	 * <p>This will recursively transform the input and the feedback edges. We return the
	 * concatenation of the input IDs and the feedback IDs so that downstream operations can be
	 * wired to both.
	 *
	 * <p>This is responsible for creating the IterationSource and IterationSink which are used to
	 * feed back the elements.
	 */
	private <T> Collection<Integer> transformFeedback(FeedbackTransformation<T> iterate) {

		if (iterate.getFeedbackEdges().size() <= 0) {
			throw new IllegalStateException("Iteration " + iterate + " does not have any feedback edges.");
		}

		Transformation<T> input = iterate.getInput();
		List<Integer> resultIds = new ArrayList<>();

		// Make sure the input is transformed first; downstream can connect to input + feedback.
		Collection<Integer> inputIds = transform(input);
		resultIds.addAll(inputIds);

		// The recursive transform might have already handled us.
		if (alreadyTransformed.containsKey(iterate)) {
			return alreadyTransformed.get(iterate);
		}

		// Create the fake source/sink pair that materializes the feedback loop.
		Tuple2<StreamNode, StreamNode> itSourceAndSink = streamGraph.createIterationSourceAndSink(
			iterate.getId(),
			getNewIterationNodeId(),
			getNewIterationNodeId(),
			iterate.getWaitTime(),
			iterate.getParallelism(),
			iterate.getMaxParallelism(),
			iterate.getMinResources(),
			iterate.getPreferredResources());

		StreamNode itSource = itSourceAndSink.f0;
		StreamNode itSink = itSourceAndSink.f1;

		// Source only produces, sink only consumes the iteration's output type.
		streamGraph.setSerializers(itSource.getId(), null, null, iterate.getOutputType().createSerializer(executionConfig));
		streamGraph.setSerializers(itSink.getId(), iterate.getOutputType().createSerializer(executionConfig), null, null);

		resultIds.add(itSource.getId());

		// Register early so feedback edges that loop back to us terminate the recursion.
		alreadyTransformed.put(iterate, resultIds);

		List<Integer> allFeedbackIds = new ArrayList<>();

		for (Transformation<T> feedbackEdge : iterate.getFeedbackEdges()) {
			Collection<Integer> feedbackIds = transform(feedbackEdge);
			allFeedbackIds.addAll(feedbackIds);
			for (Integer feedbackId: feedbackIds) {
				streamGraph.addEdge(feedbackId, itSink.getId(), 0);
			}
		}

		String slotSharingGroup = determineSlotSharingGroup(null, allFeedbackIds);
		// slot sharing group of iteration node must exist
		if (slotSharingGroup == null) {
			slotSharingGroup = "SlotSharingGroup-" + iterate.getId();
		}

		itSink.setSlotSharingGroup(slotSharingGroup);
		itSource.setSlotSharingGroup(slotSharingGroup);

		return resultIds;
	}

	/**
	 * Transforms a {@code CoFeedbackTransformation}.
	 *
	 * <p>This will only transform feedback edges, the result of this transform will be wired
	 * to the second input of a Co-Transform. The original input is wired directly to the first
	 * input of the downstream Co-Transform.
	 *
	 * <p>This is responsible for creating the IterationSource and IterationSink which
	 * are used to feed back the elements.
	 */
	private <F> Collection<Integer> transformCoFeedback(CoFeedbackTransformation<F> coIterate) {

		// For co-iteration the input is not transformed here: it connects directly to the
		// first input of the downstream co-operator; we only create the feedback loop.
		Tuple2<StreamNode, StreamNode> itSourceAndSink = streamGraph.createIterationSourceAndSink(
				coIterate.getId(),
				getNewIterationNodeId(),
				getNewIterationNodeId(),
				coIterate.getWaitTime(),
				coIterate.getParallelism(),
				coIterate.getMaxParallelism(),
				coIterate.getMinResources(),
				coIterate.getPreferredResources());

		StreamNode itSource = itSourceAndSink.f0;
		StreamNode itSink = itSourceAndSink.f1;

		streamGraph.setSerializers(itSource.getId(), null, null, coIterate.getOutputType().createSerializer(executionConfig));
		streamGraph.setSerializers(itSink.getId(), coIterate.getOutputType().createSerializer(executionConfig), null, null);

		Collection<Integer> resultIds = Collections.singleton(itSource.getId());

		// Register early to terminate recursion through feedback edges.
		alreadyTransformed.put(coIterate, resultIds);

		List<Integer> allFeedbackIds = new ArrayList<>();

		for (Transformation<F> feedbackEdge : coIterate.getFeedbackEdges()) {
			Collection<Integer> feedbackIds = transform(feedbackEdge);
			allFeedbackIds.addAll(feedbackIds);
			for (Integer feedbackId: feedbackIds) {
				streamGraph.addEdge(feedbackId, itSink.getId(), 0);
			}
		}

		String slotSharingGroup = determineSlotSharingGroup(null, allFeedbackIds);

		itSink.setSlotSharingGroup(slotSharingGroup);
		itSource.setSlotSharingGroup(slotSharingGroup);

		return Collections.singleton(itSource.getId());
	}

	/**
	 * Transforms a {@code SourceTransformation}.
	 */
	private <T> Collection<Integer> transformSource(SourceTransformation<T> source) {
		String slotSharingGroup = determineSlotSharingGroup(source.getSlotSharingGroup(), Collections.emptyList());

		streamGraph.addSource(source.getId(),
				slotSharingGroup,
				source.getCoLocationGroupKey(),
				source.getOperatorFactory(),
				null,
				source.getOutputType(),
				"Source: " + source.getName());
		// Fall back to the global default parallelism when none was set explicitly.
		int parallelism = source.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ?
			source.getParallelism() : executionConfig.getParallelism();
		streamGraph.setParallelism(source.getId(), parallelism);
		streamGraph.setMaxParallelism(source.getId(), source.getMaxParallelism());
		return Collections.singleton(source.getId());
	}

	/**
	 * Transforms a {@code LegacySourceTransformation}.
	 */
	private <T> Collection<Integer> transformLegacySource(LegacySourceTransformation<T> source) {
		String slotSharingGroup = determineSlotSharingGroup(source.getSlotSharingGroup(), Collections.emptyList());

		streamGraph.addLegacySource(source.getId(),
				slotSharingGroup,
				source.getCoLocationGroupKey(),
				source.getOperatorFactory(),
				null,
				source.getOutputType(),
				"Source: " + source.getName());
		// Legacy input-format sources additionally register their InputFormat on the graph.
		if (source.getOperatorFactory() instanceof InputFormatOperatorFactory) {
			streamGraph.setInputFormat(source.getId(),
					((InputFormatOperatorFactory<T>) source.getOperatorFactory()).getInputFormat());
		}
		int parallelism = source.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ?
			source.getParallelism() : executionConfig.getParallelism();
		streamGraph.setParallelism(source.getId(), parallelism);
		streamGraph.setMaxParallelism(source.getId(), source.getMaxParallelism());
		return Collections.singleton(source.getId());
	}

	/**
	 * Transforms a {@code SinkTransformation}.
	 */
	private <T> Collection<Integer> transformSink(SinkTransformation<T> sink) {

		Collection<Integer> inputIds = transform(sink.getInput());

		String slotSharingGroup = determineSlotSharingGroup(sink.getSlotSharingGroup(), inputIds);

		streamGraph.addSink(sink.getId(),
				slotSharingGroup,
				sink.getCoLocationGroupKey(),
				sink.getOperatorFactory(),
				sink.getInput().getOutputType(),
				null,
				"Sink: " + sink.getName());

		StreamOperatorFactory operatorFactory = sink.getOperatorFactory();
		if (operatorFactory instanceof OutputFormatOperatorFactory) {
			streamGraph.setOutputFormat(sink.getId(), ((OutputFormatOperatorFactory) operatorFactory).getOutputFormat());
		}

		int parallelism = sink.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ?
			sink.getParallelism() : executionConfig.getParallelism();
		streamGraph.setParallelism(sink.getId(), parallelism);
		streamGraph.setMaxParallelism(sink.getId(), sink.getMaxParallelism());

		for (Integer inputId: inputIds) {
			streamGraph.addEdge(inputId, sink.getId(), 0);
		}

		if (sink.getStateKeySelector() != null) {
			TypeSerializer<?> keySerializer = sink.getStateKeyType().createSerializer(executionConfig);
			streamGraph.setOneInputStateKey(sink.getId(), sink.getStateKeySelector(), keySerializer);
		}

		// Sinks produce nothing for downstream consumers.
		return Collections.emptyList();
	}

	/**
	 * Transforms a {@code OneInputTransformation}.
	 *
	 * <p>This recursively transforms the inputs, creates a new {@code StreamNode} in the graph
	 * and wires the inputs to this new node.
	 */
	private <IN, OUT> Collection<Integer> transformOneInputTransform(OneInputTransformation<IN, OUT> transform) {

		Collection<Integer> inputIds = transform(transform.getInput());

		// The recursive call might have already transformed this.
		if (alreadyTransformed.containsKey(transform)) {
			return alreadyTransformed.get(transform);
		}

		String slotSharingGroup = determineSlotSharingGroup(transform.getSlotSharingGroup(), inputIds);

		streamGraph.addOperator(transform.getId(),
				slotSharingGroup,
				transform.getCoLocationGroupKey(),
				transform.getOperatorFactory(),
				transform.getInputType(),
				transform.getOutputType(),
				transform.getName());

		if (transform.getStateKeySelector() != null) {
			TypeSerializer<?> keySerializer = transform.getStateKeyType().createSerializer(executionConfig);
			streamGraph.setOneInputStateKey(transform.getId(), transform.getStateKeySelector(), keySerializer);
		}

		int parallelism = transform.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ?
			transform.getParallelism() : executionConfig.getParallelism();
		streamGraph.setParallelism(transform.getId(), parallelism);
		streamGraph.setMaxParallelism(transform.getId(), transform.getMaxParallelism());

		for (Integer inputId: inputIds) {
			streamGraph.addEdge(inputId, transform.getId(), 0);
		}

		return Collections.singleton(transform.getId());
	}

	/**
	 * Transforms a {@code TwoInputTransformation}.
	 *
	 * <p>This recursively transforms the inputs, creates a new {@code StreamNode} in the graph
	 * and wires the inputs to this new node.
	 */
	private <IN1, IN2, OUT> Collection<Integer> transformTwoInputTransform(TwoInputTransformation<IN1, IN2, OUT> transform) {

		Collection<Integer> inputIds1 = transform(transform.getInput1());
		Collection<Integer> inputIds2 = transform(transform.getInput2());

		// The recursive call might have already transformed this.
		if (alreadyTransformed.containsKey(transform)) {
			return alreadyTransformed.get(transform);
		}

		List<Integer> allInputIds = new ArrayList<>();

		allInputIds.addAll(inputIds1);
		allInputIds.addAll(inputIds2);

		String slotSharingGroup = determineSlotSharingGroup(transform.getSlotSharingGroup(), allInputIds);

		streamGraph.addCoOperator(
				transform.getId(),
				slotSharingGroup,
				transform.getCoLocationGroupKey(),
				transform.getOperatorFactory(),
				transform.getInputType1(),
				transform.getInputType2(),
				transform.getOutputType(),
				transform.getName());

		if (transform.getStateKeySelector1() != null || transform.getStateKeySelector2() != null) {
			TypeSerializer<?> keySerializer = transform.getStateKeyType().createSerializer(executionConfig);
			streamGraph.setTwoInputStateKey(transform.getId(), transform.getStateKeySelector1(), transform.getStateKeySelector2(), keySerializer);
		}

		int parallelism = transform.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ?
			transform.getParallelism() : executionConfig.getParallelism();
		streamGraph.setParallelism(transform.getId(), parallelism);
		streamGraph.setMaxParallelism(transform.getId(), transform.getMaxParallelism());

		// Edge type number 1 = first input, 2 = second input.
		for (Integer inputId: inputIds1) {
			streamGraph.addEdge(inputId, transform.getId(), 1);
		}

		for (Integer inputId: inputIds2) {
			streamGraph.addEdge(inputId, transform.getId(), 2);
		}

		return Collections.singleton(transform.getId());
	}

	/**
	 * Transforms an {@code AbstractMultipleInputTransformation}: transforms every input,
	 * adds one multi-input operator node, and wires each input with a 1-based type number.
	 */
	private <OUT> Collection<Integer> transformMultipleInputTransform(AbstractMultipleInputTransformation<OUT> transform) {
		// String literal reconstructed onto one line — it cannot contain a raw newline in Java.
		checkArgument(!transform.getInputs().isEmpty(), "Empty inputs for MultipleInputTransformation. Did you forget to add inputs?");
		MultipleInputSelectionHandler.checkSupportedInputCount(transform.getInputs().size());

		List<Collection<Integer>> allInputIds = new ArrayList<>();

		for (Transformation<?> input : transform.getInputs()) {
			allInputIds.add(transform(input));
		}

		// The recursive call might have already transformed this.
		if (alreadyTransformed.containsKey(transform)) {
			return alreadyTransformed.get(transform);
		}

		String slotSharingGroup = determineSlotSharingGroup(
				transform.getSlotSharingGroup(),
				allInputIds.stream()
						.flatMap(Collection::stream)
						.collect(Collectors.toList()));

		streamGraph.addMultipleInputOperator(
				transform.getId(),
				slotSharingGroup,
				transform.getCoLocationGroupKey(),
				transform.getOperatorFactory(),
				transform.getInputTypes(),
				transform.getOutputType(),
				transform.getName());

		int parallelism = transform.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ?
				transform.getParallelism() : executionConfig.getParallelism();
		streamGraph.setParallelism(transform.getId(), parallelism);
		streamGraph.setMaxParallelism(transform.getId(), transform.getMaxParallelism());

		if (transform instanceof KeyedMultipleInputTransformation) {
			KeyedMultipleInputTransformation keyedTransform = (KeyedMultipleInputTransformation) transform;
			TypeSerializer<?> keySerializer = keyedTransform.getStateKeyType().createSerializer(executionConfig);
			streamGraph.setMultipleInputStateKey(transform.getId(), keyedTransform.getStateKeySelectors(), keySerializer);
		}

		for (int i = 0; i < allInputIds.size(); i++) {
			Collection<Integer> inputIds = allInputIds.get(i);
			for (Integer inputId: inputIds) {
				streamGraph.addEdge(inputId, transform.getId(), i + 1);
			}
		}

		return Collections.singleton(transform.getId());
	}

	/**
	 * Determines the slot sharing group for an operation based on the slot sharing group set by
	 * the user and the slot sharing groups of the inputs.
	 *
	 * <p>If the user specifies a group name, this is taken as is. If nothing is specified and
	 * the input operations all have the same group name then this name is taken. Otherwise the
	 * default group is chosen.
	 *
	 * @param specifiedGroup The group specified by the user.
	 * @param inputIds The IDs of the input operations.
	 */
	private String determineSlotSharingGroup(String specifiedGroup, Collection<Integer> inputIds) {
		if (specifiedGroup != null) {
			return specifiedGroup;
		} else {
			String inputGroup = null;
			for (int id: inputIds) {
				String inputGroupCandidate = streamGraph.getSlotSharingGroup(id);
				if (inputGroup == null) {
					inputGroup = inputGroupCandidate;
				} else if (!inputGroup.equals(inputGroupCandidate)) {
					// Inputs disagree -> fall back to the default group.
					return DEFAULT_SLOT_SHARING_GROUP;
				}
			}
			return inputGroup == null ? DEFAULT_SLOT_SHARING_GROUP : inputGroup;
		}
	}

	// Rejects deprecated split/select combinations that are not supported; recurses through
	// unions and partitions to find the effective upstream transformation.
	private <T> void validateSplitTransformation(Transformation<T> input) {
		if (input instanceof SelectTransformation || input instanceof SplitTransformation) {
			throw new IllegalStateException("Consecutive multiple splits are not supported. Splits are deprecated. Please use side-outputs.");
		} else if (input instanceof SideOutputTransformation) {
			throw new IllegalStateException("Split after side-outputs are not supported. Splits are deprecated. Please use side-outputs.");
		} else if (input instanceof UnionTransformation) {
			for (Transformation<T> transformation : ((UnionTransformation<T>) input).getInputs()) {
				validateSplitTransformation(transformation);
			}
		} else if (input instanceof PartitionTransformation) {
			validateSplitTransformation(((PartitionTransformation) input).getInput());
		} else {
			return;
		}
	}
}
class StreamGraphGenerator { private static final Logger LOG = LoggerFactory.getLogger(StreamGraphGenerator.class); public static final int DEFAULT_LOWER_BOUND_MAX_PARALLELISM = KeyGroupRangeAssignment.DEFAULT_LOWER_BOUND_MAX_PARALLELISM; public static final ScheduleMode DEFAULT_SCHEDULE_MODE = ScheduleMode.EAGER; public static final TimeCharacteristic DEFAULT_TIME_CHARACTERISTIC = TimeCharacteristic.ProcessingTime; public static final String DEFAULT_JOB_NAME = "Flink Streaming Job"; public static final String DEFAULT_SLOT_SHARING_GROUP = "default"; private final List<Transformation<?>> transformations; private final ExecutionConfig executionConfig; private final CheckpointConfig checkpointConfig; private SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.none(); private StateBackend stateBackend; private boolean chaining = true; private ScheduleMode scheduleMode = DEFAULT_SCHEDULE_MODE; private Collection<Tuple2<String, DistributedCache.DistributedCacheEntry>> userArtifacts; private TimeCharacteristic timeCharacteristic = DEFAULT_TIME_CHARACTERISTIC; private long defaultBufferTimeout = StreamingJobGraphGenerator.UNDEFINED_NETWORK_BUFFER_TIMEOUT; private String jobName = DEFAULT_JOB_NAME; private GlobalDataExchangeMode globalDataExchangeMode = GlobalDataExchangeMode.ALL_EDGES_PIPELINED; protected static Integer iterationIdCounter = 0; public static int getNewIterationNodeId() { iterationIdCounter--; return iterationIdCounter; } private StreamGraph streamGraph; private Map<Transformation<?>, Collection<Integer>> alreadyTransformed; public StreamGraphGenerator(List<Transformation<?>> transformations, ExecutionConfig executionConfig, CheckpointConfig checkpointConfig) { this.transformations = checkNotNull(transformations); this.executionConfig = checkNotNull(executionConfig); this.checkpointConfig = checkNotNull(checkpointConfig); } public StreamGraphGenerator setStateBackend(StateBackend stateBackend) { this.stateBackend = stateBackend; 
return this; } public StreamGraphGenerator setChaining(boolean chaining) { this.chaining = chaining; return this; } public StreamGraphGenerator setScheduleMode(ScheduleMode scheduleMode) { this.scheduleMode = scheduleMode; return this; } public StreamGraphGenerator setUserArtifacts(Collection<Tuple2<String, DistributedCache.DistributedCacheEntry>> userArtifacts) { this.userArtifacts = userArtifacts; return this; } public StreamGraphGenerator setTimeCharacteristic(TimeCharacteristic timeCharacteristic) { this.timeCharacteristic = timeCharacteristic; return this; } public StreamGraphGenerator setDefaultBufferTimeout(long defaultBufferTimeout) { this.defaultBufferTimeout = defaultBufferTimeout; return this; } public StreamGraphGenerator setJobName(String jobName) { this.jobName = jobName; return this; } public StreamGraphGenerator setGlobalDataExchangeMode(GlobalDataExchangeMode globalDataExchangeMode) { this.globalDataExchangeMode = globalDataExchangeMode; return this; } public void setSavepointRestoreSettings(SavepointRestoreSettings savepointRestoreSettings) { this.savepointRestoreSettings = savepointRestoreSettings; } public StreamGraph generate() { streamGraph = new StreamGraph(executionConfig, checkpointConfig, savepointRestoreSettings); streamGraph.setStateBackend(stateBackend); streamGraph.setChaining(chaining); streamGraph.setScheduleMode(scheduleMode); streamGraph.setUserArtifacts(userArtifacts); streamGraph.setTimeCharacteristic(timeCharacteristic); streamGraph.setJobName(jobName); streamGraph.setGlobalDataExchangeMode(globalDataExchangeMode); alreadyTransformed = new HashMap<>(); for (Transformation<?> transformation: transformations) { transform(transformation); } final StreamGraph builtStreamGraph = streamGraph; alreadyTransformed.clear(); alreadyTransformed = null; streamGraph = null; return builtStreamGraph; } /** * Transforms one {@code Transformation}. * * <p>This checks whether we already transformed it and exits early in that case. 
If not it * delegates to one of the transformation specific methods. */ /** * Transforms a {@code UnionTransformation}. * * <p>This is easy, we only have to transform the inputs and return all the IDs in a list so * that downstream operations can connect to all upstream nodes. */ private <T> Collection<Integer> transformUnion(UnionTransformation<T> union) { List<Transformation<T>> inputs = union.getInputs(); List<Integer> resultIds = new ArrayList<>(); for (Transformation<T> input: inputs) { resultIds.addAll(transform(input)); } return resultIds; } /** * Transforms a {@code PartitionTransformation}. * * <p>For this we create a virtual node in the {@code StreamGraph} that holds the partition * property. @see StreamGraphGenerator */ private <T> Collection<Integer> transformPartition(PartitionTransformation<T> partition) { Transformation<T> input = partition.getInput(); List<Integer> resultIds = new ArrayList<>(); Collection<Integer> transformedIds = transform(input); for (Integer transformedId: transformedIds) { int virtualId = Transformation.getNewNodeId(); streamGraph.addVirtualPartitionNode( transformedId, virtualId, partition.getPartitioner(), partition.getShuffleMode()); resultIds.add(virtualId); } return resultIds; } /** * Transforms a {@code SplitTransformation}. * * <p>We add the output selector to previously transformed nodes. */ private <T> Collection<Integer> transformSplit(SplitTransformation<T> split) { Transformation<T> input = split.getInput(); Collection<Integer> resultIds = transform(input); validateSplitTransformation(input); if (alreadyTransformed.containsKey(split)) { return alreadyTransformed.get(split); } for (int inputId : resultIds) { streamGraph.addOutputSelector(inputId, split.getOutputSelector()); } return resultIds; } /** * Transforms a {@code SelectTransformation}. * * <p>For this we create a virtual node in the {@code StreamGraph} holds the selected names. 
* * @see org.apache.flink.streaming.api.graph.StreamGraphGenerator */ private <T> Collection<Integer> transformSelect(SelectTransformation<T> select) { Transformation<T> input = select.getInput(); Collection<Integer> resultIds = transform(input); if (alreadyTransformed.containsKey(select)) { return alreadyTransformed.get(select); } List<Integer> virtualResultIds = new ArrayList<>(); for (int inputId : resultIds) { int virtualId = Transformation.getNewNodeId(); streamGraph.addVirtualSelectNode(inputId, virtualId, select.getSelectedNames()); virtualResultIds.add(virtualId); } return virtualResultIds; } /** * Transforms a {@code SideOutputTransformation}. * * <p>For this we create a virtual node in the {@code StreamGraph} that holds the side-output * {@link org.apache.flink.util.OutputTag}. * * @see org.apache.flink.streaming.api.graph.StreamGraphGenerator */ private <T> Collection<Integer> transformSideOutput(SideOutputTransformation<T> sideOutput) { Transformation<?> input = sideOutput.getInput(); Collection<Integer> resultIds = transform(input); if (alreadyTransformed.containsKey(sideOutput)) { return alreadyTransformed.get(sideOutput); } List<Integer> virtualResultIds = new ArrayList<>(); for (int inputId : resultIds) { int virtualId = Transformation.getNewNodeId(); streamGraph.addVirtualSideOutputNode(inputId, virtualId, sideOutput.getOutputTag()); virtualResultIds.add(virtualId); } return virtualResultIds; } /** * Transforms a {@code FeedbackTransformation}. * * <p>This will recursively transform the input and the feedback edges. We return the * concatenation of the input IDs and the feedback IDs so that downstream operations can be * wired to both. * * <p>This is responsible for creating the IterationSource and IterationSink which are used to * feed back the elements. 
*/ private <T> Collection<Integer> transformFeedback(FeedbackTransformation<T> iterate) { if (iterate.getFeedbackEdges().size() <= 0) { throw new IllegalStateException("Iteration " + iterate + " does not have any feedback edges."); } Transformation<T> input = iterate.getInput(); List<Integer> resultIds = new ArrayList<>(); Collection<Integer> inputIds = transform(input); resultIds.addAll(inputIds); if (alreadyTransformed.containsKey(iterate)) { return alreadyTransformed.get(iterate); } Tuple2<StreamNode, StreamNode> itSourceAndSink = streamGraph.createIterationSourceAndSink( iterate.getId(), getNewIterationNodeId(), getNewIterationNodeId(), iterate.getWaitTime(), iterate.getParallelism(), iterate.getMaxParallelism(), iterate.getMinResources(), iterate.getPreferredResources()); StreamNode itSource = itSourceAndSink.f0; StreamNode itSink = itSourceAndSink.f1; streamGraph.setSerializers(itSource.getId(), null, null, iterate.getOutputType().createSerializer(executionConfig)); streamGraph.setSerializers(itSink.getId(), iterate.getOutputType().createSerializer(executionConfig), null, null); resultIds.add(itSource.getId()); alreadyTransformed.put(iterate, resultIds); List<Integer> allFeedbackIds = new ArrayList<>(); for (Transformation<T> feedbackEdge : iterate.getFeedbackEdges()) { Collection<Integer> feedbackIds = transform(feedbackEdge); allFeedbackIds.addAll(feedbackIds); for (Integer feedbackId: feedbackIds) { streamGraph.addEdge(feedbackId, itSink.getId(), 0 ); } } String slotSharingGroup = determineSlotSharingGroup(null, allFeedbackIds); if (slotSharingGroup == null) { slotSharingGroup = "SlotSharingGroup-" + iterate.getId(); } itSink.setSlotSharingGroup(slotSharingGroup); itSource.setSlotSharingGroup(slotSharingGroup); return resultIds; } /** * Transforms a {@code CoFeedbackTransformation}. * * <p>This will only transform feedback edges, the result of this transform will be wired * to the second input of a Co-Transform. 
The original input is wired directly to the first * input of the downstream Co-Transform. * * <p>This is responsible for creating the IterationSource and IterationSink which * are used to feed back the elements. */ private <F> Collection<Integer> transformCoFeedback(CoFeedbackTransformation<F> coIterate) { Tuple2<StreamNode, StreamNode> itSourceAndSink = streamGraph.createIterationSourceAndSink( coIterate.getId(), getNewIterationNodeId(), getNewIterationNodeId(), coIterate.getWaitTime(), coIterate.getParallelism(), coIterate.getMaxParallelism(), coIterate.getMinResources(), coIterate.getPreferredResources()); StreamNode itSource = itSourceAndSink.f0; StreamNode itSink = itSourceAndSink.f1; streamGraph.setSerializers(itSource.getId(), null, null, coIterate.getOutputType().createSerializer(executionConfig)); streamGraph.setSerializers(itSink.getId(), coIterate.getOutputType().createSerializer(executionConfig), null, null); Collection<Integer> resultIds = Collections.singleton(itSource.getId()); alreadyTransformed.put(coIterate, resultIds); List<Integer> allFeedbackIds = new ArrayList<>(); for (Transformation<F> feedbackEdge : coIterate.getFeedbackEdges()) { Collection<Integer> feedbackIds = transform(feedbackEdge); allFeedbackIds.addAll(feedbackIds); for (Integer feedbackId: feedbackIds) { streamGraph.addEdge(feedbackId, itSink.getId(), 0 ); } } String slotSharingGroup = determineSlotSharingGroup(null, allFeedbackIds); itSink.setSlotSharingGroup(slotSharingGroup); itSource.setSlotSharingGroup(slotSharingGroup); return Collections.singleton(itSource.getId()); } /** * Transforms a {@code SourceTransformation}. 
*/ private <T> Collection<Integer> transformSource(SourceTransformation<T> source) { String slotSharingGroup = determineSlotSharingGroup(source.getSlotSharingGroup(), Collections.emptyList()); streamGraph.addSource(source.getId(), slotSharingGroup, source.getCoLocationGroupKey(), source.getOperatorFactory(), null, source.getOutputType(), "Source: " + source.getName()); int parallelism = source.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ? source.getParallelism() : executionConfig.getParallelism(); streamGraph.setParallelism(source.getId(), parallelism); streamGraph.setMaxParallelism(source.getId(), source.getMaxParallelism()); return Collections.singleton(source.getId()); } /** * Transforms a {@code LegacySourceTransformation}. */ private <T> Collection<Integer> transformLegacySource(LegacySourceTransformation<T> source) { String slotSharingGroup = determineSlotSharingGroup(source.getSlotSharingGroup(), Collections.emptyList()); streamGraph.addLegacySource(source.getId(), slotSharingGroup, source.getCoLocationGroupKey(), source.getOperatorFactory(), null, source.getOutputType(), "Source: " + source.getName()); if (source.getOperatorFactory() instanceof InputFormatOperatorFactory) { streamGraph.setInputFormat(source.getId(), ((InputFormatOperatorFactory<T>) source.getOperatorFactory()).getInputFormat()); } int parallelism = source.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ? source.getParallelism() : executionConfig.getParallelism(); streamGraph.setParallelism(source.getId(), parallelism); streamGraph.setMaxParallelism(source.getId(), source.getMaxParallelism()); return Collections.singleton(source.getId()); } /** * Transforms a {@code SinkTransformation}. 
*/ private <T> Collection<Integer> transformSink(SinkTransformation<T> sink) { Collection<Integer> inputIds = transform(sink.getInput()); String slotSharingGroup = determineSlotSharingGroup(sink.getSlotSharingGroup(), inputIds); streamGraph.addSink(sink.getId(), slotSharingGroup, sink.getCoLocationGroupKey(), sink.getOperatorFactory(), sink.getInput().getOutputType(), null, "Sink: " + sink.getName()); StreamOperatorFactory operatorFactory = sink.getOperatorFactory(); if (operatorFactory instanceof OutputFormatOperatorFactory) { streamGraph.setOutputFormat(sink.getId(), ((OutputFormatOperatorFactory) operatorFactory).getOutputFormat()); } int parallelism = sink.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ? sink.getParallelism() : executionConfig.getParallelism(); streamGraph.setParallelism(sink.getId(), parallelism); streamGraph.setMaxParallelism(sink.getId(), sink.getMaxParallelism()); for (Integer inputId: inputIds) { streamGraph.addEdge(inputId, sink.getId(), 0 ); } if (sink.getStateKeySelector() != null) { TypeSerializer<?> keySerializer = sink.getStateKeyType().createSerializer(executionConfig); streamGraph.setOneInputStateKey(sink.getId(), sink.getStateKeySelector(), keySerializer); } return Collections.emptyList(); } /** * Transforms a {@code OneInputTransformation}. * * <p>This recursively transforms the inputs, creates a new {@code StreamNode} in the graph and * wired the inputs to this new node. 
*/ private <IN, OUT> Collection<Integer> transformOneInputTransform(OneInputTransformation<IN, OUT> transform) { Collection<Integer> inputIds = transform(transform.getInput()); if (alreadyTransformed.containsKey(transform)) { return alreadyTransformed.get(transform); } String slotSharingGroup = determineSlotSharingGroup(transform.getSlotSharingGroup(), inputIds); streamGraph.addOperator(transform.getId(), slotSharingGroup, transform.getCoLocationGroupKey(), transform.getOperatorFactory(), transform.getInputType(), transform.getOutputType(), transform.getName()); if (transform.getStateKeySelector() != null) { TypeSerializer<?> keySerializer = transform.getStateKeyType().createSerializer(executionConfig); streamGraph.setOneInputStateKey(transform.getId(), transform.getStateKeySelector(), keySerializer); } int parallelism = transform.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ? transform.getParallelism() : executionConfig.getParallelism(); streamGraph.setParallelism(transform.getId(), parallelism); streamGraph.setMaxParallelism(transform.getId(), transform.getMaxParallelism()); for (Integer inputId: inputIds) { streamGraph.addEdge(inputId, transform.getId(), 0); } return Collections.singleton(transform.getId()); } /** * Transforms a {@code TwoInputTransformation}. * * <p>This recursively transforms the inputs, creates a new {@code StreamNode} in the graph and * wired the inputs to this new node. 
*/ private <IN1, IN2, OUT> Collection<Integer> transformTwoInputTransform(TwoInputTransformation<IN1, IN2, OUT> transform) { Collection<Integer> inputIds1 = transform(transform.getInput1()); Collection<Integer> inputIds2 = transform(transform.getInput2()); if (alreadyTransformed.containsKey(transform)) { return alreadyTransformed.get(transform); } List<Integer> allInputIds = new ArrayList<>(); allInputIds.addAll(inputIds1); allInputIds.addAll(inputIds2); String slotSharingGroup = determineSlotSharingGroup(transform.getSlotSharingGroup(), allInputIds); streamGraph.addCoOperator( transform.getId(), slotSharingGroup, transform.getCoLocationGroupKey(), transform.getOperatorFactory(), transform.getInputType1(), transform.getInputType2(), transform.getOutputType(), transform.getName()); if (transform.getStateKeySelector1() != null || transform.getStateKeySelector2() != null) { TypeSerializer<?> keySerializer = transform.getStateKeyType().createSerializer(executionConfig); streamGraph.setTwoInputStateKey(transform.getId(), transform.getStateKeySelector1(), transform.getStateKeySelector2(), keySerializer); } int parallelism = transform.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ? transform.getParallelism() : executionConfig.getParallelism(); streamGraph.setParallelism(transform.getId(), parallelism); streamGraph.setMaxParallelism(transform.getId(), transform.getMaxParallelism()); for (Integer inputId: inputIds1) { streamGraph.addEdge(inputId, transform.getId(), 1 ); } for (Integer inputId: inputIds2) { streamGraph.addEdge(inputId, transform.getId(), 2 ); } return Collections.singleton(transform.getId()); } private <OUT> Collection<Integer> transformMultipleInputTransform(AbstractMultipleInputTransformation<OUT> transform) { checkArgument(!transform.getInputs().isEmpty(), "Empty inputs for MultipleInputTransformation. 
Did you forget to add inputs?"); MultipleInputSelectionHandler.checkSupportedInputCount(transform.getInputs().size()); List<Collection<Integer>> allInputIds = new ArrayList<>(); for (Transformation<?> input : transform.getInputs()) { allInputIds.add(transform(input)); } if (alreadyTransformed.containsKey(transform)) { return alreadyTransformed.get(transform); } String slotSharingGroup = determineSlotSharingGroup( transform.getSlotSharingGroup(), allInputIds.stream() .flatMap(Collection::stream) .collect(Collectors.toList())); streamGraph.addMultipleInputOperator( transform.getId(), slotSharingGroup, transform.getCoLocationGroupKey(), transform.getOperatorFactory(), transform.getInputTypes(), transform.getOutputType(), transform.getName()); int parallelism = transform.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT ? transform.getParallelism() : executionConfig.getParallelism(); streamGraph.setParallelism(transform.getId(), parallelism); streamGraph.setMaxParallelism(transform.getId(), transform.getMaxParallelism()); if (transform instanceof KeyedMultipleInputTransformation) { KeyedMultipleInputTransformation keyedTransform = (KeyedMultipleInputTransformation) transform; TypeSerializer<?> keySerializer = keyedTransform.getStateKeyType().createSerializer(executionConfig); streamGraph.setMultipleInputStateKey(transform.getId(), keyedTransform.getStateKeySelectors(), keySerializer); } for (int i = 0; i < allInputIds.size(); i++) { Collection<Integer> inputIds = allInputIds.get(i); for (Integer inputId: inputIds) { streamGraph.addEdge(inputId, transform.getId(), i + 1 ); } } return Collections.singleton(transform.getId()); } /** * Determines the slot sharing group for an operation based on the slot sharing group set by * the user and the slot sharing groups of the inputs. * * <p>If the user specifies a group name, this is taken as is. If nothing is specified and * the input operations all have the same group name then this name is taken. 
Otherwise the * default group is chosen. * * @param specifiedGroup The group specified by the user. * @param inputIds The IDs of the input operations. */ private String determineSlotSharingGroup(String specifiedGroup, Collection<Integer> inputIds) { if (specifiedGroup != null) { return specifiedGroup; } else { String inputGroup = null; for (int id: inputIds) { String inputGroupCandidate = streamGraph.getSlotSharingGroup(id); if (inputGroup == null) { inputGroup = inputGroupCandidate; } else if (!inputGroup.equals(inputGroupCandidate)) { return DEFAULT_SLOT_SHARING_GROUP; } } return inputGroup == null ? DEFAULT_SLOT_SHARING_GROUP : inputGroup; } } private <T> void validateSplitTransformation(Transformation<T> input) { if (input instanceof SelectTransformation || input instanceof SplitTransformation) { throw new IllegalStateException("Consecutive multiple splits are not supported. Splits are deprecated. Please use side-outputs."); } else if (input instanceof SideOutputTransformation) { throw new IllegalStateException("Split after side-outputs are not supported. Splits are deprecated. Please use side-outputs."); } else if (input instanceof UnionTransformation) { for (Transformation<T> transformation : ((UnionTransformation<T>) input).getInputs()) { validateSplitTransformation(transformation); } } else if (input instanceof PartitionTransformation) { validateSplitTransformation(((PartitionTransformation) input).getInput()); } else { return; } } }
Please rename variable name `utils` because of there is no any relationship with variable name and class type.
public UpdateStatementContext(final UpdateStatement sqlStatement) { super(sqlStatement); TableExtractor utils = new TableExtractor(); utils.extractTablesFromUpdate(sqlStatement); tablesContext = new TablesContext(utils.getRewriteTables()); }
TableExtractor utils = new TableExtractor();
public UpdateStatementContext(final UpdateStatement sqlStatement) { super(sqlStatement); TableExtractor tableExtractor = new TableExtractor(); tableExtractor.extractTablesFromUpdate(sqlStatement); tablesContext = new TablesContext(tableExtractor.getRewriteTables()); }
class UpdateStatementContext extends CommonSQLStatementContext<UpdateStatement> implements TableAvailable, WhereAvailable { private final TablesContext tablesContext; @Override public Collection<SimpleTableSegment> getAllTables() { TableExtractor tableExtractor = new TableExtractor(); tableExtractor.extractTablesFromUpdate(getSqlStatement()); return tableExtractor.getRewriteTables(); } @Override public Optional<WhereSegment> getWhere() { return getSqlStatement().getWhere(); } }
class UpdateStatementContext extends CommonSQLStatementContext<UpdateStatement> implements TableAvailable, WhereAvailable { private final TablesContext tablesContext; @Override public Collection<SimpleTableSegment> getAllTables() { TableExtractor tableExtractor = new TableExtractor(); tableExtractor.extractTablesFromUpdate(getSqlStatement()); return tableExtractor.getRewriteTables(); } @Override public Optional<WhereSegment> getWhere() { return getSqlStatement().getWhere(); } }
We don't really have something, but it would likely make sense to add it
Object aroundInvoke(InvocationContext ctx) throws Exception { if (ctx.getMethod().getReturnType().equals(Uni.class)) { return invokeUni(ctx); } return invoke(ctx); }
if (ctx.getMethod().getReturnType().equals(Uni.class)) {
Object aroundInvoke(InvocationContext ctx) throws Exception { switch (ReactiveType.valueOf(ctx.getMethod())) { case UNI: return invokeUni(ctx); case MULTI: return invokeMulti(ctx); case STAGE: return invokeStage(ctx); default: return invoke(ctx); } }
class ActivateRequestContextInterceptor { @AroundInvoke private Uni<?> invokeUni(InvocationContext ctx) { return Uni.createFrom().item(Arc.container()::requestContext) .chain(requestContext -> { if (requestContext.isActive()) { return proceedWithUni(ctx); } return Uni.createFrom().deferred(() -> { requestContext.activate(); return proceedWithUni(ctx); }).eventually(requestContext::terminate); }); } private Uni<?> proceedWithUni(InvocationContext ctx) { try { return (Uni<?>) ctx.proceed(); } catch (Throwable t) { return Uni.createFrom().failure(t); } } private Object invoke(InvocationContext ctx) throws Exception { ManagedContext requestContext = Arc.container().requestContext(); if (requestContext.isActive()) { return ctx.proceed(); } try { requestContext.activate(); return ctx.proceed(); } finally { requestContext.terminate(); } } }
class ActivateRequestContextInterceptor { @AroundInvoke private CompletionStage<?> invokeStage(InvocationContext ctx) { ManagedContext requestContext = Arc.container().requestContext(); if (requestContext.isActive()) { return proceedWithStage(ctx); } return activate(requestContext) .thenCompose(v -> proceedWithStage(ctx)) .whenComplete((r, t) -> requestContext.terminate()); } private static CompletionStage<ManagedContext> activate(ManagedContext requestContext) { try { requestContext.activate(); return CompletableFuture.completedStage(requestContext); } catch (Throwable t) { return CompletableFuture.failedStage(t); } } private CompletionStage<?> proceedWithStage(InvocationContext ctx) { try { return (CompletionStage<?>) ctx.proceed(); } catch (Throwable t) { return CompletableFuture.failedStage(t); } } private Multi<?> invokeMulti(InvocationContext ctx) { return Multi.createFrom().deferred(() -> { ManagedContext requestContext = Arc.container().requestContext(); if (requestContext.isActive()) { return proceedWithMulti(ctx); } return Multi.createFrom().deferred(() -> { requestContext.activate(); return proceedWithMulti(ctx); }).onTermination().invoke(requestContext::terminate); }); } private Multi<?> proceedWithMulti(InvocationContext ctx) { try { return (Multi<?>) ctx.proceed(); } catch (Throwable t) { return Multi.createFrom().failure(t); } } private Uni<?> invokeUni(InvocationContext ctx) { return Uni.createFrom().deferred(() -> { ManagedContext requestContext = Arc.container().requestContext(); if (requestContext.isActive()) { return proceedWithUni(ctx); } return Uni.createFrom().deferred(() -> { requestContext.activate(); return proceedWithUni(ctx); }).eventually(requestContext::terminate); }); } private Uni<?> proceedWithUni(InvocationContext ctx) { try { return (Uni<?>) ctx.proceed(); } catch (Throwable t) { return Uni.createFrom().failure(t); } } private Object invoke(InvocationContext ctx) throws Exception { ManagedContext requestContext = 
Arc.container().requestContext(); if (requestContext.isActive()) { return ctx.proceed(); } try { requestContext.activate(); return ctx.proceed(); } finally { requestContext.terminate(); } } }
Only these test jobs will be triggered automatically. Undeclared jobs may still be triggered by users, though, but that likely won't happen if they aren't shown here :)
public List<StepStatus> allSteps() { List<JobId> firstTestJobs = List.of(firstDeclaredOrElseImplicitTest(systemTest), firstDeclaredOrElseImplicitTest(stagingTest)); return allSteps.stream() .filter(step -> step.isDeclared() || firstTestJobs.contains(step.job().orElseThrow())) .collect(toUnmodifiableList()); }
List<JobId> firstTestJobs = List.of(firstDeclaredOrElseImplicitTest(systemTest),
public List<StepStatus> allSteps() { List<JobId> firstTestJobs = List.of(firstDeclaredOrElseImplicitTest(systemTest), firstDeclaredOrElseImplicitTest(stagingTest)); return allSteps.stream() .filter(step -> step.isDeclared() || firstTestJobs.contains(step.job().orElseThrow())) .collect(toUnmodifiableList()); }
/**
 * Status of the deployment job steps of an application, across all its instances,
 * computed from the deployment spec and the history of its jobs.
 */
class DeploymentStatus {

    /** All jobs for the given application in the given system: system and staging test, plus one per declared production zone or test. */
    public static List<JobId> jobsFor(Application application, SystemName system) {
        if (DeploymentSpec.empty.equals(application.deploymentSpec()))
            return List.of();
        return application.deploymentSpec().instances().stream()
                          .flatMap(spec -> Stream.concat(Stream.of(systemTest, stagingTest),
                                                         flatten(spec).filter(step -> step.concerns(prod))
                                                                      .map(step -> {
                                                                          if (step instanceof DeclaredZone)
                                                                              return JobType.from(system, prod, ((DeclaredZone) step).region().get());
                                                                          return JobType.testFrom(system, ((DeclaredTest) step).region());
                                                                      })
                                                                      // Steps with no matching job in this system are dropped.
                                                                      .flatMap(Optional::stream))
                                                 .map(type -> new JobId(application.id().instance(spec.name()), type)))
                          .collect(toUnmodifiableList());
    }

    /** Depth-first flattening of a composite step into its primitive steps. */
    private static Stream<DeploymentSpec.Step> flatten(DeploymentSpec.Step step) {
        return step instanceof DeploymentSpec.Steps ? step.steps().stream().flatMap(DeploymentStatus::flatten) : Stream.of(step);
    }

    /** Union of two lists, preserving first-occurrence order and dropping duplicates. */
    private static <T> List<T> union(List<T> first, List<T> second) {
        return Stream.concat(first.stream(), second.stream()).distinct().collect(toUnmodifiableList());
    }

    private final Application application;
    private final JobList allJobs;
    private final SystemName system;
    private final Version systemVersion;
    private final Instant now;
    private final Map<JobId, StepStatus> jobSteps;
    private final List<StepStatus> allSteps;

    public DeploymentStatus(Application application, Map<JobId, JobStatus> allJobs, SystemName system,
                            Version systemVersion, Instant now) {
        this.application = requireNonNull(application);
        this.allJobs = JobList.from(allJobs.values());
        this.system = requireNonNull(system);
        this.systemVersion = requireNonNull(systemVersion);
        this.now = requireNonNull(now);
        // jobDependencies fills this list as a side effect, in declaration order.
        List<StepStatus> allSteps = new ArrayList<>();
        this.jobSteps = jobDependencies(application.deploymentSpec(), allSteps);
        this.allSteps = List.copyOf(allSteps);
    }

    /** The application this deployment status concerns. */
    public Application application() {
        return application;
    }

    /** A filterable list of the status of all jobs for this application. */
    public JobList jobs() {
        return allJobs;
    }

    /** Whether any jobs of this application are failing with other errors than lack of capacity in a test zone. */
    public boolean hasFailures() {
        return ! allJobs.failing()
                        .not().withStatus(RunStatus.outOfCapacity)
                        .isEmpty();
    }

    /** All job statuses, by job type, for the given instance. */
    public Map<JobType, JobStatus> instanceJobs(InstanceName instance) {
        return allJobs.asList().stream()
                      .filter(job -> job.id().application().equals(application.id().instance(instance)))
                      .collect(Collectors.toUnmodifiableMap(job -> job.id().type(),
                                                            job -> job));
    }

    /** Filterable job status lists for each instance of this application. */
    public Map<ApplicationId, JobList> instanceJobs() {
        return allJobs.asList().stream()
                      .collect(groupingBy(job -> job.id().application(),
                                          collectingAndThen(toUnmodifiableList(), JobList::from)));
    }

    /**
     * The set of jobs that need to run for the changes of each instance of the application to be considered complete,
     * and any test jobs for any outstanding change, which will likely be needed to later deploy this change.
     */
    public Map<JobId, List<Versions>> jobsToRun() {
        Map<InstanceName, Change> changes = new LinkedHashMap<>();
        for (InstanceName instance : application.deploymentSpec().instanceNames())
            changes.put(instance, application.require(instance).change());
        Map<JobId, List<Versions>> jobs = jobsToRun(changes);
        // Add test jobs for each instance's outstanding change stacked on its current change.
        for (InstanceName instance : application.deploymentSpec().instanceNames())
            changes.put(instance, outstandingChange(instance).onTopOf(application.require(instance).change()));
        var testJobs = jobsToRun(changes, true).entrySet().stream()
                                               .filter(entry -> ! entry.getKey().type().isProduction());
        return Stream.concat(jobs.entrySet().stream(), testJobs)
                     .collect(collectingAndThen(toMap(Map.Entry::getKey,
                                                      Map.Entry::getValue,
                                                      DeploymentStatus::union,
                                                      LinkedHashMap::new),
                                                ImmutableMap::copyOf));
    }

    /**
     * The jobs to run for the given changes; with eagerTests, upgrades of existing
     * production deployments are assumed to succeed when computing test versions.
     */
    private Map<JobId, List<Versions>> jobsToRun(Map<InstanceName, Change> changes, boolean eagerTests) {
        Map<JobId, Versions> productionJobs = new LinkedHashMap<>();
        changes.forEach((instance, change) -> productionJobs.putAll(productionJobs(instance, change, eagerTests)));
        Map<JobId, List<Versions>> testJobs = testJobs(productionJobs);
        Map<JobId, List<Versions>> jobs = new LinkedHashMap<>(testJobs);
        productionJobs.forEach((job, versions) -> jobs.put(job, List.of(versions)));
        // Add declared tests not already covered above, for instances with a change to roll out.
        jobSteps.forEach((job, step) -> {
            if ( ! step.isDeclared() || jobs.containsKey(job))
                return;
            Change change = changes.get(job.application().instance());
            if (change == null || ! change.hasTargets())
                return;
            Optional<JobId> firstProductionJobWithDeployment = jobSteps.keySet().stream()
                                                                       .filter(jobId -> jobId.type().isProduction() && jobId.type().isDeployment())
                                                                       .filter(jobId -> deploymentFor(jobId).isPresent())
                                                                       .findFirst();
            Versions versions = Versions.from(change, application, firstProductionJobWithDeployment.flatMap(this::deploymentFor), systemVersion);
            if (step.completedAt(change, firstProductionJobWithDeployment).isEmpty())
                jobs.merge(job, List.of(versions), DeploymentStatus::union);
        });
        return ImmutableMap.copyOf(jobs);
    }

    /** The set of jobs that need to run for the given changes to be considered complete. */
    public Map<JobId, List<Versions>> jobsToRun(Map<InstanceName, Change> changes) {
        return jobsToRun(changes, false);
    }

    /** The step status for all steps in the deployment spec of this, which are jobs, in the same order as in the deployment spec. */
    public Map<JobId, StepStatus> jobSteps() { return jobSteps; }

    /** The status of each instance step of this application, keyed by instance name, in declaration order. */
    public Map<InstanceName, StepStatus> instanceSteps() {
        ImmutableMap.Builder<InstanceName, StepStatus> instances = ImmutableMap.builder();
        for (StepStatus status : allSteps)
            if (status instanceof InstanceStatus)
                instances.put(status.instance(), status);
        return instances.build();
    }

    /** The current deployment in the given job's zone, for the job's instance, if any. */
    public Optional<Deployment> deploymentFor(JobId job) {
        return Optional.ofNullable(application.require(job.application().instance())
                                              .deployments().get(job.type().zone(system)));
    }

    /**
     * The change of this application's latest submission, if this upgrades any of its production deployments,
     * and has not yet started rolling out, due to some other change or a block window being present at the time of submission.
     */
    public Change outstandingChange(InstanceName instance) {
        return application.latestVersion().map(Change::of)
                          .filter(change -> application.require(instance).change().application().map(change::upgrades).orElse(true))
                          .filter(change -> ! jobsToRun(Map.of(instance, change)).isEmpty())
                          .orElse(Change.empty());
    }

    /**
     * True if the job has already been triggered on the given versions, or if all test types (systemTest, stagingTest),
     * restricted to the job's instance if declared in that instance, have successful runs on the given versions.
     */
    public boolean isTested(JobId job, Change change) {
        Versions versions = Versions.from(change, application, deploymentFor(job), systemVersion);
        return allJobs.triggeredOn(versions).get(job).isPresent()
               || Stream.of(systemTest, stagingTest)
                        .noneMatch(testType -> declaredTest(job.application(), testType).map(__ -> allJobs.instance(job.application().instance()))
                                                                                        .orElse(allJobs)
                                                                                        .type(testType)
                                                                                        .successOn(versions).isEmpty());
    }

    /**
     * The production jobs of the given instance not yet complete for the given change;
     * with assumeUpgradesSucceed, existing deployments are treated as already on the change's platform.
     */
    private Map<JobId, Versions> productionJobs(InstanceName instance, Change change, boolean assumeUpgradesSucceed) {
        ImmutableMap.Builder<JobId, Versions> jobs = ImmutableMap.builder();
        jobSteps.forEach((job, step) -> {
            Optional<Deployment> deployment = deploymentFor(job)
                    .map(existing -> assumeUpgradesSucceed ? new Deployment(existing.zone(),
                                                                            existing.applicationVersion(),
                                                                            change.platform().orElse(existing.version()),
                                                                            existing.at(),
                                                                            existing.metrics(),
                                                                            existing.activity(),
                                                                            existing.quota())
                                                           : existing);
            if (   job.application().instance().equals(instance)
                && job.type().isProduction()
                && step.completedAt(change).isEmpty())
                jobs.put(job, Versions.from(change, application, deployment, systemVersion));
        });
        return jobs.build();
    }

    /** The production jobs that need to run to complete roll-out of the given change to production. */
    public Map<JobId, Versions> productionJobs(InstanceName instance, Change change) {
        return productionJobs(instance, change, false);
    }

    /** The test jobs that need to run prior to the given production deployment jobs. */
    public Map<JobId, List<Versions>> testJobs(Map<JobId, Versions> jobs) {
        Map<JobId, List<Versions>> testJobs = new LinkedHashMap<>();
        for (JobType testType : List.of(systemTest, stagingTest)) {
            // First, declared tests of the same instance as each production deployment.
            jobs.forEach((job, versions) -> {
                if (job.type().isProduction() && job.type().isDeployment()) {
                    declaredTest(job.application(), testType).ifPresent(testJob -> {
                        if (allJobs.successOn(versions).get(testJob).isEmpty())
                            testJobs.merge(testJob, List.of(versions), DeploymentStatus::union);
                    });
                }
            });
            // Then, for versions still untested, fall back to the first declared-or-else-implicit test job.
            jobs.forEach((job, versions) -> {
                if (   job.type().isProduction() && job.type().isDeployment()
                    && allJobs.successOn(versions).type(testType).isEmpty()
                    && testJobs.keySet().stream()
                               .noneMatch(test ->    test.type() == testType
                                                  && testJobs.get(test).contains(versions)))
                    testJobs.merge(firstDeclaredOrElseImplicitTest(testType), List.of(versions), DeploymentStatus::union);
            });
        }
        return ImmutableMap.copyOf(testJobs);
    }

    /** The first instance's test job of the given type which is declared, or else the first instance's implicit one. */
    private JobId firstDeclaredOrElseImplicitTest(JobType testJob) {
        // comparing on ! isDeclared(): false sorts before true, so declared tests win; ties keep declaration order.
        return application.deploymentSpec().instanceNames().stream()
                          .map(name -> new JobId(application.id().instance(name), testJob))
                          .min(comparing(id -> ! jobSteps.get(id).isDeclared())).orElseThrow();
    }

    /** JobId of any declared test of the given type, for the given instance. */
    private Optional<JobId> declaredTest(ApplicationId instanceId, JobType testJob) {
        JobId jobId = new JobId(instanceId, testJob);
        return jobSteps.get(jobId).isDeclared() ? Optional.of(jobId) : Optional.empty();
    }

    /** A DAG of the dependencies between the primitive steps in the spec, with iteration order equal to declaration order. */
    private Map<JobId, StepStatus> jobDependencies(DeploymentSpec spec, List<StepStatus> allSteps) {
        if (DeploymentSpec.empty.equals(spec))
            return Map.of();
        Map<JobId, StepStatus> dependencies = new LinkedHashMap<>();
        List<StepStatus> previous = List.of();
        for (DeploymentSpec.Step step : spec.steps())
            previous = fillStep(dependencies, allSteps, step, previous, null);
        return ImmutableMap.copyOf(dependencies);
    }

    /** Adds the primitive steps contained in the given step, which depend on the given previous primitives, to the dependency graph. */
    private List<StepStatus> fillStep(Map<JobId, StepStatus> dependencies, List<StepStatus> allSteps,
                                      DeploymentSpec.Step step, List<StepStatus> previous, InstanceName instance) {
        // Leaf step: a delay, or a single job, inside some instance.
        if (step.steps().isEmpty()) {
            if (instance == null)
                return previous; // Steps outside any instance carry no job.
            if ( ! step.delay().isZero()) {
                StepStatus stepStatus = new DelayStatus((DeploymentSpec.Delay) step, previous, instance);
                allSteps.add(stepStatus);
                return List.of(stepStatus);
            }
            JobType jobType;
            StepStatus stepStatus;
            if (step.concerns(test) || step.concerns(staging)) {
                jobType = JobType.from(system, ((DeclaredZone) step).environment(), null)
                                 .orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
                stepStatus = JobStepStatus.ofTestDeployment((DeclaredZone) step, List.of(), this, instance, jobType, true);
                // Declared system/staging tests accumulate as parallel dependencies.
                previous = new ArrayList<>(previous);
                previous.add(stepStatus);
            }
            else if (step.isTest()) {
                jobType = JobType.testFrom(system, ((DeclaredTest) step).region())
                                 .orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
                JobType preType = JobType.from(system, prod, ((DeclaredTest) step).region())
                                         .orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
                stepStatus = JobStepStatus.ofProductionTest((DeclaredTest) step, previous, this, instance, jobType, preType);
                previous = List.of(stepStatus);
            }
            else if (step.concerns(prod)) {
                jobType = JobType.from(system, ((DeclaredZone) step).environment(), ((DeclaredZone) step).region().get())
                                 .orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system));
                stepStatus = JobStepStatus.ofProductionDeployment((DeclaredZone) step, previous, this, instance, jobType);
                previous = List.of(stepStatus);
            }
            else
                return previous;
            JobId jobId = new JobId(application.id().instance(instance), jobType);
            // A declared job replaces any implicit one registered earlier for the same id.
            allSteps.removeIf(existing -> existing.job().equals(Optional.of(jobId)));
            allSteps.add(stepStatus);
            dependencies.put(jobId, stepStatus);
            return previous;
        }

        // Instance step: register the instance itself, plus implicit test jobs not yet declared.
        if (step instanceof DeploymentInstanceSpec) {
            DeploymentInstanceSpec spec = ((DeploymentInstanceSpec) step);
            StepStatus instanceStatus = new InstanceStatus(spec, previous, now, application.require(spec.name()), this);
            instance = spec.name();
            allSteps.add(instanceStatus);
            previous = List.of(instanceStatus);
            for (JobType test : List.of(systemTest, stagingTest)) {
                JobId job = new JobId(application.id().instance(instance), test);
                if ( ! dependencies.containsKey(job)) {
                    var testStatus = JobStepStatus.ofTestDeployment(new DeclaredZone(test.environment()), List.of(),
                                                                    this, job.application().instance(), test, false);
                    dependencies.put(job, testStatus);
                    allSteps.add(testStatus);
                }
            }
        }

        // Serial composite: each child depends on the previous one.
        if (step.isOrdered()) {
            for (DeploymentSpec.Step nested : step.steps())
                previous = fillStep(dependencies, allSteps, nested, previous, instance);
            return previous;
        }

        // Parallel composite: all children share the same dependencies; their results are merged.
        List<StepStatus> parallel = new ArrayList<>();
        for (DeploymentSpec.Step nested : step.steps())
            parallel.addAll(fillStep(dependencies, allSteps, nested, previous, instance));
        return List.copyOf(parallel);
    }

    public enum StepType {
        /** An instance — completion marks a change as ready for the jobs contained in it. */
        instance,
        /** A timed delay. */
        delay,
        /** A system, staging or production test. */
        test,
        /** A production deployment. */
        deployment,
    }

    /**
     * Used to represent all steps — explicit and implicit — that may run in order to complete deployment of a change.
     *
     * Each node contains a step describing the node,
     * a list of steps which need to be complete before the step may start,
     * a list of jobs from which completion of the step is computed, and
     * optionally, an instance name used to identify a job type for the step,
     *
     * The completion criterion for each type of step is implemented in subclasses of this.
     */
    public static abstract class StepStatus {

        private final StepType type;
        private final DeploymentSpec.Step step;
        private final List<StepStatus> dependencies;
        private final InstanceName instance;

        private StepStatus(StepType type, DeploymentSpec.Step step, List<StepStatus> dependencies, InstanceName instance) {
            this.type = requireNonNull(type);
            this.step = requireNonNull(step);
            this.dependencies = List.copyOf(dependencies);
            this.instance = instance;
        }

        /** The type of step this is. */
        public final StepType type() { return type; }

        /** The step defining this. */
        public final DeploymentSpec.Step step() { return step; }

        /** The list of steps that need to be complete before this may start. */
        public final List<StepStatus> dependencies() { return dependencies; }

        /** The instance of this. */
        public final InstanceName instance() { return instance; }

        /** The id of the job this corresponds to, if any. */
        public Optional<JobId> job() { return Optional.empty(); }

        /** The time at which this is, or was, complete on the given change and / or versions. */
        public Optional<Instant> completedAt(Change change) { return completedAt(change, Optional.empty()); }

        /** The time at which this is, or was, complete on the given change and / or versions. */
        abstract Optional<Instant> completedAt(Change change, Optional<JobId> dependent);

        /** The time at which this step is ready to run the specified change and / or versions. */
        public Optional<Instant> readyAt(Change change) { return readyAt(change, Optional.empty()); }

        /** The time at which this step is ready to run the specified change and / or versions. */
        Optional<Instant> readyAt(Change change, Optional<JobId> dependent) {
            // Ready when dependencies are done, pushed out by any block, pause or cool-down.
            return dependenciesCompletedAt(change, dependent)
                    .map(ready -> Stream.of(blockedUntil(change), pausedUntil(), coolingDownUntil(change))
                                        .flatMap(Optional::stream)
                                        .reduce(ready, maxBy(naturalOrder())));
        }

        /** The time at which all dependencies completed on the given change and / or versions. */
        Optional<Instant> dependenciesCompletedAt(Change change, Optional<JobId> dependent) {
            return dependencies.stream().allMatch(step -> step.completedAt(change, dependent).isPresent())
                   ? dependencies.stream().map(step -> step.completedAt(change, dependent).get())
                                 .max(naturalOrder())
                                 .or(() -> Optional.of(Instant.EPOCH)) // No dependencies: complete since the epoch.
                   : Optional.empty();
        }

        /** The time until which this step is blocked by a change blocker. */
        public Optional<Instant> blockedUntil(Change change) { return Optional.empty(); }

        /** The time until which this step is paused by user intervention. */
        public Optional<Instant> pausedUntil() { return Optional.empty(); }

        /** The time until which this step is cooling down, due to consecutive failures. */
        public Optional<Instant> coolingDownUntil(Change change) { return Optional.empty(); }

        /** Whether this step is declared in the deployment spec, or is an implicit step. */
        public boolean isDeclared() { return true; }

    }

    /** A delay step: complete once its ready time plus the declared delay has passed. */
    private static class DelayStatus extends StepStatus {

        private DelayStatus(DeploymentSpec.Delay step, List<StepStatus> dependencies, InstanceName instance) {
            super(StepType.delay, step, dependencies, instance);
        }

        @Override
        public Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
            return readyAt(change, dependent).map(completion -> completion.plus(step().delay()));
        }

    }

    /** An instance step: gates the jobs of its instance on the change having rolled in, and on change blockers. */
    private static class InstanceStatus extends StepStatus {

        private final DeploymentInstanceSpec spec;
        private final Instant now;
        private final Instance instance;
        private final DeploymentStatus status;

        private InstanceStatus(DeploymentInstanceSpec spec, List<StepStatus> dependencies, Instant now,
                               Instance instance, DeploymentStatus status) {
            super(StepType.instance, spec, dependencies, spec.name());
            this.spec = spec;
            this.now = now;
            this.instance = instance;
            this.status = status;
        }

        /**
         * Time of completion of its dependencies, if all parts of the given change are contained in the change
         * for this instance, or if no more jobs should run for this instance for the given change.
         */
        @Override
        public Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
            return    (   (change.platform().isEmpty() || change.platform().equals(instance.change().platform()))
                       && (change.application().isEmpty() || change.application().equals(instance.change().application()))
                   || status.jobsToRun(Map.of(instance.name(), change)).isEmpty())
                   ? dependenciesCompletedAt(change, dependent)
                   : Optional.empty();
        }

        @Override
        public Optional<Instant> blockedUntil(Change change) {
            // Scan forward, hour by hour, for up to 7 days, looking for a gap in the blockers.
            for (Instant current = now; now.plus(Duration.ofDays(7)).isAfter(current); ) {
                boolean blocked = false;
                for (DeploymentSpec.ChangeBlocker blocker : spec.changeBlocker()) {
                    while (   blocker.window().includes(current)
                           && now.plus(Duration.ofDays(7)).isAfter(current)
                           && (   change.platform().isPresent() && blocker.blocksVersions()
                               || change.application().isPresent() && blocker.blocksRevisions())) {
                        blocked = true;
                        current = current.plus(Duration.ofHours(1)).truncatedTo(ChronoUnit.HOURS);
                    }
                }
                if ( ! blocked)
                    return current == now ? Optional.empty() : Optional.of(current);
            }
            // Blocked throughout the scan window: report a far-future instant (now + 2^30 seconds).
            return Optional.of(now.plusSeconds(1 << 30));
        }

    }

    /** Common base for steps which correspond to a job: pause, cool-down, and job id come from the job's status. */
    private static abstract class JobStepStatus extends StepStatus {

        private final JobStatus job;
        private final DeploymentStatus status;

        private JobStepStatus(StepType type, DeploymentSpec.Step step, List<StepStatus> dependencies, JobStatus job,
                              DeploymentStatus status) {
            super(type, step, dependencies, job.id().application().instance());
            this.job = requireNonNull(job);
            this.status = requireNonNull(status);
        }

        @Override
        public Optional<JobId> job() { return Optional.of(job.id()); }

        @Override
        public Optional<Instant> pausedUntil() {
            return status.application().require(job.id().application().instance()).jobPause(job.id().type());
        }

        @Override
        public Optional<Instant> coolingDownUntil(Change change) {
            if (job.lastTriggered().isEmpty()) return Optional.empty();
            if (job.lastCompleted().isEmpty()) return Optional.empty();
            if (job.firstFailing().isEmpty()) return Optional.empty();
            // Only cool down while still failing on the very versions the change targets.
            Versions lastVersions = job.lastCompleted().get().versions();
            if (change.platform().isPresent() && ! change.platform().get().equals(lastVersions.targetPlatform())) return Optional.empty();
            if (change.application().isPresent() && ! change.application().get().equals(lastVersions.targetApplication())) return Optional.empty();
            if (status.application.deploymentSpec().requireInstance(job.id().application().instance()).upgradePolicy() == DeploymentSpec.UpgradePolicy.canary) return Optional.empty();
            if (job.id().type().environment().isTest() && job.isOutOfCapacity()) return Optional.empty();
            Instant firstFailing = job.firstFailing().get().end().get();
            Instant lastCompleted = job.lastCompleted().get().end().get();
            // Back-off: 10 minutes plus half the failure duration, dropped once that time has passed.
            return firstFailing.equals(lastCompleted) ? Optional.of(lastCompleted)
                                                      : Optional.of(lastCompleted.plus(Duration.ofMinutes(10))
                                                                                 .plus(Duration.between(firstFailing, lastCompleted)
                                                                                               .dividedBy(2)))
                                                                .filter(status.now::isBefore);
        }

        private static JobStepStatus ofProductionDeployment(DeclaredZone step, List<StepStatus> dependencies,
                                                            DeploymentStatus status, InstanceName instance, JobType jobType) {
            ZoneId zone = ZoneId.from(step.environment(), step.region().get());
            JobStatus job = status.instanceJobs(instance).get(jobType);
            Optional<Deployment> existingDeployment = Optional.ofNullable(status.application().require(instance)
                                                                                .deployments().get(zone));
            return new JobStepStatus(StepType.deployment, step, dependencies, job, status) {
                @Override
                public Optional<Instant> readyAt(Change change, Optional<JobId> dependent) {
                    // A production deployment is additionally gated on its tests having passed.
                    return super.readyAt(change, Optional.of(job.id()))
                                .filter(__ -> status.isTested(job.id(), change));
                }

                /** Complete if deployment is on pinned version, and last successful deployment, or if given versions is strictly a downgrade, and this isn't forced by a pin. */
                @Override
                public Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
                    if (     change.isPinned()
                        &&   change.platform().isPresent()
                        && ! existingDeployment.map(Deployment::version).equals(change.platform()))
                        return Optional.empty();

                    Change fullChange = status.application().require(instance).change();
                    if (existingDeployment.map(deployment ->    ! (change.upgrades(deployment.version()) || change.upgrades(deployment.applicationVersion()))
                                                             &&   (fullChange.downgrades(deployment.version()) || fullChange.downgrades(deployment.applicationVersion())))
                                          .orElse(false))
                        return job.lastCompleted().flatMap(Run::end);

                    return job.lastSuccess()
                              .filter(run ->    change.platform().map(run.versions().targetPlatform()::equals).orElse(true)
                                             && change.application().map(run.versions().targetApplication()::equals).orElse(true))
                              .flatMap(Run::end);
                }
            };
        }

        private static JobStepStatus ofProductionTest(DeclaredTest step, List<StepStatus> dependencies,
                                                      DeploymentStatus status, InstanceName instance, JobType testType, JobType prodType) {
            JobStatus job = status.instanceJobs(instance).get(testType);
            return new JobStepStatus(StepType.test, step, dependencies, job, status) {
                @Override
                public Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
                    Versions versions = Versions.from(change, status.application, status.deploymentFor(job.id()), status.systemVersion);
                    // Complete when a matching success ended no earlier than the corresponding production run started.
                    return job.lastSuccess()
                              .filter(run -> versions.targetsMatch(run.versions()))
                              .filter(run -> ! status.jobs()
                                                     .instance(instance)
                                                     .type(prodType)
                                                     .lastCompleted().endedNoLaterThan(run.start())
                                                     .isEmpty())
                              .map(run -> run.end().get());
                }
            };
        }

        private static JobStepStatus ofTestDeployment(DeclaredZone step, List<StepStatus> dependencies,
                                                      DeploymentStatus status, InstanceName instance,
                                                      JobType jobType, boolean declared) {
            JobStatus job = status.instanceJobs(instance).get(jobType);
            return new JobStepStatus(StepType.test, step, dependencies, job, status) {
                @Override
                public Optional<Instant> completedAt(Change change, Optional<JobId> dependent) {
                    // Latest end time of any successful run on the versions the dependent job targets.
                    return RunList.from(job)
                                  .matching(run -> run.versions().targetsMatch(Versions.from(change,
                                                                                             status.application,
                                                                                             dependent.flatMap(status::deploymentFor),
                                                                                             status.systemVersion)))
                                  .status(RunStatus.success)
                                  .asList().stream()
                                  .map(run -> run.end().get())
                                  .max(naturalOrder());
                }

                @Override
                public boolean isDeclared() { return declared; }
            };
        }

    }

}
class DeploymentStatus { public static List<JobId> jobsFor(Application application, SystemName system) { if (DeploymentSpec.empty.equals(application.deploymentSpec())) return List.of(); return application.deploymentSpec().instances().stream() .flatMap(spec -> Stream.concat(Stream.of(systemTest, stagingTest), flatten(spec).filter(step -> step.concerns(prod)) .map(step -> { if (step instanceof DeclaredZone) return JobType.from(system, prod, ((DeclaredZone) step).region().get()); return JobType.testFrom(system, ((DeclaredTest) step).region()); }) .flatMap(Optional::stream)) .map(type -> new JobId(application.id().instance(spec.name()), type))) .collect(toUnmodifiableList()); } private static Stream<DeploymentSpec.Step> flatten(DeploymentSpec.Step step) { return step instanceof DeploymentSpec.Steps ? step.steps().stream().flatMap(DeploymentStatus::flatten) : Stream.of(step); } private static <T> List<T> union(List<T> first, List<T> second) { return Stream.concat(first.stream(), second.stream()).distinct().collect(toUnmodifiableList()); } private final Application application; private final JobList allJobs; private final SystemName system; private final Version systemVersion; private final Instant now; private final Map<JobId, StepStatus> jobSteps; private final List<StepStatus> allSteps; public DeploymentStatus(Application application, Map<JobId, JobStatus> allJobs, SystemName system, Version systemVersion, Instant now) { this.application = requireNonNull(application); this.allJobs = JobList.from(allJobs.values()); this.system = requireNonNull(system); this.systemVersion = requireNonNull(systemVersion); this.now = requireNonNull(now); List<StepStatus> allSteps = new ArrayList<>(); this.jobSteps = jobDependencies(application.deploymentSpec(), allSteps); this.allSteps = List.copyOf(allSteps); } /** The application this deployment status concerns. 
*/ public Application application() { return application; } /** A filterable list of the status of all jobs for this application. */ public JobList jobs() { return allJobs; } /** Whether any jobs of this application are failing with other errors than lack of capacity in a test zone. */ public boolean hasFailures() { return ! allJobs.failing() .not().withStatus(RunStatus.outOfCapacity) .isEmpty(); } /** All job statuses, by job type, for the given instance. */ public Map<JobType, JobStatus> instanceJobs(InstanceName instance) { return allJobs.asList().stream() .filter(job -> job.id().application().equals(application.id().instance(instance))) .collect(Collectors.toUnmodifiableMap(job -> job.id().type(), job -> job)); } /** Filterable job status lists for each instance of this application. */ public Map<ApplicationId, JobList> instanceJobs() { return allJobs.asList().stream() .collect(groupingBy(job -> job.id().application(), collectingAndThen(toUnmodifiableList(), JobList::from))); } /** * The set of jobs that need to run for the changes of each instance of the application to be considered complete, * and any test jobs for any oustanding change, which will likely be needed to lated deploy this change. */ public Map<JobId, List<Versions>> jobsToRun() { Map<InstanceName, Change> changes = new LinkedHashMap<>(); for (InstanceName instance : application.deploymentSpec().instanceNames()) changes.put(instance, application.require(instance).change()); Map<JobId, List<Versions>> jobs = jobsToRun(changes); for (InstanceName instance : application.deploymentSpec().instanceNames()) changes.put(instance, outstandingChange(instance).onTopOf(application.require(instance).change())); var testJobs = jobsToRun(changes, true).entrySet().stream() .filter(entry -> ! 
entry.getKey().type().isProduction()); return Stream.concat(jobs.entrySet().stream(), testJobs) .collect(collectingAndThen(toMap(Map.Entry::getKey, Map.Entry::getValue, DeploymentStatus::union, LinkedHashMap::new), ImmutableMap::copyOf)); } private Map<JobId, List<Versions>> jobsToRun(Map<InstanceName, Change> changes, boolean eagerTests) { Map<JobId, Versions> productionJobs = new LinkedHashMap<>(); changes.forEach((instance, change) -> productionJobs.putAll(productionJobs(instance, change, eagerTests))); Map<JobId, List<Versions>> testJobs = testJobs(productionJobs); Map<JobId, List<Versions>> jobs = new LinkedHashMap<>(testJobs); productionJobs.forEach((job, versions) -> jobs.put(job, List.of(versions))); jobSteps.forEach((job, step) -> { if ( ! step.isDeclared() || jobs.containsKey(job)) return; Change change = changes.get(job.application().instance()); if (change == null || ! change.hasTargets()) return; Optional<JobId> firstProductionJobWithDeployment = jobSteps.keySet().stream() .filter(jobId -> jobId.type().isProduction() && jobId.type().isDeployment()) .filter(jobId -> deploymentFor(jobId).isPresent()) .findFirst(); Versions versions = Versions.from(change, application, firstProductionJobWithDeployment.flatMap(this::deploymentFor), systemVersion); if (step.completedAt(change, firstProductionJobWithDeployment).isEmpty()) jobs.merge(job, List.of(versions), DeploymentStatus::union); }); return ImmutableMap.copyOf(jobs); } /** The set of jobs that need to run for the given changes to be considered complete. */ public Map<JobId, List<Versions>> jobsToRun(Map<InstanceName, Change> changes) { return jobsToRun(changes, false); } /** The step status for all steps in the deployment spec of this, which are jobs, in the same order as in the deployment spec. 
*/ public Map<JobId, StepStatus> jobSteps() { return jobSteps; } public Map<InstanceName, StepStatus> instanceSteps() { ImmutableMap.Builder<InstanceName, StepStatus> instances = ImmutableMap.builder(); for (StepStatus status : allSteps) if (status instanceof InstanceStatus) instances.put(status.instance(), status); return instances.build(); } /** The step status for all relevant steps in the deployment spec of this, in the same order as in the deployment spec. */ public Optional<Deployment> deploymentFor(JobId job) { return Optional.ofNullable(application.require(job.application().instance()) .deployments().get(job.type().zone(system))); } /** * The change of this application's latest submission, if this upgrades any of its production deployments, * and has not yet started rolling out, due to some other change or a block window being present at the time of submission. */ public Change outstandingChange(InstanceName instance) { return application.latestVersion().map(Change::of) .filter(change -> application.require(instance).change().application().map(change::upgrades).orElse(true)) .filter(change -> ! jobsToRun(Map.of(instance, change)).isEmpty()) .orElse(Change.empty()); } /** * True if the job has already been triggered on the given versions, or if all test types (systemTest, stagingTest), * restricted to the job's instance if declared in that instance, have successful runs on the given versions. 
*/ public boolean isTested(JobId job, Change change) { Versions versions = Versions.from(change, application, deploymentFor(job), systemVersion); return allJobs.triggeredOn(versions).get(job).isPresent() || Stream.of(systemTest, stagingTest) .noneMatch(testType -> declaredTest(job.application(), testType).map(__ -> allJobs.instance(job.application().instance())) .orElse(allJobs) .type(testType) .successOn(versions).isEmpty()); } private Map<JobId, Versions> productionJobs(InstanceName instance, Change change, boolean assumeUpgradesSucceed) { ImmutableMap.Builder<JobId, Versions> jobs = ImmutableMap.builder(); jobSteps.forEach((job, step) -> { Optional<Deployment> deployment = deploymentFor(job) .map(existing -> assumeUpgradesSucceed ? new Deployment(existing.zone(), existing.applicationVersion(), change.platform().orElse(existing.version()), existing.at(), existing.metrics(), existing.activity(), existing.quota()) : existing); if ( job.application().instance().equals(instance) && job.type().isProduction() && step.completedAt(change).isEmpty()) jobs.put(job, Versions.from(change, application, deployment, systemVersion)); }); return jobs.build(); } /** The production jobs that need to run to complete roll-out of the given change to production. */ public Map<JobId, Versions> productionJobs(InstanceName instance, Change change) { return productionJobs(instance, change, false); } /** The test jobs that need to run prior to the given production deployment jobs. 
*/ public Map<JobId, List<Versions>> testJobs(Map<JobId, Versions> jobs) { Map<JobId, List<Versions>> testJobs = new LinkedHashMap<>(); for (JobType testType : List.of(systemTest, stagingTest)) { jobs.forEach((job, versions) -> { if (job.type().isProduction() && job.type().isDeployment()) { declaredTest(job.application(), testType).ifPresent(testJob -> { if (allJobs.successOn(versions).get(testJob).isEmpty()) testJobs.merge(testJob, List.of(versions), DeploymentStatus::union); }); } }); jobs.forEach((job, versions) -> { if ( job.type().isProduction() && job.type().isDeployment() && allJobs.successOn(versions).type(testType).isEmpty() && testJobs.keySet().stream() .noneMatch(test -> test.type() == testType && testJobs.get(test).contains(versions))) testJobs.merge(firstDeclaredOrElseImplicitTest(testType), List.of(versions), DeploymentStatus::union); }); } return ImmutableMap.copyOf(testJobs); } private JobId firstDeclaredOrElseImplicitTest(JobType testJob) { return application.deploymentSpec().instanceNames().stream() .map(name -> new JobId(application.id().instance(name), testJob)) .min(comparing(id -> ! jobSteps.get(id).isDeclared())).orElseThrow(); } /** JobId of any declared test of the given type, for the given instance. */ private Optional<JobId> declaredTest(ApplicationId instanceId, JobType testJob) { JobId jobId = new JobId(instanceId, testJob); return jobSteps.get(jobId).isDeclared() ? Optional.of(jobId) : Optional.empty(); } /** A DAG of the dependencies between the primitive steps in the spec, with iteration order equal to declaration order. 
*/ private Map<JobId, StepStatus> jobDependencies(DeploymentSpec spec, List<StepStatus> allSteps) { if (DeploymentSpec.empty.equals(spec)) return Map.of(); Map<JobId, StepStatus> dependencies = new LinkedHashMap<>(); List<StepStatus> previous = List.of(); for (DeploymentSpec.Step step : spec.steps()) previous = fillStep(dependencies, allSteps, step, previous, null); return ImmutableMap.copyOf(dependencies); } /** Adds the primitive steps contained in the given step, which depend on the given previous primitives, to the dependency graph. */ private List<StepStatus> fillStep(Map<JobId, StepStatus> dependencies, List<StepStatus> allSteps, DeploymentSpec.Step step, List<StepStatus> previous, InstanceName instance) { if (step.steps().isEmpty()) { if (instance == null) return previous; if ( ! step.delay().isZero()) { StepStatus stepStatus = new DelayStatus((DeploymentSpec.Delay) step, previous, instance); allSteps.add(stepStatus); return List.of(stepStatus); } JobType jobType; StepStatus stepStatus; if (step.concerns(test) || step.concerns(staging)) { jobType = JobType.from(system, ((DeclaredZone) step).environment(), null) .orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system)); stepStatus = JobStepStatus.ofTestDeployment((DeclaredZone) step, List.of(), this, instance, jobType, true); previous = new ArrayList<>(previous); previous.add(stepStatus); } else if (step.isTest()) { jobType = JobType.testFrom(system, ((DeclaredTest) step).region()) .orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system)); JobType preType = JobType.from(system, prod, ((DeclaredTest) step).region()) .orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system)); stepStatus = JobStepStatus.ofProductionTest((DeclaredTest) step, previous, this, instance, jobType, preType); previous = List.of(stepStatus); } else if 
(step.concerns(prod)) { jobType = JobType.from(system, ((DeclaredZone) step).environment(), ((DeclaredZone) step).region().get()) .orElseThrow(() -> new IllegalStateException(application + " specifies " + step + ", but this has no job in " + system)); stepStatus = JobStepStatus.ofProductionDeployment((DeclaredZone) step, previous, this, instance, jobType); previous = List.of(stepStatus); } else return previous; JobId jobId = new JobId(application.id().instance(instance), jobType); allSteps.removeIf(existing -> existing.job().equals(Optional.of(jobId))); allSteps.add(stepStatus); dependencies.put(jobId, stepStatus); return previous; } if (step instanceof DeploymentInstanceSpec) { DeploymentInstanceSpec spec = ((DeploymentInstanceSpec) step); StepStatus instanceStatus = new InstanceStatus(spec, previous, now, application.require(spec.name()), this); instance = spec.name(); allSteps.add(instanceStatus); previous = List.of(instanceStatus); for (JobType test : List.of(systemTest, stagingTest)) { JobId job = new JobId(application.id().instance(instance), test); if ( ! dependencies.containsKey(job)) { var testStatus = JobStepStatus.ofTestDeployment(new DeclaredZone(test.environment()), List.of(), this, job.application().instance(), test, false); dependencies.put(job, testStatus); allSteps.add(testStatus); } } } if (step.isOrdered()) { for (DeploymentSpec.Step nested : step.steps()) previous = fillStep(dependencies, allSteps, nested, previous, instance); return previous; } List<StepStatus> parallel = new ArrayList<>(); for (DeploymentSpec.Step nested : step.steps()) parallel.addAll(fillStep(dependencies, allSteps, nested, previous, instance)); return List.copyOf(parallel); } public enum StepType { /** An instance — completion marks a change as ready for the jobs contained in it. */ instance, /** A timed delay. */ delay, /** A system, staging or production test. */ test, /** A production deployment. 
*/ deployment, } /** * Used to represent all steps — explicit and implicit — that may run in order to complete deployment of a change. * * Each node contains a step describing the node, * a list of steps which need to be complete before the step may start, * a list of jobs from which completion of the step is computed, and * optionally, an instance name used to identify a job type for the step, * * The completion criterion for each type of step is implemented in subclasses of this. */ public static abstract class StepStatus { private final StepType type; private final DeploymentSpec.Step step; private final List<StepStatus> dependencies; private final InstanceName instance; private StepStatus(StepType type, DeploymentSpec.Step step, List<StepStatus> dependencies, InstanceName instance) { this.type = requireNonNull(type); this.step = requireNonNull(step); this.dependencies = List.copyOf(dependencies); this.instance = instance; } /** The type of step this is. */ public final StepType type() { return type; } /** The step defining this. */ public final DeploymentSpec.Step step() { return step; } /** The list of steps that need to be complete before this may start. */ public final List<StepStatus> dependencies() { return dependencies; } /** The instance of this. */ public final InstanceName instance() { return instance; } /** The id of the job this corresponds to, if any. */ public Optional<JobId> job() { return Optional.empty(); } /** The time at which this is, or was, complete on the given change and / or versions. */ public Optional<Instant> completedAt(Change change) { return completedAt(change, Optional.empty()); } /** The time at which this is, or was, complete on the given change and / or versions. */ abstract Optional<Instant> completedAt(Change change, Optional<JobId> dependent); /** The time at which this step is ready to run the specified change and / or versions. 
*/ public Optional<Instant> readyAt(Change change) { return readyAt(change, Optional.empty()); } /** The time at which this step is ready to run the specified change and / or versions. */ Optional<Instant> readyAt(Change change, Optional<JobId> dependent) { return dependenciesCompletedAt(change, dependent) .map(ready -> Stream.of(blockedUntil(change), pausedUntil(), coolingDownUntil(change)) .flatMap(Optional::stream) .reduce(ready, maxBy(naturalOrder()))); } /** The time at which all dependencies completed on the given change and / or versions. */ Optional<Instant> dependenciesCompletedAt(Change change, Optional<JobId> dependent) { return dependencies.stream().allMatch(step -> step.completedAt(change, dependent).isPresent()) ? dependencies.stream().map(step -> step.completedAt(change, dependent).get()) .max(naturalOrder()) .or(() -> Optional.of(Instant.EPOCH)) : Optional.empty(); } /** The time until which this step is blocked by a change blocker. */ public Optional<Instant> blockedUntil(Change change) { return Optional.empty(); } /** The time until which this step is paused by user intervention. */ public Optional<Instant> pausedUntil() { return Optional.empty(); } /** The time until which this step is cooling down, due to consecutive failures. */ public Optional<Instant> coolingDownUntil(Change change) { return Optional.empty(); } /** Whether this step is declared in the deployment spec, or is an implicit step. 
*/ public boolean isDeclared() { return true; } } private static class DelayStatus extends StepStatus { private DelayStatus(DeploymentSpec.Delay step, List<StepStatus> dependencies, InstanceName instance) { super(StepType.delay, step, dependencies, instance); } @Override public Optional<Instant> completedAt(Change change, Optional<JobId> dependent) { return readyAt(change, dependent).map(completion -> completion.plus(step().delay())); } } private static class InstanceStatus extends StepStatus { private final DeploymentInstanceSpec spec; private final Instant now; private final Instance instance; private final DeploymentStatus status; private InstanceStatus(DeploymentInstanceSpec spec, List<StepStatus> dependencies, Instant now, Instance instance, DeploymentStatus status) { super(StepType.instance, spec, dependencies, spec.name()); this.spec = spec; this.now = now; this.instance = instance; this.status = status; } /** * Time of completion of its dependencies, if all parts of the given change are contained in the change * for this instance, or if no more jobs should run for this instance for the given change. */ @Override public Optional<Instant> completedAt(Change change, Optional<JobId> dependent) { return ( (change.platform().isEmpty() || change.platform().equals(instance.change().platform())) && (change.application().isEmpty() || change.application().equals(instance.change().application())) || status.jobsToRun(Map.of(instance.name(), change)).isEmpty()) ? 
dependenciesCompletedAt(change, dependent) : Optional.empty(); } @Override public Optional<Instant> blockedUntil(Change change) { for (Instant current = now; now.plus(Duration.ofDays(7)).isAfter(current); ) { boolean blocked = false; for (DeploymentSpec.ChangeBlocker blocker : spec.changeBlocker()) { while ( blocker.window().includes(current) && now.plus(Duration.ofDays(7)).isAfter(current) && ( change.platform().isPresent() && blocker.blocksVersions() || change.application().isPresent() && blocker.blocksRevisions())) { blocked = true; current = current.plus(Duration.ofHours(1)).truncatedTo(ChronoUnit.HOURS); } } if ( ! blocked) return current == now ? Optional.empty() : Optional.of(current); } return Optional.of(now.plusSeconds(1 << 30)); } } private static abstract class JobStepStatus extends StepStatus { private final JobStatus job; private final DeploymentStatus status; private JobStepStatus(StepType type, DeploymentSpec.Step step, List<StepStatus> dependencies, JobStatus job, DeploymentStatus status) { super(type, step, dependencies, job.id().application().instance()); this.job = requireNonNull(job); this.status = requireNonNull(status); } @Override public Optional<JobId> job() { return Optional.of(job.id()); } @Override public Optional<Instant> pausedUntil() { return status.application().require(job.id().application().instance()).jobPause(job.id().type()); } @Override public Optional<Instant> coolingDownUntil(Change change) { if (job.lastTriggered().isEmpty()) return Optional.empty(); if (job.lastCompleted().isEmpty()) return Optional.empty(); if (job.firstFailing().isEmpty()) return Optional.empty(); Versions lastVersions = job.lastCompleted().get().versions(); if (change.platform().isPresent() && ! change.platform().get().equals(lastVersions.targetPlatform())) return Optional.empty(); if (change.application().isPresent() && ! 
change.application().get().equals(lastVersions.targetApplication())) return Optional.empty(); if (status.application.deploymentSpec().requireInstance(job.id().application().instance()).upgradePolicy() == DeploymentSpec.UpgradePolicy.canary) return Optional.empty(); if (job.id().type().environment().isTest() && job.isOutOfCapacity()) return Optional.empty(); Instant firstFailing = job.firstFailing().get().end().get(); Instant lastCompleted = job.lastCompleted().get().end().get(); return firstFailing.equals(lastCompleted) ? Optional.of(lastCompleted) : Optional.of(lastCompleted.plus(Duration.ofMinutes(10)) .plus(Duration.between(firstFailing, lastCompleted) .dividedBy(2))) .filter(status.now::isBefore); } private static JobStepStatus ofProductionDeployment(DeclaredZone step, List<StepStatus> dependencies, DeploymentStatus status, InstanceName instance, JobType jobType) { ZoneId zone = ZoneId.from(step.environment(), step.region().get()); JobStatus job = status.instanceJobs(instance).get(jobType); Optional<Deployment> existingDeployment = Optional.ofNullable(status.application().require(instance) .deployments().get(zone)); return new JobStepStatus(StepType.deployment, step, dependencies, job, status) { @Override public Optional<Instant> readyAt(Change change, Optional<JobId> dependent) { return super.readyAt(change, Optional.of(job.id())) .filter(__ -> status.isTested(job.id(), change)); } /** Complete if deployment is on pinned version, and last successful deployment, or if given versions is strictly a downgrade, and this isn't forced by a pin. */ @Override public Optional<Instant> completedAt(Change change, Optional<JobId> dependent) { if ( change.isPinned() && change.platform().isPresent() && ! existingDeployment.map(Deployment::version).equals(change.platform())) return Optional.empty(); Change fullChange = status.application().require(instance).change(); if (existingDeployment.map(deployment -> ! 
(change.upgrades(deployment.version()) || change.upgrades(deployment.applicationVersion())) && (fullChange.downgrades(deployment.version()) || fullChange.downgrades(deployment.applicationVersion()))) .orElse(false)) return job.lastCompleted().flatMap(Run::end); return job.lastSuccess() .filter(run -> change.platform().map(run.versions().targetPlatform()::equals).orElse(true) && change.application().map(run.versions().targetApplication()::equals).orElse(true)) .flatMap(Run::end); } }; } private static JobStepStatus ofProductionTest(DeclaredTest step, List<StepStatus> dependencies, DeploymentStatus status, InstanceName instance, JobType testType, JobType prodType) { JobStatus job = status.instanceJobs(instance).get(testType); return new JobStepStatus(StepType.test, step, dependencies, job, status) { @Override public Optional<Instant> completedAt(Change change, Optional<JobId> dependent) { Versions versions = Versions.from(change, status.application, status.deploymentFor(job.id()), status.systemVersion); return job.lastSuccess() .filter(run -> versions.targetsMatch(run.versions())) .filter(run -> ! 
status.jobs() .instance(instance) .type(prodType) .lastCompleted().endedNoLaterThan(run.start()) .isEmpty()) .map(run -> run.end().get()); } }; } private static JobStepStatus ofTestDeployment(DeclaredZone step, List<StepStatus> dependencies, DeploymentStatus status, InstanceName instance, JobType jobType, boolean declared) { JobStatus job = status.instanceJobs(instance).get(jobType); return new JobStepStatus(StepType.test, step, dependencies, job, status) { @Override public Optional<Instant> completedAt(Change change, Optional<JobId> dependent) { return RunList.from(job) .matching(run -> run.versions().targetsMatch(Versions.from(change, status.application, dependent.flatMap(status::deploymentFor), status.systemVersion))) .status(RunStatus.success) .asList().stream() .map(run -> run.end().get()) .max(naturalOrder()); } @Override public boolean isDeclared() { return declared; } }; } } }
IMHO, if the existence check is expensive, we shouldn't perform it; instead, we should rely on the implementation and add a comment to the `FlinkKubeClient.deleteConfigMapsBy*` implementations. Performing the existence call would merely make this more consistent with the actual contract of the interface provided by the fabric8 client.
public CompletableFuture<Void> deleteConfigMapsByLabels(Map<String, String> labels) { return CompletableFuture.runAsync( () -> { if (!this.internalClient.configMaps().withLabels(labels).delete()) { final List<ConfigMap> notDeletedConfigMaps = internalClient.configMaps().withLabels(labels).list().getItems(); if (!notDeletedConfigMaps.isEmpty()) { final String notDeletedConfigMapsNameStr = notDeletedConfigMaps.stream() .map(cm -> cm.getMetadata().getName()) .collect(Collectors.joining(", ")); throw new CompletionException( new KubernetesException( "The following ConfigMaps labeled with " + labelsToString(labels) + " couldn't be deleted for unknown reasons: " + notDeletedConfigMapsNameStr)); } } }, kubeClientExecutorService); }
if (!this.internalClient.configMaps().withLabels(labels).delete()) {
public CompletableFuture<Void> deleteConfigMapsByLabels(Map<String, String> labels) { return CompletableFuture.runAsync( () -> this.internalClient.configMaps().withLabels(labels).delete(), kubeClientExecutorService); }
class Fabric8FlinkKubeClient implements FlinkKubeClient { private static final Logger LOG = LoggerFactory.getLogger(Fabric8FlinkKubeClient.class); private final String clusterId; private final String namespace; private final int maxRetryAttempts; private final KubernetesConfigOptions.NodePortAddressType nodePortAddressType; private final NamespacedKubernetesClient internalClient; private final ExecutorService kubeClientExecutorService; private final AtomicReference<Deployment> masterDeploymentRef; public Fabric8FlinkKubeClient( Configuration flinkConfig, NamespacedKubernetesClient client, ExecutorService executorService) { this.clusterId = flinkConfig .getOptional(KubernetesConfigOptions.CLUSTER_ID) .orElseThrow( () -> new IllegalArgumentException( String.format( "Configuration option '%s' is not set.", KubernetesConfigOptions.CLUSTER_ID.key()))); this.namespace = flinkConfig.getString(KubernetesConfigOptions.NAMESPACE); this.maxRetryAttempts = flinkConfig.getInteger( KubernetesConfigOptions.KUBERNETES_TRANSACTIONAL_OPERATION_MAX_RETRIES); this.nodePortAddressType = flinkConfig.get( KubernetesConfigOptions.REST_SERVICE_EXPOSED_NODE_PORT_ADDRESS_TYPE); this.internalClient = checkNotNull(client); this.kubeClientExecutorService = checkNotNull(executorService); this.masterDeploymentRef = new AtomicReference<>(); } @Override public void createJobManagerComponent(KubernetesJobManagerSpecification kubernetesJMSpec) { final Deployment deployment = kubernetesJMSpec.getDeployment(); final List<HasMetadata> accompanyingResources = kubernetesJMSpec.getAccompanyingResources(); LOG.debug( "Start to create deployment with spec {}{}", System.lineSeparator(), KubernetesUtils.tryToGetPrettyPrintYaml(deployment)); final Deployment createdDeployment = this.internalClient.apps().deployments().create(deployment); setOwnerReference(createdDeployment, accompanyingResources); this.internalClient.resourceList(accompanyingResources).createOrReplace(); } @Override public 
CompletableFuture<Void> createTaskManagerPod(KubernetesPod kubernetesPod) { return CompletableFuture.runAsync( () -> { if (masterDeploymentRef.get() == null) { final Deployment masterDeployment = this.internalClient .apps() .deployments() .withName(KubernetesUtils.getDeploymentName(clusterId)) .get(); if (masterDeployment == null) { throw new RuntimeException( "Failed to find Deployment named " + clusterId + " in namespace " + this.namespace); } masterDeploymentRef.compareAndSet(null, masterDeployment); } setOwnerReference( checkNotNull(masterDeploymentRef.get()), Collections.singletonList(kubernetesPod.getInternalResource())); LOG.debug( "Start to create pod with spec {}{}", System.lineSeparator(), KubernetesUtils.tryToGetPrettyPrintYaml( kubernetesPod.getInternalResource())); this.internalClient.pods().create(kubernetesPod.getInternalResource()); }, kubeClientExecutorService); } @Override public CompletableFuture<Void> stopPod(String podName) { return CompletableFuture.runAsync( () -> this.internalClient.pods().withName(podName).delete(), kubeClientExecutorService); } @Override public Optional<Endpoint> getRestEndpoint(String clusterId) { Optional<KubernetesService> restService = getService(KubernetesService.ServiceType.REST_SERVICE, clusterId); if (!restService.isPresent()) { return Optional.empty(); } final Service service = restService.get().getInternalResource(); final int restPort = getRestPortFromExternalService(service); final KubernetesConfigOptions.ServiceExposedType serviceExposedType = ServiceType.classify(service); if (serviceExposedType.isClusterIP()) { return Optional.of( new Endpoint( ExternalServiceDecorator.getNamespacedExternalServiceName( clusterId, namespace), restPort)); } return getRestEndPointFromService(service, restPort); } @Override public List<KubernetesPod> getPodsWithLabels(Map<String, String> labels) { final List<Pod> podList = this.internalClient.pods().withLabels(labels).list().getItems(); if (podList == null || podList.isEmpty()) 
{ return new ArrayList<>(); } return podList.stream().map(KubernetesPod::new).collect(Collectors.toList()); } @Override public void stopAndCleanupCluster(String clusterId) { this.internalClient .apps() .deployments() .withName(KubernetesUtils.getDeploymentName(clusterId)) .cascading(true) .delete(); } @Override public Optional<KubernetesService> getService( KubernetesService.ServiceType serviceType, String clusterId) { final String serviceName = getServiceName(serviceType, clusterId); final Service service = this.internalClient.services().withName(serviceName).fromServer().get(); if (service == null) { LOG.debug("Service {} does not exist", serviceName); return Optional.empty(); } return Optional.of(new KubernetesService(service)); } @Override public KubernetesWatch watchPodsAndDoCallback( Map<String, String> labels, WatchCallbackHandler<KubernetesPod> podCallbackHandler) throws Exception { return FutureUtils.retry( () -> CompletableFuture.supplyAsync( () -> new KubernetesWatch( this.internalClient .pods() .withLabels(labels) .watch( new KubernetesPodsWatcher( podCallbackHandler))), kubeClientExecutorService), maxRetryAttempts, t -> ExceptionUtils.findThrowable(t, KubernetesClientException.class) .isPresent(), kubeClientExecutorService) .get(); } @Override public KubernetesLeaderElector createLeaderElector( KubernetesLeaderElectionConfiguration leaderElectionConfiguration, KubernetesLeaderElector.LeaderCallbackHandler leaderCallbackHandler) { return new KubernetesLeaderElector( this.internalClient, leaderElectionConfiguration, leaderCallbackHandler); } @Override public CompletableFuture<Void> createConfigMap(KubernetesConfigMap configMap) { final String configMapName = configMap.getName(); return CompletableFuture.runAsync( () -> this.internalClient .configMaps() .create(configMap.getInternalResource()), kubeClientExecutorService) .exceptionally( throwable -> { throw new CompletionException( new KubernetesException( "Failed to create ConfigMap " + configMapName, 
throwable)); }); } @Override public Optional<KubernetesConfigMap> getConfigMap(String name) { final ConfigMap configMap = this.internalClient.configMaps().withName(name).get(); return configMap == null ? Optional.empty() : Optional.of(new KubernetesConfigMap(configMap)); } @Override public CompletableFuture<Boolean> checkAndUpdateConfigMap( String configMapName, Function<KubernetesConfigMap, Optional<KubernetesConfigMap>> updateFunction) { return FutureUtils.retry( () -> attemptCheckAndUpdateConfigMap(configMapName, updateFunction), maxRetryAttempts, t -> ExceptionUtils.findThrowable(t, KubernetesClientException.class).isPresent(), kubeClientExecutorService); } private CompletableFuture<Boolean> attemptCheckAndUpdateConfigMap( String configMapName, Function<KubernetesConfigMap, Optional<KubernetesConfigMap>> updateFunction) { return CompletableFuture.supplyAsync( () -> { final KubernetesConfigMap configMap = getConfigMap(configMapName) .orElseThrow( () -> new CompletionException( new KubernetesException( "Cannot retry checkAndUpdateConfigMap with configMap " + configMapName + " because it does not exist."))); final Optional<KubernetesConfigMap> maybeUpdate = updateFunction.apply(configMap); if (maybeUpdate.isPresent()) { try { internalClient .configMaps() .withName(configMapName) .lockResourceVersion(maybeUpdate.get().getResourceVersion()) .replace(maybeUpdate.get().getInternalResource()); return true; } catch (Throwable throwable) { LOG.debug( "Failed to update ConfigMap {} with data {}. 
Trying again.", configMap.getName(), configMap.getData()); throw new CompletionException( new PossibleInconsistentStateException(throwable)); } } return false; }, kubeClientExecutorService); } @Override private String labelsToString(Map<String, String> labels) { return labels.entrySet().stream() .map(entry -> String.format("%s=%s", entry.getKey(), entry.getValue())) .collect(Collectors.joining(",", "{", "}")); } @Override public CompletableFuture<Void> deleteConfigMap(String configMapName) { return CompletableFuture.runAsync( () -> { if (!this.internalClient.configMaps().withName(configMapName).delete()) { if (this.internalClient.configMaps().withName(configMapName).get() != null) { throw new CompletionException( new KubernetesException( "The ConfigMap '" + configMapName + "' couldn't be deleted for unknown reasons")); } } }, kubeClientExecutorService); } @Override public KubernetesConfigMapSharedWatcher createConfigMapSharedWatcher( Map<String, String> labels) { return new KubernetesConfigMapSharedInformer(this.internalClient, labels); } @Override public void close() { this.internalClient.close(); ExecutorUtils.gracefulShutdown(5, TimeUnit.SECONDS, this.kubeClientExecutorService); } @Override public KubernetesPod loadPodFromTemplateFile(File file) { if (!file.exists()) { throw new FlinkRuntimeException( String.format("Pod template file %s does not exist.", file)); } return new KubernetesPod(this.internalClient.pods().load(file).get()); } @Override public CompletableFuture<Void> updateServiceTargetPort( KubernetesService.ServiceType serviceType, String clusterId, String portName, int targetPort) { LOG.debug("Update {} target port to {}", portName, targetPort); return CompletableFuture.runAsync( () -> getService(serviceType, clusterId) .ifPresent( service -> { final Service updatedService = new ServiceBuilder( service.getInternalResource()) .editSpec() .editMatchingPort( servicePortBuilder -> servicePortBuilder .build() .getName() .equals( portName)) 
.withTargetPort( new IntOrString(targetPort)) .endPort() .endSpec() .build(); this.internalClient .services() .withName( getServiceName(serviceType, clusterId)) .replace(updatedService); }), kubeClientExecutorService); } /** * Get the Kubernetes service name. * * @param serviceType The service type * @param clusterId The cluster id * @return Return the Kubernetes service name if the service type is known. */ private String getServiceName(KubernetesService.ServiceType serviceType, String clusterId) { switch (serviceType) { case REST_SERVICE: return ExternalServiceDecorator.getExternalServiceName(clusterId); case INTERNAL_SERVICE: return InternalServiceDecorator.getInternalServiceName(clusterId); default: throw new IllegalArgumentException( "Unrecognized service type: " + serviceType.name()); } } private void setOwnerReference(Deployment deployment, List<HasMetadata> resources) { final OwnerReference deploymentOwnerReference = new OwnerReferenceBuilder() .withName(deployment.getMetadata().getName()) .withApiVersion(deployment.getApiVersion()) .withUid(deployment.getMetadata().getUid()) .withKind(deployment.getKind()) .withController(true) .withBlockOwnerDeletion(true) .build(); resources.forEach( resource -> resource.getMetadata() .setOwnerReferences( Collections.singletonList(deploymentOwnerReference))); } /** Get rest port from the external Service. 
*/ private int getRestPortFromExternalService(Service externalService) { final List<ServicePort> servicePortCandidates = externalService.getSpec().getPorts().stream() .filter(x -> x.getName().equals(Constants.REST_PORT_NAME)) .collect(Collectors.toList()); if (servicePortCandidates.isEmpty()) { throw new RuntimeException( "Failed to find port \"" + Constants.REST_PORT_NAME + "\" in Service \"" + ExternalServiceDecorator.getExternalServiceName(this.clusterId) + "\""); } final ServicePort externalServicePort = servicePortCandidates.get(0); final KubernetesConfigOptions.ServiceExposedType externalServiceType = KubernetesConfigOptions.ServiceExposedType.valueOf( externalService.getSpec().getType()); switch (externalServiceType) { case ClusterIP: case LoadBalancer: return externalServicePort.getPort(); case NodePort: return externalServicePort.getNodePort(); default: throw new RuntimeException("Unrecognized Service type: " + externalServiceType); } } private Optional<Endpoint> getRestEndPointFromService(Service service, int restPort) { if (service.getStatus() == null) { return Optional.empty(); } LoadBalancerStatus loadBalancer = service.getStatus().getLoadBalancer(); boolean hasExternalIP = service.getSpec() != null && service.getSpec().getExternalIPs() != null && !service.getSpec().getExternalIPs().isEmpty(); if (loadBalancer != null) { return getLoadBalancerRestEndpoint(loadBalancer, restPort); } else if (hasExternalIP) { final String address = service.getSpec().getExternalIPs().get(0); if (address != null && !address.isEmpty()) { return Optional.of(new Endpoint(address, restPort)); } } return Optional.empty(); } private Optional<Endpoint> getLoadBalancerRestEndpoint( LoadBalancerStatus loadBalancer, int restPort) { boolean hasIngress = loadBalancer.getIngress() != null && !loadBalancer.getIngress().isEmpty(); String address; if (hasIngress) { address = loadBalancer.getIngress().get(0).getIp(); if (address == null || address.isEmpty()) { address = 
loadBalancer.getIngress().get(0).getHostname(); } } else { address = internalClient.nodes().list().getItems().stream() .flatMap(node -> node.getStatus().getAddresses().stream()) .filter( nodeAddress -> nodePortAddressType .name() .equals(nodeAddress.getType())) .map(NodeAddress::getAddress) .filter(ip -> !ip.isEmpty()) .findAny() .orElse(null); if (address == null) { LOG.warn( "Unable to find any node ip with type [{}]. Please see [{}] config option for more details.", nodePortAddressType, KubernetesConfigOptions.REST_SERVICE_EXPOSED_NODE_PORT_ADDRESS_TYPE.key()); } } boolean noAddress = address == null || address.isEmpty(); return noAddress ? Optional.empty() : Optional.of(new Endpoint(address, restPort)); } }
class Fabric8FlinkKubeClient implements FlinkKubeClient { private static final Logger LOG = LoggerFactory.getLogger(Fabric8FlinkKubeClient.class); private final String clusterId; private final String namespace; private final int maxRetryAttempts; private final KubernetesConfigOptions.NodePortAddressType nodePortAddressType; private final NamespacedKubernetesClient internalClient; private final ExecutorService kubeClientExecutorService; private final AtomicReference<Deployment> masterDeploymentRef; public Fabric8FlinkKubeClient( Configuration flinkConfig, NamespacedKubernetesClient client, ExecutorService executorService) { this.clusterId = flinkConfig .getOptional(KubernetesConfigOptions.CLUSTER_ID) .orElseThrow( () -> new IllegalArgumentException( String.format( "Configuration option '%s' is not set.", KubernetesConfigOptions.CLUSTER_ID.key()))); this.namespace = flinkConfig.getString(KubernetesConfigOptions.NAMESPACE); this.maxRetryAttempts = flinkConfig.getInteger( KubernetesConfigOptions.KUBERNETES_TRANSACTIONAL_OPERATION_MAX_RETRIES); this.nodePortAddressType = flinkConfig.get( KubernetesConfigOptions.REST_SERVICE_EXPOSED_NODE_PORT_ADDRESS_TYPE); this.internalClient = checkNotNull(client); this.kubeClientExecutorService = checkNotNull(executorService); this.masterDeploymentRef = new AtomicReference<>(); } @Override public void createJobManagerComponent(KubernetesJobManagerSpecification kubernetesJMSpec) { final Deployment deployment = kubernetesJMSpec.getDeployment(); final List<HasMetadata> accompanyingResources = kubernetesJMSpec.getAccompanyingResources(); LOG.debug( "Start to create deployment with spec {}{}", System.lineSeparator(), KubernetesUtils.tryToGetPrettyPrintYaml(deployment)); final Deployment createdDeployment = this.internalClient.apps().deployments().create(deployment); setOwnerReference(createdDeployment, accompanyingResources); this.internalClient.resourceList(accompanyingResources).createOrReplace(); } @Override public 
CompletableFuture<Void> createTaskManagerPod(KubernetesPod kubernetesPod) { return CompletableFuture.runAsync( () -> { if (masterDeploymentRef.get() == null) { final Deployment masterDeployment = this.internalClient .apps() .deployments() .withName(KubernetesUtils.getDeploymentName(clusterId)) .get(); if (masterDeployment == null) { throw new RuntimeException( "Failed to find Deployment named " + clusterId + " in namespace " + this.namespace); } masterDeploymentRef.compareAndSet(null, masterDeployment); } setOwnerReference( checkNotNull(masterDeploymentRef.get()), Collections.singletonList(kubernetesPod.getInternalResource())); LOG.debug( "Start to create pod with spec {}{}", System.lineSeparator(), KubernetesUtils.tryToGetPrettyPrintYaml( kubernetesPod.getInternalResource())); this.internalClient.pods().create(kubernetesPod.getInternalResource()); }, kubeClientExecutorService); } @Override public CompletableFuture<Void> stopPod(String podName) { return CompletableFuture.runAsync( () -> this.internalClient.pods().withName(podName).delete(), kubeClientExecutorService); } @Override public Optional<Endpoint> getRestEndpoint(String clusterId) { Optional<KubernetesService> restService = getService(KubernetesService.ServiceType.REST_SERVICE, clusterId); if (!restService.isPresent()) { return Optional.empty(); } final Service service = restService.get().getInternalResource(); final int restPort = getRestPortFromExternalService(service); final KubernetesConfigOptions.ServiceExposedType serviceExposedType = ServiceType.classify(service); if (serviceExposedType.isClusterIP()) { return Optional.of( new Endpoint( ExternalServiceDecorator.getNamespacedExternalServiceName( clusterId, namespace), restPort)); } return getRestEndPointFromService(service, restPort); } @Override public List<KubernetesPod> getPodsWithLabels(Map<String, String> labels) { final List<Pod> podList = this.internalClient.pods().withLabels(labels).list().getItems(); if (podList == null || podList.isEmpty()) 
{ return new ArrayList<>(); } return podList.stream().map(KubernetesPod::new).collect(Collectors.toList()); } @Override public void stopAndCleanupCluster(String clusterId) { this.internalClient .apps() .deployments() .withName(KubernetesUtils.getDeploymentName(clusterId)) .cascading(true) .delete(); } @Override public Optional<KubernetesService> getService( KubernetesService.ServiceType serviceType, String clusterId) { final String serviceName = getServiceName(serviceType, clusterId); final Service service = this.internalClient.services().withName(serviceName).fromServer().get(); if (service == null) { LOG.debug("Service {} does not exist", serviceName); return Optional.empty(); } return Optional.of(new KubernetesService(service)); } @Override public KubernetesWatch watchPodsAndDoCallback( Map<String, String> labels, WatchCallbackHandler<KubernetesPod> podCallbackHandler) throws Exception { return FutureUtils.retry( () -> CompletableFuture.supplyAsync( () -> new KubernetesWatch( this.internalClient .pods() .withLabels(labels) .watch( new KubernetesPodsWatcher( podCallbackHandler))), kubeClientExecutorService), maxRetryAttempts, t -> ExceptionUtils.findThrowable(t, KubernetesClientException.class) .isPresent(), kubeClientExecutorService) .get(); } @Override public KubernetesLeaderElector createLeaderElector( KubernetesLeaderElectionConfiguration leaderElectionConfiguration, KubernetesLeaderElector.LeaderCallbackHandler leaderCallbackHandler) { return new KubernetesLeaderElector( this.internalClient, leaderElectionConfiguration, leaderCallbackHandler); } @Override public CompletableFuture<Void> createConfigMap(KubernetesConfigMap configMap) { final String configMapName = configMap.getName(); return CompletableFuture.runAsync( () -> this.internalClient .configMaps() .create(configMap.getInternalResource()), kubeClientExecutorService) .exceptionally( throwable -> { throw new CompletionException( new KubernetesException( "Failed to create ConfigMap " + configMapName, 
throwable)); }); } @Override public Optional<KubernetesConfigMap> getConfigMap(String name) { final ConfigMap configMap = this.internalClient.configMaps().withName(name).get(); return configMap == null ? Optional.empty() : Optional.of(new KubernetesConfigMap(configMap)); } @Override public CompletableFuture<Boolean> checkAndUpdateConfigMap( String configMapName, Function<KubernetesConfigMap, Optional<KubernetesConfigMap>> updateFunction) { return FutureUtils.retry( () -> attemptCheckAndUpdateConfigMap(configMapName, updateFunction), maxRetryAttempts, t -> ExceptionUtils.findThrowable(t, KubernetesClientException.class).isPresent(), kubeClientExecutorService); } private CompletableFuture<Boolean> attemptCheckAndUpdateConfigMap( String configMapName, Function<KubernetesConfigMap, Optional<KubernetesConfigMap>> updateFunction) { return CompletableFuture.supplyAsync( () -> { final KubernetesConfigMap configMap = getConfigMap(configMapName) .orElseThrow( () -> new CompletionException( new KubernetesException( "Cannot retry checkAndUpdateConfigMap with configMap " + configMapName + " because it does not exist."))); final Optional<KubernetesConfigMap> maybeUpdate = updateFunction.apply(configMap); if (maybeUpdate.isPresent()) { try { internalClient .configMaps() .withName(configMapName) .lockResourceVersion(maybeUpdate.get().getResourceVersion()) .replace(maybeUpdate.get().getInternalResource()); return true; } catch (Throwable throwable) { LOG.debug( "Failed to update ConfigMap {} with data {}. 
Trying again.", configMap.getName(), configMap.getData()); throw new CompletionException( new PossibleInconsistentStateException(throwable)); } } return false; }, kubeClientExecutorService); } @Override @Override public CompletableFuture<Void> deleteConfigMap(String configMapName) { return CompletableFuture.runAsync( () -> this.internalClient.configMaps().withName(configMapName).delete(), kubeClientExecutorService); } @Override public KubernetesConfigMapSharedWatcher createConfigMapSharedWatcher( Map<String, String> labels) { return new KubernetesConfigMapSharedInformer(this.internalClient, labels); } @Override public void close() { this.internalClient.close(); ExecutorUtils.gracefulShutdown(5, TimeUnit.SECONDS, this.kubeClientExecutorService); } @Override public KubernetesPod loadPodFromTemplateFile(File file) { if (!file.exists()) { throw new FlinkRuntimeException( String.format("Pod template file %s does not exist.", file)); } return new KubernetesPod(this.internalClient.pods().load(file).get()); } @Override public CompletableFuture<Void> updateServiceTargetPort( KubernetesService.ServiceType serviceType, String clusterId, String portName, int targetPort) { LOG.debug("Update {} target port to {}", portName, targetPort); return CompletableFuture.runAsync( () -> getService(serviceType, clusterId) .ifPresent( service -> { final Service updatedService = new ServiceBuilder( service.getInternalResource()) .editSpec() .editMatchingPort( servicePortBuilder -> servicePortBuilder .build() .getName() .equals( portName)) .withTargetPort( new IntOrString(targetPort)) .endPort() .endSpec() .build(); this.internalClient .services() .withName( getServiceName(serviceType, clusterId)) .replace(updatedService); }), kubeClientExecutorService); } /** * Get the Kubernetes service name. * * @param serviceType The service type * @param clusterId The cluster id * @return Return the Kubernetes service name if the service type is known. 
*/ private String getServiceName(KubernetesService.ServiceType serviceType, String clusterId) { switch (serviceType) { case REST_SERVICE: return ExternalServiceDecorator.getExternalServiceName(clusterId); case INTERNAL_SERVICE: return InternalServiceDecorator.getInternalServiceName(clusterId); default: throw new IllegalArgumentException( "Unrecognized service type: " + serviceType.name()); } } private void setOwnerReference(Deployment deployment, List<HasMetadata> resources) { final OwnerReference deploymentOwnerReference = new OwnerReferenceBuilder() .withName(deployment.getMetadata().getName()) .withApiVersion(deployment.getApiVersion()) .withUid(deployment.getMetadata().getUid()) .withKind(deployment.getKind()) .withController(true) .withBlockOwnerDeletion(true) .build(); resources.forEach( resource -> resource.getMetadata() .setOwnerReferences( Collections.singletonList(deploymentOwnerReference))); } /** Get rest port from the external Service. */ private int getRestPortFromExternalService(Service externalService) { final List<ServicePort> servicePortCandidates = externalService.getSpec().getPorts().stream() .filter(x -> x.getName().equals(Constants.REST_PORT_NAME)) .collect(Collectors.toList()); if (servicePortCandidates.isEmpty()) { throw new RuntimeException( "Failed to find port \"" + Constants.REST_PORT_NAME + "\" in Service \"" + ExternalServiceDecorator.getExternalServiceName(this.clusterId) + "\""); } final ServicePort externalServicePort = servicePortCandidates.get(0); final KubernetesConfigOptions.ServiceExposedType externalServiceType = KubernetesConfigOptions.ServiceExposedType.valueOf( externalService.getSpec().getType()); switch (externalServiceType) { case ClusterIP: case LoadBalancer: return externalServicePort.getPort(); case NodePort: return externalServicePort.getNodePort(); default: throw new RuntimeException("Unrecognized Service type: " + externalServiceType); } } private Optional<Endpoint> getRestEndPointFromService(Service service, int 
restPort) { if (service.getStatus() == null) { return Optional.empty(); } LoadBalancerStatus loadBalancer = service.getStatus().getLoadBalancer(); boolean hasExternalIP = service.getSpec() != null && service.getSpec().getExternalIPs() != null && !service.getSpec().getExternalIPs().isEmpty(); if (loadBalancer != null) { return getLoadBalancerRestEndpoint(loadBalancer, restPort); } else if (hasExternalIP) { final String address = service.getSpec().getExternalIPs().get(0); if (address != null && !address.isEmpty()) { return Optional.of(new Endpoint(address, restPort)); } } return Optional.empty(); } private Optional<Endpoint> getLoadBalancerRestEndpoint( LoadBalancerStatus loadBalancer, int restPort) { boolean hasIngress = loadBalancer.getIngress() != null && !loadBalancer.getIngress().isEmpty(); String address; if (hasIngress) { address = loadBalancer.getIngress().get(0).getIp(); if (address == null || address.isEmpty()) { address = loadBalancer.getIngress().get(0).getHostname(); } } else { address = internalClient.nodes().list().getItems().stream() .flatMap(node -> node.getStatus().getAddresses().stream()) .filter( nodeAddress -> nodePortAddressType .name() .equals(nodeAddress.getType())) .map(NodeAddress::getAddress) .filter(ip -> !ip.isEmpty()) .findAny() .orElse(null); if (address == null) { LOG.warn( "Unable to find any node ip with type [{}]. Please see [{}] config option for more details.", nodePortAddressType, KubernetesConfigOptions.REST_SERVICE_EXPOSED_NODE_PORT_ADDRESS_TYPE.key()); } } boolean noAddress = address == null || address.isEmpty(); return noAddress ? Optional.empty() : Optional.of(new Endpoint(address, restPort)); } }
Our code style unfortunately does not cover the placement of such braces; hence we reject any changes such as this to existing code.
private static List<ReporterSetup> setupReporters(Map<String, MetricReporterFactory> reporterFactories, List<Tuple2<String, Configuration>> reporterConfigurations) { List<ReporterSetup> reporterSetups = new ArrayList<>(reporterConfigurations.size()); for (Tuple2<String, Configuration> reporterConfiguration: reporterConfigurations) { String reporterName = reporterConfiguration.f0; Configuration reporterConfig = reporterConfiguration.f1; try { Optional<MetricReporter> metricReporterOptional = loadReporter(reporterName, reporterConfig, reporterFactories); metricReporterOptional.ifPresent(reporter -> { MetricConfig metricConfig = new MetricConfig(); reporterConfig.addAllToProperties(metricConfig); reporterSetups.add(createReporterSetup(reporterName, metricConfig, reporter)); }); } catch (Throwable t) { LOG.error("Could not instantiate metrics reporter {}. Metrics might not be exposed/reported.", reporterName, t); } } return reporterSetups; }
reporterConfig.addAllToProperties(metricConfig);
private static List<ReporterSetup> setupReporters(Map<String, MetricReporterFactory> reporterFactories, List<Tuple2<String, Configuration>> reporterConfigurations) { List<ReporterSetup> reporterSetups = new ArrayList<>(reporterConfigurations.size()); for (Tuple2<String, Configuration> reporterConfiguration: reporterConfigurations) { String reporterName = reporterConfiguration.f0; Configuration reporterConfig = reporterConfiguration.f1; try { Optional<MetricReporter> metricReporterOptional = loadReporter(reporterName, reporterConfig, reporterFactories); metricReporterOptional.ifPresent(reporter -> { MetricConfig metricConfig = new MetricConfig(); reporterConfig.addAllToProperties(metricConfig); reporterSetups.add(createReporterSetup(reporterName, metricConfig, reporter)); }); } catch (Throwable t) { LOG.error("Could not instantiate metrics reporter {}. Metrics might not be exposed/reported.", reporterName, t); } } return reporterSetups; }
class ReporterSetup { private static final Logger LOG = LoggerFactory.getLogger(ReporterSetup.class); private static final Pattern reporterListPattern = Pattern.compile("\\s*,\\s*"); private static final Pattern reporterClassPattern = Pattern.compile( Pattern.quote(ConfigConstants.METRICS_REPORTER_PREFIX) + "([\\S&&[^.]]*)\\." + '(' + Pattern.quote(ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX) + '|' + Pattern.quote(ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX) + ')'); private final String name; private final MetricConfig configuration; private final MetricReporter reporter; public ReporterSetup(final String name, final MetricConfig configuration, MetricReporter reporter) { this.name = name; this.configuration = configuration; this.reporter = reporter; } public Optional<String> getDelimiter() { return Optional.ofNullable(configuration.getString(ConfigConstants.METRICS_REPORTER_SCOPE_DELIMITER, null)); } public Optional<String> getIntervalSettings() { return Optional.ofNullable(configuration.getString(ConfigConstants.METRICS_REPORTER_INTERVAL_SUFFIX, null)); } public Set<String> getExcludedVariables() { String excludedVariablesList = configuration.getString(ConfigConstants.METRICS_REPORTER_EXCLUDED_VARIABLES, null); if (excludedVariablesList == null) { return Collections.emptySet(); } else { final Set<String> excludedVariables = new HashSet<>(); for (String exclusion : excludedVariablesList.split(";")) { excludedVariables.add(ScopeFormat.asVariable(exclusion)); } return Collections.unmodifiableSet(excludedVariables); } } public String getName() { return name; } @VisibleForTesting MetricConfig getConfiguration() { return configuration; } public MetricReporter getReporter() { return reporter; } @VisibleForTesting public static ReporterSetup forReporter(String reporterName, MetricReporter reporter) { return createReporterSetup(reporterName, new MetricConfig(), reporter); } @VisibleForTesting public static ReporterSetup forReporter(String reporterName, 
MetricConfig metricConfig, MetricReporter reporter) { return createReporterSetup(reporterName, metricConfig, reporter); } private static ReporterSetup createReporterSetup(String reporterName, MetricConfig metricConfig, MetricReporter reporter) { LOG.debug("Configuring {} with {}.", reporterName, metricConfig); reporter.open(metricConfig); return new ReporterSetup(reporterName, metricConfig, reporter); } public static List<ReporterSetup> fromConfiguration(final Configuration configuration, final PluginManager pluginManager) { LOG.debug("Initializing Reporters from Configuration: {}", configuration); String includedReportersString = configuration.getString(MetricOptions.REPORTERS_LIST, ""); Set<String> namedReporters = findEnabledReportersInConfiguration(configuration, includedReportersString); if (namedReporters.isEmpty()) { return Collections.emptyList(); } final Map<String, MetricReporterFactory> reporterFactories = loadAvailableReporterFactories(pluginManager); LOG.debug("Loaded Reporter Factories: {}", reporterFactories); final List<Tuple2<String, Configuration>> reporterConfigurations = loadReporterConfigurations(configuration, namedReporters); LOG.debug("Loaded Reporter Configurations: {}", reporterConfigurations); List<ReporterSetup> reporterSetups = setupReporters(reporterFactories, reporterConfigurations); LOG.debug("All initialized Reporters:"); reporterSetups.forEach(i -> LOG.debug("{} - {}", i.getName(), i.getConfiguration())); return reporterSetups; } private static List<Tuple2<String, Configuration>> loadReporterConfigurations(Configuration configuration, Set<String> namedReporters) { final List<Tuple2<String, Configuration>> reporterConfigurations = new ArrayList<>(namedReporters.size()); for (String namedReporter: namedReporters) { DelegatingConfiguration delegatingConfiguration = new DelegatingConfiguration( configuration, ConfigConstants.METRICS_REPORTER_PREFIX + namedReporter + '.'); reporterConfigurations.add(Tuple2.of(namedReporter, 
delegatingConfiguration)); } return reporterConfigurations; } private static Set<String> findEnabledReportersInConfiguration(Configuration configuration, String includedReportersString) { Set<String> includedReporters = reporterListPattern.splitAsStream(includedReportersString) .filter(r -> !r.isEmpty()) .collect(Collectors.toSet()); Set<String> namedOrderedReporters = new TreeSet<>(String::compareTo); for (String key : configuration.keySet()) { if (key.startsWith(ConfigConstants.METRICS_REPORTER_PREFIX)) { Matcher matcher = reporterClassPattern.matcher(key); if (matcher.matches()) { String reporterName = matcher.group(1); if (includedReporters.isEmpty() || includedReporters.contains(reporterName)) { if (namedOrderedReporters.contains(reporterName)) { LOG.warn("Duplicate class configuration detected for reporter {}.", reporterName); } else { namedOrderedReporters.add(reporterName); } } else { LOG.info("Excluding reporter {}, not configured in reporter list ({}).", reporterName, includedReportersString); } } } } return namedOrderedReporters; } private static Map<String, MetricReporterFactory> loadAvailableReporterFactories(PluginManager pluginManager) { final Map<String, MetricReporterFactory> reporterFactories = new HashMap<>(2); final Iterator<MetricReporterFactory> factoryIterator = getAllReporterFactories(pluginManager); LOG.debug("All available factories (from both SPIs and Plugins):"); getAllReporterFactories(pluginManager).forEachRemaining(i -> LOG.debug(i.toString())); while (factoryIterator.hasNext()) { try { MetricReporterFactory factory = factoryIterator.next(); String factoryClassName = factory.getClass().getName(); MetricReporterFactory existingFactory = reporterFactories.get(factoryClassName); if (existingFactory == null){ reporterFactories.put(factoryClassName, factory); LOG.warn(new File(factory.getClass().getProtectionDomain().getCodeSource().getLocation() .toURI()).getCanonicalPath()); } else { LOG.warn("Multiple implementations of the same 
reporter were found in 'lib' and 'plugins' directories for {}. It is recommended to remove redundant reporter JARs to resolve used versions' ambiguity.", factoryClassName); } } catch (Exception | ServiceConfigurationError e) { LOG.warn("Error while loading reporter factory.", e); } } return Collections.unmodifiableMap(reporterFactories); } private static Iterator<MetricReporterFactory> getAllReporterFactories(PluginManager pluginManager){ final Iterator<MetricReporterFactory> factoryIteratorSPI = ServiceLoader.load(MetricReporterFactory.class).iterator(); final Iterator<MetricReporterFactory> factoryIteratorPlugins = pluginManager.load(MetricReporterFactory.class); return Iterators.concat(factoryIteratorPlugins, factoryIteratorSPI); } private static Optional<MetricReporter> loadReporter( final String reporterName, final Configuration reporterConfig, final Map<String, MetricReporterFactory> reporterFactories) throws ClassNotFoundException, IllegalAccessException, InstantiationException { final String reporterClassName = reporterConfig.getString(ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, null); final String factoryClassName = reporterConfig.getString(ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX, null); if (factoryClassName != null) { return loadViaFactory(factoryClassName, reporterName, reporterConfig, reporterFactories); } if (reporterClassName != null) { return loadViaReflection(reporterClassName, reporterName, reporterConfig, reporterFactories); } LOG.warn("No reporter class nor factory set for reporter {}. 
Metrics might not be exposed/reported.", reporterName); return Optional.empty(); } private static Optional<MetricReporter> loadViaFactory( final String factoryClassName, final String reporterName, final Configuration reporterConfig, final Map<String, MetricReporterFactory> reporterFactories) { MetricReporterFactory factory = reporterFactories.get(factoryClassName); if (factory == null) { LOG.warn("The reporter factory ({}) could not be found for reporter {}. Available factories: {}.", factoryClassName, reporterName, reporterFactories.keySet()); return Optional.empty(); } else { final MetricConfig metricConfig = new MetricConfig(); reporterConfig.addAllToProperties(metricConfig); return Optional.of(factory.createMetricReporter(metricConfig)); } } private static Optional<MetricReporter> loadViaReflection( final String reporterClassName, final String reporterName, final Configuration reporterConfig, final Map<String, MetricReporterFactory> reporterFactories) throws ClassNotFoundException, IllegalAccessException, InstantiationException { final Class<?> reporterClass = Class.forName(reporterClassName); final InstantiateViaFactory alternativeFactoryAnnotation = reporterClass.getAnnotation(InstantiateViaFactory.class); if (alternativeFactoryAnnotation != null) { final String alternativeFactoryClassName = alternativeFactoryAnnotation.factoryClassName(); LOG.info("The reporter configuration of {} is out-dated (but still supported)." + " Please configure a factory class instead: '{}{}.{}: {}' to ensure that the configuration" + " continues to work with future versions.", reporterName, ConfigConstants.METRICS_REPORTER_PREFIX, reporterName, ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX, alternativeFactoryClassName); return loadViaFactory(alternativeFactoryClassName, reporterName, reporterConfig, reporterFactories); } return Optional.of((MetricReporter) reporterClass.newInstance()); } }
class configuration detected for reporter {}
I don't fully understand this — could you please explain it in more detail?
public void testIODirectoryNotWritable() throws Exception { File nonWritable = tempFolder.newFolder(); Assume.assumeTrue("Cannot create non-writable temporary file directory. Skipping test.", nonWritable.setWritable(false, false)); try { Configuration cfg = new Configuration(); cfg.setString(CoreOptions.TMP_DIRS, nonWritable.getAbsolutePath()); try { TaskManagerRunner.startTaskManager( cfg, ResourceID.generate(), rpcService, mock(HighAvailabilityServices.class), mock(HeartbeatServices.class), NoOpMetricRegistry.INSTANCE, mock(BlobCacheService.class), false, mock(FatalErrorHandler.class)); fail("Should fail synchronously with an exception"); } catch (IOException e) { } } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } finally { nonWritable.setWritable(true, false); try { FileUtils.deleteDirectory(nonWritable); } catch (IOException e) { } } }
mock(FatalErrorHandler.class));
public void testIODirectoryNotWritable() throws Exception { File nonWritable = tempFolder.newFolder(); Assume.assumeTrue("Cannot create non-writable temporary file directory. Skipping test.", nonWritable.setWritable(false, false)); try { Configuration cfg = new Configuration(); cfg.setString(CoreOptions.TMP_DIRS, nonWritable.getAbsolutePath()); try { startTaskManager( cfg, rpcService, highAvailabilityServices); fail("Should fail synchronously with an IOException"); } catch (IOException e) { } } finally { nonWritable.setWritable(true, false); try { FileUtils.deleteDirectory(nonWritable); } catch (IOException e) { } } }
class TaskManagerRunnerStartupTest extends TestLogger { private static final String LOCAL_HOST = "localhost"; @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); private RpcService rpcService = createRpcService(); /** * Tests that the TaskManagerRunner startup fails synchronously when the I/O * directories are not writable. */ @Test /** * Tests that the TaskManagerRunner startup fails synchronously when the memory configuration is wrong. */ @Test public void testMemoryConfigWrong() { try { Configuration cfg = new Configuration(); cfg.setBoolean(TaskManagerOptions.MANAGED_MEMORY_PRE_ALLOCATE, true); cfg.setString(TaskManagerOptions.MANAGED_MEMORY_SIZE, "-42m"); try { TaskManagerRunner.startTaskManager( cfg, ResourceID.generate(), rpcService, mock(HighAvailabilityServices.class), mock(HeartbeatServices.class), NoOpMetricRegistry.INSTANCE, mock(BlobCacheService.class), false, mock(FatalErrorHandler.class)); fail("Should fail synchronously with an exception"); } catch (IllegalConfigurationException e) { } final long memSize = (((long) Integer.MAX_VALUE - 1) * MemorySize.parse(TaskManagerOptions.MEMORY_SEGMENT_SIZE.defaultValue()).getBytes()) >> 20; cfg.setString(TaskManagerOptions.MANAGED_MEMORY_SIZE, memSize + "m"); try { TaskManagerRunner.startTaskManager( cfg, ResourceID.generate(), rpcService, mock(HighAvailabilityServices.class), mock(HeartbeatServices.class), NoOpMetricRegistry.INSTANCE, mock(BlobCacheService.class), false, mock(FatalErrorHandler.class)); fail("Should fail synchronously with an exception"); } catch (Exception e) { assertTrue(e.getCause() instanceof OutOfMemoryError); } } catch(Exception e) { e.printStackTrace(); fail(e.getMessage()); } } /** * Tests that the TaskManagerRunner startup fails if the network stack cannot be initialized. 
*/ @Test public void testStartupWhenNetworkStackFailsToInitialize() throws Exception { final ServerSocket blocker = blockServerSocket(); try { final Configuration cfg = new Configuration(); cfg.setInteger(TaskManagerOptions.DATA_PORT, blocker.getLocalPort()); TaskManagerRunner.startTaskManager( cfg, ResourceID.generate(), rpcService, mock(HighAvailabilityServices.class), mock(HeartbeatServices.class), NoOpMetricRegistry.INSTANCE, mock(BlobCacheService.class), false, mock(FatalErrorHandler.class)); fail("Should throw IOException when the network stack cannot be initialized."); } catch (IOException e) { } finally { IOUtils.closeQuietly(blocker); } } private static RpcService createRpcService() { final RpcService rpcService = mock(RpcService.class); when(rpcService.getAddress()).thenReturn(LOCAL_HOST); return rpcService; } private static ServerSocket blockServerSocket() { try { return new ServerSocket(0, 50, InetAddress.getByName(LOCAL_HOST)); } catch (IOException e) { assumeNoException("Skip test because could not open a server socket", e); throw new RuntimeException("satisfy compiler"); } } }
class TaskManagerRunnerStartupTest extends TestLogger { private static final String LOCAL_HOST = "localhost"; @Rule public final TemporaryFolder tempFolder = new TemporaryFolder(); private final RpcService rpcService = createRpcService(); private TestingHighAvailabilityServices highAvailabilityServices; @Before public void setupTest() { highAvailabilityServices = new TestingHighAvailabilityServices(); } @After public void tearDownTest() throws Exception { highAvailabilityServices.closeAndCleanupAllData(); highAvailabilityServices = null; } /** * Tests that the TaskManagerRunner startup fails synchronously when the I/O * directories are not writable. */ @Test /** * Tests that the TaskManagerRunner startup fails synchronously when the memory configuration is wrong. */ @Test public void testMemoryConfigWrong() throws Exception { Configuration cfg = new Configuration(); cfg.setBoolean(TaskManagerOptions.MANAGED_MEMORY_PRE_ALLOCATE, true); cfg.setString(TaskManagerOptions.MANAGED_MEMORY_SIZE, "-42m"); try { startTaskManager( cfg, rpcService, highAvailabilityServices); fail("Should fail synchronously with an exception"); } catch (IllegalConfigurationException e) { } final long memSize = (((long) Integer.MAX_VALUE - 1) * MemorySize.parse(TaskManagerOptions.MEMORY_SEGMENT_SIZE.defaultValue()).getBytes()) >> 20; cfg.setString(TaskManagerOptions.MANAGED_MEMORY_SIZE, memSize + "m"); try { startTaskManager( cfg, rpcService, highAvailabilityServices); fail("Should fail synchronously with an exception"); } catch (Exception e) { assertTrue(e.getCause() instanceof OutOfMemoryError); } } /** * Tests that the TaskManagerRunner startup fails if the network stack cannot be initialized. 
*/ @Test public void testStartupWhenNetworkStackFailsToInitialize() throws Exception { final ServerSocket blocker = new ServerSocket(0, 50, InetAddress.getByName(LOCAL_HOST)); try { final Configuration cfg = new Configuration(); cfg.setInteger(TaskManagerOptions.DATA_PORT, blocker.getLocalPort()); startTaskManager( cfg, rpcService, highAvailabilityServices); fail("Should throw IOException when the network stack cannot be initialized."); } catch (IOException e) { } finally { IOUtils.closeQuietly(blocker); } } private static RpcService createRpcService() { final RpcService rpcService = mock(RpcService.class); when(rpcService.getAddress()).thenReturn(LOCAL_HOST); return rpcService; } private static void startTaskManager( Configuration configuration, RpcService rpcService, HighAvailabilityServices highAvailabilityServices ) throws Exception { TaskManagerRunner.startTaskManager( configuration, ResourceID.generate(), rpcService, highAvailabilityServices, mock(HeartbeatServices.class), NoOpMetricRegistry.INSTANCE, mock(BlobCacheService.class), false, error -> {}); } }
`@DoNotRecord(skipInPlayback = true)` -> there is such a case in resources. I will delete this part of the logic and try it in both playback and record modes.
protected void beforeTest() { TokenCredential credential; HttpPipeline httpPipeline; Map<String, String> textReplacementRules = new HashMap<>(); String logLevel = Configuration.getGlobalConfiguration().get(AZURE_TEST_LOG_LEVEL); HttpLogDetailLevel httpLogDetailLevel; try { httpLogDetailLevel = HttpLogDetailLevel.valueOf(logLevel); } catch (Exception e) { if (isPlaybackMode()) { httpLogDetailLevel = HttpLogDetailLevel.NONE; LOGGER.error("Environment variable '{}' has not been set yet. Using 'NONE' for PLAYBACK.", AZURE_TEST_LOG_LEVEL); } else { httpLogDetailLevel = HttpLogDetailLevel.BODY_AND_HEADERS; LOGGER.error("Environment variable '{}' has not been set yet. Using 'BODY_AND_HEADERS' for RECORD/LIVE.", AZURE_TEST_LOG_LEVEL); } } if (httpLogDetailLevel == HttpLogDetailLevel.NONE) { try { System.setOut(new PrintStream(EMPTY_OUTPUT_STREAM, false, Charset.defaultCharset().name())); System.setErr(new PrintStream(EMPTY_OUTPUT_STREAM, false, Charset.defaultCharset().name())); } catch (UnsupportedEncodingException e) { } } if (isPlaybackMode()) { if (interceptorManager.getRecordedData() == null) { skipInPlayback(); } testProfile = PLAYBACK_PROFILE; List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new TextReplacementPolicy(interceptorManager.getRecordedData(), textReplacementRules)); httpPipeline = buildHttpPipeline( null, testProfile, new HttpLogOptions().setLogLevel(httpLogDetailLevel), policies, interceptorManager.getPlaybackClient()); textReplacementRules.put(PLAYBACK_URI_BASE + "1234", PLAYBACK_URI); addTextReplacementRules(textReplacementRules); } else { if (System.getenv(AZURE_AUTH_LOCATION) != null) { final File credFile = new File(System.getenv(AZURE_AUTH_LOCATION)); try { testAuthFile = AuthFile.parse(credFile); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException("Cannot parse auth file. 
Please check file format.", e)); } credential = testAuthFile.getCredential(); testProfile = new AzureProfile(testAuthFile.getTenantId(), testAuthFile.getSubscriptionId(), testAuthFile.getEnvironment()); } else { Configuration configuration = Configuration.getGlobalConfiguration(); String clientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID); String tenantId = configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID); String clientSecret = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET); String subscriptionId = configuration.get(Configuration.PROPERTY_AZURE_SUBSCRIPTION_ID); if (clientId == null || tenantId == null || clientSecret == null || subscriptionId == null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("When running tests in record mode either 'AZURE_AUTH_LOCATION' or 'AZURE_CLIENT_ID, AZURE_TENANT_ID, AZURE_CLIENT_SECRET and AZURE_SUBSCRIPTION_ID' needs to be set")); } credential = new ClientSecretCredentialBuilder() .tenantId(tenantId) .clientId(clientId) .clientSecret(clientSecret) .authorityHost(AzureEnvironment.AZURE.getActiveDirectoryEndpoint()) .build(); testProfile = new AzureProfile(tenantId, subscriptionId, AzureEnvironment.AZURE); } List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new TimeoutPolicy(Duration.ofMinutes(1))); if (!interceptorManager.isLiveMode() && !testContextManager.doNotRecordTest()) { policies.add(new TextReplacementPolicy(interceptorManager.getRecordedData(), textReplacementRules)); policies.add(this.interceptorManager.getRecordPolicy()); } if (httpLogDetailLevel == HttpLogDetailLevel.BODY_AND_HEADERS) { policies.add(new HttpDebugLoggingPolicy()); httpLogDetailLevel = HttpLogDetailLevel.NONE; } httpPipeline = buildHttpPipeline( credential, testProfile, new HttpLogOptions().setLogLevel(httpLogDetailLevel), policies, generateHttpClientWithProxy(null, null)); textReplacementRules.put(testProfile.getSubscriptionId(), ZERO_SUBSCRIPTION); 
textReplacementRules.put(testProfile.getTenantId(), ZERO_TENANT); textReplacementRules.put(Pattern.quote(AzureEnvironment.AZURE.getResourceManagerEndpoint()), PLAYBACK_URI + "/"); textReplacementRules.put(Pattern.quote(AzureEnvironment.AZURE.getMicrosoftGraphEndpoint()), PLAYBACK_URI + "/"); textReplacementRules.put("https: textReplacementRules.put("https: addTextReplacementRules(textReplacementRules); } initializeClients(httpPipeline, testProfile); }
protected void beforeTest() { TokenCredential credential; HttpPipeline httpPipeline; String logLevel = Configuration.getGlobalConfiguration().get(AZURE_TEST_LOG_LEVEL); HttpLogDetailLevel httpLogDetailLevel; try { httpLogDetailLevel = HttpLogDetailLevel.valueOf(logLevel); } catch (Exception e) { if (isPlaybackMode()) { httpLogDetailLevel = HttpLogDetailLevel.NONE; LOGGER.error("Environment variable '{}' has not been set yet. Using 'NONE' for PLAYBACK.", AZURE_TEST_LOG_LEVEL); } else { httpLogDetailLevel = HttpLogDetailLevel.BODY_AND_HEADERS; LOGGER.error("Environment variable '{}' has not been set yet. Using 'BODY_AND_HEADERS' for RECORD/LIVE.", AZURE_TEST_LOG_LEVEL); } } if (httpLogDetailLevel == HttpLogDetailLevel.NONE) { try { System.setOut(new PrintStream(EMPTY_OUTPUT_STREAM, false, Charset.defaultCharset().name())); System.setErr(new PrintStream(EMPTY_OUTPUT_STREAM, false, Charset.defaultCharset().name())); } catch (UnsupportedEncodingException e) { } } if (isPlaybackMode()) { testProfile = PLAYBACK_PROFILE; List<HttpPipelinePolicy> policies = new ArrayList<>(); httpPipeline = buildHttpPipeline( request -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), testProfile, new HttpLogOptions().setLogLevel(httpLogDetailLevel), policies, interceptorManager.getPlaybackClient()); if (!testContextManager.doNotRecordTest()) { interceptorManager.addMatchers(Arrays.asList(new CustomMatcher().setIgnoredQueryParameters(Arrays.asList("api-version")))); addSanitizers(); } } else { if (System.getenv(AZURE_AUTH_LOCATION) != null) { final File credFile = new File(System.getenv(AZURE_AUTH_LOCATION)); try { testAuthFile = AuthFile.parse(credFile); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException("Cannot parse auth file. 
Please check file format.", e)); } credential = testAuthFile.getCredential(); testProfile = new AzureProfile(testAuthFile.getTenantId(), testAuthFile.getSubscriptionId(), testAuthFile.getEnvironment()); } else { Configuration configuration = Configuration.getGlobalConfiguration(); String clientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID); String tenantId = configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID); String clientSecret = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET); String subscriptionId = configuration.get(Configuration.PROPERTY_AZURE_SUBSCRIPTION_ID); if (clientId == null || tenantId == null || clientSecret == null || subscriptionId == null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("When running tests in record mode either 'AZURE_AUTH_LOCATION' or 'AZURE_CLIENT_ID, AZURE_TENANT_ID, AZURE_CLIENT_SECRET and AZURE_SUBSCRIPTION_ID' needs to be set")); } credential = new ClientSecretCredentialBuilder() .tenantId(tenantId) .clientId(clientId) .clientSecret(clientSecret) .authorityHost(AzureEnvironment.AZURE.getActiveDirectoryEndpoint()) .build(); testProfile = new AzureProfile(tenantId, subscriptionId, AzureEnvironment.AZURE); } List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new TimeoutPolicy(Duration.ofMinutes(1))); if (!interceptorManager.isLiveMode() && !testContextManager.doNotRecordTest()) { policies.add(this.interceptorManager.getRecordPolicy()); addSanitizers(); } if (httpLogDetailLevel == HttpLogDetailLevel.BODY_AND_HEADERS) { policies.add(new HttpDebugLoggingPolicy()); httpLogDetailLevel = HttpLogDetailLevel.NONE; } httpPipeline = buildHttpPipeline( credential, testProfile, new HttpLogOptions().setLogLevel(httpLogDetailLevel), policies, generateHttpClientWithProxy(null, null)); } initializeClients(httpPipeline, testProfile); }
class ResourceManagerTestProxyTestBase extends TestProxyTestBase { private static final String ZERO_UUID = "00000000-0000-0000-0000-000000000000"; private static final String ZERO_SUBSCRIPTION = ZERO_UUID; private static final String ZERO_TENANT = ZERO_UUID; private static final String PLAYBACK_URI_BASE = "https: private static final String AZURE_AUTH_LOCATION = "AZURE_AUTH_LOCATION"; private static final String AZURE_TEST_LOG_LEVEL = "AZURE_TEST_LOG_LEVEL"; private static final String HTTPS_PROXY_HOST = "https.proxyHost"; private static final String HTTPS_PROXY_PORT = "https.proxyPort"; private static final String HTTP_PROXY_HOST = "http.proxyHost"; private static final String HTTP_PROXY_PORT = "http.proxyPort"; private static final String USE_SYSTEM_PROXY = "java.net.useSystemProxies"; private static final String VALUE_TRUE = "true"; private static final String PLAYBACK_URI = PLAYBACK_URI_BASE + "1234"; private static final AzureProfile PLAYBACK_PROFILE = new AzureProfile( ZERO_TENANT, ZERO_SUBSCRIPTION, new AzureEnvironment(Arrays.stream(AzureEnvironment.Endpoint.values()) .collect(Collectors.toMap(AzureEnvironment.Endpoint::identifier, endpoint -> PLAYBACK_URI))) ); private static final OutputStream EMPTY_OUTPUT_STREAM = new OutputStream() { @Override public void write(int b) { } }; private static final ClientLogger LOGGER = new ClientLogger(ResourceManagerTestProxyTestBase.class); private AzureProfile testProfile; private AuthFile testAuthFile; private boolean isSkipInPlayback; /** * Sets upper bound execution timeout for each @Test method. * {@link org.junit.jupiter.api.Timeout} annotation on test methods will only narrow the timeout, not affecting the upper * bound. */ @RegisterExtension final PlaybackTimeoutInterceptor playbackTimeoutInterceptor = new PlaybackTimeoutInterceptor(() -> Duration.ofSeconds(30)); /** * Generates a random resource name. * * @param prefix Prefix for the resource name. * @param maxLen Maximum length of the resource name. 
* @return A randomly generated resource name with a given prefix and maximum length. */ protected String generateRandomResourceName(String prefix, int maxLen) { return testResourceNamer.randomName(prefix, maxLen); } /** * @return A randomly generated UUID. */ protected String generateRandomUuid() { return testResourceNamer.randomUuid(); } /** * @return random password */ public static String password() { String password = new ResourceNamer("").randomName("Pa5$", 12); LOGGER.info("Password: {}", password); return password; } private static String sshPublicKey; /** * @return an SSH public key */ public static String sshPublicKey() { if (sshPublicKey == null) { try { KeyPairGenerator keyGen = KeyPairGenerator.getInstance("RSA"); keyGen.initialize(1024); KeyPair pair = keyGen.generateKeyPair(); PublicKey publicKey = pair.getPublic(); RSAPublicKey rsaPublicKey = (RSAPublicKey) publicKey; ByteArrayOutputStream byteOs = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(byteOs); dos.writeInt("ssh-rsa".getBytes(StandardCharsets.US_ASCII).length); dos.write("ssh-rsa".getBytes(StandardCharsets.US_ASCII)); dos.writeInt(rsaPublicKey.getPublicExponent().toByteArray().length); dos.write(rsaPublicKey.getPublicExponent().toByteArray()); dos.writeInt(rsaPublicKey.getModulus().toByteArray().length); dos.write(rsaPublicKey.getModulus().toByteArray()); String publicKeyEncoded = new String(Base64.getEncoder().encode(byteOs.toByteArray()), StandardCharsets.US_ASCII); sshPublicKey = "ssh-rsa " + publicKeyEncoded; } catch (NoSuchAlgorithmException | IOException e) { throw LOGGER.logExceptionAsError(new IllegalStateException("failed to generate ssh key", e)); } } return sshPublicKey; } /** * Loads a credential from file. * * @return A credential loaded from a file. */ protected TokenCredential credentialFromFile() { return testAuthFile.getCredential(); } /** * Loads a client ID from file. * * @return A client ID loaded from a file. 
*/ protected String clientIdFromFile() { String clientId = testAuthFile == null ? null : testAuthFile.getClientId(); return testResourceNamer.recordValueFromConfig(clientId); } /** * @return The test profile. */ protected AzureProfile profile() { return testProfile; } /** * @return Whether the test mode is {@link TestMode */ protected boolean isPlaybackMode() { return getTestMode() == TestMode.PLAYBACK; } /** * @return Whether the test should be skipped in playback. */ protected boolean skipInPlayback() { if (isPlaybackMode()) { isSkipInPlayback = true; } return isSkipInPlayback; } @Override /** * Generates an {@link HttpClient} with a proxy. * * @param clientBuilder The HttpClient builder. * @param proxyOptions The proxy. * @return An HttpClient with a proxy. */ protected HttpClient generateHttpClientWithProxy(NettyAsyncHttpClientBuilder clientBuilder, ProxyOptions proxyOptions) { if (clientBuilder == null) { clientBuilder = new NettyAsyncHttpClientBuilder(); } if (proxyOptions != null) { clientBuilder.proxy(proxyOptions); } else { try { System.setProperty(USE_SYSTEM_PROXY, VALUE_TRUE); List<Proxy> proxies = ProxySelector.getDefault().select(new URI(AzureEnvironment.AZURE.getResourceManagerEndpoint())); if (!proxies.isEmpty()) { for (Proxy proxy : proxies) { if (proxy.address() instanceof InetSocketAddress) { String host = ((InetSocketAddress) proxy.address()).getHostName(); int port = ((InetSocketAddress) proxy.address()).getPort(); switch (proxy.type()) { case HTTP: return clientBuilder.proxy(new ProxyOptions(ProxyOptions.Type.HTTP, new InetSocketAddress(host, port))).build(); case SOCKS: return clientBuilder.proxy(new ProxyOptions(ProxyOptions.Type.SOCKS5, new InetSocketAddress(host, port))).build(); default: } } } } String host = null; int port = 0; if (System.getProperty(HTTPS_PROXY_HOST) != null && System.getProperty(HTTPS_PROXY_PORT) != null) { host = System.getProperty(HTTPS_PROXY_HOST); port = Integer.parseInt(System.getProperty(HTTPS_PROXY_PORT)); } else 
if (System.getProperty(HTTP_PROXY_HOST) != null && System.getProperty(HTTP_PROXY_PORT) != null) { host = System.getProperty(HTTP_PROXY_HOST); port = Integer.parseInt(System.getProperty(HTTP_PROXY_PORT)); } if (host != null) { clientBuilder.proxy(new ProxyOptions(ProxyOptions.Type.HTTP, new InetSocketAddress(host, port))); } } catch (URISyntaxException e) { } } return clientBuilder.build(); } @Override protected void afterTest() { if (!isSkipInPlayback) { cleanUpResources(); } } private void addTextReplacementRules(Map<String, String> rules) { for (Map.Entry<String, String> entry : rules.entrySet()) { interceptorManager.addTextReplacementRule(entry.getKey(), entry.getValue()); } } /** * Sets sdk context when running the tests * * @param internalContext the internal runtime context * @param objects the manager classes to change internal context * @param <T> the type of internal context * @throws RuntimeException when field cannot be found or set. */ protected <T> void setInternalContext(T internalContext, Object... 
objects) { try { for (Object obj : objects) { for (final Field field : obj.getClass().getSuperclass().getDeclaredFields()) { if (field.getName().equals("resourceManager")) { setAccessible(field); Field context = field.get(obj).getClass().getDeclaredField("internalContext"); setAccessible(context); context.set(field.get(obj), internalContext); } } for (Field field : obj.getClass().getDeclaredFields()) { if (field.getName().equals("internalContext")) { setAccessible(field); field.set(obj, internalContext); } else if (field.getName().contains("Manager")) { setAccessible(field); setInternalContext(internalContext, field.get(obj)); } } } } catch (IllegalAccessException | NoSuchFieldException ex) { throw LOGGER.logExceptionAsError(new RuntimeException(ex)); } } private void setAccessible(final AccessibleObject accessibleObject) { Runnable runnable = () -> accessibleObject.setAccessible(true); runnable.run(); } /** * Builds the manager with provided http pipeline and profile in general manner. * * @param manager the class of the manager * @param httpPipeline the http pipeline * @param profile the azure profile * @param <T> the type of the manager * @return the manager instance * @throws RuntimeException when field cannot be found or set. */ protected <T> T buildManager(Class<T> manager, HttpPipeline httpPipeline, AzureProfile profile) { try { Constructor<T> constructor = manager.getDeclaredConstructor(httpPipeline.getClass(), profile.getClass()); setAccessible(constructor); return constructor.newInstance(httpPipeline, profile); } catch (NoSuchMethodException | IllegalAccessException | InstantiationException | InvocationTargetException ex) { throw LOGGER.logExceptionAsError(new RuntimeException(ex)); } } /** * Builds an HttpPipeline. * * @param credential The credentials to use in the pipeline. * @param profile The AzureProfile to use in the pipeline. * @param httpLogOptions The HTTP logging options to use in the pipeline. 
* @param policies Additional policies to use in the pipeline. * @param httpClient The HttpClient to use in the pipeline. * @return A new constructed HttpPipeline. */ protected abstract HttpPipeline buildHttpPipeline( TokenCredential credential, AzureProfile profile, HttpLogOptions httpLogOptions, List<HttpPipelinePolicy> policies, HttpClient httpClient); /** * Initializes service clients used in testing. * * @param httpPipeline The HttpPipeline to use in the clients. * @param profile The AzureProfile to use in the clients. */ protected abstract void initializeClients(HttpPipeline httpPipeline, AzureProfile profile); /** * Cleans up resources. */ protected abstract void cleanUpResources(); private final class PlaybackTimeoutInterceptor implements InvocationInterceptor { private final Duration duration; private PlaybackTimeoutInterceptor(Supplier<Duration> timeoutSupplier) { Objects.requireNonNull(timeoutSupplier); this.duration = timeoutSupplier.get(); } @Override public void interceptTestMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext, ExtensionContext extensionContext) throws Throwable { if (isPlaybackMode()) { Assertions.assertTimeoutPreemptively(duration, invocation::proceed); } else { invocation.proceed(); } } } }
class ResourceManagerTestProxyTestBase extends TestProxyTestBase { private static final String ZERO_UUID = "00000000-0000-0000-0000-000000000000"; private static final String ZERO_SUBSCRIPTION = ZERO_UUID; private static final String ZERO_TENANT = ZERO_UUID; private static final String PLAYBACK_URI_BASE = "https: private static final String AZURE_AUTH_LOCATION = "AZURE_AUTH_LOCATION"; private static final String AZURE_TEST_LOG_LEVEL = "AZURE_TEST_LOG_LEVEL"; private static final String HTTPS_PROXY_HOST = "https.proxyHost"; private static final String HTTPS_PROXY_PORT = "https.proxyPort"; private static final String HTTP_PROXY_HOST = "http.proxyHost"; private static final String HTTP_PROXY_PORT = "http.proxyPort"; private static final String USE_SYSTEM_PROXY = "java.net.useSystemProxies"; private static final String VALUE_TRUE = "true"; private static final String PLAYBACK_URI = PLAYBACK_URI_BASE + "1234"; private static final AzureProfile PLAYBACK_PROFILE = new AzureProfile( ZERO_TENANT, ZERO_SUBSCRIPTION, new AzureEnvironment(Arrays.stream(AzureEnvironment.Endpoint.values()) .collect(Collectors.toMap(AzureEnvironment.Endpoint::identifier, endpoint -> PLAYBACK_URI))) ); private static final OutputStream EMPTY_OUTPUT_STREAM = new OutputStream() { @Override public void write(int b) { } }; private static final ClientLogger LOGGER = new ClientLogger(ResourceManagerTestProxyTestBase.class); private AzureProfile testProfile; private AuthFile testAuthFile; private boolean isSkipInPlayback; /** * Sets upper bound execution timeout for each @Test method. * {@link org.junit.jupiter.api.Timeout} annotation on test methods will only narrow the timeout, not affecting the upper * bound. */ @RegisterExtension final PlaybackTimeoutInterceptor playbackTimeoutInterceptor = new PlaybackTimeoutInterceptor(() -> Duration.ofSeconds(30)); /** * Generates a random resource name. * * @param prefix Prefix for the resource name. * @param maxLen Maximum length of the resource name. 
* @return A randomly generated resource name with a given prefix and maximum length. */ protected String generateRandomResourceName(String prefix, int maxLen) { return testResourceNamer.randomName(prefix, maxLen); } /** * @return A randomly generated UUID. */ protected String generateRandomUuid() { return testResourceNamer.randomUuid(); } /** * @return random password */ public static String password() { String password = new ResourceNamer("").randomName("Pa5$", 12); LOGGER.info("Password: {}", password); return password; } private static String sshPublicKey; /** * @return an SSH public key */ public static String sshPublicKey() { if (sshPublicKey == null) { try { KeyPairGenerator keyGen = KeyPairGenerator.getInstance("RSA"); keyGen.initialize(1024); KeyPair pair = keyGen.generateKeyPair(); PublicKey publicKey = pair.getPublic(); RSAPublicKey rsaPublicKey = (RSAPublicKey) publicKey; ByteArrayOutputStream byteOs = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(byteOs); dos.writeInt("ssh-rsa".getBytes(StandardCharsets.US_ASCII).length); dos.write("ssh-rsa".getBytes(StandardCharsets.US_ASCII)); dos.writeInt(rsaPublicKey.getPublicExponent().toByteArray().length); dos.write(rsaPublicKey.getPublicExponent().toByteArray()); dos.writeInt(rsaPublicKey.getModulus().toByteArray().length); dos.write(rsaPublicKey.getModulus().toByteArray()); String publicKeyEncoded = new String(Base64.getEncoder().encode(byteOs.toByteArray()), StandardCharsets.US_ASCII); sshPublicKey = "ssh-rsa " + publicKeyEncoded; } catch (NoSuchAlgorithmException | IOException e) { throw LOGGER.logExceptionAsError(new IllegalStateException("failed to generate ssh key", e)); } } return sshPublicKey; } /** * Loads a credential from file. * * @return A credential loaded from a file. */ protected TokenCredential credentialFromFile() { return testAuthFile.getCredential(); } /** * Loads a client ID from file. * * @return A client ID loaded from a file. 
*/ protected String clientIdFromFile() { String clientId = testAuthFile == null ? null : testAuthFile.getClientId(); return testResourceNamer.recordValueFromConfig(clientId); } /** * @return The test profile. */ protected AzureProfile profile() { return testProfile; } /** * @return Whether the test mode is {@link TestMode */ protected boolean isPlaybackMode() { return getTestMode() == TestMode.PLAYBACK; } /** * @return Whether the test should be skipped in playback. */ protected boolean skipInPlayback() { if (isPlaybackMode()) { isSkipInPlayback = true; } return isSkipInPlayback; } @Override /** * Generates an {@link HttpClient} with a proxy. * * @param clientBuilder The HttpClient builder. * @param proxyOptions The proxy. * @return An HttpClient with a proxy. */ protected HttpClient generateHttpClientWithProxy(NettyAsyncHttpClientBuilder clientBuilder, ProxyOptions proxyOptions) { if (clientBuilder == null) { clientBuilder = new NettyAsyncHttpClientBuilder(); } if (proxyOptions != null) { clientBuilder.proxy(proxyOptions); } else { try { System.setProperty(USE_SYSTEM_PROXY, VALUE_TRUE); List<Proxy> proxies = ProxySelector.getDefault().select(new URI(AzureEnvironment.AZURE.getResourceManagerEndpoint())); if (!proxies.isEmpty()) { for (Proxy proxy : proxies) { if (proxy.address() instanceof InetSocketAddress) { String host = ((InetSocketAddress) proxy.address()).getHostName(); int port = ((InetSocketAddress) proxy.address()).getPort(); switch (proxy.type()) { case HTTP: return clientBuilder.proxy(new ProxyOptions(ProxyOptions.Type.HTTP, new InetSocketAddress(host, port))).build(); case SOCKS: return clientBuilder.proxy(new ProxyOptions(ProxyOptions.Type.SOCKS5, new InetSocketAddress(host, port))).build(); default: } } } } String host = null; int port = 0; if (System.getProperty(HTTPS_PROXY_HOST) != null && System.getProperty(HTTPS_PROXY_PORT) != null) { host = System.getProperty(HTTPS_PROXY_HOST); port = Integer.parseInt(System.getProperty(HTTPS_PROXY_PORT)); } else 
if (System.getProperty(HTTP_PROXY_HOST) != null && System.getProperty(HTTP_PROXY_PORT) != null) { host = System.getProperty(HTTP_PROXY_HOST); port = Integer.parseInt(System.getProperty(HTTP_PROXY_PORT)); } if (host != null) { clientBuilder.proxy(new ProxyOptions(ProxyOptions.Type.HTTP, new InetSocketAddress(host, port))); } } catch (URISyntaxException e) { } } return clientBuilder.build(); } @Override protected void afterTest() { if (!isSkipInPlayback) { cleanUpResources(); } } /** * Sets sdk context when running the tests * * @param internalContext the internal runtime context * @param objects the manager classes to change internal context * @param <T> the type of internal context * @throws RuntimeException when field cannot be found or set. */ protected <T> void setInternalContext(T internalContext, Object... objects) { try { for (Object obj : objects) { for (final Field field : obj.getClass().getSuperclass().getDeclaredFields()) { if (field.getName().equals("resourceManager")) { setAccessible(field); Field context = field.get(obj).getClass().getDeclaredField("internalContext"); setAccessible(context); context.set(field.get(obj), internalContext); } } for (Field field : obj.getClass().getDeclaredFields()) { if (field.getName().equals("internalContext")) { setAccessible(field); field.set(obj, internalContext); } else if (field.getName().contains("Manager")) { setAccessible(field); setInternalContext(internalContext, field.get(obj)); } } } } catch (IllegalAccessException | NoSuchFieldException ex) { throw LOGGER.logExceptionAsError(new RuntimeException(ex)); } } private void setAccessible(final AccessibleObject accessibleObject) { Runnable runnable = () -> accessibleObject.setAccessible(true); runnable.run(); } /** * Builds the manager with provided http pipeline and profile in general manner. 
* * @param manager the class of the manager * @param httpPipeline the http pipeline * @param profile the azure profile * @param <T> the type of the manager * @return the manager instance * @throws RuntimeException when field cannot be found or set. */ protected <T> T buildManager(Class<T> manager, HttpPipeline httpPipeline, AzureProfile profile) { try { Constructor<T> constructor = manager.getDeclaredConstructor(httpPipeline.getClass(), profile.getClass()); setAccessible(constructor); return constructor.newInstance(httpPipeline, profile); } catch (NoSuchMethodException | IllegalAccessException | InstantiationException | InvocationTargetException ex) { throw LOGGER.logExceptionAsError(new RuntimeException(ex)); } } /** * Builds an HttpPipeline. * * @param credential The credentials to use in the pipeline. * @param profile The AzureProfile to use in the pipeline. * @param httpLogOptions The HTTP logging options to use in the pipeline. * @param policies Additional policies to use in the pipeline. * @param httpClient The HttpClient to use in the pipeline. * @return A new constructed HttpPipeline. */ protected abstract HttpPipeline buildHttpPipeline( TokenCredential credential, AzureProfile profile, HttpLogOptions httpLogOptions, List<HttpPipelinePolicy> policies, HttpClient httpClient); /** * Initializes service clients used in testing. * * @param httpPipeline The HttpPipeline to use in the clients. * @param profile The AzureProfile to use in the clients. */ protected abstract void initializeClients(HttpPipeline httpPipeline, AzureProfile profile); /** * Cleans up resources. 
*/ protected abstract void cleanUpResources(); private void addSanitizers() { interceptorManager.addSanitizers(Arrays.asList( new TestProxySanitizer("(?<=/subscriptions/)([^/?]+)", ZERO_UUID, TestProxySanitizerType.URL), new TestProxySanitizer("(?<=%2Fsubscriptions%2F)([^/?]+)", ZERO_UUID, TestProxySanitizerType.URL), new TestProxySanitizer("Retry-After", null, "0", TestProxySanitizerType.HEADER) )); } private final class PlaybackTimeoutInterceptor implements InvocationInterceptor { private final Duration duration; private PlaybackTimeoutInterceptor(Supplier<Duration> timeoutSupplier) { Objects.requireNonNull(timeoutSupplier); this.duration = timeoutSupplier.get(); } @Override public void interceptTestMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext, ExtensionContext extensionContext) throws Throwable { if (isPlaybackMode()) { Assertions.assertTimeoutPreemptively(duration, invocation::proceed); } else { invocation.proceed(); } } } }
Do we need this codepath at all - can we just always use path from line 163? It also looks like this method is called quite often, so it'd be good to avoid a FileSystems.match() call every time it's called.
private ResourceId getJobDirResourceId(String stagingSessionToken) throws IOException { ResourceId baseResourceId; StagingSessionToken parsedToken = decodeStagingSessionToken(stagingSessionToken); try { baseResourceId = FileSystems.matchSingleFileSpec(parsedToken.getBasePath()) .resourceId(); } catch (FileNotFoundException fne) { baseResourceId = FileSystems .matchNewResource(parsedToken.getBasePath(), true /* isDirectory */); } return baseResourceId .resolve(parsedToken.getSessionId(), StandardResolveOptions.RESOLVE_DIRECTORY); }
baseResourceId = FileSystems.matchSingleFileSpec(parsedToken.getBasePath())
private ResourceId getJobDirResourceId(String stagingSessionToken) throws Exception { ResourceId baseResourceId; StagingSessionToken parsedToken = decodeStagingSessionToken(stagingSessionToken); baseResourceId = FileSystems .matchNewResource(parsedToken.getBasePath(), true /* isDirectory */); return baseResourceId .resolve(parsedToken.getSessionId(), StandardResolveOptions.RESOLVE_DIRECTORY); }
class BeamFileSystemArtifactStagingService extends ArtifactStagingServiceImplBase implements FnService { private static final Logger LOG = LoggerFactory.getLogger(BeamFileSystemArtifactStagingService.class); private static final ObjectMapper MAPPER = new ObjectMapper(); private static final Charset CHARSET = StandardCharsets.UTF_8; public static final String MANIFEST = "MANIFEST"; @Override public StreamObserver<PutArtifactRequest> putArtifact( StreamObserver<PutArtifactResponse> responseObserver) { return new PutArtifactStreamObserver(responseObserver); } @Override public void commitManifest( CommitManifestRequest request, StreamObserver<CommitManifestResponse> responseObserver) { try { ResourceId manifestResourceId = getManifestFileResourceId(request.getStagingSessionToken()); ResourceId artifactDirResourceId = getArtifactDirResourceId(request.getStagingSessionToken()); ProxyManifest.Builder proxyManifestBuilder = ProxyManifest.newBuilder() .setManifest(request.getManifest()); for (ArtifactMetadata artifactMetadata : request.getManifest().getArtifactList()) { proxyManifestBuilder.addLocation(Location.newBuilder() .setName(artifactMetadata.getName()) .setUri(artifactDirResourceId .resolve(encodedFileName(artifactMetadata), StandardResolveOptions.RESOLVE_FILE) .toString()).build()); } try (WritableByteChannel manifestWritableByteChannel = FileSystems .create(manifestResourceId, MimeTypes.TEXT)) { manifestWritableByteChannel .write(CHARSET.encode(JsonFormat.printer().print(proxyManifestBuilder.build()))); } responseObserver.onNext(CommitManifestResponse.newBuilder() .setStagingToken(manifestResourceId.toString()) .build()); responseObserver.onCompleted(); } catch (IOException e) { LOG.error("Unable to commit manifest.", e); responseObserver.onError(e); } } @Override public void close() throws Exception { } /** * Generate a stagingSessionToken compatible with {@link BeamFileSystemArtifactStagingService}. * * @param sessionId Unique sessionId for artifact staging. 
* @param basePath Base path to upload artifacts. * @return Encoded stagingSessionToken. */ public static String generateStagingSessionToken(String sessionId, String basePath) throws Exception { StagingSessionToken stagingSessionToken = new StagingSessionToken(); stagingSessionToken.setSessionId(sessionId); stagingSessionToken.setBasePath(basePath); return encodeStagingSessionToken(stagingSessionToken); } private String encodedFileName(ArtifactMetadata artifactMetadata) { return "artifact_" + Hashing.sha256().hashString(artifactMetadata.getName(), CHARSET) .toString(); } private StagingSessionToken decodeStagingSessionToken(String stagingSessionToken) throws IOException { try { return MAPPER.readValue(stagingSessionToken, StagingSessionToken.class); } catch (IOException e) { try { LOG.error( "Unable to deserialize staging token {}. Expected format {}. Error {}", stagingSessionToken, MAPPER.writeValueAsString(new StagingSessionToken()), e.getMessage()); } catch (JsonProcessingException e1) { LOG.error("Error {} occurred while serializing {}.", e.getMessage(), StagingSessionToken.class); } throw e; } } private static String encodeStagingSessionToken(StagingSessionToken stagingSessionToken) throws Exception { try { return MAPPER.writeValueAsString(stagingSessionToken); } catch (JsonProcessingException e) { LOG.error("Error {} occurred while serializing {}.", e.getMessage(), StagingSessionToken.class); throw e; } } private ResourceId getManifestFileResourceId(String stagingSessionToken) throws IOException { return getJobDirResourceId(stagingSessionToken) .resolve(MANIFEST, StandardResolveOptions.RESOLVE_FILE); } private ResourceId getArtifactDirResourceId(String stagingSessionToken) throws IOException { return getJobDirResourceId(stagingSessionToken) .resolve("artifacts", StandardResolveOptions.RESOLVE_DIRECTORY); } private class PutArtifactStreamObserver implements StreamObserver<PutArtifactRequest> { private final StreamObserver<PutArtifactResponse> outboundObserver; 
private PutArtifactMetadata metadata; private ResourceId artifactId; private WritableByteChannel artifactWritableByteChannel; PutArtifactStreamObserver(StreamObserver<PutArtifactResponse> outboundObserver) { this.outboundObserver = outboundObserver; } @Override public void onNext(PutArtifactRequest putArtifactRequest) { if (metadata == null) { metadata = putArtifactRequest.getMetadata(); try { ResourceId artifactsDirId = getArtifactDirResourceId( putArtifactRequest.getMetadata().getStagingSessionToken()); LOG.info("Going to stage artifact {} in {}.", metadata.getMetadata().getName(), artifactsDirId); artifactId = artifactsDirId .resolve(encodedFileName(metadata.getMetadata()), StandardResolveOptions.RESOLVE_FILE); artifactWritableByteChannel = FileSystems.create(artifactId, MimeTypes.BINARY); } catch (IOException e) { LOG.error("Staging failed for artifact {} for staging token {}", encodedFileName(metadata.getMetadata()), metadata.getStagingSessionToken()); outboundObserver.onError(e); } } else { try { artifactWritableByteChannel .write(putArtifactRequest.getData().getData().asReadOnlyByteBuffer()); } catch (IOException e) { LOG.error("Staging failed for artifact {} to file {}.", metadata.getMetadata().getName(), artifactId); outboundObserver.onError(e); } } } @Override public void onError(Throwable throwable) { LOG.error("Staging artifact failed for " + artifactId, throwable); try { if (artifactWritableByteChannel != null) { artifactWritableByteChannel.close(); } if (artifactId != null) { FileSystems.delete(Collections.singletonList(artifactId), StandardMoveOptions.IGNORE_MISSING_FILES); } } catch (IOException e) { LOG.error("Unable to save artifact {}", artifactId); outboundObserver.onError(e); return; } outboundObserver.onCompleted(); } @Override public void onCompleted() { LOG.info("Staging artifact completed for " + artifactId); if (artifactWritableByteChannel != null) { try { artifactWritableByteChannel.close(); } catch (IOException e) { onError(e); return; } 
} outboundObserver.onCompleted(); } } /** * Serializable StagingSessionToken used to stage files with {@link * BeamFileSystemArtifactStagingService}. */ private static class StagingSessionToken implements Serializable { private String sessionId; private String basePath; /** * Access is public for json conversion. */ public String getSessionId() { return sessionId; } private void setSessionId(String sessionId) { this.sessionId = sessionId; } /** * Access is public for json conversion. */ public String getBasePath() { return basePath; } private void setBasePath(String basePath) { this.basePath = basePath; } } }
class BeamFileSystemArtifactStagingService extends ArtifactStagingServiceImplBase implements FnService { private static final Logger LOG = LoggerFactory.getLogger(BeamFileSystemArtifactStagingService.class); private static final ObjectMapper MAPPER = new ObjectMapper(); private static final Charset CHARSET = StandardCharsets.UTF_8; public static final String MANIFEST = "MANIFEST"; public static final String ARTIFACTS = "artifacts"; @Override public StreamObserver<PutArtifactRequest> putArtifact( StreamObserver<PutArtifactResponse> responseObserver) { return new PutArtifactStreamObserver(responseObserver); } @Override public void commitManifest( CommitManifestRequest request, StreamObserver<CommitManifestResponse> responseObserver) { try { ResourceId manifestResourceId = getManifestFileResourceId(request.getStagingSessionToken()); ResourceId artifactDirResourceId = getArtifactDirResourceId(request.getStagingSessionToken()); ProxyManifest.Builder proxyManifestBuilder = ProxyManifest.newBuilder() .setManifest(request.getManifest()); for (ArtifactMetadata artifactMetadata : request.getManifest().getArtifactList()) { proxyManifestBuilder.addLocation(Location.newBuilder() .setName(artifactMetadata.getName()) .setUri(artifactDirResourceId .resolve(encodedFileName(artifactMetadata), StandardResolveOptions.RESOLVE_FILE) .toString()).build()); } try (WritableByteChannel manifestWritableByteChannel = FileSystems .create(manifestResourceId, MimeTypes.TEXT)) { manifestWritableByteChannel .write(CHARSET.encode(JsonFormat.printer().print(proxyManifestBuilder.build()))); } responseObserver.onNext(CommitManifestResponse.newBuilder() .setRetrievalToken(manifestResourceId.toString()) .build()); responseObserver.onCompleted(); } catch (Exception e) { LOG.error("Unable to commit manifest.", e); responseObserver.onError(e); } } @Override public void close() throws Exception { } /** * Generate a stagingSessionToken compatible with {@link BeamFileSystemArtifactStagingService}. 
* * @param sessionId Unique sessionId for artifact staging. * @param basePath Base path to upload artifacts. * @return Encoded stagingSessionToken. */ public static String generateStagingSessionToken(String sessionId, String basePath) throws Exception { StagingSessionToken stagingSessionToken = new StagingSessionToken(); stagingSessionToken.setSessionId(sessionId); stagingSessionToken.setBasePath(basePath); return encodeStagingSessionToken(stagingSessionToken); } private String encodedFileName(ArtifactMetadata artifactMetadata) { return "artifact_" + Hashing.sha256().hashString(artifactMetadata.getName(), CHARSET) .toString(); } private static StagingSessionToken decodeStagingSessionToken(String stagingSessionToken) throws Exception { try { return MAPPER.readValue(stagingSessionToken, StagingSessionToken.class); } catch (JsonProcessingException e) { LOG.error( "Unable to deserialize staging token {}. Expected format {}. Error {}", stagingSessionToken, "{\"sessionId\": \"sessionId\", \"basePath\": \"basePath\"", e.getMessage()); throw e; } } private static String encodeStagingSessionToken(StagingSessionToken stagingSessionToken) throws Exception { try { return MAPPER.writeValueAsString(stagingSessionToken); } catch (JsonProcessingException e) { LOG.error("Error {} occurred while serializing {}.", e.getMessage(), stagingSessionToken); throw e; } } private ResourceId getManifestFileResourceId(String stagingSessionToken) throws Exception { return getJobDirResourceId(stagingSessionToken) .resolve(MANIFEST, StandardResolveOptions.RESOLVE_FILE); } private ResourceId getArtifactDirResourceId(String stagingSessionToken) throws Exception { return getJobDirResourceId(stagingSessionToken) .resolve(ARTIFACTS, StandardResolveOptions.RESOLVE_DIRECTORY); } private class PutArtifactStreamObserver implements StreamObserver<PutArtifactRequest> { private final StreamObserver<PutArtifactResponse> outboundObserver; private PutArtifactMetadata metadata; private ResourceId artifactId; 
private WritableByteChannel artifactWritableByteChannel; PutArtifactStreamObserver(StreamObserver<PutArtifactResponse> outboundObserver) { this.outboundObserver = outboundObserver; } @Override public void onNext(PutArtifactRequest putArtifactRequest) { if (metadata == null) { checkNotNull(putArtifactRequest); checkNotNull(putArtifactRequest.getMetadata()); metadata = putArtifactRequest.getMetadata(); try { ResourceId artifactsDirId = getArtifactDirResourceId( putArtifactRequest.getMetadata().getStagingSessionToken()); artifactId = artifactsDirId.resolve(encodedFileName(metadata.getMetadata()), StandardResolveOptions.RESOLVE_FILE); LOG.info("Going to stage artifact {} to {}.", metadata.getMetadata().getName(), artifactId); artifactWritableByteChannel = FileSystems.create(artifactId, MimeTypes.BINARY); } catch (Exception e) { LOG.error("Staging failed for artifact {} for staging token {}", encodedFileName(metadata.getMetadata()), metadata.getStagingSessionToken()); outboundObserver.onError(e); } } else { try { artifactWritableByteChannel .write(putArtifactRequest.getData().getData().asReadOnlyByteBuffer()); } catch (IOException e) { LOG.error("Staging failed for artifact {} to file {}.", metadata.getMetadata().getName(), artifactId); outboundObserver.onError(e); } } } @Override public void onError(Throwable throwable) { LOG.error("Staging artifact failed for " + artifactId, throwable); try { if (artifactWritableByteChannel != null) { artifactWritableByteChannel.close(); } if (artifactId != null) { FileSystems.delete(Collections.singletonList(artifactId), StandardMoveOptions.IGNORE_MISSING_FILES); } } catch (IOException e) { LOG.error("Unable to save artifact {}", artifactId); outboundObserver.onError(e); return; } outboundObserver.onCompleted(); } @Override public void onCompleted() { LOG.info("Staging artifact completed for " + artifactId); if (artifactWritableByteChannel != null) { try { artifactWritableByteChannel.close(); } catch (IOException e) { onError(e); 
return; } } outboundObserver.onCompleted(); } } /** * Serializable StagingSessionToken used to stage files with {@link * BeamFileSystemArtifactStagingService}. */ private static class StagingSessionToken implements Serializable { private String sessionId; private String basePath; /** * Access is public for json conversion. */ public String getSessionId() { return sessionId; } private void setSessionId(String sessionId) { this.sessionId = sessionId; } /** * Access is public for json conversion. */ public String getBasePath() { return basePath; } private void setBasePath(String basePath) { this.basePath = basePath; } @Override public String toString() { return "StagingSessionToken{" + "sessionId='" + sessionId + "', " + "basePath='" + basePath + "'" + "}"; } } }
Nit: spelling/grammar ```suggestion // to Calc nodes before merging with other Project/Filter/Calc nodes. Thus we should not add ```
private static RuleSet[] modifyRuleSetsForZetaSql(RuleSet[] ruleSets) { RuleSet[] ret = new RuleSet[ruleSets.length]; for (int i = 0; i < ruleSets.length; i++) { ImmutableList.Builder<RelOptRule> bd = ImmutableList.builder(); for (RelOptRule rule : ruleSets[i]) { if (rule instanceof JoinCommuteRule) { continue; } else if (rule instanceof FilterCalcMergeRule || rule instanceof ProjectCalcMergeRule) { continue; } else if (rule instanceof BeamCalcRule) { bd.add(BeamZetaSqlCalcRule.INSTANCE); } else { bd.add(rule); } } ret[i] = RuleSets.ofList(bd.build()); } return ret; }
private static RuleSet[] modifyRuleSetsForZetaSql(RuleSet[] ruleSets) { RuleSet[] ret = new RuleSet[ruleSets.length]; for (int i = 0; i < ruleSets.length; i++) { ImmutableList.Builder<RelOptRule> bd = ImmutableList.builder(); for (RelOptRule rule : ruleSets[i]) { if (rule instanceof JoinCommuteRule) { continue; } else if (rule instanceof FilterCalcMergeRule || rule instanceof ProjectCalcMergeRule) { continue; } else if (rule instanceof BeamCalcRule) { bd.add(BeamZetaSqlCalcRule.INSTANCE); } else { bd.add(rule); } } ret[i] = RuleSets.ofList(bd.build()); } return ret; }
class ZetaSQLQueryPlanner implements QueryPlanner { private final ZetaSQLPlannerImpl plannerImpl; public ZetaSQLQueryPlanner(FrameworkConfig config) { plannerImpl = new ZetaSQLPlannerImpl(config); } /** * Called by {@link org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv}.instantiatePlanner() * reflectively. */ public ZetaSQLQueryPlanner(JdbcConnection jdbcConnection, RuleSet[] ruleSets) { plannerImpl = new ZetaSQLPlannerImpl(defaultConfig(jdbcConnection, modifyRuleSetsForZetaSql(ruleSets))); setDefaultTimezone( jdbcConnection .getPipelineOptions() .as(BeamSqlPipelineOptions.class) .getZetaSqlDefaultTimezone()); } public static RuleSet[] getZetaSqlRuleSets() { return modifyRuleSetsForZetaSql(BeamRuleSets.getRuleSets()); } public String getDefaultTimezone() { return plannerImpl.getDefaultTimezone(); } public void setDefaultTimezone(String timezone) { plannerImpl.setDefaultTimezone(timezone); } public static LanguageOptions getLanguageOptions() { return ZetaSQLPlannerImpl.getLanguageOptions(); } public BeamRelNode convertToBeamRel(String sqlStatement) { return convertToBeamRel(sqlStatement, QueryParameters.ofNone()); } public BeamRelNode convertToBeamRel(String sqlStatement, Map<String, Value> queryParams) throws ParseException, SqlConversionException { return convertToBeamRel(sqlStatement, QueryParameters.ofNamed(queryParams)); } public BeamRelNode convertToBeamRel(String sqlStatement, List<Value> queryParams) throws ParseException, SqlConversionException { return convertToBeamRel(sqlStatement, QueryParameters.ofPositional(queryParams)); } @Override public BeamRelNode convertToBeamRel(String sqlStatement, QueryParameters queryParameters) throws ParseException, SqlConversionException { return convertToBeamRelInternal(sqlStatement, queryParameters); } @Override public SqlNode parse(String sqlStatement) throws ParseException { throw new UnsupportedOperationException( String.format( "%s.parse(String) is not implemented and should need be called", 
this.getClass().getCanonicalName())); } private BeamRelNode convertToBeamRelInternal(String sql, QueryParameters queryParams) { RelRoot root = plannerImpl.rel(sql, queryParams); RelTraitSet desiredTraits = root.rel .getTraitSet() .replace(BeamLogicalConvention.INSTANCE) .replace(root.collation) .simplify(); root.rel .getCluster() .setMetadataProvider( ChainedRelMetadataProvider.of( org.apache.beam.vendor.calcite.v1_20_0.com.google.common.collect.ImmutableList.of( NonCumulativeCostImpl.SOURCE, RelMdNodeStats.SOURCE, root.rel.getCluster().getMetadataProvider()))); RelMetadataQuery.THREAD_PROVIDERS.set( JaninoRelMetadataProvider.of(root.rel.getCluster().getMetadataProvider())); root.rel.getCluster().invalidateMetadataQuery(); return (BeamRelNode) plannerImpl.transform(0, desiredTraits, root.rel); } private static FrameworkConfig defaultConfig(JdbcConnection connection, RuleSet[] ruleSets) { final CalciteConnectionConfig config = connection.config(); final SqlParser.ConfigBuilder parserConfig = SqlParser.configBuilder() .setQuotedCasing(config.quotedCasing()) .setUnquotedCasing(config.unquotedCasing()) .setQuoting(config.quoting()) .setConformance(config.conformance()) .setCaseSensitive(config.caseSensitive()); final SqlParserImplFactory parserFactory = config.parserFactory(SqlParserImplFactory.class, null); if (parserFactory != null) { parserConfig.setParserFactory(parserFactory); } final SchemaPlus schema = connection.getRootSchema(); final SchemaPlus defaultSchema = connection.getCurrentSchemaPlus(); final ImmutableList<RelTraitDef> traitDefs = ImmutableList.of(ConventionTraitDef.INSTANCE); final CalciteCatalogReader catalogReader = new CalciteCatalogReader( CalciteSchema.from(schema), ImmutableList.of(defaultSchema.getName()), connection.getTypeFactory(), connection.config()); final SqlOperatorTable opTab0 = connection.config().fun(SqlOperatorTable.class, SqlStdOperatorTable.instance()); return Frameworks.newConfigBuilder() .parserConfig(parserConfig.build()) 
.defaultSchema(defaultSchema) .traitDefs(traitDefs) .ruleSets(ruleSets) .costFactory(BeamCostModel.FACTORY) .typeSystem(connection.getTypeFactory().getTypeSystem()) .operatorTable(ChainedSqlOperatorTable.of(opTab0, catalogReader)) .build(); } }
class ZetaSQLQueryPlanner implements QueryPlanner { private final ZetaSQLPlannerImpl plannerImpl; public ZetaSQLQueryPlanner(FrameworkConfig config) { plannerImpl = new ZetaSQLPlannerImpl(config); } /** * Called by {@link org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv}.instantiatePlanner() * reflectively. */ public ZetaSQLQueryPlanner(JdbcConnection jdbcConnection, RuleSet[] ruleSets) { plannerImpl = new ZetaSQLPlannerImpl(defaultConfig(jdbcConnection, modifyRuleSetsForZetaSql(ruleSets))); setDefaultTimezone( jdbcConnection .getPipelineOptions() .as(BeamSqlPipelineOptions.class) .getZetaSqlDefaultTimezone()); } public static RuleSet[] getZetaSqlRuleSets() { return modifyRuleSetsForZetaSql(BeamRuleSets.getRuleSets()); } public String getDefaultTimezone() { return plannerImpl.getDefaultTimezone(); } public void setDefaultTimezone(String timezone) { plannerImpl.setDefaultTimezone(timezone); } public static LanguageOptions getLanguageOptions() { return ZetaSQLPlannerImpl.getLanguageOptions(); } public BeamRelNode convertToBeamRel(String sqlStatement) { return convertToBeamRel(sqlStatement, QueryParameters.ofNone()); } public BeamRelNode convertToBeamRel(String sqlStatement, Map<String, Value> queryParams) throws ParseException, SqlConversionException { return convertToBeamRel(sqlStatement, QueryParameters.ofNamed(queryParams)); } public BeamRelNode convertToBeamRel(String sqlStatement, List<Value> queryParams) throws ParseException, SqlConversionException { return convertToBeamRel(sqlStatement, QueryParameters.ofPositional(queryParams)); } @Override public BeamRelNode convertToBeamRel(String sqlStatement, QueryParameters queryParameters) throws ParseException, SqlConversionException { return convertToBeamRelInternal(sqlStatement, queryParameters); } @Override public SqlNode parse(String sqlStatement) throws ParseException { throw new UnsupportedOperationException( String.format( "%s.parse(String) is not implemented and should need be called", 
this.getClass().getCanonicalName())); } private BeamRelNode convertToBeamRelInternal(String sql, QueryParameters queryParams) { RelRoot root = plannerImpl.rel(sql, queryParams); RelTraitSet desiredTraits = root.rel .getTraitSet() .replace(BeamLogicalConvention.INSTANCE) .replace(root.collation) .simplify(); root.rel .getCluster() .setMetadataProvider( ChainedRelMetadataProvider.of( org.apache.beam.vendor.calcite.v1_20_0.com.google.common.collect.ImmutableList.of( NonCumulativeCostImpl.SOURCE, RelMdNodeStats.SOURCE, root.rel.getCluster().getMetadataProvider()))); RelMetadataQuery.THREAD_PROVIDERS.set( JaninoRelMetadataProvider.of(root.rel.getCluster().getMetadataProvider())); root.rel.getCluster().invalidateMetadataQuery(); return (BeamRelNode) plannerImpl.transform(0, desiredTraits, root.rel); } private static FrameworkConfig defaultConfig(JdbcConnection connection, RuleSet[] ruleSets) { final CalciteConnectionConfig config = connection.config(); final SqlParser.ConfigBuilder parserConfig = SqlParser.configBuilder() .setQuotedCasing(config.quotedCasing()) .setUnquotedCasing(config.unquotedCasing()) .setQuoting(config.quoting()) .setConformance(config.conformance()) .setCaseSensitive(config.caseSensitive()); final SqlParserImplFactory parserFactory = config.parserFactory(SqlParserImplFactory.class, null); if (parserFactory != null) { parserConfig.setParserFactory(parserFactory); } final SchemaPlus schema = connection.getRootSchema(); final SchemaPlus defaultSchema = connection.getCurrentSchemaPlus(); final ImmutableList<RelTraitDef> traitDefs = ImmutableList.of(ConventionTraitDef.INSTANCE); final CalciteCatalogReader catalogReader = new CalciteCatalogReader( CalciteSchema.from(schema), ImmutableList.of(defaultSchema.getName()), connection.getTypeFactory(), connection.config()); final SqlOperatorTable opTab0 = connection.config().fun(SqlOperatorTable.class, SqlStdOperatorTable.instance()); return Frameworks.newConfigBuilder() .parserConfig(parserConfig.build()) 
.defaultSchema(defaultSchema) .traitDefs(traitDefs) .ruleSets(ruleSets) .costFactory(BeamCostModel.FACTORY) .typeSystem(connection.getTypeFactory().getTypeSystem()) .operatorTable(ChainedSqlOperatorTable.of(opTab0, catalogReader)) .build(); } }
minor note: I think you meant to delete these 3 commented lines.
public void testFlattenWithDuplicateInputCollectionProducesMultipleOutputs() throws Exception { String pTransformId = "pTransformId"; String mainOutputId = "101"; RunnerApi.FunctionSpec functionSpec = RunnerApi.FunctionSpec.newBuilder() .setUrn(PTransformTranslation.FLATTEN_TRANSFORM_URN) .build(); RunnerApi.PTransform pTransform = RunnerApi.PTransform.newBuilder() .setSpec(functionSpec) .putInputs("inputA", "inputATarget") .putInputs("inputAAgain", "inputATarget") .putOutputs(mainOutputId, "mainOutputTarget") .build(); List<WindowedValue<String>> mainOutputValues = new ArrayList<>(); ListMultimap<String, FnDataReceiver<WindowedValue<?>>> consumers = ArrayListMultimap.create(); consumers.put( "mainOutputTarget", (FnDataReceiver) (FnDataReceiver<WindowedValue<String>>) mainOutputValues::add); new FlattenRunner.Factory<>() .createRunnerForPTransform( PipelineOptionsFactory.create(), null /* beamFnDataClient */, null /* beamFnStateClient */, pTransformId, pTransform, Suppliers.ofInstance("57L")::get, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), consumers, null /* addStartFunction */, null, /* addFinishFunction */ null /* splitListener */); mainOutputValues.clear(); assertThat(consumers.keySet(), containsInAnyOrder("inputATarget", "mainOutputTarget")); assertThat(consumers.get("inputATarget"), hasSize(2)); FnDataReceiver<WindowedValue<?>> input = MultiplexingFnDataReceiver.forConsumers(consumers.get("inputATarget")); input.accept(WindowedValue.valueInGlobalWindow("A1")); input.accept(WindowedValue.valueInGlobalWindow("A2")); assertThat( mainOutputValues, containsInAnyOrder( valueInGlobalWindow("A1"), valueInGlobalWindow("A1"), valueInGlobalWindow("A2"), valueInGlobalWindow("A2"))); }
public void testFlattenWithDuplicateInputCollectionProducesMultipleOutputs() throws Exception { String pTransformId = "pTransformId"; String mainOutputId = "101"; RunnerApi.FunctionSpec functionSpec = RunnerApi.FunctionSpec.newBuilder() .setUrn(PTransformTranslation.FLATTEN_TRANSFORM_URN) .build(); RunnerApi.PTransform pTransform = RunnerApi.PTransform.newBuilder() .setSpec(functionSpec) .putInputs("inputA", "inputATarget") .putInputs("inputAAgain", "inputATarget") .putOutputs(mainOutputId, "mainOutputTarget") .build(); List<WindowedValue<String>> mainOutputValues = new ArrayList<>(); ListMultimap<String, FnDataReceiver<WindowedValue<?>>> consumers = ArrayListMultimap.create(); consumers.put( "mainOutputTarget", (FnDataReceiver) (FnDataReceiver<WindowedValue<String>>) mainOutputValues::add); new FlattenRunner.Factory<>() .createRunnerForPTransform( PipelineOptionsFactory.create(), null /* beamFnDataClient */, null /* beamFnStateClient */, pTransformId, pTransform, Suppliers.ofInstance("57L")::get, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), consumers, null /* addStartFunction */, null, /* addFinishFunction */ null /* splitListener */); mainOutputValues.clear(); assertThat(consumers.keySet(), containsInAnyOrder("inputATarget", "mainOutputTarget")); assertThat(consumers.get("inputATarget"), hasSize(2)); FnDataReceiver<WindowedValue<?>> input = MultiplexingFnDataReceiver.forConsumers(consumers.get("inputATarget")); input.accept(WindowedValue.valueInGlobalWindow("A1")); input.accept(WindowedValue.valueInGlobalWindow("A2")); assertThat( mainOutputValues, containsInAnyOrder( valueInGlobalWindow("A1"), valueInGlobalWindow("A1"), valueInGlobalWindow("A2"), valueInGlobalWindow("A2"))); }
class FlattenRunnerTest { /** * Create a Flatten that has 4 inputs (inputATarget1, inputATarget2, inputBTarget, inputCTarget) * and one output (mainOutput). Validate that inputs are flattened together and directed to the * output. */ @Test public void testCreatingAndProcessingDoFlatten() throws Exception { String pTransformId = "pTransformId"; String mainOutputId = "101"; RunnerApi.FunctionSpec functionSpec = RunnerApi.FunctionSpec.newBuilder() .setUrn(PTransformTranslation.FLATTEN_TRANSFORM_URN) .build(); RunnerApi.PTransform pTransform = RunnerApi.PTransform.newBuilder() .setSpec(functionSpec) .putInputs("inputA", "inputATarget") .putInputs("inputB", "inputBTarget") .putInputs("inputC", "inputCTarget") .putOutputs(mainOutputId, "mainOutputTarget") .build(); List<WindowedValue<String>> mainOutputValues = new ArrayList<>(); Multimap<String, FnDataReceiver<WindowedValue<?>>> consumers = HashMultimap.create(); consumers.put("mainOutputTarget", (FnDataReceiver) (FnDataReceiver<WindowedValue<String>>) mainOutputValues::add); new FlattenRunner.Factory<>().createRunnerForPTransform( PipelineOptionsFactory.create(), null /* beamFnDataClient */, null /* beamFnStateClient */, pTransformId, pTransform, Suppliers.ofInstance("57L")::get, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), consumers, null /* addStartFunction */, null, /* addFinishFunction */ null /* splitListener */); mainOutputValues.clear(); assertThat(consumers.keySet(), containsInAnyOrder( "inputATarget", "inputBTarget", "inputCTarget", "mainOutputTarget")); Iterables.getOnlyElement(consumers.get("inputATarget")).accept(valueInGlobalWindow("A1")); Iterables.getOnlyElement(consumers.get("inputATarget")).accept(valueInGlobalWindow("A2")); Iterables.getOnlyElement(consumers.get("inputBTarget")).accept(valueInGlobalWindow("B")); Iterables.getOnlyElement(consumers.get("inputCTarget")).accept(valueInGlobalWindow("C")); assertThat( mainOutputValues, contains( valueInGlobalWindow("A1"), 
valueInGlobalWindow("A2"), valueInGlobalWindow("B"), valueInGlobalWindow("C"))); mainOutputValues.clear(); } /** * Create a Flatten that consumes data from the same PCollection duplicated through two outputs * and validates that inputs are flattened together and directed to the output. */ @Test }
class FlattenRunnerTest { /** * Create a Flatten that has 4 inputs (inputATarget1, inputATarget2, inputBTarget, inputCTarget) * and one output (mainOutput). Validate that inputs are flattened together and directed to the * output. */ @Test public void testCreatingAndProcessingDoFlatten() throws Exception { String pTransformId = "pTransformId"; String mainOutputId = "101"; RunnerApi.FunctionSpec functionSpec = RunnerApi.FunctionSpec.newBuilder() .setUrn(PTransformTranslation.FLATTEN_TRANSFORM_URN) .build(); RunnerApi.PTransform pTransform = RunnerApi.PTransform.newBuilder() .setSpec(functionSpec) .putInputs("inputA", "inputATarget") .putInputs("inputB", "inputBTarget") .putInputs("inputC", "inputCTarget") .putOutputs(mainOutputId, "mainOutputTarget") .build(); List<WindowedValue<String>> mainOutputValues = new ArrayList<>(); Multimap<String, FnDataReceiver<WindowedValue<?>>> consumers = HashMultimap.create(); consumers.put("mainOutputTarget", (FnDataReceiver) (FnDataReceiver<WindowedValue<String>>) mainOutputValues::add); new FlattenRunner.Factory<>().createRunnerForPTransform( PipelineOptionsFactory.create(), null /* beamFnDataClient */, null /* beamFnStateClient */, pTransformId, pTransform, Suppliers.ofInstance("57L")::get, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), consumers, null /* addStartFunction */, null, /* addFinishFunction */ null /* splitListener */); mainOutputValues.clear(); assertThat(consumers.keySet(), containsInAnyOrder( "inputATarget", "inputBTarget", "inputCTarget", "mainOutputTarget")); Iterables.getOnlyElement(consumers.get("inputATarget")).accept(valueInGlobalWindow("A1")); Iterables.getOnlyElement(consumers.get("inputATarget")).accept(valueInGlobalWindow("A2")); Iterables.getOnlyElement(consumers.get("inputBTarget")).accept(valueInGlobalWindow("B")); Iterables.getOnlyElement(consumers.get("inputCTarget")).accept(valueInGlobalWindow("C")); assertThat( mainOutputValues, contains( valueInGlobalWindow("A1"), 
valueInGlobalWindow("A2"), valueInGlobalWindow("B"), valueInGlobalWindow("C"))); mainOutputValues.clear(); } /** * Create a Flatten that consumes data from the same PCollection duplicated through two outputs * and validates that inputs are flattened together and directed to the output. */ @Test }
Something like this ````java @ProcessElement public void processElement(@Element T input, ProcessContext context) { try { publishMessage(input); } catch (JMSException | JmsIOException | IOException | InterruptedException exception) { LOG.error("Error while publishing the message", exception); context.output(this.failedMessagesTags, input); if (exception instanceof InterruptedException) { Thread.currentThread().interrupt(); } } } private void publishMessage(T input) throws JMSException, JmsIOException, IOException, InterruptedException { Sleeper sleeper = Sleeper.DEFAULT; BackOff backoff = checkStateNotNull(retryBackOff).backoff(); while (true) { try { this.jmsConnection.publishMessage(input); break; } catch (JMSException | JmsIOException exception) { if (!BackOffUtils.next(sleeper, backoff)) { throw exception; } else { publicationRetries.inc(); } } } } ````
public void processElement(@Element T input, ProcessContext context) { try { publishMessage(input, context); } catch (IOException | InterruptedException exception) { LOG.error("Error while publishing the message", exception); context.output(this.failedMessagesTags, input); if (exception instanceof InterruptedException) { Thread.currentThread().interrupt(); } } }
LOG.error("Error while publishing the message", exception);
public void processElement(@Element T input, ProcessContext context) { try { publishMessage(input); } catch (JMSException | JmsIOException | IOException | InterruptedException exception) { LOG.error("Error while publishing the message", exception); context.output(this.failedMessagesTags, input); if (exception instanceof InterruptedException) { Thread.currentThread().interrupt(); } } }
class JmsIOProducerFn<T> extends DoFn<T, T> { private final @Initialized JmsConnection<T> jmsConnection; private final TupleTag<T> failedMessagesTag; JmsIOProducerFn(JmsIO.Write<T> spec, TupleTag<T> failedMessagesTag) { this.failedMessagesTag = failedMessagesTag; this.jmsConnection = new JmsConnection<>(spec); } @StartBundle public void startBundle() throws JMSException { this.jmsConnection.start(); } @ProcessElement @FinishBundle public void finishBundle() throws JMSException { this.jmsConnection.close(); } @Teardown public void tearDown() throws JMSException { this.jmsConnection.close(); } }
class JmsIOProducerFn<T> extends DoFn<T, T> { private transient @Initialized FluentBackoff retryBackOff; private final JmsIO.Write<T> spec; private final TupleTag<T> failedMessagesTags; private final @Initialized JmsConnection<T> jmsConnection; private final Counter publicationRetries = Metrics.counter(JMS_IO_PRODUCER_METRIC_NAME, PUBLICATION_RETRIES_METRIC_NAME); JmsIOProducerFn(JmsIO.Write<T> spec, TupleTag<T> failedMessagesTags) { this.spec = spec; this.failedMessagesTags = failedMessagesTags; this.jmsConnection = new JmsConnection<>(spec); } @Setup public void setup() { RetryConfiguration retryConfiguration = checkStateNotNull(spec.getRetryConfiguration()); retryBackOff = FluentBackoff.DEFAULT .withInitialBackoff(checkStateNotNull(retryConfiguration.getInitialDuration())) .withMaxCumulativeBackoff(checkStateNotNull(retryConfiguration.getMaxDuration())) .withMaxRetries(retryConfiguration.getMaxAttempts()); } @StartBundle public void startBundle() throws JMSException { this.jmsConnection.start(); } @ProcessElement private void publishMessage(T input) throws JMSException, JmsIOException, IOException, InterruptedException { Sleeper sleeper = Sleeper.DEFAULT; BackOff backoff = checkStateNotNull(retryBackOff).backoff(); while (true) { try { this.jmsConnection.publishMessage(input); break; } catch (JMSException | JmsIOException exception) { if (!BackOffUtils.next(sleeper, backoff)) { throw exception; } else { publicationRetries.inc(); } } } } @FinishBundle public void finishBundle() throws JMSException { this.jmsConnection.close(); } @Teardown public void tearDown() throws JMSException { this.jmsConnection.close(); } }
Java `HashMap` and SQL MAP allow null keys and null values. I will add a test for this in `testSerDeMultiRowsWithNullValues`.
/**
 * Creates a runtime converter for MAP / MULTISET types.
 *
 * <p>JSON objects can only carry string keys, so the map's key type must belong
 * to the CHARACTER_STRING family; anything else is rejected eagerly.
 */
private DeserializationRuntimeConverter createMapConverter(MapType mapType) {
    final LogicalType keyType = mapType.getKeyType();
    if (!LogicalTypeChecks.hasFamily(keyType, LogicalTypeFamily.CHARACTER_STRING)) {
        throw new UnsupportedOperationException(
            "JSON format doesn't support non-string as key type of map. "
                + "The map type is: " + mapType.asSummaryString());
    }
    final DeserializationRuntimeConverter convertKey = createConverter(keyType);
    final DeserializationRuntimeConverter convertValue = createConverter(mapType.getValueType());
    return jsonNode -> {
        final Map<Object, Object> entries = new HashMap<>();
        for (Iterator<Map.Entry<String, JsonNode>> it = jsonNode.fields(); it.hasNext(); ) {
            final Map.Entry<String, JsonNode> field = it.next();
            // Object keys arrive as plain strings; wrap each in a TextNode so the
            // key converter sees the same JsonNode interface as all other converters.
            entries.put(
                convertKey.convert(TextNode.valueOf(field.getKey())),
                convertValue.convert(field.getValue()));
        }
        return new GenericMapData(entries);
    };
}
result.put(key, value);
/**
 * Creates a runtime converter for MAP / MULTISET types.
 *
 * <p>JSON objects only have string keys, so the map's key type must belong to the
 * CHARACTER_STRING family. Converted keys and values may both be null; HashMap
 * (like SQL MAP) permits null keys and null values, so such entries are retained.
 */
private DeserializationRuntimeConverter createMapConverter(MapType mapType) {
    LogicalType keyType = mapType.getKeyType();
    if (!LogicalTypeChecks.hasFamily(keyType, LogicalTypeFamily.CHARACTER_STRING)) {
        throw new UnsupportedOperationException(
            "JSON format doesn't support non-string as key type of map. "
                + "The map type is: " + mapType.asSummaryString());
    }
    final DeserializationRuntimeConverter keyConverter = createConverter(keyType);
    final DeserializationRuntimeConverter valueConverter = createConverter(mapType.getValueType());
    return jsonNode -> {
        Iterator<Map.Entry<String, JsonNode>> fields = jsonNode.fields();
        Map<Object, Object> result = new HashMap<>();
        while (fields.hasNext()) {
            Map.Entry<String, JsonNode> entry = fields.next();
            // Keys are raw strings; wrap in a TextNode so the key converter can
            // process them like any other JsonNode.
            Object key = keyConverter.convert(TextNode.valueOf(entry.getKey()));
            Object value = valueConverter.convert(entry.getValue());
            result.put(key, value);
        }
        return new GenericMapData(result);
    };
}
/** Builder for {@link JsonRowDataDeserializationSchema}. */
class Builder {

    private RowType rowType;
    private TypeInformation<RowData> resultTypeInfo;
    private boolean failOnMissingField = false;
    private boolean ignoreParseErrors = false;

    /**
     * Configures with the {@link RowType} schema information.
     */
    public Builder schema(RowType rowType) {
        this.rowType = rowType;
        return this;
    }

    /**
     * Configures the {@link TypeInformation} of the produced {@link RowData}.
     */
    public Builder resultTypeInfo(TypeInformation<RowData> resultTypeInfo) {
        this.resultTypeInfo = resultTypeInfo;
        return this;
    }

    /**
     * Configures to fail if a JSON field is missing.
     *
     * <p>By default, a missing field is ignored and the field is set to null.
     */
    public Builder failOnMissingField() {
        this.failOnMissingField = true;
        return this;
    }

    /**
     * Configures to skip rows/fields whose JSON cannot be parsed instead of failing.
     *
     * <p>By default, an exception is thrown when parsing JSON fails.
     */
    public Builder ignoreParseErrors() {
        this.ignoreParseErrors = true;
        return this;
    }

    /**
     * Creates the instance of {@link JsonRowDataDeserializationSchema}.
     */
    public JsonRowDataDeserializationSchema build() {
        return new JsonRowDataDeserializationSchema(
            rowType, resultTypeInfo, failOnMissingField, ignoreParseErrors);
    }
}
class JsonRowDataDeserializationSchema implements DeserializationSchema<RowData> { private static final long serialVersionUID = 1L; /** Flag indicating whether to fail if a field is missing. */ private final boolean failOnMissingField; /** Flag indicating whether to ignore invalid fields/rows (default: throw an exception). */ private final boolean ignoreParseErrors; /** TypeInformation of the produced {@link RowData}. **/ private final TypeInformation<RowData> resultTypeInfo; /** * Runtime converter that converts {@link JsonNode}s into * objects of Flink SQL internal data structures. **/ private final DeserializationRuntimeConverter runtimeConverter; /** Object mapper for parsing the JSON. */ private final ObjectMapper objectMapper = new ObjectMapper(); public JsonRowDataDeserializationSchema( RowType rowType, TypeInformation<RowData> resultTypeInfo, boolean failOnMissingField, boolean ignoreParseErrors) { if (ignoreParseErrors && failOnMissingField) { throw new IllegalArgumentException( "JSON format doesn't support failOnMissingField and ignoreParseErrors are both enabled."); } this.resultTypeInfo = checkNotNull(resultTypeInfo); this.failOnMissingField = failOnMissingField; this.ignoreParseErrors = ignoreParseErrors; this.runtimeConverter = createRowConverter(checkNotNull(rowType)); } @Override public RowData deserialize(byte[] message) throws IOException { try { final JsonNode root = objectMapper.readTree(message); return (RowData) runtimeConverter.convert(root); } catch (Throwable t) { if (ignoreParseErrors) { return null; } throw new IOException(format("Failed to deserialize JSON '%s'.", new String(message)), t); } } @Override public boolean isEndOfStream(RowData nextElement) { return false; } @Override public TypeInformation<RowData> getProducedType() { return resultTypeInfo; } /** * Runtime converter that converts {@link JsonNode}s into objects of Flink Table & SQL * internal data structures. 
*/ @FunctionalInterface private interface DeserializationRuntimeConverter extends Serializable { Object convert(JsonNode jsonNode); } /** * Creates a runtime converter which is null safe. */ private DeserializationRuntimeConverter createConverter(LogicalType type) { return wrapIntoNullableConverter(createNotNullConverter(type)); } /** * Creates a runtime converter which assuming input object is not null. */ private DeserializationRuntimeConverter createNotNullConverter(LogicalType type) { switch (type.getTypeRoot()) { case NULL: return jsonNode -> null; case BOOLEAN: return this::convertToBoolean; case TINYINT: return jsonNode -> Byte.parseByte(jsonNode.asText().trim()); case SMALLINT: return jsonNode -> Short.parseShort(jsonNode.asText().trim()); case INTEGER: case INTERVAL_YEAR_MONTH: return this::convertToInt; case BIGINT: case INTERVAL_DAY_TIME: return this::convertToLong; case DATE: return this::convertToDate; case TIME_WITHOUT_TIME_ZONE: return this::convertToTime; case TIMESTAMP_WITH_TIME_ZONE: case TIMESTAMP_WITHOUT_TIME_ZONE: return this::convertToTimestamp; case FLOAT: return this::convertToFloat; case DOUBLE: return this::convertToDouble; case CHAR: case VARCHAR: return this::convertToString; case BINARY: case VARBINARY: return this::convertToBytes; case DECIMAL: return createDecimalConverter((DecimalType) type); case ARRAY: return createArrayConverter((ArrayType) type); case MAP: case MULTISET: return createMapConverter((MapType) type); case ROW: return createRowConverter((RowType) type); case RAW: default: throw new UnsupportedOperationException("Unsupported type: " + type); } } private boolean convertToBoolean(JsonNode jsonNode) { if (jsonNode.isBoolean()) { return jsonNode.asBoolean(); } else { return Boolean.parseBoolean(jsonNode.asText().trim()); } } private int convertToInt(JsonNode jsonNode) { if (jsonNode.canConvertToInt()) { return jsonNode.asInt(); } else { return Integer.parseInt(jsonNode.asText().trim()); } } private long 
convertToLong(JsonNode jsonNode) { if (jsonNode.canConvertToLong()) { return jsonNode.asLong(); } else { return Long.parseLong(jsonNode.asText().trim()); } } private double convertToDouble(JsonNode jsonNode) { if (jsonNode.isDouble()) { return jsonNode.asDouble(); } else { return Double.parseDouble(jsonNode.asText().trim()); } } private float convertToFloat(JsonNode jsonNode) { if (jsonNode.isDouble()) { return (float) jsonNode.asDouble(); } else { return Float.parseFloat(jsonNode.asText().trim()); } } private int convertToDate(JsonNode jsonNode) { LocalDate date = ISO_LOCAL_DATE.parse(jsonNode.asText()).query(TemporalQueries.localDate()); return (int) date.toEpochDay(); } private int convertToTime(JsonNode jsonNode) { TemporalAccessor parsedTime = RFC3339_TIME_FORMAT.parse(jsonNode.asText()); ZoneOffset zoneOffset = parsedTime.query(TemporalQueries.offset()); LocalTime localTime = parsedTime.query(TemporalQueries.localTime()); if (zoneOffset != null && zoneOffset.getTotalSeconds() != 0 || localTime.getNano() != 0) { throw new JsonParseException( "Invalid time format. Only a time in UTC timezone without milliseconds is supported yet."); } return localTime.toSecondOfDay() * 1000; } private TimestampData convertToTimestamp(JsonNode jsonNode) { TemporalAccessor parsedTimestamp = RFC3339_TIMESTAMP_FORMAT.parse(jsonNode.asText()); ZoneOffset zoneOffset = parsedTimestamp.query(TemporalQueries.offset()); if (zoneOffset != null && zoneOffset.getTotalSeconds() != 0) { throw new JsonParseException( "Invalid timestamp format. Only a timestamp in UTC timezone is supported yet. 
" + "Format: yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); } LocalTime localTime = parsedTimestamp.query(TemporalQueries.localTime()); LocalDate localDate = parsedTimestamp.query(TemporalQueries.localDate()); return TimestampData.fromLocalDateTime(LocalDateTime.of(localDate, localTime)); } private StringData convertToString(JsonNode jsonNode) { return StringData.fromString(jsonNode.asText()); } private byte[] convertToBytes(JsonNode jsonNode) { try { return jsonNode.binaryValue(); } catch (IOException e) { throw new JsonParseException("Unable to deserialize byte array.", e); } } private DeserializationRuntimeConverter createDecimalConverter(DecimalType decimalType) { final int precision = decimalType.getPrecision(); final int scale = decimalType.getScale(); return jsonNode -> { BigDecimal bigDecimal; if (jsonNode.isBigDecimal()) { bigDecimal = jsonNode.decimalValue(); } else { bigDecimal = new BigDecimal(jsonNode.asText()); } return DecimalData.fromBigDecimal(bigDecimal, precision, scale); }; } private DeserializationRuntimeConverter createArrayConverter(ArrayType arrayType) { DeserializationRuntimeConverter elementConverter = createConverter(arrayType.getElementType()); final Class<?> elementClass = LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType()); return jsonNode -> { final ArrayNode node = (ArrayNode) jsonNode; final Object[] array = (Object[]) Array.newInstance(elementClass, node.size()); for (int i = 0; i < node.size(); i++) { final JsonNode innerNode = node.get(i); array[i] = elementConverter.convert(innerNode); } return new GenericArrayData(array); }; } private DeserializationRuntimeConverter createRowConverter(RowType rowType) { final DeserializationRuntimeConverter[] fieldConverters = rowType.getFields().stream() .map(RowType.RowField::getType) .map(this::createConverter) .toArray(DeserializationRuntimeConverter[]::new); final String[] fieldNames = rowType.getFieldNames().toArray(new String[0]); return jsonNode -> { ObjectNode node = (ObjectNode) 
jsonNode; int arity = fieldNames.length; GenericRowData row = new GenericRowData(arity); for (int i = 0; i < arity; i++) { String fieldName = fieldNames[i]; JsonNode field = node.get(fieldName); Object convertedField = convertField(fieldConverters[i], fieldName, field); row.setField(i, convertedField); } return row; }; } private Object convertField( DeserializationRuntimeConverter fieldConverter, String fieldName, JsonNode field) { if (field == null) { if (failOnMissingField) { throw new JsonParseException( "Could not find field with name '" + fieldName + "'."); } else { return null; } } else { return fieldConverter.convert(field); } } private DeserializationRuntimeConverter wrapIntoNullableConverter( DeserializationRuntimeConverter converter) { return jsonNode -> { if (jsonNode == null || jsonNode.isNull()) { return null; } try { return converter.convert(jsonNode); } catch (Throwable t) { if (!ignoreParseErrors) { throw t; } return null; } }; } /** * Exception which refers to parse errors in converters. * */ private static final class JsonParseException extends RuntimeException { private static final long serialVersionUID = 1L; public JsonParseException(String message) { super(message); } public JsonParseException(String message, Throwable cause) { super(message, cause); } } }
`counter.get()` is not correct here, because `counter` counts default-action executions, not processed mails. At the moment, the mails processed include only the poison mail and the manual stop mail, so the count would be 2. However, if future changes introduce house-keeping mails that get generated and processed, this assertion would need adjusting. The `> 0` check proves that the counter works in general; I can change it to `> 1`, which is in my opinion better here than asserting `== 2`.
/**
 * Verifies that the mailbox loop both executes the default action repeatedly and
 * processes enqueued mails, and that the mails-processed metric is incremented.
 */
public void testRunDefaultActionAndMails() throws Exception {
    AtomicBoolean stop = new AtomicBoolean(false);
    // Counts default-action executions (NOT the number of mails processed).
    AtomicInteger counter = new AtomicInteger();
    MailboxThread mailboxThread =
        new MailboxThread() {
            @Override
            public void runDefaultAction(Controller controller) throws Exception {
                counter.incrementAndGet();
                if (stop.get()) {
                    controller.allActionsCompleted();
                } else {
                    // Yield briefly so the "stop" mail can be enqueued and picked up.
                    Thread.sleep(10L);
                }
            }
        };
    MailboxProcessor mailboxProcessor = start(mailboxThread);
    mailboxProcessor.getMailboxExecutor(DEFAULT_PRIORITY).execute(() -> stop.set(true), "stop");
    mailboxThread.join();
    Assert.assertTrue(counter.get() > 0);
    // At least the "stop" mail must have been processed by the loop.
    Assert.assertTrue(mailboxProcessor.getNumMailsProcessedCounter().getCount() > 0);
}
Assert.assertTrue(mailboxProcessor.getNumMailsProcessedCounter().getCount() > 0);
/**
 * Verifies that the mailbox loop both executes the default action repeatedly and
 * processes enqueued mails, and that the mails-processed metric is incremented.
 */
public void testRunDefaultActionAndMails() throws Exception {
    AtomicBoolean stop = new AtomicBoolean(false);
    // Counts default-action executions (NOT the number of mails processed).
    AtomicInteger counter = new AtomicInteger();
    MailboxThread mailboxThread =
        new MailboxThread() {
            @Override
            public void runDefaultAction(Controller controller) throws Exception {
                counter.incrementAndGet();
                if (stop.get()) {
                    controller.allActionsCompleted();
                } else {
                    // Yield briefly so the "stop" mail can be enqueued and picked up.
                    Thread.sleep(10L);
                }
            }
        };
    MailboxProcessor mailboxProcessor = start(mailboxThread);
    mailboxProcessor.getMailboxExecutor(DEFAULT_PRIORITY).execute(() -> stop.set(true), "stop");
    mailboxThread.join();
    assertThat(counter.get(), greaterThan(0));
    // At least the "stop" mail must have been processed by the loop.
    assertThat(mailboxProcessor.getNumMailsProcessedCounter().getCount(), greaterThan(0L));
}
class TaskMailboxProcessorTest { public static final int DEFAULT_PRIORITY = 0; @Rule public ExpectedException expectedException = ExpectedException.none(); @Test public void testRejectIfNotOpen() { MailboxProcessor mailboxProcessor = new MailboxProcessor(controller -> {}); mailboxProcessor.prepareClose(); try { mailboxProcessor.getMailboxExecutor(DEFAULT_PRIORITY).execute(() -> {}, "dummy"); Assert.fail("Should not be able to accept runnables if not opened."); } catch (RejectedExecutionException expected) { } } @Test public void testSubmittingRunnableWithException() throws Exception { expectedException.expectMessage("Expected"); try (MailboxProcessor mailboxProcessor = new MailboxProcessor(controller -> {})) { final Thread submitThread = new Thread( () -> { mailboxProcessor .getMainMailboxExecutor() .execute( this::throwFlinkException, "testSubmittingRunnableWithException"); }); submitThread.start(); mailboxProcessor.runMailboxLoop(); submitThread.join(); } } private void throwFlinkException() throws FlinkException { throw new FlinkException("Expected"); } @Test public void testShutdown() { MailboxProcessor mailboxProcessor = new MailboxProcessor(controller -> {}); FutureTaskWithException<Void> testRunnableFuture = new FutureTaskWithException<>(() -> {}); mailboxProcessor .getMailboxExecutor(DEFAULT_PRIORITY) .execute(testRunnableFuture, "testRunnableFuture"); mailboxProcessor.prepareClose(); try { mailboxProcessor.getMailboxExecutor(DEFAULT_PRIORITY).execute(() -> {}, "dummy"); Assert.fail("Should not be able to accept runnables if not opened."); } catch (RejectedExecutionException expected) { } Assert.assertFalse(testRunnableFuture.isDone()); mailboxProcessor.close(); assertTrue(testRunnableFuture.isCancelled()); } @Test @Test public void testRunDefaultAction() throws Exception { final int expectedInvocations = 3; final AtomicInteger counter = new AtomicInteger(0); MailboxThread mailboxThread = new MailboxThread() { @Override public void 
runDefaultAction(Controller controller) { if (counter.incrementAndGet() == expectedInvocations) { controller.allActionsCompleted(); } } }; start(mailboxThread); mailboxThread.join(); Assert.assertEquals(expectedInvocations, counter.get()); } @Test public void testSignalUnAvailable() throws Exception { final AtomicInteger counter = new AtomicInteger(0); final AtomicReference<MailboxDefaultAction.Suspension> suspendedActionRef = new AtomicReference<>(); final OneShotLatch actionSuspendedLatch = new OneShotLatch(); final int blockAfterInvocations = 3; final int totalInvocations = blockAfterInvocations * 2; MailboxThread mailboxThread = new MailboxThread() { @Override public void runDefaultAction(Controller controller) { if (counter.incrementAndGet() == blockAfterInvocations) { suspendedActionRef.set(controller.suspendDefaultAction()); actionSuspendedLatch.trigger(); } else if (counter.get() == totalInvocations) { controller.allActionsCompleted(); } } }; MailboxProcessor mailboxProcessor = start(mailboxThread); actionSuspendedLatch.await(); Assert.assertEquals(blockAfterInvocations, counter.get()); MailboxDefaultAction.Suspension suspension = suspendedActionRef.get(); mailboxProcessor.getMailboxExecutor(DEFAULT_PRIORITY).execute(suspension::resume, "resume"); mailboxThread.join(); Assert.assertEquals(totalInvocations, counter.get()); } @Test public void testSignalUnAvailablePingPong() throws Exception { final AtomicReference<MailboxDefaultAction.Suspension> suspendedActionRef = new AtomicReference<>(); final int totalSwitches = 10000; final MailboxThread mailboxThread = new MailboxThread() { int count = 0; @Override public void runDefaultAction(Controller controller) { assertTrue( suspendedActionRef.compareAndSet( null, controller.suspendDefaultAction())); ++count; if (count == totalSwitches) { controller.allActionsCompleted(); } else if (count % 1000 == 0) { try { Thread.sleep(1L); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } } }; 
mailboxThread.start(); final MailboxProcessor mailboxProcessor = mailboxThread.getMailboxProcessor(); final Thread asyncUnblocker = new Thread( () -> { int count = 0; while (!Thread.currentThread().isInterrupted()) { final MailboxDefaultAction.Suspension resume = suspendedActionRef.getAndSet(null); if (resume != null) { mailboxProcessor .getMailboxExecutor(DEFAULT_PRIORITY) .execute(resume::resume, "resume"); } else { try { mailboxProcessor .getMailboxExecutor(DEFAULT_PRIORITY) .execute(() -> {}, "dummy"); } catch (RejectedExecutionException ignore) { } } ++count; if (count % 5000 == 0) { try { Thread.sleep(1L); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } } }); asyncUnblocker.start(); mailboxThread.signalStart(); mailboxThread.join(); asyncUnblocker.interrupt(); asyncUnblocker.join(); mailboxThread.checkException(); } /** Testing that canceling after closing will not lead to an exception. */ @Test public void testCancelAfterClose() { MailboxProcessor mailboxProcessor = new MailboxProcessor((ctx) -> {}); mailboxProcessor.close(); mailboxProcessor.allActionsCompleted(); } private static MailboxProcessor start(MailboxThread mailboxThread) { mailboxThread.start(); final MailboxProcessor mailboxProcessor = mailboxThread.getMailboxProcessor(); mailboxThread.signalStart(); return mailboxProcessor; } /** FLINK-14304: Avoid newly spawned letters to prevent input processing from ever happening. 
*/ @Test public void testAvoidStarvation() throws Exception { final int expectedInvocations = 3; final AtomicInteger counter = new AtomicInteger(0); MailboxThread mailboxThread = new MailboxThread() { @Override public void runDefaultAction(Controller controller) { if (counter.incrementAndGet() == expectedInvocations) { controller.allActionsCompleted(); } } }; mailboxThread.start(); final MailboxProcessor mailboxProcessor = mailboxThread.getMailboxProcessor(); final MailboxExecutor mailboxExecutor = mailboxProcessor.getMailboxExecutor(DEFAULT_PRIORITY); AtomicInteger index = new AtomicInteger(); mailboxExecutor.execute( new RunnableWithException() { @Override public void run() { mailboxExecutor.execute(this, "Blocking mail" + index.incrementAndGet()); } }, "Blocking mail" + index.get()); mailboxThread.signalStart(); mailboxThread.join(); Assert.assertEquals(expectedInvocations, counter.get()); Assert.assertEquals(expectedInvocations, index.get()); } @Test public void testSuspendRunningMailboxLoop() throws Exception { OneShotLatch doSomeWork = new OneShotLatch(); AtomicBoolean stop = new AtomicBoolean(false); MailboxProcessor mailboxProcessor = new MailboxProcessor( controller -> { doSomeWork.trigger(); if (stop.get()) { controller.allActionsCompleted(); } }); Thread suspendThread = new Thread( () -> { try { doSomeWork.await(); mailboxProcessor.suspend(); mailboxProcessor .getMailboxExecutor(DEFAULT_PRIORITY) .execute(() -> stop.set(true), "stop"); } catch (Exception ignore) { } }); suspendThread.start(); mailboxProcessor.runMailboxLoop(); suspendThread.join(); assertFalse(stop.get()); mailboxProcessor.runMailboxLoop(); assertFalse(mailboxProcessor.isMailboxLoopRunning()); assertTrue(stop.get()); } @Test public void testResumeMailboxLoopAfterAllActionsCompleted() throws Exception { AtomicBoolean start = new AtomicBoolean(false); MailboxProcessor mailboxProcessor = new MailboxProcessor(controller -> start.set(true)); mailboxProcessor.allActionsCompleted(); 
mailboxProcessor.runMailboxLoop(); assertFalse(mailboxProcessor.isMailboxLoopRunning()); assertFalse(start.get()); mailboxProcessor.runMailboxLoop(); assertFalse(start.get()); } @Test public void testResumeMailboxLoop() throws Exception { AtomicBoolean start = new AtomicBoolean(false); MailboxProcessor mailboxProcessor = new MailboxProcessor( controller -> { start.set(true); controller.allActionsCompleted(); }); mailboxProcessor.suspend(); mailboxProcessor.runMailboxLoop(); assertFalse(start.get()); mailboxProcessor.runMailboxLoop(); assertTrue(start.get()); } static class MailboxThread extends Thread implements MailboxDefaultAction { MailboxProcessor mailboxProcessor; OneShotLatch mailboxCreatedLatch = new OneShotLatch(); OneShotLatch canRun = new OneShotLatch(); private Throwable caughtException; @Override public final void run() { mailboxProcessor = new MailboxProcessor(this); mailboxCreatedLatch.trigger(); try { canRun.await(); mailboxProcessor.runMailboxLoop(); } catch (Throwable t) { this.caughtException = t; } } @Override public void runDefaultAction(Controller controller) throws Exception { controller.allActionsCompleted(); } final MailboxProcessor getMailboxProcessor() { try { mailboxCreatedLatch.await(); return mailboxProcessor; } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } } final void signalStart() { if (mailboxCreatedLatch.isTriggered()) { canRun.trigger(); } } void checkException() throws Exception { if (caughtException != null) { throw new Exception(caughtException); } } } }
class TaskMailboxProcessorTest { public static final int DEFAULT_PRIORITY = 0; @Rule public ExpectedException expectedException = ExpectedException.none(); @Test public void testRejectIfNotOpen() { MailboxProcessor mailboxProcessor = new MailboxProcessor(controller -> {}); mailboxProcessor.prepareClose(); try { mailboxProcessor.getMailboxExecutor(DEFAULT_PRIORITY).execute(() -> {}, "dummy"); Assert.fail("Should not be able to accept runnables if not opened."); } catch (RejectedExecutionException expected) { } } @Test public void testSubmittingRunnableWithException() throws Exception { expectedException.expectMessage("Expected"); try (MailboxProcessor mailboxProcessor = new MailboxProcessor(controller -> {})) { final Thread submitThread = new Thread( () -> { mailboxProcessor .getMainMailboxExecutor() .execute( this::throwFlinkException, "testSubmittingRunnableWithException"); }); submitThread.start(); mailboxProcessor.runMailboxLoop(); submitThread.join(); } } private void throwFlinkException() throws FlinkException { throw new FlinkException("Expected"); } @Test public void testShutdown() { MailboxProcessor mailboxProcessor = new MailboxProcessor(controller -> {}); FutureTaskWithException<Void> testRunnableFuture = new FutureTaskWithException<>(() -> {}); mailboxProcessor .getMailboxExecutor(DEFAULT_PRIORITY) .execute(testRunnableFuture, "testRunnableFuture"); mailboxProcessor.prepareClose(); try { mailboxProcessor.getMailboxExecutor(DEFAULT_PRIORITY).execute(() -> {}, "dummy"); Assert.fail("Should not be able to accept runnables if not opened."); } catch (RejectedExecutionException expected) { } Assert.assertFalse(testRunnableFuture.isDone()); mailboxProcessor.close(); assertTrue(testRunnableFuture.isCancelled()); } @Test @Test public void testRunDefaultAction() throws Exception { final int expectedInvocations = 3; final AtomicInteger counter = new AtomicInteger(0); MailboxThread mailboxThread = new MailboxThread() { @Override public void 
runDefaultAction(Controller controller) { if (counter.incrementAndGet() == expectedInvocations) { controller.allActionsCompleted(); } } }; start(mailboxThread); mailboxThread.join(); Assert.assertEquals(expectedInvocations, counter.get()); } @Test public void testSignalUnAvailable() throws Exception { final AtomicInteger counter = new AtomicInteger(0); final AtomicReference<MailboxDefaultAction.Suspension> suspendedActionRef = new AtomicReference<>(); final OneShotLatch actionSuspendedLatch = new OneShotLatch(); final int blockAfterInvocations = 3; final int totalInvocations = blockAfterInvocations * 2; MailboxThread mailboxThread = new MailboxThread() { @Override public void runDefaultAction(Controller controller) { if (counter.incrementAndGet() == blockAfterInvocations) { suspendedActionRef.set(controller.suspendDefaultAction()); actionSuspendedLatch.trigger(); } else if (counter.get() == totalInvocations) { controller.allActionsCompleted(); } } }; MailboxProcessor mailboxProcessor = start(mailboxThread); actionSuspendedLatch.await(); Assert.assertEquals(blockAfterInvocations, counter.get()); MailboxDefaultAction.Suspension suspension = suspendedActionRef.get(); mailboxProcessor.getMailboxExecutor(DEFAULT_PRIORITY).execute(suspension::resume, "resume"); mailboxThread.join(); Assert.assertEquals(totalInvocations, counter.get()); } @Test public void testSignalUnAvailablePingPong() throws Exception { final AtomicReference<MailboxDefaultAction.Suspension> suspendedActionRef = new AtomicReference<>(); final int totalSwitches = 10000; final MailboxThread mailboxThread = new MailboxThread() { int count = 0; @Override public void runDefaultAction(Controller controller) { assertTrue( suspendedActionRef.compareAndSet( null, controller.suspendDefaultAction())); ++count; if (count == totalSwitches) { controller.allActionsCompleted(); } else if (count % 1000 == 0) { try { Thread.sleep(1L); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } } }; 
mailboxThread.start(); final MailboxProcessor mailboxProcessor = mailboxThread.getMailboxProcessor(); final Thread asyncUnblocker = new Thread( () -> { int count = 0; while (!Thread.currentThread().isInterrupted()) { final MailboxDefaultAction.Suspension resume = suspendedActionRef.getAndSet(null); if (resume != null) { mailboxProcessor .getMailboxExecutor(DEFAULT_PRIORITY) .execute(resume::resume, "resume"); } else { try { mailboxProcessor .getMailboxExecutor(DEFAULT_PRIORITY) .execute(() -> {}, "dummy"); } catch (RejectedExecutionException ignore) { } } ++count; if (count % 5000 == 0) { try { Thread.sleep(1L); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } } }); asyncUnblocker.start(); mailboxThread.signalStart(); mailboxThread.join(); asyncUnblocker.interrupt(); asyncUnblocker.join(); mailboxThread.checkException(); } /** Testing that canceling after closing will not lead to an exception. */ @Test public void testCancelAfterClose() { MailboxProcessor mailboxProcessor = new MailboxProcessor((ctx) -> {}); mailboxProcessor.close(); mailboxProcessor.allActionsCompleted(); } private static MailboxProcessor start(MailboxThread mailboxThread) { mailboxThread.start(); final MailboxProcessor mailboxProcessor = mailboxThread.getMailboxProcessor(); mailboxThread.signalStart(); return mailboxProcessor; } /** FLINK-14304: Avoid newly spawned letters to prevent input processing from ever happening. 
*/ @Test public void testAvoidStarvation() throws Exception { final int expectedInvocations = 3; final AtomicInteger counter = new AtomicInteger(0); MailboxThread mailboxThread = new MailboxThread() { @Override public void runDefaultAction(Controller controller) { if (counter.incrementAndGet() == expectedInvocations) { controller.allActionsCompleted(); } } }; mailboxThread.start(); final MailboxProcessor mailboxProcessor = mailboxThread.getMailboxProcessor(); final MailboxExecutor mailboxExecutor = mailboxProcessor.getMailboxExecutor(DEFAULT_PRIORITY); AtomicInteger index = new AtomicInteger(); mailboxExecutor.execute( new RunnableWithException() { @Override public void run() { mailboxExecutor.execute(this, "Blocking mail" + index.incrementAndGet()); } }, "Blocking mail" + index.get()); mailboxThread.signalStart(); mailboxThread.join(); Assert.assertEquals(expectedInvocations, counter.get()); Assert.assertEquals(expectedInvocations, index.get()); } @Test public void testSuspendRunningMailboxLoop() throws Exception { OneShotLatch doSomeWork = new OneShotLatch(); AtomicBoolean stop = new AtomicBoolean(false); MailboxProcessor mailboxProcessor = new MailboxProcessor( controller -> { doSomeWork.trigger(); if (stop.get()) { controller.allActionsCompleted(); } }); Thread suspendThread = new Thread( () -> { try { doSomeWork.await(); mailboxProcessor.suspend(); mailboxProcessor .getMailboxExecutor(DEFAULT_PRIORITY) .execute(() -> stop.set(true), "stop"); } catch (Exception ignore) { } }); suspendThread.start(); mailboxProcessor.runMailboxLoop(); suspendThread.join(); assertFalse(stop.get()); mailboxProcessor.runMailboxLoop(); assertFalse(mailboxProcessor.isMailboxLoopRunning()); assertTrue(stop.get()); } @Test public void testResumeMailboxLoopAfterAllActionsCompleted() throws Exception { AtomicBoolean start = new AtomicBoolean(false); MailboxProcessor mailboxProcessor = new MailboxProcessor(controller -> start.set(true)); mailboxProcessor.allActionsCompleted(); 
mailboxProcessor.runMailboxLoop(); assertFalse(mailboxProcessor.isMailboxLoopRunning()); assertFalse(start.get()); mailboxProcessor.runMailboxLoop(); assertFalse(start.get()); } @Test public void testResumeMailboxLoop() throws Exception { AtomicBoolean start = new AtomicBoolean(false); MailboxProcessor mailboxProcessor = new MailboxProcessor( controller -> { start.set(true); controller.allActionsCompleted(); }); mailboxProcessor.suspend(); mailboxProcessor.runMailboxLoop(); assertFalse(start.get()); mailboxProcessor.runMailboxLoop(); assertTrue(start.get()); } static class MailboxThread extends Thread implements MailboxDefaultAction { MailboxProcessor mailboxProcessor; OneShotLatch mailboxCreatedLatch = new OneShotLatch(); OneShotLatch canRun = new OneShotLatch(); private Throwable caughtException; @Override public final void run() { mailboxProcessor = new MailboxProcessor(this); mailboxCreatedLatch.trigger(); try { canRun.await(); mailboxProcessor.runMailboxLoop(); } catch (Throwable t) { this.caughtException = t; } } @Override public void runDefaultAction(Controller controller) throws Exception { controller.allActionsCompleted(); } final MailboxProcessor getMailboxProcessor() { try { mailboxCreatedLatch.await(); return mailboxProcessor; } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } } final void signalStart() { if (mailboxCreatedLatch.isTriggered()) { canRun.trigger(); } } void checkException() throws Exception { if (caughtException != null) { throw new Exception(caughtException); } } } }
`IntermediateDataSetID` has a parameter-less constructor, I believe — you could use it here instead of explicitly constructing and passing a new `AbstractID`.
public IntermediateResultPartitionID() { this.partitionNum = -1; this.intermediateDataSetID = new IntermediateDataSetID(new AbstractID()); }
this.intermediateDataSetID = new IntermediateDataSetID(new AbstractID());
public IntermediateResultPartitionID() { this.partitionNum = -1; this.intermediateDataSetID = new IntermediateDataSetID(); }
class IntermediateResultPartitionID implements ResultID { private static final long serialVersionUID = 1L; private final IntermediateDataSetID intermediateDataSetID; private final int partitionNum; /** * Creates an new random intermediate result partition ID for testing. */ @VisibleForTesting /** * Creates an new intermediate result partition ID with {@link IntermediateDataSetID} and the partitionNum. */ public IntermediateResultPartitionID(IntermediateDataSetID intermediateDataSetID, int partitionNum) { this.intermediateDataSetID = intermediateDataSetID; this.partitionNum = partitionNum; } public void writeTo(ByteBuf buf) { intermediateDataSetID.writeTo(buf); buf.writeInt(partitionNum); } public static IntermediateResultPartitionID fromByteBuf(ByteBuf buf) { final IntermediateDataSetID intermediateDataSetID = IntermediateDataSetID.fromByteBuf(buf); final int partitionNum = buf.readInt(); return new IntermediateResultPartitionID(intermediateDataSetID, partitionNum); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } else if (obj != null && obj.getClass() == getClass()) { IntermediateResultPartitionID that = (IntermediateResultPartitionID) obj; return that.intermediateDataSetID.equals(this.intermediateDataSetID) && that.partitionNum == this.partitionNum; } else { return false; } } @Override public int hashCode() { return this.intermediateDataSetID.hashCode() ^ this.partitionNum; } @Override public String toString() { return intermediateDataSetID.toString() + " } }
class IntermediateResultPartitionID implements ResultID { private static final long serialVersionUID = 1L; private final IntermediateDataSetID intermediateDataSetID; private final int partitionNum; /** * Creates an new random intermediate result partition ID for testing. */ @VisibleForTesting /** * Creates an new intermediate result partition ID with {@link IntermediateDataSetID} and the partitionNum. */ public IntermediateResultPartitionID(IntermediateDataSetID intermediateDataSetID, int partitionNum) { this.intermediateDataSetID = intermediateDataSetID; this.partitionNum = partitionNum; } public void writeTo(ByteBuf buf) { intermediateDataSetID.writeTo(buf); buf.writeInt(partitionNum); } public static IntermediateResultPartitionID fromByteBuf(ByteBuf buf) { final IntermediateDataSetID intermediateDataSetID = IntermediateDataSetID.fromByteBuf(buf); final int partitionNum = buf.readInt(); return new IntermediateResultPartitionID(intermediateDataSetID, partitionNum); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } else if (obj != null && obj.getClass() == getClass()) { IntermediateResultPartitionID that = (IntermediateResultPartitionID) obj; return that.intermediateDataSetID.equals(this.intermediateDataSetID) && that.partitionNum == this.partitionNum; } else { return false; } } @Override public int hashCode() { return this.intermediateDataSetID.hashCode() ^ this.partitionNum; } @Override public String toString() { return intermediateDataSetID.toString() + " } }
This check excludes the decimal value zero (whole set of decimal zero values with different precision) from the set of values covered by the check `bd.abs(MathContext.DECIMAL128).compareTo(MIN_DECIMAL_MAGNITUDE) < 0`.
private static BigDecimal getValidDecimalValue(BigDecimal bd) { if (bd.compareTo(DECIMAL_MAX) > 0 || bd.compareTo(DECIMAL_MIN) < 0) { throw ErrorCreator.createError(BallerinaErrorReasons.NUMBER_OVERFLOW, BLangExceptionHelper.getErrorDetails(RuntimeErrors.DECIMAL_VALUE_OUT_OF_RANGE)); } else if (bd.abs(MathContext.DECIMAL128).compareTo(MIN_DECIMAL_MAGNITUDE) < 0 && bd.abs(MathContext.DECIMAL128).compareTo(BigDecimal.ZERO) > 0) { return BigDecimal.ZERO; } return bd; }
bd.abs(MathContext.DECIMAL128).compareTo(BigDecimal.ZERO) > 0) {
private static BigDecimal getValidDecimalValue(BigDecimal bd) { if (bd.compareTo(DECIMAL_MAX) > 0 || bd.compareTo(DECIMAL_MIN) < 0) { throw ErrorCreator.createError(BallerinaErrorReasons.NUMBER_OVERFLOW, BLangExceptionHelper.getErrorDetails(RuntimeErrors.DECIMAL_VALUE_OUT_OF_RANGE)); } else if (bd.abs(MathContext.DECIMAL128).compareTo(MIN_DECIMAL_MAGNITUDE) < 0 && bd.abs(MathContext.DECIMAL128).compareTo(BigDecimal.ZERO) > 0) { return BigDecimal.ZERO; } return bd; }
class DecimalValue implements SimpleValue, BDecimal { private static final String INF_STRING = "Infinity"; private static final String NEG_INF_STRING = "-" + INF_STRING; private static final String NAN = "NaN"; private static final BigDecimal DECIMAL_MAX = new BigDecimal("9.999999999999999999999999999999999e6144", MathContext.DECIMAL128); private static final BigDecimal DECIMAL_MIN = new BigDecimal("-9.999999999999999999999999999999999e6144", MathContext.DECIMAL128); private static final BigDecimal MIN_DECIMAL_MAGNITUDE = new BigDecimal("1.000000e-6143", MathContext.DECIMAL128); @Deprecated public DecimalValueKind valueKind = DecimalValueKind.OTHER; private final BigDecimal value; public DecimalValue(BigDecimal value) { this.value = getValidDecimalValue(value); if (!this.booleanValue()) { this.valueKind = DecimalValueKind.ZERO; } } public DecimalValue(String value) { BigDecimal bd; if (isHexValueString(value)) { bd = hexToDecimalFloatingPointNumber(value); } else { try { bd = new BigDecimal(value, MathContext.DECIMAL128); } catch (NumberFormatException exception) { String message = exception.getMessage(); if ((message != null) && (message.equals("Too many nonzero exponent digits.") || message.equals("Exponent overflow."))) { throw ErrorCreator.createError(BallerinaErrorReasons.LARGE_EXPONENT_ERROR, BLangExceptionHelper.getErrorDetails(RuntimeErrors.LARGE_EXPONENTS_IN_DECIMAL, value)); } throw exception; } } this.value = getValidDecimalValue(bd); if (!this.booleanValue()) { this.valueKind = DecimalValueKind.ZERO; } } public DecimalValue(String value, DecimalValueKind valueKind) { this(value); this.valueKind = valueKind; } private static boolean isHexValueString(String value) { String upperCaseValue = value.toUpperCase(); return upperCaseValue.startsWith("0X") || upperCaseValue.startsWith("-0X"); } /** * Method used to convert the hexadecimal number to decimal floating point number. * BigDecimal does not support hexadecimal numbers. 
Hence, we need to convert the hexadecimal number to a * decimal floating point number before passing the string value to the BigDecimal constructor. * * @param value Hexadecimal string value that needs to be converted. * @return BigDecimal corresponds to the hexadecimal number provided. */ private static BigDecimal hexToDecimalFloatingPointNumber(String value) { String upperCaseValue = value.toUpperCase(); String hexValue = upperCaseValue.replace("0X", ""); if (!hexValue.contains("P")) { hexValue = hexValue.concat("P0"); } String[] splitAtExponent = hexValue.split("P"); int binaryExponent = Integer.parseInt(splitAtExponent[1]); String numberWithoutExp = splitAtExponent[0]; String intComponent; if (numberWithoutExp.contains(".")) { String[] numberComponents = numberWithoutExp.split("\\."); intComponent = numberComponents[0]; String decimalComponent = numberComponents[1]; binaryExponent += 4 * (-1) * decimalComponent.length(); intComponent = intComponent.concat(decimalComponent); } else { intComponent = numberWithoutExp; } BigDecimal exponentValue; if (binaryExponent >= 0) { exponentValue = new BigDecimal(2).pow(binaryExponent); } else { exponentValue = BigDecimal.ONE.divide(new BigDecimal(2).pow(-binaryExponent), MathContext.DECIMAL128); } BigInteger hexEquivalentNumber = new BigInteger(intComponent, 16); return new BigDecimal(hexEquivalentNumber).multiply(exponentValue, MathContext.DECIMAL128); } /** * Get value of the decimal. * @return the value */ public BigDecimal decimalValue() { return this.value; } /** * Get the int value of the decimal. * May result in a {@code ErrorValue} * @return the integer value */ public long intValue() { if (!isDecimalWithinIntRange(this)) { throw ErrorUtils.createNumericConversionError(this.stringValue(null), PredefinedTypes.TYPE_DECIMAL, PredefinedTypes.TYPE_INT); } return value.setScale(0, RoundingMode.HALF_EVEN).longValue(); } /** * Check the given value is in int range. 
* @param decimalValue value to be checked * @return true if the value is in int range */ public static boolean isDecimalWithinIntRange(DecimalValue decimalValue) { BigDecimal value = decimalValue.value; return value.compareTo(RuntimeConstants.BINT_MAX_VALUE_BIG_DECIMAL_RANGE_MAX) < 0 && value.compareTo(RuntimeConstants.BINT_MIN_VALUE_BIG_DECIMAL_RANGE_MIN) > 0; } /** * Get the byte value. * May result in a {@code ErrorValue} * @return the byte value */ public int byteValue() { int intVal = (int) Math.rint(this.value.doubleValue()); if (!isByteLiteral(intVal)) { throw ErrorUtils.createNumericConversionError(value, PredefinedTypes.TYPE_DECIMAL, PredefinedTypes.TYPE_BYTE); } return intVal; } private static boolean isByteLiteral(long longValue) { return (longValue >= RuntimeConstants.BBYTE_MIN_VALUE && longValue <= RuntimeConstants.BBYTE_MAX_VALUE); } /** * Get the float value. * @return the double value */ public double floatValue() { return value.doubleValue(); } /** * Check the given value represents true or false. * @return true if the value is non zero */ public boolean booleanValue() { return value.compareTo(BigDecimal.ZERO) != 0; } @Override public Object copy(Map<Object, Object> refs) { return this; } @Override public Object frozenCopy(Map<Object, Object> refs) { return this; } /** * Get the string value. * @return string value * @param parent The link to the parent node */ public String stringValue(BLink parent) { if (this.valueKind != DecimalValueKind.OTHER) { return this.valueKind.getValue(); } return value.toString(); } /** * Get the string value in expression style. * @return string value in expression style * @param parent The link to the parent node */ public String expressionStringValue(BLink parent) { if (this.valueKind != DecimalValueKind.OTHER) { return this.valueKind.getValue() + "d"; } return value.toString() + "d"; } /** * Get the {@code BigDecimal} value. 
* @return the decimal value */ public BigDecimal value() { return this.value; } /** * Get the {@code BType} of the value. * @return the type */ public Type getType() { return PredefinedTypes.TYPE_DECIMAL; } /** * Returns a {decimal whose value is {@code (this + augend)}. * @param augend value to be added. * @return new value */ public DecimalValue add(DecimalValue augend) { if (this.valueKind == DecimalValueKind.ZERO) { return augend; } if (augend.valueKind == DecimalValueKind.ZERO) { return this; } return new DecimalValue(this.decimalValue().add(augend.decimalValue(), MathContext.DECIMAL128)); } /** * Returns a decimal whose value is {@code (this - subtrahend)}. * @param subtrahend value to be subtracted * @return value after subtraction */ public DecimalValue subtract(DecimalValue subtrahend) { if (this.valueKind == DecimalValueKind.ZERO) { if (subtrahend.valueKind == DecimalValueKind.ZERO) { return subtrahend; } return subtrahend.negate(); } if (subtrahend.valueKind == DecimalValueKind.ZERO) { return this; } return new DecimalValue(this.decimalValue().subtract(subtrahend.decimalValue(), MathContext.DECIMAL128)); } /** * Returns a decimal whose value is <tt>(this &times; * multiplicand)</tt>. * @param multiplicand value to be multiplied * @return value after multiplication */ public DecimalValue multiply(DecimalValue multiplicand) { if (this.valueKind == DecimalValueKind.ZERO) { return this; } if (multiplicand.valueKind == DecimalValueKind.OTHER) { return new DecimalValue(this.decimalValue().multiply(multiplicand.decimalValue(), MathContext.DECIMAL128)); } return multiplicand; } /** * Returns a decimal whose value is {@code (this / * divisor)}. 
* @param divisor value by which this decimal is to be divided * @return value after division */ public DecimalValue divide(DecimalValue divisor) { if (this.valueKind == DecimalValueKind.ZERO) { if (divisor.valueKind == DecimalValueKind.ZERO) { throw ErrorUtils.createInvalidDecimalError(NAN); } return this; } if (divisor.valueKind == DecimalValueKind.OTHER) { return new DecimalValue(this.decimalValue().divide(divisor.decimalValue(), MathContext.DECIMAL128)); } if (this.decimalValue().compareTo(BigDecimal.ZERO) > 0) { throw ErrorUtils.createInvalidDecimalError(INF_STRING); } else { throw ErrorUtils.createInvalidDecimalError(NEG_INF_STRING); } } /** * Returns a decimal whose value is {@code (this % * divisor)}. * @param divisor value by which this decimal is to be divided * @return {@code this % divisor} */ public DecimalValue remainder(DecimalValue divisor) { if (divisor.valueKind == DecimalValueKind.OTHER) { return new DecimalValue(this.decimalValue().remainder(divisor.decimalValue(), MathContext.DECIMAL128)); } throw ErrorUtils.createInvalidDecimalError(NAN); } /** * Returns a decimal whose value is {@code (-this)}. * @return {@code -this} */ public DecimalValue negate() { if (this.valueKind == DecimalValueKind.OTHER) { return new DecimalValue(this.decimalValue().negate()); } return this; } @Override public BDecimal add(BDecimal augend) { return add((DecimalValue) augend); } @Override public BDecimal subtract(BDecimal subtrahend) { return subtract((DecimalValue) subtrahend); } @Override public BDecimal multiply(BDecimal multiplicand) { return multiply((DecimalValue) multiplicand); } @Override public BDecimal divide(BDecimal divisor) { return divide((DecimalValue) divisor); } @Override public BDecimal remainder(BDecimal divisor) { return remainder((DecimalValue) divisor); } /** * Returns value kind of {@code (-this)}. 
* @return value kind */ public DecimalValueKind getValueKind() { return valueKind; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } DecimalValue bDecimal = (DecimalValue) obj; return ((value.compareTo(bDecimal.value) == 0) && (this.valueKind == bDecimal.valueKind)); } @Override public int hashCode() { return value.hashCode(); } /** * Get the string value. * @return string value */ @Override public String toString() { return this.stringValue(null); } /** * Returns decimal of given int value. * @param value integer value * @return decimal value */ public static DecimalValue valueOf(int value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, RoundingMode.HALF_EVEN)); } /** * Returns decimal of given long value. * @param value long value * @return decimal value */ public static DecimalValue valueOf(long value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, RoundingMode.HALF_EVEN)); } /** * Returns decimal of given double value. * @param value double value * @return decimal value */ public static DecimalValue valueOf(double value) { if (Double.isNaN(value)) { throw ErrorUtils.createInvalidDecimalError(NAN); } if (value == Double.POSITIVE_INFINITY) { throw ErrorUtils.createInvalidDecimalError(INF_STRING); } if (value == Double.NEGATIVE_INFINITY) { throw ErrorUtils.createInvalidDecimalError(NEG_INF_STRING); } return new DecimalValue(BigDecimal.valueOf(value)); } /** * Returns decimal of given boolean value. * @param value boolean value * @return decimal value */ public static DecimalValue valueOf(boolean value) { return new DecimalValue(value ? 
BigDecimal.ONE.setScale(1, BigDecimal.ROUND_HALF_EVEN) : BigDecimal.ZERO.setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(byte value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(char value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(short value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(int value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(long value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(float value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(double value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(BigDecimal value) { return new DecimalValue(new BigDecimal(value.toString(), MathContext.DECIMAL128) .setScale(1, BigDecimal.ROUND_HALF_EVEN)); } }
class DecimalValue implements SimpleValue, BDecimal { private static final String INF_STRING = "Infinity"; private static final String NEG_INF_STRING = "-" + INF_STRING; private static final String NAN = "NaN"; private static final BigDecimal DECIMAL_MAX = new BigDecimal("9.999999999999999999999999999999999e6144", MathContext.DECIMAL128); private static final BigDecimal DECIMAL_MIN = new BigDecimal("-9.999999999999999999999999999999999e6144", MathContext.DECIMAL128); private static final BigDecimal MIN_DECIMAL_MAGNITUDE = new BigDecimal("1.000000000000000000000000000000000e-6143", MathContext.DECIMAL128); @Deprecated public DecimalValueKind valueKind = DecimalValueKind.OTHER; private final BigDecimal value; public DecimalValue(BigDecimal value) { this.value = getValidDecimalValue(value); if (!this.booleanValue()) { this.valueKind = DecimalValueKind.ZERO; } } public DecimalValue(String value) { BigDecimal bd; try { bd = new BigDecimal(value, MathContext.DECIMAL128); } catch (NumberFormatException exception) { String message = exception.getMessage(); if ((message != null) && (message.equals("Too many nonzero exponent digits.") || message.equals("Exponent overflow."))) { throw ErrorCreator.createError(BallerinaErrorReasons.LARGE_EXPONENT_ERROR, BLangExceptionHelper.getErrorDetails(RuntimeErrors.LARGE_EXPONENTS_IN_DECIMAL, value)); } throw exception; } this.value = getValidDecimalValue(bd); if (!this.booleanValue()) { this.valueKind = DecimalValueKind.ZERO; } } public DecimalValue(String value, DecimalValueKind valueKind) { this(value); this.valueKind = valueKind; } /** * Get value of the decimal. * @return the value */ public BigDecimal decimalValue() { return this.value; } /** * Get the int value of the decimal. 
* May result in a {@code ErrorValue} * @return the integer value */ public long intValue() { if (!isDecimalWithinIntRange(this)) { throw ErrorUtils.createNumericConversionError(this.stringValue(null), PredefinedTypes.TYPE_DECIMAL, PredefinedTypes.TYPE_INT); } return value.setScale(0, RoundingMode.HALF_EVEN).longValue(); } /** * Check the given value is in int range. * @param decimalValue value to be checked * @return true if the value is in int range */ public static boolean isDecimalWithinIntRange(DecimalValue decimalValue) { BigDecimal value = decimalValue.value; return value.compareTo(RuntimeConstants.BINT_MAX_VALUE_BIG_DECIMAL_RANGE_MAX) < 0 && value.compareTo(RuntimeConstants.BINT_MIN_VALUE_BIG_DECIMAL_RANGE_MIN) > 0; } /** * Get the byte value. * May result in a {@code ErrorValue} * @return the byte value */ public int byteValue() { int intVal = (int) Math.rint(this.value.doubleValue()); if (!isByteLiteral(intVal)) { throw ErrorUtils.createNumericConversionError(value, PredefinedTypes.TYPE_DECIMAL, PredefinedTypes.TYPE_BYTE); } return intVal; } private static boolean isByteLiteral(long longValue) { return (longValue >= RuntimeConstants.BBYTE_MIN_VALUE && longValue <= RuntimeConstants.BBYTE_MAX_VALUE); } /** * Get the float value. * @return the double value */ public double floatValue() { return value.doubleValue(); } /** * Check the given value represents true or false. * @return true if the value is non zero */ public boolean booleanValue() { return value.compareTo(BigDecimal.ZERO) != 0; } @Override public Object copy(Map<Object, Object> refs) { return this; } @Override public Object frozenCopy(Map<Object, Object> refs) { return this; } /** * Get the string value. * @return string value * @param parent The link to the parent node */ public String stringValue(BLink parent) { if (this.valueKind != DecimalValueKind.OTHER) { return this.valueKind.getValue(); } return value.toString(); } /** * Get the string value in expression style. 
* @return string value in expression style * @param parent The link to the parent node */ public String expressionStringValue(BLink parent) { if (this.valueKind != DecimalValueKind.OTHER) { return this.valueKind.getValue() + "d"; } return value.toString() + "d"; } /** * Get the {@code BigDecimal} value. * @return the decimal value */ public BigDecimal value() { return this.value; } /** * Get the {@code BType} of the value. * @return the type */ public Type getType() { return PredefinedTypes.TYPE_DECIMAL; } /** * Returns a {decimal whose value is {@code (this + augend)}. * @param augend value to be added. * @return new value */ public DecimalValue add(DecimalValue augend) { if (this.valueKind == DecimalValueKind.ZERO) { return augend; } if (augend.valueKind == DecimalValueKind.ZERO) { return this; } return new DecimalValue(this.decimalValue().add(augend.decimalValue(), MathContext.DECIMAL128)); } /** * Returns a decimal whose value is {@code (this - subtrahend)}. * @param subtrahend value to be subtracted * @return value after subtraction */ public DecimalValue subtract(DecimalValue subtrahend) { if (this.valueKind == DecimalValueKind.ZERO) { if (subtrahend.valueKind == DecimalValueKind.ZERO) { return subtrahend; } return subtrahend.negate(); } if (subtrahend.valueKind == DecimalValueKind.ZERO) { return this; } return new DecimalValue(this.decimalValue().subtract(subtrahend.decimalValue(), MathContext.DECIMAL128)); } /** * Returns a decimal whose value is <tt>(this &times; * multiplicand)</tt>. * @param multiplicand value to be multiplied * @return value after multiplication */ public DecimalValue multiply(DecimalValue multiplicand) { if (this.valueKind == DecimalValueKind.ZERO) { return this; } if (multiplicand.valueKind == DecimalValueKind.OTHER) { return new DecimalValue(this.decimalValue().multiply(multiplicand.decimalValue(), MathContext.DECIMAL128)); } return multiplicand; } /** * Returns a decimal whose value is {@code (this / * divisor)}. 
* @param divisor value by which this decimal is to be divided * @return value after division */ public DecimalValue divide(DecimalValue divisor) { if (this.valueKind == DecimalValueKind.ZERO) { if (divisor.valueKind == DecimalValueKind.ZERO) { throw ErrorUtils.createInvalidDecimalError(NAN); } return this; } if (divisor.valueKind == DecimalValueKind.OTHER) { return new DecimalValue(this.decimalValue().divide(divisor.decimalValue(), MathContext.DECIMAL128)); } if (this.decimalValue().compareTo(BigDecimal.ZERO) > 0) { throw ErrorUtils.createInvalidDecimalError(INF_STRING); } else { throw ErrorUtils.createInvalidDecimalError(NEG_INF_STRING); } } /** * Returns a decimal whose value is {@code (this % * divisor)}. * @param divisor value by which this decimal is to be divided * @return {@code this % divisor} */ public DecimalValue remainder(DecimalValue divisor) { if (divisor.valueKind == DecimalValueKind.OTHER) { return new DecimalValue(this.decimalValue().remainder(divisor.decimalValue(), MathContext.DECIMAL128)); } throw ErrorUtils.createInvalidDecimalError(NAN); } /** * Returns a decimal whose value is {@code (-this)}. * @return {@code -this} */ public DecimalValue negate() { if (this.valueKind == DecimalValueKind.OTHER) { return new DecimalValue(this.decimalValue().negate()); } return this; } @Override public BDecimal add(BDecimal augend) { return add((DecimalValue) augend); } @Override public BDecimal subtract(BDecimal subtrahend) { return subtract((DecimalValue) subtrahend); } @Override public BDecimal multiply(BDecimal multiplicand) { return multiply((DecimalValue) multiplicand); } @Override public BDecimal divide(BDecimal divisor) { return divide((DecimalValue) divisor); } @Override public BDecimal remainder(BDecimal divisor) { return remainder((DecimalValue) divisor); } /** * Returns value kind of {@code (-this)}. 
* @return value kind */ public DecimalValueKind getValueKind() { return valueKind; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } DecimalValue bDecimal = (DecimalValue) obj; return ((value.compareTo(bDecimal.value) == 0) && (this.valueKind == bDecimal.valueKind)); } @Override public int hashCode() { return value.hashCode(); } /** * Get the string value. * @return string value */ @Override public String toString() { return this.stringValue(null); } /** * Returns decimal of given int value. * @param value integer value * @return decimal value */ public static DecimalValue valueOf(int value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, RoundingMode.HALF_EVEN)); } /** * Returns decimal of given long value. * @param value long value * @return decimal value */ public static DecimalValue valueOf(long value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, RoundingMode.HALF_EVEN)); } /** * Returns decimal of given double value. * @param value double value * @return decimal value */ public static DecimalValue valueOf(double value) { if (Double.isNaN(value)) { throw ErrorUtils.createInvalidDecimalError(NAN); } if (value == Double.POSITIVE_INFINITY) { throw ErrorUtils.createInvalidDecimalError(INF_STRING); } if (value == Double.NEGATIVE_INFINITY) { throw ErrorUtils.createInvalidDecimalError(NEG_INF_STRING); } return new DecimalValue(BigDecimal.valueOf(value)); } /** * Returns decimal of given boolean value. * @param value boolean value * @return decimal value */ public static DecimalValue valueOf(boolean value) { return new DecimalValue(value ? 
BigDecimal.ONE.setScale(1, BigDecimal.ROUND_HALF_EVEN) : BigDecimal.ZERO.setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(byte value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(char value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(short value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(int value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(long value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(float value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(double value) { return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN)); } public static DecimalValue valueOfJ(BigDecimal value) { return new DecimalValue(new BigDecimal(value.toString(), MathContext.DECIMAL128) .setScale(1, BigDecimal.ROUND_HALF_EVEN)); } }
You are right. Avoiding creating many instances of DataFormat is a big optimization.
private long convertTimestampToMillis(String timestamp) throws ParseException { SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); return dateFormat.parse(timestamp).getTime(); }
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private long convertTimestampToMillis(String timestamp) throws ParseException { return dateFormat.parse(timestamp).getTime(); }
class BeamSqlLineIT implements Serializable { @Rule public transient TestPubsub eventsTopic = TestPubsub.create(); private static String project = TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject(); private static String createPubsubTableStatement; private static String setProject; private static PubsubMessageJSONStringConstructor constructor; private static final String publicTopic = "projects/pubsub-public-data/topics/taxirides-realtime"; @BeforeClass public static void setUp() { setProject = String.format("SET project = '%s';", project); createPubsubTableStatement = "CREATE TABLE taxi_rides (\n" + " event_timestamp TIMESTAMP,\n" + " attributes MAP<VARCHAR, VARCHAR>,\n" + " payload ROW<\n" + " ride_id VARCHAR,\n" + " point_idx INT,\n" + " latitude DOUBLE,\n" + " longitude DOUBLE,\n" + " meter_reading DOUBLE,\n" + " meter_increment DOUBLE,\n" + " ride_status VARCHAR,\n" + " passenger_count TINYINT>)\n" + " TYPE pubsub \n" + " LOCATION '%s'\n" + " TBLPROPERTIES '{\"timestampAttributeKey\": \"ts\"}';"; constructor = new PubsubMessageJSONStringConstructor( "ride_id", "point_idx", "latitude", "longitude", "meter_reading", "meter_increment", "ride_status", "passenger_count"); } @Test public void testSelectFromPubsub() throws Exception { ExecutorService pool = Executors.newFixedThreadPool(1); Future<List<List<String>>> expectedResult = pool.submit( (Callable) () -> { String[] args = buildArgs( String.format(createPubsubTableStatement, eventsTopic.topicPath()), setProject, "SELECT event_timestamp, taxi_rides.payload.ride_status, taxi_rides.payload.latitude, " + "taxi_rides.payload.longitude from taxi_rides LIMIT 3;"); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); BeamSqlLine.runSqlLine(args, null, outputStream, null); return toLines(outputStream); }); checkIfTopicExists(eventsTopic, 10); List<PubsubMessage> messages = ImmutableList.of( message( convertTimestampToMillis("2018-07-01 21:25:20"), constructor.construct("id1", 1, 
40.702, -74.001, 1000, 10, "enroute", 2)), message( convertTimestampToMillis("2018-07-01 21:26:06"), constructor.construct("id2", 2, 40.703, -74.002, 1000, 10, "enroute", 4)), message( convertTimestampToMillis("2018-07-02 13:26:06"), constructor.construct("id3", 3, 30.0, -72.32324, 2000, 20, "enroute", 7))); eventsTopic.publish(messages); assertThat( Arrays.asList( Arrays.asList("2018-07-01 21:25:20", "enroute", "40.702", "-74.001"), Arrays.asList("2018-07-01 21:26:06", "enroute", "40.703", "-74.002"), Arrays.asList("2018-07-02 13:26:06", "enroute", "30.0", "-72.32324")), everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray()))); } @Test public void testFilterForSouthManhattan() throws Exception { ExecutorService pool = Executors.newFixedThreadPool(1); Future<List<List<String>>> expectedResult = pool.submit( (Callable) () -> { String[] args = buildArgs( String.format(createPubsubTableStatement, eventsTopic.topicPath()), setProject, "SELECT event_timestamp, taxi_rides.payload.ride_status, \n" + "taxi_rides.payload.latitude, taxi_rides.payload.longitude from taxi_rides\n" + " WHERE taxi_rides.payload.longitude > -74.747\n" + " AND taxi_rides.payload.longitude < -73.969\n" + " AND taxi_rides.payload.latitude > 40.699\n" + " AND taxi_rides.payload.latitude < 40.720 LIMIT 2;"); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); BeamSqlLine.runSqlLine(args, null, outputStream, null); return toLines(outputStream); }); checkIfTopicExists(eventsTopic, 10); List<PubsubMessage> messages = ImmutableList.of( message( convertTimestampToMillis("2018-07-01 21:25:20"), constructor.construct("id1", 1, 40.701, -74.001, 1000, 10, "enroute", 2)), message( convertTimestampToMillis("2018-07-01 21:26:06"), constructor.construct("id2", 2, 40.702, -74.002, 1000, 10, "enroute", 4)), message( convertTimestampToMillis("2018-07-02 13:26:06"), constructor.construct("id3", 3, 30, -72.32324, 2000, 20, "enroute", 7)), message( convertTimestampToMillis("2018-07-02 
14:28:22"), constructor.construct("id4", 4, 34, -73.32324, 2000, 20, "enroute", 8))); eventsTopic.publish(messages); assertThat( Arrays.asList( Arrays.asList("2018-07-01 21:25:20", "enroute", "40.701", "-74.001"), Arrays.asList("2018-07-01 21:26:06", "enroute", "40.702", "-74.002")), everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray()))); } @Test public void testFixedWindow() throws Exception { ExecutorService pool = Executors.newFixedThreadPool(1); Future<List<List<String>>> expectedResult = pool.submit( (Callable) () -> { String[] args = buildArgs( String.format(createPubsubTableStatement, publicTopic), setProject, "WITH geo_cells AS (\n" + " SELECT FLOOR(taxi_rides.payload.latitude / 0.05) * 0.05 AS reduced_lat,\n" + " FLOOR(taxi_rides.payload.longitude / 0.05) * 0.05 AS reduced_lon,\n" + " taxi_rides.event_timestamp\n" + " FROM taxi_rides)\n" + " SELECT COUNT(*) as num_events,\n" + " geo_cells.reduced_lat,\n" + " geo_cells.reduced_lon, \n" + " TUMBLE_START(geo_cells.event_timestamp, INTERVAL '1' SECOND)\n" + " FROM geo_cells \n" + " GROUP BY geo_cells.reduced_lat,\n" + " geo_cells.reduced_lon,\n" + " TUMBLE(geo_cells.event_timestamp, INTERVAL '1' SECOND)\n" + " LIMIT 2;"); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); BeamSqlLine.runSqlLine(args, null, outputStream, null); return toLines(outputStream); }); assertTrue(expectedResult.get().size() == 6); } @Test public void testSlidingWindow() throws Exception { ExecutorService pool = Executors.newFixedThreadPool(1); Future<List<List<String>>> expectedResult = pool.submit( (Callable) () -> { String[] args = buildArgs( String.format(createPubsubTableStatement, publicTopic), setProject, "SELECT COUNT(*) AS num_events,\n" + " SUM(taxi_rides.payload.meter_increment) as revenue,\n" + " HOP_END(\n" + " taxi_rides.event_timestamp, \n" + " INTERVAL '1' SECOND, \n" + " INTERVAL '2' SECOND) as minute_end\n" + " FROM taxi_rides\n" + " GROUP BY HOP(\n" + " 
taxi_rides.event_timestamp,\n" + " INTERVAL '1' SECOND, \n" + " INTERVAL '2' SECOND) LIMIT 2"); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); BeamSqlLine.runSqlLine(args, null, outputStream, null); return toLines(outputStream); }); assertTrue(expectedResult.get().size() == 6); } /** * Check if topic exists {@param attempts} times. There is 1 sec wait time between two attempts. * * @param attempts number of attempts. Has to be a value > 0 */ private void checkIfTopicExists(TestPubsub eventsTopic, int attempts) throws InterruptedException, IllegalArgumentException, IOException { if (attempts <= 0) { throw new IllegalArgumentException( String.format("Set %d attempts, which should be > 0", attempts)); } while (attempts > 0) { List<SubscriptionPath> topics = eventsTopic.listSubscriptions( projectPathFromPath(String.format("projects/%s", project)), eventsTopic.topicPath()); if (topics.size() > 0) { return; } attempts--; Thread.sleep(1000); } throw new RuntimeException( String.format("Failed to create subscription within %d attempts", attempts)); } private PubsubMessage message(long timestampInMillis, String jsonPayload) { return new PubsubMessage( jsonPayload.getBytes(UTF_8), ImmutableMap.of("ts", String.valueOf(timestampInMillis))); } private static class PubsubMessageJSONStringConstructor { private List<String> messageSchema; public PubsubMessageJSONStringConstructor(String... schemas) { ImmutableList.Builder<String> builder = ImmutableList.<String>builder(); for (String schema : schemas) { builder.add(schema); } messageSchema = builder.build(); } public String construct(Object... 
values) throws IllegalArgumentException { if (values.length != messageSchema.size()) { throw new IllegalArgumentException( String.format( "length of values %d does not match " + "with size of schema %d", values.length, messageSchema.size())); } JSONObject jsonObject = new JSONObject(); for (int i = 0; i < values.length; i++) { jsonObject.put(messageSchema.get(i), values[i]); } return jsonObject.toString(); } } }
class BeamSqlLineIT implements Serializable { @Rule public transient TestPubsub eventsTopic = TestPubsub.create(); private static String project; private static String createPubsubTableStatement; private static String setProject; private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); private ExecutorService pool; @BeforeClass public static void setUpClass() { project = TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject(); setProject = String.format("SET project = '%s';", project); createPubsubTableStatement = "CREATE TABLE taxi_rides (\n" + " event_timestamp TIMESTAMP,\n" + " attributes MAP<VARCHAR, VARCHAR>,\n" + " payload ROW<\n" + " ride_id VARCHAR,\n" + " point_idx INT,\n" + " latitude DOUBLE,\n" + " longitude DOUBLE,\n" + " meter_reading DOUBLE,\n" + " meter_increment DOUBLE,\n" + " ride_status VARCHAR,\n" + " passenger_count TINYINT>)\n" + " TYPE pubsub \n" + " LOCATION '%s'\n" + " TBLPROPERTIES '{\"timestampAttributeKey\": \"ts\"}';"; dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); } @Before public void setUp() { pool = Executors.newFixedThreadPool(1); } @After public void tearDown() { pool.shutdown(); } @Test public void testSelectFromPubsub() throws Exception { String[] args = buildArgs( String.format(createPubsubTableStatement, eventsTopic.topicPath()), setProject, "SELECT event_timestamp, taxi_rides.payload.ride_status, taxi_rides.payload.latitude, " + "taxi_rides.payload.longitude from taxi_rides LIMIT 3;"); Future<List<List<String>>> expectedResult = runQueryInBackground(args); eventsTopic.checkIfAnySubscriptionExists(project, Duration.standardMinutes(1)); List<PubsubMessage> messages = ImmutableList.of( message( convertTimestampToMillis("2018-07-01 21:25:20"), taxiRideJSON("id1", 1, 40.702, -74.001, 1000, 10, "enroute", 2)), message( convertTimestampToMillis("2018-07-01 21:26:06"), taxiRideJSON("id2", 2, 40.703, -74.002, 1000, 10, "enroute", 4)), message( 
convertTimestampToMillis("2018-07-02 13:26:06"), taxiRideJSON("id3", 3, 30.0, -72.32324, 2000, 20, "enroute", 7))); eventsTopic.publish(messages); assertThat( Arrays.asList( Arrays.asList("2018-07-01 21:25:20", "enroute", "40.702", "-74.001"), Arrays.asList("2018-07-01 21:26:06", "enroute", "40.703", "-74.002"), Arrays.asList("2018-07-02 13:26:06", "enroute", "30.0", "-72.32324")), everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray()))); } @Test public void testFilterForSouthManhattan() throws Exception { String[] args = buildArgs( String.format(createPubsubTableStatement, eventsTopic.topicPath()), setProject, "SELECT event_timestamp, taxi_rides.payload.ride_status, \n" + "taxi_rides.payload.latitude, taxi_rides.payload.longitude from taxi_rides\n" + " WHERE taxi_rides.payload.longitude > -74.747\n" + " AND taxi_rides.payload.longitude < -73.969\n" + " AND taxi_rides.payload.latitude > 40.699\n" + " AND taxi_rides.payload.latitude < 40.720 LIMIT 2;"); Future<List<List<String>>> expectedResult = runQueryInBackground(args); eventsTopic.checkIfAnySubscriptionExists(project, Duration.standardMinutes(1)); List<PubsubMessage> messages = ImmutableList.of( message( convertTimestampToMillis("2018-07-01 21:25:20"), taxiRideJSON("id1", 1, 40.701, -74.001, 1000, 10, "enroute", 2)), message( convertTimestampToMillis("2018-07-01 21:26:06"), taxiRideJSON("id2", 2, 40.702, -74.002, 1000, 10, "enroute", 4)), message( convertTimestampToMillis("2018-07-02 13:26:06"), taxiRideJSON("id3", 3, 30, -72.32324, 2000, 20, "enroute", 7)), message( convertTimestampToMillis("2018-07-02 14:28:22"), taxiRideJSON("id4", 4, 34, -73.32324, 2000, 20, "enroute", 8))); eventsTopic.publish(messages); assertThat( Arrays.asList( Arrays.asList("2018-07-01 21:25:20", "enroute", "40.701", "-74.001"), Arrays.asList("2018-07-01 21:26:06", "enroute", "40.702", "-74.002")), everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray()))); } private String taxiRideJSON( String 
rideId, int pointIdex, double latitude, double longitude, int meterReading, int meterIncrement, String rideStatus, int passengerCount) { ObjectMapper mapper = new ObjectMapper(); ObjectNode objectNode = mapper.createObjectNode(); objectNode.put("ride_id", rideId); objectNode.put("point_idx", pointIdex); objectNode.put("latitude", latitude); objectNode.put("longitude", longitude); objectNode.put("meter_reading", meterReading); objectNode.put("meter_increment", meterIncrement); objectNode.put("ride_status", rideStatus); objectNode.put("passenger_count", passengerCount); return objectNode.toString(); } private Future<List<List<String>>> runQueryInBackground(String[] args) { return pool.submit( (Callable) () -> { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); BeamSqlLine.runSqlLine(args, null, outputStream, null); return toLines(outputStream); }); } private PubsubMessage message(long timestampInMillis, String jsonPayload) { return new PubsubMessage( jsonPayload.getBytes(UTF_8), ImmutableMap.of("ts", String.valueOf(timestampInMillis))); } }
This is required because we dont have access to the created `LeaderElection`? Shouldn't this use `TestingLeaderElectionService#triggerContenderCleanup`?
public static void teardownClass() throws Exception { haService.closeAndCleanupAllData(); if (rpcService != null) { RpcUtils.terminateRpcService(rpcService); } }
haService.closeAndCleanupAllData();
public static void teardownClass() throws Exception { if (rpcService != null) { RpcUtils.terminateRpcService(rpcService); } }
class ResourceManagerServiceImplTest extends TestLogger { private static final Time TIMEOUT = Time.seconds(10L); private static final Time FAST_TIMEOUT = Time.milliseconds(50L); private static final HeartbeatServices heartbeatServices = new TestingHeartbeatServices(); private static final DelegationTokenManager delegationTokenManager = new NoOpDelegationTokenManager(); private static final ClusterInformation clusterInformation = new ClusterInformation("localhost", 1234); private static final MetricRegistry metricRegistry = TestingMetricRegistry.builder().build(); private static TestingRpcService rpcService; private static TestingHighAvailabilityServices haService; private static TestingFatalErrorHandler fatalErrorHandler; private TestingResourceManagerFactory.Builder rmFactoryBuilder; private TestingLeaderElectionService leaderElectionService; private LeaderElection leaderElection; private ResourceManagerServiceImpl resourceManagerService; @BeforeClass public static void setupClass() { rpcService = new TestingRpcService(); haService = new TestingHighAvailabilityServices(); fatalErrorHandler = new TestingFatalErrorHandler(); } @Before public void setup() throws Exception { fatalErrorHandler.clearError(); rmFactoryBuilder = new TestingResourceManagerFactory.Builder(); leaderElectionService = new TestingLeaderElectionService(); haService.setResourceManagerLeaderElectionService(leaderElectionService); } @After public void teardown() throws Exception { if (resourceManagerService != null) { resourceManagerService.close(); } if (fatalErrorHandler.hasExceptionOccurred()) { fatalErrorHandler.rethrowError(); } } @AfterClass private void createAndStartResourceManager() throws Exception { createResourceManager(); resourceManagerService.start(); } private void createResourceManager() throws Exception { final TestingResourceManagerFactory rmFactory = rmFactoryBuilder.build(); resourceManagerService = ResourceManagerServiceImpl.create( rmFactory, new Configuration(), 
ResourceID.generate(), rpcService, haService, heartbeatServices, delegationTokenManager, fatalErrorHandler, clusterInformation, null, metricRegistry, "localhost", ForkJoinPool.commonPool()); } @Test public void grantLeadership_startRmAndConfirmLeaderSession() throws Exception { final UUID leaderSessionId = UUID.randomUUID(); final CompletableFuture<UUID> startRmFuture = new CompletableFuture<>(); rmFactoryBuilder.setInitializeConsumer(startRmFuture::complete); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId); assertThat(startRmFuture.get(TIMEOUT.getSize(), TIMEOUT.getUnit()), is(leaderSessionId)); assertThat( leaderElectionService .getConfirmationFuture() .get(TIMEOUT.getSize(), TIMEOUT.getUnit()) .getLeaderSessionId(), is(leaderSessionId)); } @Test public void grantLeadership_confirmLeaderSessionAfterRmStarted() throws Exception { final UUID leaderSessionId = UUID.randomUUID(); final CompletableFuture<Void> finishRmInitializationFuture = new CompletableFuture<>(); rmFactoryBuilder.setInitializeConsumer( (ignore) -> blockOnFuture(finishRmInitializationFuture)); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId); assertNotComplete(leaderElectionService.getConfirmationFuture()); finishRmInitializationFuture.complete(null); assertThat( leaderElectionService .getConfirmationFuture() .get(TIMEOUT.getSize(), TIMEOUT.getUnit()) .getLeaderSessionId(), is(leaderSessionId)); } @Test public void grantLeadership_withExistingLeader_stopExistLeader() throws Exception { final UUID leaderSessionId1 = UUID.randomUUID(); final UUID leaderSessionId2 = UUID.randomUUID(); final CompletableFuture<UUID> startRmFuture1 = new CompletableFuture<>(); final CompletableFuture<UUID> startRmFuture2 = new CompletableFuture<>(); final CompletableFuture<UUID> terminateRmFuture = new CompletableFuture<>(); rmFactoryBuilder .setInitializeConsumer( uuid -> { if (!startRmFuture1.isDone()) { startRmFuture1.complete(uuid); } else { 
startRmFuture2.complete(uuid); } }) .setTerminateConsumer(terminateRmFuture::complete); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId1); assertRmStarted(); leaderElectionService.isLeader(leaderSessionId2); assertThat( terminateRmFuture.get(TIMEOUT.getSize(), TIMEOUT.getUnit()), is(leaderSessionId1)); assertThat(startRmFuture2.get(TIMEOUT.getSize(), TIMEOUT.getUnit()), is(leaderSessionId2)); assertThat( leaderElectionService .getConfirmationFuture() .get(TIMEOUT.getSize(), TIMEOUT.getUnit()) .getLeaderSessionId(), is(leaderSessionId2)); } @Test public void grantLeadership_withExistingLeader_waitTerminationOfExistingLeader() throws Exception { final UUID leaderSessionId1 = UUID.randomUUID(); final UUID leaderSessionId2 = UUID.randomUUID(); final CompletableFuture<UUID> startRmFuture1 = new CompletableFuture<>(); final CompletableFuture<UUID> startRmFuture2 = new CompletableFuture<>(); final CompletableFuture<Void> finishRmTerminationFuture = new CompletableFuture<>(); rmFactoryBuilder .setInitializeConsumer( uuid -> { if (!startRmFuture1.isDone()) { startRmFuture1.complete(uuid); } else { startRmFuture2.complete(uuid); } }) .setTerminateConsumer((ignore) -> blockOnFuture(finishRmTerminationFuture)); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId1); assertRmStarted(); leaderElectionService.isLeader(leaderSessionId2); assertNotComplete(startRmFuture2); finishRmTerminationFuture.complete(null); assertThat(startRmFuture2.get(TIMEOUT.getSize(), TIMEOUT.getUnit()), is(leaderSessionId2)); assertThat( leaderElectionService .getConfirmationFuture() .get(TIMEOUT.getSize(), TIMEOUT.getUnit()) .getLeaderSessionId(), is(leaderSessionId2)); } @Test public void grantLeadership_notStarted_doesNotStartNewRm() throws Exception { final CompletableFuture<UUID> startRmFuture = new CompletableFuture<>(); rmFactoryBuilder.setInitializeConsumer(startRmFuture::complete); createResourceManager(); 
leaderElectionService.isLeader(UUID.randomUUID()); assertNotComplete(startRmFuture); assertNotComplete(leaderElectionService.getConfirmationFuture()); } @Test public void grantLeadership_stopped_doesNotStartNewRm() throws Exception { final CompletableFuture<UUID> startRmFuture = new CompletableFuture<>(); rmFactoryBuilder.setInitializeConsumer(startRmFuture::complete); createAndStartResourceManager(); resourceManagerService.close(); leaderElectionService.isLeader(UUID.randomUUID()); assertNotComplete(startRmFuture); assertNotComplete(leaderElectionService.getConfirmationFuture()); } @Test public void revokeLeadership_stopExistLeader() throws Exception { final UUID leaderSessionId = UUID.randomUUID(); final CompletableFuture<UUID> terminateRmFuture = new CompletableFuture<>(); rmFactoryBuilder.setTerminateConsumer(terminateRmFuture::complete); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId); assertRmStarted(); leaderElectionService.notLeader(); assertThat( terminateRmFuture.get(TIMEOUT.getSize(), TIMEOUT.getUnit()), is(leaderSessionId)); } @Test public void revokeLeadership_terminateService_multiLeaderSessionNotSupported() throws Exception { rmFactoryBuilder.setSupportMultiLeaderSession(false); createAndStartResourceManager(); leaderElectionService.isLeader(UUID.randomUUID()); assertRmStarted(); leaderElectionService.notLeader(); resourceManagerService.getTerminationFuture().get(TIMEOUT.getSize(), TIMEOUT.getUnit()); } @Test public void leaderRmTerminated_terminateService() throws Exception { final UUID leaderSessionId = UUID.randomUUID(); final CompletableFuture<Void> rmTerminationFuture = new CompletableFuture<>(); rmFactoryBuilder.setGetTerminationFutureFunction((ignore1, ignore2) -> rmTerminationFuture); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId); assertRmStarted(); rmTerminationFuture.complete(null); resourceManagerService.getTerminationFuture().get(TIMEOUT.getSize(), TIMEOUT.getUnit()); } 
@Test public void nonLeaderRmTerminated_doseNotTerminateService() throws Exception { final UUID leaderSessionId = UUID.randomUUID(); final CompletableFuture<UUID> terminateRmFuture = new CompletableFuture<>(); final CompletableFuture<Void> rmTerminationFuture = new CompletableFuture<>(); rmFactoryBuilder .setTerminateConsumer(terminateRmFuture::complete) .setGetTerminationFutureFunction((ignore1, ignore2) -> rmTerminationFuture); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId); assertRmStarted(); leaderElectionService.notLeader(); assertThat( terminateRmFuture.get(TIMEOUT.getSize(), TIMEOUT.getUnit()), is(leaderSessionId)); rmTerminationFuture.complete(null); assertNotComplete(resourceManagerService.getTerminationFuture()); } @Test public void closeService_stopRmAndLeaderElection() throws Exception { final CompletableFuture<UUID> terminateRmFuture = new CompletableFuture<>(); rmFactoryBuilder.setTerminateConsumer(terminateRmFuture::complete); createAndStartResourceManager(); leaderElectionService.isLeader(UUID.randomUUID()); assertRmStarted(); assertFalse(leaderElectionService.isStopped()); resourceManagerService.close(); assertTrue(terminateRmFuture.isDone()); assertTrue(leaderElectionService.isStopped()); } @Test public void closeService_futureCompleteAfterRmTerminated() throws Exception { final CompletableFuture<Void> finishRmTerminationFuture = new CompletableFuture<>(); rmFactoryBuilder.setTerminateConsumer((ignore) -> blockOnFuture(finishRmTerminationFuture)); createAndStartResourceManager(); leaderElectionService.isLeader(UUID.randomUUID()); assertRmStarted(); final CompletableFuture<Void> closeServiceFuture = resourceManagerService.closeAsync(); assertNotComplete(closeServiceFuture); finishRmTerminationFuture.complete(null); closeServiceFuture.get(TIMEOUT.getSize(), TIMEOUT.getUnit()); } @Test public void deregisterApplication_leaderRmNotStarted() throws Exception { final CompletableFuture<Void> startRmInitializationFuture = 
new CompletableFuture<>(); final CompletableFuture<Void> finishRmInitializationFuture = new CompletableFuture<>(); rmFactoryBuilder.setInitializeConsumer( (ignore) -> { startRmInitializationFuture.complete(null); blockOnFuture(finishRmInitializationFuture); }); createAndStartResourceManager(); leaderElectionService.isLeader(UUID.randomUUID()); startRmInitializationFuture.get(TIMEOUT.getSize(), TIMEOUT.getUnit()); final CompletableFuture<Void> deregisterApplicationFuture = resourceManagerService.deregisterApplication(ApplicationStatus.CANCELED, null); assertNotComplete(deregisterApplicationFuture); finishRmInitializationFuture.complete(null); deregisterApplicationFuture.get(TIMEOUT.getSize(), TIMEOUT.getUnit()); } @Test public void deregisterApplication_noLeaderRm() throws Exception { createAndStartResourceManager(); final CompletableFuture<Void> deregisterApplicationFuture = resourceManagerService.deregisterApplication(ApplicationStatus.CANCELED, null); deregisterApplicationFuture.get(TIMEOUT.getSize(), TIMEOUT.getUnit()); } @Test public void grantAndRevokeLeadership_verifyMetrics() throws Exception { final Set<String> registeredMetrics = Collections.newSetFromMap(new ConcurrentHashMap<>()); TestingMetricRegistry metricRegistry = TestingMetricRegistry.builder() .setRegisterConsumer((a, b, c) -> registeredMetrics.add(b)) .setUnregisterConsumer((a, b, c) -> registeredMetrics.remove(b)) .build(); final TestingResourceManagerFactory rmFactory = rmFactoryBuilder.build(); resourceManagerService = ResourceManagerServiceImpl.create( rmFactory, new Configuration(), ResourceID.generate(), rpcService, haService, heartbeatServices, delegationTokenManager, fatalErrorHandler, clusterInformation, null, metricRegistry, "localhost", ForkJoinPool.commonPool()); resourceManagerService.start(); Assert.assertEquals(0, registeredMetrics.size()); leaderElectionService.isLeader(UUID.randomUUID()); assertRmStarted(); Set<String> expectedMetrics = Sets.set( 
MetricNames.NUM_REGISTERED_TASK_MANAGERS, MetricNames.TASK_SLOTS_TOTAL, MetricNames.TASK_SLOTS_AVAILABLE); Assert.assertTrue( "Expected RM to register leader metrics", registeredMetrics.containsAll(expectedMetrics)); revokeLeadership(); Set<String> intersection = new HashSet<>(registeredMetrics); intersection.retainAll(expectedMetrics); Assert.assertTrue("Expected RM to unregister leader metrics", intersection.isEmpty()); leaderElectionService.isLeader(UUID.randomUUID()); assertRmStarted(); Assert.assertTrue( "Expected RM to re-register leader metrics", registeredMetrics.containsAll(expectedMetrics)); } private static void blockOnFuture(CompletableFuture<?> future) { try { future.get(); } catch (Exception e) { e.printStackTrace(); fail(); } } private static void assertNotComplete(CompletableFuture<?> future) throws Exception { try { future.get(FAST_TIMEOUT.getSize(), FAST_TIMEOUT.getUnit()); fail(); } catch (TimeoutException e) { } } private void assertRmStarted() throws Exception { leaderElectionService.getConfirmationFuture().get(TIMEOUT.getSize(), TIMEOUT.getUnit()); } private void revokeLeadership() { ResourceManager<?> leaderResourceManager = resourceManagerService.getLeaderResourceManager(); leaderElectionService.notLeader(); blockOnFuture(leaderResourceManager.getTerminationFuture()); } }
class ResourceManagerServiceImplTest extends TestLogger { private static final HeartbeatServices heartbeatServices = new TestingHeartbeatServices(); private static final DelegationTokenManager delegationTokenManager = new NoOpDelegationTokenManager(); private static final ClusterInformation clusterInformation = new ClusterInformation("localhost", 1234); private static final MetricRegistry metricRegistry = TestingMetricRegistry.builder().build(); private static TestingRpcService rpcService; private static TestingHighAvailabilityServices haService; private static TestingFatalErrorHandler fatalErrorHandler; private TestingResourceManagerFactory.Builder rmFactoryBuilder; private TestingLeaderElectionService leaderElectionService; private ResourceManagerServiceImpl resourceManagerService; @BeforeClass public static void setupClass() { rpcService = new TestingRpcService(); haService = new TestingHighAvailabilityServices(); fatalErrorHandler = new TestingFatalErrorHandler(); } @Before public void setup() throws Exception { fatalErrorHandler.clearError(); rmFactoryBuilder = new TestingResourceManagerFactory.Builder(); leaderElectionService = new TestingLeaderElectionService(); haService.setResourceManagerLeaderElectionService(leaderElectionService); } @After public void teardown() throws Exception { if (resourceManagerService != null) { resourceManagerService.close(); } if (fatalErrorHandler.hasExceptionOccurred()) { fatalErrorHandler.rethrowError(); } } @AfterClass private void createAndStartResourceManager() throws Exception { createResourceManager(); resourceManagerService.start(); } private void createResourceManager() throws Exception { final TestingResourceManagerFactory rmFactory = rmFactoryBuilder.build(); resourceManagerService = ResourceManagerServiceImpl.create( rmFactory, new Configuration(), ResourceID.generate(), rpcService, haService, heartbeatServices, delegationTokenManager, fatalErrorHandler, clusterInformation, null, metricRegistry, "localhost", 
ForkJoinPool.commonPool()); } @Test public void grantLeadership_startRmAndConfirmLeaderSession() throws Exception { final UUID leaderSessionId = UUID.randomUUID(); final CompletableFuture<UUID> startRmFuture = new CompletableFuture<>(); rmFactoryBuilder.setInitializeConsumer(startRmFuture::complete); createAndStartResourceManager(); final CompletableFuture<LeaderInformation> confirmedLeaderInformation = leaderElectionService.isLeader(leaderSessionId); assertThat(startRmFuture.get(), is(leaderSessionId)); assertThat(confirmedLeaderInformation.get().getLeaderSessionID(), is(leaderSessionId)); } @Test public void grantLeadership_confirmLeaderSessionAfterRmStarted() throws Exception { final UUID leaderSessionId = UUID.randomUUID(); final CompletableFuture<Void> finishRmInitializationFuture = new CompletableFuture<>(); rmFactoryBuilder.setInitializeConsumer( (ignore) -> blockOnFuture(finishRmInitializationFuture)); createAndStartResourceManager(); final CompletableFuture<LeaderInformation> confirmedLeaderInformation = leaderElectionService.isLeader(leaderSessionId); assertNotComplete(confirmedLeaderInformation); finishRmInitializationFuture.complete(null); assertThat(confirmedLeaderInformation.get().getLeaderSessionID(), is(leaderSessionId)); } @Test public void grantLeadership_withExistingLeader_stopExistLeader() throws Exception { final UUID leaderSessionId1 = UUID.randomUUID(); final UUID leaderSessionId2 = UUID.randomUUID(); final CompletableFuture<UUID> startRmFuture1 = new CompletableFuture<>(); final CompletableFuture<UUID> startRmFuture2 = new CompletableFuture<>(); final CompletableFuture<UUID> terminateRmFuture = new CompletableFuture<>(); rmFactoryBuilder .setInitializeConsumer( uuid -> { if (!startRmFuture1.isDone()) { startRmFuture1.complete(uuid); } else { startRmFuture2.complete(uuid); } }) .setTerminateConsumer(terminateRmFuture::complete); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId1).join(); final 
CompletableFuture<LeaderInformation> confirmedLeaderInformation = leaderElectionService.isLeader(leaderSessionId2); assertThat(terminateRmFuture.get(), is(leaderSessionId1)); assertThat(startRmFuture2.get(), is(leaderSessionId2)); assertThat(confirmedLeaderInformation.get().getLeaderSessionID(), is(leaderSessionId2)); } @Test public void grantLeadership_withExistingLeader_waitTerminationOfExistingLeader() throws Exception { final UUID leaderSessionId1 = UUID.randomUUID(); final UUID leaderSessionId2 = UUID.randomUUID(); final CompletableFuture<UUID> startRmFuture1 = new CompletableFuture<>(); final CompletableFuture<UUID> startRmFuture2 = new CompletableFuture<>(); final CompletableFuture<Void> finishRmTerminationFuture = new CompletableFuture<>(); rmFactoryBuilder .setInitializeConsumer( uuid -> { if (!startRmFuture1.isDone()) { startRmFuture1.complete(uuid); } else { startRmFuture2.complete(uuid); } }) .setTerminateConsumer((ignore) -> blockOnFuture(finishRmTerminationFuture)); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId1).join(); final CompletableFuture<LeaderInformation> confirmedLeaderInformation = leaderElectionService.isLeader(leaderSessionId2); assertNotComplete(startRmFuture2); finishRmTerminationFuture.complete(null); assertThat(startRmFuture2.get(), is(leaderSessionId2)); assertThat(confirmedLeaderInformation.get().getLeaderSessionID(), is(leaderSessionId2)); } @Test public void grantLeadership_notStarted_doesNotStartNewRm() throws Exception { final CompletableFuture<UUID> startRmFuture = new CompletableFuture<>(); rmFactoryBuilder.setInitializeConsumer(startRmFuture::complete); createResourceManager(); final CompletableFuture<LeaderInformation> confirmedLeaderInformation = leaderElectionService.isLeader(UUID.randomUUID()); assertNotComplete(startRmFuture); assertNotComplete(confirmedLeaderInformation); } @Test public void grantLeadership_stopped_doesNotStartNewRm() throws Exception { final CompletableFuture<UUID> 
startRmFuture = new CompletableFuture<>(); rmFactoryBuilder.setInitializeConsumer(startRmFuture::complete); createAndStartResourceManager(); resourceManagerService.close(); final CompletableFuture<LeaderInformation> confirmedLeaderInformation = leaderElectionService.isLeader(UUID.randomUUID()); assertNotComplete(startRmFuture); assertNotComplete(confirmedLeaderInformation); } @Test public void revokeLeadership_stopExistLeader() throws Exception { final UUID leaderSessionId = UUID.randomUUID(); final CompletableFuture<UUID> terminateRmFuture = new CompletableFuture<>(); rmFactoryBuilder.setTerminateConsumer(terminateRmFuture::complete); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId).join(); leaderElectionService.notLeader(); assertThat(terminateRmFuture.get(), is(leaderSessionId)); } @Test public void revokeLeadership_terminateService_multiLeaderSessionNotSupported() throws Exception { rmFactoryBuilder.setSupportMultiLeaderSession(false); createAndStartResourceManager(); leaderElectionService.isLeader(UUID.randomUUID()).join(); leaderElectionService.notLeader(); resourceManagerService.getTerminationFuture().get(); } @Test public void leaderRmTerminated_terminateService() throws Exception { final UUID leaderSessionId = UUID.randomUUID(); final CompletableFuture<Void> rmTerminationFuture = new CompletableFuture<>(); rmFactoryBuilder.setGetTerminationFutureFunction((ignore1, ignore2) -> rmTerminationFuture); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId).join(); rmTerminationFuture.complete(null); resourceManagerService.getTerminationFuture().get(); } @Test public void nonLeaderRmTerminated_doseNotTerminateService() throws Exception { final UUID leaderSessionId = UUID.randomUUID(); final CompletableFuture<UUID> terminateRmFuture = new CompletableFuture<>(); final CompletableFuture<Void> rmTerminationFuture = new CompletableFuture<>(); rmFactoryBuilder .setTerminateConsumer(terminateRmFuture::complete) 
.setGetTerminationFutureFunction((ignore1, ignore2) -> rmTerminationFuture); createAndStartResourceManager(); leaderElectionService.isLeader(leaderSessionId).join(); leaderElectionService.notLeader(); assertThat(terminateRmFuture.get(), is(leaderSessionId)); rmTerminationFuture.complete(null); assertNotComplete(resourceManagerService.getTerminationFuture()); } @Test public void closeService_stopRmAndLeaderElection() throws Exception { final CompletableFuture<UUID> terminateRmFuture = new CompletableFuture<>(); rmFactoryBuilder.setTerminateConsumer(terminateRmFuture::complete); createAndStartResourceManager(); leaderElectionService.isLeader(UUID.randomUUID()).join(); assertFalse(leaderElectionService.isStopped()); resourceManagerService.close(); assertTrue(terminateRmFuture.isDone()); assertTrue(leaderElectionService.isStopped()); } @Test public void closeService_futureCompleteAfterRmTerminated() throws Exception { final CompletableFuture<Void> finishRmTerminationFuture = new CompletableFuture<>(); rmFactoryBuilder.setTerminateConsumer((ignore) -> blockOnFuture(finishRmTerminationFuture)); createAndStartResourceManager(); leaderElectionService.isLeader(UUID.randomUUID()).join(); final CompletableFuture<Void> closeServiceFuture = resourceManagerService.closeAsync(); assertNotComplete(closeServiceFuture); finishRmTerminationFuture.complete(null); closeServiceFuture.get(); } @Test public void deregisterApplication_leaderRmNotStarted() throws Exception { final CompletableFuture<Void> startRmInitializationFuture = new CompletableFuture<>(); final CompletableFuture<Void> finishRmInitializationFuture = new CompletableFuture<>(); rmFactoryBuilder.setInitializeConsumer( (ignore) -> { startRmInitializationFuture.complete(null); blockOnFuture(finishRmInitializationFuture); }); createAndStartResourceManager(); leaderElectionService.isLeader(UUID.randomUUID()); startRmInitializationFuture.get(); final CompletableFuture<Void> deregisterApplicationFuture = 
resourceManagerService.deregisterApplication(ApplicationStatus.CANCELED, null); assertNotComplete(deregisterApplicationFuture); finishRmInitializationFuture.complete(null); deregisterApplicationFuture.get(); } @Test public void deregisterApplication_noLeaderRm() throws Exception { createAndStartResourceManager(); final CompletableFuture<Void> deregisterApplicationFuture = resourceManagerService.deregisterApplication(ApplicationStatus.CANCELED, null); deregisterApplicationFuture.get(); } @Test public void grantAndRevokeLeadership_verifyMetrics() throws Exception { final Set<String> registeredMetrics = Collections.newSetFromMap(new ConcurrentHashMap<>()); TestingMetricRegistry metricRegistry = TestingMetricRegistry.builder() .setRegisterConsumer((a, b, c) -> registeredMetrics.add(b)) .setUnregisterConsumer((a, b, c) -> registeredMetrics.remove(b)) .build(); final TestingResourceManagerFactory rmFactory = rmFactoryBuilder.build(); resourceManagerService = ResourceManagerServiceImpl.create( rmFactory, new Configuration(), ResourceID.generate(), rpcService, haService, heartbeatServices, delegationTokenManager, fatalErrorHandler, clusterInformation, null, metricRegistry, "localhost", ForkJoinPool.commonPool()); resourceManagerService.start(); Assert.assertEquals(0, registeredMetrics.size()); leaderElectionService.isLeader(UUID.randomUUID()).join(); Set<String> expectedMetrics = Sets.set( MetricNames.NUM_REGISTERED_TASK_MANAGERS, MetricNames.TASK_SLOTS_TOTAL, MetricNames.TASK_SLOTS_AVAILABLE); Assert.assertTrue( "Expected RM to register leader metrics", registeredMetrics.containsAll(expectedMetrics)); revokeLeadership(); Set<String> intersection = new HashSet<>(registeredMetrics); intersection.retainAll(expectedMetrics); Assert.assertTrue("Expected RM to unregister leader metrics", intersection.isEmpty()); leaderElectionService.isLeader(UUID.randomUUID()).join(); Assert.assertTrue( "Expected RM to re-register leader metrics", 
registeredMetrics.containsAll(expectedMetrics)); } private static void blockOnFuture(CompletableFuture<?> future) { try { future.get(); } catch (Exception e) { e.printStackTrace(); fail(); } } private static void assertNotComplete(CompletableFuture<?> future) throws Exception { try { future.get(50, TimeUnit.MILLISECONDS); fail(); } catch (TimeoutException e) { } } private void revokeLeadership() { ResourceManager<?> leaderResourceManager = resourceManagerService.getLeaderResourceManager(); leaderElectionService.notLeader(); blockOnFuture(leaderResourceManager.getTerminationFuture()); } }
I guess the result is the same, but I always favor Objects.equals so that I don't have to think too much about NPEs (even if in this case it's not an issue :p)
DefaultCodestartFileStrategyHandler getSelectedDefaultStrategy() { for (CodestartFileStrategy codestartFileStrategy : strategies) { if (Objects.equals(codestartFileStrategy.getFilter(), "*")) { if (codestartFileStrategy.getHandler() instanceof DefaultCodestartFileStrategyHandler) { return (DefaultCodestartFileStrategyHandler) codestartFileStrategy.getHandler(); } throw new CodestartDefinitionException( codestartFileStrategy.getHandler().name() + " can't be used as '*' file strategy"); } } return CodestartFileStrategyHandler.DEFAULT_STRATEGY; }
if (Objects.equals(codestartFileStrategy.getFilter(), "*")) {
DefaultCodestartFileStrategyHandler getSelectedDefaultStrategy() { for (CodestartFileStrategy codestartFileStrategy : strategies) { if (Objects.equals(codestartFileStrategy.getFilter(), "*")) { if (codestartFileStrategy.getHandler() instanceof DefaultCodestartFileStrategyHandler) { return (DefaultCodestartFileStrategyHandler) codestartFileStrategy.getHandler(); } throw new CodestartDefinitionException( codestartFileStrategy.getHandler().name() + " can't be used as '*' file strategy"); } } return CodestartFileStrategyHandler.DEFAULT_STRATEGY; }
class CodestartProcessor { private final CodestartResourceLoader resourceLoader; private final String languageName; private final Path targetDirectory; private final List<CodestartFileStrategy> strategies; private final Map<String, Object> data; private final Map<String, List<CodestartFile>> files = new LinkedHashMap<>(); CodestartProcessor(final CodestartResourceLoader resourceLoader, final String languageName, final Path targetDirectory, List<CodestartFileStrategy> strategies, final Map<String, Object> data) { this.resourceLoader = resourceLoader; this.languageName = languageName; this.targetDirectory = targetDirectory; this.strategies = strategies; this.data = data; } void process(final Codestart codestart) throws IOException { addBuiltinData(); resourceLoader.loadResourceAsPath(codestart.getResourceDir(), p -> { final Path baseDir = p.resolve(BASE_LANGUAGE); final Path languageDir = p.resolve(languageName); Stream.of(baseDir, languageDir) .filter(Files::isDirectory) .forEach(dirPath -> processCodestartDir(dirPath, CodestartData.buildCodestartData(codestart, languageName, data))); return null; }); } void addBuiltinData() { data.put("gen-info", Collections.singletonMap("time", System.currentTimeMillis())); } void processCodestartDir(final Path sourceDirectory, final Map<String, Object> finalData) { final Collection<Path> sources = findSources(sourceDirectory); for (Path sourcePath : sources) { final Path relativeSourcePath = sourceDirectory.relativize(sourcePath); if (!Files.isDirectory(sourcePath)) { final String sourceFileName = sourcePath.getFileName().toString(); final Optional<CodestartFileReader> possibleReader = CodestartFileReader.ALL.stream() .filter(r -> r.matches(sourceFileName)) .findFirst(); final CodestartFileReader reader = possibleReader.orElse(CodestartFileReader.DEFAULT); final String targetFileName = reader.cleanFileName(sourceFileName); final Path relativeTargetPath = relativeSourcePath.getNameCount() > 1 ? 
relativeSourcePath.getParent().resolve(targetFileName) : Paths.get(targetFileName); final boolean hasFileStrategyHandler = getStrategy(relativeTargetPath.toString()).isPresent(); try { if (!possibleReader.isPresent() && !hasFileStrategyHandler) { final Path targetPath = targetDirectory.resolve(relativeTargetPath.toString()); getSelectedDefaultStrategy().copyStaticFile(sourcePath, targetPath); continue; } final Optional<String> content = reader.read(sourceDirectory, relativeSourcePath, languageName, finalData); if (content.isPresent()) { final String key = relativeTargetPath.toString(); this.files.putIfAbsent(key, new ArrayList<>()); this.files.get(key).add(new CodestartFile(relativeSourcePath.toString(), content.get())); } } catch (IOException e) { throw new UncheckedIOException(e); } } } } private List<Path> findSources(Path sourceDirectory) { try (final Stream<Path> pathStream = Files.walk(sourceDirectory)) { return pathStream .filter(path -> !path.equals(sourceDirectory)) .collect(Collectors.toList()); } catch (IOException e) { throw new UncheckedIOException(e); } } void checkTargetDir() throws IOException { if (!Files.exists(targetDirectory)) { boolean mkdirStatus = targetDirectory.toFile().mkdirs(); if (!mkdirStatus) { throw new IOException("Failed to create the project directory: " + targetDirectory); } return; } if (!Files.isDirectory(targetDirectory)) { throw new IOException("Project path needs to point to a directory: " + targetDirectory); } final String[] files = targetDirectory.toFile().list(); if (files != null && files.length > 0) { throw new IOException("You can't create a project when the directory is not empty: " + targetDirectory); } } public void writeFiles() throws IOException { for (Map.Entry<String, List<CodestartFile>> e : files.entrySet()) { final String relativePath = e.getKey(); Files.createDirectories(targetDirectory.resolve(relativePath).getParent()); getStrategy(relativePath).orElse(getSelectedDefaultStrategy()) .process(targetDirectory, 
relativePath, e.getValue(), data); } } Optional<CodestartFileStrategyHandler> getStrategy(final String key) { for (CodestartFileStrategy codestartFileStrategy : strategies) { if (codestartFileStrategy.test(key)) { return Optional.of(codestartFileStrategy.getHandler()); } } return Optional.empty(); } static List<CodestartFileStrategy> buildStrategies(Map<String, String> spec) { final List<CodestartFileStrategy> codestartFileStrategyHandlers = new ArrayList<>(spec.size()); for (Map.Entry<String, String> entry : spec.entrySet()) { final CodestartFileStrategyHandler handler = CodestartFileStrategyHandler.BY_NAME.get(entry.getValue()); if (handler == null) { throw new CodestartDefinitionException("ConflictStrategyHandler named '" + entry.getValue() + "' not found. Used with filter '" + entry.getKey() + "'"); } codestartFileStrategyHandlers.add(new CodestartFileStrategy(entry.getKey(), handler)); } return codestartFileStrategyHandlers; } }
class CodestartProcessor { private final CodestartResourceLoader resourceLoader; private final String languageName; private final Path targetDirectory; private final List<CodestartFileStrategy> strategies; private final Map<String, Object> data; private final Map<String, List<CodestartFile>> files = new LinkedHashMap<>(); CodestartProcessor(final CodestartResourceLoader resourceLoader, final String languageName, final Path targetDirectory, List<CodestartFileStrategy> strategies, final Map<String, Object> data) { this.resourceLoader = resourceLoader; this.languageName = languageName; this.targetDirectory = targetDirectory; this.strategies = strategies; this.data = data; } void process(final Codestart codestart) throws IOException { addBuiltinData(); resourceLoader.loadResourceAsPath(codestart.getResourceDir(), p -> { final Path baseDir = p.resolve(BASE_LANGUAGE); final Path languageDir = p.resolve(languageName); Stream.of(baseDir, languageDir) .filter(Files::isDirectory) .forEach(dirPath -> processCodestartDir(dirPath, CodestartData.buildCodestartData(codestart, languageName, data))); return null; }); } void addBuiltinData() { data.put("gen-info", Collections.singletonMap("time", System.currentTimeMillis())); } void processCodestartDir(final Path sourceDirectory, final Map<String, Object> finalData) { final Collection<Path> sources = findSources(sourceDirectory); for (Path sourcePath : sources) { final Path relativeSourcePath = sourceDirectory.relativize(sourcePath); if (!Files.isDirectory(sourcePath)) { final String sourceFileName = sourcePath.getFileName().toString(); final Optional<CodestartFileReader> possibleReader = CodestartFileReader.ALL.stream() .filter(r -> r.matches(sourceFileName)) .findFirst(); final CodestartFileReader reader = possibleReader.orElse(CodestartFileReader.DEFAULT); final String targetFileName = reader.cleanFileName(sourceFileName); final Path relativeTargetPath = relativeSourcePath.getNameCount() > 1 ? 
relativeSourcePath.getParent().resolve(targetFileName) : Paths.get(targetFileName); final boolean hasFileStrategyHandler = getStrategy(relativeTargetPath.toString()).isPresent(); try { if (!possibleReader.isPresent() && !hasFileStrategyHandler) { final Path targetPath = targetDirectory.resolve(relativeTargetPath.toString()); getSelectedDefaultStrategy().copyStaticFile(sourcePath, targetPath); continue; } final Optional<String> content = reader.read(sourceDirectory, relativeSourcePath, languageName, finalData); if (content.isPresent()) { final String key = relativeTargetPath.toString(); this.files.putIfAbsent(key, new ArrayList<>()); this.files.get(key).add(new CodestartFile(relativeSourcePath.toString(), content.get())); } } catch (IOException e) { throw new UncheckedIOException(e); } } } } private List<Path> findSources(Path sourceDirectory) { try (final Stream<Path> pathStream = Files.walk(sourceDirectory)) { return pathStream .filter(path -> !path.equals(sourceDirectory)) .collect(Collectors.toList()); } catch (IOException e) { throw new UncheckedIOException(e); } } void checkTargetDir() throws IOException { if (!Files.exists(targetDirectory)) { boolean mkdirStatus = targetDirectory.toFile().mkdirs(); if (!mkdirStatus) { throw new IOException("Failed to create the project directory: " + targetDirectory); } return; } if (!Files.isDirectory(targetDirectory)) { throw new IOException("Project path needs to point to a directory: " + targetDirectory); } final String[] files = targetDirectory.toFile().list(); if (files != null && files.length > 0) { throw new IOException("You can't create a project when the directory is not empty: " + targetDirectory); } } public void writeFiles() throws IOException { for (Map.Entry<String, List<CodestartFile>> e : files.entrySet()) { final String relativePath = e.getKey(); Files.createDirectories(targetDirectory.resolve(relativePath).getParent()); getStrategy(relativePath).orElse(getSelectedDefaultStrategy()) .process(targetDirectory, 
relativePath, e.getValue(), data); } } Optional<CodestartFileStrategyHandler> getStrategy(final String key) { for (CodestartFileStrategy codestartFileStrategy : strategies) { if (codestartFileStrategy.test(key)) { return Optional.of(codestartFileStrategy.getHandler()); } } return Optional.empty(); } static List<CodestartFileStrategy> buildStrategies(Map<String, String> spec) { final List<CodestartFileStrategy> codestartFileStrategyHandlers = new ArrayList<>(spec.size()); for (Map.Entry<String, String> entry : spec.entrySet()) { final CodestartFileStrategyHandler handler = CodestartFileStrategyHandler.BY_NAME.get(entry.getValue()); if (handler == null) { throw new CodestartDefinitionException("ConflictStrategyHandler named '" + entry.getValue() + "' not found. Used with filter '" + entry.getKey() + "'"); } codestartFileStrategyHandlers.add(new CodestartFileStrategy(entry.getKey(), handler)); } return codestartFileStrategyHandlers; } }
The write tests also read from the BigQuery table for validation, using exactly the same pipelines as the read test. So I have consolidated these two read tests into writeandread.
public void readAndValidateRows(BigQueryIOJsonOptions options) { TypedRead<TableRow> bigqueryIO = BigQueryIO.readTableRows().withMethod(options.getReadMethod()); if (!options.getInputQuery().isEmpty()) { bigqueryIO = bigqueryIO.fromQuery(options.getInputQuery()).usingStandardSql(); } else { bigqueryIO = bigqueryIO.from(options.getInputTable()); } PCollection<TableRow> jsonRows = p.apply("Read rows", bigqueryIO); if (!options.getInputQuery().isEmpty()) { PAssert.that(jsonRows).containsInAnyOrder(JSON_QUERY_TEST_DATA); p.run().waitUntilFinish(); return; } final boolean unescape = options.getWriteMethod() == Write.Method.FILE_LOADS; PCollection<KV<String, String>> countries = jsonRows.apply( "Convert countries to KV JSON Strings", ParDo.of(new CountryToKVJsonString())); PAssert.that(countries).satisfies(new CompareJsonStrings(getTestData("countries"), unescape)); PCollection<KV<String, String>> stats = jsonRows.apply("Convert stats to KV JSON Strings", ParDo.of(new StatsToKVJsonString())); PAssert.that(stats).satisfies(new CompareJsonStrings(getTestData("stats"), unescape)); PCollection<KV<String, String>> cities = jsonRows.apply("Convert cities to KV JSON Strings", ParDo.of(new CitiesToKVJsonString())); PAssert.that(cities).satisfies(new CompareJsonStrings(getTestData("cities"), unescape)); PCollection<KV<String, String>> landmarks = jsonRows.apply( "Convert landmarks to KV JSON Strings", ParDo.of(new LandmarksToKVJsonString())); PAssert.that(landmarks).satisfies(new CompareJsonStrings(getTestData("landmarks"), unescape)); p.run().waitUntilFinish(); }
jsonRows.apply(
public void readAndValidateRows(BigQueryIOJsonOptions options) { TypedRead<TableRow> bigqueryIO = BigQueryIO.readTableRows().withMethod(options.getReadMethod()); if (!options.getInputQuery().isEmpty()) { bigqueryIO = bigqueryIO.fromQuery(options.getInputQuery()).usingStandardSql(); } else { bigqueryIO = bigqueryIO.from(options.getInputTable()); } PCollection<TableRow> jsonRows = p.apply("Read rows", bigqueryIO); if (!options.getInputQuery().isEmpty()) { PAssert.that(jsonRows).containsInAnyOrder(JSON_QUERY_TEST_DATA); p.run().waitUntilFinish(); return; } final boolean unescape = options.getWriteMethod() == Write.Method.FILE_LOADS; PCollection<KV<String, String>> countries = jsonRows.apply( "Convert countries to KV JSON Strings", ParDo.of(new CountryToKVJsonString())); PAssert.that(countries).satisfies(new CompareJsonStrings(getTestData("countries"), unescape)); PCollection<KV<String, String>> stats = jsonRows.apply("Convert stats to KV JSON Strings", ParDo.of(new StatsToKVJsonString())); PAssert.that(stats).satisfies(new CompareJsonStrings(getTestData("stats"), unescape)); PCollection<KV<String, String>> cities = jsonRows.apply("Convert cities to KV JSON Strings", ParDo.of(new CitiesToKVJsonString())); PAssert.that(cities).satisfies(new CompareJsonStrings(getTestData("cities"), unescape)); PCollection<KV<String, String>> landmarks = jsonRows.apply( "Convert landmarks to KV JSON Strings", ParDo.of(new LandmarksToKVJsonString())); PAssert.that(landmarks).satisfies(new CompareJsonStrings(getTestData("landmarks"), unescape)); p.run().waitUntilFinish(); }
class CompareJsonStrings implements SerializableFunction<Iterable<KV<String, String>>, Void> { Map<String, String> expected; final boolean unescape; public CompareJsonStrings(Map<String, String> expected) { this(expected, false); } public CompareJsonStrings(Map<String, String> expected, boolean unescape) { this.expected = expected; this.unescape = unescape; } @Override public Void apply(Iterable<KV<String, String>> input) throws RuntimeException { int counter = 0; for (KV<String, String> actual : input) { String key = actual.getKey(); if (!expected.containsKey(key)) { throw new NoSuchElementException( String.format( "Unexpected key '%s' found in input but does not exist in expected results.", key)); } String jsonStringActual = actual.getValue(); if (unescape && jsonStringActual.length() > 1) { jsonStringActual = StringEscapeUtils.unescapeEcmaScript( jsonStringActual.substring(1, jsonStringActual.length() - 1)); } JsonElement jsonActual = JsonParser.parseString(jsonStringActual); String jsonStringExpected = expected.get(key); JsonElement jsonExpected = JsonParser.parseString(jsonStringExpected); assertEquals(jsonExpected, jsonActual); counter += 1; } if (counter != expected.size()) { throw new RuntimeException( String.format("Expected %d elements but got %d elements.", expected.size(), counter)); } return null; } }
class CompareJsonStrings implements SerializableFunction<Iterable<KV<String, String>>, Void> { Map<String, String> expected; final boolean unescape; public CompareJsonStrings(Map<String, String> expected) { this(expected, false); } public CompareJsonStrings(Map<String, String> expected, boolean unescape) { this.expected = expected; this.unescape = unescape; } @Override public Void apply(Iterable<KV<String, String>> input) throws RuntimeException { int counter = 0; for (KV<String, String> actual : input) { String key = actual.getKey(); if (!expected.containsKey(key)) { throw new NoSuchElementException( String.format( "Unexpected key '%s' found in input but does not exist in expected results.", key)); } String jsonStringActual = actual.getValue(); if (unescape && jsonStringActual.length() > 1) { jsonStringActual = StringEscapeUtils.unescapeEcmaScript( jsonStringActual.substring(1, jsonStringActual.length() - 1)); } JsonElement jsonActual = JsonParser.parseString(jsonStringActual); String jsonStringExpected = expected.get(key); JsonElement jsonExpected = JsonParser.parseString(jsonStringExpected); assertEquals(jsonExpected, jsonActual); counter += 1; } if (counter != expected.size()) { throw new RuntimeException( String.format("Expected %d elements but got %d elements.", expected.size(), counter)); } return null; } }
Shall we set `symTable.builtinPos` instead, since in this case the name is a compiler-generated one?
public BLangNode transform(RequiredParameterNode requiredParameter) { BLangSimpleVariable simpleVar = createSimpleVar(requiredParameter.paramName(), requiredParameter.typeName(), requiredParameter.annotations()); simpleVar.pos = getPosition(requiredParameter); if (requiredParameter.paramName().isPresent()) { simpleVar.name.pos = getPosition(requiredParameter.paramName().get()); } else if (simpleVar.name.pos == null) { simpleVar.name.pos = simpleVar.pos; } simpleVar.flagSet.add(Flag.REQUIRED_PARAM); return simpleVar; }
simpleVar.name.pos = simpleVar.pos;
public BLangNode transform(RequiredParameterNode requiredParameter) { BLangSimpleVariable simpleVar = createSimpleVar(requiredParameter.paramName(), requiredParameter.typeName(), requiredParameter.annotations()); simpleVar.pos = getPosition(requiredParameter); if (requiredParameter.paramName().isPresent()) { simpleVar.name.pos = getPosition(requiredParameter.paramName().get()); } else if (simpleVar.name.pos == null) { simpleVar.name.pos = symTable.builtinPos; } simpleVar.flagSet.add(Flag.REQUIRED_PARAM); return simpleVar; }
class definition */ @Override public BLangNode transform(ObjectConstructorExpressionNode objectConstructorExpressionNode) { Location pos = getPositionWithoutMetadata(objectConstructorExpressionNode); BLangClassDefinition anonClass = transformObjectCtorExpressionBody(objectConstructorExpressionNode.members()); anonClass.pos = pos; BLangObjectConstructorExpression objectCtorExpression = TreeBuilder.createObjectCtorExpression(); objectCtorExpression.pos = pos; objectCtorExpression.classNode = anonClass; String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID); IdentifierNode anonTypeGenName = createIdentifier(pos, genName); anonClass.setName(anonTypeGenName); anonClass.flagSet.add(Flag.PUBLIC); Optional<TypeDescriptorNode> typeReference = objectConstructorExpressionNode.typeReference(); typeReference.ifPresent(typeReferenceNode -> { objectCtorExpression.addTypeReference(createTypeNode(typeReferenceNode)); }); anonClass.annAttachments = applyAll(objectConstructorExpressionNode.annotations()); addToTop(anonClass); NodeList<Token> objectConstructorQualifierList = objectConstructorExpressionNode.objectTypeQualifiers(); for (Token qualifier : objectConstructorQualifierList) { SyntaxKind kind = qualifier.kind(); if (kind == SyntaxKind.CLIENT_KEYWORD) { anonClass.flagSet.add(Flag.CLIENT); objectCtorExpression.isClient = true; } else if (kind == SyntaxKind.ISOLATED_KEYWORD) { anonClass.flagSet.add(Flag.ISOLATED); } else if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) { anonClass.flagSet.add(SERVICE); objectCtorExpression.isService = true; } else { throw new RuntimeException("Syntax kind is not supported: " + kind); } } BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode(); BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClass.name); BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode(); initNode.pos = pos; initNode.userDefinedType = userDefinedType; BLangInvocation invocationNode 
= (BLangInvocation) TreeBuilder.createInvocationNode(); invocationNode.pos = pos; BLangIdentifier pkgAlias = createIdentifier(pos, ""); BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName)); invocationNode.name = (BLangIdentifier) nameReference.name; invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias; initNode.argsExpr.addAll(invocationNode.argExprs); initNode.initInvocation = invocationNode; objectCtorExpression.typeInit = initNode; return objectCtorExpression; }
class definition */ @Override public BLangNode transform(ObjectConstructorExpressionNode objectConstructorExpressionNode) { Location pos = getPositionWithoutMetadata(objectConstructorExpressionNode); BLangClassDefinition anonClass = transformObjectCtorExpressionBody(objectConstructorExpressionNode.members()); anonClass.pos = pos; BLangObjectConstructorExpression objectCtorExpression = TreeBuilder.createObjectCtorExpression(); objectCtorExpression.pos = pos; objectCtorExpression.classNode = anonClass; String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID); IdentifierNode anonTypeGenName = createIdentifier(pos, genName); anonClass.setName(anonTypeGenName); anonClass.flagSet.add(Flag.PUBLIC); Optional<TypeDescriptorNode> typeReference = objectConstructorExpressionNode.typeReference(); typeReference.ifPresent(typeReferenceNode -> { objectCtorExpression.addTypeReference(createTypeNode(typeReferenceNode)); }); anonClass.annAttachments = applyAll(objectConstructorExpressionNode.annotations()); addToTop(anonClass); NodeList<Token> objectConstructorQualifierList = objectConstructorExpressionNode.objectTypeQualifiers(); for (Token qualifier : objectConstructorQualifierList) { SyntaxKind kind = qualifier.kind(); if (kind == SyntaxKind.CLIENT_KEYWORD) { anonClass.flagSet.add(Flag.CLIENT); objectCtorExpression.isClient = true; } else if (kind == SyntaxKind.ISOLATED_KEYWORD) { anonClass.flagSet.add(Flag.ISOLATED); } else if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) { anonClass.flagSet.add(SERVICE); objectCtorExpression.isService = true; } else { throw new RuntimeException("Syntax kind is not supported: " + kind); } } BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode(); BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClass.name); BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode(); initNode.pos = pos; initNode.userDefinedType = userDefinedType; BLangInvocation invocationNode 
= (BLangInvocation) TreeBuilder.createInvocationNode(); invocationNode.pos = pos; BLangIdentifier pkgAlias = createIdentifier(pos, ""); BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName)); invocationNode.name = (BLangIdentifier) nameReference.name; invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias; initNode.argsExpr.addAll(invocationNode.argExprs); initNode.initInvocation = invocationNode; objectCtorExpression.typeInit = initNode; return objectCtorExpression; }
When we retry the export task, does the queryId change or not?
protected void exec() { LOG.info("begin execute sub task, task idx: {}, task query id: {}", taskIdx, getQueryId()); boolean success = false; String failMsg = null; for (int i = 0; i < RETRY_NUM; ++i) { if (job.isExportDone()) { break; } try { execOneCoord(coord); if (coord.getExecStatus().ok()) { success = true; break; } } catch (Exception e) { failMsg = e.getMessage(); TUniqueId queryId = coord.getQueryId(); LOG.warn("export sub task internal error. task idx: {}, task query id: {}", taskIdx, getQueryId(), e); } if (i < RETRY_NUM - 1) { TUniqueId queryId = coord.getQueryId(); UUID uuid = UUID.randomUUID(); if (exportJob.needResetCoord()) { try { Coordinator newCoord = exportJob.resetCoord(taskIdx, uuid); coord = newCoord; } catch (UserException e) { LOG.warn("fail to reset coord for task idx: {}, task query id: {}, reason: {}", taskIdx, getQueryId(), e.getMessage()); coord.clearExportStatus(); coord.setQueryId(new TUniqueId(queryId.hi, uuid.getLeastSignificantBits())); } } else { coord.clearExportStatus(); coord.setQueryId(new TUniqueId(queryId.hi, uuid.getLeastSignificantBits())); } LOG.warn( "export sub task fail. err: {}. task idx: {}, task query id: {}. retry: {}, new query id: {}", coord.getExecStatus().getErrorMsg(), taskIdx, DebugUtil.printId(queryId), i, DebugUtil.printId(coord.getQueryId())); } } if (!success) { onSubTaskFailed(coord, failMsg); } coord.getQueryProfile().getCounterTotalTime().setValue(TimeUtils.getEstimatedTime(job.getStartTimeMs())); coord.endProfile(); synchronized (fragmentProfiles) { fragmentProfiles.add(coord.getQueryProfile()); } }
DebugUtil.printId(coord.getQueryId()));
protected void exec() { if (job.getState() != ExportJob.JobState.EXPORTING) { return; } LOG.info("begin execute export job in exporting state. job: {}", job); if (getLeftTimeSecond() < 0) { job.cancelInternal(ExportFailMsg.CancelType.TIMEOUT, "timeout"); return; } synchronized (job) { if (job.getDoExportingThread() != null) { LOG.warn("export job is already being executed"); return; } job.setDoExportingThread(Thread.currentThread()); } if (job.isReplayed()) { String failMsg = "FE restarted or Leader changed during exporting. Job must be cancelled"; job.cancelInternal(ExportFailMsg.CancelType.RUN_FAIL, failMsg); return; } List<Coordinator> coords = job.getCoordList(); int coordSize = coords.size(); List<ExportExportingSubTask> subTasks = Lists.newArrayList(); for (int i = 0; i < coordSize; i++) { Coordinator coord = coords.get(i); ExportExportingSubTask subTask = new ExportExportingSubTask(coord, i, coordSize, job); subTasks.add(subTask); subTasksDoneSignal.addMark(i, -1); } for (ExportExportingSubTask subTask : subTasks) { if (!submitSubTask(subTask)) { job.cancelInternal(ExportFailMsg.CancelType.RUN_FAIL, "submit exporting task failed"); return; } LOG.info("submit export sub task success. 
task idx: {}, task query id: {}", subTask.getTaskIdx(), subTask.getQueryId()); } boolean success = false; try { success = subTasksDoneSignal.await(getLeftTimeSecond(), TimeUnit.SECONDS); } catch (InterruptedException e) { LOG.warn("export sub task signal await error", e); } Status status = subTasksDoneSignal.getStatus(); if (!success || !status.ok()) { if (!success) { job.cancelInternal(ExportFailMsg.CancelType.TIMEOUT, "timeout"); } else { job.cancelInternal(ExportFailMsg.CancelType.RUN_FAIL, status.getErrorMsg()); } registerProfile(); return; } Status mvStatus = moveTmpFiles(); if (!mvStatus.ok()) { String failMsg = "move tmp file to final destination fail, "; failMsg += mvStatus.getErrorMsg(); job.cancelInternal(ExportFailMsg.CancelType.RUN_FAIL, failMsg); LOG.warn("move tmp file to final destination fail. job:{}", job); registerProfile(); return; } job.finish(); registerProfile(); synchronized (this) { job.setDoExportingThread(null); } }
class ExportExportingTask extends PriorityLeaderTask { private static final Logger LOG = LogManager.getLogger(ExportExportingTask.class); private static final int RETRY_NUM = 2; protected final ExportJob job; private RuntimeProfile profile = new RuntimeProfile("Export"); private final List<RuntimeProfile> fragmentProfiles = Lists.newArrayList(); private final MarkedCountDownLatch<Integer, Integer> subTasksDoneSignal; public ExportExportingTask(ExportJob job) { this.job = job; this.signature = job.getId(); this.subTasksDoneSignal = new MarkedCountDownLatch<Integer, Integer>(job.getCoordList().size()); } @Override private boolean submitSubTask(ExportExportingSubTask subTask) { int retryNum = 0; while (!ExportChecker.getExportingSubTaskExecutor().submit(subTask)) { LOG.warn("submit export sub task failed. try to resubmit. task idx {}, task query id: {}, retry: {}", subTask.getTaskIdx(), subTask.getQueryId(), retryNum); if (++retryNum > RETRY_NUM) { return false; } try { Thread.sleep(1000); } catch (InterruptedException e) { LOG.warn(e); } } return true; } private int getLeftTimeSecond() { return (int) (job.getTimeoutSecond() - (System.currentTimeMillis() - job.getCreateTimeMs()) / 1000); } private void initProfile() { profile = new RuntimeProfile("Query"); RuntimeProfile summaryProfile = new RuntimeProfile("Summary"); summaryProfile.addInfoString(ProfileManager.QUERY_ID, String.valueOf(job.getId())); summaryProfile.addInfoString(ProfileManager.START_TIME, TimeUtils.longToTimeString(job.getStartTimeMs())); long currentTimestamp = System.currentTimeMillis(); long totalTimeMs = currentTimestamp - job.getStartTimeMs(); summaryProfile.addInfoString(ProfileManager.END_TIME, TimeUtils.longToTimeString(currentTimestamp)); summaryProfile.addInfoString(ProfileManager.TOTAL_TIME, DebugUtil.getPrettyStringMs(totalTimeMs)); summaryProfile.addInfoString(ProfileManager.QUERY_TYPE, "Query"); summaryProfile.addInfoString(ProfileManager.QUERY_STATE, job.getState().toString()); 
summaryProfile.addInfoString("StarRocks Version", String.format("%s-%s", Version.STARROCKS_VERSION, Version.STARROCKS_COMMIT_HASH)); summaryProfile.addInfoString(ProfileManager.USER, "xxx"); summaryProfile.addInfoString(ProfileManager.DEFAULT_DB, String.valueOf(job.getDbId())); summaryProfile.addInfoString(ProfileManager.SQL_STATEMENT, job.getSql()); profile.addChild(summaryProfile); } private void registerProfile() { initProfile(); synchronized (fragmentProfiles) { for (RuntimeProfile p : fragmentProfiles) { profile.addChild(p); } } ProfileManager.getInstance().pushProfile(profile); } private Status moveTmpFiles() { Set<String> exportedTempFiles = job.getExportedTempFiles(); String exportPath = job.getExportPath(); for (String exportedTempFile : exportedTempFiles) { String exportedFile = exportedTempFile.substring(exportedTempFile.lastIndexOf("/") + 1); exportedFile = exportedFile.substring(0, exportedFile.lastIndexOf(".")); exportedFile = exportPath + exportedFile; boolean success = false; String failMsg = null; for (int i = 0; i < RETRY_NUM; ++i) { try { if (!job.getBrokerDesc().hasBroker()) { if (HdfsUtil.checkPathExist(exportedFile, job.getBrokerDesc())) { failMsg = exportedFile + " already exist"; LOG.warn("move {} to {} fail. job id: {}, retry: {}, msg: {}", exportedTempFile, exportedFile, job.getId(), i, failMsg); break; } if (!HdfsUtil.checkPathExist(exportedTempFile, job.getBrokerDesc())) { failMsg = exportedFile + " temp file not exist"; LOG.warn("move {} to {} fail. job id: {}, retry: {}, msg: {}", exportedTempFile, exportedFile, job.getId(), i, failMsg); break; } } else { if (BrokerUtil.checkPathExist(exportedFile, job.getBrokerDesc())) { failMsg = exportedFile + " already exist"; LOG.warn("move {} to {} fail. 
job id: {}, retry: {}, msg: {}", exportedTempFile, exportedFile, job.getId(), i, failMsg); break; } if (!BrokerUtil.checkPathExist(exportedTempFile, job.getBrokerDesc())) { failMsg = exportedFile + " temp file not exist"; LOG.warn("move {} to {} fail. job id: {}, retry: {}, msg: {}", exportedTempFile, exportedFile, job.getId(), i, failMsg); break; } } int timeoutMs = Math.min(Math.max(1, getLeftTimeSecond()), 3600) * 1000; if (!job.getBrokerDesc().hasBroker()) { HdfsUtil.rename(exportedTempFile, exportedFile, job.getBrokerDesc(), timeoutMs); } else { BrokerUtil.rename(exportedTempFile, exportedFile, job.getBrokerDesc(), timeoutMs); } job.addExportedFile(exportedFile); success = true; LOG.info("move {} to {} success. job id: {}", exportedTempFile, exportedFile, job.getId()); break; } catch (UserException e) { failMsg = e.getMessage(); LOG.warn("move {} to {} fail. job id: {}, retry: {}, msg: {}", exportedTempFile, exportedFile, job.getId(), i, failMsg); } } if (!success) { return new Status(TStatusCode.INTERNAL_ERROR, failMsg); } } job.clearExportedTempFiles(); return Status.OK; } private class ExportExportingSubTask extends PriorityLeaderTask { private Coordinator coord; private final int taskIdx; private final int coordSize; private final ExportJob exportJob; public ExportExportingSubTask(Coordinator coord, int taskIdx, int coordSize, ExportJob exportJob) { this.coord = coord; this.taskIdx = taskIdx; this.coordSize = coordSize; this.exportJob = exportJob; this.signature = GlobalStateMgr.getCurrentState().getNextId(); } public int getTaskIdx() { return taskIdx; } public String getQueryId() { return DebugUtil.printId(coord.getQueryId()); } @Override protected void exec() { LOG.info("begin execute sub task, task idx: {}, task query id: {}", taskIdx, getQueryId()); boolean success = false; String failMsg = null; for (int i = 0; i < RETRY_NUM; ++i) { if (job.isExportDone()) { break; } try { execOneCoord(coord); if (coord.getExecStatus().ok()) { success = true; break; } 
} catch (Exception e) { failMsg = e.getMessage(); TUniqueId queryId = coord.getQueryId(); LOG.warn("export sub task internal error. task idx: {}, task query id: {}", taskIdx, getQueryId(), e); } if (i < RETRY_NUM - 1) { TUniqueId queryId = coord.getQueryId(); UUID uuid = UUID.randomUUID(); if (exportJob.needResetCoord()) { try { Coordinator newCoord = exportJob.resetCoord(taskIdx, uuid); coord = newCoord; } catch (UserException e) { LOG.warn("fail to reset coord for task idx: {}, task query id: {}, reason: {}", taskIdx, getQueryId(), e.getMessage()); coord.clearExportStatus(); coord.setQueryId(new TUniqueId(queryId.hi, uuid.getLeastSignificantBits())); } } else { coord.clearExportStatus(); coord.setQueryId(new TUniqueId(queryId.hi, uuid.getLeastSignificantBits())); } LOG.warn( "export sub task fail. err: {}. task idx: {}, task query id: {}. retry: {}, new query id: {}", coord.getExecStatus().getErrorMsg(), taskIdx, DebugUtil.printId(queryId), i, DebugUtil.printId(coord.getQueryId())); } } if (!success) { onSubTaskFailed(coord, failMsg); } coord.getQueryProfile().getCounterTotalTime().setValue(TimeUtils.getEstimatedTime(job.getStartTimeMs())); coord.endProfile(); synchronized (fragmentProfiles) { fragmentProfiles.add(coord.getQueryProfile()); } } private void execOneCoord(Coordinator coord) throws Exception { TUniqueId queryId = coord.getQueryId(); QeProcessorImpl.INSTANCE.registerQuery(queryId, coord); try { actualExecCoord(coord); } finally { QeProcessorImpl.INSTANCE.unregisterQuery(queryId); } } private void actualExecCoord(Coordinator coord) throws Exception { int leftTimeSecond = getLeftTimeSecond(); if (leftTimeSecond <= 0) { throw new UserException("timeout"); } coord.setTimeout(leftTimeSecond); coord.exec(); if (coord.join(leftTimeSecond)) { Status status = coord.getExecStatus(); if (status.ok()) { onSubTaskFinished(coord.getExportFiles()); } else { throw new UserException(status.getErrorMsg()); } } else { throw new UserException("timeout"); } } private void 
onSubTaskFinished(List<String> exportFiles) { job.addExportedTempFiles(exportFiles); synchronized (subTasksDoneSignal) { subTasksDoneSignal.markedCountDown(taskIdx, -1 /* dummy value */); job.setProgress((int) (coordSize - subTasksDoneSignal.getCount()) * 100 / coordSize); } LOG.info("export sub task finish. task idx: {}, task query id: {}", taskIdx, getQueryId()); } private void onSubTaskFailed(Coordinator coordinator, String failMsg) { Status coordStatus = coordinator.getExecStatus(); String taskFailMsg = "export job fail. query id: " + DebugUtil.printId(coordinator.getQueryId()) + ", fail msg: "; if (!Strings.isNullOrEmpty(coordStatus.getErrorMsg())) { taskFailMsg += coordStatus.getErrorMsg(); } else { taskFailMsg += failMsg; } Status failStatus = new Status(TStatusCode.INTERNAL_ERROR, taskFailMsg); synchronized (subTasksDoneSignal) { subTasksDoneSignal.countDownToZero(failStatus); } LOG.warn("export sub task fail. task idx: {}, task query id: {}, err: {}", taskIdx, getQueryId(), taskFailMsg); } } }
class ExportExportingTask extends PriorityLeaderTask { private static final Logger LOG = LogManager.getLogger(ExportExportingTask.class); private static final int RETRY_NUM = 2; protected final ExportJob job; private RuntimeProfile profile = new RuntimeProfile("Export"); private final List<RuntimeProfile> fragmentProfiles = Lists.newArrayList(); private final MarkedCountDownLatch<Integer, Integer> subTasksDoneSignal; public ExportExportingTask(ExportJob job) { this.job = job; this.signature = job.getId(); this.subTasksDoneSignal = new MarkedCountDownLatch<Integer, Integer>(job.getCoordList().size()); } @Override private boolean submitSubTask(ExportExportingSubTask subTask) { int retryNum = 0; while (!ExportChecker.getExportingSubTaskExecutor().submit(subTask)) { LOG.warn("submit export sub task failed. try to resubmit. task idx {}, task query id: {}, retry: {}", subTask.getTaskIdx(), subTask.getQueryId(), retryNum); if (++retryNum > RETRY_NUM) { return false; } try { Thread.sleep(1000); } catch (InterruptedException e) { LOG.warn(e); } } return true; } private int getLeftTimeSecond() { return (int) (job.getTimeoutSecond() - (System.currentTimeMillis() - job.getCreateTimeMs()) / 1000); } private void initProfile() { profile = new RuntimeProfile("Query"); RuntimeProfile summaryProfile = new RuntimeProfile("Summary"); summaryProfile.addInfoString(ProfileManager.QUERY_ID, String.valueOf(job.getId())); summaryProfile.addInfoString(ProfileManager.START_TIME, TimeUtils.longToTimeString(job.getStartTimeMs())); long currentTimestamp = System.currentTimeMillis(); long totalTimeMs = currentTimestamp - job.getStartTimeMs(); summaryProfile.addInfoString(ProfileManager.END_TIME, TimeUtils.longToTimeString(currentTimestamp)); summaryProfile.addInfoString(ProfileManager.TOTAL_TIME, DebugUtil.getPrettyStringMs(totalTimeMs)); summaryProfile.addInfoString(ProfileManager.QUERY_TYPE, "Query"); summaryProfile.addInfoString(ProfileManager.QUERY_STATE, job.getState().toString()); 
summaryProfile.addInfoString("StarRocks Version", String.format("%s-%s", Version.STARROCKS_VERSION, Version.STARROCKS_COMMIT_HASH)); summaryProfile.addInfoString(ProfileManager.USER, "xxx"); summaryProfile.addInfoString(ProfileManager.DEFAULT_DB, String.valueOf(job.getDbId())); summaryProfile.addInfoString(ProfileManager.SQL_STATEMENT, job.getSql()); profile.addChild(summaryProfile); } private void registerProfile() { initProfile(); synchronized (fragmentProfiles) { for (RuntimeProfile p : fragmentProfiles) { profile.addChild(p); } } ProfileManager.getInstance().pushProfile(profile); } private Status moveTmpFiles() { Set<String> exportedTempFiles = job.getExportedTempFiles(); String exportPath = job.getExportPath(); for (String exportedTempFile : exportedTempFiles) { String exportedFile = exportedTempFile.substring(exportedTempFile.lastIndexOf("/") + 1); exportedFile = exportedFile.substring(0, exportedFile.lastIndexOf(".")); exportedFile = exportPath + exportedFile; boolean success = false; String failMsg = null; for (int i = 0; i < RETRY_NUM; ++i) { try { if (!job.getBrokerDesc().hasBroker()) { if (HdfsUtil.checkPathExist(exportedFile, job.getBrokerDesc())) { failMsg = exportedFile + " already exist"; LOG.warn("move {} to {} fail. job id: {}, retry: {}, msg: {}", exportedTempFile, exportedFile, job.getId(), i, failMsg); break; } if (!HdfsUtil.checkPathExist(exportedTempFile, job.getBrokerDesc())) { failMsg = exportedFile + " temp file not exist"; LOG.warn("move {} to {} fail. job id: {}, retry: {}, msg: {}", exportedTempFile, exportedFile, job.getId(), i, failMsg); break; } } else { if (BrokerUtil.checkPathExist(exportedFile, job.getBrokerDesc())) { failMsg = exportedFile + " already exist"; LOG.warn("move {} to {} fail. 
job id: {}, retry: {}, msg: {}", exportedTempFile, exportedFile, job.getId(), i, failMsg); break; } if (!BrokerUtil.checkPathExist(exportedTempFile, job.getBrokerDesc())) { failMsg = exportedFile + " temp file not exist"; LOG.warn("move {} to {} fail. job id: {}, retry: {}, msg: {}", exportedTempFile, exportedFile, job.getId(), i, failMsg); break; } } int timeoutMs = Math.min(Math.max(1, getLeftTimeSecond()), 3600) * 1000; if (!job.getBrokerDesc().hasBroker()) { HdfsUtil.rename(exportedTempFile, exportedFile, job.getBrokerDesc(), timeoutMs); } else { BrokerUtil.rename(exportedTempFile, exportedFile, job.getBrokerDesc(), timeoutMs); } job.addExportedFile(exportedFile); success = true; LOG.info("move {} to {} success. job id: {}", exportedTempFile, exportedFile, job.getId()); break; } catch (UserException e) { failMsg = e.getMessage(); LOG.warn("move {} to {} fail. job id: {}, retry: {}, msg: {}", exportedTempFile, exportedFile, job.getId(), i, failMsg); } } if (!success) { return new Status(TStatusCode.INTERNAL_ERROR, failMsg); } } job.clearExportedTempFiles(); return Status.OK; } private class ExportExportingSubTask extends PriorityLeaderTask { private Coordinator coord; private final int taskIdx; private final int coordSize; private final ExportJob exportJob; public ExportExportingSubTask(Coordinator coord, int taskIdx, int coordSize, ExportJob exportJob) { this.coord = coord; this.taskIdx = taskIdx; this.coordSize = coordSize; this.exportJob = exportJob; this.signature = GlobalStateMgr.getCurrentState().getNextId(); } public int getTaskIdx() { return taskIdx; } public String getQueryId() { return DebugUtil.printId(coord.getQueryId()); } @Override protected void exec() { LOG.info("begin execute sub task, task idx: {}, task query id: {}", taskIdx, getQueryId()); boolean success = false; String failMsg = null; for (int i = 0; i < RETRY_NUM; ++i) { if (job.isExportDone()) { break; } try { execOneCoord(coord); if (coord.getExecStatus().ok()) { success = true; break; } 
} catch (Exception e) { failMsg = e.getMessage(); TUniqueId queryId = coord.getQueryId(); LOG.warn("export sub task internal error. task idx: {}, task query id: {}", taskIdx, getQueryId(), e); } if (i < RETRY_NUM - 1) { TUniqueId oldQueryId = coord.getQueryId(); UUID uuid = UUID.randomUUID(); TUniqueId newQueryId = new TUniqueId(oldQueryId.hi, uuid.getLeastSignificantBits()); if (exportJob.needResetCoord()) { try { Coordinator newCoord = exportJob.resetCoord(taskIdx, newQueryId); coord = newCoord; } catch (UserException e) { LOG.warn("fail to reset coord for task idx: {}, task query id: {}, reason: {}", taskIdx, getQueryId(), e.getMessage()); coord.clearExportStatus(); } } else { coord.clearExportStatus(); } coord.setQueryId(newQueryId); LOG.warn( "export sub task fail. err: {}. task idx: {}, task query id: {}. retry: {}, new query id: {}", coord.getExecStatus().getErrorMsg(), taskIdx, DebugUtil.printId(oldQueryId), i, DebugUtil.printId(coord.getQueryId())); } } if (!success) { onSubTaskFailed(coord, failMsg); } coord.getQueryProfile().getCounterTotalTime().setValue(TimeUtils.getEstimatedTime(job.getStartTimeMs())); coord.endProfile(); synchronized (fragmentProfiles) { fragmentProfiles.add(coord.getQueryProfile()); } } private void execOneCoord(Coordinator coord) throws Exception { TUniqueId queryId = coord.getQueryId(); QeProcessorImpl.INSTANCE.registerQuery(queryId, coord); try { actualExecCoord(coord); } finally { QeProcessorImpl.INSTANCE.unregisterQuery(queryId); } } private void actualExecCoord(Coordinator coord) throws Exception { int leftTimeSecond = getLeftTimeSecond(); if (leftTimeSecond <= 0) { throw new UserException("timeout"); } coord.setTimeout(leftTimeSecond); coord.exec(); if (coord.join(leftTimeSecond)) { Status status = coord.getExecStatus(); if (status.ok()) { onSubTaskFinished(coord.getExportFiles()); } else { throw new UserException(status.getErrorMsg()); } } else { throw new UserException("timeout"); } } private void 
onSubTaskFinished(List<String> exportFiles) { job.addExportedTempFiles(exportFiles); synchronized (subTasksDoneSignal) { subTasksDoneSignal.markedCountDown(taskIdx, -1 /* dummy value */); job.setProgress((int) (coordSize - subTasksDoneSignal.getCount()) * 100 / coordSize); } LOG.info("export sub task finish. task idx: {}, task query id: {}", taskIdx, getQueryId()); } private void onSubTaskFailed(Coordinator coordinator, String failMsg) { Status coordStatus = coordinator.getExecStatus(); String taskFailMsg = "export job fail. query id: " + DebugUtil.printId(coordinator.getQueryId()) + ", fail msg: "; if (!Strings.isNullOrEmpty(coordStatus.getErrorMsg())) { taskFailMsg += coordStatus.getErrorMsg(); } else { taskFailMsg += failMsg; } Status failStatus = new Status(TStatusCode.INTERNAL_ERROR, taskFailMsg); synchronized (subTasksDoneSignal) { subTasksDoneSignal.countDownToZero(failStatus); } LOG.warn("export sub task fail. task idx: {}, task query id: {}, err: {}", taskIdx, getQueryId(), taskFailMsg); } } }
This will lose other configs in `configuration`; we can add a private `create(Configuration conf, EnvironmentSettings settings)` method as the basic implementation.
public static TableEnvironmentImpl create(Configuration configuration) { return create(EnvironmentSettings.fromConfiguration(configuration)); }
return create(EnvironmentSettings.fromConfiguration(configuration));
public static TableEnvironmentImpl create(Configuration configuration) { return create(EnvironmentSettings.fromConfiguration(configuration), configuration); }
class TableEnvironmentImpl implements TableEnvironmentInternal { private static final boolean IS_STREAM_TABLE = true; private final CatalogManager catalogManager; private final ModuleManager moduleManager; private final OperationTreeBuilder operationTreeBuilder; private final List<ModifyOperation> bufferedModifyOperations = new ArrayList<>(); protected final TableConfig tableConfig; protected final Executor execEnv; protected final FunctionCatalog functionCatalog; protected final Planner planner; protected final Parser parser; private final boolean isStreamingMode; private final ClassLoader userClassLoader; private static final String UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG = "Unsupported SQL query! sqlUpdate() only accepts a single SQL statement of type " + "INSERT, CREATE TABLE, DROP TABLE, ALTER TABLE, USE CATALOG, USE [CATALOG.]DATABASE, " + "CREATE DATABASE, DROP DATABASE, ALTER DATABASE, CREATE FUNCTION, DROP FUNCTION, ALTER FUNCTION, " + "CREATE CATALOG, DROP CATALOG, CREATE VIEW, DROP VIEW, LOAD MODULE, UNLOAD MODULE, USE MODULES."; private static final String UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG = "Unsupported SQL query! executeSql() only accepts a single SQL statement of type " + "CREATE TABLE, DROP TABLE, ALTER TABLE, CREATE DATABASE, DROP DATABASE, ALTER DATABASE, " + "CREATE FUNCTION, DROP FUNCTION, ALTER FUNCTION, CREATE CATALOG, DROP CATALOG, " + "USE CATALOG, USE [CATALOG.]DATABASE, SHOW CATALOGS, SHOW DATABASES, SHOW TABLES, SHOW FUNCTIONS, SHOW PARTITIONS" + "CREATE VIEW, DROP VIEW, SHOW VIEWS, INSERT, DESCRIBE, LOAD MODULE, UNLOAD MODULE, USE MODULES."; /** Provides necessary methods for {@link ConnectTableDescriptor}. 
*/ private final Registration registration = new Registration() { @Override public void createTemporaryTable(String path, CatalogBaseTable table) { UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path); ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier); catalogManager.createTemporaryTable(table, objectIdentifier, false); } }; protected TableEnvironmentImpl( CatalogManager catalogManager, ModuleManager moduleManager, TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog, Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) { this.catalogManager = catalogManager; this.catalogManager.setCatalogTableSchemaResolver( new CatalogTableSchemaResolver(planner.getParser(), isStreamingMode)); this.moduleManager = moduleManager; this.execEnv = executor; this.tableConfig = tableConfig; this.functionCatalog = functionCatalog; this.planner = planner; this.parser = planner.getParser(); this.isStreamingMode = isStreamingMode; this.userClassLoader = userClassLoader; this.operationTreeBuilder = OperationTreeBuilder.create( tableConfig, functionCatalog.asLookup(parser::parseIdentifier), catalogManager.getDataTypeFactory(), path -> { try { UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path); Optional<CatalogQueryOperation> catalogQueryOperation = scanInternal(unresolvedIdentifier); return catalogQueryOperation.map( t -> ApiExpressionUtils.tableRef(path, t)); } catch (SqlParserException ex) { return Optional.empty(); } }, (sqlExpression, inputSchema) -> { try { return parser.parseSqlExpression(sqlExpression, inputSchema); } catch (Throwable t) { throw new ValidationException( String.format("Invalid SQL expression: %s", sqlExpression), t); } }, isStreamingMode); } public static TableEnvironmentImpl create(EnvironmentSettings settings) { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); TableConfig tableConfig = new 
TableConfig(settings.toConfiguration()); ModuleManager moduleManager = new ModuleManager(); CatalogManager catalogManager = CatalogManager.newBuilder() .classLoader(classLoader) .config(tableConfig.getConfiguration()) .defaultCatalog( settings.getBuiltInCatalogName(), new GenericInMemoryCatalog( settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName())) .build(); FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager); Map<String, String> executorProperties = settings.toExecutorProperties(); Executor executor = ComponentFactoryService.find(ExecutorFactory.class, executorProperties) .create(executorProperties); Map<String, String> plannerProperties = settings.toPlannerProperties(); Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties) .create( plannerProperties, executor, tableConfig, functionCatalog, catalogManager); return new TableEnvironmentImpl( catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, settings.isStreamingMode(), classLoader); } @Override public Table fromValues(Object... values) { return fromValues(Arrays.asList(values)); } @Override public Table fromValues(AbstractDataType<?> rowType, Object... values) { return fromValues(rowType, Arrays.asList(values)); } @Override public Table fromValues(Expression... values) { return createTable(operationTreeBuilder.values(values)); } @Override public Table fromValues(AbstractDataType<?> rowType, Expression... 
values) { final DataType resolvedDataType = catalogManager.getDataTypeFactory().createDataType(rowType); return createTable(operationTreeBuilder.values(resolvedDataType, values)); } @Override public Table fromValues(Iterable<?> values) { Expression[] exprs = StreamSupport.stream(values.spliterator(), false) .map(ApiExpressionUtils::objectToExpression) .toArray(Expression[]::new); return fromValues(exprs); } @Override public Table fromValues(AbstractDataType<?> rowType, Iterable<?> values) { Expression[] exprs = StreamSupport.stream(values.spliterator(), false) .map(ApiExpressionUtils::objectToExpression) .toArray(Expression[]::new); return fromValues(rowType, exprs); } @VisibleForTesting public Planner getPlanner() { return planner; } @Override public Table fromTableSource(TableSource<?> source) { return createTable(new TableSourceQueryOperation<>(source, !IS_STREAM_TABLE)); } @Override public void registerCatalog(String catalogName, Catalog catalog) { catalogManager.registerCatalog(catalogName, catalog); } @Override public Optional<Catalog> getCatalog(String catalogName) { return catalogManager.getCatalog(catalogName); } @Override public void loadModule(String moduleName, Module module) { moduleManager.loadModule(moduleName, module); } @Override public void useModules(String... moduleNames) { moduleManager.useModules(moduleNames); } @Override public void unloadModule(String moduleName) { moduleManager.unloadModule(moduleName); } @Override public void registerFunction(String name, ScalarFunction function) { functionCatalog.registerTempSystemScalarFunction(name, function); } @Override public void createTemporarySystemFunction( String name, Class<? 
extends UserDefinedFunction> functionClass) { final UserDefinedFunction functionInstance = UserDefinedFunctionHelper.instantiateFunction(functionClass); createTemporarySystemFunction(name, functionInstance); } @Override public void createTemporarySystemFunction(String name, UserDefinedFunction functionInstance) { functionCatalog.registerTemporarySystemFunction(name, functionInstance, false); } @Override public boolean dropTemporarySystemFunction(String name) { return functionCatalog.dropTemporarySystemFunction(name, true); } @Override public void createFunction(String path, Class<? extends UserDefinedFunction> functionClass) { createFunction(path, functionClass, false); } @Override public void createFunction( String path, Class<? extends UserDefinedFunction> functionClass, boolean ignoreIfExists) { final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path); functionCatalog.registerCatalogFunction( unresolvedIdentifier, functionClass, ignoreIfExists); } @Override public boolean dropFunction(String path) { final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path); return functionCatalog.dropCatalogFunction(unresolvedIdentifier, true); } @Override public void createTemporaryFunction( String path, Class<? 
extends UserDefinedFunction> functionClass) { final UserDefinedFunction functionInstance = UserDefinedFunctionHelper.instantiateFunction(functionClass); createTemporaryFunction(path, functionInstance); } @Override public void createTemporaryFunction(String path, UserDefinedFunction functionInstance) { final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path); functionCatalog.registerTemporaryCatalogFunction( unresolvedIdentifier, functionInstance, false); } @Override public boolean dropTemporaryFunction(String path) { final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path); return functionCatalog.dropTemporaryCatalogFunction(unresolvedIdentifier, true); } @Override public void registerTable(String name, Table table) { UnresolvedIdentifier identifier = UnresolvedIdentifier.of(name); createTemporaryView(identifier, table); } @Override public void createTemporaryView(String path, Table view) { UnresolvedIdentifier identifier = parser.parseIdentifier(path); createTemporaryView(identifier, view); } private void createTemporaryView(UnresolvedIdentifier identifier, Table view) { if (((TableImpl) view).getTableEnvironment() != this) { throw new TableException( "Only table API objects that belong to this TableEnvironment can be registered."); } ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(identifier); QueryOperation queryOperation = qualifyQueryOperation(tableIdentifier, view.getQueryOperation()); CatalogBaseTable tableTable = new QueryOperationCatalogView(queryOperation); catalogManager.createTemporaryTable(tableTable, tableIdentifier, false); } @Override public Table scan(String... 
tablePath) { UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(tablePath); return scanInternal(unresolvedIdentifier) .map(this::createTable) .orElseThrow( () -> new ValidationException( String.format( "Table %s was not found.", unresolvedIdentifier))); } @Override public Table from(String path) { UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path); return scanInternal(unresolvedIdentifier) .map(this::createTable) .orElseThrow( () -> new ValidationException( String.format( "Table %s was not found.", unresolvedIdentifier))); } @Override public void insertInto(String targetPath, Table table) { UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(targetPath); insertIntoInternal(unresolvedIdentifier, table); } @Override public void insertInto(Table table, String sinkPath, String... sinkPathContinued) { List<String> fullPath = new ArrayList<>(Arrays.asList(sinkPathContinued)); fullPath.add(0, sinkPath); UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(fullPath); insertIntoInternal(unresolvedIdentifier, table); } private void insertIntoInternal(UnresolvedIdentifier unresolvedIdentifier, Table table) { ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier); List<ModifyOperation> modifyOperations = Collections.singletonList( new CatalogSinkModifyOperation( objectIdentifier, table.getQueryOperation())); buffer(modifyOperations); } private Optional<CatalogQueryOperation> scanInternal(UnresolvedIdentifier identifier) { ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(identifier); return catalogManager .getTable(tableIdentifier) .map(t -> new CatalogQueryOperation(tableIdentifier, t.getResolvedSchema())); } @Override public ConnectTableDescriptor connect(ConnectorDescriptor connectorDescriptor) { return new StreamTableDescriptor(registration, connectorDescriptor); } @Override public String[] listCatalogs() { return 
catalogManager.listCatalogs().stream().sorted().toArray(String[]::new); } @Override public String[] listModules() { return moduleManager.listModules().toArray(new String[0]); } @Override public ModuleEntry[] listFullModules() { return moduleManager.listFullModules().toArray(new ModuleEntry[0]); } @Override public String[] listDatabases() { return catalogManager .getCatalog(catalogManager.getCurrentCatalog()) .get() .listDatabases() .toArray(new String[0]); } @Override public String[] listTables() { return catalogManager.listTables().stream().sorted().toArray(String[]::new); } @Override public String[] listViews() { return catalogManager.listViews().stream().sorted().toArray(String[]::new); } @Override public String[] listTemporaryTables() { return catalogManager.listTemporaryTables().stream().sorted().toArray(String[]::new); } @Override public String[] listTemporaryViews() { return catalogManager.listTemporaryViews().stream().sorted().toArray(String[]::new); } @Override public boolean dropTemporaryTable(String path) { UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path); ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier); try { catalogManager.dropTemporaryTable(identifier, false); return true; } catch (ValidationException e) { return false; } } @Override public boolean dropTemporaryView(String path) { UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path); ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier); try { catalogManager.dropTemporaryView(identifier, false); return true; } catch (ValidationException e) { return false; } } @Override public String[] listUserDefinedFunctions() { return functionCatalog.getUserDefinedFunctions(); } @Override public String[] listFunctions() { return functionCatalog.getFunctions(); } @Override public String explain(Table table) { return explain(table, false); } @Override public String explain(Table table, boolean extended) { 
return planner.explain( Collections.singletonList(table.getQueryOperation()), getExplainDetails(extended)); } @Override public String explain(boolean extended) { List<Operation> operations = bufferedModifyOperations.stream() .map(o -> (Operation) o) .collect(Collectors.toList()); return planner.explain(operations, getExplainDetails(extended)); } @Override public String explainSql(String statement, ExplainDetail... extraDetails) { List<Operation> operations = parser.parse(statement); if (operations.size() != 1) { throw new TableException( "Unsupported SQL query! explainSql() only accepts a single SQL query."); } return planner.explain(operations, extraDetails); } @Override public String explainInternal(List<Operation> operations, ExplainDetail... extraDetails) { return planner.explain(operations, extraDetails); } @Override public String[] getCompletionHints(String statement, int position) { return planner.getCompletionHints(statement, position); } @Override public Table sqlQuery(String query) { List<Operation> operations = parser.parse(query); if (operations.size() != 1) { throw new ValidationException( "Unsupported SQL query! sqlQuery() only accepts a single SQL query."); } Operation operation = operations.get(0); if (operation instanceof QueryOperation && !(operation instanceof ModifyOperation)) { return createTable((QueryOperation) operation); } else { throw new ValidationException( "Unsupported SQL query! 
sqlQuery() only accepts a single SQL query of type " + "SELECT, UNION, INTERSECT, EXCEPT, VALUES, and ORDER_BY."); } } @Override public TableResult executeSql(String statement) { List<Operation> operations = parser.parse(statement); if (operations.size() != 1) { throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG); } return executeOperation(operations.get(0)); } @Override public StatementSet createStatementSet() { return new StatementSetImpl(this); } @Override public TableResult executeInternal(List<ModifyOperation> operations) { List<Transformation<?>> transformations = translate(operations); List<String> sinkIdentifierNames = extractSinkIdentifierNames(operations); return executeInternal(transformations, sinkIdentifierNames); } private TableResult executeInternal( List<Transformation<?>> transformations, List<String> sinkIdentifierNames) { String jobName = getJobName("insert-into_" + String.join(",", sinkIdentifierNames)); Pipeline pipeline = execEnv.createPipeline(transformations, tableConfig, jobName); try { JobClient jobClient = execEnv.executeAsync(pipeline); TableSchema.Builder builder = TableSchema.builder(); Object[] affectedRowCounts = new Long[transformations.size()]; for (int i = 0; i < transformations.size(); ++i) { builder.field(sinkIdentifierNames.get(i), DataTypes.BIGINT()); affectedRowCounts[i] = -1L; } return TableResultImpl.builder() .jobClient(jobClient) .resultKind(ResultKind.SUCCESS_WITH_CONTENT) .tableSchema(builder.build()) .data( new InsertResultIterator( jobClient, Row.of(affectedRowCounts), userClassLoader)) .build(); } catch (Exception e) { throw new TableException("Failed to execute sql", e); } } @Override public TableResult executeInternal(QueryOperation operation) { SelectSinkOperation sinkOperation = new SelectSinkOperation(operation); List<Transformation<?>> transformations = translate(Collections.singletonList(sinkOperation)); String jobName = getJobName("collect"); Pipeline pipeline = 
execEnv.createPipeline(transformations, tableConfig, jobName); try { JobClient jobClient = execEnv.executeAsync(pipeline); SelectResultProvider resultProvider = sinkOperation.getSelectResultProvider(); resultProvider.setJobClient(jobClient); return TableResultImpl.builder() .jobClient(jobClient) .resultKind(ResultKind.SUCCESS_WITH_CONTENT) .tableSchema(operation.getTableSchema()) .data(resultProvider.getResultIterator()) .setPrintStyle( TableResultImpl.PrintStyle.tableau( PrintUtils.MAX_COLUMN_WIDTH, PrintUtils.NULL_COLUMN, true, isStreamingMode)) .build(); } catch (Exception e) { throw new TableException("Failed to execute sql", e); } } @Override public void sqlUpdate(String stmt) { List<Operation> operations = parser.parse(stmt); if (operations.size() != 1) { throw new TableException(UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG); } Operation operation = operations.get(0); if (operation instanceof ModifyOperation) { buffer(Collections.singletonList((ModifyOperation) operation)); } else if (operation instanceof CreateTableOperation || operation instanceof DropTableOperation || operation instanceof AlterTableOperation || operation instanceof CreateViewOperation || operation instanceof DropViewOperation || operation instanceof CreateDatabaseOperation || operation instanceof DropDatabaseOperation || operation instanceof AlterDatabaseOperation || operation instanceof CreateCatalogFunctionOperation || operation instanceof CreateTempSystemFunctionOperation || operation instanceof DropCatalogFunctionOperation || operation instanceof DropTempSystemFunctionOperation || operation instanceof AlterCatalogFunctionOperation || operation instanceof CreateCatalogOperation || operation instanceof DropCatalogOperation || operation instanceof UseCatalogOperation || operation instanceof UseDatabaseOperation || operation instanceof LoadModuleOperation || operation instanceof UnloadModuleOperation) { executeOperation(operation); } else { throw new 
TableException(UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG); } } private TableResult executeOperation(Operation operation) { if (operation instanceof ModifyOperation) { return executeInternal(Collections.singletonList((ModifyOperation) operation)); } else if (operation instanceof CreateTableOperation) { CreateTableOperation createTableOperation = (CreateTableOperation) operation; if (createTableOperation.isTemporary()) { catalogManager.createTemporaryTable( createTableOperation.getCatalogTable(), createTableOperation.getTableIdentifier(), createTableOperation.isIgnoreIfExists()); } else { catalogManager.createTable( createTableOperation.getCatalogTable(), createTableOperation.getTableIdentifier(), createTableOperation.isIgnoreIfExists()); } return TableResultImpl.TABLE_RESULT_OK; } else if (operation instanceof DropTableOperation) { DropTableOperation dropTableOperation = (DropTableOperation) operation; if (dropTableOperation.isTemporary()) { catalogManager.dropTemporaryTable( dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists()); } else { catalogManager.dropTable( dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists()); } return TableResultImpl.TABLE_RESULT_OK; } else if (operation instanceof AlterTableOperation) { AlterTableOperation alterTableOperation = (AlterTableOperation) operation; Catalog catalog = getCatalogOrThrowException( alterTableOperation.getTableIdentifier().getCatalogName()); String exMsg = getDDLOpExecuteErrorMsg(alterTableOperation.asSummaryString()); try { if (alterTableOperation instanceof AlterTableRenameOperation) { AlterTableRenameOperation alterTableRenameOp = (AlterTableRenameOperation) operation; catalog.renameTable( alterTableRenameOp.getTableIdentifier().toObjectPath(), alterTableRenameOp.getNewTableIdentifier().getObjectName(), false); } else if (alterTableOperation instanceof AlterTableOptionsOperation) { AlterTableOptionsOperation alterTablePropertiesOp = (AlterTableOptionsOperation) operation; 
catalog.alterTable( alterTablePropertiesOp.getTableIdentifier().toObjectPath(), alterTablePropertiesOp.getCatalogTable(), false); } else if (alterTableOperation instanceof AlterTableAddConstraintOperation) { AlterTableAddConstraintOperation addConstraintOP = (AlterTableAddConstraintOperation) operation; CatalogTable oriTable = (CatalogTable) catalogManager .getTable(addConstraintOP.getTableIdentifier()) .get() .getTable(); TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriTable.getSchema()); if (addConstraintOP.getConstraintName().isPresent()) { builder.primaryKey( addConstraintOP.getConstraintName().get(), addConstraintOP.getColumnNames()); } else { builder.primaryKey(addConstraintOP.getColumnNames()); } CatalogTable newTable = new CatalogTableImpl( builder.build(), oriTable.getPartitionKeys(), oriTable.getOptions(), oriTable.getComment()); catalog.alterTable( addConstraintOP.getTableIdentifier().toObjectPath(), newTable, false); } else if (alterTableOperation instanceof AlterTableDropConstraintOperation) { AlterTableDropConstraintOperation dropConstraintOperation = (AlterTableDropConstraintOperation) operation; CatalogTable oriTable = (CatalogTable) catalogManager .getTable(dropConstraintOperation.getTableIdentifier()) .get() .getTable(); CatalogTable newTable = new CatalogTableImpl( TableSchemaUtils.dropConstraint( oriTable.getSchema(), dropConstraintOperation.getConstraintName()), oriTable.getPartitionKeys(), oriTable.getOptions(), oriTable.getComment()); catalog.alterTable( dropConstraintOperation.getTableIdentifier().toObjectPath(), newTable, false); } else if (alterTableOperation instanceof AlterPartitionPropertiesOperation) { AlterPartitionPropertiesOperation alterPartPropsOp = (AlterPartitionPropertiesOperation) operation; catalog.alterPartition( alterPartPropsOp.getTableIdentifier().toObjectPath(), alterPartPropsOp.getPartitionSpec(), alterPartPropsOp.getCatalogPartition(), false); } else if (alterTableOperation instanceof 
AlterTableSchemaOperation) { AlterTableSchemaOperation alterTableSchemaOperation = (AlterTableSchemaOperation) alterTableOperation; catalog.alterTable( alterTableSchemaOperation.getTableIdentifier().toObjectPath(), alterTableSchemaOperation.getCatalogTable(), false); } else if (alterTableOperation instanceof AddPartitionsOperation) { AddPartitionsOperation addPartitionsOperation = (AddPartitionsOperation) alterTableOperation; List<CatalogPartitionSpec> specs = addPartitionsOperation.getPartitionSpecs(); List<CatalogPartition> partitions = addPartitionsOperation.getCatalogPartitions(); boolean ifNotExists = addPartitionsOperation.ifNotExists(); ObjectPath tablePath = addPartitionsOperation.getTableIdentifier().toObjectPath(); for (int i = 0; i < specs.size(); i++) { catalog.createPartition( tablePath, specs.get(i), partitions.get(i), ifNotExists); } } else if (alterTableOperation instanceof DropPartitionsOperation) { DropPartitionsOperation dropPartitionsOperation = (DropPartitionsOperation) alterTableOperation; ObjectPath tablePath = dropPartitionsOperation.getTableIdentifier().toObjectPath(); boolean ifExists = dropPartitionsOperation.ifExists(); for (CatalogPartitionSpec spec : dropPartitionsOperation.getPartitionSpecs()) { catalog.dropPartition(tablePath, spec, ifExists); } } return TableResultImpl.TABLE_RESULT_OK; } catch (TableAlreadyExistException | TableNotExistException e) { throw new ValidationException(exMsg, e); } catch (Exception e) { throw new TableException(exMsg, e); } } else if (operation instanceof CreateViewOperation) { CreateViewOperation createViewOperation = (CreateViewOperation) operation; if (createViewOperation.isTemporary()) { catalogManager.createTemporaryTable( createViewOperation.getCatalogView(), createViewOperation.getViewIdentifier(), createViewOperation.isIgnoreIfExists()); } else { catalogManager.createTable( createViewOperation.getCatalogView(), createViewOperation.getViewIdentifier(), createViewOperation.isIgnoreIfExists()); } 
return TableResultImpl.TABLE_RESULT_OK; } else if (operation instanceof DropViewOperation) { DropViewOperation dropViewOperation = (DropViewOperation) operation; if (dropViewOperation.isTemporary()) { catalogManager.dropTemporaryView( dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists()); } else { catalogManager.dropView( dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists()); } return TableResultImpl.TABLE_RESULT_OK; } else if (operation instanceof AlterViewOperation) { AlterViewOperation alterViewOperation = (AlterViewOperation) operation; Catalog catalog = getCatalogOrThrowException( alterViewOperation.getViewIdentifier().getCatalogName()); String exMsg = getDDLOpExecuteErrorMsg(alterViewOperation.asSummaryString()); try { if (alterViewOperation instanceof AlterViewRenameOperation) { AlterViewRenameOperation alterTableRenameOp = (AlterViewRenameOperation) operation; catalog.renameTable( alterTableRenameOp.getViewIdentifier().toObjectPath(), alterTableRenameOp.getNewViewIdentifier().getObjectName(), false); } else if (alterViewOperation instanceof AlterViewPropertiesOperation) { AlterViewPropertiesOperation alterTablePropertiesOp = (AlterViewPropertiesOperation) operation; catalog.alterTable( alterTablePropertiesOp.getViewIdentifier().toObjectPath(), alterTablePropertiesOp.getCatalogView(), false); } else if (alterViewOperation instanceof AlterViewAsOperation) { AlterViewAsOperation alterViewAsOperation = (AlterViewAsOperation) alterViewOperation; catalog.alterTable( alterViewAsOperation.getViewIdentifier().toObjectPath(), alterViewAsOperation.getNewView(), false); } return TableResultImpl.TABLE_RESULT_OK; } catch (TableAlreadyExistException | TableNotExistException e) { throw new ValidationException(exMsg, e); } catch (Exception e) { throw new TableException(exMsg, e); } } else if (operation instanceof CreateDatabaseOperation) { CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation; Catalog catalog 
= getCatalogOrThrowException(createDatabaseOperation.getCatalogName()); String exMsg = getDDLOpExecuteErrorMsg(createDatabaseOperation.asSummaryString()); try { catalog.createDatabase( createDatabaseOperation.getDatabaseName(), createDatabaseOperation.getCatalogDatabase(), createDatabaseOperation.isIgnoreIfExists()); return TableResultImpl.TABLE_RESULT_OK; } catch (DatabaseAlreadyExistException e) { throw new ValidationException(exMsg, e); } catch (Exception e) { throw new TableException(exMsg, e); } } else if (operation instanceof DropDatabaseOperation) { DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation; Catalog catalog = getCatalogOrThrowException(dropDatabaseOperation.getCatalogName()); String exMsg = getDDLOpExecuteErrorMsg(dropDatabaseOperation.asSummaryString()); try { catalog.dropDatabase( dropDatabaseOperation.getDatabaseName(), dropDatabaseOperation.isIfExists(), dropDatabaseOperation.isCascade()); return TableResultImpl.TABLE_RESULT_OK; } catch (DatabaseNotExistException | DatabaseNotEmptyException e) { throw new ValidationException(exMsg, e); } catch (Exception e) { throw new TableException(exMsg, e); } } else if (operation instanceof AlterDatabaseOperation) { AlterDatabaseOperation alterDatabaseOperation = (AlterDatabaseOperation) operation; Catalog catalog = getCatalogOrThrowException(alterDatabaseOperation.getCatalogName()); String exMsg = getDDLOpExecuteErrorMsg(alterDatabaseOperation.asSummaryString()); try { catalog.alterDatabase( alterDatabaseOperation.getDatabaseName(), alterDatabaseOperation.getCatalogDatabase(), false); return TableResultImpl.TABLE_RESULT_OK; } catch (DatabaseNotExistException e) { throw new ValidationException(exMsg, e); } catch (Exception e) { throw new TableException(exMsg, e); } } else if (operation instanceof CreateCatalogFunctionOperation) { return createCatalogFunction((CreateCatalogFunctionOperation) operation); } else if (operation instanceof CreateTempSystemFunctionOperation) { return 
createSystemFunction((CreateTempSystemFunctionOperation) operation);
        } else if (operation instanceof DropCatalogFunctionOperation) {
            return dropCatalogFunction((DropCatalogFunctionOperation) operation);
        } else if (operation instanceof DropTempSystemFunctionOperation) {
            return dropSystemFunction((DropTempSystemFunctionOperation) operation);
        } else if (operation instanceof AlterCatalogFunctionOperation) {
            return alterCatalogFunction((AlterCatalogFunctionOperation) operation);
        } else if (operation instanceof CreateCatalogOperation) {
            return createCatalog((CreateCatalogOperation) operation);
        } else if (operation instanceof DropCatalogOperation) {
            DropCatalogOperation dropCatalogOperation = (DropCatalogOperation) operation;
            String exMsg = getDDLOpExecuteErrorMsg(dropCatalogOperation.asSummaryString());
            try {
                catalogManager.unregisterCatalog(
                        dropCatalogOperation.getCatalogName(), dropCatalogOperation.isIfExists());
                return TableResultImpl.TABLE_RESULT_OK;
            } catch (CatalogException e) {
                // Surface catalog-layer failures as validation errors with the DDL summary.
                throw new ValidationException(exMsg, e);
            }
        } else if (operation instanceof LoadModuleOperation) {
            return loadModule((LoadModuleOperation) operation);
        } else if (operation instanceof UnloadModuleOperation) {
            return unloadModule((UnloadModuleOperation) operation);
        } else if (operation instanceof UseModulesOperation) {
            return useModules((UseModulesOperation) operation);
        } else if (operation instanceof UseCatalogOperation) {
            UseCatalogOperation useCatalogOperation = (UseCatalogOperation) operation;
            catalogManager.setCurrentCatalog(useCatalogOperation.getCatalogName());
            return TableResultImpl.TABLE_RESULT_OK;
        } else if (operation instanceof UseDatabaseOperation) {
            UseDatabaseOperation useDatabaseOperation = (UseDatabaseOperation) operation;
            catalogManager.setCurrentCatalog(useDatabaseOperation.getCatalogName());
            catalogManager.setCurrentDatabase(useDatabaseOperation.getDatabaseName());
            return TableResultImpl.TABLE_RESULT_OK;
        } else if (operation instanceof ShowCatalogsOperation) {
            return buildShowResult("catalog name", listCatalogs());
        } else if (operation instanceof ShowCurrentCatalogOperation) {
            return buildShowResult(
                    "current catalog name", new String[] {catalogManager.getCurrentCatalog()});
        } else if (operation instanceof ShowDatabasesOperation) {
            return buildShowResult("database name", listDatabases());
        } else if (operation instanceof ShowCurrentDatabaseOperation) {
            return buildShowResult(
                    "current database name", new String[] {catalogManager.getCurrentDatabase()});
        } else if (operation instanceof ShowTablesOperation) {
            return buildShowResult("table name", listTables());
        } else if (operation instanceof ShowFunctionsOperation) {
            return buildShowResult("function name", listFunctions());
        } else if (operation instanceof ShowViewsOperation) {
            return buildShowResult("view name", listViews());
        } else if (operation instanceof ShowPartitionsOperation) {
            String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
            try {
                ShowPartitionsOperation showPartitionsOperation =
                        (ShowPartitionsOperation) operation;
                Catalog catalog =
                        getCatalogOrThrowException(
                                showPartitionsOperation.getTableIdentifier().getCatalogName());
                ObjectPath tablePath =
                        showPartitionsOperation.getTableIdentifier().toObjectPath();
                CatalogPartitionSpec partitionSpec = showPartitionsOperation.getPartitionSpec();
                // No spec given -> list all partitions; otherwise list those matching the spec.
                List<CatalogPartitionSpec> partitionSpecs =
                        partitionSpec == null
                                ? catalog.listPartitions(tablePath)
                                : catalog.listPartitions(tablePath, partitionSpec);
                List<String> partitionNames = new ArrayList<>(partitionSpecs.size());
                for (CatalogPartitionSpec spec : partitionSpecs) {
                    // Render each partition as "k1=v1/k2=v2/..." (Hive-style path form).
                    List<String> partitionKVs = new ArrayList<>(spec.getPartitionSpec().size());
                    for (Map.Entry<String, String> partitionKV :
                            spec.getPartitionSpec().entrySet()) {
                        partitionKVs.add(partitionKV.getKey() + "=" + partitionKV.getValue());
                    }
                    partitionNames.add(String.join("/", partitionKVs));
                }
                return buildShowResult("partition name", partitionNames.toArray(new String[0]));
            } catch (TableNotExistException e) {
                throw new ValidationException(exMsg, e);
            } catch (Exception e) {
                throw new TableException(exMsg, e);
            }
        } else if (operation instanceof ExplainOperation) {
            String explanation =
                    planner.explain(
                            Collections.singletonList(((ExplainOperation) operation).getChild()));
            return TableResultImpl.builder()
                    .resultKind(ResultKind.SUCCESS_WITH_CONTENT)
                    .tableSchema(TableSchema.builder().field("result", DataTypes.STRING()).build())
                    .data(Collections.singletonList(Row.of(explanation)))
                    .setPrintStyle(TableResultImpl.PrintStyle.rawContent())
                    .build();
        } else if (operation instanceof DescribeTableOperation) {
            DescribeTableOperation describeTableOperation = (DescribeTableOperation) operation;
            Optional<CatalogManager.TableLookupResult> result =
                    catalogManager.getTable(describeTableOperation.getSqlIdentifier());
            if (result.isPresent()) {
                return buildDescribeResult(result.get().getResolvedSchema());
            } else {
                throw new ValidationException(
                        String.format(
                                "Tables or views with the identifier '%s' doesn't exist",
                                describeTableOperation.getSqlIdentifier().asSummaryString()));
            }
        } else if (operation instanceof QueryOperation) {
            return executeInternal((QueryOperation) operation);
        } else {
            throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
        }
    }

    /** Instantiates a catalog via {@link TableFactoryService} and registers it under its name. */
    private TableResult createCatalog(CreateCatalogOperation operation) {
        String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
        try {
            String catalogName = operation.getCatalogName();
            Map<String, String> properties = operation.getProperties();
            final CatalogFactory factory =
                    TableFactoryService.find(CatalogFactory.class, properties, userClassLoader);
            Catalog catalog = factory.createCatalog(catalogName, properties);
            catalogManager.registerCatalog(catalogName, catalog);
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (CatalogException e) {
            throw new ValidationException(exMsg, e);
        }
    }

    /**
     * Loads a module by name. The module name is injected under the {@code MODULE_TYPE} key so
     * the factory lookup can resolve it; a user-supplied 'type' property is rejected.
     */
    private TableResult loadModule(LoadModuleOperation operation) {
        String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
        try {
            Map<String, String> properties = new HashMap<>(operation.getProperties());
            if (properties.containsKey(MODULE_TYPE)) {
                throw new ValidationException(
                        String.format(
                                "Property 'type' = '%s' is not supported since module name "
                                        + "is used to find module",
                                properties.get(MODULE_TYPE)));
            }
            properties.put(MODULE_TYPE, operation.getModuleName());
            final ModuleFactory factory =
                    TableFactoryService.find(ModuleFactory.class, properties, userClassLoader);
            moduleManager.loadModule(operation.getModuleName(), factory.createModule(properties));
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (ValidationException e) {
            throw new ValidationException(String.format("%s. %s", exMsg, e.getMessage()), e);
        } catch (Exception e) {
            throw new TableException(String.format("%s. %s", exMsg, e.getMessage()), e);
        }
    }

    /** Unloads a previously loaded module; failures are reported with the DDL summary prefix. */
    private TableResult unloadModule(UnloadModuleOperation operation) {
        String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
        try {
            moduleManager.unloadModule(operation.getModuleName());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (ValidationException e) {
            throw new ValidationException(String.format("%s. %s", exMsg, e.getMessage()), e);
        }
    }

    /** Activates the given modules, in the given order, via the module manager. */
    private TableResult useModules(UseModulesOperation operation) {
        String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
        try {
            moduleManager.useModules(operation.getModuleNames().toArray(new String[0]));
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (ValidationException e) {
            throw new ValidationException(String.format("%s. %s", exMsg, e.getMessage()), e);
        }
    }

    /** Builds a single-STRING-column SHOW-style result with one row per entry. */
    private TableResult buildShowResult(String columnName, String[] objects) {
        return buildResult(
                new String[] {columnName},
                new DataType[] {DataTypes.STRING()},
                Arrays.stream(objects).map((c) -> new String[] {c}).toArray(String[][]::new));
    }

    /**
     * Builds the DESCRIBE result: one row per column with name, type, nullability, primary-key
     * membership, computed-column extras, and watermark expression (null where not applicable).
     */
    private TableResult buildDescribeResult(TableSchema schema) {
        // rowtime attribute -> watermark expression
        Map<String, String> fieldToWatermark =
                schema.getWatermarkSpecs().stream()
                        .collect(
                                Collectors.toMap(
                                        WatermarkSpec::getRowtimeAttribute,
                                        WatermarkSpec::getWatermarkExpr));
        // each PK column maps to the full "PRI(col1, col2, ...)" rendering
        Map<String, String> fieldToPrimaryKey = new HashMap<>();
        schema.getPrimaryKey()
                .ifPresent(
                        (p) -> {
                            List<String> columns = p.getColumns();
                            columns.forEach(
                                    (c) ->
                                            fieldToPrimaryKey.put(
                                                    c,
                                                    String.format(
                                                            "PRI(%s)",
                                                            String.join(", ", columns))));
                        });
        Object[][] rows =
                schema.getTableColumns().stream()
                        .map(
                                (c) -> {
                                    final LogicalType logicalType = c.getType().getLogicalType();
                                    return new Object[] {
                                        c.getName(),
                                        // copy(true) prints the type without NOT NULL suffix;
                                        // nullability goes in its own column below
                                        logicalType.copy(true).asSummaryString(),
                                        logicalType.isNullable(),
                                        fieldToPrimaryKey.getOrDefault(c.getName(), null),
                                        c.explainExtras().orElse(null),
                                        fieldToWatermark.getOrDefault(c.getName(), null)
                                    };
                                })
                        .toArray(Object[][]::new);
        return buildResult(
                new String[] {"name", "type", "null", "key", "extras", "watermark"},
                new DataType[] {
                    DataTypes.STRING(),
                    DataTypes.STRING(),
                    DataTypes.BOOLEAN(),
                    DataTypes.STRING(),
                    DataTypes.STRING(),
                    DataTypes.STRING()
                },
                rows);
    }

    /** Wraps already-materialized rows into a tableau-printed SUCCESS_WITH_CONTENT result. */
    private TableResult buildResult(String[] headers, DataType[] types, Object[][] rows) {
        return TableResultImpl.builder()
                .resultKind(ResultKind.SUCCESS_WITH_CONTENT)
                .tableSchema(TableSchema.builder().fields(headers, types).build())
                .data(Arrays.stream(rows).map(Row::of).collect(Collectors.toList()))
                .setPrintStyle(
                        TableResultImpl.PrintStyle.tableau(Integer.MAX_VALUE, "", false, false))
                .build();
    }

    /**
     * extract sink identifier names from {@link ModifyOperation}s.
     *
     * <p>If there are multiple ModifyOperations have same name, an index suffix will be added at
     * the end of the name to ensure each name is unique.
     */
    private List<String> extractSinkIdentifierNames(List<ModifyOperation> operations) {
        List<String> tableNames = new ArrayList<>(operations.size());
        Map<String, Integer> tableNameToCount = new HashMap<>();
        for (ModifyOperation operation : operations) {
            if (operation instanceof CatalogSinkModifyOperation) {
                ObjectIdentifier identifier =
                        ((CatalogSinkModifyOperation) operation).getTableIdentifier();
                String fullName = identifier.asSummaryString();
                tableNames.add(fullName);
                tableNameToCount.put(fullName, tableNameToCount.getOrDefault(fullName, 0) + 1);
            } else {
                throw new UnsupportedOperationException("Unsupported operation: " + operation);
            }
        }
        // Second pass: names occurring more than once get a 1-based "_<index>" suffix.
        Map<String, Integer> tableNameToIndex = new HashMap<>();
        return tableNames.stream()
                .map(
                        tableName -> {
                            if (tableNameToCount.get(tableName) == 1) {
                                return tableName;
                            } else {
                                Integer index = tableNameToIndex.getOrDefault(tableName, 0) + 1;
                                tableNameToIndex.put(tableName, index);
                                return tableName + "_" + index;
                            }
                        })
                .collect(Collectors.toList());
    }

    /** Returns the configured pipeline name, falling back to the given default. */
    private String getJobName(String defaultJobName) {
        return tableConfig.getConfiguration().getString(PipelineOptions.NAME, defaultJobName);
    }

    /** Get catalog from catalogName or throw a ValidationException if the catalog not exists. */
    private Catalog getCatalogOrThrowException(String catalogName) {
        return getCatalog(catalogName)
                .orElseThrow(
                        () ->
                                new ValidationException(
                                        String.format("Catalog %s does not exist", catalogName)));
    }

    /** Common error-message prefix for failed DDL operations. */
    private String getDDLOpExecuteErrorMsg(String action) {
        return String.format("Could not execute %s", action);
    }

    @Override
    public String getCurrentCatalog() {
        return catalogManager.getCurrentCatalog();
    }

    @Override
    public void useCatalog(String catalogName) {
        catalogManager.setCurrentCatalog(catalogName);
    }

    @Override
    public String getCurrentDatabase() {
        return catalogManager.getCurrentDatabase();
    }

    @Override
    public void useDatabase(String databaseName) {
        catalogManager.setCurrentDatabase(databaseName);
    }

    @Override
    public TableConfig getConfig() {
        return tableConfig;
    }

    @Override
    public JobExecutionResult execute(String jobName) throws Exception {
        // Drains the buffered modify operations into a pipeline and runs it synchronously.
        Pipeline pipeline =
                execEnv.createPipeline(translateAndClearBuffer(), tableConfig, jobName);
        return execEnv.execute(pipeline);
    }

    @Override
    public Parser getParser() {
        return parser;
    }

    @Override
    public CatalogManager getCatalogManager() {
        return catalogManager;
    }

    /**
     * Subclasses can override this method to transform the given QueryOperation to a new one with
     * the qualified object identifier. This is needed for some QueryOperations, e.g.
     * JavaDataStreamQueryOperation, which doesn't know the registered identifier when created
     * ({@code fromDataStream(DataStream)}. But the identifier is required when converting this
     * QueryOperation to RelNode.
     */
    protected QueryOperation qualifyQueryOperation(
            ObjectIdentifier identifier, QueryOperation queryOperation) {
        return queryOperation;
    }

    /**
     * Subclasses can override this method to add additional checks.
     *
     * @param tableSource tableSource to validate
     */
    protected void validateTableSource(TableSource<?> tableSource) {
        TableSourceValidation.validateTableSource(tableSource, tableSource.getTableSchema());
    }

    /**
     * Translate the buffered operations to Transformations, and clear the buffer.
     *
     * <p>The buffer will be clear even if the `translate` fails. In most cases, the failure is not
     * retryable (e.g. type mismatch, can't generate physical plan). If the buffer is not clear
     * after failure, the following `translate` will also fail.
     */
    protected List<Transformation<?>> translateAndClearBuffer() {
        List<Transformation<?>> transformations;
        try {
            transformations = translate(bufferedModifyOperations);
        } finally {
            bufferedModifyOperations.clear();
        }
        return transformations;
    }

    private List<Transformation<?>> translate(List<ModifyOperation> modifyOperations) {
        return planner.translate(modifyOperations);
    }

    /** Queues modify operations for a later {@link #execute(String)} / explain. */
    private void buffer(List<ModifyOperation> modifyOperations) {
        bufferedModifyOperations.addAll(modifyOperations);
    }

    /** Maps the extended flag (and streaming mode) to the set of explain details requested. */
    @VisibleForTesting
    protected ExplainDetail[] getExplainDetails(boolean extended) {
        if (extended) {
            if (isStreamingMode) {
                return new ExplainDetail[] {
                    ExplainDetail.ESTIMATED_COST, ExplainDetail.CHANGELOG_MODE
                };
            } else {
                return new ExplainDetail[] {ExplainDetail.ESTIMATED_COST};
            }
        } else {
            return new ExplainDetail[0];
        }
    }

    @Override
    public void registerTableSourceInternal(String name, TableSource<?> tableSource) {
        validateTableSource(tableSource);
        ObjectIdentifier objectIdentifier =
                catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
        Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);
        if (table.isPresent()) {
            if (table.get() instanceof ConnectorCatalogTable<?, ?>) {
                ConnectorCatalogTable<?, ?> sourceSinkTable =
                        (ConnectorCatalogTable<?, ?>) table.get();
                if (sourceSinkTable.getTableSource().isPresent()) {
                    throw new ValidationException(
                            String.format(
                                    "Table '%s' already exists. Please choose a different name.",
                                    name));
                } else {
                    // A sink-only entry exists under this name: merge source + sink into one
                    // combined temporary table (drop + recreate under the same identifier).
                    ConnectorCatalogTable sourceAndSink =
                            ConnectorCatalogTable.sourceAndSink(
                                    tableSource,
                                    sourceSinkTable.getTableSink().get(),
                                    !IS_STREAM_TABLE);
                    catalogManager.dropTemporaryTable(objectIdentifier, false);
                    catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false);
                }
            } else {
                throw new ValidationException(
                        String.format(
                                "Table '%s' already exists. Please choose a different name.",
                                name));
            }
        } else {
            ConnectorCatalogTable source =
                    ConnectorCatalogTable.source(tableSource, !IS_STREAM_TABLE);
            catalogManager.createTemporaryTable(source, objectIdentifier, false);
        }
    }

    @Override
    public void registerTableSinkInternal(String name, TableSink<?> tableSink) {
        ObjectIdentifier objectIdentifier =
                catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
        Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);
        if (table.isPresent()) {
            if (table.get() instanceof ConnectorCatalogTable<?, ?>) {
                ConnectorCatalogTable<?, ?> sourceSinkTable =
                        (ConnectorCatalogTable<?, ?>) table.get();
                if (sourceSinkTable.getTableSink().isPresent()) {
                    throw new ValidationException(
                            String.format(
                                    "Table '%s' already exists. Please choose a different name.",
                                    name));
                } else {
                    // Mirror of registerTableSourceInternal: merge existing source with new sink.
                    ConnectorCatalogTable sourceAndSink =
                            ConnectorCatalogTable.sourceAndSink(
                                    sourceSinkTable.getTableSource().get(),
                                    tableSink,
                                    !IS_STREAM_TABLE);
                    catalogManager.dropTemporaryTable(objectIdentifier, false);
                    catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false);
                }
            } else {
                throw new ValidationException(
                        String.format(
                                "Table '%s' already exists. Please choose a different name.",
                                name));
            }
        } else {
            ConnectorCatalogTable sink = ConnectorCatalogTable.sink(tableSink, !IS_STREAM_TABLE);
            catalogManager.createTemporaryTable(sink, objectIdentifier, false);
        }
    }

    /** Looks up the identifier and returns its table only if it is a temporary one. */
    private Optional<CatalogBaseTable> getTemporaryTable(ObjectIdentifier identifier) {
        return catalogManager
                .getTable(identifier)
                .filter(CatalogManager.TableLookupResult::isTemporary)
                .map(CatalogManager.TableLookupResult::getTable);
    }

    /** Creates a catalog function, either temporary (function catalog) or persistent (catalog). */
    private TableResult createCatalogFunction(
            CreateCatalogFunctionOperation createCatalogFunctionOperation) {
        String exMsg = getDDLOpExecuteErrorMsg(createCatalogFunctionOperation.asSummaryString());
        try {
            if (createCatalogFunctionOperation.isTemporary()) {
                functionCatalog.registerTemporaryCatalogFunction(
                        UnresolvedIdentifier.of(
                                createCatalogFunctionOperation.getFunctionIdentifier().toList()),
                        createCatalogFunctionOperation.getCatalogFunction(),
                        createCatalogFunctionOperation.isIgnoreIfExists());
            } else {
                Catalog catalog =
                        getCatalogOrThrowException(
                                createCatalogFunctionOperation
                                        .getFunctionIdentifier()
                                        .getCatalogName());
                catalog.createFunction(
                        createCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(),
                        createCatalogFunctionOperation.getCatalogFunction(),
                        createCatalogFunctionOperation.isIgnoreIfExists());
            }
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (ValidationException e) {
            throw e;
        } catch (FunctionAlreadyExistException e) {
            throw new ValidationException(e.getMessage(), e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    }

    /** Alters a persistent catalog function; temporary functions are explicitly unsupported. */
    private TableResult alterCatalogFunction(
            AlterCatalogFunctionOperation alterCatalogFunctionOperation) {
        String exMsg = getDDLOpExecuteErrorMsg(alterCatalogFunctionOperation.asSummaryString());
        try {
            CatalogFunction function = alterCatalogFunctionOperation.getCatalogFunction();
            if (alterCatalogFunctionOperation.isTemporary()) {
                throw new ValidationException(
                        "Alter temporary catalog function is not supported");
            } else {
                Catalog catalog =
                        getCatalogOrThrowException(
                                alterCatalogFunctionOperation
                                        .getFunctionIdentifier()
                                        .getCatalogName());
                catalog.alterFunction(
                        alterCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(),
                        function,
                        alterCatalogFunctionOperation.isIfExists());
            }
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (ValidationException e) {
            throw e;
        } catch (FunctionNotExistException e) {
            throw new ValidationException(e.getMessage(), e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    }

    /** Drops a catalog function, temporary or persistent, mirroring createCatalogFunction. */
    private TableResult dropCatalogFunction(
            DropCatalogFunctionOperation dropCatalogFunctionOperation) {
        String exMsg = getDDLOpExecuteErrorMsg(dropCatalogFunctionOperation.asSummaryString());
        try {
            if (dropCatalogFunctionOperation.isTemporary()) {
                functionCatalog.dropTempCatalogFunction(
                        dropCatalogFunctionOperation.getFunctionIdentifier(),
                        dropCatalogFunctionOperation.isIfExists());
            } else {
                Catalog catalog =
                        getCatalogOrThrowException(
                                dropCatalogFunctionOperation
                                        .getFunctionIdentifier()
                                        .getCatalogName());
                catalog.dropFunction(
                        dropCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(),
                        dropCatalogFunctionOperation.isIfExists());
            }
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (ValidationException e) {
            throw e;
        } catch (FunctionNotExistException e) {
            throw new ValidationException(e.getMessage(), e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    }

    /** Registers a temporary system function from a CREATE TEMPORARY SYSTEM FUNCTION statement. */
    private TableResult createSystemFunction(CreateTempSystemFunctionOperation operation) {
        String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
        try {
            functionCatalog.registerTemporarySystemFunction(
                    operation.getFunctionName(),
                    operation.getFunctionClass(),
                    operation.getFunctionLanguage(),
                    operation.isIgnoreIfExists());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (ValidationException e) {
            throw e;
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    }

    /** Drops a temporary system function. */
    private TableResult dropSystemFunction(DropTempSystemFunctionOperation operation) {
        try {
            functionCatalog.dropTemporarySystemFunction(
                    operation.getFunctionName(), operation.isIfExists());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (ValidationException e) {
            throw e;
        } catch (Exception e) {
            throw new TableException(getDDLOpExecuteErrorMsg(operation.asSummaryString()), e);
        }
    }

    /** Wraps a QueryOperation into a Table bound to this environment. */
    protected TableImpl createTable(QueryOperation tableOperation) {
        return TableImpl.createTable(
                this,
                tableOperation,
                operationTreeBuilder,
                functionCatalog.asLookup(parser::parseIdentifier));
    }

    @Override
    public String getJsonPlan(String stmt) {
        List<Operation> operations = parser.parse(stmt);
        if (operations.size() != 1) {
            throw new TableException(
                    "Unsupported SQL query! getJsonPlan() only accepts a single INSERT statement.");
        }
        Operation operation = operations.get(0);
        List<ModifyOperation> modifyOperations = new ArrayList<>(1);
        if (operation instanceof ModifyOperation) {
            modifyOperations.add((ModifyOperation) operation);
        } else {
            throw new TableException("Only INSERT is supported now.");
        }
        return getJsonPlan(modifyOperations);
    }

    @Override
    public String getJsonPlan(List<ModifyOperation> operations) {
        return planner.getJsonPlan(operations);
    }

    @Override
    public String explainJsonPlan(String jsonPlan, ExplainDetail... extraDetails) {
        return planner.explainJsonPlan(jsonPlan, extraDetails);
    }

    @Override
    public TableResult executeJsonPlan(String jsonPlan) {
        List<Transformation<?>> transformations = planner.translateJsonPlan(jsonPlan);
        // JSON plans carry no sink identifiers, so synthesize "sink0", "sink1", ... column names.
        List<String> sinkIdentifierNames = new ArrayList<>();
        for (int i = 0; i < transformations.size(); ++i) {
            sinkIdentifierNames.add("sink" + i);
        }
        return executeInternal(transformations, sinkIdentifierNames);
    }
}
class TableEnvironmentImpl implements TableEnvironmentInternal { private static final boolean IS_STREAM_TABLE = true; private final CatalogManager catalogManager; private final ModuleManager moduleManager; private final OperationTreeBuilder operationTreeBuilder; private final List<ModifyOperation> bufferedModifyOperations = new ArrayList<>(); protected final TableConfig tableConfig; protected final Executor execEnv; protected final FunctionCatalog functionCatalog; protected final Planner planner; private final boolean isStreamingMode; private final ClassLoader userClassLoader; private static final String UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG = "Unsupported SQL query! sqlUpdate() only accepts a single SQL statement of type " + "INSERT, CREATE TABLE, DROP TABLE, ALTER TABLE, USE CATALOG, USE [CATALOG.]DATABASE, " + "CREATE DATABASE, DROP DATABASE, ALTER DATABASE, CREATE FUNCTION, DROP FUNCTION, ALTER FUNCTION, " + "CREATE CATALOG, DROP CATALOG, CREATE VIEW, DROP VIEW, LOAD MODULE, UNLOAD MODULE, USE MODULES."; private static final String UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG = "Unsupported SQL query! executeSql() only accepts a single SQL statement of type " + "CREATE TABLE, DROP TABLE, ALTER TABLE, CREATE DATABASE, DROP DATABASE, ALTER DATABASE, " + "CREATE FUNCTION, DROP FUNCTION, ALTER FUNCTION, CREATE CATALOG, DROP CATALOG, " + "USE CATALOG, USE [CATALOG.]DATABASE, SHOW CATALOGS, SHOW DATABASES, SHOW TABLES, SHOW FUNCTIONS, SHOW PARTITIONS" + "CREATE VIEW, DROP VIEW, SHOW VIEWS, INSERT, DESCRIBE, LOAD MODULE, UNLOAD MODULE, USE MODULES."; /** Provides necessary methods for {@link ConnectTableDescriptor}. 
*/ private final Registration registration = new Registration() { @Override public void createTemporaryTable(String path, CatalogBaseTable table) { UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path); ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier); catalogManager.createTemporaryTable(table, objectIdentifier, false); } }; protected TableEnvironmentImpl( CatalogManager catalogManager, ModuleManager moduleManager, TableConfig tableConfig, Executor executor, FunctionCatalog functionCatalog, Planner planner, boolean isStreamingMode, ClassLoader userClassLoader) { this.catalogManager = catalogManager; this.catalogManager.setCatalogTableSchemaResolver( new CatalogTableSchemaResolver(planner.getParser(), isStreamingMode)); this.moduleManager = moduleManager; this.execEnv = executor; this.tableConfig = tableConfig; this.functionCatalog = functionCatalog; this.planner = planner; this.isStreamingMode = isStreamingMode; this.userClassLoader = userClassLoader; this.operationTreeBuilder = OperationTreeBuilder.create( tableConfig, functionCatalog.asLookup(getParser()::parseIdentifier), catalogManager.getDataTypeFactory(), path -> { try { UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path); Optional<CatalogQueryOperation> catalogQueryOperation = scanInternal(unresolvedIdentifier); return catalogQueryOperation.map( t -> ApiExpressionUtils.tableRef(path, t)); } catch (SqlParserException ex) { return Optional.empty(); } }, (sqlExpression, inputSchema) -> { try { return getParser().parseSqlExpression(sqlExpression, inputSchema); } catch (Throwable t) { throw new ValidationException( String.format("Invalid SQL expression: %s", sqlExpression), t); } }, isStreamingMode); } public static TableEnvironmentImpl create(EnvironmentSettings settings) { return create(settings, settings.toConfiguration()); } private static TableEnvironmentImpl create( EnvironmentSettings settings, Configuration 
configuration) { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); TableConfig tableConfig = new TableConfig(); tableConfig.addConfiguration(configuration); ModuleManager moduleManager = new ModuleManager(); CatalogManager catalogManager = CatalogManager.newBuilder() .classLoader(classLoader) .config(tableConfig.getConfiguration()) .defaultCatalog( settings.getBuiltInCatalogName(), new GenericInMemoryCatalog( settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName())) .build(); FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager); Map<String, String> executorProperties = settings.toExecutorProperties(); Executor executor = ComponentFactoryService.find(ExecutorFactory.class, executorProperties) .create(executorProperties); Map<String, String> plannerProperties = settings.toPlannerProperties(); Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties) .create( plannerProperties, executor, tableConfig, functionCatalog, catalogManager); return new TableEnvironmentImpl( catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, settings.isStreamingMode(), classLoader); } @Override public Table fromValues(Object... values) { return fromValues(Arrays.asList(values)); } @Override public Table fromValues(AbstractDataType<?> rowType, Object... values) { return fromValues(rowType, Arrays.asList(values)); } @Override public Table fromValues(Expression... values) { return createTable(operationTreeBuilder.values(values)); } @Override public Table fromValues(AbstractDataType<?> rowType, Expression... 
values) { final DataType resolvedDataType = catalogManager.getDataTypeFactory().createDataType(rowType); return createTable(operationTreeBuilder.values(resolvedDataType, values)); } @Override public Table fromValues(Iterable<?> values) { Expression[] exprs = StreamSupport.stream(values.spliterator(), false) .map(ApiExpressionUtils::objectToExpression) .toArray(Expression[]::new); return fromValues(exprs); } @Override public Table fromValues(AbstractDataType<?> rowType, Iterable<?> values) { Expression[] exprs = StreamSupport.stream(values.spliterator(), false) .map(ApiExpressionUtils::objectToExpression) .toArray(Expression[]::new); return fromValues(rowType, exprs); } @VisibleForTesting public Planner getPlanner() { return planner; } @Override public Table fromTableSource(TableSource<?> source) { return createTable(new TableSourceQueryOperation<>(source, !IS_STREAM_TABLE)); } @Override public void registerCatalog(String catalogName, Catalog catalog) { catalogManager.registerCatalog(catalogName, catalog); } @Override public Optional<Catalog> getCatalog(String catalogName) { return catalogManager.getCatalog(catalogName); } @Override public void loadModule(String moduleName, Module module) { moduleManager.loadModule(moduleName, module); } @Override public void useModules(String... moduleNames) { moduleManager.useModules(moduleNames); } @Override public void unloadModule(String moduleName) { moduleManager.unloadModule(moduleName); } @Override public void registerFunction(String name, ScalarFunction function) { functionCatalog.registerTempSystemScalarFunction(name, function); } @Override public void createTemporarySystemFunction( String name, Class<? 
extends UserDefinedFunction> functionClass) { final UserDefinedFunction functionInstance = UserDefinedFunctionHelper.instantiateFunction(functionClass); createTemporarySystemFunction(name, functionInstance); } @Override public void createTemporarySystemFunction(String name, UserDefinedFunction functionInstance) { functionCatalog.registerTemporarySystemFunction(name, functionInstance, false); } @Override public boolean dropTemporarySystemFunction(String name) { return functionCatalog.dropTemporarySystemFunction(name, true); } @Override public void createFunction(String path, Class<? extends UserDefinedFunction> functionClass) { createFunction(path, functionClass, false); } @Override public void createFunction( String path, Class<? extends UserDefinedFunction> functionClass, boolean ignoreIfExists) { final UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path); functionCatalog.registerCatalogFunction( unresolvedIdentifier, functionClass, ignoreIfExists); } @Override public boolean dropFunction(String path) { final UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path); return functionCatalog.dropCatalogFunction(unresolvedIdentifier, true); } @Override public void createTemporaryFunction( String path, Class<? 
extends UserDefinedFunction> functionClass) { final UserDefinedFunction functionInstance = UserDefinedFunctionHelper.instantiateFunction(functionClass); createTemporaryFunction(path, functionInstance); } @Override public void createTemporaryFunction(String path, UserDefinedFunction functionInstance) { final UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path); functionCatalog.registerTemporaryCatalogFunction( unresolvedIdentifier, functionInstance, false); } @Override public boolean dropTemporaryFunction(String path) { final UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path); return functionCatalog.dropTemporaryCatalogFunction(unresolvedIdentifier, true); } @Override public void registerTable(String name, Table table) { UnresolvedIdentifier identifier = UnresolvedIdentifier.of(name); createTemporaryView(identifier, table); } @Override public void createTemporaryView(String path, Table view) { UnresolvedIdentifier identifier = getParser().parseIdentifier(path); createTemporaryView(identifier, view); } private void createTemporaryView(UnresolvedIdentifier identifier, Table view) { if (((TableImpl) view).getTableEnvironment() != this) { throw new TableException( "Only table API objects that belong to this TableEnvironment can be registered."); } ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(identifier); QueryOperation queryOperation = qualifyQueryOperation(tableIdentifier, view.getQueryOperation()); CatalogBaseTable tableTable = new QueryOperationCatalogView(queryOperation); catalogManager.createTemporaryTable(tableTable, tableIdentifier, false); } @Override public Table scan(String... 
tablePath) { UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(tablePath); return scanInternal(unresolvedIdentifier) .map(this::createTable) .orElseThrow( () -> new ValidationException( String.format( "Table %s was not found.", unresolvedIdentifier))); } @Override public Table from(String path) { UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path); return scanInternal(unresolvedIdentifier) .map(this::createTable) .orElseThrow( () -> new ValidationException( String.format( "Table %s was not found.", unresolvedIdentifier))); } @Override public void insertInto(String targetPath, Table table) { UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(targetPath); insertIntoInternal(unresolvedIdentifier, table); } @Override public void insertInto(Table table, String sinkPath, String... sinkPathContinued) { List<String> fullPath = new ArrayList<>(Arrays.asList(sinkPathContinued)); fullPath.add(0, sinkPath); UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(fullPath); insertIntoInternal(unresolvedIdentifier, table); } private void insertIntoInternal(UnresolvedIdentifier unresolvedIdentifier, Table table) { ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier); List<ModifyOperation> modifyOperations = Collections.singletonList( new CatalogSinkModifyOperation( objectIdentifier, table.getQueryOperation())); buffer(modifyOperations); } private Optional<CatalogQueryOperation> scanInternal(UnresolvedIdentifier identifier) { ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(identifier); return catalogManager .getTable(tableIdentifier) .map(t -> new CatalogQueryOperation(tableIdentifier, t.getResolvedSchema())); } @Override public ConnectTableDescriptor connect(ConnectorDescriptor connectorDescriptor) { return new StreamTableDescriptor(registration, connectorDescriptor); } @Override public String[] listCatalogs() { return 
catalogManager.listCatalogs().stream().sorted().toArray(String[]::new); } @Override public String[] listModules() { return moduleManager.listModules().toArray(new String[0]); } @Override public ModuleEntry[] listFullModules() { return moduleManager.listFullModules().toArray(new ModuleEntry[0]); } @Override public String[] listDatabases() { return catalogManager .getCatalog(catalogManager.getCurrentCatalog()) .get() .listDatabases() .toArray(new String[0]); } @Override public String[] listTables() { return catalogManager.listTables().stream().sorted().toArray(String[]::new); } @Override public String[] listViews() { return catalogManager.listViews().stream().sorted().toArray(String[]::new); } @Override public String[] listTemporaryTables() { return catalogManager.listTemporaryTables().stream().sorted().toArray(String[]::new); } @Override public String[] listTemporaryViews() { return catalogManager.listTemporaryViews().stream().sorted().toArray(String[]::new); } @Override public boolean dropTemporaryTable(String path) { UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path); ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier); try { catalogManager.dropTemporaryTable(identifier, false); return true; } catch (ValidationException e) { return false; } } @Override public boolean dropTemporaryView(String path) { UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path); ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier); try { catalogManager.dropTemporaryView(identifier, false); return true; } catch (ValidationException e) { return false; } } @Override public String[] listUserDefinedFunctions() { return functionCatalog.getUserDefinedFunctions(); } @Override public String[] listFunctions() { return functionCatalog.getFunctions(); } @Override public String explain(Table table) { return explain(table, false); } @Override public String explain(Table table, boolean 
extended) {
    // Tail of explain(Table, boolean): delegate to the planner with the requested detail level.
    return planner.explain(
            Collections.singletonList(table.getQueryOperation()), getExplainDetails(extended));
}

@Override
public String explain(boolean extended) {
    // Explains all currently buffered (not yet executed) modify operations.
    List<Operation> operations =
            bufferedModifyOperations.stream()
                    .map(o -> (Operation) o)
                    .collect(Collectors.toList());
    return planner.explain(operations, getExplainDetails(extended));
}

@Override
public String explainSql(String statement, ExplainDetail... extraDetails) {
    List<Operation> operations = getParser().parse(statement);
    // Exactly one statement is accepted per call.
    if (operations.size() != 1) {
        throw new TableException(
                "Unsupported SQL query! explainSql() only accepts a single SQL query.");
    }
    return planner.explain(operations, extraDetails);
}

@Override
public String explainInternal(List<Operation> operations, ExplainDetail... extraDetails) {
    return planner.explain(operations, extraDetails);
}

@Override
public String[] getCompletionHints(String statement, int position) {
    return planner.getCompletionHints(statement, position);
}

@Override
public Table sqlQuery(String query) {
    List<Operation> operations = getParser().parse(query);
    if (operations.size() != 1) {
        throw new ValidationException(
                "Unsupported SQL query! sqlQuery() only accepts a single SQL query.");
    }
    Operation operation = operations.get(0);
    // Only pure queries are accepted here; modifying statements must go through
    // executeSql()/sqlUpdate() instead.
    if (operation instanceof QueryOperation && !(operation instanceof ModifyOperation)) {
        return createTable((QueryOperation) operation);
    } else {
        throw new ValidationException(
                "Unsupported SQL query! sqlQuery() only accepts a single SQL query of type "
                        + "SELECT, UNION, INTERSECT, EXCEPT, VALUES, and ORDER_BY.");
    }
}

@Override
public TableResult executeSql(String statement) {
    List<Operation> operations = getParser().parse(statement);
    if (operations.size() != 1) {
        throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
    }
    return executeOperation(operations.get(0));
}

@Override
public StatementSet createStatementSet() {
    return new StatementSetImpl(this);
}

@Override
public TableResult executeInternal(List<ModifyOperation> operations) {
    // Translate to runtime Transformations, then submit all of them as one job.
    List<Transformation<?>> transformations = translate(operations);
    List<String> sinkIdentifierNames = extractSinkIdentifierNames(operations);
    return executeInternal(transformations, sinkIdentifierNames);
}

private TableResult executeInternal(
        List<Transformation<?>> transformations, List<String> sinkIdentifierNames) {
    // Job name defaults to "insert-into_<sinks>" unless overridden via PipelineOptions.NAME
    // (see getJobName()).
    String jobName = getJobName("insert-into_" + String.join(",", sinkIdentifierNames));
    Pipeline pipeline = execEnv.createPipeline(transformations, tableConfig, jobName);
    try {
        JobClient jobClient = execEnv.executeAsync(pipeline);
        TableSchema.Builder builder = TableSchema.builder();
        // One BIGINT column per sink; -1L is used as the placeholder affected-row count.
        Object[] affectedRowCounts = new Long[transformations.size()];
        for (int i = 0; i < transformations.size(); ++i) {
            builder.field(sinkIdentifierNames.get(i), DataTypes.BIGINT());
            affectedRowCounts[i] = -1L;
        }
        return TableResultImpl.builder()
                .jobClient(jobClient)
                .resultKind(ResultKind.SUCCESS_WITH_CONTENT)
                .tableSchema(builder.build())
                .data(
                        new InsertResultIterator(
                                jobClient, Row.of(affectedRowCounts), userClassLoader))
                .build();
    } catch (Exception e) {
        throw new TableException("Failed to execute sql", e);
    }
}

@Override
public TableResult executeInternal(QueryOperation operation) {
    // Wrap the query in a collecting sink so results can be iterated on the client side.
    SelectSinkOperation sinkOperation = new SelectSinkOperation(operation);
    List<Transformation<?>> transformations =
            translate(Collections.singletonList(sinkOperation));
    String jobName = getJobName("collect");
    Pipeline pipeline =
execEnv.createPipeline(transformations, tableConfig, jobName); try { JobClient jobClient = execEnv.executeAsync(pipeline); SelectResultProvider resultProvider = sinkOperation.getSelectResultProvider(); resultProvider.setJobClient(jobClient); return TableResultImpl.builder() .jobClient(jobClient) .resultKind(ResultKind.SUCCESS_WITH_CONTENT) .tableSchema(operation.getTableSchema()) .data(resultProvider.getResultIterator()) .setPrintStyle( TableResultImpl.PrintStyle.tableau( PrintUtils.MAX_COLUMN_WIDTH, PrintUtils.NULL_COLUMN, true, isStreamingMode)) .build(); } catch (Exception e) { throw new TableException("Failed to execute sql", e); } } @Override public void sqlUpdate(String stmt) { List<Operation> operations = getParser().parse(stmt); if (operations.size() != 1) { throw new TableException(UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG); } Operation operation = operations.get(0); if (operation instanceof ModifyOperation) { buffer(Collections.singletonList((ModifyOperation) operation)); } else if (operation instanceof CreateTableOperation || operation instanceof DropTableOperation || operation instanceof AlterTableOperation || operation instanceof CreateViewOperation || operation instanceof DropViewOperation || operation instanceof CreateDatabaseOperation || operation instanceof DropDatabaseOperation || operation instanceof AlterDatabaseOperation || operation instanceof CreateCatalogFunctionOperation || operation instanceof CreateTempSystemFunctionOperation || operation instanceof DropCatalogFunctionOperation || operation instanceof DropTempSystemFunctionOperation || operation instanceof AlterCatalogFunctionOperation || operation instanceof CreateCatalogOperation || operation instanceof DropCatalogOperation || operation instanceof UseCatalogOperation || operation instanceof UseDatabaseOperation || operation instanceof LoadModuleOperation || operation instanceof UnloadModuleOperation) { executeOperation(operation); } else { throw new 
TableException(UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG); } } private TableResult executeOperation(Operation operation) { if (operation instanceof ModifyOperation) { return executeInternal(Collections.singletonList((ModifyOperation) operation)); } else if (operation instanceof CreateTableOperation) { CreateTableOperation createTableOperation = (CreateTableOperation) operation; if (createTableOperation.isTemporary()) { catalogManager.createTemporaryTable( createTableOperation.getCatalogTable(), createTableOperation.getTableIdentifier(), createTableOperation.isIgnoreIfExists()); } else { catalogManager.createTable( createTableOperation.getCatalogTable(), createTableOperation.getTableIdentifier(), createTableOperation.isIgnoreIfExists()); } return TableResultImpl.TABLE_RESULT_OK; } else if (operation instanceof DropTableOperation) { DropTableOperation dropTableOperation = (DropTableOperation) operation; if (dropTableOperation.isTemporary()) { catalogManager.dropTemporaryTable( dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists()); } else { catalogManager.dropTable( dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists()); } return TableResultImpl.TABLE_RESULT_OK; } else if (operation instanceof AlterTableOperation) { AlterTableOperation alterTableOperation = (AlterTableOperation) operation; Catalog catalog = getCatalogOrThrowException( alterTableOperation.getTableIdentifier().getCatalogName()); String exMsg = getDDLOpExecuteErrorMsg(alterTableOperation.asSummaryString()); try { if (alterTableOperation instanceof AlterTableRenameOperation) { AlterTableRenameOperation alterTableRenameOp = (AlterTableRenameOperation) operation; catalog.renameTable( alterTableRenameOp.getTableIdentifier().toObjectPath(), alterTableRenameOp.getNewTableIdentifier().getObjectName(), false); } else if (alterTableOperation instanceof AlterTableOptionsOperation) { AlterTableOptionsOperation alterTablePropertiesOp = (AlterTableOptionsOperation) operation; 
catalog.alterTable( alterTablePropertiesOp.getTableIdentifier().toObjectPath(), alterTablePropertiesOp.getCatalogTable(), false); } else if (alterTableOperation instanceof AlterTableAddConstraintOperation) { AlterTableAddConstraintOperation addConstraintOP = (AlterTableAddConstraintOperation) operation; CatalogTable oriTable = (CatalogTable) catalogManager .getTable(addConstraintOP.getTableIdentifier()) .get() .getTable(); TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriTable.getSchema()); if (addConstraintOP.getConstraintName().isPresent()) { builder.primaryKey( addConstraintOP.getConstraintName().get(), addConstraintOP.getColumnNames()); } else { builder.primaryKey(addConstraintOP.getColumnNames()); } CatalogTable newTable = new CatalogTableImpl( builder.build(), oriTable.getPartitionKeys(), oriTable.getOptions(), oriTable.getComment()); catalog.alterTable( addConstraintOP.getTableIdentifier().toObjectPath(), newTable, false); } else if (alterTableOperation instanceof AlterTableDropConstraintOperation) { AlterTableDropConstraintOperation dropConstraintOperation = (AlterTableDropConstraintOperation) operation; CatalogTable oriTable = (CatalogTable) catalogManager .getTable(dropConstraintOperation.getTableIdentifier()) .get() .getTable(); CatalogTable newTable = new CatalogTableImpl( TableSchemaUtils.dropConstraint( oriTable.getSchema(), dropConstraintOperation.getConstraintName()), oriTable.getPartitionKeys(), oriTable.getOptions(), oriTable.getComment()); catalog.alterTable( dropConstraintOperation.getTableIdentifier().toObjectPath(), newTable, false); } else if (alterTableOperation instanceof AlterPartitionPropertiesOperation) { AlterPartitionPropertiesOperation alterPartPropsOp = (AlterPartitionPropertiesOperation) operation; catalog.alterPartition( alterPartPropsOp.getTableIdentifier().toObjectPath(), alterPartPropsOp.getPartitionSpec(), alterPartPropsOp.getCatalogPartition(), false); } else if (alterTableOperation instanceof 
AlterTableSchemaOperation) { AlterTableSchemaOperation alterTableSchemaOperation = (AlterTableSchemaOperation) alterTableOperation; catalog.alterTable( alterTableSchemaOperation.getTableIdentifier().toObjectPath(), alterTableSchemaOperation.getCatalogTable(), false); } else if (alterTableOperation instanceof AddPartitionsOperation) { AddPartitionsOperation addPartitionsOperation = (AddPartitionsOperation) alterTableOperation; List<CatalogPartitionSpec> specs = addPartitionsOperation.getPartitionSpecs(); List<CatalogPartition> partitions = addPartitionsOperation.getCatalogPartitions(); boolean ifNotExists = addPartitionsOperation.ifNotExists(); ObjectPath tablePath = addPartitionsOperation.getTableIdentifier().toObjectPath(); for (int i = 0; i < specs.size(); i++) { catalog.createPartition( tablePath, specs.get(i), partitions.get(i), ifNotExists); } } else if (alterTableOperation instanceof DropPartitionsOperation) { DropPartitionsOperation dropPartitionsOperation = (DropPartitionsOperation) alterTableOperation; ObjectPath tablePath = dropPartitionsOperation.getTableIdentifier().toObjectPath(); boolean ifExists = dropPartitionsOperation.ifExists(); for (CatalogPartitionSpec spec : dropPartitionsOperation.getPartitionSpecs()) { catalog.dropPartition(tablePath, spec, ifExists); } } return TableResultImpl.TABLE_RESULT_OK; } catch (TableAlreadyExistException | TableNotExistException e) { throw new ValidationException(exMsg, e); } catch (Exception e) { throw new TableException(exMsg, e); } } else if (operation instanceof CreateViewOperation) { CreateViewOperation createViewOperation = (CreateViewOperation) operation; if (createViewOperation.isTemporary()) { catalogManager.createTemporaryTable( createViewOperation.getCatalogView(), createViewOperation.getViewIdentifier(), createViewOperation.isIgnoreIfExists()); } else { catalogManager.createTable( createViewOperation.getCatalogView(), createViewOperation.getViewIdentifier(), createViewOperation.isIgnoreIfExists()); } 
return TableResultImpl.TABLE_RESULT_OK; } else if (operation instanceof DropViewOperation) { DropViewOperation dropViewOperation = (DropViewOperation) operation; if (dropViewOperation.isTemporary()) { catalogManager.dropTemporaryView( dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists()); } else { catalogManager.dropView( dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists()); } return TableResultImpl.TABLE_RESULT_OK; } else if (operation instanceof AlterViewOperation) { AlterViewOperation alterViewOperation = (AlterViewOperation) operation; Catalog catalog = getCatalogOrThrowException( alterViewOperation.getViewIdentifier().getCatalogName()); String exMsg = getDDLOpExecuteErrorMsg(alterViewOperation.asSummaryString()); try { if (alterViewOperation instanceof AlterViewRenameOperation) { AlterViewRenameOperation alterTableRenameOp = (AlterViewRenameOperation) operation; catalog.renameTable( alterTableRenameOp.getViewIdentifier().toObjectPath(), alterTableRenameOp.getNewViewIdentifier().getObjectName(), false); } else if (alterViewOperation instanceof AlterViewPropertiesOperation) { AlterViewPropertiesOperation alterTablePropertiesOp = (AlterViewPropertiesOperation) operation; catalog.alterTable( alterTablePropertiesOp.getViewIdentifier().toObjectPath(), alterTablePropertiesOp.getCatalogView(), false); } else if (alterViewOperation instanceof AlterViewAsOperation) { AlterViewAsOperation alterViewAsOperation = (AlterViewAsOperation) alterViewOperation; catalog.alterTable( alterViewAsOperation.getViewIdentifier().toObjectPath(), alterViewAsOperation.getNewView(), false); } return TableResultImpl.TABLE_RESULT_OK; } catch (TableAlreadyExistException | TableNotExistException e) { throw new ValidationException(exMsg, e); } catch (Exception e) { throw new TableException(exMsg, e); } } else if (operation instanceof CreateDatabaseOperation) { CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation; Catalog catalog 
= getCatalogOrThrowException(createDatabaseOperation.getCatalogName()); String exMsg = getDDLOpExecuteErrorMsg(createDatabaseOperation.asSummaryString()); try { catalog.createDatabase( createDatabaseOperation.getDatabaseName(), createDatabaseOperation.getCatalogDatabase(), createDatabaseOperation.isIgnoreIfExists()); return TableResultImpl.TABLE_RESULT_OK; } catch (DatabaseAlreadyExistException e) { throw new ValidationException(exMsg, e); } catch (Exception e) { throw new TableException(exMsg, e); } } else if (operation instanceof DropDatabaseOperation) { DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation; Catalog catalog = getCatalogOrThrowException(dropDatabaseOperation.getCatalogName()); String exMsg = getDDLOpExecuteErrorMsg(dropDatabaseOperation.asSummaryString()); try { catalog.dropDatabase( dropDatabaseOperation.getDatabaseName(), dropDatabaseOperation.isIfExists(), dropDatabaseOperation.isCascade()); return TableResultImpl.TABLE_RESULT_OK; } catch (DatabaseNotExistException | DatabaseNotEmptyException e) { throw new ValidationException(exMsg, e); } catch (Exception e) { throw new TableException(exMsg, e); } } else if (operation instanceof AlterDatabaseOperation) { AlterDatabaseOperation alterDatabaseOperation = (AlterDatabaseOperation) operation; Catalog catalog = getCatalogOrThrowException(alterDatabaseOperation.getCatalogName()); String exMsg = getDDLOpExecuteErrorMsg(alterDatabaseOperation.asSummaryString()); try { catalog.alterDatabase( alterDatabaseOperation.getDatabaseName(), alterDatabaseOperation.getCatalogDatabase(), false); return TableResultImpl.TABLE_RESULT_OK; } catch (DatabaseNotExistException e) { throw new ValidationException(exMsg, e); } catch (Exception e) { throw new TableException(exMsg, e); } } else if (operation instanceof CreateCatalogFunctionOperation) { return createCatalogFunction((CreateCatalogFunctionOperation) operation); } else if (operation instanceof CreateTempSystemFunctionOperation) { return 
createSystemFunction((CreateTempSystemFunctionOperation) operation); } else if (operation instanceof DropCatalogFunctionOperation) { return dropCatalogFunction((DropCatalogFunctionOperation) operation); } else if (operation instanceof DropTempSystemFunctionOperation) { return dropSystemFunction((DropTempSystemFunctionOperation) operation); } else if (operation instanceof AlterCatalogFunctionOperation) { return alterCatalogFunction((AlterCatalogFunctionOperation) operation); } else if (operation instanceof CreateCatalogOperation) { return createCatalog((CreateCatalogOperation) operation); } else if (operation instanceof DropCatalogOperation) { DropCatalogOperation dropCatalogOperation = (DropCatalogOperation) operation; String exMsg = getDDLOpExecuteErrorMsg(dropCatalogOperation.asSummaryString()); try { catalogManager.unregisterCatalog( dropCatalogOperation.getCatalogName(), dropCatalogOperation.isIfExists()); return TableResultImpl.TABLE_RESULT_OK; } catch (CatalogException e) { throw new ValidationException(exMsg, e); } } else if (operation instanceof LoadModuleOperation) { return loadModule((LoadModuleOperation) operation); } else if (operation instanceof UnloadModuleOperation) { return unloadModule((UnloadModuleOperation) operation); } else if (operation instanceof UseModulesOperation) { return useModules((UseModulesOperation) operation); } else if (operation instanceof UseCatalogOperation) { UseCatalogOperation useCatalogOperation = (UseCatalogOperation) operation; catalogManager.setCurrentCatalog(useCatalogOperation.getCatalogName()); return TableResultImpl.TABLE_RESULT_OK; } else if (operation instanceof UseDatabaseOperation) { UseDatabaseOperation useDatabaseOperation = (UseDatabaseOperation) operation; catalogManager.setCurrentCatalog(useDatabaseOperation.getCatalogName()); catalogManager.setCurrentDatabase(useDatabaseOperation.getDatabaseName()); return TableResultImpl.TABLE_RESULT_OK; } else if (operation instanceof ShowCatalogsOperation) { return 
buildShowResult("catalog name", listCatalogs()); } else if (operation instanceof ShowCurrentCatalogOperation) { return buildShowResult( "current catalog name", new String[] {catalogManager.getCurrentCatalog()}); } else if (operation instanceof ShowDatabasesOperation) { return buildShowResult("database name", listDatabases()); } else if (operation instanceof ShowCurrentDatabaseOperation) { return buildShowResult( "current database name", new String[] {catalogManager.getCurrentDatabase()}); } else if (operation instanceof ShowTablesOperation) { return buildShowResult("table name", listTables()); } else if (operation instanceof ShowFunctionsOperation) { return buildShowResult("function name", listFunctions()); } else if (operation instanceof ShowViewsOperation) { return buildShowResult("view name", listViews()); } else if (operation instanceof ShowPartitionsOperation) { String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString()); try { ShowPartitionsOperation showPartitionsOperation = (ShowPartitionsOperation) operation; Catalog catalog = getCatalogOrThrowException( showPartitionsOperation.getTableIdentifier().getCatalogName()); ObjectPath tablePath = showPartitionsOperation.getTableIdentifier().toObjectPath(); CatalogPartitionSpec partitionSpec = showPartitionsOperation.getPartitionSpec(); List<CatalogPartitionSpec> partitionSpecs = partitionSpec == null ? 
catalog.listPartitions(tablePath) : catalog.listPartitions(tablePath, partitionSpec); List<String> partitionNames = new ArrayList<>(partitionSpecs.size()); for (CatalogPartitionSpec spec : partitionSpecs) { List<String> partitionKVs = new ArrayList<>(spec.getPartitionSpec().size()); for (Map.Entry<String, String> partitionKV : spec.getPartitionSpec().entrySet()) { partitionKVs.add(partitionKV.getKey() + "=" + partitionKV.getValue()); } partitionNames.add(String.join("/", partitionKVs)); } return buildShowResult("partition name", partitionNames.toArray(new String[0])); } catch (TableNotExistException e) { throw new ValidationException(exMsg, e); } catch (Exception e) { throw new TableException(exMsg, e); } } else if (operation instanceof ExplainOperation) { String explanation = planner.explain( Collections.singletonList(((ExplainOperation) operation).getChild())); return TableResultImpl.builder() .resultKind(ResultKind.SUCCESS_WITH_CONTENT) .tableSchema(TableSchema.builder().field("result", DataTypes.STRING()).build()) .data(Collections.singletonList(Row.of(explanation))) .setPrintStyle(TableResultImpl.PrintStyle.rawContent()) .build(); } else if (operation instanceof DescribeTableOperation) { DescribeTableOperation describeTableOperation = (DescribeTableOperation) operation; Optional<CatalogManager.TableLookupResult> result = catalogManager.getTable(describeTableOperation.getSqlIdentifier()); if (result.isPresent()) { return buildDescribeResult(result.get().getResolvedSchema()); } else { throw new ValidationException( String.format( "Tables or views with the identifier '%s' doesn't exist", describeTableOperation.getSqlIdentifier().asSummaryString())); } } else if (operation instanceof QueryOperation) { return executeInternal((QueryOperation) operation); } else { throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG); } } private TableResult createCatalog(CreateCatalogOperation operation) { String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString()); 
// Tail of createCatalog(CreateCatalogOperation): instantiate and register the catalog.
try {
    String catalogName = operation.getCatalogName();
    Map<String, String> properties = operation.getProperties();
    // The concrete CatalogFactory is discovered from the properties via the
    // TableFactory service loader, using the user classloader.
    final CatalogFactory factory =
            TableFactoryService.find(CatalogFactory.class, properties, userClassLoader);
    Catalog catalog = factory.createCatalog(catalogName, properties);
    catalogManager.registerCatalog(catalogName, catalog);
    return TableResultImpl.TABLE_RESULT_OK;
} catch (CatalogException e) {
    throw new ValidationException(exMsg, e);
}
}

// Loads a module by name; the module name doubles as the factory-discovery 'type' property.
private TableResult loadModule(LoadModuleOperation operation) {
    String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
    try {
        Map<String, String> properties = new HashMap<>(operation.getProperties());
        // A user-supplied 'type' property would conflict with the module-name-based lookup.
        if (properties.containsKey(MODULE_TYPE)) {
            throw new ValidationException(
                    String.format(
                            "Property 'type' = '%s' is not supported since module name "
                                    + "is used to find module",
                            properties.get(MODULE_TYPE)));
        }
        properties.put(MODULE_TYPE, operation.getModuleName());
        final ModuleFactory factory =
                TableFactoryService.find(ModuleFactory.class, properties, userClassLoader);
        moduleManager.loadModule(operation.getModuleName(), factory.createModule(properties));
        return TableResultImpl.TABLE_RESULT_OK;
    } catch (ValidationException e) {
        // Re-wrap so the message carries the "Could not execute ..." DDL context prefix.
        throw new ValidationException(String.format("%s. %s", exMsg, e.getMessage()), e);
    } catch (Exception e) {
        throw new TableException(String.format("%s. %s", exMsg, e.getMessage()), e);
    }
}

// Unloads a previously loaded module by name.
private TableResult unloadModule(UnloadModuleOperation operation) {
    String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
    try {
        moduleManager.unloadModule(operation.getModuleName());
        return TableResultImpl.TABLE_RESULT_OK;
    } catch (ValidationException e) {
        throw new ValidationException(String.format("%s. %s", exMsg, e.getMessage()), e);
    }
}

// Switches which loaded modules are in use, in the given order.
private TableResult useModules(UseModulesOperation operation) {
    String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
    try {
        moduleManager.useModules(operation.getModuleNames().toArray(new String[0]));
        return TableResultImpl.TABLE_RESULT_OK;
    } catch (ValidationException e) {
        throw new ValidationException(String.format("%s. %s", exMsg, e.getMessage()), e);
    }
}

// Builds a single-STRING-column result, one row per entry, for SHOW ... statements.
private TableResult buildShowResult(String columnName, String[] objects) {
    return buildResult(
            new String[] {columnName},
            new DataType[] {DataTypes.STRING()},
            Arrays.stream(objects).map((c) -> new String[] {c}).toArray(String[][]::new));
}

// Builds the DESCRIBE result: one row per column with name/type/null/key/extras/watermark.
private TableResult buildDescribeResult(TableSchema schema) {
    // rowtime attribute -> watermark expression
    Map<String, String> fieldToWatermark =
            schema.getWatermarkSpecs().stream()
                    .collect(
                            Collectors.toMap(
                                    WatermarkSpec::getRowtimeAttribute,
                                    WatermarkSpec::getWatermarkExpr));
    // Each primary-key column maps to "PRI(<all pk columns>)".
    Map<String, String> fieldToPrimaryKey = new HashMap<>();
    schema.getPrimaryKey()
            .ifPresent(
                    (p) -> {
                        List<String> columns = p.getColumns();
                        columns.forEach(
                                (c) ->
                                        fieldToPrimaryKey.put(
                                                c,
                                                String.format(
                                                        "PRI(%s)",
                                                        String.join(", ", columns))));
                    });
    Object[][] rows =
            schema.getTableColumns().stream()
                    .map(
                            (c) -> {
                                final LogicalType logicalType = c.getType().getLogicalType();
                                return new Object[] {
                                    c.getName(),
                                    // copy(true) yields a forced-nullable copy for the type
                                    // string; nullability is reported in its own column below.
                                    logicalType.copy(true).asSummaryString(),
                                    logicalType.isNullable(),
                                    fieldToPrimaryKey.getOrDefault(c.getName(), null),
                                    c.explainExtras().orElse(null),
                                    fieldToWatermark.getOrDefault(c.getName(), null)
                                };
                            })
                    .toArray(Object[][]::new);
    return buildResult(
            new String[] {"name", "type", "null", "key", "extras", "watermark"},
            new DataType[] {
                DataTypes.STRING(),
                DataTypes.STRING(),
                DataTypes.BOOLEAN(),
                DataTypes.STRING(),
                DataTypes.STRING(),
                DataTypes.STRING()
            },
            rows);
}

// Generic helper turning headers/types/rows into an in-memory TableResult.
private TableResult buildResult(String[] headers, DataType[] types, Object[][] rows) {
    return TableResultImpl.builder()
            .resultKind(ResultKind.SUCCESS_WITH_CONTENT)
            .tableSchema(TableSchema.builder().fields(headers,
types).build()) // completes TableSchema.builder().fields(headers, types) from above
        .data(Arrays.stream(rows).map(Row::of).collect(Collectors.toList()))
        .setPrintStyle(
                TableResultImpl.PrintStyle.tableau(Integer.MAX_VALUE, "", false, false))
        .build();
}

/**
 * Extracts sink identifier names from {@link ModifyOperation}s.
 *
 * <p>If multiple ModifyOperations share the same name, an index suffix is appended to each
 * duplicated occurrence to ensure every returned name is unique.
 */
private List<String> extractSinkIdentifierNames(List<ModifyOperation> operations) {
    List<String> tableNames = new ArrayList<>(operations.size());
    // First pass: collect fully-qualified sink names and count occurrences per name.
    Map<String, Integer> tableNameToCount = new HashMap<>();
    for (ModifyOperation operation : operations) {
        if (operation instanceof CatalogSinkModifyOperation) {
            ObjectIdentifier identifier =
                    ((CatalogSinkModifyOperation) operation).getTableIdentifier();
            String fullName = identifier.asSummaryString();
            tableNames.add(fullName);
            tableNameToCount.put(fullName, tableNameToCount.getOrDefault(fullName, 0) + 1);
        } else {
            throw new UnsupportedOperationException("Unsupported operation: " + operation);
        }
    }
    // Second pass: disambiguate duplicated names with a running "_<index>" suffix.
    Map<String, Integer> tableNameToIndex = new HashMap<>();
    return tableNames.stream()
            .map(
                    tableName -> {
                        if (tableNameToCount.get(tableName) == 1) {
                            return tableName;
                        } else {
                            Integer index = tableNameToIndex.getOrDefault(tableName, 0) + 1;
                            tableNameToIndex.put(tableName, index);
                            return tableName + "_" + index;
                        }
                    })
            .collect(Collectors.toList());
}

// Job name from configuration (PipelineOptions.NAME) falling back to the supplied default.
private String getJobName(String defaultJobName) {
    return tableConfig.getConfiguration().getString(PipelineOptions.NAME, defaultJobName);
}

/** Gets the catalog by name, or throws a ValidationException if it does not exist.
*/
private Catalog getCatalogOrThrowException(String catalogName) {
    // A missing catalog is a user error, hence ValidationException rather than TableException.
    return getCatalog(catalogName)
            .orElseThrow(
                    () ->
                            new ValidationException(
                                    String.format("Catalog %s does not exist", catalogName)));
}

// Standard "Could not execute <DDL summary>" prefix used by the DDL handlers in this class.
private String getDDLOpExecuteErrorMsg(String action) {
    return String.format("Could not execute %s", action);
}

@Override
public String getCurrentCatalog() {
    return catalogManager.getCurrentCatalog();
}

@Override
public void useCatalog(String catalogName) {
    catalogManager.setCurrentCatalog(catalogName);
}

@Override
public String getCurrentDatabase() {
    return catalogManager.getCurrentDatabase();
}

@Override
public void useDatabase(String databaseName) {
    catalogManager.setCurrentDatabase(databaseName);
}

@Override
public TableConfig getConfig() {
    return tableConfig;
}

@Override
public JobExecutionResult execute(String jobName) throws Exception {
    // Translates all buffered modify operations (clearing the buffer as a side effect,
    // see translateAndClearBuffer()) and runs them synchronously as one job.
    Pipeline pipeline = execEnv.createPipeline(translateAndClearBuffer(), tableConfig, jobName);
    return execEnv.execute(pipeline);
}

@Override
public Parser getParser() {
    return getPlanner().getParser();
}

@Override
public CatalogManager getCatalogManager() {
    return catalogManager;
}

/**
 * Subclasses can override this method to transform the given QueryOperation to a new one with
 * the qualified object identifier. This is needed for some QueryOperations, e.g.
 * JavaDataStreamQueryOperation, which doesn't know the registered identifier when created
 * ({@code fromDataStream(DataStream)}). But the identifier is required when converting this
 * QueryOperation to RelNode.
 */
protected QueryOperation qualifyQueryOperation(
        ObjectIdentifier identifier, QueryOperation queryOperation) {
    // Default implementation: return the operation unchanged.
    return queryOperation;
}

/**
 * Subclasses can override this method to add additional checks.
* * @param tableSource tableSource to validate */ protected void validateTableSource(TableSource<?> tableSource) { TableSourceValidation.validateTableSource(tableSource, tableSource.getTableSchema()); } /** * Translate the buffered operations to Transformations, and clear the buffer. * * <p>The buffer will be clear even if the `translate` fails. In most cases, the failure is not * retryable (e.g. type mismatch, can't generate physical plan). If the buffer is not clear * after failure, the following `translate` will also fail. */ protected List<Transformation<?>> translateAndClearBuffer() { List<Transformation<?>> transformations; try { transformations = translate(bufferedModifyOperations); } finally { bufferedModifyOperations.clear(); } return transformations; } private List<Transformation<?>> translate(List<ModifyOperation> modifyOperations) { return planner.translate(modifyOperations); } private void buffer(List<ModifyOperation> modifyOperations) { bufferedModifyOperations.addAll(modifyOperations); } @VisibleForTesting protected ExplainDetail[] getExplainDetails(boolean extended) { if (extended) { if (isStreamingMode) { return new ExplainDetail[] { ExplainDetail.ESTIMATED_COST, ExplainDetail.CHANGELOG_MODE }; } else { return new ExplainDetail[] {ExplainDetail.ESTIMATED_COST}; } } else { return new ExplainDetail[0]; } } @Override public void registerTableSourceInternal(String name, TableSource<?> tableSource) { validateTableSource(tableSource); ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name)); Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier); if (table.isPresent()) { if (table.get() instanceof ConnectorCatalogTable<?, ?>) { ConnectorCatalogTable<?, ?> sourceSinkTable = (ConnectorCatalogTable<?, ?>) table.get(); if (sourceSinkTable.getTableSource().isPresent()) { throw new ValidationException( String.format( "Table '%s' already exists. 
Please choose a different name.", name)); } else { ConnectorCatalogTable sourceAndSink = ConnectorCatalogTable.sourceAndSink( tableSource, sourceSinkTable.getTableSink().get(), !IS_STREAM_TABLE); catalogManager.dropTemporaryTable(objectIdentifier, false); catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false); } } else { throw new ValidationException( String.format( "Table '%s' already exists. Please choose a different name.", name)); } } else { ConnectorCatalogTable source = ConnectorCatalogTable.source(tableSource, !IS_STREAM_TABLE); catalogManager.createTemporaryTable(source, objectIdentifier, false); } } @Override public void registerTableSinkInternal(String name, TableSink<?> tableSink) { ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name)); Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier); if (table.isPresent()) { if (table.get() instanceof ConnectorCatalogTable<?, ?>) { ConnectorCatalogTable<?, ?> sourceSinkTable = (ConnectorCatalogTable<?, ?>) table.get(); if (sourceSinkTable.getTableSink().isPresent()) { throw new ValidationException( String.format( "Table '%s' already exists. Please choose a different name.", name)); } else { ConnectorCatalogTable sourceAndSink = ConnectorCatalogTable.sourceAndSink( sourceSinkTable.getTableSource().get(), tableSink, !IS_STREAM_TABLE); catalogManager.dropTemporaryTable(objectIdentifier, false); catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false); } } else { throw new ValidationException( String.format( "Table '%s' already exists. 
Please choose a different name.", name)); } } else { ConnectorCatalogTable sink = ConnectorCatalogTable.sink(tableSink, !IS_STREAM_TABLE); catalogManager.createTemporaryTable(sink, objectIdentifier, false); } } private Optional<CatalogBaseTable> getTemporaryTable(ObjectIdentifier identifier) { return catalogManager .getTable(identifier) .filter(CatalogManager.TableLookupResult::isTemporary) .map(CatalogManager.TableLookupResult::getTable); } private TableResult createCatalogFunction( CreateCatalogFunctionOperation createCatalogFunctionOperation) { String exMsg = getDDLOpExecuteErrorMsg(createCatalogFunctionOperation.asSummaryString()); try { if (createCatalogFunctionOperation.isTemporary()) { functionCatalog.registerTemporaryCatalogFunction( UnresolvedIdentifier.of( createCatalogFunctionOperation.getFunctionIdentifier().toList()), createCatalogFunctionOperation.getCatalogFunction(), createCatalogFunctionOperation.isIgnoreIfExists()); } else { Catalog catalog = getCatalogOrThrowException( createCatalogFunctionOperation .getFunctionIdentifier() .getCatalogName()); catalog.createFunction( createCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(), createCatalogFunctionOperation.getCatalogFunction(), createCatalogFunctionOperation.isIgnoreIfExists()); } return TableResultImpl.TABLE_RESULT_OK; } catch (ValidationException e) { throw e; } catch (FunctionAlreadyExistException e) { throw new ValidationException(e.getMessage(), e); } catch (Exception e) { throw new TableException(exMsg, e); } } private TableResult alterCatalogFunction( AlterCatalogFunctionOperation alterCatalogFunctionOperation) { String exMsg = getDDLOpExecuteErrorMsg(alterCatalogFunctionOperation.asSummaryString()); try { CatalogFunction function = alterCatalogFunctionOperation.getCatalogFunction(); if (alterCatalogFunctionOperation.isTemporary()) { throw new ValidationException("Alter temporary catalog function is not supported"); } else { Catalog catalog = getCatalogOrThrowException( 
alterCatalogFunctionOperation .getFunctionIdentifier() .getCatalogName()); catalog.alterFunction( alterCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(), function, alterCatalogFunctionOperation.isIfExists()); } return TableResultImpl.TABLE_RESULT_OK; } catch (ValidationException e) { throw e; } catch (FunctionNotExistException e) { throw new ValidationException(e.getMessage(), e); } catch (Exception e) { throw new TableException(exMsg, e); } } private TableResult dropCatalogFunction( DropCatalogFunctionOperation dropCatalogFunctionOperation) { String exMsg = getDDLOpExecuteErrorMsg(dropCatalogFunctionOperation.asSummaryString()); try { if (dropCatalogFunctionOperation.isTemporary()) { functionCatalog.dropTempCatalogFunction( dropCatalogFunctionOperation.getFunctionIdentifier(), dropCatalogFunctionOperation.isIfExists()); } else { Catalog catalog = getCatalogOrThrowException( dropCatalogFunctionOperation .getFunctionIdentifier() .getCatalogName()); catalog.dropFunction( dropCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(), dropCatalogFunctionOperation.isIfExists()); } return TableResultImpl.TABLE_RESULT_OK; } catch (ValidationException e) { throw e; } catch (FunctionNotExistException e) { throw new ValidationException(e.getMessage(), e); } catch (Exception e) { throw new TableException(exMsg, e); } } private TableResult createSystemFunction(CreateTempSystemFunctionOperation operation) { String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString()); try { functionCatalog.registerTemporarySystemFunction( operation.getFunctionName(), operation.getFunctionClass(), operation.getFunctionLanguage(), operation.isIgnoreIfExists()); return TableResultImpl.TABLE_RESULT_OK; } catch (ValidationException e) { throw e; } catch (Exception e) { throw new TableException(exMsg, e); } } private TableResult dropSystemFunction(DropTempSystemFunctionOperation operation) { try { functionCatalog.dropTemporarySystemFunction( operation.getFunctionName(), 
operation.isIfExists()); return TableResultImpl.TABLE_RESULT_OK; } catch (ValidationException e) { throw e; } catch (Exception e) { throw new TableException(getDDLOpExecuteErrorMsg(operation.asSummaryString()), e); } } protected TableImpl createTable(QueryOperation tableOperation) { return TableImpl.createTable( this, tableOperation, operationTreeBuilder, functionCatalog.asLookup(getParser()::parseIdentifier)); } @Override public String getJsonPlan(String stmt) { List<Operation> operations = getParser().parse(stmt); if (operations.size() != 1) { throw new TableException( "Unsupported SQL query! getJsonPlan() only accepts a single INSERT statement."); } Operation operation = operations.get(0); List<ModifyOperation> modifyOperations = new ArrayList<>(1); if (operation instanceof ModifyOperation) { modifyOperations.add((ModifyOperation) operation); } else { throw new TableException("Only INSERT is supported now."); } return getJsonPlan(modifyOperations); } @Override public String getJsonPlan(List<ModifyOperation> operations) { return planner.getJsonPlan(operations); } @Override public String explainJsonPlan(String jsonPlan, ExplainDetail... extraDetails) { return planner.explainJsonPlan(jsonPlan, extraDetails); } @Override public TableResult executeJsonPlan(String jsonPlan) { List<Transformation<?>> transformations = planner.translateJsonPlan(jsonPlan); List<String> sinkIdentifierNames = new ArrayList<>(); for (int i = 0; i < transformations.size(); ++i) { sinkIdentifierNames.add("sink" + i); } return executeInternal(transformations, sinkIdentifierNames); } }
feel like the condition should be `lockHolderRequest.locker.equals(this.locker) && !(this.lockType == LockType.READ && lockHolderRequest.lockType == LockType.WRITE)`
boolean isConflict(LockHolder lockHolderRequest) { if (lockHolderRequest.locker.equals(this.locker) && this.lockType == LockType.WRITE && lockHolderRequest.lockType == LockType.READ) { /* * If you acquire an exclusive lock first and then request a shared lock, you can successfully acquire the lock. * This scenario is generally called "lock downgrade", * but this lock does not actually reduce the original write lock directly to a read lock. * In fact, it is still two independent read and write locks, and the two locks still need * to be released independently. The actual scenario is that before releasing the write lock, * acquire the read lock first, so that there is no gap time to release the lock. */ return false; } else { return this.lockType.isConflict(lockHolderRequest.getLockType()); } }
if (lockHolderRequest.locker.equals(this.locker)
boolean isConflict(LockHolder lockHolderRequest) { return this.lockType.isConflict(lockHolderRequest.getLockType()); }
class LockHolder implements Cloneable { private final Locker locker; private final LockType lockType; private int refCount; public LockHolder(Locker locker, LockType lockType) { this.locker = locker; this.lockType = lockType; this.refCount = 1; } public Locker getLocker() { return locker; } public LockType getLockType() { return lockType; } public void increaseRefCount() { refCount++; } public void decreaseRefCount() { refCount--; } public int getRefCount() { return refCount; } @Override public String toString() { return "<locker=\"" + locker + "\" type=\"" + lockType + "\"/>"; } @Override public LockHolder clone() { try { return (LockHolder) super.clone(); } catch (CloneNotSupportedException e) { throw new AssertionError(); } } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } LockHolder that = (LockHolder) o; return Objects.equal(locker, that.locker) && Objects.equal(lockType, that.lockType); } @Override public int hashCode() { return Objects.hashCode(locker, lockType); } }
class LockHolder implements Cloneable { private final Locker locker; private final LockType lockType; private int refCount; public LockHolder(Locker locker, LockType lockType) { this.locker = locker; this.lockType = lockType; this.refCount = 1; } public Locker getLocker() { return locker; } public LockType getLockType() { return lockType; } public void increaseRefCount() { refCount++; } public void decreaseRefCount() { refCount--; } public int getRefCount() { return refCount; } @Override public String toString() { return "<locker=\"" + locker + "\" type=\"" + lockType + "\"/>"; } @Override public LockHolder clone() { try { return (LockHolder) super.clone(); } catch (CloneNotSupportedException e) { throw new AssertionError(); } } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } LockHolder that = (LockHolder) o; return Objects.equal(locker, that.locker) && Objects.equal(lockType, that.lockType); } @Override public int hashCode() { return Objects.hashCode(locker, lockType); } }
Shall we check `future.isSuccess() && cause != null` as in other places?
private static void pingAutomatically(WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = controlMessage.getWebSocketConnection(); webSocketConnection.pong(controlMessage.getPayload()).addListener(future -> { Throwable cause = future.cause(); if (cause != null) { ErrorHandlerUtils.printError(cause); } webSocketConnection.readNextFrame(); }); }
if (cause != null) {
private static void pingAutomatically(WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = controlMessage.getWebSocketConnection(); webSocketConnection.pong(controlMessage.getPayload()).addListener(future -> { Throwable cause = future.cause(); if (!future.isSuccess() && cause != null) { ErrorHandlerUtils.printError(cause); } webSocketConnection.readNextFrame(); }); }
class WebSocketDispatcher { /** * This will find the best matching service for given web socket request. * * @param webSocketMessage incoming message. * @return matching service. */ public static WebSocketService findService(WebSocketServicesRegistry servicesRegistry, Map<String, String> pathParams, WebSocketMessage webSocketMessage, HTTPCarbonMessage msg) { try { String serviceUri = webSocketMessage.getTarget(); serviceUri = WebSocketUtil.refactorUri(serviceUri); URI requestUri; try { requestUri = URI.create(serviceUri); } catch (IllegalArgumentException e) { throw new BallerinaConnectorException(e.getMessage()); } WebSocketService service = servicesRegistry.getUriTemplate().matches(requestUri.getPath(), pathParams, webSocketMessage); if (service == null) { throw new BallerinaConnectorException("no Service found to handle the service request: " + serviceUri); } msg.setProperty(HttpConstants.QUERY_STR, requestUri.getRawQuery()); return service; } catch (Throwable throwable) { ErrorHandlerUtils.printError(throwable); throw new BallerinaConnectorException("no Service found to handle the service request"); } } public static void dispatchTextMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketTextMessage textMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onTextMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_TEXT); if (onTextMessageResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onTextMessageResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BString(textMessage.getText()); if (paramDetails.size() == 3) { bValues[2] = new BBoolean(textMessage.isFinalFragment()); } Executor.submit(onTextMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), 
null, null, bValues); } public static void dispatchBinaryMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketBinaryMessage binaryMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onBinaryMessageResource = wsService.getResourceByName( WebSocketConstants.RESOURCE_NAME_ON_BINARY); if (onBinaryMessageResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onBinaryMessageResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BBlob(binaryMessage.getByteArray()); if (paramDetails.size() == 3) { bValues[2] = new BBoolean(binaryMessage.isFinalFragment()); } Executor.submit(onBinaryMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null, null, bValues); } public static void dispatchControlMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { if (controlMessage.getControlSignal() == WebSocketControlSignal.PING) { WebSocketDispatcher.dispatchPingMessage(connectionInfo, controlMessage); } else if (controlMessage.getControlSignal() == WebSocketControlSignal.PONG) { WebSocketDispatcher.dispatchPongMessage(connectionInfo, controlMessage); } else { throw new BallerinaConnectorException("Received unknown control signal"); } } private static void dispatchPingMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onPingMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PING); if (onPingMessageResource == null) { pingAutomatically(controlMessage); return; } List<ParamDetail> paramDetails = onPingMessageResource.getParamDetails(); BValue[] bValues = 
new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BBlob(controlMessage.getByteArray()); Executor.submit(onPingMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null, null, bValues); } private static void dispatchPongMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onPongMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PONG); if (onPongMessageResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onPongMessageResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BBlob(controlMessage.getByteArray()); Executor.submit(onPongMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null, null, bValues); } public static void dispatchCloseMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketCloseMessage closeMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onCloseResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_CLOSE); int closeCode = closeMessage.getCloseCode(); String closeReason = closeMessage.getCloseReason(); if (onCloseResource == null) { if (webSocketConnection.getSession().isOpen()) { webSocketConnection.finishConnectionClosure(closeCode, null); } return; } List<ParamDetail> paramDetails = onCloseResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BInteger(closeCode); bValues[2] = new BString(closeReason); CallableUnitCallback onCloseCallback = new 
CallableUnitCallback() { @Override public void notifySuccess() { if (closeMessage.getCloseCode() != WebSocketConstants.STATUS_CODE_ABNORMAL_CLOSURE && webSocketConnection.getSession().isOpen()) { webSocketConnection.finishConnectionClosure(closeCode, null).addListener( closeFuture -> connectionInfo.getWebSocketEndpoint().setBooleanField(0, 0)); } } @Override public void notifyFailure(BStruct error) { ErrorHandlerUtils.printError("error: " + BLangVMErrors.getPrintableStackTrace(error)); } }; Executor.submit(onCloseResource, onCloseCallback, null, null, bValues); } public static void dispatchIdleTimeout(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onIdleTimeoutResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_IDLE_TIMEOUT); if (onIdleTimeoutResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onIdleTimeoutResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); CallableUnitCallback onIdleTimeoutCallback = new CallableUnitCallback() { @Override public void notifySuccess() { } @Override public void notifyFailure(BStruct error) { ErrorHandlerUtils.printError("error: " + BLangVMErrors.getPrintableStackTrace(error)); } }; Executor.submit(onIdleTimeoutResource, onIdleTimeoutCallback, null, null, bValues); } public static void setPathParams(BValue[] bValues, List<ParamDetail> paramDetails, Map<String, String> pathParams, int defaultArgSize) { int parameterDetailsSize = paramDetails.size(); if (parameterDetailsSize > defaultArgSize) { for (int i = defaultArgSize; i < parameterDetailsSize; i++) { bValues[i] = new BString(pathParams.get(paramDetails.get(i).getVarName())); } } } }
class WebSocketDispatcher { /** * This will find the best matching service for given web socket request. * * @param webSocketMessage incoming message. * @return matching service. */ public static WebSocketService findService(WebSocketServicesRegistry servicesRegistry, Map<String, String> pathParams, WebSocketMessage webSocketMessage, HTTPCarbonMessage msg) { try { String serviceUri = webSocketMessage.getTarget(); serviceUri = WebSocketUtil.refactorUri(serviceUri); URI requestUri; try { requestUri = URI.create(serviceUri); } catch (IllegalArgumentException e) { throw new BallerinaConnectorException(e.getMessage()); } WebSocketService service = servicesRegistry.getUriTemplate().matches(requestUri.getPath(), pathParams, webSocketMessage); if (service == null) { throw new BallerinaConnectorException("no Service found to handle the service request: " + serviceUri); } msg.setProperty(HttpConstants.QUERY_STR, requestUri.getRawQuery()); return service; } catch (Throwable throwable) { ErrorHandlerUtils.printError(throwable); throw new BallerinaConnectorException("no Service found to handle the service request"); } } public static void dispatchTextMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketTextMessage textMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onTextMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_TEXT); if (onTextMessageResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onTextMessageResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BString(textMessage.getText()); if (paramDetails.size() == 3) { bValues[2] = new BBoolean(textMessage.isFinalFragment()); } Executor.submit(onTextMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), 
null, null, bValues); } public static void dispatchBinaryMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketBinaryMessage binaryMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onBinaryMessageResource = wsService.getResourceByName( WebSocketConstants.RESOURCE_NAME_ON_BINARY); if (onBinaryMessageResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onBinaryMessageResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BBlob(binaryMessage.getByteArray()); if (paramDetails.size() == 3) { bValues[2] = new BBoolean(binaryMessage.isFinalFragment()); } Executor.submit(onBinaryMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null, null, bValues); } public static void dispatchControlMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { if (controlMessage.getControlSignal() == WebSocketControlSignal.PING) { WebSocketDispatcher.dispatchPingMessage(connectionInfo, controlMessage); } else if (controlMessage.getControlSignal() == WebSocketControlSignal.PONG) { WebSocketDispatcher.dispatchPongMessage(connectionInfo, controlMessage); } else { throw new BallerinaConnectorException("Received unknown control signal"); } } private static void dispatchPingMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onPingMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PING); if (onPingMessageResource == null) { pingAutomatically(controlMessage); return; } List<ParamDetail> paramDetails = onPingMessageResource.getParamDetails(); BValue[] bValues = 
new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BBlob(controlMessage.getByteArray()); Executor.submit(onPingMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null, null, bValues); } private static void dispatchPongMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onPongMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PONG); if (onPongMessageResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onPongMessageResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BBlob(controlMessage.getByteArray()); Executor.submit(onPongMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null, null, bValues); } public static void dispatchCloseMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketCloseMessage closeMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onCloseResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_CLOSE); int closeCode = closeMessage.getCloseCode(); String closeReason = closeMessage.getCloseReason(); if (onCloseResource == null) { if (webSocketConnection.getSession().isOpen()) { webSocketConnection.finishConnectionClosure(closeCode, null); } return; } List<ParamDetail> paramDetails = onCloseResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BInteger(closeCode); bValues[2] = new BString(closeReason); CallableUnitCallback onCloseCallback = new 
CallableUnitCallback() { @Override public void notifySuccess() { if (closeMessage.getCloseCode() != WebSocketConstants.STATUS_CODE_ABNORMAL_CLOSURE && webSocketConnection.getSession().isOpen()) { webSocketConnection.finishConnectionClosure(closeCode, null).addListener( closeFuture -> connectionInfo.getWebSocketEndpoint().setBooleanField(0, 0)); } } @Override public void notifyFailure(BStruct error) { ErrorHandlerUtils.printError("error: " + BLangVMErrors.getPrintableStackTrace(error)); } }; Executor.submit(onCloseResource, onCloseCallback, null, null, bValues); } public static void dispatchIdleTimeout(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onIdleTimeoutResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_IDLE_TIMEOUT); if (onIdleTimeoutResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onIdleTimeoutResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); CallableUnitCallback onIdleTimeoutCallback = new CallableUnitCallback() { @Override public void notifySuccess() { } @Override public void notifyFailure(BStruct error) { ErrorHandlerUtils.printError("error: " + BLangVMErrors.getPrintableStackTrace(error)); } }; Executor.submit(onIdleTimeoutResource, onIdleTimeoutCallback, null, null, bValues); } public static void setPathParams(BValue[] bValues, List<ParamDetail> paramDetails, Map<String, String> pathParams, int defaultArgSize) { int parameterDetailsSize = paramDetails.size(); if (parameterDetailsSize > defaultArgSize) { for (int i = defaultArgSize; i < parameterDetailsSize; i++) { bValues[i] = new BString(pathParams.get(paramDetails.get(i).getVarName())); } } } }
oops. This is a line that was not added intentionally. Have removed it.
public RelRoot rel(String sql, QueryParameters params) { RelOptCluster cluster = RelOptCluster.create(planner, new RexBuilder(typeFactory)); QueryTrait trait = new QueryTrait(); SqlAnalyzer analyzer = SqlAnalyzer.getBuilder() .withQueryParams(params) .withQueryTrait(trait) .withCalciteContext(config.getContext()) .withTopLevelSchema(defaultSchemaPlus) .withTypeFactory((JavaTypeFactory) cluster.getTypeFactory()) .build(); AnalyzerOptions options = SqlAnalyzer.initAnalyzerOptions(params); List<List<String>> tables = analyzer.extractTableNames(sql, options); TableResolution.registerTables(this.defaultSchemaPlus, tables); SimpleCatalog catalog = analyzer.createPopulatedCatalog(defaultSchemaPlus.getName(), options, tables); ImmutableMap.Builder<String, ResolvedCreateFunctionStmt> udfBuilder = ImmutableMap.builder(); ImmutableMap.Builder<List<String>, ResolvedNode> udtvfBuilder = ImmutableMap.builder(); ResolvedStatement statement; ParseResumeLocation parseResumeLocation = new ParseResumeLocation(sql); do { statement = analyzer.analyzeNextStatement(parseResumeLocation, options, catalog); if (statement.nodeKind() == RESOLVED_CREATE_FUNCTION_STMT) { ResolvedCreateFunctionStmt createFunctionStmt = (ResolvedCreateFunctionStmt) statement; String functionFullName = String.format( "%s:%s", SqlAnalyzer.USER_DEFINED_FUNCTIONS, String.join(".", createFunctionStmt.getNamePath())); udfBuilder.put(functionFullName, createFunctionStmt); } else if (statement.nodeKind() == RESOLVED_CREATE_TABLE_FUNCTION_STMT) { ResolvedCreateTableFunctionStmt createTableFunctionStmt = (ResolvedCreateTableFunctionStmt) statement; catalog.addTableValuedFunction( new TableValuedFunction.FixedOutputSchemaTVF( createTableFunctionStmt.getNamePath(), createTableFunctionStmt.getSignature(), TVFRelation.createColumnBased( createTableFunctionStmt.getQuery().getColumnList().stream() .map(c -> TVFRelation.Column.create(c.getName(), c.getType())) .collect(Collectors.toList())))); 
udtvfBuilder.put(createTableFunctionStmt.getNamePath(), createTableFunctionStmt.getQuery()); } else if (statement.nodeKind() == RESOLVED_QUERY_STMT) { if (!SqlAnalyzer.isEndOfInput(parseResumeLocation)) { throw new UnsupportedOperationException( "No additional statements are allowed after a SELECT statement."); } break; } } while (!SqlAnalyzer.isEndOfInput(parseResumeLocation)); if (!(statement instanceof ResolvedQueryStmt)) { throw new UnsupportedOperationException( "Statement list must end in a SELECT statement, not " + statement.nodeKindString()); } ExpressionConverter expressionConverter = new ExpressionConverter(cluster, params, udfBuilder.build()); ConversionContext context = ConversionContext.of(config, expressionConverter, cluster, trait, udtvfBuilder.build()); RelNode convertedNode = QueryStatementConverter.convertRootQuery(context, (ResolvedQueryStmt) statement); LOG.info("SQLPlan>\n" + RelOptUtil.toString(convertedNode)); return RelRoot.of(convertedNode, SqlKind.ALL); }
LOG.info("SQLPlan>\n" + RelOptUtil.toString(convertedNode));
public RelRoot rel(String sql, QueryParameters params) { RelOptCluster cluster = RelOptCluster.create(planner, new RexBuilder(typeFactory)); QueryTrait trait = new QueryTrait(); SqlAnalyzer analyzer = SqlAnalyzer.getBuilder() .withQueryParams(params) .withQueryTrait(trait) .withCalciteContext(config.getContext()) .withTopLevelSchema(defaultSchemaPlus) .withTypeFactory((JavaTypeFactory) cluster.getTypeFactory()) .build(); AnalyzerOptions options = SqlAnalyzer.initAnalyzerOptions(params); List<List<String>> tables = analyzer.extractTableNames(sql, options); TableResolution.registerTables(this.defaultSchemaPlus, tables); SimpleCatalog catalog = analyzer.createPopulatedCatalog(defaultSchemaPlus.getName(), options, tables); ImmutableMap.Builder<String, ResolvedCreateFunctionStmt> udfBuilder = ImmutableMap.builder(); ImmutableMap.Builder<List<String>, ResolvedNode> udtvfBuilder = ImmutableMap.builder(); ResolvedStatement statement; ParseResumeLocation parseResumeLocation = new ParseResumeLocation(sql); do { statement = analyzer.analyzeNextStatement(parseResumeLocation, options, catalog); if (statement.nodeKind() == RESOLVED_CREATE_FUNCTION_STMT) { ResolvedCreateFunctionStmt createFunctionStmt = (ResolvedCreateFunctionStmt) statement; String functionFullName = String.format( "%s:%s", SqlAnalyzer.USER_DEFINED_FUNCTIONS, String.join(".", createFunctionStmt.getNamePath())); udfBuilder.put(functionFullName, createFunctionStmt); } else if (statement.nodeKind() == RESOLVED_CREATE_TABLE_FUNCTION_STMT) { ResolvedCreateTableFunctionStmt createTableFunctionStmt = (ResolvedCreateTableFunctionStmt) statement; udtvfBuilder.put(createTableFunctionStmt.getNamePath(), createTableFunctionStmt.getQuery()); } else if (statement.nodeKind() == RESOLVED_QUERY_STMT) { if (!SqlAnalyzer.isEndOfInput(parseResumeLocation)) { throw new UnsupportedOperationException( "No additional statements are allowed after a SELECT statement."); } break; } } while (!SqlAnalyzer.isEndOfInput(parseResumeLocation)); 
if (!(statement instanceof ResolvedQueryStmt)) { throw new UnsupportedOperationException( "Statement list must end in a SELECT statement, not " + statement.nodeKindString()); } ExpressionConverter expressionConverter = new ExpressionConverter(cluster, params, udfBuilder.build()); ConversionContext context = ConversionContext.of(config, expressionConverter, cluster, trait, udtvfBuilder.build()); RelNode convertedNode = QueryStatementConverter.convertRootQuery(context, (ResolvedQueryStmt) statement); return RelRoot.of(convertedNode, SqlKind.ALL); }
class ZetaSQLPlannerImpl implements Planner { private static final Logger LOG = Logger.getLogger(ZetaSQLPlannerImpl.class.getName()); private final SchemaPlus defaultSchemaPlus; private final FrameworkConfig config; private RelOptPlanner planner; private JavaTypeFactory typeFactory; private final RexExecutor executor; private final ImmutableList<Program> programs; private static final long ONE_SECOND_IN_MILLIS = 1000L; private static final long ONE_MINUTE_IN_MILLIS = 60L * ONE_SECOND_IN_MILLIS; private static final long ONE_HOUR_IN_MILLIS = 60L * ONE_MINUTE_IN_MILLIS; private static final long ONE_DAY_IN_MILLIS = 24L * ONE_HOUR_IN_MILLIS; @SuppressWarnings("unused") private static final long ONE_MONTH_IN_MILLIS = 30L * ONE_DAY_IN_MILLIS; @SuppressWarnings("unused") private static final long ONE_YEAR_IN_MILLIS = 365L * ONE_DAY_IN_MILLIS; public ZetaSQLPlannerImpl(FrameworkConfig config) { this.config = config; this.executor = config.getExecutor(); this.programs = config.getPrograms(); Frameworks.withPlanner( (cluster, relOptSchema, rootSchema) -> { Util.discard(rootSchema); typeFactory = (JavaTypeFactory) cluster.getTypeFactory(); planner = cluster.getPlanner(); planner.setExecutor(executor); return null; }, config); this.defaultSchemaPlus = config.getDefaultSchema(); } @Override public SqlNode parse(String s) throws SqlParseException { throw new UnsupportedOperationException( String.format("%s.parse(String) is not implemented", this.getClass().getCanonicalName())); } @Override public SqlNode parse(Reader reader) throws SqlParseException { throw new UnsupportedOperationException( String.format("%s.parse(Reader) is not implemented", this.getClass().getCanonicalName())); } @Override public SqlNode validate(SqlNode sqlNode) throws ValidationException { throw new UnsupportedOperationException( String.format( "%s.validate(SqlNode) is not implemented", this.getClass().getCanonicalName())); } @Override public Pair<SqlNode, RelDataType> validateAndGetType(SqlNode sqlNode) 
throws ValidationException { throw new UnsupportedOperationException( String.format( "%s.validateAndGetType(SqlNode) is not implemented", this.getClass().getCanonicalName())); } @Override public RelRoot rel(SqlNode sqlNode) throws RelConversionException { throw new UnsupportedOperationException( String.format("%s.rel(SqlNode) is not implemented", this.getClass().getCanonicalName())); } @Override public RelNode convert(SqlNode sqlNode) { throw new UnsupportedOperationException( String.format("%s.convert(SqlNode) is not implemented.", getClass().getCanonicalName())); } @Override public RelDataTypeFactory getTypeFactory() { throw new UnsupportedOperationException( String.format("%s.getTypeFactor() is not implemented.", getClass().getCanonicalName())); } @Override public RelNode transform(int i, RelTraitSet relTraitSet, RelNode relNode) throws RelConversionException { Program program = programs.get(i); return program.run(planner, relNode, relTraitSet, ImmutableList.of(), ImmutableList.of()); } @Override public void reset() { throw new UnsupportedOperationException( String.format("%s.reset() is not implemented", this.getClass().getCanonicalName())); } @Override public void close() { } @Override public RelTraitSet getEmptyTraitSet() { throw new UnsupportedOperationException( String.format( "%s.getEmptyTraitSet() is not implemented", this.getClass().getCanonicalName())); } public static LanguageOptions getLanguageOptions() { return SqlAnalyzer.initAnalyzerOptions().getLanguageOptions(); } }
class ZetaSQLPlannerImpl implements Planner { private static final Logger LOG = Logger.getLogger(ZetaSQLPlannerImpl.class.getName()); private final SchemaPlus defaultSchemaPlus; private final FrameworkConfig config; private RelOptPlanner planner; private JavaTypeFactory typeFactory; private final RexExecutor executor; private final ImmutableList<Program> programs; private static final long ONE_SECOND_IN_MILLIS = 1000L; private static final long ONE_MINUTE_IN_MILLIS = 60L * ONE_SECOND_IN_MILLIS; private static final long ONE_HOUR_IN_MILLIS = 60L * ONE_MINUTE_IN_MILLIS; private static final long ONE_DAY_IN_MILLIS = 24L * ONE_HOUR_IN_MILLIS; @SuppressWarnings("unused") private static final long ONE_MONTH_IN_MILLIS = 30L * ONE_DAY_IN_MILLIS; @SuppressWarnings("unused") private static final long ONE_YEAR_IN_MILLIS = 365L * ONE_DAY_IN_MILLIS; public ZetaSQLPlannerImpl(FrameworkConfig config) { this.config = config; this.executor = config.getExecutor(); this.programs = config.getPrograms(); Frameworks.withPlanner( (cluster, relOptSchema, rootSchema) -> { Util.discard(rootSchema); typeFactory = (JavaTypeFactory) cluster.getTypeFactory(); planner = cluster.getPlanner(); planner.setExecutor(executor); return null; }, config); this.defaultSchemaPlus = config.getDefaultSchema(); } @Override public SqlNode parse(String s) throws SqlParseException { throw new UnsupportedOperationException( String.format("%s.parse(String) is not implemented", this.getClass().getCanonicalName())); } @Override public SqlNode parse(Reader reader) throws SqlParseException { throw new UnsupportedOperationException( String.format("%s.parse(Reader) is not implemented", this.getClass().getCanonicalName())); } @Override public SqlNode validate(SqlNode sqlNode) throws ValidationException { throw new UnsupportedOperationException( String.format( "%s.validate(SqlNode) is not implemented", this.getClass().getCanonicalName())); } @Override public Pair<SqlNode, RelDataType> validateAndGetType(SqlNode sqlNode) 
throws ValidationException { throw new UnsupportedOperationException( String.format( "%s.validateAndGetType(SqlNode) is not implemented", this.getClass().getCanonicalName())); } @Override public RelRoot rel(SqlNode sqlNode) throws RelConversionException { throw new UnsupportedOperationException( String.format("%s.rel(SqlNode) is not implemented", this.getClass().getCanonicalName())); } @Override public RelNode convert(SqlNode sqlNode) { throw new UnsupportedOperationException( String.format("%s.convert(SqlNode) is not implemented.", getClass().getCanonicalName())); } @Override public RelDataTypeFactory getTypeFactory() { throw new UnsupportedOperationException( String.format("%s.getTypeFactor() is not implemented.", getClass().getCanonicalName())); } @Override public RelNode transform(int i, RelTraitSet relTraitSet, RelNode relNode) throws RelConversionException { Program program = programs.get(i); return program.run(planner, relNode, relTraitSet, ImmutableList.of(), ImmutableList.of()); } @Override public void reset() { throw new UnsupportedOperationException( String.format("%s.reset() is not implemented", this.getClass().getCanonicalName())); } @Override public void close() { } @Override public RelTraitSet getEmptyTraitSet() { throw new UnsupportedOperationException( String.format( "%s.getEmptyTraitSet() is not implemented", this.getClass().getCanonicalName())); } public static LanguageOptions getLanguageOptions() { return SqlAnalyzer.initAnalyzerOptions().getLanguageOptions(); } }
Is there any documentation or evidence to support this behavior (defaulting to true when the value is null)?
/** Returns whether cross-tenant replication is allowed; an unset inner flag is reported as allowed. */
public boolean isAllowCrossTenantReplication() {
    // Guard on the non-null case first; a missing value falls through to the default of true.
    if (this.innerModel().allowCrossTenantReplication() != null) {
        return this.innerModel().allowCrossTenantReplication();
    }
    return true;
}
return true;
/**
 * Whether cross-tenant object replication is permitted for this storage account.
 *
 * <p>If the service payload omits {@code allowCrossTenantReplication}, this getter reports
 * {@code true}, i.e. the property is treated as enabled by default.
 * NOTE(review): presumably this mirrors the service-side default (the Azure Storage REST API
 * documents {@code allowCrossTenantReplication} as defaulting to true when unset) — confirm
 * against the current service documentation.
 */
public boolean isAllowCrossTenantReplication() {
    if (this.innerModel().allowCrossTenantReplication() == null) {
        return true;
    }
    return this.innerModel().allowCrossTenantReplication();
}
class StorageAccountImpl extends GroupableResourceImpl<StorageAccount, StorageAccountInner, StorageAccountImpl, StorageManager> implements StorageAccount, StorageAccount.Definition, StorageAccount.Update { private final ClientLogger logger = new ClientLogger(getClass()); private PublicEndpoints publicEndpoints; private AccountStatuses accountStatuses; private StorageAccountCreateParameters createParameters; private StorageAccountUpdateParameters updateParameters; private StorageNetworkRulesHelper networkRulesHelper; private StorageEncryptionHelper encryptionHelper; StorageAccountImpl(String name, StorageAccountInner innerModel, final StorageManager storageManager) { super(name, innerModel, storageManager); this.createParameters = new StorageAccountCreateParameters(); this.networkRulesHelper = new StorageNetworkRulesHelper(this.createParameters); this.encryptionHelper = new StorageEncryptionHelper(this.createParameters); } @Override public AccountStatuses accountStatuses() { if (accountStatuses == null) { accountStatuses = new AccountStatuses(this.innerModel().statusOfPrimary(), this.innerModel().statusOfSecondary()); } return accountStatuses; } @Override public StorageAccountSkuType skuType() { return StorageAccountSkuType.fromSkuName(this.innerModel().sku().name()); } @Override public Kind kind() { return innerModel().kind(); } @Override public OffsetDateTime creationTime() { return this.innerModel().creationTime(); } @Override public CustomDomain customDomain() { return this.innerModel().customDomain(); } @Override public OffsetDateTime lastGeoFailoverTime() { return this.innerModel().lastGeoFailoverTime(); } @Override public ProvisioningState provisioningState() { return this.innerModel().provisioningState(); } @Override public PublicEndpoints endPoints() { if (publicEndpoints == null) { publicEndpoints = new PublicEndpoints(this.innerModel().primaryEndpoints(), this.innerModel().secondaryEndpoints()); } return publicEndpoints; } @Override public 
StorageAccountEncryptionKeySource encryptionKeySource() { return StorageEncryptionHelper.encryptionKeySource(this.innerModel()); } @Override public Map<StorageService, StorageAccountEncryptionStatus> encryptionStatuses() { return StorageEncryptionHelper.encryptionStatuses(this.innerModel()); } @Override public boolean infrastructureEncryptionEnabled() { return StorageEncryptionHelper.infrastructureEncryptionEnabled(this.innerModel()); } @Override public AccessTier accessTier() { return innerModel().accessTier(); } @Override public String systemAssignedManagedServiceIdentityTenantId() { if (this.innerModel().identity() == null) { return null; } else { return this.innerModel().identity().tenantId(); } } @Override public String systemAssignedManagedServiceIdentityPrincipalId() { if (this.innerModel().identity() == null) { return null; } else { return this.innerModel().identity().principalId(); } } @Override public boolean isAccessAllowedFromAllNetworks() { return StorageNetworkRulesHelper.isAccessAllowedFromAllNetworks(this.innerModel()); } @Override public List<String> networkSubnetsWithAccess() { return StorageNetworkRulesHelper.networkSubnetsWithAccess(this.innerModel()); } @Override public List<String> ipAddressesWithAccess() { return StorageNetworkRulesHelper.ipAddressesWithAccess(this.innerModel()); } @Override public List<String> ipAddressRangesWithAccess() { return StorageNetworkRulesHelper.ipAddressRangesWithAccess(this.innerModel()); } @Override public boolean canReadLogEntriesFromAnyNetwork() { return StorageNetworkRulesHelper.canReadLogEntriesFromAnyNetwork(this.innerModel()); } @Override public boolean canReadMetricsFromAnyNetwork() { return StorageNetworkRulesHelper.canReadMetricsFromAnyNetwork(this.innerModel()); } @Override public boolean canAccessFromAzureServices() { return StorageNetworkRulesHelper.canAccessFromAzureServices(this.innerModel()); } @Override public boolean isAzureFilesAadIntegrationEnabled() { return 
this.innerModel().azureFilesIdentityBasedAuthentication() != null && this.innerModel().azureFilesIdentityBasedAuthentication().directoryServiceOptions() == DirectoryServiceOptions.AADDS; } @Override public boolean isHnsEnabled() { return ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().isHnsEnabled()); } @Override public boolean isLargeFileSharesEnabled() { return this.innerModel().largeFileSharesState() == LargeFileSharesState.ENABLED; } @Override public MinimumTlsVersion minimumTlsVersion() { return this.innerModel().minimumTlsVersion(); } @Override public boolean isHttpsTrafficOnly() { if (this.innerModel().enableHttpsTrafficOnly() == null) { return true; } return this.innerModel().enableHttpsTrafficOnly(); } @Override public boolean isBlobPublicAccessAllowed() { if (this.innerModel().allowBlobPublicAccess() == null) { return true; } return this.innerModel().allowBlobPublicAccess(); } @Override public boolean isSharedKeyAccessAllowed() { if (this.innerModel().allowSharedKeyAccess() == null) { return true; } return this.innerModel().allowSharedKeyAccess(); } @Override @Override public boolean isDefaultToOAuthAuthentication() { if (this.innerModel().defaultToOAuthAuthentication() == null) { return true; } return this.innerModel().defaultToOAuthAuthentication(); } @Override public List<StorageAccountKey> getKeys() { return this.getKeysAsync().block(); } @Override public Mono<List<StorageAccountKey>> getKeysAsync() { return this .manager() .serviceClient() .getStorageAccounts() .listKeysAsync(this.resourceGroupName(), this.name()) .map(storageAccountListKeysResultInner -> storageAccountListKeysResultInner.keys()); } @Override public List<StorageAccountKey> regenerateKey(String keyName) { return this.regenerateKeyAsync(keyName).block(); } @Override public Mono<List<StorageAccountKey>> regenerateKeyAsync(String keyName) { return this .manager() .serviceClient() .getStorageAccounts() .regenerateKeyAsync(this.resourceGroupName(), this.name(), new 
StorageAccountRegenerateKeyParameters().withKeyName(keyName)) .map(storageAccountListKeysResultInner -> storageAccountListKeysResultInner.keys()); } @Override public PagedIterable<PrivateLinkResource> listPrivateLinkResources() { return new PagedIterable<>(listPrivateLinkResourcesAsync()); } @Override public PagedFlux<PrivateLinkResource> listPrivateLinkResourcesAsync() { Mono<Response<List<PrivateLinkResource>>> retList = this.manager().serviceClient().getPrivateLinkResources() .listByStorageAccountWithResponseAsync(this.resourceGroupName(), this.name()) .map(response -> new SimpleResponse<>(response, response.getValue().value().stream() .map(PrivateLinkResourceImpl::new) .collect(Collectors.toList()))); return PagedConverter.convertListToPagedFlux(retList); } @Override public PagedIterable<PrivateEndpointConnection> listPrivateEndpointConnections() { return new PagedIterable<>(listPrivateEndpointConnectionsAsync()); } @Override public PagedFlux<PrivateEndpointConnection> listPrivateEndpointConnectionsAsync() { return PagedConverter.mapPage(this.manager().serviceClient().getPrivateEndpointConnections() .listAsync(this.resourceGroupName(), this.name()), PrivateEndpointConnectionImpl::new); } @Override public void approvePrivateEndpointConnection(String privateEndpointConnectionName) { approvePrivateEndpointConnectionAsync(privateEndpointConnectionName).block(); } @Override public Mono<Void> approvePrivateEndpointConnectionAsync(String privateEndpointConnectionName) { return this.manager().serviceClient().getPrivateEndpointConnections() .putWithResponseAsync(this.resourceGroupName(), this.name(), privateEndpointConnectionName, new PrivateEndpointConnectionInner().withPrivateLinkServiceConnectionState( new PrivateLinkServiceConnectionState() .withStatus( PrivateEndpointServiceConnectionStatus.APPROVED))) .then(); } @Override public void rejectPrivateEndpointConnection(String privateEndpointConnectionName) { 
rejectPrivateEndpointConnectionAsync(privateEndpointConnectionName).block(); } @Override public Mono<Void> rejectPrivateEndpointConnectionAsync(String privateEndpointConnectionName) { return this.manager().serviceClient().getPrivateEndpointConnections() .putWithResponseAsync(this.resourceGroupName(), this.name(), privateEndpointConnectionName, new PrivateEndpointConnectionInner().withPrivateLinkServiceConnectionState( new PrivateLinkServiceConnectionState() .withStatus( PrivateEndpointServiceConnectionStatus.REJECTED))) .then(); } @Override public Mono<StorageAccount> refreshAsync() { return super .refreshAsync() .map( storageAccount -> { StorageAccountImpl impl = (StorageAccountImpl) storageAccount; impl.clearWrapperProperties(); return impl; }); } @Override protected Mono<StorageAccountInner> getInnerAsync() { return this.manager().serviceClient().getStorageAccounts().getByResourceGroupAsync(this.resourceGroupName(), this.name()); } @Override public StorageAccountImpl withSku(StorageAccountSkuType sku) { if (isInCreateMode()) { createParameters.withSku(new Sku().withName(sku.name())); } else { updateParameters.withSku(new Sku().withName(sku.name())); } return this; } @Override public StorageAccountImpl withBlobStorageAccountKind() { createParameters.withKind(Kind.BLOB_STORAGE); return this; } @Override public StorageAccountImpl withGeneralPurposeAccountKind() { createParameters.withKind(Kind.STORAGE); return this; } @Override public StorageAccountImpl withGeneralPurposeAccountKindV2() { createParameters.withKind(Kind.STORAGE_V2); return this; } @Override public StorageAccountImpl withBlockBlobStorageAccountKind() { createParameters.withKind(Kind.BLOCK_BLOB_STORAGE); return this; } @Override public StorageAccountImpl withFileStorageAccountKind() { createParameters.withKind(Kind.FILE_STORAGE); return this; } @Override public StorageAccountImpl withInfrastructureEncryption() { this.encryptionHelper.withInfrastructureEncryption(); return this; } @Override public 
StorageAccountImpl withBlobEncryption() { this.encryptionHelper.withBlobEncryption(); return this; } @Override public StorageAccountImpl withFileEncryption() { this.encryptionHelper.withFileEncryption(); return this; } @Override public StorageAccountImpl withEncryptionKeyFromKeyVault(String keyVaultUri, String keyName, String keyVersion) { this.encryptionHelper.withEncryptionKeyFromKeyVault(keyVaultUri, keyName, keyVersion); return this; } @Override public StorageAccountImpl withoutBlobEncryption() { this.encryptionHelper.withoutBlobEncryption(); return this; } @Override public StorageAccountImpl withoutFileEncryption() { this.encryptionHelper.withoutFileEncryption(); return this; } @Override public StorageAccountImpl withTableAccountScopedEncryptionKey() { this.encryptionHelper.withTableEncryption(); return this; } @Override public StorageAccountImpl withQueueAccountScopedEncryptionKey() { this.encryptionHelper.withQueueEncryption(); return this; } private void clearWrapperProperties() { accountStatuses = null; publicEndpoints = null; } @Override public StorageAccountImpl update() { createParameters = null; updateParameters = new StorageAccountUpdateParameters(); this.networkRulesHelper = new StorageNetworkRulesHelper(this.updateParameters, this.innerModel()); this.encryptionHelper = new StorageEncryptionHelper(this.updateParameters, this.innerModel()); return super.update(); } @Override public StorageAccountImpl withCustomDomain(CustomDomain customDomain) { if (isInCreateMode()) { createParameters.withCustomDomain(customDomain); } else { updateParameters.withCustomDomain(customDomain); } return this; } @Override public StorageAccountImpl withCustomDomain(String name) { return withCustomDomain(new CustomDomain().withName(name)); } @Override public StorageAccountImpl withCustomDomain(String name, boolean useSubDomain) { return withCustomDomain(new CustomDomain().withName(name).withUseSubDomainName(useSubDomain)); } @Override public StorageAccountImpl 
withAccessTier(AccessTier accessTier) { if (isInCreateMode()) { createParameters.withAccessTier(accessTier); } else { if (this.innerModel().kind() != Kind.BLOB_STORAGE) { throw logger.logExceptionAsError(new UnsupportedOperationException( "Access tier can not be changed for general purpose storage accounts.")); } updateParameters.withAccessTier(accessTier); } return this; } @Override public StorageAccountImpl withSystemAssignedManagedServiceIdentity() { if (this.innerModel().identity() == null) { if (isInCreateMode()) { createParameters.withIdentity(new Identity().withType(IdentityType.SYSTEM_ASSIGNED)); } else { updateParameters.withIdentity(new Identity().withType(IdentityType.SYSTEM_ASSIGNED)); } } return this; } @Override public StorageAccountImpl withOnlyHttpsTraffic() { if (isInCreateMode()) { createParameters.withEnableHttpsTrafficOnly(true); } else { updateParameters.withEnableHttpsTrafficOnly(true); } return this; } @Override public StorageAccountImpl withHttpAndHttpsTraffic() { if (isInCreateMode()) { createParameters.withEnableHttpsTrafficOnly(false); } else { updateParameters.withEnableHttpsTrafficOnly(false); } return this; } @Override public StorageAccountImpl withMinimumTlsVersion(MinimumTlsVersion minimumTlsVersion) { if (isInCreateMode()) { createParameters.withMinimumTlsVersion(minimumTlsVersion); } else { updateParameters.withMinimumTlsVersion(minimumTlsVersion); } return this; } @Override public StorageAccountImpl enableBlobPublicAccess() { if (isInCreateMode()) { createParameters.withAllowBlobPublicAccess(true); } else { updateParameters.withAllowBlobPublicAccess(true); } return this; } @Override public StorageAccountImpl disableBlobPublicAccess() { if (isInCreateMode()) { createParameters.withAllowBlobPublicAccess(false); } else { updateParameters.withAllowBlobPublicAccess(false); } return this; } @Override public StorageAccountImpl enableSharedKeyAccess() { if (isInCreateMode()) { createParameters.withAllowSharedKeyAccess(true); } else { 
updateParameters.withAllowSharedKeyAccess(true); } return this; } @Override public StorageAccountImpl disableSharedKeyAccess() { if (isInCreateMode()) { createParameters.withAllowSharedKeyAccess(false); } else { updateParameters.withAllowSharedKeyAccess(false); } return this; } @Override public StorageAccountImpl withAllowCrossTenantReplication(boolean enabled) { if (isInCreateMode()) { createParameters.withAllowCrossTenantReplication(enabled); } else { updateParameters.withAllowCrossTenantReplication(enabled); } return this; } @Override public StorageAccountImpl withDefaultToOAuthAuthentication(boolean enabled) { if (isInCreateMode()) { createParameters.withDefaultToOAuthAuthentication(enabled); } else { updateParameters.withDefaultToOAuthAuthentication(enabled); } return this; } @Override public StorageAccountImpl withAccessFromAllNetworks() { this.networkRulesHelper.withAccessFromAllNetworks(); return this; } @Override public StorageAccountImpl withAccessFromSelectedNetworks() { this.networkRulesHelper.withAccessFromSelectedNetworks(); return this; } @Override public StorageAccountImpl withAccessFromNetworkSubnet(String subnetId) { this.networkRulesHelper.withAccessFromNetworkSubnet(subnetId); return this; } @Override public StorageAccountImpl withAccessFromIpAddress(String ipAddress) { this.networkRulesHelper.withAccessFromIpAddress(ipAddress); return this; } @Override public StorageAccountImpl withAccessFromIpAddressRange(String ipAddressCidr) { this.networkRulesHelper.withAccessFromIpAddressRange(ipAddressCidr); return this; } @Override public StorageAccountImpl withReadAccessToLogEntriesFromAnyNetwork() { this.networkRulesHelper.withReadAccessToLoggingFromAnyNetwork(); return this; } @Override public StorageAccountImpl withReadAccessToMetricsFromAnyNetwork() { this.networkRulesHelper.withReadAccessToMetricsFromAnyNetwork(); return this; } @Override public StorageAccountImpl withAccessFromAzureServices() { 
this.networkRulesHelper.withAccessAllowedFromAzureServices(); return this; } @Override public StorageAccountImpl withoutNetworkSubnetAccess(String subnetId) { this.networkRulesHelper.withoutNetworkSubnetAccess(subnetId); return this; } @Override public StorageAccountImpl withoutIpAddressAccess(String ipAddress) { this.networkRulesHelper.withoutIpAddressAccess(ipAddress); return this; } @Override public StorageAccountImpl withoutIpAddressRangeAccess(String ipAddressCidr) { this.networkRulesHelper.withoutIpAddressRangeAccess(ipAddressCidr); return this; } @Override public Update withoutReadAccessToLoggingFromAnyNetwork() { this.networkRulesHelper.withoutReadAccessToLoggingFromAnyNetwork(); return this; } @Override public Update withoutReadAccessToMetricsFromAnyNetwork() { this.networkRulesHelper.withoutReadAccessToMetricsFromAnyNetwork(); return this; } @Override public Update withoutAccessFromAzureServices() { this.networkRulesHelper.withoutAccessFromAzureServices(); return this; } @Override public Update upgradeToGeneralPurposeAccountKindV2() { updateParameters.withKind(Kind.STORAGE_V2); return this; } @Override public Mono<StorageAccount> createResourceAsync() { this.networkRulesHelper.setDefaultActionIfRequired(); createParameters.withLocation(this.regionName()); createParameters.withTags(this.innerModel().tags()); final StorageAccountsClient client = this.manager().serviceClient().getStorageAccounts(); return this .manager() .serviceClient() .getStorageAccounts() .createAsync(this.resourceGroupName(), this.name(), createParameters) .flatMap( storageAccountInner -> client .getByResourceGroupAsync(resourceGroupName(), this.name()) .map(innerToFluentMap(this)) .doOnNext(storageAccount -> clearWrapperProperties())); } @Override public Mono<StorageAccount> updateResourceAsync() { this.networkRulesHelper.setDefaultActionIfRequired(); updateParameters.withTags(this.innerModel().tags()); return this .manager() .serviceClient() .getStorageAccounts() 
.updateAsync(resourceGroupName(), this.name(), updateParameters) .map(innerToFluentMap(this)) .doOnNext(storageAccount -> clearWrapperProperties()); } @Override public StorageAccountImpl withAzureFilesAadIntegrationEnabled(boolean enabled) { if (isInCreateMode()) { if (enabled) { this .createParameters .withAzureFilesIdentityBasedAuthentication( new AzureFilesIdentityBasedAuthentication() .withDirectoryServiceOptions(DirectoryServiceOptions.AADDS)); } } else { if (this.createParameters.azureFilesIdentityBasedAuthentication() == null) { this .createParameters .withAzureFilesIdentityBasedAuthentication(new AzureFilesIdentityBasedAuthentication()); } if (enabled) { this .updateParameters .azureFilesIdentityBasedAuthentication() .withDirectoryServiceOptions(DirectoryServiceOptions.AADDS); } else { this .updateParameters .azureFilesIdentityBasedAuthentication() .withDirectoryServiceOptions(DirectoryServiceOptions.NONE); } } return this; } @Override public StorageAccountImpl withLargeFileShares(boolean enabled) { if (isInCreateMode()) { if (enabled) { this.createParameters.withLargeFileSharesState(LargeFileSharesState.ENABLED); } else { this.createParameters.withLargeFileSharesState(LargeFileSharesState.DISABLED); } } return this; } @Override public StorageAccountImpl withHnsEnabled(boolean enabled) { this.createParameters.withIsHnsEnabled(enabled); return this; } private static final class PrivateLinkResourceImpl implements PrivateLinkResource { private final com.azure.resourcemanager.storage.models.PrivateLinkResource innerModel; private PrivateLinkResourceImpl(com.azure.resourcemanager.storage.models.PrivateLinkResource innerModel) { this.innerModel = innerModel; } @Override public String groupId() { return innerModel.groupId(); } @Override public List<String> requiredMemberNames() { return Collections.unmodifiableList(innerModel.requiredMembers()); } @Override public List<String> requiredDnsZoneNames() { return 
Collections.unmodifiableList(innerModel.requiredZoneNames()); } } private static final class PrivateEndpointConnectionImpl implements PrivateEndpointConnection { private final PrivateEndpointConnectionInner innerModel; private final PrivateEndpoint privateEndpoint; private final com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateLinkServiceConnectionState privateLinkServiceConnectionState; private final PrivateEndpointConnectionProvisioningState provisioningState; private PrivateEndpointConnectionImpl(PrivateEndpointConnectionInner innerModel) { this.innerModel = innerModel; this.privateEndpoint = innerModel.privateEndpoint() == null ? null : new PrivateEndpoint(innerModel.privateEndpoint().id()); this.privateLinkServiceConnectionState = innerModel.privateLinkServiceConnectionState() == null ? null : new com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateLinkServiceConnectionState( innerModel.privateLinkServiceConnectionState().status() == null ? null : com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateEndpointServiceConnectionStatus .fromString(innerModel.privateLinkServiceConnectionState().status().toString()), innerModel.privateLinkServiceConnectionState().description(), innerModel.privateLinkServiceConnectionState().actionRequired()); this.provisioningState = innerModel.provisioningState() == null ? 
null : PrivateEndpointConnectionProvisioningState.fromString(innerModel.provisioningState().toString()); } @Override public String id() { return innerModel.id(); } @Override public String name() { return innerModel.name(); } @Override public String type() { return innerModel.type(); } @Override public PrivateEndpoint privateEndpoint() { return privateEndpoint; } @Override public com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateLinkServiceConnectionState privateLinkServiceConnectionState() { return privateLinkServiceConnectionState; } @Override public PrivateEndpointConnectionProvisioningState provisioningState() { return provisioningState; } } }
/**
 * Implementation for {@code StorageAccount} and its fluent Definition/Update stages.
 * <p>
 * Create-mode mutators write into {@code createParameters}; update-mode mutators write into
 * {@code updateParameters}. Note that {@link #update()} sets {@code createParameters} to
 * {@code null}, so update-mode code paths must never touch {@code createParameters}.
 * <p>
 * Fixes in this revision:
 * <ul>
 *   <li>removed a duplicated {@code @Override} annotation on {@code isDefaultToOAuthAuthentication()}
 *       (duplicate annotations do not compile);</li>
 *   <li>{@code withAzureFilesAadIntegrationEnabled(boolean)} now null-checks and initializes
 *       {@code updateParameters} (not the null {@code createParameters}) in update mode,
 *       avoiding a NullPointerException.</li>
 * </ul>
 */
class StorageAccountImpl extends GroupableResourceImpl<StorageAccount, StorageAccountInner, StorageAccountImpl, StorageManager> implements StorageAccount, StorageAccount.Definition, StorageAccount.Update { private final ClientLogger logger = new ClientLogger(getClass()); private PublicEndpoints publicEndpoints; private AccountStatuses accountStatuses; private StorageAccountCreateParameters createParameters; private StorageAccountUpdateParameters updateParameters; private StorageNetworkRulesHelper networkRulesHelper; private StorageEncryptionHelper encryptionHelper; StorageAccountImpl(String name, StorageAccountInner innerModel, final StorageManager storageManager) { super(name, innerModel, storageManager); this.createParameters = new StorageAccountCreateParameters(); this.networkRulesHelper = new StorageNetworkRulesHelper(this.createParameters); this.encryptionHelper = new StorageEncryptionHelper(this.createParameters); } @Override public AccountStatuses accountStatuses() { if (accountStatuses == null) { accountStatuses = new AccountStatuses(this.innerModel().statusOfPrimary(), this.innerModel().statusOfSecondary()); } return accountStatuses; } @Override public StorageAccountSkuType skuType() { return StorageAccountSkuType.fromSkuName(this.innerModel().sku().name()); } @Override public Kind kind() { return innerModel().kind(); } @Override public OffsetDateTime creationTime() { return this.innerModel().creationTime(); } @Override public CustomDomain customDomain() { return this.innerModel().customDomain(); } @Override public OffsetDateTime lastGeoFailoverTime() { return this.innerModel().lastGeoFailoverTime(); } @Override public ProvisioningState provisioningState() { return this.innerModel().provisioningState(); } @Override public PublicEndpoints endPoints() { if (publicEndpoints == null) { publicEndpoints = new PublicEndpoints(this.innerModel().primaryEndpoints(), this.innerModel().secondaryEndpoints()); } return publicEndpoints; } @Override public
StorageAccountEncryptionKeySource encryptionKeySource() { return StorageEncryptionHelper.encryptionKeySource(this.innerModel()); } @Override public Map<StorageService, StorageAccountEncryptionStatus> encryptionStatuses() { return StorageEncryptionHelper.encryptionStatuses(this.innerModel()); } @Override public boolean infrastructureEncryptionEnabled() { return StorageEncryptionHelper.infrastructureEncryptionEnabled(this.innerModel()); } @Override public AccessTier accessTier() { return innerModel().accessTier(); } @Override public String systemAssignedManagedServiceIdentityTenantId() { if (this.innerModel().identity() == null) { return null; } else { return this.innerModel().identity().tenantId(); } } @Override public String systemAssignedManagedServiceIdentityPrincipalId() { if (this.innerModel().identity() == null) { return null; } else { return this.innerModel().identity().principalId(); } } @Override public boolean isAccessAllowedFromAllNetworks() { return StorageNetworkRulesHelper.isAccessAllowedFromAllNetworks(this.innerModel()); } @Override public List<String> networkSubnetsWithAccess() { return StorageNetworkRulesHelper.networkSubnetsWithAccess(this.innerModel()); } @Override public List<String> ipAddressesWithAccess() { return StorageNetworkRulesHelper.ipAddressesWithAccess(this.innerModel()); } @Override public List<String> ipAddressRangesWithAccess() { return StorageNetworkRulesHelper.ipAddressRangesWithAccess(this.innerModel()); } @Override public boolean canReadLogEntriesFromAnyNetwork() { return StorageNetworkRulesHelper.canReadLogEntriesFromAnyNetwork(this.innerModel()); } @Override public boolean canReadMetricsFromAnyNetwork() { return StorageNetworkRulesHelper.canReadMetricsFromAnyNetwork(this.innerModel()); } @Override public boolean canAccessFromAzureServices() { return StorageNetworkRulesHelper.canAccessFromAzureServices(this.innerModel()); } @Override public boolean isAzureFilesAadIntegrationEnabled() { return
/* FIX: a second, duplicated @Override before isDefaultToOAuthAuthentication() was removed — duplicate annotations are a compile error. */
this.innerModel().azureFilesIdentityBasedAuthentication() != null && this.innerModel().azureFilesIdentityBasedAuthentication().directoryServiceOptions() == DirectoryServiceOptions.AADDS; } @Override public boolean isHnsEnabled() { return ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().isHnsEnabled()); } @Override public boolean isLargeFileSharesEnabled() { return this.innerModel().largeFileSharesState() == LargeFileSharesState.ENABLED; } @Override public MinimumTlsVersion minimumTlsVersion() { return this.innerModel().minimumTlsVersion(); } @Override public boolean isHttpsTrafficOnly() { if (this.innerModel().enableHttpsTrafficOnly() == null) { return true; } return this.innerModel().enableHttpsTrafficOnly(); } @Override public boolean isBlobPublicAccessAllowed() { if (this.innerModel().allowBlobPublicAccess() == null) { return true; } return this.innerModel().allowBlobPublicAccess(); } @Override public boolean isSharedKeyAccessAllowed() { if (this.innerModel().allowSharedKeyAccess() == null) { return true; } return this.innerModel().allowSharedKeyAccess(); } @Override public boolean isDefaultToOAuthAuthentication() { return ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().defaultToOAuthAuthentication()); } @Override public List<StorageAccountKey> getKeys() { return this.getKeysAsync().block(); } @Override public Mono<List<StorageAccountKey>> getKeysAsync() { return this .manager() .serviceClient() .getStorageAccounts() .listKeysAsync(this.resourceGroupName(), this.name()) .map(storageAccountListKeysResultInner -> storageAccountListKeysResultInner.keys()); } @Override public List<StorageAccountKey> regenerateKey(String keyName) { return this.regenerateKeyAsync(keyName).block(); } @Override public Mono<List<StorageAccountKey>> regenerateKeyAsync(String keyName) { return this .manager() .serviceClient() .getStorageAccounts() .regenerateKeyAsync(this.resourceGroupName(), this.name(), new
StorageAccountRegenerateKeyParameters().withKeyName(keyName)) .map(storageAccountListKeysResultInner -> storageAccountListKeysResultInner.keys()); } @Override public PagedIterable<PrivateLinkResource> listPrivateLinkResources() { return new PagedIterable<>(listPrivateLinkResourcesAsync()); } @Override public PagedFlux<PrivateLinkResource> listPrivateLinkResourcesAsync() { Mono<Response<List<PrivateLinkResource>>> retList = this.manager().serviceClient().getPrivateLinkResources() .listByStorageAccountWithResponseAsync(this.resourceGroupName(), this.name()) .map(response -> new SimpleResponse<>(response, response.getValue().value().stream() .map(PrivateLinkResourceImpl::new) .collect(Collectors.toList()))); return PagedConverter.convertListToPagedFlux(retList); } @Override public PagedIterable<PrivateEndpointConnection> listPrivateEndpointConnections() { return new PagedIterable<>(listPrivateEndpointConnectionsAsync()); } @Override public PagedFlux<PrivateEndpointConnection> listPrivateEndpointConnectionsAsync() { return PagedConverter.mapPage(this.manager().serviceClient().getPrivateEndpointConnections() .listAsync(this.resourceGroupName(), this.name()), PrivateEndpointConnectionImpl::new); } @Override public void approvePrivateEndpointConnection(String privateEndpointConnectionName) { approvePrivateEndpointConnectionAsync(privateEndpointConnectionName).block(); } @Override public Mono<Void> approvePrivateEndpointConnectionAsync(String privateEndpointConnectionName) { return this.manager().serviceClient().getPrivateEndpointConnections() .putWithResponseAsync(this.resourceGroupName(), this.name(), privateEndpointConnectionName, new PrivateEndpointConnectionInner().withPrivateLinkServiceConnectionState( new PrivateLinkServiceConnectionState() .withStatus( PrivateEndpointServiceConnectionStatus.APPROVED))) .then(); } @Override public void rejectPrivateEndpointConnection(String privateEndpointConnectionName) {
rejectPrivateEndpointConnectionAsync(privateEndpointConnectionName).block(); } @Override public Mono<Void> rejectPrivateEndpointConnectionAsync(String privateEndpointConnectionName) { return this.manager().serviceClient().getPrivateEndpointConnections() .putWithResponseAsync(this.resourceGroupName(), this.name(), privateEndpointConnectionName, new PrivateEndpointConnectionInner().withPrivateLinkServiceConnectionState( new PrivateLinkServiceConnectionState() .withStatus( PrivateEndpointServiceConnectionStatus.REJECTED))) .then(); } @Override public Mono<StorageAccount> refreshAsync() { return super .refreshAsync() .map( storageAccount -> { StorageAccountImpl impl = (StorageAccountImpl) storageAccount; impl.clearWrapperProperties(); return impl; }); } @Override protected Mono<StorageAccountInner> getInnerAsync() { return this.manager().serviceClient().getStorageAccounts().getByResourceGroupAsync(this.resourceGroupName(), this.name()); } @Override public StorageAccountImpl withSku(StorageAccountSkuType sku) { if (isInCreateMode()) { createParameters.withSku(new Sku().withName(sku.name())); } else { updateParameters.withSku(new Sku().withName(sku.name())); } return this; } @Override public StorageAccountImpl withBlobStorageAccountKind() { createParameters.withKind(Kind.BLOB_STORAGE); return this; } @Override public StorageAccountImpl withGeneralPurposeAccountKind() { createParameters.withKind(Kind.STORAGE); return this; } @Override public StorageAccountImpl withGeneralPurposeAccountKindV2() { createParameters.withKind(Kind.STORAGE_V2); return this; } @Override public StorageAccountImpl withBlockBlobStorageAccountKind() { createParameters.withKind(Kind.BLOCK_BLOB_STORAGE); return this; } @Override public StorageAccountImpl withFileStorageAccountKind() { createParameters.withKind(Kind.FILE_STORAGE); return this; } @Override public StorageAccountImpl withInfrastructureEncryption() { this.encryptionHelper.withInfrastructureEncryption(); return this; } @Override public
StorageAccountImpl withBlobEncryption() { this.encryptionHelper.withBlobEncryption(); return this; } @Override public StorageAccountImpl withFileEncryption() { this.encryptionHelper.withFileEncryption(); return this; } @Override public StorageAccountImpl withEncryptionKeyFromKeyVault(String keyVaultUri, String keyName, String keyVersion) { this.encryptionHelper.withEncryptionKeyFromKeyVault(keyVaultUri, keyName, keyVersion); return this; } @Override public StorageAccountImpl withoutBlobEncryption() { this.encryptionHelper.withoutBlobEncryption(); return this; } @Override public StorageAccountImpl withoutFileEncryption() { this.encryptionHelper.withoutFileEncryption(); return this; } @Override public StorageAccountImpl withTableAccountScopedEncryptionKey() { this.encryptionHelper.withTableEncryption(); return this; } @Override public StorageAccountImpl withQueueAccountScopedEncryptionKey() { this.encryptionHelper.withQueueEncryption(); return this; } private void clearWrapperProperties() { accountStatuses = null; publicEndpoints = null; } @Override public StorageAccountImpl update() { createParameters = null; updateParameters = new StorageAccountUpdateParameters(); this.networkRulesHelper = new StorageNetworkRulesHelper(this.updateParameters, this.innerModel()); this.encryptionHelper = new StorageEncryptionHelper(this.updateParameters, this.innerModel()); return super.update(); } @Override public StorageAccountImpl withCustomDomain(CustomDomain customDomain) { if (isInCreateMode()) { createParameters.withCustomDomain(customDomain); } else { updateParameters.withCustomDomain(customDomain); } return this; } @Override public StorageAccountImpl withCustomDomain(String name) { return withCustomDomain(new CustomDomain().withName(name)); } @Override public StorageAccountImpl withCustomDomain(String name, boolean useSubDomain) { return withCustomDomain(new CustomDomain().withName(name).withUseSubDomainName(useSubDomain)); } @Override public StorageAccountImpl
withAccessTier(AccessTier accessTier) { if (isInCreateMode()) { createParameters.withAccessTier(accessTier); } else { if (this.innerModel().kind() != Kind.BLOB_STORAGE) { throw logger.logExceptionAsError(new UnsupportedOperationException( "Access tier can not be changed for general purpose storage accounts.")); } updateParameters.withAccessTier(accessTier); } return this; } @Override public StorageAccountImpl withSystemAssignedManagedServiceIdentity() { if (this.innerModel().identity() == null) { if (isInCreateMode()) { createParameters.withIdentity(new Identity().withType(IdentityType.SYSTEM_ASSIGNED)); } else { updateParameters.withIdentity(new Identity().withType(IdentityType.SYSTEM_ASSIGNED)); } } return this; } @Override public StorageAccountImpl withOnlyHttpsTraffic() { if (isInCreateMode()) { createParameters.withEnableHttpsTrafficOnly(true); } else { updateParameters.withEnableHttpsTrafficOnly(true); } return this; } @Override public StorageAccountImpl withHttpAndHttpsTraffic() { if (isInCreateMode()) { createParameters.withEnableHttpsTrafficOnly(false); } else { updateParameters.withEnableHttpsTrafficOnly(false); } return this; } @Override public StorageAccountImpl withMinimumTlsVersion(MinimumTlsVersion minimumTlsVersion) { if (isInCreateMode()) { createParameters.withMinimumTlsVersion(minimumTlsVersion); } else { updateParameters.withMinimumTlsVersion(minimumTlsVersion); } return this; } @Override public StorageAccountImpl enableBlobPublicAccess() { if (isInCreateMode()) { createParameters.withAllowBlobPublicAccess(true); } else { updateParameters.withAllowBlobPublicAccess(true); } return this; } @Override public StorageAccountImpl disableBlobPublicAccess() { if (isInCreateMode()) { createParameters.withAllowBlobPublicAccess(false); } else { updateParameters.withAllowBlobPublicAccess(false); } return this; } @Override public StorageAccountImpl enableSharedKeyAccess() { if (isInCreateMode()) { createParameters.withAllowSharedKeyAccess(true); } else {
updateParameters.withAllowSharedKeyAccess(true); } return this; } @Override public StorageAccountImpl disableSharedKeyAccess() { if (isInCreateMode()) { createParameters.withAllowSharedKeyAccess(false); } else { updateParameters.withAllowSharedKeyAccess(false); } return this; } @Override public StorageAccountImpl allowCrossTenantReplication() { if (isInCreateMode()) { createParameters.withAllowCrossTenantReplication(true); } else { updateParameters.withAllowCrossTenantReplication(true); } return this; } @Override public StorageAccountImpl disallowCrossTenantReplication() { if (isInCreateMode()) { createParameters.withAllowCrossTenantReplication(false); } else { updateParameters.withAllowCrossTenantReplication(false); } return this; } @Override public StorageAccountImpl enableDefaultToOAuthAuthentication() { if (isInCreateMode()) { createParameters.withDefaultToOAuthAuthentication(true); } else { updateParameters.withDefaultToOAuthAuthentication(true); } return this; } @Override public StorageAccountImpl disableDefaultToOAuthAuthentication() { if (isInCreateMode()) { createParameters.withDefaultToOAuthAuthentication(false); } else { updateParameters.withDefaultToOAuthAuthentication(false); } return this; } @Override public StorageAccountImpl withAccessFromAllNetworks() { this.networkRulesHelper.withAccessFromAllNetworks(); return this; } @Override public StorageAccountImpl withAccessFromSelectedNetworks() { this.networkRulesHelper.withAccessFromSelectedNetworks(); return this; } @Override public StorageAccountImpl withAccessFromNetworkSubnet(String subnetId) { this.networkRulesHelper.withAccessFromNetworkSubnet(subnetId); return this; } @Override public StorageAccountImpl withAccessFromIpAddress(String ipAddress) { this.networkRulesHelper.withAccessFromIpAddress(ipAddress); return this; } @Override public StorageAccountImpl withAccessFromIpAddressRange(String ipAddressCidr) { this.networkRulesHelper.withAccessFromIpAddressRange(ipAddressCidr); return this; }
@Override public StorageAccountImpl withReadAccessToLogEntriesFromAnyNetwork() { this.networkRulesHelper.withReadAccessToLoggingFromAnyNetwork(); return this; } @Override public StorageAccountImpl withReadAccessToMetricsFromAnyNetwork() { this.networkRulesHelper.withReadAccessToMetricsFromAnyNetwork(); return this; } @Override public StorageAccountImpl withAccessFromAzureServices() { this.networkRulesHelper.withAccessAllowedFromAzureServices(); return this; } @Override public StorageAccountImpl withoutNetworkSubnetAccess(String subnetId) { this.networkRulesHelper.withoutNetworkSubnetAccess(subnetId); return this; } @Override public StorageAccountImpl withoutIpAddressAccess(String ipAddress) { this.networkRulesHelper.withoutIpAddressAccess(ipAddress); return this; } @Override public StorageAccountImpl withoutIpAddressRangeAccess(String ipAddressCidr) { this.networkRulesHelper.withoutIpAddressRangeAccess(ipAddressCidr); return this; } @Override public Update withoutReadAccessToLoggingFromAnyNetwork() { this.networkRulesHelper.withoutReadAccessToLoggingFromAnyNetwork(); return this; } @Override public Update withoutReadAccessToMetricsFromAnyNetwork() { this.networkRulesHelper.withoutReadAccessToMetricsFromAnyNetwork(); return this; } @Override public Update withoutAccessFromAzureServices() { this.networkRulesHelper.withoutAccessFromAzureServices(); return this; } @Override public Update upgradeToGeneralPurposeAccountKindV2() { updateParameters.withKind(Kind.STORAGE_V2); return this; } @Override public Mono<StorageAccount> createResourceAsync() { this.networkRulesHelper.setDefaultActionIfRequired(); createParameters.withLocation(this.regionName()); createParameters.withTags(this.innerModel().tags()); final StorageAccountsClient client = this.manager().serviceClient().getStorageAccounts(); return this .manager() .serviceClient() .getStorageAccounts() .createAsync(this.resourceGroupName(), this.name(), createParameters) .flatMap( storageAccountInner -> client
/* FIX below in withAzureFilesAadIntegrationEnabled: the update-mode branch previously null-checked and initialized createParameters (which update() sets to null -> NPE) while mutating updateParameters; it now consistently uses updateParameters. */
.getByResourceGroupAsync(resourceGroupName(), this.name()) .map(innerToFluentMap(this)) .doOnNext(storageAccount -> clearWrapperProperties())); } @Override public Mono<StorageAccount> updateResourceAsync() { this.networkRulesHelper.setDefaultActionIfRequired(); updateParameters.withTags(this.innerModel().tags()); return this .manager() .serviceClient() .getStorageAccounts() .updateAsync(resourceGroupName(), this.name(), updateParameters) .map(innerToFluentMap(this)) .doOnNext(storageAccount -> clearWrapperProperties()); } @Override public StorageAccountImpl withAzureFilesAadIntegrationEnabled(boolean enabled) { if (isInCreateMode()) { if (enabled) { this .createParameters .withAzureFilesIdentityBasedAuthentication( new AzureFilesIdentityBasedAuthentication() .withDirectoryServiceOptions(DirectoryServiceOptions.AADDS)); } } else { if (this.updateParameters.azureFilesIdentityBasedAuthentication() == null) { this .updateParameters .withAzureFilesIdentityBasedAuthentication(new AzureFilesIdentityBasedAuthentication()); } if (enabled) { this .updateParameters .azureFilesIdentityBasedAuthentication() .withDirectoryServiceOptions(DirectoryServiceOptions.AADDS); } else { this .updateParameters .azureFilesIdentityBasedAuthentication() .withDirectoryServiceOptions(DirectoryServiceOptions.NONE); } } return this; } @Override public StorageAccountImpl withLargeFileShares(boolean enabled) { if (isInCreateMode()) { if (enabled) { this.createParameters.withLargeFileSharesState(LargeFileSharesState.ENABLED); } else { this.createParameters.withLargeFileSharesState(LargeFileSharesState.DISABLED); } } return this; } @Override public StorageAccountImpl withHnsEnabled(boolean enabled) { this.createParameters.withIsHnsEnabled(enabled); return this; } private static final class PrivateLinkResourceImpl implements PrivateLinkResource { private final com.azure.resourcemanager.storage.models.PrivateLinkResource innerModel; private
PrivateLinkResourceImpl(com.azure.resourcemanager.storage.models.PrivateLinkResource innerModel) { this.innerModel = innerModel; } @Override public String groupId() { return innerModel.groupId(); } @Override public List<String> requiredMemberNames() { return Collections.unmodifiableList(innerModel.requiredMembers()); } @Override public List<String> requiredDnsZoneNames() { return Collections.unmodifiableList(innerModel.requiredZoneNames()); } } private static final class PrivateEndpointConnectionImpl implements PrivateEndpointConnection { private final PrivateEndpointConnectionInner innerModel; private final PrivateEndpoint privateEndpoint; private final com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateLinkServiceConnectionState privateLinkServiceConnectionState; private final PrivateEndpointConnectionProvisioningState provisioningState; private PrivateEndpointConnectionImpl(PrivateEndpointConnectionInner innerModel) { this.innerModel = innerModel; this.privateEndpoint = innerModel.privateEndpoint() == null ? null : new PrivateEndpoint(innerModel.privateEndpoint().id()); this.privateLinkServiceConnectionState = innerModel.privateLinkServiceConnectionState() == null ? null : new com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateLinkServiceConnectionState( innerModel.privateLinkServiceConnectionState().status() == null ? null : com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateEndpointServiceConnectionStatus .fromString(innerModel.privateLinkServiceConnectionState().status().toString()), innerModel.privateLinkServiceConnectionState().description(), innerModel.privateLinkServiceConnectionState().actionRequired()); this.provisioningState = innerModel.provisioningState() == null ?
null : PrivateEndpointConnectionProvisioningState.fromString(innerModel.provisioningState().toString()); } @Override public String id() { return innerModel.id(); } @Override public String name() { return innerModel.name(); } @Override public String type() { return innerModel.type(); } @Override public PrivateEndpoint privateEndpoint() { return privateEndpoint; } @Override public com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateLinkServiceConnectionState privateLinkServiceConnectionState() { return privateLinkServiceConnectionState; } @Override public PrivateEndpointConnectionProvisioningState provisioningState() { return provisioningState; } } }
It seems that once a client is CLOSED, it cannot be start()-ed again. I am not sure whether we need to make the client reusable. Alternatively, we could add documentation to the closeAsync() API stating that a closed client cannot be restarted, in case callers do not intend to release the client after closing it.
/**
 * Starts the client and connects to the service.
 * <p>
 * The client must currently be STOPPED. The authoritative, race-free check is the
 * STOPPED-to-CONNECTING compare-and-set inside {@code Mono.defer}; internal state
 * (the user-stop flag and the sequence-ack id) is reset only AFTER that transition
 * succeeds, so a caller that loses the race can no longer clobber the sequence-ack
 * state of an already-connected client (previously the reset happened before the CAS).
 *
 * @return a Mono that completes once the WebSocket session has been established;
 *         errors with IllegalStateException if the client is not STOPPED, and rolls
 *         the client back to STOPPED on any connect failure.
 */
public Mono<Void> start() {
    // Fail fast at call time for an obviously wrong state; harmless if racy, since
    // the CAS below re-validates at subscription time.
    if (clientState.get() != WebPubSubClientState.STOPPED) {
        return Mono.error(logger.logExceptionAsError(
            new IllegalStateException("Failed to start. Client is not STOPPED.")));
    }
    return Mono.defer(() -> {
        // Own the STOPPED -> CONNECTING transition before mutating any internal state.
        boolean success = clientState.changeStateOn(WebPubSubClientState.STOPPED, WebPubSubClientState.CONNECTING);
        if (!success) {
            return Mono.error(logger.logExceptionAsError(
                new IllegalStateException("Failed to start. Client is not STOPPED.")));
        }
        isStoppedByUser.set(false);
        sequenceAckId.clear();
        return Mono.empty();
    }).then(clientAccessUriProvider.flatMap(uri -> Mono.fromCallable(() -> {
        this.endpoint = new ClientEndpoint();
        ClientEndpointConfig config = ClientEndpointConfig.Builder.create()
            .preferredSubprotocols(Collections.singletonList(webPubSubProtocol.getName()))
            .encoders(Collections.singletonList(MessageEncoder.class))
            .decoders(Collections.singletonList(MessageDecoder.class))
            .build();
        // connectToServer is blocking; keep it off the parallel schedulers.
        this.session = clientManager.connectToServer(endpoint, config, new URI(uri));
        return (Void) null;
    }).subscribeOn(Schedulers.boundedElastic()))).doOnError(error -> {
        // Any failure while connecting returns the client to STOPPED and fires the stopped event.
        handleClientStop();
    });
}
if (clientState.get() != WebPubSubClientState.STOPPED) {
/**
 * Starts the client and connects to the service, using the default (absent) options.
 *
 * @return the task that completes once the client has started.
 */
public Mono<Void> start() {
    // Delegate to the parameterized overload with no argument supplied.
    return start(null);
}
class WebPubSubAsyncClient implements AsyncCloseable { private ClientLogger logger; private final Mono<String> clientAccessUriProvider; private final WebPubSubProtocol webPubSubProtocol; private final boolean autoReconnect; private final boolean autoRestoreGroup; private final ClientManager clientManager; private Endpoint endpoint; private Session session; private String connectionId; private String reconnectionToken; private static final AtomicLong ACK_ID = new AtomicLong(0); private final Sinks.Many<GroupMessageEvent> groupMessageEventSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); private final Sinks.Many<ServerMessageEvent> serverMessageEventSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); private Sinks.Many<AckMessage> ackMessageSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); private final Sinks.Many<ConnectedEvent> connectedEventSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); private final Sinks.Many<DisconnectedEvent> disconnectedEventSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); private final Sinks.Many<StoppedEvent> stoppedEventSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); private final SequenceAckId sequenceAckId = new SequenceAckId(); private final AtomicReference<Disposable> sequenceAckTask = new AtomicReference<>(); private final ClientState clientState = new ClientState(); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final Sinks.Empty<Void> isClosedMono = Sinks.empty(); private final AtomicBoolean isStoppedByUser = new AtomicBoolean(); private final AtomicReference<Sinks.Empty<Void>> isStoppedByUserMono = new AtomicReference<>(); private final ConcurrentMap<String, WebPubSubGroup> groups = new ConcurrentHashMap<>(); private final Retry sendMessageRetrySpec; private static final Duration 
ACK_TIMEOUT = Duration.ofSeconds(30); private static final Duration RECOVER_TIMEOUT = Duration.ofSeconds(30); private static final Retry RECONNECT_RETRY_SPEC = Retry.backoff(Long.MAX_VALUE, Duration.ofSeconds(1)) .filter(thr -> !(thr instanceof StopReconnectException)); WebPubSubAsyncClient(Mono<String> clientAccessUriProvider, WebPubSubProtocol webPubSubProtocol, RetryStrategy retryStrategy, boolean autoReconnect, boolean autoRestoreGroup) { this.logger = new ClientLogger(WebPubSubAsyncClient.class); this.clientAccessUriProvider = Objects.requireNonNull(clientAccessUriProvider); this.webPubSubProtocol = Objects.requireNonNull(webPubSubProtocol); this.autoReconnect = autoReconnect; this.autoRestoreGroup = autoRestoreGroup; this.clientManager = ClientManager.createClient(); Objects.requireNonNull(retryStrategy); this.sendMessageRetrySpec = Retry.from(signals -> { AtomicInteger retryCount = new AtomicInteger(0); return signals.concatMap(s -> { Mono<Retry.RetrySignal> ret = Mono.error(s.failure()); if (s.failure() instanceof SendMessageFailedException) { if (((SendMessageFailedException) s.failure()).isTransient()) { int retryAttempt = retryCount.incrementAndGet(); if (retryAttempt <= retryStrategy.getMaxRetries()) { ret = Mono.delay(retryStrategy.calculateRetryDelay(retryAttempt)) .then(Mono.just(s)); } } } return ret; }); }); } /** * Gets the connection ID. * * @return the connection ID. */ public String getConnectionId() { return connectionId; } /** * Starts the client for connecting to the server. * * @return the task. */ /** * Stops the client for disconnecting from the server. * * @return the task. */ public Mono<Void> stop() { if (clientState.get() == WebPubSubClientState.CLOSED) { return Mono.error(logger.logExceptionAsError( new IllegalStateException("Failed to stop. 
Client is CLOSED."))); } return Mono.defer(() -> { isStoppedByUser.set(true); isStoppedByUserMono.set(null); groups.clear(); if (session != null && session.isOpen()) { return Mono.fromCallable(() -> { session.close(CloseReasons.NO_STATUS_CODE.getCloseReason()); return (Void) null; }).subscribeOn(Schedulers.boundedElastic()); } else { if (clientState.get() == WebPubSubClientState.STOPPED) { return Mono.empty(); } else if (clientState.changeStateOn(WebPubSubClientState.DISCONNECTED, WebPubSubClientState.STOPPED)) { handleClientStop(); return Mono.empty(); } else { Sinks.Empty<Void> sink = Sinks.empty(); isStoppedByUserMono.set(sink); return sink.asMono(); } } }); } /** * Closes the client. * * @return the task. */ public Mono<Void> closeAsync() { if (this.isDisposed.getAndSet(true)) { return this.isClosedMono.asMono(); } else { return stop().then(Mono.fromRunnable(() -> { this.clientState.changeState(WebPubSubClientState.CLOSED); groupMessageEventSink.emitComplete( emitFailureHandler("Unable to emit Complete to groupMessageEventSink")); serverMessageEventSink.emitComplete( emitFailureHandler("Unable to emit Complete to groupMessageEventSink")); connectedEventSink.emitComplete( emitFailureHandler("Unable to emit Complete to connectedEventSink")); disconnectedEventSink.emitComplete( emitFailureHandler("Unable to emit Complete to disconnectedEventSink")); stoppedEventSink.emitComplete( emitFailureHandler("Unable to emit Complete to disconnectedEventSink")); isClosedMono.emitEmpty(emitFailureHandler("Unable to emit Close")); })); } } /** * Joins a group. * * @param group the group name. * @return the result. */ public Mono<WebPubSubResult> joinGroup(String group) { return joinGroup(group, nextAckId()); } /** * Joins a group. * * @param group the group name. * @param ackId the ackId. * @return the result. 
*/
    public Mono<WebPubSubResult> joinGroup(String group, long ackId) {
        // Send the JoinGroup frame, wait for the matching ack (retrying per sendMessageRetrySpec),
        // then mark the group as joined in the local cache so it can be auto-restored on reconnect.
        return sendMessage(new JoinGroupMessage().setGroup(group).setAckId(ackId))
            .then(waitForAckMessage(ackId)).retryWhen(sendMessageRetrySpec)
            .map(result -> {
                groups.compute(group, (k, v) -> {
                    if (v == null) {
                        return new WebPubSubGroup(group).setJoined(true);
                    } else {
                        return v.setJoined(true);
                    }
                });
                return result;
            });
    }

    /**
     * Leaves a group.
     *
     * @param group the group name.
     * @return the result.
     */
    public Mono<WebPubSubResult> leaveGroup(String group) {
        return leaveGroup(group, nextAckId());
    }

    /**
     * Leaves a group.
     *
     * @param group the group name.
     * @param ackId the ackId.
     * @return the result.
     */
    public Mono<WebPubSubResult> leaveGroup(String group, long ackId) {
        // Mirror of joinGroup: send LeaveGroup, await the ack, then mark the cached group as not joined.
        return sendMessage(new LeaveGroupMessage().setGroup(group).setAckId(ackId))
            .then(waitForAckMessage(ackId)).retryWhen(sendMessageRetrySpec)
            .map(result -> {
                groups.compute(group, (k, v) -> {
                    if (v == null) {
                        return new WebPubSubGroup(group).setJoined(false);
                    } else {
                        return v.setJoined(false);
                    }
                });
                return result;
            });
    }

    /**
     * Sends message to group.
     *
     * @param group the group name.
     * @param content the data.
     * @param dataType the data type.
     * @return the result.
     */
    public Mono<WebPubSubResult> sendToGroup(String group, BinaryData content, WebPubSubDataType dataType) {
        return sendToGroup(group, content, dataType, new SendToGroupOptions().setAckId(nextAckId()));
    }

    /**
     * Sends message to group.
     *
     * @param group the group name.
     * @param content the data.
     * @param dataType the data type.
     * @param options the options.
     * @return the result.
     */
    public Mono<WebPubSubResult> sendToGroup(String group, BinaryData content, WebPubSubDataType dataType,
        SendToGroupOptions options) {
        Objects.requireNonNull(group);
        Objects.requireNonNull(content);
        Objects.requireNonNull(dataType);
        Objects.requireNonNull(options);

        long ackId = options.getAckId() != null ? options.getAckId() : nextAckId();
        BinaryData data = content;
        // Binary/protobuf payloads are Base64-encoded before being placed in the outgoing frame.
        if (dataType == WebPubSubDataType.BINARY || dataType == WebPubSubDataType.PROTOBUF) {
            data = BinaryData.fromBytes(Base64.getEncoder().encode(content.toBytes()));
        }
        SendToGroupMessage message = new SendToGroupMessage()
            .setGroup(group)
            .setData(data)
            .setDataType(dataType.name().toLowerCase(Locale.ROOT))
            .setAckId(ackId)
            .setNoEcho(options.getNoEcho());
        Mono<Void> sendMessageMono = sendMessage(message);
        // Fire-and-forget skips waiting for the service ack and reports a result without an ackId.
        Mono<WebPubSubResult> responseMono = options.getFireAndForget()
            ? sendMessageMono.then(Mono.just(new WebPubSubResult(null)))
            : sendMessageMono.then(waitForAckMessage(ackId));
        return responseMono.retryWhen(sendMessageRetrySpec);
    }

    /**
     * Sends event.
     *
     * @param eventName the event name.
     * @param content the data.
     * @param dataType the data type.
     * @return the result.
     */
    public Mono<WebPubSubResult> sendEvent(String eventName, BinaryData content, WebPubSubDataType dataType) {
        return sendEvent(eventName, content, dataType, new SendEventOptions().setAckId(nextAckId()));
    }

    /**
     * Sends event.
     *
     * @param eventName the event name.
     * @param content the data.
     * @param dataType the data type.
     * @param options the options.
     * @return the result.
     */
    public Mono<WebPubSubResult> sendEvent(String eventName, BinaryData content, WebPubSubDataType dataType,
        SendEventOptions options) {
        Objects.requireNonNull(eventName);
        Objects.requireNonNull(content);
        Objects.requireNonNull(dataType);
        Objects.requireNonNull(options);

        long ackId = options.getAckId() != null ? options.getAckId() : nextAckId();
        BinaryData data = content;
        // Same Base64 treatment as sendToGroup for binary/protobuf payloads.
        if (dataType == WebPubSubDataType.BINARY || dataType == WebPubSubDataType.PROTOBUF) {
            data = BinaryData.fromBytes(Base64.getEncoder().encode(content.toBytes()));
        }
        SendEventMessage message = new SendEventMessage()
            .setEvent(eventName)
            .setData(data)
            .setDataType(dataType.name().toLowerCase(Locale.ROOT))
            .setAckId(ackId);
        Mono<Void> sendMessageMono = sendMessage(message);
        Mono<WebPubSubResult> responseMono = options.getFireAndForget()
            ? sendMessageMono.then(Mono.just(new WebPubSubResult(null)))
            : sendMessageMono.then(waitForAckMessage(ackId));
        return responseMono.retryWhen(sendMessageRetrySpec);
    }

    /**
     * Receives group message events.
     *
     * @return the Publisher of group message events.
     */
    public Flux<GroupMessageEvent> receiveGroupMessageEvents() {
        return groupMessageEventSink.asFlux();
    }

    /**
     * Receives server message events.
     *
     * @return the Publisher of server message events.
     */
    public Flux<ServerMessageEvent> receiveServerMessageEvents() {
        return serverMessageEventSink.asFlux();
    }

    /**
     * Receives connected events.
     *
     * @return the Publisher of connected events.
     */
    public Flux<ConnectedEvent> receiveConnectedEvents() {
        return connectedEventSink.asFlux();
    }

    /**
     * Receives disconnected events.
     *
     * @return the Publisher of disconnected events.
     */
    public Flux<DisconnectedEvent> receiveDisconnectedEvents() {
        return disconnectedEventSink.asFlux();
    }

    /**
     * Receives stopped events.
     *
     * @return the Publisher of stopped events.
     */
    public Flux<StoppedEvent> receiveStoppedEvents() {
        return stoppedEventSink.asFlux();
    }

    // Hands out the next client-generated ackId, wrapping back to 0 on overflow.
    // NOTE(review): getAndUpdate returns the PREVIOUS value, so the first id handed out is 0 and the
    // stored counter runs one ahead — confirm intended (updateAndGet would return the incremented value).
    private long nextAckId() {
        return ACK_ID.getAndUpdate(value -> {
            if (++value < 0) {
                value = 0;
            }
            return value;
        });
    }

    // All inbound AckMessage frames fan out through this sink; waitForAckMessage filters by ackId.
    private Flux<AckMessage> receiveAckMessages() {
        return ackMessageSink.asFlux();
    }

    // Sends one frame on the websocket; completes when the async send reports OK, otherwise errors
    // with a transient SendMessageFailedException so the retry spec can kick in.
    private Mono<Void> sendMessage(WebPubSubMessage message) {
        return checkStateBeforeSend().then(Mono.create(sink -> {
            if (logger.canLogAtLevel(LogLevel.VERBOSE)) {
                try {
                    String json = JacksonAdapter.createDefaultSerializerAdapter()
                        .serialize(message, SerializerEncoding.JSON);
                    logger.atVerbose().addKeyValue("message", json).log("Send message");
                } catch (IOException e) {
                    // best-effort verbose logging only; serialization failure is deliberately ignored
                }
            }
            session.getAsyncRemote().sendObject(message, sendResult -> {
                if (sendResult.isOK()) {
                    sink.success();
                } else {
                    sink.error(logSendMessageFailedException(
                        "Failed to send message.", sendResult.getException(), true, message));
                }
            });
        }));
    }

    // Guards every send: disposed client is a hard error; non-CONNECTED states fail with a
    // transient error while RECOVERING/CONNECTING (retryable), non-transient otherwise.
    private Mono<Void> checkStateBeforeSend() {
        return Mono.defer(() -> {
            if (isDisposed.get()) {
                return Mono.error(logger.logExceptionAsError(
                    new IllegalStateException("Failed to send message. WebPubSubClient is CLOSED.")));
            }
            WebPubSubClientState state = clientState.get();
            if (state != WebPubSubClientState.CONNECTED) {
                return Mono.error(logSendMessageFailedException(
                    "Failed to send message. Client is " + state.name() + ".", null,
                    state == WebPubSubClientState.RECOVERING || state == WebPubSubClientState.CONNECTING,
                    (Long) null));
            }
            if (session == null || !session.isOpen()) {
                return Mono.error(logSendMessageFailedException(
                    "Failed to send message. Websocket session is not opened.", null, false, (Long) null));
            } else {
                return Mono.empty();
            }
        });
    }

    // Waits for the ack matching ackId. Success (or a "Duplicate" error, treated as success) maps to
    // a result; other errors fail non-transiently. The timeout falls back to empty, which
    // switchIfEmpty converts into a transient "ack not received" failure so it can be retried.
    private Mono<WebPubSubResult> waitForAckMessage(long ackId) {
        return receiveAckMessages()
            .filter(m -> ackId == m.getAckId())
            .next()
            .onErrorMap(throwable -> logSendMessageFailedException(
                "Acknowledge from the service not received.", throwable, true, ackId))
            .flatMap(m -> {
                if (m.isSuccess() || (m.getError() != null && "Duplicate".equals(m.getError().getName()))) {
                    return Mono.just(new WebPubSubResult(m.getAckId()));
                } else {
                    return Mono.error(logSendMessageFailedException(
                        "Received non-success acknowledge from the service.", null, false, ackId,
                        m.getError()));
                }
            })
            .timeout(ACK_TIMEOUT, Mono.empty())
            .switchIfEmpty(Mono.defer(() -> Mono.error(logSendMessageFailedException(
                "Acknowledge from the service not received.", null, true, ackId))));
    }

    // Runs after the websocket opens. If the user requested stop while we were connecting, close the
    // session again; otherwise start the periodic sequence-ack loop (reliable protocols only) and
    // re-join previously joined groups when autoRestoreGroup is on.
    private void handleSessionOpen() {
        clientState.changeState(WebPubSubClientState.CONNECTED);

        if (isStoppedByUser.compareAndSet(true, false)) {
            // stop() raced with connect: close the freshly opened session on a worker thread
            Mono.fromCallable(() -> {
                if (session != null && session.isOpen()) {
                    session.close(CloseReasons.NO_STATUS_CODE.getCloseReason());
                }
                return (Void) null;
            }).subscribeOn(Schedulers.boundedElastic()).subscribe(null, thr -> {
                logger.atWarning()
                    .log("Failed to close session: " + thr.getMessage());
            });
        } else {
            if (webPubSubProtocol.isReliable()) {
                // Every 5s, push the latest processed sequence id back to the service (if it changed).
                Flux<Void> sequenceAckFlux = Flux.interval(Duration.ofSeconds(5)).concatMap(ignored -> {
                    if (clientState.get() == WebPubSubClientState.CONNECTED
                        && session != null && session.isOpen()) {
                        Long id = sequenceAckId.getUpdated();
                        if (id != null) {
                            return sendMessage(new SequenceAckMessage().setSequenceId(id));
                        } else {
                            return Mono.empty();
                        }
                    } else {
                        return Mono.empty();
                    }
                });
                // Replace (and dispose) any loop left over from a previous session.
                Disposable previousTask = sequenceAckTask.getAndSet(sequenceAckFlux.subscribe());
                if (previousTask != null) {
                    previousTask.dispose();
                }
            }
            if (autoRestoreGroup) {
                List<Mono<WebPubSubResult>> restoreGroupMonoList = groups.values().stream()
                    .filter(WebPubSubGroup::isJoined)
                    .map(v -> joinGroup(v.getName()).onErrorComplete())
                    .collect(Collectors.toList());
                Flux.mergeSequentialDelayError(restoreGroupMonoList,
                        Schedulers.DEFAULT_POOL_SIZE, Schedulers.DEFAULT_POOL_SIZE)
                    .subscribeOn(Schedulers.boundedElastic()).subscribe(null, thr -> {
                        logger.atWarning()
                            .log("Failed to auto restore group: " + thr.getMessage());
                    });
            }
        }
    }

    // Runs after the websocket closes. User-initiated stop and policy violations end the client;
    // otherwise attempt recovery (reliable protocol with connectionId + reconnectionToken) falling
    // back to a full reconnect after RECOVER_TIMEOUT.
    private void handleSessionClose(CloseReason closeReason) {
        clientState.changeState(WebPubSubClientState.DISCONNECTED);

        if (isStoppedByUser.compareAndSet(true, false)) {
            handleClientStop();
        } else if (closeReason.getCloseCode() == CloseReason.CloseCodes.VIOLATED_POLICY) {
            handleClientStop();
        } else {
            if (!webPubSubProtocol.isReliable() || reconnectionToken == null || connectionId == null) {
                handleNoRecovery().subscribe(null, thr -> {
                    logger.atWarning()
                        .log("Failed to auto reconnect session: " + thr.getMessage());
                });
            } else {
                handleRecovery().timeout(RECOVER_TIMEOUT, Mono.defer(() -> {
                    clientState.changeState(WebPubSubClientState.DISCONNECTED);
                    return handleNoRecovery();
                })).subscribe(null, thr -> {
                    logger.atWarning()
                        .log("Failed to recover session: " + thr.getMessage());
                });
            }
        }
    }

    // Full reconnect path (no session recovery): fetch a fresh client access URI and open a brand-new
    // websocket, retrying with RECONNECT_RETRY_SPEC. StopReconnectException aborts the retry loop.
    private Mono<Void> handleNoRecovery() {
        return Mono.defer(() -> {
            if (isStoppedByUser.compareAndSet(true, false)) {
                handleClientStop();
                return Mono.empty();
            } else if (autoReconnect) {
                boolean success = clientState.changeStateOn(WebPubSubClientState.DISCONNECTED,
                    WebPubSubClientState.CONNECTING);
                if (!success) {
                    return Mono.error(logger.logExceptionAsError(
                        new StopReconnectException("Failed to start. Client is not DISCONNECTED.")));
                }
                return Mono.defer(() -> {
                    // re-check before each retry attempt so a user stop() interrupts the loop
                    if (isStoppedByUser.compareAndSet(true, false)) {
                        return Mono.error(logger.logExceptionAsWarning(
                            new StopReconnectException("Client is stopped by user.")));
                    } else {
                        return Mono.empty();
                    }
                }).then(clientAccessUriProvider.flatMap(uri -> Mono.fromCallable(() -> {
                    this.endpoint = new ClientEndpoint();
                    ClientEndpointConfig config = ClientEndpointConfig.Builder.create()
                        .preferredSubprotocols(Collections.singletonList(webPubSubProtocol.getName()))
                        .encoders(Collections.singletonList(MessageEncoder.class))
                        .decoders(Collections.singletonList(MessageDecoder.class))
                        .build();
                    this.session = clientManager.connectToServer(endpoint, config, new URI(uri));
                    return (Void) null;
                }).subscribeOn(Schedulers.boundedElastic()))).retryWhen(RECONNECT_RETRY_SPEC).doOnError(error -> {
                    handleClientStop();
                });
            } else {
                handleClientStop();
                return Mono.empty();
            }
        });
    }

    // Recovery path: reconnect to the SAME logical connection by appending awps_connection_id and
    // awps_reconnection_token to the client access URI.
    private Mono<Void> handleRecovery() {
        return Mono.defer(() -> {
            if (isStoppedByUser.compareAndSet(true, false)) {
                handleClientStop();
                return Mono.empty();
            } else {
                boolean success = clientState.changeStateOn(WebPubSubClientState.DISCONNECTED,
                    WebPubSubClientState.RECOVERING);
                if (!success) {
                    return Mono.error(logger.logExceptionAsError(
                        new StopReconnectException("Failed to recover. Client is not DISCONNECTED.")));
                }
                return Mono.defer(() -> {
                    if (isStoppedByUser.compareAndSet(true, false)) {
                        return Mono.error(logger.logExceptionAsWarning(
                            new StopReconnectException("Client is stopped by user.")));
                    } else {
                        return Mono.empty();
                    }
                }).then(clientAccessUriProvider.flatMap(uri -> Mono.fromCallable(() -> {
                    String recoveryUri = UrlBuilder.parse(uri)
                        .addQueryParameter("awps_connection_id", connectionId)
                        .addQueryParameter("awps_reconnection_token", reconnectionToken)
                        .toString();
                    this.endpoint = new ClientEndpoint();
                    ClientEndpointConfig config = ClientEndpointConfig.Builder.create()
                        .preferredSubprotocols(Collections.singletonList(webPubSubProtocol.getName()))
                        .encoders(Collections.singletonList(MessageEncoder.class))
                        .decoders(Collections.singletonList(MessageDecoder.class))
                        .build();
                    this.session = clientManager.connectToServer(endpoint, config, new URI(recoveryUri));
                    return (Void) null;
                }).subscribeOn(Schedulers.boundedElastic()))).retryWhen(RECONNECT_RETRY_SPEC).doOnError(error -> {
                    handleClientStop();
                });
            }
        });
    }

    // Final teardown for a stop: reset connection state, replace the ack sink (completing the old one
    // so pending waiters finish), release any stop() waiter, cancel the sequence-ack loop and emit
    // a StoppedEvent.
    private void handleClientStop() {
        clientState.changeState(WebPubSubClientState.STOPPED);
        session = null;
        connectionId = null;
        reconnectionToken = null;

        ackMessageSink.emitComplete(emitFailureHandler("Unable to emit Complete to ackMessageSink"));
        ackMessageSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false);

        Sinks.Empty<Void> mono = isStoppedByUserMono.getAndSet(null);
        if (mono != null) {
            mono.emitEmpty(emitFailureHandler("Unable to emit Stopped"));
        }

        Disposable task = sequenceAckTask.getAndSet(null);
        if (task != null) {
            task.dispose();
        }

        stoppedEventSink.emitNext(new StoppedEvent(), emitFailureHandler("Unable to emit StoppedEvent"));
    }

    // Rebuilds the logger so subsequent log lines carry the current connectionId in their context.
    private void updateLogger(String connectionId) {
        logger = new ClientLogger(WebPubSubAsyncClient.class,
            LoggingUtils.createContextWithConnectionId(connectionId));
    }

    // Jakarta WebSocket endpoint: decodes frames and dispatches them to the per-type sinks.
    private class ClientEndpoint extends Endpoint {
        @Override
        public void onOpen(Session session, EndpointConfig endpointConfig) {
            logger.atVerbose().log("Session opened");
            session.addMessageHandler(new MessageHandler.Whole<WebPubSubMessage>() {
                @Override
                public void onMessage(WebPubSubMessage webPubSubMessage) {
                    if (logger.canLogAtLevel(LogLevel.VERBOSE)) {
                        try {
                            String json = JacksonAdapter.createDefaultSerializerAdapter()
                                .serialize(webPubSubMessage, SerializerEncoding.JSON);
                            logger.atVerbose().addKeyValue("message", json).log("Message received");
                        } catch (IOException e) {
                            // verbose logging is best-effort
                        }
                    }
                    if (webPubSubMessage instanceof GroupDataMessage) {
                        GroupDataMessage groupDataMessage = (GroupDataMessage) webPubSubMessage;
                        groupMessageEventSink.emitNext(
                            new GroupMessageEvent(groupDataMessage),
                            emitFailureHandler("Unable to emit GroupMessageEvent"));
                        // track the highest sequence id seen, for the periodic SequenceAckMessage
                        sequenceAckId.update(groupDataMessage.getSequenceId());
                    } else if (webPubSubMessage instanceof ServerDataMessage) {
                        ServerDataMessage serverDataMessage = (ServerDataMessage) webPubSubMessage;
                        serverMessageEventSink.emitNext(
                            new ServerMessageEvent(serverDataMessage),
                            emitFailureHandler("Unable to emit ServerMessageEvent"));
                        sequenceAckId.update(serverDataMessage.getSequenceId());
                    } else if (webPubSubMessage instanceof AckMessage) {
                        // NOTE(review): failure message says GroupMessageEvent but this is the ack
                        // sink — looks like a copy/paste slip in the log text.
                        ackMessageSink.emitNext((AckMessage) webPubSubMessage,
                            emitFailureHandler("Unable to emit GroupMessageEvent"));
                    } else if (webPubSubMessage instanceof ConnectedMessage) {
                        ConnectedMessage connectedMessage = (ConnectedMessage) webPubSubMessage;
                        connectionId = connectedMessage.getConnectionId();
                        reconnectionToken = connectedMessage.getReconnectionToken();
                        updateLogger(connectionId);
                        connectedEventSink.emitNext(new ConnectedEvent(
                            connectionId, connectedMessage.getUserId()),
                            emitFailureHandler("Unable to emit ConnectedEvent"));
                    } else if (webPubSubMessage instanceof DisconnectedMessage) {
                        disconnectedEventSink.emitNext(new DisconnectedEvent(
                            connectionId, (DisconnectedMessage) webPubSubMessage),
                            emitFailureHandler("Unable to emit DisconnectedEvent"));
                    }
                }
            });
            handleSessionOpen();
        }

        @Override
        public void onClose(Session session, CloseReason closeReason) {
            logger.atVerbose().addKeyValue("code", closeReason.getCloseCode()).log("Session closed");
            handleSessionClose(closeReason);
        }

        @Override
        public void onError(Session session, Throwable thr) {
            logger.atWarning()
                .log("Error from session: " + thr.getMessage());
        }
    }

    // Marker exception used to break out of the RECONNECT_RETRY_SPEC retry loop.
    private static final class StopReconnectException extends RuntimeException {
        private StopReconnectException(String message) {
            super(message);
        }
    }

    // Monotonic tracker for the highest received sequence id; getUpdated() returns it only when it
    // changed since the last call, so unchanged ids are not re-acked.
    private static final class SequenceAckId {
        private final AtomicLong sequenceId = new AtomicLong(0);
        private final AtomicBoolean updated = new AtomicBoolean(false);

        private void clear() {
            sequenceId.set(0);
            updated.set(false);
        }

        private Long getUpdated() {
            if (updated.compareAndSet(true, false)) {
                return sequenceId.get();
            } else {
                return null;
            }
        }

        private void update(long id) {
            long previousId = sequenceId.getAndUpdate(existId -> Math.max(id, existId));
            if (previousId < id) {
                updated.set(true);
            }
        }
    }

    // Thin wrapper over the client-state enum that logs every transition; changeStateOn is a CAS.
    final class ClientState {
        private final AtomicReference<WebPubSubClientState> clientState =
            new AtomicReference<>(WebPubSubClientState.STOPPED);

        WebPubSubClientState get() {
            return clientState.get();
        }

        WebPubSubClientState changeState(WebPubSubClientState state) {
            WebPubSubClientState previousState = clientState.getAndSet(state);
            logger.atInfo()
                .addKeyValue("currentClientState", state)
                .addKeyValue("previousClientState", previousState)
                .log("Client state changed.");
            return previousState;
        }

        boolean changeStateOn(WebPubSubClientState previousState, WebPubSubClientState state) {
            boolean success = clientState.compareAndSet(previousState, state);
            if (success) {
                logger.atInfo()
                    .addKeyValue("currentClientState", state)
                    .addKeyValue("previousClientState", previousState)
                    .log("Client state changed.");
            }
            return success;
        }
    }

    WebPubSubClientState getClientState() {
        return clientState.get();
    }

    // Shared Sinks emit-failure handler: log and do not retry the emission.
    private Sinks.EmitFailureHandler emitFailureHandler(String message) {
        return (signalType, emitResult) -> {
            LoggingUtils.addSignalTypeAndResult(this.logger.atWarning(), signalType, emitResult)
                .log(message);
            return false;
        };
    }

    // Overloads funnel into the 5-arg variant; the message-based one extracts the ackId when present.
    private RuntimeException logSendMessageFailedException(
        String errorMessage, Throwable cause, boolean isTransient, WebPubSubMessage message) {
        return logSendMessageFailedException(errorMessage, cause, isTransient,
            (message instanceof WebPubSubMessageAck) ? ((WebPubSubMessageAck) message).getAckId() : null);
    }

    private RuntimeException logSendMessageFailedException(
        String errorMessage, Throwable cause, boolean isTransient, Long ackId) {
        return logSendMessageFailedException(errorMessage, cause, isTransient, ackId, null);
    }

    private RuntimeException logSendMessageFailedException(
        String errorMessage, Throwable cause, boolean isTransient, Long ackId, AckMessageError error) {
        return logger.logExceptionAsWarning(
            new SendMessageFailedException(errorMessage, cause, isTransient, ackId, error));
    }
}
// Reactive Web PubSub client: manages the websocket lifecycle (start/stop/close), group membership,
// message/event sending with ack tracking, and exposes inbound traffic as Reactor sinks.
class WebPubSubAsyncClient implements Closeable {

    private ClientLogger logger;
    private final AtomicReference<ClientLogger> loggerReference = new AtomicReference<>();

    private final Mono<String> clientAccessUrlProvider;
    private final WebPubSubProtocol webPubSubProtocol;
    private final boolean autoReconnect;
    private final boolean autoRestoreGroup;
    private final String applicationId;
    private final ClientEndpointConfiguration clientEndpointConfiguration;

    private final WebSocketClient webSocketClient;
    private WebSocketSession webSocketSession;

    // Multicast sinks fanning inbound traffic out to the receive*Events() fluxes.
    private Sinks.Many<GroupMessageEvent> groupMessageEventSink =
        Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false);
    private Sinks.Many<ServerMessageEvent> serverMessageEventSink =
        Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false);
    private Sinks.Many<AckMessage> ackMessageSink =
        Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false);
    private Sinks.Many<ConnectedEvent> connectedEventSink =
        Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false);
    private Sinks.Many<DisconnectedEvent> disconnectedEventSink =
        Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false);
    private Sinks.Many<StoppedEvent> stoppedEventSink =
        Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false);
    private Sinks.Many<RejoinGroupFailedEvent> rejoinGroupFailedEventSink =
        Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false);

    // Counter behind nextAckId().
    private final AtomicLong ackId = new AtomicLong(0);

    private WebPubSubConnection webPubSubConnection;
    private final AtomicReference<Disposable> sequenceAckTask = new AtomicReference<>();

    private final ClientState clientState = new ClientState();
    private final AtomicBoolean isDisposed = new AtomicBoolean();
    private final Sinks.Empty<Void> isClosedMono = Sinks.empty();
    private final AtomicBoolean isStoppedByUser = new AtomicBoolean();
    private final AtomicReference<Sinks.Empty<Void>> isStoppedByUserSink = new AtomicReference<>();

    // Local cache of joined groups, used to auto-restore membership after reconnect.
    private final ConcurrentMap<String, WebPubSubGroup> groups = new ConcurrentHashMap<>();

    private final Retry sendMessageRetrySpec;

    private static final Duration ACK_TIMEOUT = Duration.ofSeconds(30);
    private static final Duration RECOVER_TIMEOUT = Duration.ofSeconds(30);
    private static final Retry RECONNECT_RETRY_SPEC = Retry.backoff(Long.MAX_VALUE, Duration.ofSeconds(1))
        .filter(thr -> !(thr instanceof StopReconnectException));
    private static final Duration CLOSE_AFTER_SESSION_OPEN_DELAY = Duration.ofMillis(100);
    private static final Duration SEQUENCE_ACK_DELAY = Duration.ofSeconds(5);

    // sendMessageRetrySpec: retry only transient SendMessageFailedExceptions, up to
    // retryStrategy.getMaxRetries() attempts with the strategy's backoff delays.
    WebPubSubAsyncClient(WebSocketClient webSocketClient, Mono<String> clientAccessUrlProvider,
        WebPubSubProtocol webPubSubProtocol, String applicationId, String userAgent,
        RetryStrategy retryStrategy, boolean autoReconnect, boolean autoRestoreGroup) {
        updateLogger(applicationId, null);
        this.applicationId = applicationId;
        this.clientAccessUrlProvider = Objects.requireNonNull(clientAccessUrlProvider);
        this.webPubSubProtocol = Objects.requireNonNull(webPubSubProtocol);
        this.autoReconnect = autoReconnect;
        this.autoRestoreGroup = autoRestoreGroup;
        this.clientEndpointConfiguration = new ClientEndpointConfiguration(webPubSubProtocol.getName(), userAgent);
        this.webSocketClient = webSocketClient == null ? new WebSocketClientNettyImpl() : webSocketClient;
        this.sendMessageRetrySpec = Retry.from(signals -> {
            AtomicInteger retryCount = new AtomicInteger(0);
            return signals.concatMap(s -> {
                Mono<Retry.RetrySignal> ret = Mono.error(s.failure());
                if (s.failure() instanceof SendMessageFailedException) {
                    if (((SendMessageFailedException) s.failure()).isTransient()) {
                        int retryAttempt = retryCount.incrementAndGet();
                        if (retryAttempt <= retryStrategy.getMaxRetries()) {
                            ret = Mono.delay(retryStrategy.calculateRetryDelay(retryAttempt))
                                .then(Mono.just(s));
                        }
                    }
                }
                return ret;
            });
        });
    }

    /**
     * Gets the connection ID.
     *
     * @return the connection ID.
     */
    public String getConnectionId() {
        return webPubSubConnection == null ? null : webPubSubConnection.getConnectionId();
    }

    /**
     * Starts the client for connecting to the server.
     *
     * @return the task.
     */
    Mono<Void> start(Runnable postStartTask) {
        if (clientState.get() == WebPubSubClientState.CLOSED) {
            return Mono.error(logger.logExceptionAsError(
                new IllegalStateException("Failed to start. Client is CLOSED.")));
        }
        return Mono.defer(() -> {
            logger.atInfo()
                .addKeyValue("currentClientState", clientState.get())
                .log("Start client called.");

            // reset any leftover stop request before (re)connecting
            isStoppedByUser.set(false);
            isStoppedByUserSink.set(null);

            boolean success = clientState.changeStateOn(WebPubSubClientState.STOPPED,
                WebPubSubClientState.CONNECTING);
            if (!success) {
                return Mono.error(logger.logExceptionAsError(
                    new IllegalStateException("Failed to start. Client is not STOPPED.")));
            } else {
                if (postStartTask != null) {
                    postStartTask.run();
                }
                return Mono.empty();
            }
        }).then(clientAccessUrlProvider.flatMap(url -> Mono.<Void>fromRunnable(() -> {
            // connect on a worker thread; callbacks drive the rest of the lifecycle
            this.webSocketSession = webSocketClient.connectToServer(
                clientEndpointConfiguration, url, loggerReference,
                this::handleMessage, this::handleSessionOpen, this::handleSessionClose);
        }).subscribeOn(Schedulers.boundedElastic()))).doOnError(error -> {
            handleClientStop(false);
        });
    }

    /**
     * Stops the client for disconnecting from the server.
     *
     * @return the task.
     */
    public Mono<Void> stop() {
        if (clientState.get() == WebPubSubClientState.CLOSED) {
            return Mono.error(logger.logExceptionAsError(
                new IllegalStateException("Failed to stop. Client is CLOSED.")));
        }
        return Mono.defer(() -> {
            logger.atInfo()
                .addKeyValue("currentClientState", clientState.get())
                .log("Stop client called.");

            if (clientState.get() == WebPubSubClientState.STOPPED) {
                return Mono.empty();
            } else if (clientState.get() == WebPubSubClientState.STOPPING) {
                // another stop is in flight; wait for it to finish
                return getStoppedByUserMono();
            }

            isStoppedByUser.compareAndSet(false, true);
            groups.clear();

            WebSocketSession localSession = webSocketSession;
            if (localSession != null && localSession.isOpen()) {
                clientState.changeState(WebPubSubClientState.STOPPING);
                return Mono.fromCallable(() -> {
                    localSession.close();
                    return (Void) null;
                }).subscribeOn(Schedulers.boundedElastic());
            } else {
                if (clientState.changeStateOn(WebPubSubClientState.DISCONNECTED,
                    WebPubSubClientState.STOPPED)) {
                    handleClientStop();
                    return Mono.empty();
                } else {
                    // mid-transition (e.g. connecting/recovering); wait for the stop to complete
                    return getStoppedByUserMono();
                }
            }
        });
    }

    /**
     * Closes the client.
     */
    @Override
    public void close() {
        if (this.isDisposed.getAndSet(true)) {
            // already closing on another thread; block until that close completes
            this.isClosedMono.asMono().block();
        } else {
            stop().then(Mono.fromRunnable(() -> {
                this.clientState.changeState(WebPubSubClientState.CLOSED);
                isClosedMono.emitEmpty(emitFailureHandler("Unable to emit Close"));
            })).block();
        }
    }

    /**
     * Joins a group.
     *
     * @param group the group name.
     * @return the result.
     */
    public Mono<WebPubSubResult> joinGroup(String group) {
        return joinGroup(group, nextAckId());
    }

    /**
     * Joins a group.
     *
     * @param group the group name.
     * @param ackId the ackId. Client will provide auto increment ID, if set to {@code null}.
     * @return the result.
*/
    public Mono<WebPubSubResult> joinGroup(String group, Long ackId) {
        Objects.requireNonNull(group);
        if (ackId == null) {
            ackId = nextAckId();
        }
        // Send JoinGroup, await the matching ack (retrying per sendMessageRetrySpec), then mark the
        // group as joined in the local cache used for auto-restore after reconnect.
        return sendMessage(new JoinGroupMessage().setGroup(group).setAckId(ackId))
            .then(waitForAckMessage(ackId)).retryWhen(sendMessageRetrySpec)
            .map(result -> {
                groups.compute(group, (k, v) -> {
                    if (v == null) {
                        return new WebPubSubGroup(group).setJoined(true);
                    } else {
                        return v.setJoined(true);
                    }
                });
                return result;
            });
    }

    /**
     * Leaves a group.
     *
     * @param group the group name.
     * @return the result.
     */
    public Mono<WebPubSubResult> leaveGroup(String group) {
        return leaveGroup(group, nextAckId());
    }

    /**
     * Leaves a group.
     *
     * @param group the group name.
     * @param ackId the ackId. Client will provide auto increment ID, if set to {@code null}.
     * @return the result.
     */
    public Mono<WebPubSubResult> leaveGroup(String group, Long ackId) {
        Objects.requireNonNull(group);
        if (ackId == null) {
            ackId = nextAckId();
        }
        // Mirror of joinGroup: send LeaveGroup, await the ack, then mark the cached group not joined.
        return sendMessage(new LeaveGroupMessage().setGroup(group).setAckId(ackId))
            .then(waitForAckMessage(ackId)).retryWhen(sendMessageRetrySpec)
            .map(result -> {
                groups.compute(group, (k, v) -> {
                    if (v == null) {
                        return new WebPubSubGroup(group).setJoined(false);
                    } else {
                        return v.setJoined(false);
                    }
                });
                return result;
            });
    }

    /**
     * Sends message to group.
     *
     * @param group the group name.
     * @param content the data as WebPubSubDataType.TEXT.
     * @return the result.
     */
    public Mono<WebPubSubResult> sendToGroup(String group, String content) {
        return sendToGroup(group, BinaryData.fromString(content), WebPubSubDataType.TEXT);
    }

    /**
     * Sends message to group.
     *
     * @param group the group name.
     * @param content the data as WebPubSubDataType.TEXT.
     * @param options the options.
     * @return the result.
     */
    public Mono<WebPubSubResult> sendToGroup(String group, String content, SendToGroupOptions options) {
        return sendToGroup(group, BinaryData.fromString(content), WebPubSubDataType.TEXT, options);
    }

    /**
     * Sends message to group.
     *
     * @param group the group name.
     * @param content the data.
     * @param dataType the data type.
     * @return the result.
     */
    public Mono<WebPubSubResult> sendToGroup(String group, BinaryData content, WebPubSubDataType dataType) {
        return sendToGroup(group, content, dataType, new SendToGroupOptions().setAckId(nextAckId()));
    }

    /**
     * Sends message to group.
     *
     * @param group the group name.
     * @param content the data.
     * @param dataType the data type.
     * @param options the options.
     * @return the result.
     */
    public Mono<WebPubSubResult> sendToGroup(String group, BinaryData content, WebPubSubDataType dataType,
        SendToGroupOptions options) {
        Objects.requireNonNull(group);
        Objects.requireNonNull(content);
        Objects.requireNonNull(dataType);
        Objects.requireNonNull(options);

        // fire-and-forget sends carry no ackId; waitForAckMessage(null) short-circuits
        Long ackId = options.isFireAndForget()
            ? null
            : (options.getAckId() != null ? options.getAckId() : nextAckId());
        SendToGroupMessage message = new SendToGroupMessage()
            .setGroup(group)
            .setData(content)
            .setDataType(dataType.toString())
            .setAckId(ackId)
            .setNoEcho(options.isNoEcho());
        Mono<Void> sendMessageMono = sendMessage(message);
        Mono<WebPubSubResult> responseMono = sendMessageMono.then(waitForAckMessage(ackId));
        return responseMono.retryWhen(sendMessageRetrySpec);
    }

    /**
     * Sends event.
     *
     * @param eventName the event name.
     * @param content the data.
     * @param dataType the data type.
     * @return the result.
     */
    public Mono<WebPubSubResult> sendEvent(String eventName, BinaryData content, WebPubSubDataType dataType) {
        return sendEvent(eventName, content, dataType, new SendEventOptions().setAckId(nextAckId()));
    }

    /**
     * Sends event.
     *
     * @param eventName the event name.
     * @param content the data.
     * @param dataType the data type.
     * @param options the options.
     * @return the result.
*/
    public Mono<WebPubSubResult> sendEvent(String eventName, BinaryData content, WebPubSubDataType dataType,
        SendEventOptions options) {
        Objects.requireNonNull(eventName);
        Objects.requireNonNull(content);
        Objects.requireNonNull(dataType);
        Objects.requireNonNull(options);

        // fire-and-forget sends carry no ackId; waitForAckMessage(null) short-circuits
        Long ackId = options.isFireAndForget()
            ? null
            : (options.getAckId() != null ? options.getAckId() : nextAckId());
        SendEventMessage message = new SendEventMessage()
            .setEvent(eventName)
            .setData(content)
            .setDataType(dataType.toString())
            .setAckId(ackId);
        Mono<Void> sendMessageMono = sendMessage(message);
        Mono<WebPubSubResult> responseMono = sendMessageMono.then(waitForAckMessage(ackId));
        return responseMono.retryWhen(sendMessageRetrySpec);
    }

    /**
     * Receives group message events.
     *
     * @return the Publisher of group message events.
     */
    public Flux<GroupMessageEvent> receiveGroupMessageEvents() {
        return groupMessageEventSink.asFlux();
    }

    /**
     * Receives server message events.
     *
     * @return the Publisher of server message events.
     */
    public Flux<ServerMessageEvent> receiveServerMessageEvents() {
        return serverMessageEventSink.asFlux();
    }

    /**
     * Receives connected events.
     *
     * @return the Publisher of connected events.
     */
    public Flux<ConnectedEvent> receiveConnectedEvents() {
        return connectedEventSink.asFlux();
    }

    /**
     * Receives disconnected events.
     *
     * @return the Publisher of disconnected events.
     */
    public Flux<DisconnectedEvent> receiveDisconnectedEvents() {
        return disconnectedEventSink.asFlux();
    }

    /**
     * Receives stopped events.
     *
     * @return the Publisher of stopped events.
     */
    public Flux<StoppedEvent> receiveStoppedEvents() {
        return stoppedEventSink.asFlux();
    }

    /**
     * Receives re-join group failed events.
     *
     * @return the Publisher of re-join failed events.
     */
    public Flux<RejoinGroupFailedEvent> receiveRejoinGroupFailedEvents() {
        return rejoinGroupFailedEventSink.asFlux();
    }

    // Hands out the next client-generated ackId, wrapping back to 0 on overflow.
    // NOTE(review): getAndUpdate returns the PREVIOUS value, so the first id handed out is 0 and the
    // stored counter runs one ahead — confirm intended (updateAndGet would return the incremented value).
    private long nextAckId() {
        return ackId.getAndUpdate(value -> {
            if (++value < 0) {
                value = 0;
            }
            return value;
        });
    }

    // All inbound AckMessage frames fan out through this sink; waitForAckMessage filters by ackId.
    private Flux<AckMessage> receiveAckMessages() {
        return ackMessageSink.asFlux();
    }

    // Sends one frame on the websocket session; completes when the async send reports OK, otherwise
    // errors with a transient SendMessageFailedException so the retry spec can kick in.
    private Mono<Void> sendMessage(WebPubSubMessage message) {
        return checkStateBeforeSend().then(Mono.create(sink -> {
            webSocketSession.sendObjectAsync(message, sendResult -> {
                if (sendResult.isOK()) {
                    sink.success();
                } else {
                    sink.error(logSendMessageFailedException(
                        "Failed to send message.", sendResult.getException(), true, message));
                }
            });
        }));
    }

    // Guards every send: CLOSED is a hard error; other non-CONNECTED states fail as transient when a
    // reconnect may still succeed (RECOVERING/CONNECTING/RECONNECTING/DISCONNECTED).
    private Mono<Void> checkStateBeforeSend() {
        return Mono.defer(() -> {
            WebPubSubClientState state = clientState.get();
            if (state == WebPubSubClientState.CLOSED) {
                return Mono.error(logger.logExceptionAsError(
                    new IllegalStateException("Failed to send message. WebPubSubClient is CLOSED.")));
            }
            if (state != WebPubSubClientState.CONNECTED) {
                return Mono.error(logSendMessageFailedException(
                    "Failed to send message. Client is " + state.name() + ".", null,
                    state == WebPubSubClientState.RECOVERING
                        || state == WebPubSubClientState.CONNECTING
                        || state == WebPubSubClientState.RECONNECTING
                        || state == WebPubSubClientState.DISCONNECTED,
                    (Long) null));
            }
            if (webSocketSession == null || !webSocketSession.isOpen()) {
                return Mono.error(logSendMessageFailedException(
                    "Failed to send message. Websocket session is not opened.", null, false, (Long) null));
            } else {
                return Mono.empty();
            }
        });
    }

    // Returns a Mono that completes when the in-flight user stop finishes; installs a shared sink
    // via CAS so concurrent stop() callers wait on the same signal.
    private Mono<Void> getStoppedByUserMono() {
        Sinks.Empty<Void> sink = Sinks.empty();
        boolean isStoppedByUserMonoSet = isStoppedByUserSink.compareAndSet(null, sink);
        if (!isStoppedByUserMonoSet) {
            sink = isStoppedByUserSink.get();
        }
        return sink == null ? Mono.empty() : sink.asMono();
    }

    // Releases any waiter installed by getStoppedByUserMono().
    private void tryCompleteOnStoppedByUserSink() {
        Sinks.Empty<Void> mono = isStoppedByUserSink.getAndSet(null);
        if (mono != null) {
            mono.emitEmpty(emitFailureHandler("Unable to emit Stopped"));
        }
    }

    // Emits an event to a sink with verbose logging and the shared no-retry failure handler.
    private <EventT> void tryEmitNext(Sinks.Many<EventT> sink, EventT event) {
        logger.atVerbose()
            .addKeyValue("type", event.getClass().getSimpleName())
            .log("Send event");
        sink.emitNext(event, emitFailureHandler("Unable to emit " + event.getClass().getSimpleName()));
    }

    // Waits for the ack matching ackId; a null ackId (fire-and-forget) resolves immediately.
    // Success maps to a result; a "Duplicate" error is reported as a duplicate result; other errors
    // fail non-transiently. The timeout falls back to empty, which switchIfEmpty converts into a
    // transient "ack not received" failure so the send can be retried.
    private Mono<WebPubSubResult> waitForAckMessage(Long ackId) {
        if (ackId == null) {
            return Mono.just(new WebPubSubResult(null, false));
        }
        return receiveAckMessages()
            .filter(m -> ackId == m.getAckId())
            .next()
            .onErrorMap(throwable -> logSendMessageFailedException(
                "Acknowledge from the service not received.", throwable, true, ackId))
            .flatMap(m -> {
                if (m.isSuccess()) {
                    return Mono.just(new WebPubSubResult(m.getAckId(), false));
                } else if (m.getError() != null && "Duplicate".equals(m.getError().getName())) {
                    return Mono.just(new WebPubSubResult(m.getAckId(), true));
                } else {
                    return Mono.error(logSendMessageFailedException(
                        "Received non-success acknowledge from the service.", null, false, ackId,
                        m.getError()));
                }
            })
            .timeout(ACK_TIMEOUT, Mono.empty())
            .switchIfEmpty(Mono.defer(() -> Mono.error(logSendMessageFailedException(
                "Acknowledge from the service not received.", null, true, ackId))));
    }

    // Runs after the websocket opens. If the user requested stop while connecting, close the fresh
    // session after a short delay; otherwise start the periodic sequence-ack loop (reliable
    // protocols only) and re-join cached groups when autoRestoreGroup is on.
    private void handleSessionOpen(WebSocketSession session) {
        logger.atVerbose().log("Session opened");
        clientState.changeState(WebPubSubClientState.CONNECTED);

        if (isStoppedByUser.compareAndSet(true, false)) {
            Mono.delay(CLOSE_AFTER_SESSION_OPEN_DELAY).then(Mono.fromCallable(() -> {
                clientState.changeState(WebPubSubClientState.STOPPING);
                if (session != null && session.isOpen()) {
                    session.close();
                } else {
                    logger.atError()
                        .log("Failed to close session after session open");
                    handleClientStop();
                }
                return (Void) null;
            }).subscribeOn(Schedulers.boundedElastic())).subscribe(null, thr -> {
                logger.atError()
                    .log("Failed to close session after session open: " + thr.getMessage());
                handleClientStop();
            });
        } else {
            if (webPubSubProtocol.isReliable()) {
                // Every SEQUENCE_ACK_DELAY, push the latest processed sequence id back to the service
                // (only when it changed); on send failure re-flag it so the next tick retries.
                Flux<Void> sequenceAckFlux = Flux.interval(SEQUENCE_ACK_DELAY).concatMap(ignored -> {
                    if (clientState.get() == WebPubSubClientState.CONNECTED
                        && session != null && session.isOpen()) {
                        WebPubSubConnection connection = this.webPubSubConnection;
                        if (connection != null) {
                            Long id = connection.getSequenceAckId().getUpdated();
                            if (id != null) {
                                return sendMessage(new SequenceAckMessage().setSequenceId(id))
                                    .onErrorResume(error -> {
                                        connection.getSequenceAckId().setUpdated();
                                        return Mono.empty();
                                    });
                            } else {
                                return Mono.empty();
                            }
                        } else {
                            return Mono.empty();
                        }
                    } else {
                        return Mono.empty();
                    }
                });
                // Replace (and dispose) any loop left over from a previous session.
                Disposable previousTask = sequenceAckTask.getAndSet(sequenceAckFlux.subscribe());
                if (previousTask != null) {
                    previousTask.dispose();
                }
            }
            if (autoRestoreGroup) {
                // Re-join every cached joined group; individual failures become
                // RejoinGroupFailedEvents instead of failing the whole restore.
                List<Mono<WebPubSubResult>> restoreGroupMonoList = groups.values().stream()
                    .filter(WebPubSubGroup::isJoined)
                    .map(group -> joinGroup(group.getName()).onErrorResume(error -> {
                        if (error instanceof Exception) {
                            tryEmitNext(rejoinGroupFailedEventSink,
                                new RejoinGroupFailedEvent(group.getName(), (Exception) error));
                        }
                        return Mono.empty();
                    }))
                    .collect(Collectors.toList());
                Mono.delay(CLOSE_AFTER_SESSION_OPEN_DELAY)
                    .thenMany(Flux.mergeSequentialDelayError(restoreGroupMonoList,
                        Schedulers.DEFAULT_POOL_SIZE, Schedulers.DEFAULT_POOL_SIZE))
                    .subscribe(null, thr -> {
                        logger.atWarning()
                            .log("Failed to auto restore group: " + thr.getMessage());
                    });
            }
        }
    }

    // Runs after the websocket closes. User stop / STOPPING ends the client; close code 1008
    // (policy violation) forces a full reconnect; otherwise try session recovery when the reliable
    // protocol has both a connectionId and a reconnection token, falling back to reconnect after
    // RECOVER_TIMEOUT.
    private void handleSessionClose(CloseReason closeReason) {
        logger.atVerbose().addKeyValue("code", closeReason.getCloseCode()).log("Session closed");

        final int violatedPolicyStatusCode = 1008;

        if (clientState.get() == WebPubSubClientState.STOPPED) {
            return;
        }

        final String connectionId = this.getConnectionId();
        if (isStoppedByUser.compareAndSet(true, false)
            || clientState.get() == WebPubSubClientState.STOPPING) {
            handleConnectionClose();
            handleClientStop();
        } else if (closeReason.getCloseCode() == violatedPolicyStatusCode) {
            clientState.changeState(WebPubSubClientState.DISCONNECTED);
            handleConnectionClose();
            handleNoRecovery().subscribe(null, thr -> {
                logger.atWarning()
                    .log("Failed to auto reconnect session: " + thr.getMessage());
            });
        } else {
            final WebPubSubConnection connection = this.webPubSubConnection;
            final String reconnectionToken = connection == null ? null : connection.getReconnectionToken();
            if (!webPubSubProtocol.isReliable() || reconnectionToken == null || connectionId == null) {
                clientState.changeState(WebPubSubClientState.DISCONNECTED);
                handleConnectionClose();
                handleNoRecovery().subscribe(null, thr -> {
                    logger.atWarning()
                        .log("Failed to auto reconnect session: " + thr.getMessage());
                });
            } else {
                handleRecovery(connectionId, reconnectionToken).timeout(RECOVER_TIMEOUT, Mono.defer(() -> {
                    clientState.changeState(WebPubSubClientState.DISCONNECTED);
                    handleConnectionClose();
                    return handleNoRecovery();
                })).subscribe(null, thr -> {
                    logger.atWarning()
                        .log("Failed to recover or reconnect session: " + thr.getMessage());
                });
            }
        }
    }

    // Dispatches one decoded inbound frame to the appropriate sink. For data messages carrying a
    // sequence id, updateSequenceAckId returns false for already-seen ids (reliable-protocol
    // duplicates), which suppresses re-emitting the event.
    private void handleMessage(Object webPubSubMessage) {
        if (webPubSubMessage instanceof GroupDataMessage) {
            final GroupDataMessage groupDataMessage = (GroupDataMessage) webPubSubMessage;
            boolean emitMessage = true;
            if (groupDataMessage.getSequenceId() != null) {
                emitMessage = updateSequenceAckId(groupDataMessage.getSequenceId());
            }
            if (emitMessage) {
                tryEmitNext(groupMessageEventSink, new GroupMessageEvent(
                    groupDataMessage.getGroup(),
                    groupDataMessage.getData(),
                    groupDataMessage.getDataType(),
                    groupDataMessage.getFromUserId(),
                    groupDataMessage.getSequenceId()));
            }
        } else if (webPubSubMessage instanceof ServerDataMessage) {
            final ServerDataMessage serverDataMessage = (ServerDataMessage) webPubSubMessage;
            boolean emitMessage = true;
            if (serverDataMessage.getSequenceId() != null) {
                emitMessage =
updateSequenceAckId(serverDataMessage.getSequenceId()); } if (emitMessage) { tryEmitNext(serverMessageEventSink, new ServerMessageEvent( serverDataMessage.getData(), serverDataMessage.getDataType(), serverDataMessage.getSequenceId())); } } else if (webPubSubMessage instanceof AckMessage) { tryEmitNext(ackMessageSink, (AckMessage) webPubSubMessage); } else if (webPubSubMessage instanceof ConnectedMessage) { final ConnectedMessage connectedMessage = (ConnectedMessage) webPubSubMessage; final String connectionId = connectedMessage.getConnectionId(); updateLogger(applicationId, connectionId); if (this.webPubSubConnection == null) { this.webPubSubConnection = new WebPubSubConnection(); } this.webPubSubConnection.updateForConnected( connectedMessage.getConnectionId(), connectedMessage.getReconnectionToken(), () -> tryEmitNext(connectedEventSink, new ConnectedEvent( connectionId, connectedMessage.getUserId()))); } else if (webPubSubMessage instanceof DisconnectedMessage) { final DisconnectedMessage disconnectedMessage = (DisconnectedMessage) webPubSubMessage; handleConnectionClose(new DisconnectedEvent( this.getConnectionId(), disconnectedMessage.getReason())); } } private boolean updateSequenceAckId(long id) { WebPubSubConnection connection = this.webPubSubConnection; if (connection != null) { return connection.getSequenceAckId().update(id); } else { return false; } } private Mono<Void> handleNoRecovery() { return Mono.defer(() -> { if (isStoppedByUser.compareAndSet(true, false)) { handleClientStop(); return Mono.empty(); } else if (autoReconnect) { boolean success = clientState.changeStateOn(WebPubSubClientState.DISCONNECTED, WebPubSubClientState.RECONNECTING); if (!success) { return Mono.error(logger.logExceptionAsError( new StopReconnectException("Failed to start. 
Client is not DISCONNECTED."))); } return Mono.defer(() -> { if (isStoppedByUser.compareAndSet(true, false)) { return Mono.error(logger.logExceptionAsWarning( new StopReconnectException("Client is stopped by user."))); } else { return Mono.empty(); } }).then(clientAccessUrlProvider.flatMap(url -> Mono.<Void>fromRunnable(() -> { this.webSocketSession = webSocketClient.connectToServer( clientEndpointConfiguration, url, loggerReference, this::handleMessage, this::handleSessionOpen, this::handleSessionClose); }).subscribeOn(Schedulers.boundedElastic()))).retryWhen(RECONNECT_RETRY_SPEC).doOnError(error -> { handleClientStop(); }); } else { handleClientStop(); return Mono.empty(); } }); } private Mono<Void> handleRecovery(String connectionId, String reconnectionToken) { return Mono.defer(() -> { if (isStoppedByUser.compareAndSet(true, false)) { handleClientStop(); return Mono.empty(); } else { boolean success = clientState.changeStateOn(WebPubSubClientState.CONNECTED, WebPubSubClientState.RECOVERING); if (!success) { return Mono.error(logger.logExceptionAsError( new StopReconnectException("Failed to recover. 
Client is not CONNECTED."))); } return Mono.defer(() -> { if (isStoppedByUser.compareAndSet(true, false)) { return Mono.error(logger.logExceptionAsWarning( new StopReconnectException("Client is stopped by user."))); } else { return Mono.empty(); } }).then(clientAccessUrlProvider.flatMap(url -> Mono.<Void>fromRunnable(() -> { String recoveryUrl = UrlBuilder.parse(url) .addQueryParameter("awps_connection_id", connectionId) .addQueryParameter("awps_reconnection_token", reconnectionToken) .toString(); this.webSocketSession = webSocketClient.connectToServer( clientEndpointConfiguration, recoveryUrl, loggerReference, this::handleMessage, this::handleSessionOpen, this::handleSessionClose); }).subscribeOn(Schedulers.boundedElastic()))).retryWhen(RECONNECT_RETRY_SPEC).doOnError(error -> { handleClientStop(); }); } }); } private void handleClientStop() { handleClientStop(true); } private void handleClientStop(boolean sendStoppedEvent) { clientState.changeState(WebPubSubClientState.STOPPED); this.webSocketSession = null; this.webPubSubConnection = null; tryCompleteOnStoppedByUserSink(); Disposable task = sequenceAckTask.getAndSet(null); if (task != null) { task.dispose(); } if (sendStoppedEvent) { tryEmitNext(stoppedEventSink, new StoppedEvent()); } groupMessageEventSink.emitComplete( emitFailureHandler("Unable to emit Complete to groupMessageEventSink")); groupMessageEventSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); serverMessageEventSink.emitComplete( emitFailureHandler("Unable to emit Complete to groupMessageEventSink")); serverMessageEventSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); connectedEventSink.emitComplete( emitFailureHandler("Unable to emit Complete to connectedEventSink")); connectedEventSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); disconnectedEventSink.emitComplete( emitFailureHandler("Unable to emit Complete to disconnectedEventSink")); 
disconnectedEventSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); stoppedEventSink.emitComplete( emitFailureHandler("Unable to emit Complete to disconnectedEventSink")); stoppedEventSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); rejoinGroupFailedEventSink.emitComplete( emitFailureHandler("Unable to emit Complete to rejoinGroupFailedEventSink")); rejoinGroupFailedEventSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); ackMessageSink.emitComplete(emitFailureHandler("Unable to emit Complete to ackMessageSink")); ackMessageSink = Sinks.many().multicast().onBackpressureBuffer(Queues.SMALL_BUFFER_SIZE, false); updateLogger(applicationId, null); } private void handleConnectionClose() { handleConnectionClose(null); } private void handleConnectionClose(DisconnectedEvent disconnectedEvent) { final DisconnectedEvent event = disconnectedEvent == null ? new DisconnectedEvent(this.getConnectionId(), null) : disconnectedEvent; WebPubSubConnection connection = this.webPubSubConnection; if (connection != null) { connection.updateForDisconnected(() -> tryEmitNext(disconnectedEventSink, event)); } if (disconnectedEvent == null) { this.webPubSubConnection = null; } } private void updateLogger(String applicationId, String connectionId) { logger = new ClientLogger(WebPubSubAsyncClient.class, LoggingUtils.createContextWithConnectionId(applicationId, connectionId)); loggerReference.set(logger); } private static final class StopReconnectException extends RuntimeException { private StopReconnectException(String message) { super(message); } } private final class ClientState { private final AtomicReference<WebPubSubClientState> clientState = new AtomicReference<>(WebPubSubClientState.STOPPED); WebPubSubClientState get() { return clientState.get(); } WebPubSubClientState changeState(WebPubSubClientState state) { WebPubSubClientState previousState = clientState.getAndSet(state); 
logger.atInfo() .addKeyValue("currentClientState", state) .addKeyValue("previousClientState", previousState) .log("Client state changed."); return previousState; } boolean changeStateOn(WebPubSubClientState previousState, WebPubSubClientState state) { boolean success = clientState.compareAndSet(previousState, state); if (success) { logger.atInfo() .addKeyValue("currentClientState", state) .addKeyValue("previousClientState", previousState) .log("Client state changed."); } return success; } } WebPubSubClientState getClientState() { return clientState.get(); } WebSocketSession getWebsocketSession() { return webSocketSession; } private Sinks.EmitFailureHandler emitFailureHandler(String message) { return (signalType, emitResult) -> { LoggingUtils.addSignalTypeAndResult(this.logger.atWarning(), signalType, emitResult) .log(message); return emitResult.equals(Sinks.EmitResult.FAIL_NON_SERIALIZED); }; } private RuntimeException logSendMessageFailedException( String errorMessage, Throwable cause, boolean isTransient, WebPubSubMessage message) { return logSendMessageFailedException(errorMessage, cause, isTransient, (message instanceof WebPubSubMessageAck) ? ((WebPubSubMessageAck) message).getAckId() : null); } private RuntimeException logSendMessageFailedException( String errorMessage, Throwable cause, boolean isTransient, Long ackId) { return logSendMessageFailedException(errorMessage, cause, isTransient, ackId, null); } private RuntimeException logSendMessageFailedException( String errorMessage, Throwable cause, boolean isTransient, Long ackId, AckResponseError error) { return logger.logExceptionAsWarning( new SendMessageFailedException(errorMessage, cause, isTransient, ackId, error)); } }
It's unfortunate that Jackson doesn't consider this as a breaking change.
public JacksonAdapter() { this.simpleMapper = initializeMapperBuilder(JsonMapper.builder()) .build(); this.headerMapper = initializeMapperBuilder(JsonMapper.builder()) .enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES) .build(); this.xmlMapper = initializeMapperBuilder(XmlMapper.builder()) .defaultUseWrapper(false) .enable(ToXmlGenerator.Feature.WRITE_XML_DECLARATION) /* * In Jackson 2.12 the default value of this feature changed from true to false. * https: */ .enable(FromXmlParser.Feature.EMPTY_ELEMENT_AS_NULL) .build(); ObjectMapper flatteningMapper = initializeMapperBuilder(JsonMapper.builder()) .addModule(FlatteningSerializer.getModule(simpleMapper())) .addModule(FlatteningDeserializer.getModule(simpleMapper())) .build(); this.mapper = initializeMapperBuilder(JsonMapper.builder()) .addModule(AdditionalPropertiesSerializer.getModule(flatteningMapper)) .addModule(AdditionalPropertiesDeserializer.getModule(flatteningMapper)) .addModule(FlatteningSerializer.getModule(simpleMapper())) .addModule(FlatteningDeserializer.getModule(simpleMapper())) .build(); }
* https:
public JacksonAdapter() { this.simpleMapper = initializeMapperBuilder(JsonMapper.builder()) .build(); this.headerMapper = initializeMapperBuilder(JsonMapper.builder()) .enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES) .build(); this.xmlMapper = initializeMapperBuilder(XmlMapper.builder()) .defaultUseWrapper(false) .enable(ToXmlGenerator.Feature.WRITE_XML_DECLARATION) /* * In Jackson 2.12 the default value of this feature changed from true to false. * https: */ .enable(FromXmlParser.Feature.EMPTY_ELEMENT_AS_NULL) .build(); this.xmlMapper.coercionConfigDefaults() .setCoercion(CoercionInputShape.EmptyString, CoercionAction.AsNull); ObjectMapper flatteningMapper = initializeMapperBuilder(JsonMapper.builder()) .addModule(FlatteningSerializer.getModule(simpleMapper())) .addModule(FlatteningDeserializer.getModule(simpleMapper())) .build(); this.mapper = initializeMapperBuilder(JsonMapper.builder()) .addModule(AdditionalPropertiesSerializer.getModule(flatteningMapper)) .addModule(AdditionalPropertiesDeserializer.getModule(flatteningMapper)) .addModule(FlatteningSerializer.getModule(simpleMapper())) .addModule(FlatteningDeserializer.getModule(simpleMapper())) .build(); }
class JacksonAdapter implements SerializerAdapter { private static final Pattern PATTERN = Pattern.compile("^\"*|\"*$"); private final ClientLogger logger = new ClientLogger(JacksonAdapter.class); /** * An instance of {@link ObjectMapper} to serialize/deserialize objects. */ private final ObjectMapper mapper; /** * An instance of {@link ObjectMapper} that does not do flattening. */ private final ObjectMapper simpleMapper; private final ObjectMapper xmlMapper; private final ObjectMapper headerMapper; /* * The lazily-created serializer for this ServiceClient. */ private static SerializerAdapter serializerAdapter; private final Map<Type, JavaType> typeToJavaTypeCache = new ConcurrentHashMap<>(); /** * Creates a new JacksonAdapter instance with default mapper settings. */ /** * Gets a static instance of {@link ObjectMapper} that doesn't handle flattening. * * @return an instance of {@link ObjectMapper}. */ protected ObjectMapper simpleMapper() { return simpleMapper; } /** * maintain singleton instance of the default serializer adapter. 
* * @return the default serializer */ public static synchronized SerializerAdapter createDefaultSerializerAdapter() { if (serializerAdapter == null) { serializerAdapter = new JacksonAdapter(); } return serializerAdapter; } /** * @return the original serializer type */ public ObjectMapper serializer() { return mapper; } @Override public String serialize(Object object, SerializerEncoding encoding) throws IOException { if (object == null) { return null; } ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serialize(object, encoding, stream); return new String(stream.toByteArray(), 0, stream.size(), StandardCharsets.UTF_8); } @Override public void serialize(Object object, SerializerEncoding encoding, OutputStream outputStream) throws IOException { if (object == null) { return; } if ((encoding == SerializerEncoding.XML)) { xmlMapper.writeValue(outputStream, object); } else { serializer().writeValue(outputStream, object); } } @Override public String serializeRaw(Object object) { if (object == null) { return null; } try { return PATTERN.matcher(serialize(object, SerializerEncoding.JSON)).replaceAll(""); } catch (IOException ex) { logger.warning("Failed to serialize {} to JSON.", object.getClass(), ex); return null; } } @Override public String serializeList(List<?> list, CollectionFormat format) { if (list == null) { return null; } List<String> serialized = new ArrayList<>(); for (Object element : list) { String raw = serializeRaw(element); serialized.add(raw != null ? 
raw : ""); } return String.join(format.getDelimiter(), serialized); } @Override public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException { if (CoreUtils.isNullOrEmpty(value)) { return null; } return deserialize(new ByteArrayInputStream(value.getBytes(StandardCharsets.UTF_8)), type, encoding); } @Override public <T> T deserialize(InputStream inputStream, final Type type, SerializerEncoding encoding) throws IOException { if (inputStream == null) { return null; } final JavaType javaType = createJavaType(type); try { if (encoding == SerializerEncoding.XML) { return xmlMapper.readValue(inputStream, javaType); } else { return serializer().readValue(inputStream, javaType); } } catch (JsonParseException jpe) { throw logger.logExceptionAsError(new MalformedValueException(jpe.getMessage(), jpe)); } } @Override public <T> T deserialize(HttpHeaders headers, Type deserializedHeadersType) throws IOException { if (deserializedHeadersType == null) { return null; } /* * Do we need to serialize and then deserialize the headers? For now transition to using convertValue as it * allows for some internal optimizations by Jackson. */ T deserializedHeaders = headerMapper.convertValue(headers, createJavaType(deserializedHeadersType)); final Class<?> deserializedHeadersClass = TypeUtil.getRawClass(deserializedHeadersType); final Field[] declaredFields = deserializedHeadersClass.getDeclaredFields(); /* * A list containing all handlers for header collections of the header type. */ final List<HeaderCollectionHandler> headerCollectionHandlers = new ArrayList<>(); /* * This set is an optimization where we track the first character of all HeaderCollections defined on the * deserialized headers type. This allows us to optimize away startWiths checks which are much more costly than * getting the first character. 
*/ final Set<Character> headerCollectionsFirstCharacters = new HashSet<>(); /* * Begin by looping over all declared fields and initializing all header collection information. */ for (final Field declaredField : declaredFields) { if (!declaredField.isAnnotationPresent(HeaderCollection.class)) { continue; } final Type declaredFieldType = declaredField.getGenericType(); if (!TypeUtil.isTypeOrSubTypeOf(declaredField.getType(), Map.class)) { continue; } final Type[] mapTypeArguments = TypeUtil.getTypeArguments(declaredFieldType); if (mapTypeArguments.length != 2 || mapTypeArguments[0] != String.class || mapTypeArguments[1] != String.class) { continue; } final HeaderCollection headerCollectionAnnotation = declaredField.getAnnotation(HeaderCollection.class); final String headerCollectionPrefix = headerCollectionAnnotation.value().toLowerCase(Locale.ROOT); final int headerCollectionPrefixLength = headerCollectionPrefix.length(); if (headerCollectionPrefixLength == 0) { continue; } headerCollectionHandlers.add(new HeaderCollectionHandler(headerCollectionPrefix, declaredField)); headerCollectionsFirstCharacters.add(headerCollectionPrefix.charAt(0)); } /* * Then loop over all headers and check if they begin with any of the prefixes found. */ for (final HttpHeader header : headers) { String headerNameLower = header.getName().toLowerCase(Locale.ROOT); for (HeaderCollectionHandler headerCollectionHandler : headerCollectionHandlers) { if (!headerCollectionsFirstCharacters.contains(headerNameLower.charAt(0))) { continue; } if (headerCollectionHandler.headerStartsWithPrefix(headerNameLower)) { headerCollectionHandler.addHeader(header.getName(), header.getValue()); } } } /* * Finally inject all found header collection values into the deserialized headers. 
*/ headerCollectionHandlers.forEach(h -> h.injectValuesIntoDeclaringField(deserializedHeaders, logger)); return deserializedHeaders; } private static <S extends MapperBuilder<?, ?>> S initializeMapperBuilder(S mapper) { mapper.enable(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS) .enable(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT) .enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY) .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) .disable(SerializationFeature.FAIL_ON_EMPTY_BEANS) .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES) .serializationInclusion(JsonInclude.Include.NON_NULL) .addModule(new JavaTimeModule()) .addModule(ByteArraySerializer.getModule()) .addModule(Base64UrlSerializer.getModule()) .addModule(DateTimeSerializer.getModule()) .addModule(DateTimeDeserializer.getModule()) .addModule(DateTimeRfc1123Serializer.getModule()) .addModule(DurationSerializer.getModule()) .addModule(HttpHeadersSerializer.getModule()) .addModule(UnixTimeSerializer.getModule()) .visibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY) .visibility(PropertyAccessor.SETTER, JsonAutoDetect.Visibility.NONE) .visibility(PropertyAccessor.GETTER, JsonAutoDetect.Visibility.NONE) .visibility(PropertyAccessor.IS_GETTER, JsonAutoDetect.Visibility.NONE); return mapper; } private JavaType createJavaType(Type type) { if (type == null) { return null; } else if (type instanceof JavaType) { return (JavaType) type; } else if (type instanceof ParameterizedType) { return typeToJavaTypeCache.computeIfAbsent(type, t -> { final ParameterizedType parameterizedType = (ParameterizedType) type; final Type[] actualTypeArguments = parameterizedType.getActualTypeArguments(); JavaType[] javaTypeArguments = new JavaType[actualTypeArguments.length]; for (int i = 0; i != actualTypeArguments.length; i++) { javaTypeArguments[i] = createJavaType(actualTypeArguments[i]); } return mapper.getTypeFactory().constructParametricType((Class<?>) 
parameterizedType.getRawType(), javaTypeArguments); }); } else { return typeToJavaTypeCache.computeIfAbsent(type, t -> mapper.getTypeFactory().constructType(t)); } } /* * Internal helper class that helps manage converting headers into their header collection. */ private static final class HeaderCollectionHandler { private final String prefix; private final int prefixLength; private final Map<String, String> values; private final Field declaringField; HeaderCollectionHandler(String prefix, Field declaringField) { this.prefix = prefix; this.prefixLength = prefix.length(); this.values = new HashMap<>(); this.declaringField = declaringField; } boolean headerStartsWithPrefix(String headerName) { return headerName.startsWith(prefix); } void addHeader(String headerName, String headerValue) { values.put(headerName.substring(prefixLength), headerValue); } void injectValuesIntoDeclaringField(Object deserializedHeaders, ClientLogger logger) { /* * First check if the deserialized headers type has a public setter. */ if (usePublicSetter(deserializedHeaders, logger)) { return; } logger.verbose("Failed to find or use public setter to set header collection."); /* * Otherwise fallback to setting the field directly. 
*/ final boolean declaredFieldAccessibleBackup = declaringField.isAccessible(); try { if (!declaredFieldAccessibleBackup) { AccessController.doPrivileged((PrivilegedAction<Object>) () -> { declaringField.setAccessible(true); return null; }); } declaringField.set(deserializedHeaders, values); logger.verbose("Set header collection by accessing the field directly."); } catch (IllegalAccessException ex) { logger.warning("Failed to inject header collection values into deserialized headers.", ex); } finally { if (!declaredFieldAccessibleBackup) { AccessController.doPrivileged((PrivilegedAction<Object>) () -> { declaringField.setAccessible(false); return null; }); } } } private boolean usePublicSetter(Object deserializedHeaders, ClientLogger logger) { try { String potentialSetterName = getPotentialSetterName(); Method setterMethod = deserializedHeaders.getClass().getDeclaredMethod(potentialSetterName, Map.class); if (Modifier.isPublic(setterMethod.getModifiers())) { setterMethod.invoke(deserializedHeaders, values); logger.verbose("User setter %s on class %s to set header collection.", potentialSetterName, deserializedHeaders.getClass().getSimpleName()); return true; } return false; } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) { return false; } } private String getPotentialSetterName() { String fieldName = declaringField.getName(); return "set" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1); } } }
class JacksonAdapter implements SerializerAdapter { private static final Pattern PATTERN = Pattern.compile("^\"*|\"*$"); private final ClientLogger logger = new ClientLogger(JacksonAdapter.class); /** * An instance of {@link ObjectMapper} to serialize/deserialize objects. */ private final ObjectMapper mapper; /** * An instance of {@link ObjectMapper} that does not do flattening. */ private final ObjectMapper simpleMapper; private final ObjectMapper xmlMapper; private final ObjectMapper headerMapper; /* * The lazily-created serializer for this ServiceClient. */ private static SerializerAdapter serializerAdapter; private final Map<Type, JavaType> typeToJavaTypeCache = new ConcurrentHashMap<>(); /** * Creates a new JacksonAdapter instance with default mapper settings. */ /** * Gets a static instance of {@link ObjectMapper} that doesn't handle flattening. * * @return an instance of {@link ObjectMapper}. */ protected ObjectMapper simpleMapper() { return simpleMapper; } /** * maintain singleton instance of the default serializer adapter. 
* * @return the default serializer */ public static synchronized SerializerAdapter createDefaultSerializerAdapter() { if (serializerAdapter == null) { serializerAdapter = new JacksonAdapter(); } return serializerAdapter; } /** * @return the original serializer type */ public ObjectMapper serializer() { return mapper; } @Override public String serialize(Object object, SerializerEncoding encoding) throws IOException { if (object == null) { return null; } ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serialize(object, encoding, stream); return new String(stream.toByteArray(), 0, stream.size(), StandardCharsets.UTF_8); } @Override public void serialize(Object object, SerializerEncoding encoding, OutputStream outputStream) throws IOException { if (object == null) { return; } if ((encoding == SerializerEncoding.XML)) { xmlMapper.writeValue(outputStream, object); } else { serializer().writeValue(outputStream, object); } } @Override public String serializeRaw(Object object) { if (object == null) { return null; } try { return PATTERN.matcher(serialize(object, SerializerEncoding.JSON)).replaceAll(""); } catch (IOException ex) { logger.warning("Failed to serialize {} to JSON.", object.getClass(), ex); return null; } } @Override public String serializeList(List<?> list, CollectionFormat format) { if (list == null) { return null; } List<String> serialized = new ArrayList<>(); for (Object element : list) { String raw = serializeRaw(element); serialized.add(raw != null ? 
raw : ""); } return String.join(format.getDelimiter(), serialized); } @Override public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException { if (CoreUtils.isNullOrEmpty(value)) { return null; } return deserialize(new ByteArrayInputStream(value.getBytes(StandardCharsets.UTF_8)), type, encoding); } @Override public <T> T deserialize(InputStream inputStream, final Type type, SerializerEncoding encoding) throws IOException { if (inputStream == null) { return null; } final JavaType javaType = createJavaType(type); try { if (encoding == SerializerEncoding.XML) { return xmlMapper.readValue(inputStream, javaType); } else { return serializer().readValue(inputStream, javaType); } } catch (JsonParseException jpe) { throw logger.logExceptionAsError(new MalformedValueException(jpe.getMessage(), jpe)); } } @Override public <T> T deserialize(HttpHeaders headers, Type deserializedHeadersType) throws IOException { if (deserializedHeadersType == null) { return null; } T deserializedHeaders = headerMapper.convertValue(headers, createJavaType(deserializedHeadersType)); final Class<?> deserializedHeadersClass = TypeUtil.getRawClass(deserializedHeadersType); final Field[] declaredFields = deserializedHeadersClass.getDeclaredFields(); /* * A list containing all handlers for header collections of the header type. */ final List<HeaderCollectionHandler> headerCollectionHandlers = new ArrayList<>(); /* * This set is an optimization where we track the first character of all HeaderCollections defined on the * deserialized headers type. This allows us to optimize away startWiths checks which are much more costly than * getting the first character. */ final Set<Character> headerCollectionsFirstCharacters = new HashSet<>(); /* * Begin by looping over all declared fields and initializing all header collection information. 
*/ for (final Field declaredField : declaredFields) { if (!declaredField.isAnnotationPresent(HeaderCollection.class)) { continue; } final Type declaredFieldType = declaredField.getGenericType(); if (!TypeUtil.isTypeOrSubTypeOf(declaredField.getType(), Map.class)) { continue; } final Type[] mapTypeArguments = TypeUtil.getTypeArguments(declaredFieldType); if (mapTypeArguments.length != 2 || mapTypeArguments[0] != String.class || mapTypeArguments[1] != String.class) { continue; } final HeaderCollection headerCollectionAnnotation = declaredField.getAnnotation(HeaderCollection.class); final String headerCollectionPrefix = headerCollectionAnnotation.value().toLowerCase(Locale.ROOT); final int headerCollectionPrefixLength = headerCollectionPrefix.length(); if (headerCollectionPrefixLength == 0) { continue; } headerCollectionHandlers.add(new HeaderCollectionHandler(headerCollectionPrefix, declaredField)); headerCollectionsFirstCharacters.add(headerCollectionPrefix.charAt(0)); } /* * Then loop over all headers and check if they begin with any of the prefixes found. */ for (final HttpHeader header : headers) { String headerNameLower = header.getName().toLowerCase(Locale.ROOT); /* * Optimization to skip this header as it doesn't begin with any character starting header collections in * the deserialized headers type. */ if (!headerCollectionsFirstCharacters.contains(headerNameLower.charAt(0))) { continue; } for (HeaderCollectionHandler headerCollectionHandler : headerCollectionHandlers) { if (headerCollectionHandler.headerStartsWithPrefix(headerNameLower)) { headerCollectionHandler.addHeader(header.getName(), header.getValue()); } } } /* * Finally inject all found header collection values into the deserialized headers. 
*/ headerCollectionHandlers.forEach(h -> h.injectValuesIntoDeclaringField(deserializedHeaders, logger)); return deserializedHeaders; } @SuppressWarnings("deprecation") private static <S extends MapperBuilder<?, ?>> S initializeMapperBuilder(S mapper) { mapper.enable(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS) .enable(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT) .enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY) .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) .disable(SerializationFeature.FAIL_ON_EMPTY_BEANS) .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES) .serializationInclusion(JsonInclude.Include.NON_NULL) .addModule(new JavaTimeModule()) .addModule(ByteArraySerializer.getModule()) .addModule(Base64UrlSerializer.getModule()) .addModule(DateTimeSerializer.getModule()) .addModule(DateTimeDeserializer.getModule()) .addModule(DateTimeRfc1123Serializer.getModule()) .addModule(DurationSerializer.getModule()) .addModule(HttpHeadersSerializer.getModule()) .addModule(UnixTimeSerializer.getModule()) .visibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY) .visibility(PropertyAccessor.SETTER, JsonAutoDetect.Visibility.NONE) .visibility(PropertyAccessor.GETTER, JsonAutoDetect.Visibility.NONE) .visibility(PropertyAccessor.IS_GETTER, JsonAutoDetect.Visibility.NONE); return mapper; } private JavaType createJavaType(Type type) { if (type == null) { return null; } else if (type instanceof JavaType) { return (JavaType) type; } else if (type instanceof ParameterizedType) { final ParameterizedType parameterizedType = (ParameterizedType) type; final Type[] actualTypeArguments = parameterizedType.getActualTypeArguments(); JavaType[] javaTypeArguments = new JavaType[actualTypeArguments.length]; for (int i = 0; i != actualTypeArguments.length; i++) { javaTypeArguments[i] = createJavaType(actualTypeArguments[i]); } return typeToJavaTypeCache.computeIfAbsent(type, t -> mapper.getTypeFactory() .constructParametricType((Class<?>) 
parameterizedType.getRawType(), javaTypeArguments)); } else { return typeToJavaTypeCache.computeIfAbsent(type, t -> mapper.getTypeFactory().constructType(t)); } } /* * Internal helper class that helps manage converting headers into their header collection. */ private static final class HeaderCollectionHandler { private final String prefix; private final int prefixLength; private final Map<String, String> values; private final Field declaringField; HeaderCollectionHandler(String prefix, Field declaringField) { this.prefix = prefix; this.prefixLength = prefix.length(); this.values = new HashMap<>(); this.declaringField = declaringField; } boolean headerStartsWithPrefix(String headerName) { return headerName.startsWith(prefix); } void addHeader(String headerName, String headerValue) { values.put(headerName.substring(prefixLength), headerValue); } @SuppressWarnings("deprecation") void injectValuesIntoDeclaringField(Object deserializedHeaders, ClientLogger logger) { /* * First check if the deserialized headers type has a public setter. */ if (usePublicSetter(deserializedHeaders, logger)) { return; } logger.verbose("Failed to find or use public setter to set header collection."); /* * Otherwise fallback to setting the field directly. 
*/ final boolean declaredFieldAccessibleBackup = declaringField.isAccessible(); try { if (!declaredFieldAccessibleBackup) { AccessController.doPrivileged((PrivilegedAction<Object>) () -> { declaringField.setAccessible(true); return null; }); } declaringField.set(deserializedHeaders, values); logger.verbose("Set header collection by accessing the field directly."); } catch (IllegalAccessException ex) { logger.warning("Failed to inject header collection values into deserialized headers.", ex); } finally { if (!declaredFieldAccessibleBackup) { AccessController.doPrivileged((PrivilegedAction<Object>) () -> { declaringField.setAccessible(false); return null; }); } } } private boolean usePublicSetter(Object deserializedHeaders, ClientLogger logger) { try { String potentialSetterName = getPotentialSetterName(); Method setterMethod = deserializedHeaders.getClass().getDeclaredMethod(potentialSetterName, Map.class); if (Modifier.isPublic(setterMethod.getModifiers())) { setterMethod.invoke(deserializedHeaders, values); logger.verbose("User setter %s on class %s to set header collection.", potentialSetterName, deserializedHeaders.getClass().getSimpleName()); return true; } return false; } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) { return false; } } private String getPotentialSetterName() { String fieldName = declaringField.getName(); return "set" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1); } } }
I see — however, this only works when the exception carries a message, and some IO exceptions don't carry one at all. That's why I suggested logging the stack trace as well.
/**
 * Loads all workspace modules reachable from the current project's POM and returns the
 * {@link LocalProject} whose directory matches the current project.
 *
 * <p>When an effective-model builder is configured, each module's effective model is resolved;
 * otherwise the raw models are used directly.
 *
 * @return the local project corresponding to {@code currentProjectPom}
 * @throws BootstrapMavenException if the current project could not be resolved
 */
LocalProject load() throws BootstrapMavenException {
    final AtomicReference<LocalProject> currentProject = new AtomicReference<>();
    final Consumer<Model> processor;
    if (modelBuilder == null) {
        // Raw-model mode: wrap each raw model directly, no effective-model resolution.
        processor = rawModel -> {
            var project = new LocalProject(rawModel, workspace);
            if (currentProject.get() == null && project.getDir().equals(currentProjectPom.getParent())) {
                currentProject.set(project);
            }
        };
    } else {
        // Effective-model mode: build each module's effective model via the Maven model builder.
        processor = rawModel -> {
            var req = new DefaultModelBuildingRequest();
            req.setPomFile(rawModel.getPomFile());
            req.setModelResolver(modelResolver);
            req.setSystemProperties(System.getProperties());
            req.setUserProperties(System.getProperties());
            req.setModelCache(modelCache);
            req.setActiveProfileIds(activeProfileIds);
            req.setInactiveProfileIds(inactiveProfileIds);
            req.setProfiles(profiles);
            req.setRawModel(rawModel);
            req.setWorkspaceModelResolver(this);
            LocalProject project = null;
            try {
                project = new LocalProject(modelBuilder.build(req), workspace);
            } catch (Exception e) {
                if (warnOnFailingWsModules) {
                    // Pass the exception as the throwable argument so the full stack trace is
                    // logged; some IO exceptions carry no message, so concatenating the
                    // exception into the message would log nothing useful.
                    log.warn("Failed to resolve effective model for " + rawModel.getPomFile(), e);
                    return;
                }
                throw new RuntimeException("Failed to resolve the effective model for " + rawModel.getPomFile(), e);
            }
            if (currentProject.get() == null && project.getDir().equals(currentProjectPom.getParent())) {
                currentProject.set(project);
            }
            // Modules may only appear in the effective model (e.g. activated by profiles),
            // so queue them after building it.
            for (var module : project.getModelBuildingResult().getEffectiveModel().getModules()) {
                addModulePom(project.getDir().resolve(module).resolve(POM_XML));
            }
        };
    }
    // Drain the queue in waves: loading a module may discover and enqueue new modules,
    // so keep iterating until no unprocessed entries remain.
    int i = 0;
    while (i < moduleQueue.size()) {
        var newModules = new ArrayList<RawModule>();
        while (i < moduleQueue.size()) {
            loadModule(moduleQueue.get(i++), newModules);
        }
        for (var newModule : newModules) {
            newModule.process(processor);
        }
    }
    if (currentProject.get() == null) {
        throw new BootstrapMavenException("Failed to load project " + currentProjectPom);
    }
    return currentProject.get();
}
log.warn("Failed to resolve effective model for " + rawModel.getPomFile() + ": " + e);
LocalProject load() throws BootstrapMavenException { final AtomicReference<LocalProject> currentProject = new AtomicReference<>(); final Consumer<Model> processor; if (modelBuilder == null) { processor = rawModel -> { var project = new LocalProject(rawModel, workspace); if (currentProject.get() == null && project.getDir().equals(currentProjectPom.getParent())) { currentProject.set(project); } }; } else { processor = rawModel -> { var req = new DefaultModelBuildingRequest(); req.setPomFile(rawModel.getPomFile()); req.setModelResolver(modelResolver); req.setSystemProperties(System.getProperties()); req.setUserProperties(System.getProperties()); req.setModelCache(modelCache); req.setActiveProfileIds(activeProfileIds); req.setInactiveProfileIds(inactiveProfileIds); req.setProfiles(profiles); req.setRawModel(rawModel); req.setWorkspaceModelResolver(this); LocalProject project = null; try { project = new LocalProject(modelBuilder.build(req), workspace); } catch (Exception e) { if (warnOnFailingWsModules) { log.warn("Failed to resolve effective model for " + rawModel.getPomFile(), e); return; } throw new RuntimeException("Failed to resolve the effective model for " + rawModel.getPomFile(), e); } if (currentProject.get() == null && project.getDir().equals(currentProjectPom.getParent())) { currentProject.set(project); } for (var module : project.getModelBuildingResult().getEffectiveModel().getModules()) { addModulePom(project.getDir().resolve(module).resolve(POM_XML)); } }; } int i = 0; while (i < moduleQueue.size()) { var newModules = new ArrayList<RawModule>(); while (i < moduleQueue.size()) { loadModule(moduleQueue.get(i++), newModules); } for (var newModule : newModules) { newModule.process(processor); } } if (currentProject.get() == null) { throw new BootstrapMavenException("Failed to load project " + currentProjectPom); } return currentProject.get(); }
class WorkspaceLoader implements WorkspaceModelResolver, WorkspaceReader { private static final Logger log = Logger.getLogger(WorkspaceLoader.class); private static final String POM_XML = "pom.xml"; private static Path locateCurrentProjectPom(Path path) throws BootstrapMavenException { Path p = path; while (p != null) { final Path pom = p.resolve(POM_XML); if (Files.exists(pom)) { return pom; } p = p.getParent(); } throw new BootstrapMavenException("Failed to locate project pom.xml for " + path); } private final List<RawModule> moduleQueue = new ArrayList<>(); private final Map<Path, Model> loadedPoms = new HashMap<>(); private final Function<Path, Model> modelProvider; private final Map<GAV, Model> loadedModules = new HashMap<>(); private final LocalWorkspace workspace = new LocalWorkspace(); private final Path currentProjectPom; private boolean warnOnFailingWsModules; private ModelBuilder modelBuilder; private BootstrapModelResolver modelResolver; private ModelCache modelCache; private List<String> activeProfileIds; private List<String> inactiveProfileIds; private List<Profile> profiles; WorkspaceLoader(BootstrapMavenContext ctx, Path currentProjectPom, Function<Path, Model> modelProvider) throws BootstrapMavenException { try { final BasicFileAttributes fileAttributes = Files.readAttributes(currentProjectPom, BasicFileAttributes.class); this.currentProjectPom = fileAttributes.isDirectory() ? locateCurrentProjectPom(currentProjectPom) : currentProjectPom; } catch (IOException e) { throw new IllegalArgumentException(currentProjectPom + " does not exist", e); } addModulePom(this.currentProjectPom); this.modelProvider = modelProvider == null ? 
pom -> null : modelProvider; if (ctx != null && ctx.isEffectiveModelBuilder()) { modelBuilder = BootstrapModelBuilderFactory.getDefaultModelBuilder(); modelResolver = BootstrapModelResolver.newInstance(ctx, this); modelCache = new BootstrapModelCache(modelResolver.getSession()); profiles = ctx.getActiveSettingsProfiles(); final BootstrapMavenOptions cliOptions = ctx.getCliOptions(); activeProfileIds = new ArrayList<>(profiles.size() + cliOptions.getActiveProfileIds().size()); for (Profile p : profiles) { activeProfileIds.add(p.getId()); } activeProfileIds.addAll(cliOptions.getActiveProfileIds()); inactiveProfileIds = cliOptions.getInactiveProfileIds(); warnOnFailingWsModules = ctx.isWarnOnFailingWorkspaceModules(); } workspace.setBootstrapMavenContext(ctx); } private void addModulePom(Path pom) { if (pom != null) { moduleQueue.add(new RawModule(pom)); } } void setWorkspaceRootPom(Path rootPom) { addModulePom(rootPom); } private void loadModule(RawModule rawModule, List<RawModule> newModules) { var moduleDir = rawModule.pom.getParent(); if (moduleDir == null) { moduleDir = getFsRootDir(); } if (loadedPoms.containsKey(moduleDir)) { return; } rawModule.model = modelProvider == null ? 
null : modelProvider.apply(moduleDir); if (rawModule.model == null) { rawModule.model = readModel(rawModule.pom); } loadedPoms.put(moduleDir, rawModule.model); if (rawModule.model == null) { return; } newModules.add(rawModule); var added = loadedModules.putIfAbsent( new GAV(ModelUtils.getGroupId(rawModule.model), rawModule.model.getArtifactId(), ModelUtils.getVersion(rawModule.model)), rawModule.model); if (added != null) { return; } for (var module : rawModule.model.getModules()) { queueModule(rawModule.model.getProjectDirectory().toPath().resolve(module)); } for (var profile : rawModule.model.getProfiles()) { for (var module : profile.getModules()) { queueModule(rawModule.model.getProjectDirectory().toPath().resolve(module)); } } if (rawModule.parent == null) { final Path parentPom = rawModule.getParentPom(); if (parentPom != null) { var parentDir = parentPom.getParent(); if (parentDir == null) { parentDir = getFsRootDir(); } if (!loadedPoms.containsKey(parentDir)) { var parent = new RawModule(parentPom); rawModule.parent = parent; moduleQueue.add(parent); } } } } private static Path getFsRootDir() { return Path.of("/"); } private void queueModule(Path dir) { if (!loadedPoms.containsKey(dir)) { moduleQueue.add(new RawModule(dir.resolve(POM_XML))); } } @Override public Model resolveRawModel(String groupId, String artifactId, String versionConstraint) { return loadedModules.get(new GAV(groupId, artifactId, versionConstraint)); } @Override public Model resolveEffectiveModel(String groupId, String artifactId, String versionConstraint) { final LocalProject project = workspace.getProject(groupId, artifactId); return project != null && project.getVersion().equals(versionConstraint) ? 
project.getModelBuildingResult().getEffectiveModel() : null; } @Override public WorkspaceRepository getRepository() { return workspace.getRepository(); } @Override public File findArtifact(Artifact artifact) { if (!ArtifactCoords.TYPE_POM.equals(artifact.getExtension())) { return null; } var model = loadedModules.get(new GAV(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion())); return model == null ? null : model.getPomFile(); } @Override public List<String> findVersions(Artifact artifact) { var model = loadedModules.get(new GAV(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion())); return model == null ? List.of() : List.of(ModelUtils.getVersion(model)); } private static Model readModel(Path pom) { try { final Model model = ModelUtils.readModel(pom); model.setPomFile(pom.toFile()); return model; } catch (NoSuchFileException e) { log.warn("Module(s) under " + pom.getParent() + " will be handled as thirdparty dependencies because " + pom + " does not exist"); return null; } catch (IOException e) { throw new UncheckedIOException("Failed to load POM from " + pom, e); } } private static class RawModule { final Path pom; Model model; RawModule parent; boolean processed; private RawModule(Path pom) { this(null, pom); } private RawModule(RawModule parent, Path pom) { this.pom = pom.normalize().toAbsolutePath(); this.parent = parent; } private Path getParentPom() { if (model == null) { return null; } Path parentPom = null; final Parent parent = model.getParent(); if (parent != null && parent.getRelativePath() != null && !parent.getRelativePath().isEmpty()) { parentPom = pom.getParent().resolve(parent.getRelativePath()).normalize(); if (Files.isDirectory(parentPom)) { parentPom = parentPom.resolve(POM_XML); } } else { final Path parentDir = pom.getParent().getParent(); if (parentDir != null) { parentPom = parentDir.resolve(POM_XML); } } return parentPom != null && Files.exists(parentPom) ? 
parentPom : null; } private void process(Consumer<Model> consumer) { if (processed) { return; } processed = true; if (parent != null) { parent.process(consumer); } if (model != null) { consumer.accept(model); } } } }
class WorkspaceLoader implements WorkspaceModelResolver, WorkspaceReader { private static final Logger log = Logger.getLogger(WorkspaceLoader.class); private static final String POM_XML = "pom.xml"; private static Path locateCurrentProjectPom(Path path) throws BootstrapMavenException { Path p = path; while (p != null) { final Path pom = p.resolve(POM_XML); if (Files.exists(pom)) { return pom; } p = p.getParent(); } throw new BootstrapMavenException("Failed to locate project pom.xml for " + path); } private final List<RawModule> moduleQueue = new ArrayList<>(); private final Map<Path, Model> loadedPoms = new HashMap<>(); private final Function<Path, Model> modelProvider; private final Map<GAV, Model> loadedModules = new HashMap<>(); private final LocalWorkspace workspace = new LocalWorkspace(); private final Path currentProjectPom; private boolean warnOnFailingWsModules; private ModelBuilder modelBuilder; private BootstrapModelResolver modelResolver; private ModelCache modelCache; private List<String> activeProfileIds; private List<String> inactiveProfileIds; private List<Profile> profiles; WorkspaceLoader(BootstrapMavenContext ctx, Path currentProjectPom, Function<Path, Model> modelProvider) throws BootstrapMavenException { try { final BasicFileAttributes fileAttributes = Files.readAttributes(currentProjectPom, BasicFileAttributes.class); this.currentProjectPom = fileAttributes.isDirectory() ? locateCurrentProjectPom(currentProjectPom) : currentProjectPom; } catch (IOException e) { throw new IllegalArgumentException(currentProjectPom + " does not exist", e); } addModulePom(this.currentProjectPom); this.modelProvider = modelProvider == null ? 
pom -> null : modelProvider; if (ctx != null && ctx.isEffectiveModelBuilder()) { modelBuilder = BootstrapModelBuilderFactory.getDefaultModelBuilder(); modelResolver = BootstrapModelResolver.newInstance(ctx, this); modelCache = new BootstrapModelCache(modelResolver.getSession()); profiles = ctx.getActiveSettingsProfiles(); final BootstrapMavenOptions cliOptions = ctx.getCliOptions(); activeProfileIds = new ArrayList<>(profiles.size() + cliOptions.getActiveProfileIds().size()); for (Profile p : profiles) { activeProfileIds.add(p.getId()); } activeProfileIds.addAll(cliOptions.getActiveProfileIds()); inactiveProfileIds = cliOptions.getInactiveProfileIds(); warnOnFailingWsModules = ctx.isWarnOnFailingWorkspaceModules(); } workspace.setBootstrapMavenContext(ctx); } private void addModulePom(Path pom) { if (pom != null) { moduleQueue.add(new RawModule(pom)); } } void setWorkspaceRootPom(Path rootPom) { addModulePom(rootPom); } private void loadModule(RawModule rawModule, List<RawModule> newModules) { var moduleDir = rawModule.pom.getParent(); if (moduleDir == null) { moduleDir = getFsRootDir(); } if (loadedPoms.containsKey(moduleDir)) { return; } rawModule.model = modelProvider == null ? 
null : modelProvider.apply(moduleDir); if (rawModule.model == null) { rawModule.model = readModel(rawModule.pom); } loadedPoms.put(moduleDir, rawModule.model); if (rawModule.model == null) { return; } newModules.add(rawModule); var added = loadedModules.putIfAbsent( new GAV(ModelUtils.getGroupId(rawModule.model), rawModule.model.getArtifactId(), ModelUtils.getVersion(rawModule.model)), rawModule.model); if (added != null) { return; } for (var module : rawModule.model.getModules()) { queueModule(rawModule.model.getProjectDirectory().toPath().resolve(module)); } for (var profile : rawModule.model.getProfiles()) { for (var module : profile.getModules()) { queueModule(rawModule.model.getProjectDirectory().toPath().resolve(module)); } } if (rawModule.parent == null) { final Path parentPom = rawModule.getParentPom(); if (parentPom != null) { var parentDir = parentPom.getParent(); if (parentDir == null) { parentDir = getFsRootDir(); } if (!loadedPoms.containsKey(parentDir)) { var parent = new RawModule(parentPom); rawModule.parent = parent; moduleQueue.add(parent); } } } } private static Path getFsRootDir() { return Path.of("/"); } private void queueModule(Path dir) { if (!loadedPoms.containsKey(dir)) { moduleQueue.add(new RawModule(dir.resolve(POM_XML))); } } @Override public Model resolveRawModel(String groupId, String artifactId, String versionConstraint) { return loadedModules.get(new GAV(groupId, artifactId, versionConstraint)); } @Override public Model resolveEffectiveModel(String groupId, String artifactId, String versionConstraint) { final LocalProject project = workspace.getProject(groupId, artifactId); return project != null && project.getVersion().equals(versionConstraint) ? 
project.getModelBuildingResult().getEffectiveModel() : null; } @Override public WorkspaceRepository getRepository() { return workspace.getRepository(); } @Override public File findArtifact(Artifact artifact) { if (!ArtifactCoords.TYPE_POM.equals(artifact.getExtension())) { return null; } var model = loadedModules.get(new GAV(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion())); return model == null ? null : model.getPomFile(); } @Override public List<String> findVersions(Artifact artifact) { var model = loadedModules.get(new GAV(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion())); return model == null ? List.of() : List.of(ModelUtils.getVersion(model)); } private static Model readModel(Path pom) { try { final Model model = ModelUtils.readModel(pom); model.setPomFile(pom.toFile()); return model; } catch (NoSuchFileException e) { log.warn("Module(s) under " + pom.getParent() + " will be handled as thirdparty dependencies because " + pom + " does not exist"); return null; } catch (IOException e) { throw new UncheckedIOException("Failed to load POM from " + pom, e); } } private static class RawModule { final Path pom; Model model; RawModule parent; boolean processed; private RawModule(Path pom) { this(null, pom); } private RawModule(RawModule parent, Path pom) { this.pom = pom.normalize().toAbsolutePath(); this.parent = parent; } private Path getParentPom() { if (model == null) { return null; } Path parentPom = null; final Parent parent = model.getParent(); if (parent != null && parent.getRelativePath() != null && !parent.getRelativePath().isEmpty()) { parentPom = pom.getParent().resolve(parent.getRelativePath()).normalize(); if (Files.isDirectory(parentPom)) { parentPom = parentPom.resolve(POM_XML); } } else { final Path parentDir = pom.getParent().getParent(); if (parentDir != null) { parentPom = parentDir.resolve(POM_XML); } } return parentPom != null && Files.exists(parentPom) ? 
parentPom : null; } private void process(Consumer<Model> consumer) { if (processed) { return; } processed = true; if (parent != null) { parent.process(consumer); } if (model != null) { consumer.accept(model); } } } }
I thought about it, but this is a helper class rather than a JUnit-specific one, so I don't think we should use `TemporaryFolder` directly. Another option is to remove the constructors that take no `tmpWorkingDir`, making it a mandatory argument; every consumer would then be responsible for passing a directory path, and all tests using it could pass a JUnit `TemporaryFolder.newFolder()`.
/**
 * Creates a runtime info backed by an empty {@link Configuration} and the JVM's temporary
 * file directories.
 *
 * <p>The directory property may list several paths separated by ',' or the platform path
 * separator, hence the split on both.
 */
public TestingTaskManagerRuntimeInfo() {
    this(
            new Configuration(),
            EnvironmentInformation.getTemporaryFileDirectory()
                    .split(",|" + File.pathSeparator));
}
EnvironmentInformation.getTemporaryFileDirectory()
public TestingTaskManagerRuntimeInfo() { this( new Configuration(), EnvironmentInformation.getTemporaryFileDirectory() .split(",|" + File.pathSeparator)); }
class TestingTaskManagerRuntimeInfo implements TaskManagerRuntimeInfo { private final Configuration configuration; private final String[] tmpDirectories; private final String taskManagerExternalAddress; private final File tmpWorkingDirectory; public TestingTaskManagerRuntimeInfo(Configuration configuration) { this(configuration, EnvironmentInformation.getTemporaryFileDirectory()); } public TestingTaskManagerRuntimeInfo(Configuration configuration, File tmpWorkingDirectory) { this( configuration, new String[] {EnvironmentInformation.getTemporaryFileDirectory()}, InetAddress.getLoopbackAddress().getHostAddress(), tmpWorkingDirectory); } public TestingTaskManagerRuntimeInfo(Configuration configuration, String tmpDirectory) { this(configuration, new String[] {checkNotNull(tmpDirectory)}); } public TestingTaskManagerRuntimeInfo(Configuration configuration, String[] tmpDirectories) { this( configuration, tmpDirectories, InetAddress.getLoopbackAddress().getHostAddress(), new File( EnvironmentInformation.getTemporaryFileDirectory(), "tmp_" + UUID.randomUUID())); } public TestingTaskManagerRuntimeInfo( Configuration configuration, String[] tmpDirectories, String taskManagerExternalAddress, File tmpWorkingDirectory) { this.configuration = configuration; this.tmpDirectories = tmpDirectories; this.taskManagerExternalAddress = taskManagerExternalAddress; this.tmpWorkingDirectory = tmpWorkingDirectory; } @Override public Configuration getConfiguration() { return configuration; } @Override public String[] getTmpDirectories() { return tmpDirectories; } @Override public boolean shouldExitJvmOnOutOfMemoryError() { return false; } @Override public String getTaskManagerExternalAddress() { return taskManagerExternalAddress; } @Override public File getTmpWorkingDirectory() { return tmpWorkingDirectory; } }
class TestingTaskManagerRuntimeInfo implements TaskManagerRuntimeInfo { private final Configuration configuration; private final String[] tmpDirectories; private final String taskManagerExternalAddress; private final File tmpWorkingDirectory; public TestingTaskManagerRuntimeInfo(Configuration configuration) { this(configuration, EnvironmentInformation.getTemporaryFileDirectory()); } public TestingTaskManagerRuntimeInfo(Configuration configuration, File tmpWorkingDirectory) { this( configuration, new String[] {EnvironmentInformation.getTemporaryFileDirectory()}, InetAddress.getLoopbackAddress().getHostAddress(), tmpWorkingDirectory); } public TestingTaskManagerRuntimeInfo(Configuration configuration, String tmpDirectory) { this(configuration, new String[] {checkNotNull(tmpDirectory)}); } public TestingTaskManagerRuntimeInfo(Configuration configuration, String[] tmpDirectories) { this( configuration, tmpDirectories, InetAddress.getLoopbackAddress().getHostAddress(), new File( EnvironmentInformation.getTemporaryFileDirectory(), "tmp_" + UUID.randomUUID())); } public TestingTaskManagerRuntimeInfo( Configuration configuration, String[] tmpDirectories, String taskManagerExternalAddress, File tmpWorkingDirectory) { this.configuration = configuration; this.tmpDirectories = tmpDirectories; this.taskManagerExternalAddress = taskManagerExternalAddress; this.tmpWorkingDirectory = tmpWorkingDirectory; } @Override public Configuration getConfiguration() { return configuration; } @Override public String[] getTmpDirectories() { return tmpDirectories; } @Override public boolean shouldExitJvmOnOutOfMemoryError() { return false; } @Override public String getTaskManagerExternalAddress() { return taskManagerExternalAddress; } @Override public File getTmpWorkingDirectory() { return tmpWorkingDirectory; } }
Would this possibly happen? > Thread1: call subpartition.createReadView() - create view1 > Thread2: obtain a reference to view1 It is not possible to access to view1 through a different thread, unless downstream reconnects, meaning either thread1 releases the view upon disconnecting from downstream or a different thread (thread2) reconnects and release the view; that would be guarded by the buffer lock as you suggested. > Thread1: call subpartition.createReadView() - create view2 > Thread2: call view1.releaseAllResources <-- nulls out subpartition.readView; view2 is now corrupt? same as above.
/**
 * Releases the current read view, if any, so a fresh view can be created for a reconnecting
 * downstream task, and resets the per-view consumption state.
 *
 * <p>Must be invoked while holding the {@code buffers} lock: the view reference is also
 * touched by the netty thread serving the consumer, and an unsynchronized release could
 * corrupt a concurrently created view.
 */
void releaseView() {
    // NOTE(review): enforce the locking contract instead of relying on callers silently.
    assert Thread.holdsLock(buffers);
    if (readView != null) {
        LOG.debug(
                "Releasing view of subpartition {} of {}.",
                getSubPartitionIndex(),
                parent.getPartitionId());
        // Release the view's resources before dropping the reference; merely nulling the
        // field would leak whatever the view still holds.
        readView.releaseAllResources();
        readView = null;
        // The next view may start mid-record, so the first buffer served to it needs
        // partial-record cleanup.
        isPartialBuffer = true;
        isBlockedByCheckpoint = false;
        sequenceNumber = 0;
    }
}
readView = null;
void releaseView() { assert Thread.holdsLock(buffers); if (readView != null) { LOG.debug("Releasing view of subpartition {} of {}.", getSubPartitionIndex(), parent.getPartitionId()); readView.releaseAllResources(); readView = null; isPartialBufferCleanupRequired = true; isBlockedByCheckpoint = false; sequenceNumber = 0; } }
class PipelinedApproximateSubpartition extends PipelinedSubpartition { private static final Logger LOG = LoggerFactory.getLogger(PipelinedApproximateSubpartition.class); private boolean isPartialBuffer = false; PipelinedApproximateSubpartition(int index, ResultPartition parent) { super(index, parent); } @Override public PipelinedSubpartitionView createReadView(BufferAvailabilityListener availabilityListener) { synchronized (buffers) { checkState(!isReleased); if (readView != null) { LOG.info("{} ReadView for Subpartition {} of {} has not been released!", parent.getOwningTaskName(), getSubPartitionIndex(), parent.getPartitionId()); releaseView(); } LOG.debug("{}: Creating read view for subpartition {} of partition {}.", parent.getOwningTaskName(), getSubPartitionIndex(), parent.getPartitionId()); readView = new PipelinedApproximateSubpartitionView(this, availabilityListener); } return readView; } @Override Buffer buildSliceBuffer(BufferConsumerWithPartialRecordLength buffer) { if (isPartialBuffer) { isPartialBuffer = !buffer.cleanupPartialRecord(); } return buffer.build(); } @Override public String toString() { final long numBuffers; final long numBytes; final boolean finished; final boolean hasReadView; synchronized (buffers) { numBuffers = getTotalNumberOfBuffers(); numBytes = getTotalNumberOfBytes(); finished = isFinished; hasReadView = readView != null; } return String.format( "PipelinedApproximateSubpartition getSubPartitionIndex(), numBuffers, numBytes, getBuffersInBacklog(), finished, hasReadView); } @VisibleForTesting public boolean isPartialBuffer() { return isPartialBuffer; } }
class PipelinedApproximateSubpartition extends PipelinedSubpartition { private static final Logger LOG = LoggerFactory.getLogger(PipelinedApproximateSubpartition.class); @GuardedBy("buffers") private boolean isPartialBufferCleanupRequired = false; PipelinedApproximateSubpartition(int index, ResultPartition parent) { super(index, parent); } /** * To simply the view releasing threading model, {@link PipelinedApproximateSubpartition * only before creating a new view. * * <p>There is still one corner case when a downstream task fails continuously in a short period of time * then multiple netty worker threads can createReadView at the same time. * TODO: This problem will be solved in FLINK-19774 */ @Override public PipelinedSubpartitionView createReadView(BufferAvailabilityListener availabilityListener) { synchronized (buffers) { checkState(!isReleased); releaseView(); LOG.debug("{}: Creating read view for subpartition {} of partition {}.", parent.getOwningTaskName(), getSubPartitionIndex(), parent.getPartitionId()); readView = new PipelinedApproximateSubpartitionView(this, availabilityListener); } return readView; } @Override Buffer buildSliceBuffer(BufferConsumerWithPartialRecordLength buffer) { if (isPartialBufferCleanupRequired) { isPartialBufferCleanupRequired = !buffer.cleanupPartialRecord(); } return buffer.build(); } private /** for testing only. */ @VisibleForTesting boolean isPartialBufferCleanupRequired() { return isPartialBufferCleanupRequired; } /** for testing only. */ @VisibleForTesting void setIsPartialBufferCleanupRequired() { isPartialBufferCleanupRequired = true; } }
this would be clearer and simpler: `for (Expr aggFnExpr : aggFnExprList) { for (Expr expr : groupByClause.getGroupingExprs()) { ... } }` — the outer `isEmpty()` check becomes unnecessary, since the loop body simply does not execute for an empty list.
public void analyze(Analyzer analyzer) throws UserException { if (isAnalyzed()) { return; } super.analyze(analyzer); fromClause.setNeedToSql(needToSql); fromClause.analyze(analyzer); if (!analyzer.isWithClause()) { registerIsNotEmptyPredicates(analyzer); } for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { TableName tblName = item.getTblName(); if (tblName == null) { expandStar(analyzer); } else { expandStar(analyzer, tblName); } } else { item.getExpr().analyze(analyzer); if (!(item.getExpr() instanceof CaseExpr) && item.getExpr().contains(Predicates.instanceOf(Subquery.class))) { throw new AnalysisException("Subquery is not supported in the select list."); } Expr expr = rewriteQueryExprByMvColumnExpr(item.getExpr(), analyzer); resultExprs.add(expr); SlotRef aliasRef = new SlotRef(null, item.toColumnLabel()); Expr existingAliasExpr = aliasSMap.get(aliasRef); if (existingAliasExpr != null && !existingAliasExpr.equals(item.getExpr())) { ambiguousAliasList.add(aliasRef); } aliasSMap.put(aliasRef, item.getExpr().clone()); colLabels.add(item.toColumnLabel()); } } if (groupByClause != null && groupByClause.isGroupByExtension()) { ArrayList<Expr> aggFnExprList = new ArrayList<>(); for (SelectListItem item : selectList.getItems()) { aggFnExprList.clear(); getAggregateFnExpr(item.getExpr(), aggFnExprList); if (!aggFnExprList.isEmpty()) { for (Expr expr : groupByClause.getGroupingExprs()) { for (Expr aggFnExpr : aggFnExprList) { if (aggFnExpr.contains(expr)) { throw new AnalysisException("column: " + expr.toSql() + " cannot both in select " + "list and aggregate functions when using GROUPING SETS/CUBE/ROLLUP, " + "please use union instead."); } } } } } groupingInfo = new GroupingInfo(analyzer, groupByClause); groupingInfo.substituteGroupingFn(resultExprs, analyzer); } else { for (Expr expr : resultExprs) { if (checkGroupingFn(expr)) { throw new AnalysisException( "cannot use GROUPING functions without [grouping sets|rollup|cube] " + "clause or grouping 
sets only have one element."); } } } if (valueList != null) { if (!fromInsert) { valueList.analyzeForSelect(analyzer); } for (Expr expr : valueList.getFirstRow()) { if (expr instanceof DefaultValueExpr) { resultExprs.add(new IntLiteral(1)); } else { resultExprs.add(expr); } colLabels.add(expr.toColumnLabel()); } } if (needToSql) { originalExpr = Expr.cloneList(resultExprs); } Expr.analyze(resultExprs, analyzer); if (TreeNode.contains(resultExprs, AnalyticExpr.class)) { if (fromClause.isEmpty()) { throw new AnalysisException("Analytic expressions require FROM clause."); } if (selectList.isDistinct()) { throw new AnalysisException( "cannot combine SELECT DISTINCT with analytic functions"); } } whereClauseRewrite(); if (whereClause != null) { if (checkGroupingFn(whereClause)) { throw new AnalysisException("grouping operations are not allowed in WHERE."); } whereClause.analyze(analyzer); if (whereClause.containsAggregate()) { ErrorReport.reportAnalysisException(ErrorCode.ERR_INVALID_GROUP_FUNC_USE); } whereClause.checkReturnsBool("WHERE clause", false); Expr e = whereClause.findFirstOf(AnalyticExpr.class); if (e != null) { throw new AnalysisException( "WHERE clause must not contain analytic expressions: " + e.toSql()); } analyzer.registerConjuncts(whereClause, false, getTableRefIds()); } createSortInfo(analyzer); if (sortInfo != null && CollectionUtils.isNotEmpty(sortInfo.getOrderingExprs())) { if (groupingInfo != null) { List<Expr> orderingExprNotInSelect = sortInfo.getOrderingExprs().stream() .filter(item -> !resultExprs.contains(item)).collect(Collectors.toList()); groupingInfo.substituteGroupingFn(orderingExprNotInSelect, analyzer); } } analyzeAggregation(analyzer); createAnalyticInfo(analyzer); if (evaluateOrderBy) { createSortTupleInfo(analyzer); } if (needToSql) { sqlString = toSql(); } if (analyzer.enableStarJoinReorder()) { LOG.debug("use old reorder logical in select stmt"); reorderTable(analyzer); } resolveInlineViewRefs(analyzer); if 
(analyzer.hasEmptySpjResultSet() && aggInfo == null) { analyzer.setHasEmptyResultSet(); } if (aggInfo != null) { if (LOG.isDebugEnabled()) { LOG.debug("post-analysis " + aggInfo.debugString()); } } if (hasOutFileClause()) { outFileClause.analyze(analyzer, resultExprs); } }
if (!aggFnExprList.isEmpty()) {
/**
 * Performs semantic analysis of this SELECT statement.
 *
 * The steps run in a fixed order that later phases depend on:
 * FROM clause analysis, select-list expansion and alias registration,
 * GROUPING SETS/CUBE/ROLLUP validation, VALUES-list analysis, WHERE clause
 * analysis and conjunct registration, ORDER BY setup, aggregation and
 * analytic-function analysis, table reordering, and inline-view resolution.
 *
 * @param analyzer the analyzer carrying scope/descriptor state for this block
 * @throws UserException on any semantic error (subquery in select list,
 *         aggregate in WHERE, GROUPING misuse, etc.)
 */
public void analyze(Analyzer analyzer) throws UserException {
    // Re-entrant guard: a statement is analyzed at most once.
    if (isAnalyzed()) {
        return;
    }
    super.analyze(analyzer);
    fromClause.setNeedToSql(needToSql);
    fromClause.analyze(analyzer);

    // Register !empty() predicates for collection refs, except inside a
    // WITH clause. NOTE(review): registerIsNotEmptyPredicates' body is
    // currently entirely commented out, so this call is a no-op.
    if (!analyzer.isWithClause()) {
        registerIsNotEmptyPredicates(analyzer);
    }

    // Analyze the select list: expand '*' / '<tbl>.*' items, analyze plain
    // expressions, and record column labels and alias mappings.
    for (SelectListItem item : selectList.getItems()) {
        if (item.isStar()) {
            TableName tblName = item.getTblName();
            if (tblName == null) {
                // Bare '*': expand over every table in the FROM clause.
                expandStar(analyzer);
            } else {
                // Qualified '<tbl>.*': expand only that table's columns.
                expandStar(analyzer, tblName);
            }
        } else {
            item.getExpr().analyze(analyzer);
            if (!(item.getExpr() instanceof CaseExpr)
                    && item.getExpr().contains(Predicates.instanceOf(Subquery.class))) {
                throw new AnalysisException("Subquery is not supported in the select list.");
            }
            // Rewrite the expression against materialized-view columns where
            // applicable before it becomes a result expr.
            Expr expr = rewriteQueryExprByMvColumnExpr(item.getExpr(), analyzer);
            resultExprs.add(expr);
            SlotRef aliasRef = new SlotRef(null, item.toColumnLabel());
            Expr existingAliasExpr = aliasSMap.get(aliasRef);
            if (existingAliasExpr != null && !existingAliasExpr.equals(item.getExpr())) {
                // Same alias already bound to a different expression: remember
                // it as ambiguous; an error is raised only if the alias is used.
                ambiguousAliasList.add(aliasRef);
            }
            aliasSMap.put(aliasRef, item.getExpr().clone());
            colLabels.add(item.toColumnLabel());
        }
    }

    if (groupByClause != null && groupByClause.isGroupByExtension()) {
        // GROUPING SETS / CUBE / ROLLUP: a column may not appear both as a
        // grouping expression and inside an aggregate function.
        ArrayList<Expr> aggFnExprList = new ArrayList<>();
        for (SelectListItem item : selectList.getItems()) {
            aggFnExprList.clear();
            getAggregateFnExpr(item.getExpr(), aggFnExprList);
            for (Expr aggFnExpr : aggFnExprList) {
                for (Expr expr : groupByClause.getGroupingExprs()) {
                    if (aggFnExpr.contains(expr)) {
                        throw new AnalysisException("column: " + expr.toSql() + " cannot both in select "
                                + "list and aggregate functions when using GROUPING SETS/CUBE/ROLLUP, "
                                + "please use union instead.");
                    }
                }
            }
        }
        groupingInfo = new GroupingInfo(analyzer, groupByClause);
        groupingInfo.substituteGroupingFn(resultExprs, analyzer);
    } else {
        // Without a grouping extension, GROUPING()/GROUPING_ID() calls in the
        // select list are invalid.
        for (Expr expr : resultExprs) {
            if (checkGroupingFn(expr)) {
                throw new AnalysisException(
                        "cannot use GROUPING functions without [grouping sets|rollup|cube] "
                                + "clause or grouping sets only have one element.");
            }
        }
    }

    // VALUES clause: analyze the literal rows; a DefaultValueExpr is replaced
    // here by a literal 1 placeholder (presumably the real column default is
    // applied on the INSERT path -- TODO confirm).
    if (valueList != null) {
        if (!fromInsert) {
            valueList.analyzeForSelect(analyzer);
        }
        for (Expr expr : valueList.getFirstRow()) {
            if (expr instanceof DefaultValueExpr) {
                resultExprs.add(new IntLiteral(1));
            } else {
                resultExprs.add(expr);
            }
            colLabels.add(expr.toColumnLabel());
        }
    }

    // Snapshot the result exprs before analysis mutates them, so toSql()
    // can reproduce the original statement text.
    if (needToSql) {
        originalExpr = Expr.cloneList(resultExprs);
    }
    Expr.analyze(resultExprs, analyzer);

    // Analytic (window) functions need a FROM clause and are incompatible
    // with SELECT DISTINCT.
    if (TreeNode.contains(resultExprs, AnalyticExpr.class)) {
        if (fromClause.isEmpty()) {
            throw new AnalysisException("Analytic expressions require FROM clause.");
        }
        if (selectList.isDistinct()) {
            throw new AnalysisException(
                    "cannot combine SELECT DISTINCT with analytic functions");
        }
    }

    // Normalize a constant integer WHERE clause (e.g. WHERE 1) to a BoolLiteral.
    whereClauseRewrite();
    if (whereClause != null) {
        if (checkGroupingFn(whereClause)) {
            throw new AnalysisException("grouping operations are not allowed in WHERE.");
        }
        whereClause.analyze(analyzer);
        // Aggregates and analytic expressions are both illegal in WHERE.
        if (whereClause.containsAggregate()) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_INVALID_GROUP_FUNC_USE);
        }
        whereClause.checkReturnsBool("WHERE clause", false);
        Expr e = whereClause.findFirstOf(AnalyticExpr.class);
        if (e != null) {
            throw new AnalysisException(
                    "WHERE clause must not contain analytic expressions: " + e.toSql());
        }
        analyzer.registerConjuncts(whereClause, false, getTableRefIds());
    }

    createSortInfo(analyzer);
    if (sortInfo != null && CollectionUtils.isNotEmpty(sortInfo.getOrderingExprs())) {
        if (groupingInfo != null) {
            // ORDER BY expressions that do not appear in the select list still
            // need their GROUPING()/GROUPING_ID() calls substituted.
            List<Expr> orderingExprNotInSelect = sortInfo.getOrderingExprs().stream()
                    .filter(item -> !resultExprs.contains(item)).collect(Collectors.toList());
            groupingInfo.substituteGroupingFn(orderingExprNotInSelect, analyzer);
        }
    }

    analyzeAggregation(analyzer);
    createAnalyticInfo(analyzer);
    if (evaluateOrderBy) {
        createSortTupleInfo(analyzer);
    }
    if (needToSql) {
        sqlString = toSql();
    }
    if (analyzer.enableStarJoinReorder()) {
        // Legacy join-reorder heuristic (largest table first).
        LOG.debug("use old reorder logical in select stmt");
        reorderTable(analyzer);
    }
    resolveInlineViewRefs(analyzer);

    // A provably empty scan/project/join result with no aggregation means the
    // whole statement produces an empty result set.
    if (analyzer.hasEmptySpjResultSet() && aggInfo == null) {
        analyzer.setHasEmptyResultSet();
    }
    if (aggInfo != null) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("post-analysis " + aggInfo.debugString());
        }
    }
    if (hasOutFileClause()) {
        outFileClause.analyze(analyzer, resultExprs);
    }
}
class SelectStmt extends QueryStmt { private static final Logger LOG = LogManager.getLogger(SelectStmt.class); private UUID id = UUID.randomUUID(); protected SelectList selectList; private final ArrayList<String> colLabels; protected final FromClause fromClause; protected GroupByClause groupByClause; private List<Expr> originalExpr; private Expr havingClause; protected Expr whereClause; private Expr havingPred; private AggregateInfo aggInfo; private AnalyticInfo analyticInfo; private ExprSubstitutionMap baseTblSmap = new ExprSubstitutionMap(); private ValueList valueList; private GroupingInfo groupingInfo; private Expr havingClauseAfterAnaylzed; protected String sqlString; private TableAliasGenerator tableAliasGenerator = null; private SelectList originSelectList; public SelectStmt(ValueList valueList, ArrayList<OrderByElement> orderByElement, LimitElement limitElement) { super(orderByElement, limitElement); this.valueList = valueList; this.selectList = new SelectList(); this.fromClause = new FromClause(); this.colLabels = Lists.newArrayList(); } SelectStmt( SelectList selectList, FromClause fromClause, Expr wherePredicate, GroupByClause groupByClause, Expr havingPredicate, ArrayList<OrderByElement> orderByElements, LimitElement limitElement) { super(orderByElements, limitElement); this.selectList = selectList; this.originSelectList = selectList.clone(); if (fromClause == null) { this.fromClause = new FromClause(); } else { this.fromClause = fromClause; } this.whereClause = wherePredicate; this.groupByClause = groupByClause; this.havingClause = havingPredicate; this.colLabels = Lists.newArrayList(); this.havingPred = null; this.aggInfo = null; this.sortInfo = null; this.groupingInfo = null; } protected SelectStmt(SelectStmt other) { super(other); this.id = other.id; selectList = other.selectList.clone(); fromClause = other.fromClause.clone(); whereClause = (other.whereClause != null) ? 
other.whereClause.clone() : null; groupByClause = (other.groupByClause != null) ? other.groupByClause.clone() : null; havingClause = (other.havingClause != null) ? other.havingClause.clone() : null; havingClauseAfterAnaylzed = other.havingClauseAfterAnaylzed != null ? other.havingClauseAfterAnaylzed.clone() : null; colLabels = Lists.newArrayList(other.colLabels); aggInfo = (other.aggInfo != null) ? other.aggInfo.clone() : null; analyticInfo = (other.analyticInfo != null) ? other.analyticInfo.clone() : null; sqlString = (other.sqlString != null) ? other.sqlString : null; baseTblSmap = other.baseTblSmap.clone(); groupingInfo = null; } @Override public void reset() { super.reset(); selectList.reset(); colLabels.clear(); fromClause.reset(); if (whereClause != null) { whereClause.reset(); } if (groupByClause != null) { groupByClause.reset(); } if (havingClause != null) { havingClause.reset(); } havingClauseAfterAnaylzed = null; havingPred = null; aggInfo = null; analyticInfo = null; baseTblSmap.clear(); groupingInfo = null; } @Override public void resetSelectList() { if (originSelectList != null) { selectList = originSelectList; } } @Override public QueryStmt clone() { return new SelectStmt(this); } public UUID getId() { return id; } /** * @return the original select list items from the query */ public SelectList getSelectList() { return selectList; } public void setSelectList(SelectList selectList) { this.selectList = selectList; } public ValueList getValueList() { return valueList; } /** * @return the HAVING clause post-analysis and with aliases resolved */ public Expr getHavingPred() { return havingPred; } public Expr getHavingClauseAfterAnaylzed() { return havingClauseAfterAnaylzed; } public List<TableRef> getTableRefs() { return fromClause.getTableRefs(); } public Expr getWhereClause() { return whereClause; } public void setWhereClause(Expr whereClause) { this.whereClause = whereClause; } public AggregateInfo getAggInfo() { return aggInfo; } public GroupingInfo 
getGroupingInfo() { return groupingInfo; } public GroupByClause getGroupByClause() { return groupByClause; } public AnalyticInfo getAnalyticInfo() { return analyticInfo; } public boolean hasAnalyticInfo() { return analyticInfo != null; } public boolean hasHavingClause() { return havingClause != null; } public void removeHavingClause() { havingClause = null; } @Override public SortInfo getSortInfo() { return sortInfo; } @Override public ArrayList<String> getColLabels() { return colLabels; } public ExprSubstitutionMap getBaseTblSmap() { return baseTblSmap; } @Override public void getTables(Analyzer analyzer, boolean expandView, Map<Long, TableIf> tableMap, Set<String> parentViewNameSet) throws AnalysisException { getWithClauseTables(analyzer, expandView, tableMap, parentViewNameSet); for (TableRef tblRef : fromClause) { if (tblRef instanceof InlineViewRef) { QueryStmt inlineStmt = ((InlineViewRef) tblRef).getViewStmt(); inlineStmt.getTables(analyzer, expandView, tableMap, parentViewNameSet); } else if (tblRef instanceof TableValuedFunctionRef) { TableValuedFunctionRef tblFuncRef = (TableValuedFunctionRef) tblRef; tableMap.put(tblFuncRef.getTableFunction().getTable().getId(), tblFuncRef.getTableFunction().getTable()); } else { String dbName = tblRef.getName().getDb(); String tableName = tblRef.getName().getTbl(); if (Strings.isNullOrEmpty(dbName)) { dbName = analyzer.getDefaultDb(); } else { dbName = ClusterNamespace.getFullName(analyzer.getClusterName(), tblRef.getName().getDb()); } if (isViewTableRef(tblRef.getName().toString(), parentViewNameSet)) { continue; } tblRef.getName().analyze(analyzer); DatabaseIf db = analyzer.getEnv().getCatalogMgr() .getCatalogOrAnalysisException(tblRef.getName().getCtl()).getDbOrAnalysisException(dbName); TableIf table = db.getTableOrAnalysisException(tableName); if (expandView && (table instanceof View)) { View view = (View) table; view.getQueryStmt().getTables(analyzer, expandView, tableMap, parentViewNameSet); } else { if 
(!Env.getCurrentEnv().getAuth() .checkTblPriv(ConnectContext.get(), tblRef.getName(), PrivPredicate.SELECT)) { ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SELECT", ConnectContext.get().getQualifiedUser(), ConnectContext.get().getRemoteIP(), dbName + ": " + tableName); } tableMap.put(table.getId(), table); } } } } @Override public void getTableRefs(Analyzer analyzer, List<TableRef> tblRefs, Set<String> parentViewNameSet) { getWithClauseTableRefs(analyzer, tblRefs, parentViewNameSet); for (TableRef tblRef : fromClause) { try { TableRef tmpTblRef = analyzer.resolveTableRef(tblRef); if (tmpTblRef instanceof InlineViewRef) { QueryStmt inlineStmt = ((InlineViewRef) tmpTblRef).getViewStmt(); inlineStmt.getTableRefs(analyzer, tblRefs, parentViewNameSet); } else { if (isViewTableRef(tmpTblRef.getName().toString(), parentViewNameSet)) { continue; } tblRefs.add(tmpTblRef); } } catch (AnalysisException e) { } } } private boolean isViewTableRef(String tblName, Set<String> parentViewNameSet) { if (parentViewNameSet.contains(tblName)) { return true; } if (withClause != null) { List<View> views = withClause.getViews(); for (View view : views) { if (view.getName().equals(tblName)) { return true; } } } return false; } private ColumnAliasGenerator columnAliasGenerator = null; public ColumnAliasGenerator getColumnAliasGenerator() { if (columnAliasGenerator == null) { columnAliasGenerator = new ColumnAliasGenerator(colLabels, null); } return columnAliasGenerator; } public TableAliasGenerator getTableAliasGenerator() { if (tableAliasGenerator == null) { tableAliasGenerator = new TableAliasGenerator(analyzer, null); } return tableAliasGenerator; } public void setTableAliasGenerator(TableAliasGenerator tableAliasGenerator) { this.tableAliasGenerator = tableAliasGenerator; } public List<TupleId> getTableRefIds() { List<TupleId> result = Lists.newArrayList(); for (TableRef ref : fromClause) { result.add(ref.getId()); } return result; } public List<TupleId> 
getTableRefIdsWithoutInlineView() { List<TupleId> result = Lists.newArrayList(); for (TableRef ref : fromClause) { if (ref instanceof InlineViewRef) { continue; } result.add(ref.getId()); } return result; } public boolean hasInlineView() { for (TableRef ref : fromClause) { if (ref instanceof InlineViewRef) { return true; } } return false; } @Override public List<TupleId> collectTupleIds() { List<TupleId> result = Lists.newArrayList(); resultExprs.stream().forEach(expr -> expr.getIds(result, null)); result.addAll(getTableRefIds()); if (whereClause != null) { whereClause.getIds(result, null); } if (havingClauseAfterAnaylzed != null) { havingClauseAfterAnaylzed.getIds(result, null); } return result; } private void whereClauseRewrite() { if (whereClause instanceof IntLiteral) { if (((IntLiteral) whereClause).getLongValue() == 0) { whereClause = new BoolLiteral(false); } else { whereClause = new BoolLiteral(true); } } } /** * Generates and registers !empty() predicates to filter out empty collections directly * in the parent scan of collection table refs. This is a performance optimization to * avoid the expensive processing of empty collections inside a subplan that would * yield an empty result set. * <p> * For correctness purposes, the predicates are generated in cases where we can ensure * that they will be assigned only to the parent scan, and no other plan node. * <p> * The conditions are as follows: * - collection table ref is relative and non-correlated * - collection table ref represents the rhs of an inner/cross/semi join * - collection table ref's parent tuple is not outer joined * <p> * TODO: In some cases, it is possible to generate !empty() predicates for a correlated * table ref, but in general, that is not correct for non-trivial query blocks. * For example, if the block with the correlated ref has an aggregation then adding a * !empty() predicate would incorrectly discard rows from the final result set. 
* TODO: Evaluating !empty() predicates at non-scan nodes interacts poorly with our BE * projection of collection slots. For example, rows could incorrectly be filtered if * a !empty() predicate is assigned to a plan node that comes after the unnest of the * collection that also performs the projection. */ private void registerIsNotEmptyPredicates(Analyzer analyzer) throws AnalysisException { /* for (TableRef tblRef: fromClause_.getTableRefs()) { Preconditions.checkState(tblRef.isResolved()); if (!(tblRef instanceof CollectionTableRef)) continue; CollectionTableRef ref = (CollectionTableRef) tblRef; if (!ref.isRelative() || ref.isCorrelated()) continue; if (ref.getJoinOp().isOuterJoin() || ref.getJoinOp().isAntiJoin()) continue; if (analyzer.isOuterJoined(ref.getResolvedPath().getRootDesc().getId())) continue; IsNotEmptyPredicate isNotEmptyPred = new IsNotEmptyPredicate(ref.getCollectionExpr().clone()); isNotEmptyPred.analyze(analyzer); analyzer.registerOnClauseConjuncts( Lists.<Expr>newArrayList(isNotEmptyPred), ref); } */ } /** * Marks all unassigned join predicates as well as exprs in aggInfo and sortInfo. 
*/ public void materializeRequiredSlots(Analyzer analyzer) throws AnalysisException { List<Expr> unassigned = analyzer.getUnassignedConjuncts(getTableRefIds(), true); List<Expr> unassignedJoinConjuncts = Lists.newArrayList(); for (Expr e : unassigned) { if (analyzer.evalAfterJoin(e)) { unassignedJoinConjuncts.add(e); } } List<Expr> baseTblJoinConjuncts = Expr.trySubstituteList(unassignedJoinConjuncts, baseTblSmap, analyzer, false); analyzer.materializeSlots(baseTblJoinConjuncts); if (evaluateOrderBy) { sortInfo.materializeRequiredSlots(analyzer, baseTblSmap); } if (hasAnalyticInfo()) { ArrayList<TupleId> tids = Lists.newArrayList(); getMaterializedTupleIds(tids); List<Expr> conjuncts = analyzer.getUnassignedConjuncts(tids); analyzer.materializeSlots(conjuncts); analyticInfo.materializeRequiredSlots(analyzer, baseTblSmap); } if (aggInfo != null) { ArrayList<Expr> havingConjuncts = Lists.newArrayList(); if (havingPred != null) { havingConjuncts.add(havingPred); } havingConjuncts.addAll( analyzer.getUnassignedConjuncts(aggInfo.getResultTupleId().asList())); materializeSlots(analyzer, havingConjuncts); aggInfo.materializeRequiredSlots(analyzer, baseTblSmap); } for (TableRef tableRef : fromClause.getTableRefs()) { if (tableRef.lateralViewRefs != null) { for (LateralViewRef lateralViewRef : tableRef.lateralViewRefs) { lateralViewRef.materializeRequiredSlots(baseTblSmap, analyzer); } } } } protected void reorderTable(Analyzer analyzer) throws AnalysisException { List<Pair<TableRef, Long>> candidates = Lists.newArrayList(); ArrayList<TableRef> originOrderBackUp = Lists.newArrayList(fromClause.getTableRefs()); for (TableRef tblRef : fromClause) { if (tblRef.getJoinOp() != JoinOperator.INNER_JOIN || tblRef.hasJoinHints()) { break; } long rowCount = 0; if (tblRef.getTable().getType() == TableType.OLAP) { rowCount = ((OlapTable) (tblRef.getTable())).getRowCount(); LOG.debug("tableName={} rowCount={}", tblRef.getAlias(), rowCount); } candidates.add(Pair.of(tblRef, rowCount)); } 
int reorderTableCount = candidates.size(); if (reorderTableCount < originOrderBackUp.size()) { fromClause.clear(); fromClause.addAll(originOrderBackUp.subList(0, reorderTableCount)); } long last = 0; for (int i = candidates.size() - 1; i >= 0; --i) { Pair<TableRef, Long> candidate = candidates.get(i); if (candidate.first instanceof InlineViewRef) { candidate.second = last; } last = candidate.second + 1; } Collections.sort(candidates, (a, b) -> b.second.compareTo(a.second)); for (Pair<TableRef, Long> candidate : candidates) { if (reorderTable(analyzer, candidate.first)) { if (reorderTableCount < originOrderBackUp.size()) { fromClause.addAll(originOrderBackUp.subList(reorderTableCount, originOrderBackUp.size())); } return; } } fromClause.clear(); for (TableRef tableRef : originOrderBackUp) { fromClause.add(tableRef); } } protected boolean reorderTable(Analyzer analyzer, TableRef firstRef) throws AnalysisException { List<TableRef> tmpRefList = Lists.newArrayList(); Map<TupleId, TableRef> tableRefMap = Maps.newHashMap(); for (TableRef tblRef : fromClause) { tableRefMap.put(tblRef.getId(), tblRef); tmpRefList.add(tblRef); } fromClause.clear(); fromClause.add(firstRef); tableRefMap.remove(firstRef.getId()); Set<TupleId> validTupleId = Sets.newHashSet(); validTupleId.add(firstRef.getId()); int i = 0; while (i < fromClause.size()) { TableRef tblRef = fromClause.get(i); List<Expr> eqJoinPredicates = analyzer.getEqJoinConjuncts(tblRef.getId()); List<TupleId> tupleList = Lists.newArrayList(); Expr.getIds(eqJoinPredicates, tupleList, null); for (TupleId tid : tupleList) { if (validTupleId.contains(tid)) { continue; } TableRef candidateTableRef = tableRefMap.get(tid); if (candidateTableRef != null) { Preconditions.checkState(tid == candidateTableRef.getId()); List<Expr> candidateEqJoinPredicates = analyzer.getEqJoinConjunctsExcludeAuxPredicates(tid); for (Expr candidateEqJoinPredicate : candidateEqJoinPredicates) { List<TupleId> candidateTupleList = Lists.newArrayList(); 
Expr.getIds(Lists.newArrayList(candidateEqJoinPredicate), candidateTupleList, null); int count = candidateTupleList.size(); for (TupleId tupleId : candidateTupleList) { if (validTupleId.contains(tupleId) || tid.equals(tupleId)) { count--; } } if (count == 0) { fromClause.add(candidateTableRef); validTupleId.add(tid); tableRefMap.remove(tid); break; } } } } i++; } if (0 != tableRefMap.size()) { fromClause.clear(); fromClause.addAll(tmpRefList); return false; } return true; } /** * Populates baseTblSmap_ with our combined inline view smap and creates * baseTblResultExprs. */ protected void resolveInlineViewRefs(Analyzer analyzer) throws AnalysisException { for (TableRef tblRef : fromClause) { if (tblRef instanceof InlineViewRef) { InlineViewRef inlineViewRef = (InlineViewRef) tblRef; baseTblSmap = ExprSubstitutionMap.combine(baseTblSmap, inlineViewRef.getBaseTblSmap()); } } baseTblResultExprs = Expr.trySubstituteList(resultExprs, baseTblSmap, analyzer, false); if (LOG.isDebugEnabled()) { LOG.debug("baseTblSmap_: " + baseTblSmap.debugString()); LOG.debug("resultExprs: " + Expr.debugString(resultExprs)); LOG.debug("baseTblResultExprs: " + Expr.debugString(baseTblResultExprs)); } } /** * Expand "*" select list item. */ private void expandStar(Analyzer analyzer) throws AnalysisException { if (fromClause.isEmpty()) { ErrorReport.reportAnalysisException(ErrorCode.ERR_NO_TABLES_USED); } for (TableRef tableRef : fromClause) { if (analyzer.isSemiJoined(tableRef.getId())) { continue; } expandStar(new TableName(tableRef.getAliasAsName().getCtl(), tableRef.getAliasAsName().getDb(), tableRef.getAliasAsName().getTbl()), tableRef.getDesc()); if (tableRef.lateralViewRefs != null) { for (LateralViewRef lateralViewRef : tableRef.lateralViewRefs) { expandStar(lateralViewRef.getName(), lateralViewRef.getDesc()); } } } } /** * Expand "<tbl>.*" select list item. 
*/ private void expandStar(Analyzer analyzer, TableName tblName) throws AnalysisException { Collection<TupleDescriptor> descs = analyzer.getDescriptor(tblName); if (descs == null || descs.isEmpty()) { ErrorReport.reportAnalysisException(ErrorCode.ERR_UNKNOWN_TABLE, tblName.getTbl(), tblName.getDb()); } for (TupleDescriptor desc : descs) { expandStar(tblName, desc); } } /** * Expand "*" for a particular tuple descriptor by appending * refs for each column to selectListExprs. */ private void expandStar(TableName tblName, TupleDescriptor desc) { for (Column col : desc.getTable().getBaseSchema()) { resultExprs.add(new SlotRef(tblName, col.getName())); colLabels.add(col.getName()); } } /** * Analyze aggregation-relevant components of the select block (Group By clause, * select list, Order By clause), * Create the AggregationInfo, including the agg output tuple, and transform all post-agg exprs * given AggregationInfo's smap. */ private void analyzeAggregation(Analyzer analyzer) throws AnalysisException { if (havingClause != null) { Expr ambiguousAlias = getFirstAmbiguousAlias(havingClause); if (ambiguousAlias != null) { ErrorReport.reportAnalysisException(ErrorCode.ERR_NON_UNIQ_ERROR, ambiguousAlias.toColumnLabel()); } /* * The having clause need to be substitute by aliasSMap. * And it is analyzed after substitute. 
* For example: * Query: select k1 a, sum(k2) b from table group by k1 having a > 1; * Having clause: a > 1 * aliasSMap: <a, table.k1> <b, sum(table.k2)> * After substitute: a > 1 changed to table.k1 > 1 * Analyzer: check column and other subquery in having clause * having predicate: table.k1 > 1 */ /* * TODO(ml): support substitute outer column in correlated subquery * For example: select k1 key, sum(k1) sum_k1 from table a group by k1 * having k1 > * (select min(k1) from table b where a.key=b.k2); * TODO: the a.key should be replaced by a.k1 instead of unknown column 'key' in 'a' */ havingClauseAfterAnaylzed = havingClause.substitute(aliasSMap, analyzer, false); havingClauseAfterAnaylzed = rewriteQueryExprByMvColumnExpr(havingClauseAfterAnaylzed, analyzer); havingClauseAfterAnaylzed.checkReturnsBool("HAVING clause", true); if (groupingInfo != null) { groupingInfo.substituteGroupingFn(Arrays.asList(havingClauseAfterAnaylzed), analyzer); } Expr analyticExpr = havingClauseAfterAnaylzed.findFirstOf(AnalyticExpr.class); if (analyticExpr != null) { throw new AnalysisException( "HAVING clause must not contain analytic expressions: " + analyticExpr.toSql()); } } if (groupByClause == null && !selectList.isDistinct() && !TreeNode.contains(resultExprs, Expr.isAggregatePredicate()) && (havingClauseAfterAnaylzed == null || !havingClauseAfterAnaylzed.contains( Expr.isAggregatePredicate())) && (sortInfo == null || !TreeNode.contains(sortInfo.getOrderingExprs(), Expr.isAggregatePredicate()))) { if (havingClauseAfterAnaylzed != null) { if (havingClauseAfterAnaylzed.contains(Subquery.class)) { throw new AnalysisException("Only constant expr could be supported in having clause " + "when no aggregation in stmt"); } analyzer.registerConjuncts(havingClauseAfterAnaylzed, true); } return; } if (fromClause.size() == 0) { throw new AnalysisException("Aggregation without a FROM clause is not allowed"); } if (selectList.isDistinct() && groupByClause == null) { List<Expr> aggregateExpr = 
Lists.newArrayList(); TreeNode.collect(resultExprs, Expr.isAggregatePredicate(), aggregateExpr); if (aggregateExpr.size() == resultExprs.size()) { selectList.setIsDistinct(false); } } if (selectList.isDistinct() && (groupByClause != null || TreeNode.contains(resultExprs, Expr.isAggregatePredicate()) || (havingClauseAfterAnaylzed != null && havingClauseAfterAnaylzed.contains( Expr.isAggregatePredicate())))) { throw new AnalysisException("cannot combine SELECT DISTINCT with aggregate functions or GROUP BY"); } if (groupByClause != null || TreeNode.contains(resultExprs, Expr.isAggregatePredicate())) { for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { throw new AnalysisException( "cannot combine '*' in select list with GROUP BY: " + item.toSql()); } } } ArrayList<FunctionCallExpr> aggExprs = Lists.newArrayList(); TreeNode.collect(resultExprs, Expr.isAggregatePredicate(), aggExprs); if (havingClauseAfterAnaylzed != null) { havingClauseAfterAnaylzed.collect(Expr.isAggregatePredicate(), aggExprs); } if (sortInfo != null) { TreeNode.collect(sortInfo.getOrderingExprs(), Expr.isAggregatePredicate(), aggExprs); } ExprSubstitutionMap countAllMap = createCountAllMap(aggExprs, analyzer); final ExprSubstitutionMap multiCountOrSumDistinctMap = createSumOrCountMultiDistinctSMap(aggExprs, analyzer); countAllMap = ExprSubstitutionMap.compose(multiCountOrSumDistinctMap, countAllMap, analyzer); List<Expr> substitutedAggs = Expr.substituteList(aggExprs, countAllMap, analyzer, false); aggExprs.clear(); TreeNode.collect(substitutedAggs, Expr.isAggregatePredicate(), aggExprs); List<TupleId> groupingByTupleIds = new ArrayList<>(); if (groupByClause != null) { groupByClause.genGroupingExprs(); ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingInfo != null) { groupingInfo.buildRepeat(groupingExprs, groupByClause.getGroupingSetList()); } substituteOrdinalsAliases(groupingExprs, "GROUP BY", analyzer); if (!groupByClause.isGroupByExtension()) { 
groupingExprs.removeIf(Expr::isConstant); } if (groupingInfo != null) { groupingInfo.genOutputTupleDescAndSMap(analyzer, groupingExprs, aggExprs); groupingByTupleIds.add(groupingInfo.getOutputTupleDesc().getId()); } groupByClause.analyze(analyzer); createAggInfo(groupingExprs, aggExprs, analyzer); } else { createAggInfo(new ArrayList<>(), aggExprs, analyzer); } AggregateInfo finalAggInfo = aggInfo.getSecondPhaseDistinctAggInfo() != null ? aggInfo.getSecondPhaseDistinctAggInfo() : aggInfo; groupingByTupleIds.add(finalAggInfo.getOutputTupleId()); ExprSubstitutionMap combinedSmap = ExprSubstitutionMap.compose( countAllMap, finalAggInfo.getOutputSmap(), analyzer); if (LOG.isDebugEnabled()) { LOG.debug("combined smap: " + combinedSmap.debugString()); LOG.debug("desctbl: " + analyzer.getDescTbl().debugString()); LOG.debug("resultexprs: " + Expr.debugString(resultExprs)); } if (havingClauseAfterAnaylzed != null) { List<Subquery> subqueryInHaving = Lists.newArrayList(); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryInHaving); for (Subquery subquery : subqueryInHaving) { if (subquery.isCorrelatedPredicate(getTableRefIds())) { throw new AnalysisException("The correlated having clause is not supported"); } } } /* * All of columns of result and having clause are replaced by new slot ref * which is bound by top tuple of agg info. 
* For example: * ResultExprs: SlotRef(k1), FunctionCall(sum(SlotRef(k2))) * Having predicate: FunctionCall(sum(SlotRef(k2))) > subquery * CombinedSMap: <SlotRef(k1) tuple 0, SlotRef(k1) of tuple 3>, * <FunctionCall(SlotRef(k2)) tuple 0, SlotRef(sum(k2)) of tuple 3> * * After rewritten: * ResultExprs: SlotRef(k1) of tuple 3, SlotRef(sum(k2)) of tuple 3 * Having predicate: SlotRef(sum(k2)) of tuple 3 > subquery */ resultExprs = Expr.substituteList(resultExprs, combinedSmap, analyzer, false); if (LOG.isDebugEnabled()) { LOG.debug("post-agg selectListExprs: " + Expr.debugString(resultExprs)); } if (havingClauseAfterAnaylzed != null) { havingPred = havingClauseAfterAnaylzed.substitute(combinedSmap, analyzer, false); analyzer.registerConjuncts(havingPred, true, finalAggInfo.getOutputTupleId().asList()); if (LOG.isDebugEnabled()) { LOG.debug("post-agg havingPred: " + havingPred.debugString()); } } if (sortInfo != null) { sortInfo.substituteOrderingExprs(combinedSmap, analyzer); if (LOG.isDebugEnabled()) { LOG.debug("post-agg orderingExprs: " + Expr.debugString(sortInfo.getOrderingExprs())); } } for (int i = 0; i < selectList.getItems().size(); ++i) { if (!resultExprs.get(i).isBoundByTupleIds(groupingByTupleIds)) { throw new AnalysisException( "select list expression not produced by aggregation output " + "(missing from " + "GROUP BY clause?): " + selectList.getItems().get(i).getExpr().toSql()); } } if (orderByElements != null) { for (int i = 0; i < orderByElements.size(); ++i) { if (!sortInfo.getOrderingExprs().get(i).isBoundByTupleIds(groupingByTupleIds)) { throw new AnalysisException( "ORDER BY expression not produced by aggregation output " + "(missing from " + "GROUP BY clause?): " + orderByElements.get(i).getExpr().toSql()); } if (sortInfo.getOrderingExprs().get(i).type.isObjectStored()) { throw new AnalysisException("ORDER BY expression could not contain object-stored columnx."); } } } if (havingPred != null) { if (!havingPred.isBoundByTupleIds(groupingByTupleIds)) 
{ throw new AnalysisException( "HAVING clause not produced by aggregation output " + "(missing from GROUP BY " + "clause?): " + havingClause.toSql()); } } } /** * Build smap count_distinct->multi_count_distinct sum_distinct->multi_count_distinct * assumes that select list and having clause have been analyzed. */ private ExprSubstitutionMap createSumOrCountMultiDistinctSMap( ArrayList<FunctionCallExpr> aggExprs, Analyzer analyzer) throws AnalysisException { final List<FunctionCallExpr> distinctExprs = Lists.newArrayList(); for (FunctionCallExpr aggExpr : aggExprs) { if (aggExpr.isDistinct()) { distinctExprs.add(aggExpr); } } final ExprSubstitutionMap result = new ExprSubstitutionMap(); final boolean isUsingSetForDistinct = AggregateInfo.estimateIfUsingSetForDistinct(distinctExprs); if (!isUsingSetForDistinct) { return result; } for (FunctionCallExpr inputExpr : distinctExprs) { Expr replaceExpr = null; final String functionName = inputExpr.getFnName().getFunction(); if (functionName.equalsIgnoreCase(FunctionSet.COUNT)) { final List<Expr> countInputExpr = Lists.newArrayList(inputExpr.getChild(0).clone(null)); replaceExpr = new FunctionCallExpr("MULTI_DISTINCT_COUNT", new FunctionParams(inputExpr.isDistinct(), countInputExpr)); } else if (functionName.equalsIgnoreCase("SUM")) { final List<Expr> sumInputExprs = Lists.newArrayList(inputExpr.getChild(0).clone(null)); replaceExpr = new FunctionCallExpr("MULTI_DISTINCT_SUM", new FunctionParams(inputExpr.isDistinct(), sumInputExprs)); } else if (functionName.equalsIgnoreCase("AVG")) { final List<Expr> sumInputExprs = Lists.newArrayList(inputExpr.getChild(0).clone(null)); final List<Expr> countInputExpr = Lists.newArrayList(inputExpr.getChild(0).clone(null)); final FunctionCallExpr sumExpr = new FunctionCallExpr("MULTI_DISTINCT_SUM", new FunctionParams(inputExpr.isDistinct(), sumInputExprs)); final FunctionCallExpr countExpr = new FunctionCallExpr("MULTI_DISTINCT_COUNT", new FunctionParams(inputExpr.isDistinct(), 
countInputExpr)); replaceExpr = new ArithmeticExpr(ArithmeticExpr.Operator.DIVIDE, sumExpr, countExpr); } else { throw new AnalysisException(inputExpr.getFnName() + " can't support multi distinct."); } replaceExpr.analyze(analyzer); result.put(inputExpr, replaceExpr); } if (LOG.isDebugEnabled()) { LOG.debug("multi distinct smap: {}", result.debugString()); } return result; } /** * Create a map from COUNT([ALL]) -> zeroifnull(COUNT([ALL])) if * i) There is no GROUP-BY, and * ii) There are other distinct aggregates to be evaluated. * This transformation is necessary for COUNT to correctly return 0 for empty * input relations. */ private ExprSubstitutionMap createCountAllMap( List<FunctionCallExpr> aggExprs, Analyzer analyzer) throws AnalysisException { ExprSubstitutionMap scalarCountAllMap = new ExprSubstitutionMap(); if (groupByClause != null && !groupByClause.isEmpty()) { return scalarCountAllMap; } com.google.common.base.Predicate<FunctionCallExpr> isNotDistinctPred = new com.google.common.base.Predicate<FunctionCallExpr>() { public boolean apply(FunctionCallExpr expr) { return !expr.isDistinct(); } }; if (Iterables.all(aggExprs, isNotDistinctPred)) { return scalarCountAllMap; } com.google.common.base.Predicate<FunctionCallExpr> isCountPred = new com.google.common.base.Predicate<FunctionCallExpr>() { public boolean apply(FunctionCallExpr expr) { return expr.getFnName().getFunction().equals(FunctionSet.COUNT); } }; Iterable<FunctionCallExpr> countAllAggs = Iterables.filter(aggExprs, Predicates.and(isCountPred, isNotDistinctPred)); for (FunctionCallExpr countAllAgg : countAllAggs) { ArrayList<Expr> zeroIfNullParam = Lists.newArrayList(countAllAgg.clone(), new IntLiteral(0, Type.BIGINT)); FunctionCallExpr zeroIfNull = new FunctionCallExpr("ifnull", zeroIfNullParam); zeroIfNull.analyze(analyzer); scalarCountAllMap.put(countAllAgg, zeroIfNull); } return scalarCountAllMap; } /** * Create aggInfo for the given grouping and agg exprs. 
*/
    private void createAggInfo(
            ArrayList<Expr> groupingExprs,
            ArrayList<FunctionCallExpr> aggExprs,
            Analyzer analyzer)
            throws AnalysisException {
        if (selectList.isDistinct()) {
            // SELECT DISTINCT is modeled as grouping on all result exprs with no agg fns;
            // both input lists must therefore be empty here.
            Preconditions.checkState(groupingExprs.isEmpty());
            Preconditions.checkState(aggExprs.isEmpty());
            aggInfo = AggregateInfo.create(Expr.cloneList(resultExprs), null, null, analyzer);
        } else {
            aggInfo = AggregateInfo.create(groupingExprs, aggExprs, null, analyzer);
        }
    }

    /**
     * If the select list contains AnalyticExprs, create AnalyticInfo and substitute
     * AnalyticExprs using the AnalyticInfo's smap.
     */
    private void createAnalyticInfo(Analyzer analyzer) throws AnalysisException {
        // Gather analytic exprs from both the select list and (if present) ORDER BY.
        ArrayList<Expr> analyticExprs = Lists.newArrayList();
        TreeNode.collect(resultExprs, AnalyticExpr.class, analyticExprs);
        if (sortInfo != null) {
            TreeNode.collect(sortInfo.getOrderingExprs(), AnalyticExpr.class, analyticExprs);
        }
        if (analyticExprs.isEmpty()) {
            return;
        }
        // First pass: let AnalyticExpr.rewrite() canonicalize exprs it can simplify;
        // record original -> rewritten mappings (first mapping wins).
        ExprSubstitutionMap rewriteSmap = new ExprSubstitutionMap();
        for (Expr expr : analyticExprs) {
            AnalyticExpr toRewrite = (AnalyticExpr) expr;
            Expr newExpr = AnalyticExpr.rewrite(toRewrite);
            if (newExpr != null) {
                newExpr.analyze(analyzer);
                if (!rewriteSmap.containsMappingFor(toRewrite)) {
                    rewriteSmap.put(toRewrite, newExpr);
                }
            }
        }
        if (rewriteSmap.size() > 0) {
            // Re-collect after rewrite: a rewrite may itself contain analytic exprs.
            ArrayList<Expr> updatedAnalyticExprs = Expr.substituteList(analyticExprs, rewriteSmap, analyzer, false);
            analyticExprs.clear();
            TreeNode.collect(updatedAnalyticExprs, AnalyticExpr.class, analyticExprs);
        }
        analyticInfo = AnalyticInfo.create(analyticExprs, analyzer);
        ExprSubstitutionMap smap = analyticInfo.getSmap();
        if (rewriteSmap.size() > 0) {
            // Compose so that original exprs map all the way to analytic output slots.
            smap = ExprSubstitutionMap.compose(
                    rewriteSmap, analyticInfo.getSmap(), analyzer);
        }
        resultExprs = Expr.substituteList(resultExprs, smap, analyzer, false);
        if (LOG.isDebugEnabled()) {
            LOG.debug("post-analytic selectListExprs: " + Expr.debugString(resultExprs));
        }
        if (sortInfo != null) {
            sortInfo.substituteOrderingExprs(smap, analyzer);
            if (LOG.isDebugEnabled()) {
                LOG.debug("post-analytic orderingExprs: " + Expr.debugString(sortInfo.getOrderingExprs()));
            }
        }
    }

    /**
     * Applies the rewriter to every expr of this stmt: select list, FROM refs,
     * WHERE, HAVING, grouping exprs, ORDER BY, and (recursively) subqueries.
     */
    @Override
    public void rewriteExprs(ExprRewriter rewriter) throws AnalysisException {
        Preconditions.checkState(isAnalyzed());
        rewriteSelectList(rewriter);
        for (TableRef ref : fromClause) {
            ref.rewriteExprs(rewriter, analyzer);
        }
        List<Subquery> subqueryExprs = Lists.newArrayList();
        if (whereClause != null) {
            whereClause = rewriter.rewrite(whereClause, analyzer, ExprRewriter.ClauseType.WHERE_CLAUSE);
            whereClause.collect(Subquery.class, subqueryExprs);
        }
        if (havingClause != null) {
            // NOTE(review): unlike WHERE above, no ClauseType is passed here, and subqueries
            // are collected from the pre-rewrite havingClauseAfterAnaylzed rather than the
            // freshly rewritten havingClause — confirm this asymmetry is intentional.
            havingClause = rewriter.rewrite(havingClause, analyzer);
            havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs);
        }
        for (Subquery subquery : subqueryExprs) {
            subquery.getStatement().rewriteExprs(rewriter);
        }
        if (groupByClause != null) {
            ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs();
            if (groupingExprs != null) {
                rewriter.rewriteList(groupingExprs, analyzer);
            }
            List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs();
            if (oriGroupingExprs != null) {
                rewriter.rewriteList(oriGroupingExprs, analyzer);
            }
        }
        if (orderByElements != null) {
            for (OrderByElement orderByElem : orderByElements) {
                orderByElem.setExpr(rewriter.rewrite(orderByElem.getExpr(), analyzer));
            }
        }
    }

    /**
     * Registers an expr id for every rewritable expr of this stmt and records it in
     * exprMap keyed by that id, recursing into inline views and subqueries, so that
     * putBackExprs() can later restore the (rewritten) exprs by id.
     */
    @Override
    public void collectExprs(Map<String, Expr> exprMap) {
        List<Subquery> subqueryExprs = Lists.newArrayList();
        for (SelectListItem item : selectList.getItems()) {
            if (item.isStar()) {
                continue;
            }
            registerExprId(item.getExpr());
            exprMap.put(item.getExpr().getId().toString(), item.getExpr());
            if (item.getExpr().contains(Predicates.instanceOf(Subquery.class))) {
                item.getExpr().collect(Subquery.class, subqueryExprs);
            }
        }
        for (TableRef ref : fromClause) {
            Preconditions.checkState(ref.isAnalyzed);
            if (ref.onClause != null) {
                registerExprId(ref.onClause);
                exprMap.put(ref.onClause.getId().toString(), ref.onClause);
            }
            if (ref instanceof InlineViewRef) {
                ((InlineViewRef)
ref).getViewStmt().collectExprs(exprMap);
            }
        }
        if (whereClause != null) {
            registerExprId(whereClause);
            exprMap.put(whereClause.getId().toString(), whereClause);
            whereClause.collect(Subquery.class, subqueryExprs);
        }
        if (havingClause != null) {
            // HAVING is tracked via its analyzed form (aliases already resolved).
            registerExprId(havingClauseAfterAnaylzed);
            exprMap.put(havingClauseAfterAnaylzed.getId().toString(), havingClauseAfterAnaylzed);
            havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs);
        }
        for (Subquery subquery : subqueryExprs) {
            registerExprId(subquery);
            subquery.getStatement().collectExprs(exprMap);
        }
        if (groupByClause != null) {
            ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs();
            if (groupingExprs != null) {
                for (Expr expr : groupingExprs) {
                    if (containAlias(expr)) {
                        // Skip alias exprs — see the explanation below.
                        continue;
                    }
                    registerExprId(expr);
                    exprMap.put(expr.getId().toString(), expr);
                }
            }
            List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs();
            if (oriGroupingExprs != null) {
                for (Expr expr : oriGroupingExprs) {
                    /*
                     * Suppose there is a query statement:
                     *
                     * ```
                     * select
                     *     i_item_sk as b
                     * from item
                     * group by b
                     * order by b desc
                     * ```
                     *
                     * where `b` is an alias for `i_item_sk`.
                     *
                     * When analyze is done, it becomes
                     *
                     * ```
                     * SELECT
                     *     `i_item_sk`
                     * FROM `item`
                     * GROUP BY `b`
                     * ORDER BY `b` DESC
                     * ```
                     * Aliases information of groupBy and orderBy clauses is recorded in `QueryStmt.aliasSMap`.
                     * The select clause has its own alias info in `SelectListItem.alias`.
                     *
                     * Aliases expr in the `group by` and `order by` clauses are not analyzed,
                     * i.e. `Expr.isAnalyzed=false`. Subsequent constant folding will analyze the unanalyzed Expr
                     * before collecting the constant expressions, preventing the `INVALID_TYPE` expr from being
                     * sent to BE.
                     *
                     * But when analyzing the alias, the meta information corresponding to the slot cannot be found
                     * in the catalog, an error will be reported.
                     *
                     * So the alias needs to be removed here.
                     */
                    if (containAlias(expr)) {
                        continue;
                    }
                    registerExprId(expr);
                    exprMap.put(expr.getId().toString(), expr);
                }
            }
        }
        if (orderByElements != null) {
            for (OrderByElement orderByElem : orderByElementsAfterAnalyzed) {
                if (containAlias(orderByElem.getExpr())) {
                    // Same alias caveat as GROUP BY above.
                    continue;
                }
                registerExprId(orderByElem.getExpr());
                exprMap.put(orderByElem.getExpr().getId().toString(), orderByElem.getExpr());
            }
        }
    }

    /**
     * Inverse of collectExprs(): replaces each expr of this stmt with the (possibly
     * rewritten) expr looked up by its registered id, recursing into inline views
     * and subqueries. Exprs without an id (skipped aliases) are kept as-is.
     */
    @Override
    public void putBackExprs(Map<String, Expr> rewrittenExprMap) {
        List<Subquery> subqueryExprs = Lists.newArrayList();
        for (SelectListItem item : selectList.getItems()) {
            if (item.isStar()) {
                continue;
            }
            item.setExpr(rewrittenExprMap.get(item.getExpr().getId().toString()));
            if (item.getExpr().contains(Predicates.instanceOf(Subquery.class))) {
                item.getExpr().collect(Subquery.class, subqueryExprs);
            }
        }
        for (TableRef ref : fromClause) {
            if (ref.onClause != null) {
                ref.setOnClause(rewrittenExprMap.get(ref.onClause.getId().toString()));
            }
            if (ref instanceof InlineViewRef) {
                ((InlineViewRef) ref).getViewStmt().putBackExprs(rewrittenExprMap);
            }
        }
        if (whereClause != null) {
            setWhereClause(rewrittenExprMap.get(whereClause.getId().toString()));
            whereClause.collect(Subquery.class, subqueryExprs);
        }
        if (havingClause != null) {
            // Restored by the id that was registered on havingClauseAfterAnaylzed.
            havingClause = rewrittenExprMap.get(havingClauseAfterAnaylzed.getId().toString());
            havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs);
        }
        for (Subquery subquery : subqueryExprs) {
            subquery.getStatement().putBackExprs(rewrittenExprMap);
        }
        if (groupByClause != null) {
            ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs();
            if (groupingExprs != null) {
                ArrayList<Expr> newGroupingExpr = new ArrayList<>();
                for (Expr expr : groupingExprs) {
                    if (expr.getId() == null) {
                        // No id -> was skipped during collect (alias); keep original.
                        newGroupingExpr.add(expr);
                    } else {
                        newGroupingExpr.add(rewrittenExprMap.get(expr.getId().toString()));
                    }
                }
                groupByClause.setGroupingExpr(newGroupingExpr);
            }
            List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs();
            if (oriGroupingExprs != null) {
                ArrayList<Expr> newOriGroupingExprs = new ArrayList<>();
                for (Expr expr : oriGroupingExprs) {
                    if (expr.getId() == null) {
                        newOriGroupingExprs.add(expr);
                    } else {
                        newOriGroupingExprs.add(rewrittenExprMap.get(expr.getId().toString()));
                    }
                }
                groupByClause.setOriGroupingExprs(newOriGroupingExprs);
            }
        }
        if (orderByElements != null) {
            for (OrderByElement orderByElem : orderByElementsAfterAnalyzed) {
                Expr expr = orderByElem.getExpr();
                if (expr.getId() == null) {
                    orderByElem.setExpr(expr);
                } else {
                    orderByElem.setExpr(rewrittenExprMap.get(expr.getId().toString()));
                }
            }
            orderByElements = (ArrayList<OrderByElement>) orderByElementsAfterAnalyzed;
        }
    }

    /**
     * Rewrites select-list exprs; CASE exprs containing subqueries are first
     * flattened into inline views via rewriteSubquery().
     */
    private void rewriteSelectList(ExprRewriter rewriter) throws AnalysisException {
        for (SelectListItem item : selectList.getItems()) {
            if (item.getExpr() instanceof CaseExpr
                    && item.getExpr().contains(Predicates.instanceOf(Subquery.class))) {
                rewriteSubquery(item.getExpr(), analyzer);
            }
        }
        selectList.rewriteExprs(rewriter, analyzer);
    }

    /** equal subquery in case when to an inline view
     * subquery in case when statement like
     *
     * SELECT CASE
     *         WHEN (
     *             SELECT COUNT(*) / 2
     *             FROM t
     *         ) > k4 THEN (
     *             SELECT AVG(k4)
     *             FROM t
     *         )
     *         ELSE (
     *             SELECT SUM(k4)
     *             FROM t
     *         )
     *     END AS kk4
     * FROM t;
     * this statement will be equal to
     *
     * SELECT CASE
     *         WHEN t1.a > k4 THEN t2.a
     *         ELSE t3.a
     *     END AS kk4
     * FROM t, (
     *         SELECT COUNT(*) / 2 AS a
     *         FROM t
     *     ) t1, (
     *         SELECT AVG(k4) AS a
     *         FROM t
     *     ) t2, (
     *         SELECT SUM(k4) AS a
     *         FROM t
     *     ) t3;
     */
    private Expr rewriteSubquery(Expr expr, Analyzer analyzer) throws AnalysisException {
        if (expr instanceof Subquery) {
            if (!(((Subquery) expr).getStatement() instanceof SelectStmt)) {
                throw new AnalysisException("Only support select subquery in case-when clause.");
            }
            if (expr.isCorrelatedPredicate(getTableRefIds())) {
                throw new AnalysisException("The correlated subquery in case-when clause is not supported");
            }
            SelectStmt subquery = (SelectStmt) ((Subquery) expr).getStatement();
            if (subquery.resultExprs.size() != 1 ||
!subquery.returnsSingleRow()) {
                throw new AnalysisException("Subquery in case-when must return scala type");
            }
            // Convert the scalar subquery into an inline view with a generated alias,
            // guarded by an assert-num-rows(=1) so it still fails at runtime if the
            // cardinality estimate was wrong.
            subquery.reset();
            subquery.setAssertNumRowsElement(1, AssertNumRowsElement.Assertion.EQ);
            String alias = getTableAliasGenerator().getNextAlias();
            String colAlias = getColumnAliasGenerator().getNextAlias();
            InlineViewRef inlineViewRef = new InlineViewRef(alias, subquery, Arrays.asList(colAlias));
            try {
                inlineViewRef.analyze(analyzer);
            } catch (UserException e) {
                throw new AnalysisException(e.getMessage());
            }
            fromClause.add(inlineViewRef);
            // Replace the subquery by a slot ref into the new inline view.
            expr = new SlotRef(inlineViewRef.getAliasAsName(), colAlias);
        } else if (CollectionUtils.isNotEmpty(expr.getChildren())) {
            // Recurse: rewrite any subquery nested anywhere inside this expr tree.
            for (int i = 0; i < expr.getChildren().size(); ++i) {
                expr.setChild(i, rewriteSubquery(expr.getChild(i), analyzer));
            }
        }
        return expr;
    }

    /** Reconstructs the statement as SQL text; cached sqlString wins when present. */
    @Override
    public String toSql() {
        if (sqlString != null) {
            return sqlString;
        }
        StringBuilder strBuilder = new StringBuilder();
        if (withClause != null) {
            strBuilder.append(withClause.toSql());
            strBuilder.append(" ");
        }
        strBuilder.append("SELECT ");
        if (selectList.isDistinct()) {
            strBuilder.append("DISTINCT ");
        }
        for (int i = 0; i < resultExprs.size(); ++i) {
            if (i != 0) {
                strBuilder.append(", ");
            }
            if (needToSql) {
                // needToSql: print pre-substitution exprs preserved in originalExpr.
                strBuilder.append(originalExpr.get(i).toSql());
            } else {
                strBuilder.append(resultExprs.get(i).toSql());
            }
            strBuilder.append(" AS ").append(SqlUtils.getIdentSql(colLabels.get(i)));
        }
        if (!fromClause.isEmpty()) {
            strBuilder.append(fromClause.toSql());
        }
        if (whereClause != null) {
            strBuilder.append(" WHERE ");
            strBuilder.append(whereClause.toSql());
        }
        if (groupByClause != null) {
            strBuilder.append(" GROUP BY ");
            strBuilder.append(groupByClause.toSql());
        }
        if (havingClause != null) {
            strBuilder.append(" HAVING ");
            strBuilder.append(havingClause.toSql());
        }
        if (orderByElements != null) {
            strBuilder.append(" ORDER BY ");
            for (int i = 0; i < orderByElements.size(); ++i) {
                strBuilder.append(orderByElements.get(i).getExpr().toSql());
                if (sortInfo != null) {
                    strBuilder.append((sortInfo.getIsAscOrder().get(i)) ? " ASC" : " DESC");
                }
                strBuilder.append((i + 1 != orderByElements.size()) ? ", " : "");
            }
        }
        if (hasLimitClause()) {
            strBuilder.append(limitElement.toSql());
        }
        if (hasOutFileClause()) {
            strBuilder.append(outFileClause.toSql());
        }
        return strBuilder.toString();
    }

    /**
     * Like toSql(), but produces the normalized "digest" form used for statement
     * fingerprinting (e.g. literal-insensitive matching of equivalent queries).
     */
    @Override
    public String toDigest() {
        StringBuilder strBuilder = new StringBuilder();
        if (withClause != null) {
            strBuilder.append(withClause.toDigest());
            strBuilder.append(" ");
        }
        strBuilder.append("SELECT ");
        if (selectList.isDistinct()) {
            strBuilder.append("DISTINCT ");
        }
        if (originalExpr == null) {
            originalExpr = Expr.cloneList(resultExprs);
        }
        if (resultExprs.isEmpty()) {
            // Not analyzed yet: fall back to the raw select-list items.
            for (int i = 0; i < selectList.getItems().size(); ++i) {
                if (i != 0) {
                    strBuilder.append(", ");
                }
                strBuilder.append(selectList.getItems().get(i).toDigest());
            }
        } else {
            for (int i = 0; i < originalExpr.size(); ++i) {
                if (i != 0) {
                    strBuilder.append(", ");
                }
                strBuilder.append(originalExpr.get(i).toDigest());
                strBuilder.append(" AS ").append(SqlUtils.getIdentSql(colLabels.get(i)));
            }
        }
        if (!fromClause.isEmpty()) {
            strBuilder.append(fromClause.toDigest());
        }
        if (whereClause != null) {
            strBuilder.append(" WHERE ");
            strBuilder.append(whereClause.toDigest());
        }
        if (groupByClause != null) {
            strBuilder.append(" GROUP BY ");
            // NOTE(review): GROUP BY uses toSql() while every other clause uses
            // toDigest() — confirm whether this inconsistency is intentional.
            strBuilder.append(groupByClause.toSql());
        }
        if (havingClause != null) {
            strBuilder.append(" HAVING ");
            strBuilder.append(havingClause.toDigest());
        }
        if (orderByElements != null) {
            strBuilder.append(" ORDER BY ");
            for (int i = 0; i < orderByElements.size(); ++i) {
                strBuilder.append(orderByElements.get(i).getExpr().toDigest());
                if (sortInfo != null) {
                    strBuilder.append((sortInfo.getIsAscOrder().get(i)) ? " ASC" : " DESC");
                }
                strBuilder.append((i + 1 != orderByElements.size()) ?
", " : ""); } } if (hasLimitClause()) { strBuilder.append(limitElement.toDigest()); } if (hasOutFileClause()) { strBuilder.append(outFileClause.toDigest()); } return strBuilder.toString(); } /** * If the select statement has a sort/top that is evaluated, then the sort tuple * is materialized. Else, if there is aggregation then the aggregate tuple id is * materialized. Otherwise, all referenced tables are materialized as long as they are * not semi-joined. If there are analytics and no sort, then the returned tuple * ids also include the logical analytic output tuple. */ @Override public void getMaterializedTupleIds(ArrayList<TupleId> tupleIdList) { if (evaluateOrderBy) { tupleIdList.add(sortInfo.getSortTupleDescriptor().getId()); } else if (aggInfo != null) { if (aggInfo.isDistinctAgg()) { tupleIdList.add(aggInfo.getSecondPhaseDistinctAggInfo().getOutputTupleId()); } else { tupleIdList.add(aggInfo.getOutputTupleId()); } } else { for (TableRef tblRef : fromClause) { tupleIdList.addAll(tblRef.getMaterializedTupleIds()); } } if (hasAnalyticInfo() && !isEvaluateOrderBy()) { tupleIdList.add(analyticInfo.getOutputTupleId()); } } @Override public void substituteSelectList(Analyzer analyzer, List<String> newColLabels) throws AnalysisException, UserException { if (hasWithClause()) { withClause.analyze(analyzer); } TableRef leftTblRef = null; for (int i = 0; i < fromClause.size(); ++i) { TableRef tblRef = fromClause.get(i); tblRef = analyzer.resolveTableRef(tblRef); Preconditions.checkNotNull(tblRef); fromClause.set(i, tblRef); tblRef.setLeftTblRef(leftTblRef); tblRef.analyze(analyzer); leftTblRef = tblRef; } for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { TableName tblName = item.getTblName(); if (tblName == null) { expandStar(analyzer); } else { expandStar(analyzer, tblName); } } else { if (item.getExpr() instanceof AnalyticExpr) { item.getExpr().analyze(analyzer); } if (item.getAlias() != null) { SlotRef aliasRef = new SlotRef(null, 
item.getAlias()); SlotRef newAliasRef = new SlotRef(null, newColLabels.get(resultExprs.size())); newAliasRef.analysisDone(); aliasSMap.put(aliasRef, newAliasRef); } resultExprs.add(item.getExpr()); } } if (groupByClause != null) { substituteOrdinalsAliases(groupByClause.getGroupingExprs(), "GROUP BY", analyzer); } if (havingClause != null) { havingClause = havingClause.clone(aliasSMap); } if (orderByElements != null) { for (int i = 0; i < orderByElements.size(); ++i) { orderByElements = OrderByElement.substitute(orderByElements, aliasSMap, analyzer); } } colLabels.clear(); colLabels.addAll(newColLabels); } public boolean hasWhereClause() { return whereClause != null; } public boolean hasAggInfo() { return aggInfo != null; } public boolean hasGroupByClause() { return groupByClause != null; } /** * Check if the stmt returns a single row. This can happen * in the following cases: * 1. select stmt with a 'limit 1' clause * 2. select stmt with an aggregate function and no group by. * 3. select stmt with no from clause. * <p> * This function may produce false negatives because the cardinality of the * result set also depends on the data a stmt is processing. 
*/ public boolean returnsSingleRow() { if (hasLimitClause() && getLimit() == 1) { return true; } if (fromClause.isEmpty()) { return true; } if (hasAggInfo() && !hasGroupByClause() && !selectList.isDistinct()) { return true; } return false; } @Override public void collectTableRefs(List<TableRef> tblRefs) { for (TableRef tblRef : fromClause) { if (tblRef instanceof InlineViewRef) { InlineViewRef inlineViewRef = (InlineViewRef) tblRef; inlineViewRef.getViewStmt().collectTableRefs(tblRefs); } else { tblRefs.add(tblRef); } } } private boolean checkGroupingFn(Expr expr) { if (expr instanceof GroupingFunctionCallExpr) { return true; } else if (expr.getChildren() != null && expr.getChildren().size() > 0) { for (Expr child : expr.getChildren()) { if (checkGroupingFn(child)) { return true; } } } return false; } private void getAggregateFnExpr(Expr expr, ArrayList<Expr> aggFnExprList) { if (expr instanceof FunctionCallExpr && expr.fn instanceof AggregateFunction) { aggFnExprList.add(expr); } else if (expr.getChildren() != null && expr.getChildren().size() > 0) { for (Expr child : expr.getChildren()) { getAggregateFnExpr(child, aggFnExprList); } } } @Override public int hashCode() { return id.hashCode(); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (!(obj instanceof SelectStmt)) { return false; } return this.id.equals(((SelectStmt) obj).id); } }
class SelectStmt extends QueryStmt {
    private static final Logger LOG = LogManager.getLogger(SelectStmt.class);
    // Identity used by equals()/hashCode(); shared by clones via the copy ctor.
    private UUID id = UUID.randomUUID();
    // Original select-list items from the query.
    protected SelectList selectList;
    private final ArrayList<String> colLabels; // lower case column labels
    protected final FromClause fromClause;
    protected GroupByClause groupByClause;
    private List<Expr> originalExpr;
    private Expr havingClause; // original having clause
    protected Expr whereClause;
    // havingClause with aliases and agg output resolved
    private Expr havingPred;
    // set if we have any kind of aggregation operation, include SELECT DISTINCT
    private AggregateInfo aggInfo;
    // set if we have analytic function
    private AnalyticInfo analyticInfo;
    // substitutes all exprs in this select block to reference base tables
    // directly
    private ExprSubstitutionMap baseTblSmap = new ExprSubstitutionMap();
    private ValueList valueList;
    // if we have grouping extensions like cube or rollup or grouping sets
    private GroupingInfo groupingInfo;
    // having clause which has been analyzed
    // e.g. group by k1 having sum(k2) > 2 and sum(k1) > 1
    private Expr havingClauseAfterAnaylzed;
    // SQL string of this SelectStmt before inline-view expression substitution.
    protected String sqlString;
    private TableAliasGenerator tableAliasGenerator = null;
    // Copy of the select list before any rewrite, used by resetSelectList().
    private SelectList originSelectList;

    /** Constructor for a VALUES(...) statement. */
    public SelectStmt(ValueList valueList, ArrayList<OrderByElement> orderByElement, LimitElement limitElement) {
        super(orderByElement, limitElement);
        this.valueList = valueList;
        this.selectList = new SelectList();
        this.fromClause = new FromClause();
        this.colLabels = Lists.newArrayList();
    }

    SelectStmt(
            SelectList selectList,
            FromClause fromClause,
            Expr wherePredicate,
            GroupByClause groupByClause,
            Expr havingPredicate,
            ArrayList<OrderByElement> orderByElements,
            LimitElement limitElement) {
        super(orderByElements, limitElement);
        this.selectList = selectList;
        this.originSelectList = selectList.clone();
        if (fromClause == null) {
            this.fromClause = new FromClause();
        } else {
            this.fromClause = fromClause;
        }
        this.whereClause = wherePredicate;
        this.groupByClause = groupByClause;
        this.havingClause = havingPredicate;
        this.colLabels = Lists.newArrayList();
        this.havingPred = null;
        this.aggInfo = null;
        this.sortInfo = null;
        this.groupingInfo = null;
    }

    /** C'tor for cloning; deep-copies mutable state but keeps the same id. */
    protected SelectStmt(SelectStmt other) {
        super(other);
        this.id = other.id;
        selectList = other.selectList.clone();
        fromClause = other.fromClause.clone();
        whereClause = (other.whereClause != null) ? other.whereClause.clone() : null;
        groupByClause = (other.groupByClause != null) ? other.groupByClause.clone() : null;
        havingClause = (other.havingClause != null) ? other.havingClause.clone() : null;
        havingClauseAfterAnaylzed =
                other.havingClauseAfterAnaylzed != null ? other.havingClauseAfterAnaylzed.clone() : null;
        colLabels = Lists.newArrayList(other.colLabels);
        aggInfo = (other.aggInfo != null) ? other.aggInfo.clone() : null;
        analyticInfo = (other.analyticInfo != null) ? other.analyticInfo.clone() : null;
        sqlString = (other.sqlString != null) ? other.sqlString : null;
        baseTblSmap = other.baseTblSmap.clone();
        groupingInfo = null;
    }

    /** Resets all analysis state so the stmt can be re-analyzed from scratch. */
    @Override
    public void reset() {
        super.reset();
        selectList.reset();
        colLabels.clear();
        fromClause.reset();
        if (whereClause != null) {
            whereClause.reset();
        }
        if (groupByClause != null) {
            groupByClause.reset();
        }
        if (havingClause != null) {
            havingClause.reset();
        }
        havingClauseAfterAnaylzed = null;
        havingPred = null;
        aggInfo = null;
        analyticInfo = null;
        baseTblSmap.clear();
        groupingInfo = null;
    }

    /** Restores the select list captured at construction time, if any. */
    @Override
    public void resetSelectList() {
        if (originSelectList != null) {
            selectList = originSelectList;
        }
    }

    @Override
    public QueryStmt clone() {
        return new SelectStmt(this);
    }

    public UUID getId() {
        return id;
    }

    /**
     * @return the original select list items from the query
     */
    public SelectList getSelectList() {
        return selectList;
    }

    public void setSelectList(SelectList selectList) {
        this.selectList = selectList;
    }

    public ValueList getValueList() {
        return valueList;
    }

    /**
     * @return the HAVING clause post-analysis and with aliases resolved
     */
    public Expr getHavingPred() {
        return havingPred;
    }

    public Expr getHavingClauseAfterAnaylzed() {
        return havingClauseAfterAnaylzed;
    }

    public List<TableRef> getTableRefs() {
        return fromClause.getTableRefs();
    }

    public Expr getWhereClause() {
        return whereClause;
    }

    public void setWhereClause(Expr whereClause) {
        this.whereClause = whereClause;
    }

    public AggregateInfo getAggInfo() {
        return aggInfo;
    }

    public GroupingInfo
getGroupingInfo() {
        return groupingInfo;
    }

    public GroupByClause getGroupByClause() {
        return groupByClause;
    }

    public AnalyticInfo getAnalyticInfo() {
        return analyticInfo;
    }

    public boolean hasAnalyticInfo() {
        return analyticInfo != null;
    }

    public boolean hasHavingClause() {
        return havingClause != null;
    }

    public void removeHavingClause() {
        havingClause = null;
    }

    @Override
    public SortInfo getSortInfo() {
        return sortInfo;
    }

    @Override
    public ArrayList<String> getColLabels() {
        return colLabels;
    }

    public ExprSubstitutionMap getBaseTblSmap() {
        return baseTblSmap;
    }

    /**
     * Collects every referenced table into tableMap (keyed by table id), recursing
     * into WITH-clause views, inline views and — when expandView is true — view
     * definitions. Also enforces SELECT privilege on directly referenced tables.
     */
    @Override
    public void getTables(Analyzer analyzer, boolean expandView, Map<Long, TableIf> tableMap,
            Set<String> parentViewNameSet) throws AnalysisException {
        getWithClauseTables(analyzer, expandView, tableMap, parentViewNameSet);
        for (TableRef tblRef : fromClause) {
            if (tblRef instanceof InlineViewRef) {
                // Inline view reference.
                QueryStmt inlineStmt = ((InlineViewRef) tblRef).getViewStmt();
                inlineStmt.getTables(analyzer, expandView, tableMap, parentViewNameSet);
            } else if (tblRef instanceof TableValuedFunctionRef) {
                TableValuedFunctionRef tblFuncRef = (TableValuedFunctionRef) tblRef;
                tableMap.put(tblFuncRef.getTableFunction().getTable().getId(),
                        tblFuncRef.getTableFunction().getTable());
            } else {
                String dbName = tblRef.getName().getDb();
                String tableName = tblRef.getName().getTbl();
                if (Strings.isNullOrEmpty(dbName)) {
                    dbName = analyzer.getDefaultDb();
                } else {
                    dbName = ClusterNamespace.getFullName(analyzer.getClusterName(), tblRef.getName().getDb());
                }
                // Names defined by WITH clauses / parent views shadow catalog tables.
                if (isViewTableRef(tblRef.getName().toString(), parentViewNameSet)) {
                    continue;
                }
                tblRef.getName().analyze(analyzer);
                DatabaseIf db = analyzer.getEnv().getCatalogMgr()
                        .getCatalogOrAnalysisException(tblRef.getName().getCtl()).getDbOrAnalysisException(dbName);
                TableIf table = db.getTableOrAnalysisException(tableName);
                if (expandView && (table instanceof View)) {
                    View view = (View) table;
                    view.getQueryStmt().getTables(analyzer, expandView, tableMap, parentViewNameSet);
                } else {
                    // Check auth on the concrete table before exposing it.
                    if (!Env.getCurrentEnv().getAuth()
                            .checkTblPriv(ConnectContext.get(), tblRef.getName(), PrivPredicate.SELECT)) {
                        ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR,
                                "SELECT",
                                ConnectContext.get().getQualifiedUser(),
                                ConnectContext.get().getRemoteIP(),
                                dbName + ": " + tableName);
                    }
                    tableMap.put(table.getId(), table);
                }
            }
        }
    }

    /**
     * Collects resolved table refs (flattening inline views), skipping names that
     * refer to WITH-clause/parent views.
     */
    @Override
    public void getTableRefs(Analyzer analyzer, List<TableRef> tblRefs, Set<String> parentViewNameSet) {
        getWithClauseTableRefs(analyzer, tblRefs, parentViewNameSet);
        for (TableRef tblRef : fromClause) {
            try {
                TableRef tmpTblRef = analyzer.resolveTableRef(tblRef);
                if (tmpTblRef instanceof InlineViewRef) {
                    QueryStmt inlineStmt = ((InlineViewRef) tmpTblRef).getViewStmt();
                    inlineStmt.getTableRefs(analyzer, tblRefs, parentViewNameSet);
                } else {
                    if (isViewTableRef(tmpTblRef.getName().toString(), parentViewNameSet)) {
                        continue;
                    }
                    tblRefs.add(tmpTblRef);
                }
            } catch (AnalysisException e) {
                // NOTE(review): unresolvable refs are deliberately skipped here
                // (best-effort collection) — exception intentionally ignored.
            }
        }
    }

    /**
     * Returns true if tblName names a view from an outer scope (parentViewNameSet)
     * or one defined by this stmt's own WITH clause.
     */
    private boolean isViewTableRef(String tblName, Set<String> parentViewNameSet) {
        if (parentViewNameSet.contains(tblName)) {
            return true;
        }
        if (withClause != null) {
            List<View> views = withClause.getViews();
            for (View view : views) {
                if (view.getName().equals(tblName)) {
                    return true;
                }
            }
        }
        return false;
    }

    // Lazily created generator for unique column aliases (e.g. subquery rewrites).
    private ColumnAliasGenerator columnAliasGenerator = null;

    public ColumnAliasGenerator getColumnAliasGenerator() {
        if (columnAliasGenerator == null) {
            columnAliasGenerator = new ColumnAliasGenerator(colLabels, null);
        }
        return columnAliasGenerator;
    }

    public TableAliasGenerator getTableAliasGenerator() {
        if (tableAliasGenerator == null) {
            tableAliasGenerator = new TableAliasGenerator(analyzer, null);
        }
        return tableAliasGenerator;
    }

    public void setTableAliasGenerator(TableAliasGenerator tableAliasGenerator) {
        this.tableAliasGenerator = tableAliasGenerator;
    }

    /** Tuple ids of all FROM-clause refs, inline views included. */
    public List<TupleId> getTableRefIds() {
        List<TupleId> result = Lists.newArrayList();
        for (TableRef ref : fromClause) {
            result.add(ref.getId());
        }
        return result;
    }

    public List<TupleId>
getTableRefIdsWithoutInlineView() {
        List<TupleId> result = Lists.newArrayList();
        for (TableRef ref : fromClause) {
            if (ref instanceof InlineViewRef) {
                continue;
            }
            result.add(ref.getId());
        }
        return result;
    }

    public boolean hasInlineView() {
        for (TableRef ref : fromClause) {
            if (ref instanceof InlineViewRef) {
                return true;
            }
        }
        return false;
    }

    /**
     * Tuple ids referenced by the result exprs, FROM clause, WHERE and (analyzed)
     * HAVING clauses of this stmt.
     */
    @Override
    public List<TupleId> collectTupleIds() {
        List<TupleId> result = Lists.newArrayList();
        resultExprs.stream().forEach(expr -> expr.getIds(result, null));
        result.addAll(getTableRefIds());
        if (whereClause != null) {
            whereClause.getIds(result, null);
        }
        if (havingClauseAfterAnaylzed != null) {
            havingClauseAfterAnaylzed.getIds(result, null);
        }
        return result;
    }

    /** Folds a constant integer WHERE clause into TRUE/FALSE (0 -> false, else true). */
    private void whereClauseRewrite() {
        if (whereClause instanceof IntLiteral) {
            if (((IntLiteral) whereClause).getLongValue() == 0) {
                whereClause = new BoolLiteral(false);
            } else {
                whereClause = new BoolLiteral(true);
            }
        }
    }

    /**
     * Generates and registers !empty() predicates to filter out empty collections directly
     * in the parent scan of collection table refs. This is a performance optimization to
     * avoid the expensive processing of empty collections inside a subplan that would
     * yield an empty result set.
     * <p>
     * For correctness purposes, the predicates are generated in cases where we can ensure
     * that they will be assigned only to the parent scan, and no other plan node.
     * <p>
     * The conditions are as follows:
     * - collection table ref is relative and non-correlated
     * - collection table ref represents the rhs of an inner/cross/semi join
     * - collection table ref's parent tuple is not outer joined
     * <p>
     * TODO: In some cases, it is possible to generate !empty() predicates for a correlated
     * table ref, but in general, that is not correct for non-trivial query blocks.
     * For example, if the block with the correlated ref has an aggregation then adding a
     * !empty() predicate would incorrectly discard rows from the final result set.
     * TODO: Evaluating !empty() predicates at non-scan nodes interacts poorly with our BE
     * projection of collection slots. For example, rows could incorrectly be filtered if
     * a !empty() predicate is assigned to a plan node that comes after the unnest of the
     * collection that also performs the projection.
     */
    private void registerIsNotEmptyPredicates(Analyzer analyzer) throws AnalysisException {
        // Intentionally disabled (kept for reference); currently a no-op.
        /*
        for (TableRef tblRef: fromClause_.getTableRefs()) {
            Preconditions.checkState(tblRef.isResolved());
            if (!(tblRef instanceof CollectionTableRef)) continue;
            CollectionTableRef ref = (CollectionTableRef) tblRef;
            if (!ref.isRelative() || ref.isCorrelated()) continue;
            if (ref.getJoinOp().isOuterJoin() || ref.getJoinOp().isAntiJoin()) continue;
            if (analyzer.isOuterJoined(ref.getResolvedPath().getRootDesc().getId())) continue;
            IsNotEmptyPredicate isNotEmptyPred =
                    new IsNotEmptyPredicate(ref.getCollectionExpr().clone());
            isNotEmptyPred.analyze(analyzer);
            analyzer.registerOnClauseConjuncts(
                    Lists.<Expr>newArrayList(isNotEmptyPred), ref);
        }
        */
    }

    /**
     * Marks all unassigned join predicates as well as exprs in aggInfo and sortInfo.
*/
    public void materializeRequiredSlots(Analyzer analyzer) throws AnalysisException {
        // Mark unassigned join predicates. Some predicates that must be evaluated by a join
        // can also be safely evaluated below the join (picked up by getBoundPredicates()).
        List<Expr> unassigned = analyzer.getUnassignedConjuncts(getTableRefIds(), true);
        List<Expr> unassignedJoinConjuncts = Lists.newArrayList();
        for (Expr e : unassigned) {
            if (analyzer.evalAfterJoin(e)) {
                unassignedJoinConjuncts.add(e);
            }
        }
        List<Expr> baseTblJoinConjuncts =
                Expr.trySubstituteList(unassignedJoinConjuncts, baseTblSmap, analyzer, false);
        analyzer.materializeSlots(baseTblJoinConjuncts);
        if (evaluateOrderBy) {
            // Mark ordering exprs before marking agg/analytic exprs.
            sortInfo.materializeRequiredSlots(analyzer, baseTblSmap);
        }
        if (hasAnalyticInfo()) {
            // Mark analytic exprs before marking agg exprs because they may contain
            // agg exprs that shouldn't be marked if only the analytic expr is referenced.
            ArrayList<TupleId> tids = Lists.newArrayList();
            getMaterializedTupleIds(tids);
            List<Expr> conjuncts = analyzer.getUnassignedConjuncts(tids);
            analyzer.materializeSlots(conjuncts);
            analyticInfo.materializeRequiredSlots(analyzer, baseTblSmap);
        }
        if (aggInfo != null) {
            // Mark all agg exprs needed for HAVING pred and binding predicates as
            // materialized before calling aggInfo.materializeRequiredSlots().
            ArrayList<Expr> havingConjuncts = Lists.newArrayList();
            if (havingPred != null) {
                havingConjuncts.add(havingPred);
            }
            havingConjuncts.addAll(
                    analyzer.getUnassignedConjuncts(aggInfo.getResultTupleId().asList()));
            materializeSlots(analyzer, havingConjuncts);
            aggInfo.materializeRequiredSlots(analyzer, baseTblSmap);
        }
        for (TableRef tableRef : fromClause.getTableRefs()) {
            if (tableRef.lateralViewRefs != null) {
                for (LateralViewRef lateralViewRef : tableRef.lateralViewRefs) {
                    lateralViewRef.materializeRequiredSlots(baseTblSmap, analyzer);
                }
            }
        }
    }

    /**
     * Reorders the leading run of INNER-joined, hint-free table refs by descending
     * row count (inline views pushed last), then tries each candidate as the join
     * head; falls back to the original order if no valid ordering is found.
     */
    protected void reorderTable(Analyzer analyzer) throws AnalysisException {
        List<Pair<TableRef, Long>> candidates = Lists.newArrayList();
        ArrayList<TableRef> originOrderBackUp = Lists.newArrayList(fromClause.getTableRefs());
        // New pair of table ref and row count; only the leading INNER-join prefix
        // without join hints participates in reordering.
        for (TableRef tblRef : fromClause) {
            if (tblRef.getJoinOp() != JoinOperator.INNER_JOIN || tblRef.hasJoinHints()) {
                break;
            }
            long rowCount = 0;
            if (tblRef.getTable().getType() == TableType.OLAP) {
                rowCount = ((OlapTable) (tblRef.getTable())).getRowCount();
                LOG.debug("tableName={} rowCount={}", tblRef.getAlias(), rowCount);
            }
            candidates.add(Pair.of(tblRef, rowCount));
        }
        int reorderTableCount = candidates.size();
        if (reorderTableCount < originOrderBackUp.size()) {
            fromClause.clear();
            fromClause.addAll(originOrderBackUp.subList(0, reorderTableCount));
        }
        // Give inline views synthetic row counts that keep them behind base tables.
        long last = 0;
        for (int i = candidates.size() - 1; i >= 0; --i) {
            Pair<TableRef, Long> candidate = candidates.get(i);
            if (candidate.first instanceof InlineViewRef) {
                candidate.second = last;
            }
            last = candidate.second + 1;
        }
        // Sort table refs by row count, largest first.
        Collections.sort(candidates, (a, b) -> b.second.compareTo(a.second));
        for (Pair<TableRef, Long> candidate : candidates) {
            if (reorderTable(analyzer, candidate.first)) {
                // Re-attach the non-reorderable tail in its original order.
                if (reorderTableCount < originOrderBackUp.size()) {
                    fromClause.addAll(originOrderBackUp.subList(reorderTableCount, originOrderBackUp.size()));
                }
                return;
            }
        }
        // No ordering starting from any candidate covered all refs: restore original.
        fromClause.clear();
        for (TableRef tableRef : originOrderBackUp) {
            fromClause.add(tableRef);
        }
    }

    /**
     * Attempts a join ordering that starts from firstRef, greedily appending refs
     * whose eq-join predicates are fully bound by already-placed tuples. Returns
     * false (and restores the original FROM order) if some ref cannot be connected.
     */
    protected boolean reorderTable(Analyzer analyzer, TableRef firstRef) throws AnalysisException {
        List<TableRef> tmpRefList = Lists.newArrayList();
        Map<TupleId, TableRef> tableRefMap = Maps.newHashMap();
        // Set up table ref map and backup the original order.
        for (TableRef tblRef : fromClause) {
            tableRefMap.put(tblRef.getId(), tblRef);
            tmpRefList.add(tblRef);
        }
        fromClause.clear();
        fromClause.add(firstRef);
        tableRefMap.remove(firstRef.getId());
        // Known tuple ids that later refs may join against.
        Set<TupleId> validTupleId = Sets.newHashSet();
        validTupleId.add(firstRef.getId());
        int i = 0;
        while (i < fromClause.size()) {
            TableRef tblRef = fromClause.get(i);
            // Each time a table ref is placed, find the next one it eq-joins to.
            List<Expr> eqJoinPredicates = analyzer.getEqJoinConjuncts(tblRef.getId());
            List<TupleId> tupleList = Lists.newArrayList();
            Expr.getIds(eqJoinPredicates, tupleList, null);
            for (TupleId tid : tupleList) {
                if (validTupleId.contains(tid)) {
                    // Already placed.
                    continue;
                }
                TableRef candidateTableRef = tableRefMap.get(tid);
                if (candidateTableRef != null) {
                    // Accept the candidate only if at least one of its eq-join
                    // predicates is fully bound by already-placed tuples.
                    Preconditions.checkState(tid == candidateTableRef.getId());
                    List<Expr> candidateEqJoinPredicates =
                            analyzer.getEqJoinConjunctsExcludeAuxPredicates(tid);
                    for (Expr candidateEqJoinPredicate : candidateEqJoinPredicates) {
                        List<TupleId> candidateTupleList = Lists.newArrayList();
                        Expr.getIds(Lists.newArrayList(candidateEqJoinPredicate), candidateTupleList, null);
                        int count = candidateTupleList.size();
                        for (TupleId tupleId : candidateTupleList) {
                            if (validTupleId.contains(tupleId) || tid.equals(tupleId)) {
                                count--;
                            }
                        }
                        if (count == 0) {
                            fromClause.add(candidateTableRef);
                            validTupleId.add(tid);
                            tableRefMap.remove(tid);
                            break;
                        }
                    }
                }
            }
            i++;
        }
        // Some refs could not be connected via eq-joins: restore and report failure.
        if (0 != tableRefMap.size()) {
            fromClause.clear();
            fromClause.addAll(tmpRefList);
            return false;
        }
        return true;
    }

    /**
     * Populates baseTblSmap_ with our combined inline view smap and creates
     * baseTblResultExprs.
     */
    protected void resolveInlineViewRefs(Analyzer analyzer) throws AnalysisException {
        // Gather the inline view substitution maps from the enclosed inline views.
        for (TableRef tblRef : fromClause) {
            if (tblRef instanceof InlineViewRef) {
                InlineViewRef inlineViewRef = (InlineViewRef) tblRef;
                baseTblSmap = ExprSubstitutionMap.combine(baseTblSmap, inlineViewRef.getBaseTblSmap());
            }
        }
        baseTblResultExprs = Expr.trySubstituteList(resultExprs, baseTblSmap, analyzer, false);
        if (LOG.isDebugEnabled()) {
            LOG.debug("baseTblSmap_: " + baseTblSmap.debugString());
            LOG.debug("resultExprs: " + Expr.debugString(resultExprs));
            LOG.debug("baseTblResultExprs: " + Expr.debugString(baseTblResultExprs));
        }
    }

    /**
     * Expand "*" select list item.
     */
    private void expandStar(Analyzer analyzer) throws AnalysisException {
        if (fromClause.isEmpty()) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_NO_TABLES_USED);
        }
        // Expand in From clause order, skipping semi-joined refs whose slots
        // are not visible in the output.
        for (TableRef tableRef : fromClause) {
            if (analyzer.isSemiJoined(tableRef.getId())) {
                continue;
            }
            expandStar(new TableName(tableRef.getAliasAsName().getCtl(),
                            tableRef.getAliasAsName().getDb(),
                            tableRef.getAliasAsName().getTbl()),
                    tableRef.getDesc());
            if (tableRef.lateralViewRefs != null) {
                for (LateralViewRef lateralViewRef : tableRef.lateralViewRefs) {
                    expandStar(lateralViewRef.getName(), lateralViewRef.getDesc());
                }
            }
        }
    }

    /**
     * Expand "<tbl>.*" select list item.
*/ private void expandStar(Analyzer analyzer, TableName tblName) throws AnalysisException { Collection<TupleDescriptor> descs = analyzer.getDescriptor(tblName); if (descs == null || descs.isEmpty()) { ErrorReport.reportAnalysisException(ErrorCode.ERR_UNKNOWN_TABLE, tblName.getTbl(), tblName.getDb()); } for (TupleDescriptor desc : descs) { expandStar(tblName, desc); } } /** * Expand "*" for a particular tuple descriptor by appending * refs for each column to selectListExprs. */ private void expandStar(TableName tblName, TupleDescriptor desc) { for (Column col : desc.getTable().getBaseSchema()) { resultExprs.add(new SlotRef(tblName, col.getName())); colLabels.add(col.getName()); } } /** * Analyze aggregation-relevant components of the select block (Group By clause, * select list, Order By clause), * Create the AggregationInfo, including the agg output tuple, and transform all post-agg exprs * given AggregationInfo's smap. */ private void analyzeAggregation(Analyzer analyzer) throws AnalysisException { if (havingClause != null) { Expr ambiguousAlias = getFirstAmbiguousAlias(havingClause); if (ambiguousAlias != null) { ErrorReport.reportAnalysisException(ErrorCode.ERR_NON_UNIQ_ERROR, ambiguousAlias.toColumnLabel()); } /* * The having clause need to be substitute by aliasSMap. * And it is analyzed after substitute. 
* For example: * Query: select k1 a, sum(k2) b from table group by k1 having a > 1; * Having clause: a > 1 * aliasSMap: <a, table.k1> <b, sum(table.k2)> * After substitute: a > 1 changed to table.k1 > 1 * Analyzer: check column and other subquery in having clause * having predicate: table.k1 > 1 */ /* * TODO(ml): support substitute outer column in correlated subquery * For example: select k1 key, sum(k1) sum_k1 from table a group by k1 * having k1 > * (select min(k1) from table b where a.key=b.k2); * TODO: the a.key should be replaced by a.k1 instead of unknown column 'key' in 'a' */ havingClauseAfterAnaylzed = havingClause.substitute(aliasSMap, analyzer, false); havingClauseAfterAnaylzed = rewriteQueryExprByMvColumnExpr(havingClauseAfterAnaylzed, analyzer); havingClauseAfterAnaylzed.checkReturnsBool("HAVING clause", true); if (groupingInfo != null) { groupingInfo.substituteGroupingFn(Arrays.asList(havingClauseAfterAnaylzed), analyzer); } Expr analyticExpr = havingClauseAfterAnaylzed.findFirstOf(AnalyticExpr.class); if (analyticExpr != null) { throw new AnalysisException( "HAVING clause must not contain analytic expressions: " + analyticExpr.toSql()); } } if (groupByClause == null && !selectList.isDistinct() && !TreeNode.contains(resultExprs, Expr.isAggregatePredicate()) && (havingClauseAfterAnaylzed == null || !havingClauseAfterAnaylzed.contains( Expr.isAggregatePredicate())) && (sortInfo == null || !TreeNode.contains(sortInfo.getOrderingExprs(), Expr.isAggregatePredicate()))) { if (havingClauseAfterAnaylzed != null) { if (havingClauseAfterAnaylzed.contains(Subquery.class)) { throw new AnalysisException("Only constant expr could be supported in having clause " + "when no aggregation in stmt"); } analyzer.registerConjuncts(havingClauseAfterAnaylzed, true); } return; } if (fromClause.size() == 0) { throw new AnalysisException("Aggregation without a FROM clause is not allowed"); } if (selectList.isDistinct() && groupByClause == null) { List<Expr> aggregateExpr = 
Lists.newArrayList(); TreeNode.collect(resultExprs, Expr.isAggregatePredicate(), aggregateExpr); if (aggregateExpr.size() == resultExprs.size()) { selectList.setIsDistinct(false); } } if (selectList.isDistinct() && (groupByClause != null || TreeNode.contains(resultExprs, Expr.isAggregatePredicate()) || (havingClauseAfterAnaylzed != null && havingClauseAfterAnaylzed.contains( Expr.isAggregatePredicate())))) { throw new AnalysisException("cannot combine SELECT DISTINCT with aggregate functions or GROUP BY"); } if (groupByClause != null || TreeNode.contains(resultExprs, Expr.isAggregatePredicate())) { for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { throw new AnalysisException( "cannot combine '*' in select list with GROUP BY: " + item.toSql()); } } } ArrayList<FunctionCallExpr> aggExprs = Lists.newArrayList(); TreeNode.collect(resultExprs, Expr.isAggregatePredicate(), aggExprs); if (havingClauseAfterAnaylzed != null) { havingClauseAfterAnaylzed.collect(Expr.isAggregatePredicate(), aggExprs); } if (sortInfo != null) { TreeNode.collect(sortInfo.getOrderingExprs(), Expr.isAggregatePredicate(), aggExprs); } ExprSubstitutionMap countAllMap = createCountAllMap(aggExprs, analyzer); final ExprSubstitutionMap multiCountOrSumDistinctMap = createSumOrCountMultiDistinctSMap(aggExprs, analyzer); countAllMap = ExprSubstitutionMap.compose(multiCountOrSumDistinctMap, countAllMap, analyzer); List<Expr> substitutedAggs = Expr.substituteList(aggExprs, countAllMap, analyzer, false); aggExprs.clear(); TreeNode.collect(substitutedAggs, Expr.isAggregatePredicate(), aggExprs); List<TupleId> groupingByTupleIds = new ArrayList<>(); if (groupByClause != null) { groupByClause.genGroupingExprs(); ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingInfo != null) { groupingInfo.buildRepeat(groupingExprs, groupByClause.getGroupingSetList()); } substituteOrdinalsAliases(groupingExprs, "GROUP BY", analyzer); if (!groupByClause.isGroupByExtension()) { 
groupingExprs.removeIf(Expr::isConstant); } if (groupingInfo != null) { groupingInfo.genOutputTupleDescAndSMap(analyzer, groupingExprs, aggExprs); groupingByTupleIds.add(groupingInfo.getOutputTupleDesc().getId()); } groupByClause.analyze(analyzer); createAggInfo(groupingExprs, aggExprs, analyzer); } else { createAggInfo(new ArrayList<>(), aggExprs, analyzer); } AggregateInfo finalAggInfo = aggInfo.getSecondPhaseDistinctAggInfo() != null ? aggInfo.getSecondPhaseDistinctAggInfo() : aggInfo; groupingByTupleIds.add(finalAggInfo.getOutputTupleId()); ExprSubstitutionMap combinedSmap = ExprSubstitutionMap.compose( countAllMap, finalAggInfo.getOutputSmap(), analyzer); if (LOG.isDebugEnabled()) { LOG.debug("combined smap: " + combinedSmap.debugString()); LOG.debug("desctbl: " + analyzer.getDescTbl().debugString()); LOG.debug("resultexprs: " + Expr.debugString(resultExprs)); } if (havingClauseAfterAnaylzed != null) { List<Subquery> subqueryInHaving = Lists.newArrayList(); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryInHaving); for (Subquery subquery : subqueryInHaving) { if (subquery.isCorrelatedPredicate(getTableRefIds())) { throw new AnalysisException("The correlated having clause is not supported"); } } } /* * All of columns of result and having clause are replaced by new slot ref * which is bound by top tuple of agg info. 
* For example: * ResultExprs: SlotRef(k1), FunctionCall(sum(SlotRef(k2))) * Having predicate: FunctionCall(sum(SlotRef(k2))) > subquery * CombinedSMap: <SlotRef(k1) tuple 0, SlotRef(k1) of tuple 3>, * <FunctionCall(SlotRef(k2)) tuple 0, SlotRef(sum(k2)) of tuple 3> * * After rewritten: * ResultExprs: SlotRef(k1) of tuple 3, SlotRef(sum(k2)) of tuple 3 * Having predicate: SlotRef(sum(k2)) of tuple 3 > subquery */ resultExprs = Expr.substituteList(resultExprs, combinedSmap, analyzer, false); if (LOG.isDebugEnabled()) { LOG.debug("post-agg selectListExprs: " + Expr.debugString(resultExprs)); } if (havingClauseAfterAnaylzed != null) { havingPred = havingClauseAfterAnaylzed.substitute(combinedSmap, analyzer, false); analyzer.registerConjuncts(havingPred, true, finalAggInfo.getOutputTupleId().asList()); if (LOG.isDebugEnabled()) { LOG.debug("post-agg havingPred: " + havingPred.debugString()); } } if (sortInfo != null) { sortInfo.substituteOrderingExprs(combinedSmap, analyzer); if (LOG.isDebugEnabled()) { LOG.debug("post-agg orderingExprs: " + Expr.debugString(sortInfo.getOrderingExprs())); } } for (int i = 0; i < selectList.getItems().size(); ++i) { if (!resultExprs.get(i).isBoundByTupleIds(groupingByTupleIds)) { throw new AnalysisException( "select list expression not produced by aggregation output " + "(missing from " + "GROUP BY clause?): " + selectList.getItems().get(i).getExpr().toSql()); } } if (orderByElements != null) { for (int i = 0; i < orderByElements.size(); ++i) { if (!sortInfo.getOrderingExprs().get(i).isBoundByTupleIds(groupingByTupleIds)) { throw new AnalysisException( "ORDER BY expression not produced by aggregation output " + "(missing from " + "GROUP BY clause?): " + orderByElements.get(i).getExpr().toSql()); } if (sortInfo.getOrderingExprs().get(i).type.isObjectStored()) { throw new AnalysisException("ORDER BY expression could not contain object-stored columnx."); } } } if (havingPred != null) { if (!havingPred.isBoundByTupleIds(groupingByTupleIds)) 
{ throw new AnalysisException( "HAVING clause not produced by aggregation output " + "(missing from GROUP BY " + "clause?): " + havingClause.toSql()); } } } /** * Build smap count_distinct->multi_count_distinct sum_distinct->multi_count_distinct * assumes that select list and having clause have been analyzed. */ private ExprSubstitutionMap createSumOrCountMultiDistinctSMap( ArrayList<FunctionCallExpr> aggExprs, Analyzer analyzer) throws AnalysisException { final List<FunctionCallExpr> distinctExprs = Lists.newArrayList(); for (FunctionCallExpr aggExpr : aggExprs) { if (aggExpr.isDistinct()) { distinctExprs.add(aggExpr); } } final ExprSubstitutionMap result = new ExprSubstitutionMap(); final boolean isUsingSetForDistinct = AggregateInfo.estimateIfUsingSetForDistinct(distinctExprs); if (!isUsingSetForDistinct) { return result; } for (FunctionCallExpr inputExpr : distinctExprs) { Expr replaceExpr = null; final String functionName = inputExpr.getFnName().getFunction(); if (functionName.equalsIgnoreCase(FunctionSet.COUNT)) { final List<Expr> countInputExpr = Lists.newArrayList(inputExpr.getChild(0).clone(null)); replaceExpr = new FunctionCallExpr("MULTI_DISTINCT_COUNT", new FunctionParams(inputExpr.isDistinct(), countInputExpr)); } else if (functionName.equalsIgnoreCase("SUM")) { final List<Expr> sumInputExprs = Lists.newArrayList(inputExpr.getChild(0).clone(null)); replaceExpr = new FunctionCallExpr("MULTI_DISTINCT_SUM", new FunctionParams(inputExpr.isDistinct(), sumInputExprs)); } else if (functionName.equalsIgnoreCase("AVG")) { final List<Expr> sumInputExprs = Lists.newArrayList(inputExpr.getChild(0).clone(null)); final List<Expr> countInputExpr = Lists.newArrayList(inputExpr.getChild(0).clone(null)); final FunctionCallExpr sumExpr = new FunctionCallExpr("MULTI_DISTINCT_SUM", new FunctionParams(inputExpr.isDistinct(), sumInputExprs)); final FunctionCallExpr countExpr = new FunctionCallExpr("MULTI_DISTINCT_COUNT", new FunctionParams(inputExpr.isDistinct(), 
countInputExpr)); replaceExpr = new ArithmeticExpr(ArithmeticExpr.Operator.DIVIDE, sumExpr, countExpr); } else { throw new AnalysisException(inputExpr.getFnName() + " can't support multi distinct."); } replaceExpr.analyze(analyzer); result.put(inputExpr, replaceExpr); } if (LOG.isDebugEnabled()) { LOG.debug("multi distinct smap: {}", result.debugString()); } return result; } /** * Create a map from COUNT([ALL]) -> zeroifnull(COUNT([ALL])) if * i) There is no GROUP-BY, and * ii) There are other distinct aggregates to be evaluated. * This transformation is necessary for COUNT to correctly return 0 for empty * input relations. */ private ExprSubstitutionMap createCountAllMap( List<FunctionCallExpr> aggExprs, Analyzer analyzer) throws AnalysisException { ExprSubstitutionMap scalarCountAllMap = new ExprSubstitutionMap(); if (groupByClause != null && !groupByClause.isEmpty()) { return scalarCountAllMap; } com.google.common.base.Predicate<FunctionCallExpr> isNotDistinctPred = new com.google.common.base.Predicate<FunctionCallExpr>() { public boolean apply(FunctionCallExpr expr) { return !expr.isDistinct(); } }; if (Iterables.all(aggExprs, isNotDistinctPred)) { return scalarCountAllMap; } com.google.common.base.Predicate<FunctionCallExpr> isCountPred = new com.google.common.base.Predicate<FunctionCallExpr>() { public boolean apply(FunctionCallExpr expr) { return expr.getFnName().getFunction().equals(FunctionSet.COUNT); } }; Iterable<FunctionCallExpr> countAllAggs = Iterables.filter(aggExprs, Predicates.and(isCountPred, isNotDistinctPred)); for (FunctionCallExpr countAllAgg : countAllAggs) { ArrayList<Expr> zeroIfNullParam = Lists.newArrayList(countAllAgg.clone(), new IntLiteral(0, Type.BIGINT)); FunctionCallExpr zeroIfNull = new FunctionCallExpr("ifnull", zeroIfNullParam); zeroIfNull.analyze(analyzer); scalarCountAllMap.put(countAllAgg, zeroIfNull); } return scalarCountAllMap; } /** * Create aggInfo for the given grouping and agg exprs. 
*/ private void createAggInfo( ArrayList<Expr> groupingExprs, ArrayList<FunctionCallExpr> aggExprs, Analyzer analyzer) throws AnalysisException { if (selectList.isDistinct()) { Preconditions.checkState(groupingExprs.isEmpty()); Preconditions.checkState(aggExprs.isEmpty()); aggInfo = AggregateInfo.create(Expr.cloneList(resultExprs), null, null, analyzer); } else { aggInfo = AggregateInfo.create(groupingExprs, aggExprs, null, analyzer); } } /** * If the select list contains AnalyticExprs, create AnalyticInfo and substitute * AnalyticExprs using the AnalyticInfo's smap. */ private void createAnalyticInfo(Analyzer analyzer) throws AnalysisException { ArrayList<Expr> analyticExprs = Lists.newArrayList(); TreeNode.collect(resultExprs, AnalyticExpr.class, analyticExprs); if (sortInfo != null) { TreeNode.collect(sortInfo.getOrderingExprs(), AnalyticExpr.class, analyticExprs); } if (analyticExprs.isEmpty()) { return; } ExprSubstitutionMap rewriteSmap = new ExprSubstitutionMap(); for (Expr expr : analyticExprs) { AnalyticExpr toRewrite = (AnalyticExpr) expr; Expr newExpr = AnalyticExpr.rewrite(toRewrite); if (newExpr != null) { newExpr.analyze(analyzer); if (!rewriteSmap.containsMappingFor(toRewrite)) { rewriteSmap.put(toRewrite, newExpr); } } } if (rewriteSmap.size() > 0) { ArrayList<Expr> updatedAnalyticExprs = Expr.substituteList(analyticExprs, rewriteSmap, analyzer, false); analyticExprs.clear(); TreeNode.collect(updatedAnalyticExprs, AnalyticExpr.class, analyticExprs); } analyticInfo = AnalyticInfo.create(analyticExprs, analyzer); ExprSubstitutionMap smap = analyticInfo.getSmap(); if (rewriteSmap.size() > 0) { smap = ExprSubstitutionMap.compose( rewriteSmap, analyticInfo.getSmap(), analyzer); } resultExprs = Expr.substituteList(resultExprs, smap, analyzer, false); if (LOG.isDebugEnabled()) { LOG.debug("post-analytic selectListExprs: " + Expr.debugString(resultExprs)); } if (sortInfo != null) { sortInfo.substituteOrderingExprs(smap, analyzer); if (LOG.isDebugEnabled()) { 
LOG.debug("post-analytic orderingExprs: " + Expr.debugString(sortInfo.getOrderingExprs())); } } } @Override public void rewriteExprs(ExprRewriter rewriter) throws AnalysisException { Preconditions.checkState(isAnalyzed()); rewriteSelectList(rewriter); for (TableRef ref : fromClause) { ref.rewriteExprs(rewriter, analyzer); } List<Subquery> subqueryExprs = Lists.newArrayList(); if (whereClause != null) { whereClause = rewriter.rewrite(whereClause, analyzer, ExprRewriter.ClauseType.WHERE_CLAUSE); whereClause.collect(Subquery.class, subqueryExprs); } if (havingClause != null) { havingClause = rewriter.rewrite(havingClause, analyzer); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs); } for (Subquery subquery : subqueryExprs) { subquery.getStatement().rewriteExprs(rewriter); } if (groupByClause != null) { ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingExprs != null) { rewriter.rewriteList(groupingExprs, analyzer); } List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs(); if (oriGroupingExprs != null) { rewriter.rewriteList(oriGroupingExprs, analyzer); } } if (orderByElements != null) { for (OrderByElement orderByElem : orderByElements) { orderByElem.setExpr(rewriter.rewrite(orderByElem.getExpr(), analyzer)); } } } @Override public void collectExprs(Map<String, Expr> exprMap) { List<Subquery> subqueryExprs = Lists.newArrayList(); for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { continue; } registerExprId(item.getExpr()); exprMap.put(item.getExpr().getId().toString(), item.getExpr()); if (item.getExpr().contains(Predicates.instanceOf(Subquery.class))) { item.getExpr().collect(Subquery.class, subqueryExprs); } } for (TableRef ref : fromClause) { Preconditions.checkState(ref.isAnalyzed); if (ref.onClause != null) { registerExprId(ref.onClause); exprMap.put(ref.onClause.getId().toString(), ref.onClause); } if (ref instanceof InlineViewRef) { ((InlineViewRef) 
ref).getViewStmt().collectExprs(exprMap); } } if (whereClause != null) { registerExprId(whereClause); exprMap.put(whereClause.getId().toString(), whereClause); whereClause.collect(Subquery.class, subqueryExprs); } if (havingClause != null) { registerExprId(havingClauseAfterAnaylzed); exprMap.put(havingClauseAfterAnaylzed.getId().toString(), havingClauseAfterAnaylzed); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs); } for (Subquery subquery : subqueryExprs) { registerExprId(subquery); subquery.getStatement().collectExprs(exprMap); } if (groupByClause != null) { ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingExprs != null) { for (Expr expr : groupingExprs) { if (containAlias(expr)) { continue; } registerExprId(expr); exprMap.put(expr.getId().toString(), expr); } } List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs(); if (oriGroupingExprs != null) { for (Expr expr : oriGroupingExprs) { /* * Suppose there is a query statement: * * ``` * select * i_item_sk as b * from item * group by b * order by b desc * ``` * * where `b` is an alias for `i_item_sk`. * * When analyze is done, it becomes * * ``` * SELECT * `i_item_sk` * FROM `item` * GROUP BY `b` * ORDER BY `b` DESC * ``` * Aliases information of groupBy and orderBy clauses is recorded in `QueryStmt.aliasSMap`. * The select clause has its own alias info in `SelectListItem.alias`. * * Aliases expr in the `group by` and `order by` clauses are not analyzed, * i.e. `Expr.isAnalyzed=false`. Subsequent constant folding will analyze the unanalyzed Expr before * collecting the constant expressions, preventing the `INVALID_TYPE` expr from being sent to BE. * * But when analyzing the alias, the meta information corresponding to the slot cannot be found * in the catalog, an error will be reported. * * So the alias needs to be removed here. 
* */ if (containAlias(expr)) { continue; } registerExprId(expr); exprMap.put(expr.getId().toString(), expr); } } } if (orderByElements != null) { for (OrderByElement orderByElem : orderByElementsAfterAnalyzed) { if (containAlias(orderByElem.getExpr())) { continue; } registerExprId(orderByElem.getExpr()); exprMap.put(orderByElem.getExpr().getId().toString(), orderByElem.getExpr()); } } } @Override public void putBackExprs(Map<String, Expr> rewrittenExprMap) { List<Subquery> subqueryExprs = Lists.newArrayList(); for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { continue; } item.setExpr(rewrittenExprMap.get(item.getExpr().getId().toString())); if (item.getExpr().contains(Predicates.instanceOf(Subquery.class))) { item.getExpr().collect(Subquery.class, subqueryExprs); } } for (TableRef ref : fromClause) { if (ref.onClause != null) { ref.setOnClause(rewrittenExprMap.get(ref.onClause.getId().toString())); } if (ref instanceof InlineViewRef) { ((InlineViewRef) ref).getViewStmt().putBackExprs(rewrittenExprMap); } } if (whereClause != null) { setWhereClause(rewrittenExprMap.get(whereClause.getId().toString())); whereClause.collect(Subquery.class, subqueryExprs); } if (havingClause != null) { havingClause = rewrittenExprMap.get(havingClauseAfterAnaylzed.getId().toString()); havingClauseAfterAnaylzed.collect(Subquery.class, subqueryExprs); } for (Subquery subquery : subqueryExprs) { subquery.getStatement().putBackExprs(rewrittenExprMap); } if (groupByClause != null) { ArrayList<Expr> groupingExprs = groupByClause.getGroupingExprs(); if (groupingExprs != null) { ArrayList<Expr> newGroupingExpr = new ArrayList<>(); for (Expr expr : groupingExprs) { if (expr.getId() == null) { newGroupingExpr.add(expr); } else { newGroupingExpr.add(rewrittenExprMap.get(expr.getId().toString())); } } groupByClause.setGroupingExpr(newGroupingExpr); } List<Expr> oriGroupingExprs = groupByClause.getOriGroupingExprs(); if (oriGroupingExprs != null) { ArrayList<Expr> 
newOriGroupingExprs = new ArrayList<>(); for (Expr expr : oriGroupingExprs) { if (expr.getId() == null) { newOriGroupingExprs.add(expr); } else { newOriGroupingExprs.add(rewrittenExprMap.get(expr.getId().toString())); } } groupByClause.setOriGroupingExprs(newOriGroupingExprs); } } if (orderByElements != null) { for (OrderByElement orderByElem : orderByElementsAfterAnalyzed) { Expr expr = orderByElem.getExpr(); if (expr.getId() == null) { orderByElem.setExpr(expr); } else { orderByElem.setExpr(rewrittenExprMap.get(expr.getId().toString())); } } orderByElements = (ArrayList<OrderByElement>) orderByElementsAfterAnalyzed; } } private void rewriteSelectList(ExprRewriter rewriter) throws AnalysisException { for (SelectListItem item : selectList.getItems()) { if (item.getExpr() instanceof CaseExpr && item.getExpr().contains(Predicates.instanceOf(Subquery.class))) { rewriteSubquery(item.getExpr(), analyzer); } } selectList.rewriteExprs(rewriter, analyzer); } /** equal subquery in case when to an inline view * subquery in case when statement like * * SELECT CASE * WHEN ( * SELECT COUNT(*) / 2 * FROM t * ) > k4 THEN ( * SELECT AVG(k4) * FROM t * ) * ELSE ( * SELECT SUM(k4) * FROM t * ) * END AS kk4 * FROM t; * this statement will be equal to * * SELECT CASE * WHEN t1.a > k4 THEN t2.a * ELSE t3.a * END AS kk4 * FROM t, ( * SELECT COUNT(*) / 2 AS a * FROM t * ) t1, ( * SELECT AVG(k4) AS a * FROM t * ) t2, ( * SELECT SUM(k4) AS a * FROM t * ) t3; */ private Expr rewriteSubquery(Expr expr, Analyzer analyzer) throws AnalysisException { if (expr instanceof Subquery) { if (!(((Subquery) expr).getStatement() instanceof SelectStmt)) { throw new AnalysisException("Only support select subquery in case-when clause."); } if (expr.isCorrelatedPredicate(getTableRefIds())) { throw new AnalysisException("The correlated subquery in case-when clause is not supported"); } SelectStmt subquery = (SelectStmt) ((Subquery) expr).getStatement(); if (subquery.resultExprs.size() != 1 || 
!subquery.returnsSingleRow()) { throw new AnalysisException("Subquery in case-when must return scala type"); } subquery.reset(); subquery.setAssertNumRowsElement(1, AssertNumRowsElement.Assertion.EQ); String alias = getTableAliasGenerator().getNextAlias(); String colAlias = getColumnAliasGenerator().getNextAlias(); InlineViewRef inlineViewRef = new InlineViewRef(alias, subquery, Arrays.asList(colAlias)); try { inlineViewRef.analyze(analyzer); } catch (UserException e) { throw new AnalysisException(e.getMessage()); } fromClause.add(inlineViewRef); expr = new SlotRef(inlineViewRef.getAliasAsName(), colAlias); } else if (CollectionUtils.isNotEmpty(expr.getChildren())) { for (int i = 0; i < expr.getChildren().size(); ++i) { expr.setChild(i, rewriteSubquery(expr.getChild(i), analyzer)); } } return expr; } @Override public String toSql() { if (sqlString != null) { return sqlString; } StringBuilder strBuilder = new StringBuilder(); if (withClause != null) { strBuilder.append(withClause.toSql()); strBuilder.append(" "); } strBuilder.append("SELECT "); if (selectList.isDistinct()) { strBuilder.append("DISTINCT "); } for (int i = 0; i < resultExprs.size(); ++i) { if (i != 0) { strBuilder.append(", "); } if (needToSql) { strBuilder.append(originalExpr.get(i).toSql()); } else { strBuilder.append(resultExprs.get(i).toSql()); } strBuilder.append(" AS ").append(SqlUtils.getIdentSql(colLabels.get(i))); } if (!fromClause.isEmpty()) { strBuilder.append(fromClause.toSql()); } if (whereClause != null) { strBuilder.append(" WHERE "); strBuilder.append(whereClause.toSql()); } if (groupByClause != null) { strBuilder.append(" GROUP BY "); strBuilder.append(groupByClause.toSql()); } if (havingClause != null) { strBuilder.append(" HAVING "); strBuilder.append(havingClause.toSql()); } if (orderByElements != null) { strBuilder.append(" ORDER BY "); for (int i = 0; i < orderByElements.size(); ++i) { strBuilder.append(orderByElements.get(i).getExpr().toSql()); if (sortInfo != null) { 
strBuilder.append((sortInfo.getIsAscOrder().get(i)) ? " ASC" : " DESC"); } strBuilder.append((i + 1 != orderByElements.size()) ? ", " : ""); } } if (hasLimitClause()) { strBuilder.append(limitElement.toSql()); } if (hasOutFileClause()) { strBuilder.append(outFileClause.toSql()); } return strBuilder.toString(); } @Override public String toDigest() { StringBuilder strBuilder = new StringBuilder(); if (withClause != null) { strBuilder.append(withClause.toDigest()); strBuilder.append(" "); } strBuilder.append("SELECT "); if (selectList.isDistinct()) { strBuilder.append("DISTINCT "); } if (originalExpr == null) { originalExpr = Expr.cloneList(resultExprs); } if (resultExprs.isEmpty()) { for (int i = 0; i < selectList.getItems().size(); ++i) { if (i != 0) { strBuilder.append(", "); } strBuilder.append(selectList.getItems().get(i).toDigest()); } } else { for (int i = 0; i < originalExpr.size(); ++i) { if (i != 0) { strBuilder.append(", "); } strBuilder.append(originalExpr.get(i).toDigest()); strBuilder.append(" AS ").append(SqlUtils.getIdentSql(colLabels.get(i))); } } if (!fromClause.isEmpty()) { strBuilder.append(fromClause.toDigest()); } if (whereClause != null) { strBuilder.append(" WHERE "); strBuilder.append(whereClause.toDigest()); } if (groupByClause != null) { strBuilder.append(" GROUP BY "); strBuilder.append(groupByClause.toSql()); } if (havingClause != null) { strBuilder.append(" HAVING "); strBuilder.append(havingClause.toDigest()); } if (orderByElements != null) { strBuilder.append(" ORDER BY "); for (int i = 0; i < orderByElements.size(); ++i) { strBuilder.append(orderByElements.get(i).getExpr().toDigest()); if (sortInfo != null) { strBuilder.append((sortInfo.getIsAscOrder().get(i)) ? " ASC" : " DESC"); } strBuilder.append((i + 1 != orderByElements.size()) ? 
", " : ""); } } if (hasLimitClause()) { strBuilder.append(limitElement.toDigest()); } if (hasOutFileClause()) { strBuilder.append(outFileClause.toDigest()); } return strBuilder.toString(); } /** * If the select statement has a sort/top that is evaluated, then the sort tuple * is materialized. Else, if there is aggregation then the aggregate tuple id is * materialized. Otherwise, all referenced tables are materialized as long as they are * not semi-joined. If there are analytics and no sort, then the returned tuple * ids also include the logical analytic output tuple. */ @Override public void getMaterializedTupleIds(ArrayList<TupleId> tupleIdList) { if (evaluateOrderBy) { tupleIdList.add(sortInfo.getSortTupleDescriptor().getId()); } else if (aggInfo != null) { if (aggInfo.isDistinctAgg()) { tupleIdList.add(aggInfo.getSecondPhaseDistinctAggInfo().getOutputTupleId()); } else { tupleIdList.add(aggInfo.getOutputTupleId()); } } else { for (TableRef tblRef : fromClause) { tupleIdList.addAll(tblRef.getMaterializedTupleIds()); } } if (hasAnalyticInfo() && !isEvaluateOrderBy()) { tupleIdList.add(analyticInfo.getOutputTupleId()); } } @Override public void substituteSelectList(Analyzer analyzer, List<String> newColLabels) throws AnalysisException, UserException { if (hasWithClause()) { withClause.analyze(analyzer); } TableRef leftTblRef = null; for (int i = 0; i < fromClause.size(); ++i) { TableRef tblRef = fromClause.get(i); tblRef = analyzer.resolveTableRef(tblRef); Preconditions.checkNotNull(tblRef); fromClause.set(i, tblRef); tblRef.setLeftTblRef(leftTblRef); tblRef.analyze(analyzer); leftTblRef = tblRef; } for (SelectListItem item : selectList.getItems()) { if (item.isStar()) { TableName tblName = item.getTblName(); if (tblName == null) { expandStar(analyzer); } else { expandStar(analyzer, tblName); } } else { if (item.getExpr() instanceof AnalyticExpr) { item.getExpr().analyze(analyzer); } if (item.getAlias() != null) { SlotRef aliasRef = new SlotRef(null, 
item.getAlias()); SlotRef newAliasRef = new SlotRef(null, newColLabels.get(resultExprs.size())); newAliasRef.analysisDone(); aliasSMap.put(aliasRef, newAliasRef); } resultExprs.add(item.getExpr()); } } if (groupByClause != null) { substituteOrdinalsAliases(groupByClause.getGroupingExprs(), "GROUP BY", analyzer); } if (havingClause != null) { havingClause = havingClause.clone(aliasSMap); } if (orderByElements != null) { for (int i = 0; i < orderByElements.size(); ++i) { orderByElements = OrderByElement.substitute(orderByElements, aliasSMap, analyzer); } } colLabels.clear(); colLabels.addAll(newColLabels); } public boolean hasWhereClause() { return whereClause != null; } public boolean hasAggInfo() { return aggInfo != null; } public boolean hasGroupByClause() { return groupByClause != null; } /** * Check if the stmt returns a single row. This can happen * in the following cases: * 1. select stmt with a 'limit 1' clause * 2. select stmt with an aggregate function and no group by. * 3. select stmt with no from clause. * <p> * This function may produce false negatives because the cardinality of the * result set also depends on the data a stmt is processing. 
*/ public boolean returnsSingleRow() { if (hasLimitClause() && getLimit() == 1) { return true; } if (fromClause.isEmpty()) { return true; } if (hasAggInfo() && !hasGroupByClause() && !selectList.isDistinct()) { return true; } return false; } @Override public void collectTableRefs(List<TableRef> tblRefs) { for (TableRef tblRef : fromClause) { if (tblRef instanceof InlineViewRef) { InlineViewRef inlineViewRef = (InlineViewRef) tblRef; inlineViewRef.getViewStmt().collectTableRefs(tblRefs); } else { tblRefs.add(tblRef); } } } private boolean checkGroupingFn(Expr expr) { if (expr instanceof GroupingFunctionCallExpr) { return true; } else if (expr.getChildren() != null) { for (Expr child : expr.getChildren()) { if (checkGroupingFn(child)) { return true; } } } return false; } private void getAggregateFnExpr(Expr expr, ArrayList<Expr> aggFnExprList) { if (expr instanceof FunctionCallExpr && expr.fn instanceof AggregateFunction) { aggFnExprList.add(expr); } else if (expr.getChildren() != null) { for (Expr child : expr.getChildren()) { getAggregateFnExpr(child, aggFnExprList); } } } @Override public int hashCode() { return id.hashCode(); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (!(obj instanceof SelectStmt)) { return false; } return this.id.equals(((SelectStmt) obj).id); } }
It'd be good to include the conflicting artifacts into the message.
public AppModel resolveModel(AppArtifact appArtifact) throws AppModelResolverException { if (appModel != null) { if (appModel.getAppArtifact().equals(appArtifact)) { return appModel; } else { throw new AppModelResolverException("Requested artifact does not match loaded model"); } } appModel = QuarkusModelHelper.convert(model, appArtifact); return appModel; }
throw new AppModelResolverException("Requested artifact does not match loaded model");
public AppModel resolveModel(AppArtifact appArtifact) throws AppModelResolverException { if (appModel != null) { if (appModel.getAppArtifact().equals(appArtifact)) { return appModel; } else { throw new AppModelResolverException( "Requested artifact : " + appArtifact + ", does not match loaded model " + appModel.getAppArtifact()); } } appModel = QuarkusModelHelper.convert(model, appArtifact); return appModel; }
class AppModelGradleResolver implements AppModelResolver { private AppModel appModel; private final Project project; private final QuarkusModel model; public AppModelGradleResolver(Project project, QuarkusModel model) { this.model = model; this.project = project; } @Override public String getLatestVersion(AppArtifact appArtifact, String upToVersion, boolean inclusive) throws AppModelResolverException { throw new UnsupportedOperationException(); } @Override public String getLatestVersionFromRange(AppArtifact appArtifact, String range) throws AppModelResolverException { throw new UnsupportedOperationException(); } @Override public String getNextVersion(AppArtifact appArtifact, String fromVersion, boolean fromVersionIncluded, String upToVersion, boolean upToVersionIncluded) throws AppModelResolverException { throw new UnsupportedOperationException(); } @Override public List<String> listLaterVersions(AppArtifact appArtifact, String upToVersion, boolean inclusive) throws AppModelResolverException { throw new UnsupportedOperationException(); } @Override public void relink(AppArtifact appArtifact, Path localPath) throws AppModelResolverException { } @Override public Path resolve(AppArtifact appArtifact) throws AppModelResolverException { if (!appArtifact.isResolved()) { final DefaultDependencyArtifact dep = new DefaultDependencyArtifact(); dep.setExtension(appArtifact.getType()); dep.setType(appArtifact.getType()); dep.setName(appArtifact.getArtifactId()); if (appArtifact.getClassifier() != null) { dep.setClassifier(appArtifact.getClassifier()); } final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency(appArtifact.getGroupId(), appArtifact.getArtifactId(), appArtifact.getVersion(), null); gradleDep.addArtifact(dep); final Configuration detachedConfig = project.getConfigurations().detachedConfiguration(gradleDep); final ResolvedConfiguration rc = detachedConfig.getResolvedConfiguration(); Set<ResolvedArtifact> resolvedArtifacts = 
rc.getResolvedArtifacts(); for (ResolvedArtifact a : resolvedArtifacts) { if (appArtifact.getArtifactId().equals(a.getName()) && appArtifact.getType().equals(a.getType()) && (a.getClassifier() == null ? appArtifact.getClassifier() == null : a.getClassifier().equals(appArtifact.getClassifier())) && appArtifact.getGroupId().equals(a.getModuleVersion().getId().getGroup())) { appArtifact.setPath(a.getFile().toPath()); } } if (!appArtifact.isResolved()) { throw new AppModelResolverException("Failed to resolve " + appArtifact); } } return appArtifact.getPath(); } @Override public List<AppDependency> resolveUserDependencies(AppArtifact appArtifact, List<AppDependency> directDeps) { return Collections.emptyList(); } @Override @Override public AppModel resolveModel(AppArtifact root, List<AppDependency> deps) throws AppModelResolverException { throw new UnsupportedOperationException(); } @Override public AppModel resolveManagedModel(AppArtifact appArtifact, List<AppDependency> directDeps, AppArtifact managingProject, Set<AppArtifactKey> localProjects) throws AppModelResolverException { return resolveModel(appArtifact); } }
class AppModelGradleResolver implements AppModelResolver { private AppModel appModel; private final Project project; private final QuarkusModel model; public AppModelGradleResolver(Project project, QuarkusModel model) { this.model = model; this.project = project; } @Override public String getLatestVersion(AppArtifact appArtifact, String upToVersion, boolean inclusive) throws AppModelResolverException { throw new UnsupportedOperationException(); } @Override public String getLatestVersionFromRange(AppArtifact appArtifact, String range) throws AppModelResolverException { throw new UnsupportedOperationException(); } @Override public String getNextVersion(AppArtifact appArtifact, String fromVersion, boolean fromVersionIncluded, String upToVersion, boolean upToVersionIncluded) throws AppModelResolverException { throw new UnsupportedOperationException(); } @Override public List<String> listLaterVersions(AppArtifact appArtifact, String upToVersion, boolean inclusive) throws AppModelResolverException { throw new UnsupportedOperationException(); } @Override public void relink(AppArtifact appArtifact, Path localPath) throws AppModelResolverException { } @Override public Path resolve(AppArtifact appArtifact) throws AppModelResolverException { if (!appArtifact.isResolved()) { final DefaultDependencyArtifact dep = new DefaultDependencyArtifact(); dep.setExtension(appArtifact.getType()); dep.setType(appArtifact.getType()); dep.setName(appArtifact.getArtifactId()); if (appArtifact.getClassifier() != null) { dep.setClassifier(appArtifact.getClassifier()); } final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency(appArtifact.getGroupId(), appArtifact.getArtifactId(), appArtifact.getVersion(), null); gradleDep.addArtifact(dep); final Configuration detachedConfig = project.getConfigurations().detachedConfiguration(gradleDep); final ResolvedConfiguration rc = detachedConfig.getResolvedConfiguration(); Set<ResolvedArtifact> resolvedArtifacts = 
rc.getResolvedArtifacts(); for (ResolvedArtifact a : resolvedArtifacts) { if (appArtifact.getArtifactId().equals(a.getName()) && appArtifact.getType().equals(a.getType()) && (a.getClassifier() == null ? appArtifact.getClassifier() == null : a.getClassifier().equals(appArtifact.getClassifier())) && appArtifact.getGroupId().equals(a.getModuleVersion().getId().getGroup())) { appArtifact.setPath(a.getFile().toPath()); } } if (!appArtifact.isResolved()) { throw new AppModelResolverException("Failed to resolve " + appArtifact); } } return appArtifact.getPath(); } @Override public List<AppDependency> resolveUserDependencies(AppArtifact appArtifact, List<AppDependency> directDeps) { return Collections.emptyList(); } @Override @Override public AppModel resolveModel(AppArtifact root, List<AppDependency> deps) throws AppModelResolverException { throw new UnsupportedOperationException(); } @Override public AppModel resolveManagedModel(AppArtifact appArtifact, List<AppDependency> directDeps, AppArtifact managingProject, Set<AppArtifactKey> localProjects) throws AppModelResolverException { return resolveModel(appArtifact); } }
Not sure if it's important for Undertow but I would have checked if the path ends with `/` before adding a `/`
ServletBuildItem createServlet() { ServletBuildItem servletBuildItem = new ServletBuildItem("metrics", SmallRyeMetricsServlet.class.getName()); servletBuildItem.getMappings().add(metrics.path + "/*"); return servletBuildItem; }
servletBuildItem.getMappings().add(metrics.path + "/*");
ServletBuildItem createServlet() { ServletBuildItem servletBuildItem = new ServletBuildItem("metrics", SmallRyeMetricsServlet.class.getName()); servletBuildItem.getMappings().add(metrics.path + (metrics.path.endsWith("/") ? "*" : "/*")); return servletBuildItem; }
class SmallRyeMetricsConfig { /** * The path to the metrics Servlet. */ @ConfigItem(defaultValue = "/metrics") String path; }
class SmallRyeMetricsConfig { /** * The path to the metrics Servlet. */ @ConfigItem(defaultValue = "/metrics") String path; }
The last flag should be true.
public void testStreamingResult() { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, false); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> TypedResult.payload( streamingData.subList(0, streamingData.size() / 2)), () -> TypedResult.payload( streamingData.subList( streamingData.size() / 2, streamingData.size())), TypedResult::endOfStream) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); view.displayResults(true); view.close(); Assert.assertEquals( "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| op | boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| +I | (NULL) | 1 | 2 | abc | 1.23 | 2020-03-01 18:39:14.0 |" + System.lineSeparator() + "| -U | false | (NULL) | 0 | | 1 | 2020-03-01 18:39:14.1 |" + System.lineSeparator() + "| +U | true | 2147483647 | (NULL) | abcdefg | 1234567890 | 2020-03-01 18:39:14.12 |" + System.lineSeparator() + "| -D | false | -2147483648 | 9223372036854775807 | (NULL) | 12345.06789 | 2020-03-01 18:39:14.123 |" + System.lineSeparator() + "| +I | true | 100 | -9223372036854775808 | abcdefg111 | (NULL) | 2020-03-01 18:39:14.123456 |" + System.lineSeparator() + "| -D | (NULL) | -1 | -1 | abcdefghijklmnopq... | -12345.06789 | (NULL) |" + System.lineSeparator() + "| +I | (NULL) | -1 | -1 | 这是一段中文 | -12345.06789 | 2020-03-04 18:39:14.0 |" + System.lineSeparator() + "| -D | (NULL) | -1 | -1 | これは日本語をテ... 
| -12345.06789 | 2020-03-04 18:39:14.0 |" + System.lineSeparator() + "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "Received a total of 8 rows" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(0)); }
ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, false);
public void testStreamingResult() { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, true); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> TypedResult.payload( streamingData.subList(0, streamingData.size() / 2)), () -> TypedResult.payload( streamingData.subList( streamingData.size() / 2, streamingData.size())), TypedResult::endOfStream) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); view.displayResults(); view.close(); Assert.assertEquals( "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| op | boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| +I | (NULL) | 1 | 2 | abc | 1.23 | 2020-03-01 18:39:14.0 |" + System.lineSeparator() + "| -U | false | (NULL) | 0 | | 1 | 2020-03-01 18:39:14.1 |" + System.lineSeparator() + "| +U | true | 2147483647 | (NULL) | abcdefg | 1234567890 | 2020-03-01 18:39:14.12 |" + System.lineSeparator() + "| -D | false | -2147483648 | 9223372036854775807 | (NULL) | 12345.06789 | 2020-03-01 18:39:14.123 |" + System.lineSeparator() + "| +I | true | 100 | -9223372036854775808 | abcdefg111 | (NULL) | 2020-03-01 18:39:14.123456 |" + System.lineSeparator() + "| -D | (NULL) | -1 | -1 | abcdefghijklmnopq... | -12345.06789 | (NULL) |" + System.lineSeparator() + "| +I | (NULL) | -1 | -1 | 这是一段中文 | -12345.06789 | 2020-03-04 18:39:14.0 |" + System.lineSeparator() + "| -D | (NULL) | -1 | -1 | これは日本語をテ... 
| -12345.06789 | 2020-03-04 18:39:14.0 |" + System.lineSeparator() + "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "Received a total of 8 rows" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(0)); }
class CliTableauResultViewTest { private ByteArrayOutputStream terminalOutput; private Terminal terminal; private TableSchema schema; private List<Row> data; private List<Row> streamingData; @Before public void setUp() { terminalOutput = new ByteArrayOutputStream(); terminal = TerminalUtils.createDummyTerminal(terminalOutput); schema = TableSchema.builder() .field("boolean", DataTypes.BOOLEAN()) .field("int", DataTypes.INT()) .field("bigint", DataTypes.BIGINT()) .field("varchar", DataTypes.STRING()) .field("decimal(10, 5)", DataTypes.DECIMAL(10, 5)) .field("timestamp", DataTypes.TIMESTAMP(6)) .build(); data = new ArrayList<>(); data.add( Row.ofKind( RowKind.INSERT, null, 1, 2, "abc", BigDecimal.valueOf(1.23), Timestamp.valueOf("2020-03-01 18:39:14"))); data.add( Row.ofKind( RowKind.UPDATE_BEFORE, false, null, 0, "", BigDecimal.valueOf(1), Timestamp.valueOf("2020-03-01 18:39:14.1"))); data.add( Row.ofKind( RowKind.UPDATE_AFTER, true, Integer.MAX_VALUE, null, "abcdefg", BigDecimal.valueOf(1234567890), Timestamp.valueOf("2020-03-01 18:39:14.12"))); data.add( Row.ofKind( RowKind.DELETE, false, Integer.MIN_VALUE, Long.MAX_VALUE, null, BigDecimal.valueOf(12345.06789), Timestamp.valueOf("2020-03-01 18:39:14.123"))); data.add( Row.ofKind( RowKind.INSERT, true, 100, Long.MIN_VALUE, "abcdefg111", null, Timestamp.valueOf("2020-03-01 18:39:14.123456"))); data.add( Row.ofKind( RowKind.DELETE, null, -1, -1, "abcdefghijklmnopqrstuvwxyz", BigDecimal.valueOf(-12345.06789), null)); data.add( Row.ofKind( RowKind.INSERT, null, -1, -1, "这是一段中文", BigDecimal.valueOf(-12345.06789), Timestamp.valueOf("2020-03-04 18:39:14"))); data.add( Row.ofKind( RowKind.DELETE, null, -1, -1, "これは日本語をテストするための文です", BigDecimal.valueOf(-12345.06789), Timestamp.valueOf("2020-03-04 18:39:14"))); streamingData = new ArrayList<>(); for (int i = 0; i < data.size(); ++i) { Row row = Row.copy(data.get(i)); streamingData.add(row); } } @Test public void testBatchResult() { ResultDescriptor resultDescriptor = new 
ResultDescriptor("", schema, true, true, false); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> TypedResult.payload(data.subList(0, data.size() / 2)), () -> TypedResult.payload( data.subList(data.size() / 2, data.size())), TypedResult::endOfStream) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); view.displayResults(false); view.close(); Assert.assertEquals( "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| (NULL) | 1 | 2 | abc | 1.23 | 2020-03-01 18:39:14.0 |" + System.lineSeparator() + "| false | (NULL) | 0 | | 1 | 2020-03-01 18:39:14.1 |" + System.lineSeparator() + "| true | 2147483647 | (NULL) | abcdefg | 1234567890 | 2020-03-01 18:39:14.12 |" + System.lineSeparator() + "| false | -2147483648 | 9223372036854775807 | (NULL) | 12345.06789 | 2020-03-01 18:39:14.123 |" + System.lineSeparator() + "| true | 100 | -9223372036854775808 | abcdefg111 | (NULL) | 2020-03-01 18:39:14.123456 |" + System.lineSeparator() + "| (NULL) | -1 | -1 | abcdefghijklmnopq... | -12345.06789 | (NULL) |" + System.lineSeparator() + "| (NULL) | -1 | -1 | 这是一段中文 | -12345.06789 | 2020-03-04 18:39:14.0 |" + System.lineSeparator() + "| (NULL) | -1 | -1 | これは日本語をテ... 
| -12345.06789 | 2020-03-04 18:39:14.0 |" + System.lineSeparator() + "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "Received a total of 8 rows" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(0)); } @Test public void testCancelBatchResult() throws Exception { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, false); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> TypedResult.payload(data.subList(0, data.size() / 2)), TypedResult::empty) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); ExecutorService executorService = Executors.newSingleThreadExecutor(); Future<?> furture = executorService.submit(() -> view.displayResults(false)); CommonTestUtils.waitUntilCondition( () -> mockExecutor.getNumRetrieveResultChancesCalls() > 0, Deadline.now().plus(Duration.ofSeconds(5)), 50L); terminal.raise(Terminal.Signal.INT); furture.get(5, TimeUnit.SECONDS); Assert.assertEquals( "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| (NULL) | 1 | 2 | abc | 1.23 | 2020-03-01 18:39:14.0 |" + System.lineSeparator() + "| false | (NULL) | 0 | | 1 | 2020-03-01 18:39:14.1 |" + System.lineSeparator() + "| true | 2147483647 | (NULL) | abcdefg | 1234567890 | 2020-03-01 18:39:14.12 |" + System.lineSeparator() + "| false | -2147483648 | 9223372036854775807 | (NULL) | 12345.06789 | 2020-03-01 18:39:14.123 |" + System.lineSeparator() + "Query terminated, received a total 
of 4 rows" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumRetrieveResultPageCalls(), is(0)); assertThat(mockExecutor.getNumCancelCalls(), is(1)); view.close(); } @Test public void testEmptyBatchResult() { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, false); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier(TypedResult::endOfStream) .setResultPageSupplier( () -> { throw new SqlExecutionException("query failed"); }) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); view.displayResults(false); view.close(); Assert.assertEquals( "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "Received a total of 0 row" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(0)); } @Test public void testFailedBatchResult() { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, false); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> { throw new SqlExecutionException("query failed"); }, TypedResult::endOfStream) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); try { view.displayResults(false); Assert.fail("Shouldn't get here"); } catch (SqlExecutionException e) { Assert.assertEquals("query failed", e.getMessage()); } view.close(); assertThat(mockExecutor.getNumCancelCalls(), is(1)); } @Test @Test public void testEmptyStreamingResult() { ResultDescriptor resultDescriptor = new ResultDescriptor("", 
schema, true, true, true); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier(TypedResult::endOfStream) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); view.displayResults(true); view.close(); Assert.assertEquals( "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| op | boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "Received a total of 0 row" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(0)); } @Test public void testCancelStreamingResult() throws Exception { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, true); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> TypedResult.payload( streamingData.subList(0, streamingData.size() / 2)), TypedResult::empty) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); ExecutorService executorService = Executors.newSingleThreadExecutor(); Future<?> furture = executorService.submit(() -> view.displayResults(true)); CommonTestUtils.waitUntilCondition( () -> mockExecutor.getNumRetrieveResultChancesCalls() > 1, Deadline.now().plus(Duration.ofSeconds(5)), 50L); terminal.raise(Terminal.Signal.INT); furture.get(5, TimeUnit.SECONDS); view.close(); Assert.assertEquals( "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| op | boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + 
"+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| +I | (NULL) | 1 | 2 | abc | 1.23 | 2020-03-01 18:39:14.0 |" + System.lineSeparator() + "| -U | false | (NULL) | 0 | | 1 | 2020-03-01 18:39:14.1 |" + System.lineSeparator() + "| +U | true | 2147483647 | (NULL) | abcdefg | 1234567890 | 2020-03-01 18:39:14.12 |" + System.lineSeparator() + "| -D | false | -2147483648 | 9223372036854775807 | (NULL) | 12345.06789 | 2020-03-01 18:39:14.123 |" + System.lineSeparator() + "Query terminated, received a total of 4 rows" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(1)); } @Test public void testFailedStreamingResult() { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, true); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> TypedResult.payload( streamingData.subList(0, streamingData.size() / 2)), () -> { throw new SqlExecutionException("query failed"); }) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); try { view.displayResults(true); Assert.fail("Shouldn't get here"); } catch (SqlExecutionException e) { Assert.assertEquals("query failed", e.getMessage()); } view.close(); Assert.assertEquals( "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| op | boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| +I | (NULL) | 1 | 2 | abc | 1.23 | 2020-03-01 18:39:14.0 |" + System.lineSeparator() + "| -U | false | (NULL) | 0 | | 1 | 2020-03-01 18:39:14.1 |" + System.lineSeparator() + "| +U | true | 
2147483647 | (NULL) | abcdefg | 1234567890 | 2020-03-01 18:39:14.12 |" + System.lineSeparator() + "| -D | false | -2147483648 | 9223372036854775807 | (NULL) | 12345.06789 | 2020-03-01 18:39:14.123 |" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(1)); } }
class CliTableauResultViewTest { private ByteArrayOutputStream terminalOutput; private Terminal terminal; private TableSchema schema; private List<Row> data; private List<Row> streamingData; @Before public void setUp() { terminalOutput = new ByteArrayOutputStream(); terminal = TerminalUtils.createDummyTerminal(terminalOutput); schema = TableSchema.builder() .field("boolean", DataTypes.BOOLEAN()) .field("int", DataTypes.INT()) .field("bigint", DataTypes.BIGINT()) .field("varchar", DataTypes.STRING()) .field("decimal(10, 5)", DataTypes.DECIMAL(10, 5)) .field("timestamp", DataTypes.TIMESTAMP(6)) .build(); data = new ArrayList<>(); data.add( Row.ofKind( RowKind.INSERT, null, 1, 2, "abc", BigDecimal.valueOf(1.23), Timestamp.valueOf("2020-03-01 18:39:14"))); data.add( Row.ofKind( RowKind.UPDATE_BEFORE, false, null, 0, "", BigDecimal.valueOf(1), Timestamp.valueOf("2020-03-01 18:39:14.1"))); data.add( Row.ofKind( RowKind.UPDATE_AFTER, true, Integer.MAX_VALUE, null, "abcdefg", BigDecimal.valueOf(1234567890), Timestamp.valueOf("2020-03-01 18:39:14.12"))); data.add( Row.ofKind( RowKind.DELETE, false, Integer.MIN_VALUE, Long.MAX_VALUE, null, BigDecimal.valueOf(12345.06789), Timestamp.valueOf("2020-03-01 18:39:14.123"))); data.add( Row.ofKind( RowKind.INSERT, true, 100, Long.MIN_VALUE, "abcdefg111", null, Timestamp.valueOf("2020-03-01 18:39:14.123456"))); data.add( Row.ofKind( RowKind.DELETE, null, -1, -1, "abcdefghijklmnopqrstuvwxyz", BigDecimal.valueOf(-12345.06789), null)); data.add( Row.ofKind( RowKind.INSERT, null, -1, -1, "这是一段中文", BigDecimal.valueOf(-12345.06789), Timestamp.valueOf("2020-03-04 18:39:14"))); data.add( Row.ofKind( RowKind.DELETE, null, -1, -1, "これは日本語をテストするための文です", BigDecimal.valueOf(-12345.06789), Timestamp.valueOf("2020-03-04 18:39:14"))); streamingData = new ArrayList<>(); for (int i = 0; i < data.size(); ++i) { Row row = Row.copy(data.get(i)); streamingData.add(row); } } @Test public void testBatchResult() { ResultDescriptor resultDescriptor = new 
ResultDescriptor("", schema, true, true, false); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> TypedResult.payload(data.subList(0, data.size() / 2)), () -> TypedResult.payload( data.subList(data.size() / 2, data.size())), TypedResult::endOfStream) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); view.displayResults(); view.close(); Assert.assertEquals( "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| (NULL) | 1 | 2 | abc | 1.23 | 2020-03-01 18:39:14.0 |" + System.lineSeparator() + "| false | (NULL) | 0 | | 1 | 2020-03-01 18:39:14.1 |" + System.lineSeparator() + "| true | 2147483647 | (NULL) | abcdefg | 1234567890 | 2020-03-01 18:39:14.12 |" + System.lineSeparator() + "| false | -2147483648 | 9223372036854775807 | (NULL) | 12345.06789 | 2020-03-01 18:39:14.123 |" + System.lineSeparator() + "| true | 100 | -9223372036854775808 | abcdefg111 | (NULL) | 2020-03-01 18:39:14.123456 |" + System.lineSeparator() + "| (NULL) | -1 | -1 | abcdefghijklmnopq... | -12345.06789 | (NULL) |" + System.lineSeparator() + "| (NULL) | -1 | -1 | 这是一段中文 | -12345.06789 | 2020-03-04 18:39:14.0 |" + System.lineSeparator() + "| (NULL) | -1 | -1 | これは日本語をテ... 
| -12345.06789 | 2020-03-04 18:39:14.0 |" + System.lineSeparator() + "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "Received a total of 8 rows" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(0)); } @Test public void testCancelBatchResult() throws Exception { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, false); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> TypedResult.payload(data.subList(0, data.size() / 2)), TypedResult::empty) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); ExecutorService executorService = Executors.newSingleThreadExecutor(); Future<?> furture = executorService.submit(() -> view.displayResults()); CommonTestUtils.waitUntilCondition( () -> mockExecutor.getNumRetrieveResultChancesCalls() > 0, Deadline.now().plus(Duration.ofSeconds(5)), 50L); terminal.raise(Terminal.Signal.INT); furture.get(5, TimeUnit.SECONDS); Assert.assertEquals( "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| (NULL) | 1 | 2 | abc | 1.23 | 2020-03-01 18:39:14.0 |" + System.lineSeparator() + "| false | (NULL) | 0 | | 1 | 2020-03-01 18:39:14.1 |" + System.lineSeparator() + "| true | 2147483647 | (NULL) | abcdefg | 1234567890 | 2020-03-01 18:39:14.12 |" + System.lineSeparator() + "| false | -2147483648 | 9223372036854775807 | (NULL) | 12345.06789 | 2020-03-01 18:39:14.123 |" + System.lineSeparator() + "Query terminated, received a total of 4 
rows" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumRetrieveResultPageCalls(), is(0)); assertThat(mockExecutor.getNumCancelCalls(), is(1)); view.close(); } @Test public void testEmptyBatchResult() { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, false); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier(TypedResult::endOfStream) .setResultPageSupplier( () -> { throw new SqlExecutionException("query failed"); }) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); view.displayResults(); view.close(); Assert.assertEquals( "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "Received a total of 0 row" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(0)); } @Test public void testFailedBatchResult() { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, false); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> { throw new SqlExecutionException("query failed"); }, TypedResult::endOfStream) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); try { view.displayResults(); Assert.fail("Shouldn't get here"); } catch (SqlExecutionException e) { Assert.assertEquals("query failed", e.getMessage()); } view.close(); assertThat(mockExecutor.getNumCancelCalls(), is(1)); } @Test @Test public void testEmptyStreamingResult() { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, 
true, true); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier(TypedResult::endOfStream) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); view.displayResults(); view.close(); Assert.assertEquals( "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| op | boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "Received a total of 0 row" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(0)); } @Test public void testCancelStreamingResult() throws Exception { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, true); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> TypedResult.payload( streamingData.subList(0, streamingData.size() / 2)), TypedResult::empty) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); ExecutorService executorService = Executors.newSingleThreadExecutor(); Future<?> furture = executorService.submit(() -> view.displayResults()); CommonTestUtils.waitUntilCondition( () -> mockExecutor.getNumRetrieveResultChancesCalls() > 1, Deadline.now().plus(Duration.ofSeconds(5)), 50L); terminal.raise(Terminal.Signal.INT); furture.get(5, TimeUnit.SECONDS); view.close(); Assert.assertEquals( "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| op | boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + 
"+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| +I | (NULL) | 1 | 2 | abc | 1.23 | 2020-03-01 18:39:14.0 |" + System.lineSeparator() + "| -U | false | (NULL) | 0 | | 1 | 2020-03-01 18:39:14.1 |" + System.lineSeparator() + "| +U | true | 2147483647 | (NULL) | abcdefg | 1234567890 | 2020-03-01 18:39:14.12 |" + System.lineSeparator() + "| -D | false | -2147483648 | 9223372036854775807 | (NULL) | 12345.06789 | 2020-03-01 18:39:14.123 |" + System.lineSeparator() + "Query terminated, received a total of 4 rows" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(1)); } @Test public void testFailedStreamingResult() { ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, true, true); TestingExecutor mockExecutor = new TestingExecutorBuilder() .setResultChangesSupplier( () -> TypedResult.payload( streamingData.subList(0, streamingData.size() / 2)), () -> { throw new SqlExecutionException("query failed"); }) .build(); CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor); try { view.displayResults(); Assert.fail("Shouldn't get here"); } catch (SqlExecutionException e) { Assert.assertEquals("query failed", e.getMessage()); } view.close(); Assert.assertEquals( "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| op | boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+----+---------+-------------+----------------------+----------------------+----------------+----------------------------+" + System.lineSeparator() + "| +I | (NULL) | 1 | 2 | abc | 1.23 | 2020-03-01 18:39:14.0 |" + System.lineSeparator() + "| -U | false | (NULL) | 0 | | 1 | 2020-03-01 18:39:14.1 |" + System.lineSeparator() + "| +U | true | 
2147483647 | (NULL) | abcdefg | 1234567890 | 2020-03-01 18:39:14.12 |" + System.lineSeparator() + "| -D | false | -2147483648 | 9223372036854775807 | (NULL) | 12345.06789 | 2020-03-01 18:39:14.123 |" + System.lineSeparator(), terminalOutput.toString()); assertThat(mockExecutor.getNumCancelCalls(), is(1)); } }
We might pick `currentWork` from `workQueue` more than once. `currentTimeoutOperation == null` indicates that we are picking it up for the first time. We do not need to process `currentWork` if it is picked up a second time and there are no `bufferMessages` to send to it. `while ((currentWork = workQueue.peek()) != null && (currentTimeoutOperation == null || bufferMessages.size() > 0 )) {` The timeout operation does not remove `currentWork` from the queue, so `currentWork` needs to be picked up again; thus we need to treat this case differently, go into the loop, and remove the timed-out work. Setting `currentTimeoutOperation = null` indicates this case as well.
/**
 * Drains buffered messages into the {@code SynchronousReceiveWork} items queued in
 * {@code workQueue}, one work item at a time, while holding {@code currentWorkLock}.
 * Terminal (completed/errored/timed-out) work is removed from the queue and its timeout
 * operation disposed; for still-active work, additional credit is requested from upstream
 * to cover its outstanding demand.
 */
private void drainQueue() {
    if (isTerminated()) {
        return;
    }
    synchronized (currentWorkLock) {
        // Clean up a work item that reached a terminal state since the previous drain pass.
        if (currentWork != null && currentWork.isTerminal()) {
            workQueue.remove(currentWork);
            // BUG FIX: was 'currentTimeoutOperation != null & !currentTimeoutOperation.isDisposed()'.
            // The non-short-circuiting '&' evaluates the right-hand side even when the reference is
            // null, throwing NullPointerException. '&&' short-circuits the null case. (Same fix
            // applied to the two identical checks further down.)
            if (currentTimeoutOperation != null && !currentTimeoutOperation.isDisposed()) {
                currentTimeoutOperation.dispose();
            }
            currentTimeoutOperation = null;
        }
        // currentTimeoutOperation == null marks the first pick-up of the head work item; on a
        // re-entry we only proceed when there are buffered messages to deliver to it.
        while ((currentWork = workQueue.peek()) != null && (currentTimeoutOperation == null || bufferMessages.size() > 0)) {
            if (currentWork.isTerminal()) {
                // The timeout operation does not remove timed-out work from the queue, so a
                // terminal item found at the head is removed here before moving on.
                workQueue.remove(currentWork);
                if (currentTimeoutOperation != null && !currentTimeoutOperation.isDisposed()) {
                    currentTimeoutOperation.dispose();
                }
                currentTimeoutOperation = null;
                continue;
            }
            if (currentTimeoutOperation == null) {
                currentTimeoutOperation = getTimeoutOperation(currentWork);
            }
            // Deliver as many buffered messages as the work will accept.
            while (bufferMessages.size() > 0 && !currentWork.isTerminal()) {
                currentWork.next(bufferMessages.poll());
                remaining.decrementAndGet();
            }
            if (currentWork.isTerminal()) {
                if (currentWork.getError() == null) {
                    currentWork.complete();
                }
                workQueue.remove(currentWork);
                if (currentTimeoutOperation != null && !currentTimeoutOperation.isDisposed()) {
                    currentTimeoutOperation.dispose();
                }
                currentTimeoutOperation = null;
                logger.verbose("The work [{}] is complete.", currentWork.getId());
            } else {
                // Request more from upstream only if the work still needs more than what is
                // already outstanding (requested but undelivered) plus what is buffered locally.
                long creditToAdd = currentWork.getRemaining() - (remaining.get() + bufferMessages.size());
                if (creditToAdd > 0) {
                    remaining.addAndGet(creditToAdd);
                    subscription.request(creditToAdd);
                    logger.verbose("Requesting [{}] from upstream for work [{}].", creditToAdd, currentWork.getId());
                }
            }
        }
    }
}
currentTimeoutOperation = null;
/**
 * Drains buffered messages into the queued {@code SynchronousReceiveWork} items, one work item
 * at a time, while holding {@code currentWorkLock}.
 *
 * A work item may be peeked more than once: {@code isProcessingStarted()} distinguishes the
 * first pick-up (schedule its timeout, mark processing started) from a re-entry (only deliver
 * buffered messages). The timeout operation does not remove timed-out work from the queue, so
 * terminal work found at the head is removed here.
 */
private void drainQueue() {
    if (isTerminated()) {
        return;
    }
    synchronized (currentWorkLock) {
        // Clean up a work item that reached a terminal state since the previous drain pass.
        if (currentWork != null && currentWork.isTerminal()) {
            workQueue.remove(currentWork);
            if (currentTimeoutOperation != null && !currentTimeoutOperation.isDisposed()) {
                currentTimeoutOperation.dispose();
            }
            currentTimeoutOperation = null;
        }
        // Enter for a fresh work item, or for an in-progress one only when messages are buffered.
        while ((currentWork = workQueue.peek()) != null
            && (!currentWork.isProcessingStarted() || bufferMessages.size() > 0)) {
            if (currentWork.isTerminal()) {
                // Timed-out (or otherwise terminal) head item: remove it and move to the next.
                workQueue.remove(currentWork);
                if (currentTimeoutOperation != null && !currentTimeoutOperation.isDisposed()) {
                    currentTimeoutOperation.dispose();
                }
                continue;
            }
            if (!currentWork.isProcessingStarted()) {
                // First pick-up of this work: start its timeout clock.
                currentTimeoutOperation = getTimeoutOperation(currentWork);
                currentWork.startedProcessing();
            }
            // Deliver as many buffered messages as the work will accept.
            while (bufferMessages.size() > 0 && !currentWork.isTerminal()) {
                currentWork.next(bufferMessages.poll());
                remaining.decrementAndGet();
            }
            if (currentWork.isTerminal()) {
                if (currentWork.getError() == null) {
                    currentWork.complete();
                }
                workQueue.remove(currentWork);
                if (currentTimeoutOperation != null && !currentTimeoutOperation.isDisposed()) {
                    currentTimeoutOperation.dispose();
                }
                logger.verbose("The work [{}] is complete.", currentWork.getId());
            } else {
                // Request more from upstream only if the work still needs more than what is
                // already outstanding (requested but undelivered) plus what is buffered locally.
                long creditToAdd = currentWork.getRemaining() - (remaining.get() + bufferMessages.size());
                if (creditToAdd > 0) {
                    remaining.addAndGet(creditToAdd);
                    subscription.request(creditToAdd);
                    logger.verbose("Requesting [{}] from upstream for work [{}].", creditToAdd, currentWork.getId());
                }
            }
        }
    }
}
/**
 * Subscriber that bridges an asynchronous upstream of {@link ServiceBusReceivedMessageContext}
 * to synchronous {@link SynchronousReceiveWork} items. Incoming messages are buffered and handed
 * to queued work items by a drain loop that only one thread runs at a time.
 */
class SynchronousMessageSubscriber extends BaseSubscriber<ServiceBusReceivedMessageContext> {
    private final ClientLogger logger = new ClientLogger(SynchronousMessageSubscriber.class);
    // Set once on cancel; checked to short-circuit further processing.
    private final AtomicBoolean isDisposed = new AtomicBoolean();
    // Work-in-progress guard: ensures a single thread executes the drain loop at a time.
    private final AtomicInteger wip = new AtomicInteger();
    // Pending synchronous receive work items, serviced in FIFO order.
    private final Queue<SynchronousReceiveWork> workQueue = new ConcurrentLinkedQueue<>();
    // Messages received from upstream that have not yet been handed to a work item.
    private final Queue<ServiceBusReceivedMessageContext> bufferMessages = new ConcurrentLinkedQueue<>();
    // Count of messages requested from upstream but not yet delivered to work items.
    private final AtomicLong remaining = new AtomicLong();
    private final long requested;
    // Guards currentWork and currentTimeoutOperation.
    private final Object currentWorkLock = new Object();
    private Disposable currentTimeoutOperation;
    private SynchronousReceiveWork currentWork;
    private boolean subscriberInitialized;
    private volatile Subscription subscription;

    // Initial request is the larger of the prefetch and the first work item's demand.
    SynchronousMessageSubscriber(long prefetch, SynchronousReceiveWork initialWork) {
        this.workQueue.add(initialWork);
        requested = initialWork.getNumberOfEvents() > prefetch ? initialWork.getNumberOfEvents() : prefetch;
    }

    /**
     * On an initial subscription, will take the first work item, and request that amount of work for it.
     * @param subscription Subscription for upstream.
     */
    @Override
    protected void hookOnSubscribe(Subscription subscription) {
        this.subscription = subscription;
        remaining.addAndGet(requested);
        subscription.request(requested);
        subscriberInitialized = true;
        drain();
    }

    /**
     * Publishes the event to the current {@link SynchronousReceiveWork}. If that work item is complete, will dispose of
     * the subscriber.
     * @param message Event to publish.
     */
    @Override
    protected void hookOnNext(ServiceBusReceivedMessageContext message) {
        bufferMessages.add(message);
        drain();
    }

    /**
     * Queue the work to be picked up by drain loop.
     * @param work to be queued.
     */
    void queueWork(SynchronousReceiveWork work) {
        logger.info("[{}] Pending: {}, Scheduling receive timeout task '{}'.", work.getId(), work.getNumberOfEvents(), work.getTimeout());
        workQueue.add(work);
        // Until the upstream subscription exists, draining is deferred to hookOnSubscribe.
        if (subscriberInitialized) {
            drain();
        }
    }

    /**
     * Drain the work, only one thread can be in this loop at a time.
     */
    private void drain() {
        if (!wip.compareAndSet(0, 1)) {
            return;
        }
        try {
            drainQueue();
        } finally {
            final int decremented = wip.decrementAndGet();
            if (decremented != 0) {
                logger.warning("There should be 0, but was: {}", decremented);
            }
        }
    }

    /**
     * Schedules a timeout for the given work item; when it fires, the work is timed out only if
     * it is still the current work.
     *
     * @param work work item on which the timeout should start.
     * @return the {@link Disposable} of the scheduled timeout operation.
     */
    private Disposable getTimeoutOperation(SynchronousReceiveWork work) {
        Duration timeout = work.getTimeout();
        return Mono.delay(timeout).thenReturn(work)
            .subscribe(l -> {
                synchronized (currentWorkLock) {
                    if (currentWork == work) {
                        work.timeout();
                    }
                }
            });
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void hookOnError(Throwable throwable) {
        // NOTE(review): currentWork is read without the lock and could be null if an error
        // arrives before any work is polled — confirm upstream guarantees a current work here.
        logger.error("[{}] Errors occurred upstream", currentWork.getId(), throwable);
        synchronized (currentWorkLock) {
            currentWork.error(throwable);
        }
        dispose();
    }

    @Override
    protected void hookOnCancel() {
        if (isDisposed.getAndSet(true)) {
            return;
        }
        synchronized (currentWorkLock) {
            if (currentWork != null) {
                currentWork.complete();
            }
            if (currentTimeoutOperation != null && !currentTimeoutOperation.isDisposed()) {
                currentTimeoutOperation.dispose();
            }
            currentTimeoutOperation = null;
        }
        subscription.cancel();
    }

    private boolean isTerminated() {
        return isDisposed.get();
    }
}
/**
 * Subscriber that bridges an asynchronous upstream of {@link ServiceBusReceivedMessageContext}
 * to synchronous {@link SynchronousReceiveWork} items. Incoming messages are buffered and handed
 * to queued work items by a drain loop that only one thread runs at a time.
 */
class SynchronousMessageSubscriber extends BaseSubscriber<ServiceBusReceivedMessageContext> {
    private final ClientLogger logger = new ClientLogger(SynchronousMessageSubscriber.class);
    // Set once on cancel; checked to short-circuit further processing.
    private final AtomicBoolean isDisposed = new AtomicBoolean();
    // Work-in-progress guard: ensures a single thread executes the drain loop at a time.
    private final AtomicInteger wip = new AtomicInteger();
    // Pending synchronous receive work items, serviced in FIFO order.
    private final Queue<SynchronousReceiveWork> workQueue = new ConcurrentLinkedQueue<>();
    // Messages received from upstream that have not yet been handed to a work item.
    private final Queue<ServiceBusReceivedMessageContext> bufferMessages = new ConcurrentLinkedQueue<>();
    // Count of messages requested from upstream but not yet delivered to work items.
    private final AtomicLong remaining = new AtomicLong();
    private final long requested;
    // Guards currentWork and currentTimeoutOperation.
    private final Object currentWorkLock = new Object();
    private Disposable currentTimeoutOperation;
    private SynchronousReceiveWork currentWork;
    private boolean subscriberInitialized;
    private volatile Subscription subscription;
    // Atomically guards 'subscription' so only the first hookOnSubscribe call wins.
    private static final AtomicReferenceFieldUpdater<SynchronousMessageSubscriber, Subscription> UPSTREAM =
        AtomicReferenceFieldUpdater.newUpdater(SynchronousMessageSubscriber.class, Subscription.class,
            "subscription");

    // Initial request is the larger of the prefetch and the first work item's demand.
    SynchronousMessageSubscriber(long prefetch, SynchronousReceiveWork initialWork) {
        this.workQueue.add(initialWork);
        requested = initialWork.getNumberOfEvents() > prefetch ? initialWork.getNumberOfEvents() : prefetch;
    }

    /**
     * On an initial subscription, will take the first work item, and request that amount of work for it.
     * @param subscription Subscription for upstream.
     */
    @Override
    protected void hookOnSubscribe(Subscription subscription) {
        // setOnce rejects a second subscription attempt; only the first caller initializes state.
        if (Operators.setOnce(UPSTREAM, this, subscription)) {
            this.subscription = subscription;
            remaining.addAndGet(requested);
            subscription.request(requested);
            subscriberInitialized = true;
            drain();
        } else {
            logger.error("Already subscribed once.");
        }
    }

    /**
     * Publishes the event to the current {@link SynchronousReceiveWork}. If that work item is complete, will dispose of
     * the subscriber.
     * @param message Event to publish.
     */
    @Override
    protected void hookOnNext(ServiceBusReceivedMessageContext message) {
        bufferMessages.add(message);
        drain();
    }

    /**
     * Queue the work to be picked up by drain loop.
     * @param work to be queued.
     */
    void queueWork(SynchronousReceiveWork work) {
        logger.info("[{}] Pending: {}, Scheduling receive timeout task '{}'.", work.getId(), work.getNumberOfEvents(), work.getTimeout());
        workQueue.add(work);
        // Until the upstream subscription exists, draining is deferred to hookOnSubscribe.
        if (subscriberInitialized) {
            drain();
        }
    }

    /**
     * Drain the work, only one thread can be in this loop at a time.
     */
    private void drain() {
        if (!wip.compareAndSet(0, 1)) {
            return;
        }
        try {
            drainQueue();
        } finally {
            final int decremented = wip.decrementAndGet();
            if (decremented != 0) {
                logger.warning("There should be 0, but was: {}", decremented);
            }
        }
    }

    /**
     * Schedules a timeout for the given work item; when it fires, the work is timed out only if
     * it is still the current work.
     *
     * @param work work item on which the timeout should start.
     * @return {@link Disposable} for the timeout operation.
     */
    private Disposable getTimeoutOperation(SynchronousReceiveWork work) {
        Duration timeout = work.getTimeout();
        return Mono.delay(timeout).thenReturn(work)
            .subscribe(l -> {
                synchronized (currentWorkLock) {
                    if (currentWork == work) {
                        work.timeout();
                    }
                }
            });
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void hookOnError(Throwable throwable) {
        // NOTE(review): currentWork is read without the lock and could be null if an error
        // arrives before any work is polled — confirm upstream guarantees a current work here.
        logger.error("[{}] Errors occurred upstream", currentWork.getId(), throwable);
        synchronized (currentWorkLock) {
            currentWork.error(throwable);
        }
        dispose();
    }

    @Override
    protected void hookOnCancel() {
        if (isDisposed.getAndSet(true)) {
            return;
        }
        synchronized (currentWorkLock) {
            if (currentWork != null) {
                currentWork.complete();
            }
            if (currentTimeoutOperation != null && !currentTimeoutOperation.isDisposed()) {
                currentTimeoutOperation.dispose();
            }
            currentTimeoutOperation = null;
        }
        subscription.cancel();
    }

    private boolean isTerminated() {
        return isDisposed.get();
    }

    // Test/inspection accessors.
    int getWorkQueueSize() {
        return this.workQueue.size();
    }

    long getRequested() {
        return this.requested;
    }

    boolean isSubscriberInitialized() {
        return this.subscriberInitialized;
    }
}
What is the difference between `isReferenceEqual` and `isEqual` for simple basic types?
/**
 * Checks for reference equality between two values. For simple basic types (int, byte, float,
 * decimal, boolean, string) this degenerates to value-level comparison; XML and handle values
 * use reference-level comparison. Any other (or mismatched) types yield {@code false}.
 *
 * @param lhsValue value on the left hand side
 * @param rhsValue value on the right hand side
 * @return true if the values are reference equal (or, for simple basic types, equal in value);
 *         false otherwise
 */
public static boolean isReferenceEqual(Object lhsValue, Object rhsValue) {
    if (lhsValue == rhsValue) {
        return true;
    }
    if (lhsValue == null || rhsValue == null) {
        return false;
    }
    Type lhsType = getType(lhsValue);
    Type rhsType = getType(rhsValue);
    switch (lhsType.getTag()) {
        case TypeTags.INT_TAG:
            // BUG FIX: was '... != BYTE_TAG || ... != INT_TAG'. A single tag can never equal both
            // constants, so the '||' condition was always true and the method always returned
            // false for int comparisons. '&&' correctly rejects only tags that are neither
            // byte nor int.
            if (rhsType.getTag() != TypeTags.BYTE_TAG && rhsType.getTag() != TypeTags.INT_TAG) {
                return false;
            }
            return lhsValue.equals(((Number) rhsValue).longValue());
        case TypeTags.BYTE_TAG:
            // Same '||' -> '&&' fix as the INT_TAG case above.
            if (rhsType.getTag() != TypeTags.BYTE_TAG && rhsType.getTag() != TypeTags.INT_TAG) {
                return false;
            }
            return lhsValue.equals(((Number) rhsValue).byteValue());
        case TypeTags.FLOAT_TAG:
            if (rhsType.getTag() != TypeTags.FLOAT_TAG) {
                return false;
            }
            return lhsValue.equals(((Number) rhsValue).doubleValue());
        case TypeTags.DECIMAL_TAG:
            if (rhsType.getTag() != TypeTags.DECIMAL_TAG) {
                return false;
            }
            // Exact decimal equality (stricter than numeric equality).
            return checkDecimalExactEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);
        case TypeTags.BOOLEAN_TAG:
        case TypeTags.STRING_TAG:
            return lhsValue.equals(rhsValue);
    }
    if (TypeTags.isXMLTypeTag(lhsType.getTag()) && TypeTags.isXMLTypeTag(rhsType.getTag())) {
        return isXMLValueRefEqual((XmlValue) lhsValue, (XmlValue) rhsValue);
    }
    if (isHandleType(lhsType) && isHandleType(rhsType)) {
        return isHandleValueRefEqual(lhsValue, rhsValue);
    }
    return false;
}
if (rhsType.getTag() != TypeTags.BYTE_TAG || rhsType.getTag() != TypeTags.INT_TAG) {
/**
 * Checks for reference equality between two values. For int, byte, boolean, and string this is
 * delegated to {@code isEqual} (value equality); float requires an exact double comparison and
 * decimal an exact decimal comparison; XML and handle values use reference-level comparison.
 * Any other (or mismatched) types yield {@code false}.
 *
 * @param lhsValue value on the left hand side
 * @param rhsValue value on the right hand side
 * @return true if the values are reference equal (or, for simple basic types, equal in value);
 *         false otherwise
 */
public static boolean isReferenceEqual(Object lhsValue, Object rhsValue) {
    if (lhsValue == rhsValue) {
        return true;
    }
    if (lhsValue == null || rhsValue == null) {
        return false;
    }
    Type lhsType = getType(lhsValue);
    Type rhsType = getType(rhsValue);
    switch (lhsType.getTag()) {
        case TypeTags.FLOAT_TAG:
            if (rhsType.getTag() != TypeTags.FLOAT_TAG) {
                return false;
            }
            return lhsValue.equals(((Number) rhsValue).doubleValue());
        case TypeTags.DECIMAL_TAG:
            if (rhsType.getTag() != TypeTags.DECIMAL_TAG) {
                return false;
            }
            // Exact decimal equality (stricter than numeric equality).
            return checkDecimalExactEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);
        case TypeTags.INT_TAG:
        case TypeTags.BYTE_TAG:
        case TypeTags.BOOLEAN_TAG:
        case TypeTags.STRING_TAG:
            // For these simple basic types, reference equality is defined as value equality.
            return isEqual(lhsValue, rhsValue);
        case TypeTags.XML_TAG:
        case TypeTags.XML_COMMENT_TAG:
        case TypeTags.XML_ELEMENT_TAG:
        case TypeTags.XML_PI_TAG:
        case TypeTags.XML_TEXT_TAG:
            if (!TypeTags.isXMLTypeTag(rhsType.getTag())) {
                return false;
            }
            return isXMLValueRefEqual((XmlValue) lhsValue, (XmlValue) rhsValue);
        case TypeTags.HANDLE_TAG:
            if (rhsType.getTag() != TypeTags.HANDLE_TAG) {
                return false;
            }
            return isHandleValueRefEqual(lhsValue, rhsValue);
    }
    return false;
}
class TypeChecker { public static Object checkCast(Object sourceVal, Type targetType) { if (checkIsType(sourceVal, targetType)) { return sourceVal; } Type sourceType = getType(sourceVal); if (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() <= TypeTags.BOOLEAN_TAG) { return TypeConverter.castValues(targetType, sourceVal); } if (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() == TypeTags.UNION_TAG) { for (Type memberType : ((BUnionType) targetType).getMemberTypes()) { try { return TypeConverter.castValues(memberType, sourceVal); } catch (Exception e) { } } } throw ErrorUtils.createTypeCastError(sourceVal, targetType); } public static long anyToInt(Object sourceVal) { return TypeConverter.anyToIntCast(sourceVal, () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT)); } public static long anyToSigned32(Object sourceVal) { return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_32, () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT_SIGNED_32)); } public static long anyToSigned16(Object sourceVal) { return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_16, () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT_SIGNED_16)); } public static long anyToSigned8(Object sourceVal) { return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_8, () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT_SIGNED_8)); } public static long anyToUnsigned32(Object sourceVal) { return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_32, () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT_UNSIGNED_32)); } public static long anyToUnsigned16(Object sourceVal) { return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_16, () -> ErrorUtils .createTypeCastError(sourceVal, TYPE_INT_UNSIGNED_16)); } public static long anyToUnsigned8(Object sourceVal) { return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_8, () -> ErrorUtils .createTypeCastError(sourceVal, 
TYPE_INT_UNSIGNED_8)); } public static double anyToFloat(Object sourceVal) { return TypeConverter.anyToFloatCast(sourceVal, () -> ErrorUtils .createTypeCastError(sourceVal, TYPE_FLOAT)); } public static boolean anyToBoolean(Object sourceVal) { return TypeConverter.anyToBooleanCast(sourceVal, () -> ErrorUtils .createTypeCastError(sourceVal, TYPE_BOOLEAN)); } public static int anyToByte(Object sourceVal) { return TypeConverter.anyToByteCast(sourceVal, () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_BYTE)); } public static DecimalValue anyToDecimal(Object sourceVal) { return TypeConverter.anyToDecimalCast(sourceVal, () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_DECIMAL)); } public static byte anyToJByte(Object sourceVal) { return TypeConverter.anyToJByteCast(sourceVal, () -> ErrorUtils.createBToJTypeCastError(sourceVal, "byte")); } public static char anyToJChar(Object sourceVal) { return TypeConverter.anyToJCharCast(sourceVal, () -> ErrorUtils.createBToJTypeCastError(sourceVal, "char")); } public static short anyToJShort(Object sourceVal) { return TypeConverter.anyToJShortCast(sourceVal, () -> ErrorUtils.createBToJTypeCastError(sourceVal, "short")); } public static int anyToJInt(Object sourceVal) { return TypeConverter.anyToJIntCast(sourceVal, () -> ErrorUtils.createBToJTypeCastError(sourceVal, "int")); } public static long anyToJLong(Object sourceVal) { return TypeConverter.anyToJLongCast(sourceVal, () -> ErrorUtils.createBToJTypeCastError(sourceVal, "long")); } public static float anyToJFloat(Object sourceVal) { return TypeConverter.anyToJFloatCast(sourceVal, () -> ErrorUtils.createBToJTypeCastError(sourceVal, "float")); } public static double anyToJDouble(Object sourceVal) { return TypeConverter.anyToJDoubleCast(sourceVal, () -> ErrorUtils.createBToJTypeCastError(sourceVal, "double")); } public static boolean anyToJBoolean(Object sourceVal) { return TypeConverter.anyToJBooleanCast(sourceVal, () -> ErrorUtils.createBToJTypeCastError(sourceVal, 
"boolean")); } /** * Check whether a given value belongs to the given type. * * @param sourceVal value to check the type * @param targetType type to be test against * @return true if the value belongs to the given type, false otherwise */ public static boolean checkIsType(Object sourceVal, Type targetType) { return checkIsType(sourceVal, getType(sourceVal), targetType); } /** * Check whether a given value belongs to the given type. * * @param sourceVal value to check the type * @param sourceType type of the value * @param targetType type to be test against * @return true if the value belongs to the given type, false otherwise */ public static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType) { if (checkIsType(sourceVal, sourceType, targetType, null)) { return true; } if (sourceType.getTag() == TypeTags.XML_TAG) { XmlValue val = (XmlValue) sourceVal; if (val.getNodeType() == XmlNodeType.SEQUENCE) { return checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false); } } if (isMutable(sourceVal, sourceType)) { return false; } return checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false); } /** * Check whether a given value has the same shape as the given type. * * @param sourceValue value to check the shape * @param targetType type to check the shape against * @return true if the value has the same shape as the given type; false otherwise */ public static boolean checkIsLikeType(Object sourceValue, Type targetType) { return checkIsLikeType(sourceValue, targetType, false); } /** * Check whether a given value has the same shape as the given type. 
* * @param sourceValue value to check the shape * @param targetType type to check the shape against * @param allowNumericConversion whether numeric conversion is allowed to change the shape to the target type * @return true if the value has the same shape as the given type; false otherwise */ public static boolean checkIsLikeType(Object sourceValue, Type targetType, boolean allowNumericConversion) { return checkIsLikeType(sourceValue, targetType, new ArrayList<>(), allowNumericConversion); } /** * Check whether two types are the same. * * @param sourceType type to test * @param targetType type to test against * @return true if the two types are same; false otherwise */ public static boolean isSameType(Type sourceType, Type targetType) { int sourceTypeTag = sourceType.getTag(); int targetTypeTag = targetType.getTag(); if (sourceType == targetType) { return true; } if (sourceTypeTag == targetTypeTag) { if (sourceType.equals(targetType)) { return true; } switch (sourceTypeTag) { case TypeTags.ARRAY_TAG: return checkArrayEquivalent(sourceType, targetType); case TypeTags.FINITE_TYPE_TAG: Set<Object> sourceValueSpace = ((BFiniteType) sourceType).valueSpace; Set<Object> targetValueSpace = ((BFiniteType) targetType).valueSpace; if (sourceValueSpace.size() != targetValueSpace.size()) { return false; } for (Object sourceVal : sourceValueSpace) { if (!containsType(targetValueSpace, getType(sourceVal))) { return false; } } return true; default: break; } } if (sourceTypeTag == TypeTags.FINITE_TYPE_TAG) { for (Object value : ((BFiniteType) sourceType).valueSpace) { if (!isSameType(getType(value), targetType)) { return false; } } return true; } if (targetTypeTag == TypeTags.FINITE_TYPE_TAG) { for (Object value : ((BFiniteType) targetType).valueSpace) { if (!isSameType(getType(value), sourceType)) { return false; } } return true; } return false; } public static Type getType(Object value) { if (value == null) { return TYPE_NULL; } else if (value instanceof Number) { if (value 
instanceof Long) { return TYPE_INT; } else if (value instanceof Double) { return TYPE_FLOAT; } else if (value instanceof Integer || value instanceof Byte) { return TYPE_BYTE; } } else if (value instanceof BString) { return TYPE_STRING; } else if (value instanceof Boolean) { return TYPE_BOOLEAN; } return ((BValue) value).getType(); } /** * Deep value equality check for anydata. * * @param lhsValue The value on the left hand side * @param rhsValue The value on the right hand side * @return True if values are equal, else false. */ public static boolean isEqual(Object lhsValue, Object rhsValue) { return isEqual(lhsValue, rhsValue, new ArrayList<>()); } /** * Check if two decimal values are equal in value. * * @param lhsValue The value on the left hand side * @param rhsValue The value of the right hand side * @return True if values are equal, else false. */ public static boolean checkDecimalEqual(DecimalValue lhsValue, DecimalValue rhsValue) { return isDecimalRealNumber(lhsValue) && isDecimalRealNumber(rhsValue) && lhsValue.decimalValue().compareTo(rhsValue.decimalValue()) == 0; } /** * Check if two decimal values are exactly equal. * * @param lhsValue The value on the left-hand side * @param rhsValue The value of the right-hand side * @return True if values are exactly equal, else false. */ public static boolean checkDecimalExactEqual(DecimalValue lhsValue, DecimalValue rhsValue) { return isDecimalRealNumber(lhsValue) && isDecimalRealNumber(rhsValue) && lhsValue.decimalValue().equals(rhsValue.decimalValue()); } /** * Checks if the given decimal number is a real number. * * @param decimalValue The decimal value being checked * @return True if the decimal value is a real number. */ private static boolean isDecimalRealNumber(DecimalValue decimalValue) { return decimalValue.valueKind == DecimalValueKind.ZERO || decimalValue.valueKind == DecimalValueKind.OTHER; } /** * Reference equality check for values. 
If both the values are simple basic types, returns the same * result as {@link * * @param lhsValue The value on the left hand side * @param rhsValue The value on the right hand side * @return True if values are reference equal or in the case of simple basic types if the values are equal, * else false. */ private static boolean isXMLValueRefEqual(XmlValue lhsValue, XmlValue rhsValue) { if (lhsValue.getNodeType() != rhsValue.getNodeType()) { return false; } if (lhsValue.getNodeType() == XmlNodeType.SEQUENCE && rhsValue.getNodeType() == XmlNodeType.SEQUENCE) { return isXMLSequenceRefEqual((XmlSequence) lhsValue, (XmlSequence) rhsValue); } if (lhsValue.getNodeType() == XmlNodeType.TEXT && rhsValue.getNodeType() == XmlNodeType.TEXT) { return isEqual(lhsValue, rhsValue); } return false; } private static boolean isXMLSequenceRefEqual(XmlSequence lhsValue, XmlSequence rhsValue) { Iterator<BXml> lhsIter = lhsValue.getChildrenList().iterator(); Iterator<BXml> rhsIter = rhsValue.getChildrenList().iterator(); while (lhsIter.hasNext() && rhsIter.hasNext()) { BXml l = lhsIter.next(); BXml r = rhsIter.next(); if (!(l == r || isXMLValueRefEqual((XmlValue) l, (XmlValue) r))) { return false; } } return lhsIter.hasNext() == rhsIter.hasNext(); } /** * Get the typedesc of a value. * * @param value Value * @return type desc associated with the value */ public static TypedescValue getTypedesc(Object value) { Type type = TypeChecker.getType(value); if (type == null) { return null; } if (value instanceof MapValue) { TypedescValue typedesc = (TypedescValue) ((MapValue) value).getTypedesc(); if (typedesc != null) { return typedesc; } } return new TypedescValueImpl(type); } /** * Get the annotation value if present. 
* * @param typedescValue The typedesc value * @param annotTag The annot-tag-reference * @return the annotation value if present, nil else */ public static Object getAnnotValue(TypedescValue typedescValue, String annotTag) { Type describingType = typedescValue.getDescribingType(); if (!(describingType instanceof BAnnotatableType)) { return null; } return ((BAnnotatableType) describingType).getAnnotation(StringUtils.fromString(annotTag)); } public static Object getAnnotValue(TypedescValue typedescValue, BString annotTag) { Type describingType = typedescValue.getDescribingType(); if (!(describingType instanceof BAnnotatableType)) { return null; } return ((BAnnotatableType) describingType).getAnnotation(annotTag); } /** * Check whether a given type is equivalent to a target type. * * @param sourceType type to check * @param targetType type to compare with * @return flag indicating the the equivalence of the two types */ public static boolean checkIsType(Type sourceType, Type targetType) { return checkIsType(sourceType, targetType, (List<TypePair>) null); } @Deprecated public static boolean checkIsType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) { if (sourceType == targetType || (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) { return true; } if (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(sourceType)) { return true; } if (targetType.isReadOnly() && !sourceType.isReadOnly()) { return false; } int sourceTypeTag = sourceType.getTag(); int targetTypeTag = targetType.getTag(); if (sourceTypeTag == TypeTags.INTERSECTION_TAG) { return checkIsType(((BIntersectionType) sourceType).getEffectiveType(), targetTypeTag != TypeTags.INTERSECTION_TAG ? 
targetType : ((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes); } if (targetTypeTag == TypeTags.INTERSECTION_TAG) { return checkIsType(sourceType, ((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes); } if (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) { if (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) { return checkIsType(((BParameterizedType) sourceType).getParamValueType(), targetType, unresolvedTypes); } return checkIsType(((BParameterizedType) sourceType).getParamValueType(), ((BParameterizedType) targetType).getParamValueType(), unresolvedTypes); } if (sourceTypeTag == TypeTags.READONLY_TAG) { return checkIsType(PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE, targetType, unresolvedTypes); } if (targetTypeTag == TypeTags.READONLY_TAG) { return checkIsType(sourceType, PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE, unresolvedTypes); } if (sourceTypeTag == TypeTags.UNION_TAG) { return isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes); } if (sourceTypeTag == TypeTags.FINITE_TYPE_TAG && (targetTypeTag == TypeTags.FINITE_TYPE_TAG || targetTypeTag <= TypeTags.NULL_TAG || targetTypeTag == TypeTags.XML_TEXT_TAG)) { return isFiniteTypeMatch((BFiniteType) sourceType, targetType); } switch (targetTypeTag) { case TypeTags.BYTE_TAG: case TypeTags.SIGNED8_INT_TAG: case TypeTags.FLOAT_TAG: case TypeTags.DECIMAL_TAG: case TypeTags.CHAR_STRING_TAG: case TypeTags.BOOLEAN_TAG: case TypeTags.NULL_TAG: return sourceTypeTag == targetTypeTag; case TypeTags.STRING_TAG: return TypeTags.isStringTypeTag(sourceTypeTag); case TypeTags.XML_TEXT_TAG: if (sourceTypeTag == TypeTags.XML_TAG) { return ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG; } return sourceTypeTag == targetTypeTag; case TypeTags.INT_TAG: return sourceTypeTag == TypeTags.INT_TAG || sourceTypeTag == TypeTags.BYTE_TAG || (sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.UNSIGNED32_INT_TAG); case 
TypeTags.SIGNED16_INT_TAG: return sourceTypeTag == TypeTags.BYTE_TAG || (sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED16_INT_TAG); case TypeTags.SIGNED32_INT_TAG: return sourceTypeTag == TypeTags.BYTE_TAG || (sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED32_INT_TAG); case TypeTags.UNSIGNED8_INT_TAG: return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG; case TypeTags.UNSIGNED16_INT_TAG: return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG || sourceTypeTag == TypeTags.UNSIGNED16_INT_TAG; case TypeTags.UNSIGNED32_INT_TAG: return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG || sourceTypeTag == TypeTags.UNSIGNED16_INT_TAG || sourceTypeTag == TypeTags.UNSIGNED32_INT_TAG; case TypeTags.ANY_TAG: return checkIsAnyType(sourceType); case TypeTags.ANYDATA_TAG: return sourceType.isAnydata(); case TypeTags.SERVICE_TAG: return checkIsServiceType(sourceType, targetType, unresolvedTypes == null ? new ArrayList<>() : unresolvedTypes); case TypeTags.HANDLE_TAG: return sourceTypeTag == TypeTags.HANDLE_TAG; case TypeTags.READONLY_TAG: return isInherentlyImmutableType(sourceType) || sourceType.isReadOnly(); case TypeTags.XML_ELEMENT_TAG: case TypeTags.XML_COMMENT_TAG: case TypeTags.XML_PI_TAG: return targetTypeTag == sourceTypeTag; default: return checkIsRecursiveType(sourceType, targetType, unresolvedTypes == null ? 
// (continuation) start cycle detection with a fresh list when none was supplied.
new ArrayList<>() : unresolvedTypes);
        }
    }

    /**
     * Checks whether a value of {@code sourceType} belongs to {@code targetType}, consulting the
     * actual value ({@code sourceVal}) for record/object sources so that read-only/final field
     * values can be re-checked against the target field types.
     */
    private static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType,
                                       List<TypePair> unresolvedTypes) {
        int sourceTypeTag = sourceType.getTag();
        int targetTypeTag = targetType.getTag();

        // Only records and objects need value-aware checking; everything else is purely type based.
        if (sourceTypeTag != TypeTags.RECORD_TYPE_TAG && sourceTypeTag != TypeTags.OBJECT_TYPE_TAG) {
            return checkIsType(sourceType, targetType);
        }

        if (targetTypeTag == TypeTags.INTERSECTION_TAG) {
            targetType = ((BIntersectionType) targetType).getEffectiveType();
            targetTypeTag = targetType.getTag();
        }

        if (sourceType == targetType || (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) {
            return true;
        }

        if (targetType.isReadOnly() && !sourceType.isReadOnly()) {
            return false;
        }

        switch (targetTypeTag) {
            case TypeTags.ANY_TAG:
                return checkIsAnyType(sourceType);
            case TypeTags.READONLY_TAG:
                return isInherentlyImmutableType(sourceType) || sourceType.isReadOnly();
            default:
                return checkIsRecursiveTypeOnValue(sourceVal, sourceType, targetType, sourceTypeTag, targetTypeTag,
                        unresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);
        }
    }

    /**
     * A typedesc source is a subtype of a typedesc target iff its constraint is a subtype of the
     * target's constraint.
     */
    private static boolean checkTypeDescType(Type sourceType, BTypedescType targetType,
                                             List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() != TypeTags.TYPEDESC_TAG) {
            return false;
        }

        BTypedescType sourceTypedesc = (BTypedescType) sourceType;
        return checkIsType(sourceTypedesc.getConstraint(), targetType.getConstraint(), unresolvedTypes);
    }

    // Dispatches structured (possibly recursive) target types to their dedicated checks.
    private static boolean checkIsRecursiveType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
        switch (targetType.getTag()) {
            case TypeTags.MAP_TAG:
                return checkIsMapType(sourceType, (BMapType) targetType, unresolvedTypes);
            case TypeTags.STREAM_TAG:
                return checkIsStreamType(sourceType, (BStreamType) targetType, unresolvedTypes);
            case TypeTags.TABLE_TAG:
                return checkIsTableType(sourceType, (BTableType) targetType, unresolvedTypes);
            case TypeTags.JSON_TAG:
                return checkIsJSONType(sourceType, unresolvedTypes);
            case TypeTags.RECORD_TYPE_TAG:
                return checkIsRecordType(sourceType, (BRecordType) targetType, unresolvedTypes);
            case TypeTags.FUNCTION_POINTER_TAG:
                return checkIsFunctionType(sourceType, (BFunctionType) targetType);
            case TypeTags.ARRAY_TAG:
                return checkIsArrayType(sourceType, (BArrayType) targetType, unresolvedTypes);
            case TypeTags.TUPLE_TAG:
                return checkIsTupleType(sourceType, (BTupleType) targetType, unresolvedTypes);
            case TypeTags.UNION_TAG:
                return checkIsUnionType(sourceType, (BUnionType) targetType, unresolvedTypes);
            case TypeTags.OBJECT_TYPE_TAG:
                return checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes);
            case TypeTags.FINITE_TYPE_TAG:
                return checkIsFiniteType(sourceType, (BFiniteType) targetType);
            case TypeTags.FUTURE_TAG:
                return checkIsFutureType(sourceType, (BFutureType) targetType, unresolvedTypes);
            case TypeTags.ERROR_TAG:
                return checkIsErrorType(sourceType, (BErrorType) targetType, unresolvedTypes);
            case TypeTags.TYPEDESC_TAG:
                return checkTypeDescType(sourceType, (BTypedescType) targetType, unresolvedTypes);
            case TypeTags.XML_TAG:
// (continuation of checkIsRecursiveType's switch)
return checkIsXMLType(sourceType, targetType, unresolvedTypes);
            default:
                return false;
        }
    }

    /**
     * Value-aware counterpart of {@link #checkIsRecursiveType}: dispatches record/object sources,
     * using the actual value where read-only fields require value-level re-checking.
     */
    private static boolean checkIsRecursiveTypeOnValue(Object sourceVal, Type sourceType, Type targetType,
                                                       int sourceTypeTag, int targetTypeTag,
                                                       List<TypePair> unresolvedTypes) {
        switch (targetTypeTag) {
            case TypeTags.ANYDATA_TAG:
                // Objects are never anydata; records are checked field-by-field against anydata.
                if (sourceTypeTag == TypeTags.OBJECT_TYPE_TAG) {
                    return false;
                }
                return checkRecordBelongsToAnydataType((MapValue) sourceVal, (BRecordType) sourceType,
                        unresolvedTypes);
            case TypeTags.MAP_TAG:
                return checkIsMapType(sourceVal, sourceType, (BMapType) targetType, unresolvedTypes);
            case TypeTags.JSON_TAG:
                // json is treated as map<json> (readonly variant when the target is readonly).
                return checkIsMapType(sourceVal, sourceType,
                        new BMapType(targetType.isReadOnly() ? TYPE_READONLY_JSON : TYPE_JSON), unresolvedTypes);
            case TypeTags.RECORD_TYPE_TAG:
                return checkIsRecordType(sourceVal, sourceType, (BRecordType) targetType, unresolvedTypes);
            case TypeTags.UNION_TAG:
                // Membership in any one member of the union suffices.
                for (Type type : ((BUnionType) targetType).getMemberTypes()) {
                    if (checkIsType(sourceVal, sourceType, type, unresolvedTypes)) {
                        return true;
                    }
                }
                return false;
            case TypeTags.OBJECT_TYPE_TAG:
                return checkObjectEquivalency(sourceVal, sourceType, (BObjectType) targetType, unresolvedTypes);
            default:
                return false;
        }
    }

    // True iff every value in the finite type's value space belongs to the target type.
    private static boolean isFiniteTypeMatch(BFiniteType sourceType, Type targetType) {
        for (Object bValue : sourceType.valueSpace) {
            if (!checkIsType(bValue, targetType)) {
                return false;
            }
        }
        return true;
    }

    // True iff every member of the source union is a subtype of the target.
    private static boolean isUnionTypeMatch(BUnionType sourceType, Type targetType, List<TypePair> unresolvedTypes) {
        for (Type type : sourceType.getMemberTypes()) {
            if (!checkIsType(type, targetType, unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Checks subtyping against a union target, recording the (source, target) pair in
     * {@code unresolvedTypes} to break cycles in recursive type definitions.
     */
    private static boolean checkIsUnionType(Type sourceType, BUnionType targetType, List<TypePair> unresolvedTypes) {
        // If we're already in the middle of checking this pair, assume success (cycle).
        TypePair pair = new TypePair(sourceType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        switch (sourceType.getTag()) {
            case TypeTags.UNION_TAG:
            case TypeTags.JSON_TAG:
            case TypeTags.ANYDATA_TAG:
                return isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes);
            case TypeTags.FINITE_TYPE_TAG:
                return isFiniteTypeMatch((BFiniteType) sourceType, targetType);
            default:
                // A non-union source needs to match at least one member of the target union.
                for (Type type : targetType.getMemberTypes()) {
                    if (checkIsType(sourceType, type, unresolvedTypes)) {
                        return true;
                    }
                }
                return false;
        }
    }

    // Type-level check against a map target: map sources by constraint, record sources by the
    // union of all their field (and rest) types.
    private static boolean checkIsMapType(Type sourceType, BMapType targetType, List<TypePair> unresolvedTypes) {
        Type targetConstrainedType = targetType.getConstrainedType();
        switch (sourceType.getTag()) {
            case TypeTags.MAP_TAG:
                return checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,
                        unresolvedTypes);
            case TypeTags.RECORD_TYPE_TAG:
                BRecordType recType = (BRecordType) sourceType;
                BUnionType wideTypeUnion = new BUnionType(getWideTypeComponents(recType));
                return checkConstraints(wideTypeUnion, targetConstrainedType, unresolvedTypes);
            default:
                return false;
        }
    }

    // Value-aware check against a map target.
    private static boolean checkIsMapType(Object sourceVal, Type sourceType, BMapType targetType,
                                          List<TypePair> unresolvedTypes) {
        Type targetConstrainedType = targetType.getConstrainedType();
        switch (sourceType.getTag()) {
            case TypeTags.MAP_TAG:
                return checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,
                        unresolvedTypes);
            case TypeTags.RECORD_TYPE_TAG:
                return checkIsMapType((MapValue) sourceVal, (BRecordType) sourceType, unresolvedTypes,
                        targetConstrainedType);
            default:
                return false;
        }
    }

    // A record value belongs to map<T> if every (mutable) field type is a subtype of T, and every
    // readonly field's current value looks like T.
    private static boolean checkIsMapType(MapValue sourceVal, BRecordType sourceType, List<TypePair> unresolvedTypes,
                                          Type targetConstrainedType) {
        for (Field field : sourceType.getFields().values()) {
            if (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
                if (!checkIsType(field.getFieldType(), targetConstrainedType, unresolvedTypes)) {
                    return false;
                }
                continue;
            }

            // Readonly field: check the actual value, skipping absent optional fields.
            BString name = StringUtils.fromString(field.getFieldName());
            if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL) &&
// (continuation) skip absent optional readonly fields; otherwise check the stored value.
!sourceVal.containsKey(name)) {
                continue;
            }

            if (!checkIsLikeType(sourceVal.get(name), targetConstrainedType)) {
                return false;
            }
        }

        if (sourceType.sealed) {
            return true;
        }

        // Open record: the rest field type must also fit the map constraint.
        return checkIsType(sourceType.restFieldType, targetConstrainedType, unresolvedTypes);
    }

    /**
     * Checks subtyping against an xml target, unwrapping nested xml constraints
     * (e.g. xml<xml<...>>) before comparing constraints.
     */
    private static boolean checkIsXMLType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
        int sourceTag = sourceType.getTag();
        if (sourceTag == TypeTags.FINITE_TYPE_TAG) {
            return isFiniteTypeMatch((BFiniteType) sourceType, targetType);
        }

        BXmlType target = ((BXmlType) targetType);
        if (sourceTag == TypeTags.XML_TAG) {
            Type targetConstraint = target.constraint;
            // Collapse nested xml<...> constraints to the innermost non-xml constraint.
            while (target.constraint.getTag() == TypeTags.XML_TAG) {
                target = (BXmlType) target.constraint;
                targetConstraint = target.constraint;
            }
            BXmlType source = (BXmlType) sourceType;
            if (source.constraint.getTag() == TypeTags.NEVER_TAG) {
                // xml<never> (the empty/text-only xml) is a subtype of xml:Text and of itself.
                if (targetConstraint.getTag() == TypeTags.UNION_TAG) {
                    return checkIsUnionType(sourceType, (BUnionType) targetConstraint, unresolvedTypes);
                }
                return targetConstraint.getTag() == TypeTags.XML_TEXT_TAG ||
                        targetConstraint.getTag() == TypeTags.NEVER_TAG;
            }
            return checkIsType(source.constraint, targetConstraint, unresolvedTypes);
        }
        if (TypeTags.isXMLTypeTag(sourceTag)) {
            // A singleton xml item type (element/comment/pi/text) is checked against the constraint.
            return checkIsType(sourceType, target.constraint, unresolvedTypes);
        }
        return false;
    }

    // All field types of the record, plus the rest field type when the record is open.
    private static List<Type> getWideTypeComponents(BRecordType recType) {
        List<Type> types = new ArrayList<>();
        for (Field f : recType.getFields().values()) {
            types.add(f.getFieldType());
        }
        if (!recType.sealed) {
            types.add(recType.restFieldType);
        }
        return types;
    }

    // stream<T, C> is a subtype of stream<T', C'> iff T <: T' and C <: C'.
    private static boolean checkIsStreamType(Type sourceType, BStreamType targetType, List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() != TypeTags.STREAM_TAG) {
            return false;
        }
        return checkConstraints(((BStreamType) sourceType).getConstrainedType(), targetType.getConstrainedType(),
                unresolvedTypes)
                && checkConstraints(((BStreamType) sourceType).getCompletionType(), targetType.getCompletionType(),
                unresolvedTypes);
    }

    /**
     * Checks subtyping against a table target: row (constraint) types must be subtypes, and key
     * specifications (key type or key field names) must be compatible.
     */
    private static boolean checkIsTableType(Type sourceType, BTableType targetType, List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() != TypeTags.TABLE_TAG) {
            return false;
        }

        BTableType srcTableType = (BTableType) sourceType;

        if (!checkConstraints(srcTableType.getConstrainedType(), targetType.getConstrainedType(),
                unresolvedTypes)) {
            return false;
        }

        // Keyless target accepts any key specification on the source.
        if (targetType.getKeyType() == null && targetType.getFieldNames() == null) {
            return true;
        }

        if (targetType.getKeyType() != null) {
            if (srcTableType.getKeyType() != null &&
                    (checkConstraints(srcTableType.getKeyType(), targetType.getKeyType(), unresolvedTypes))) {
                return true;
            }

            if (srcTableType.getFieldNames() == null) {
                return false;
            }

            // Derive the source key type from its key field names and compare with the target key.
            List<Type> fieldTypes = new ArrayList<>();
            Arrays.stream(srcTableType.getFieldNames()).forEach(field -> fieldTypes
                    .add(Objects.requireNonNull(getTableConstraintField(srcTableType.getConstrainedType(), field))
                            .getFieldType()));

            if (fieldTypes.size() == 1) {
                return checkConstraints(fieldTypes.get(0), targetType.getKeyType(), unresolvedTypes);
            }

            BTupleType tupleType = new BTupleType(fieldTypes);
            return checkConstraints(tupleType, targetType.getKeyType(), unresolvedTypes);
        }

        return Arrays.equals(srcTableType.getFieldNames(), targetType.getFieldNames());
    }

    /**
     * Resolves the field named {@code fieldName} within a table row (constraint) type.
     * For a union constraint the field must exist in every member with the same type.
     */
    static BField getTableConstraintField(Type constraintType, String fieldName) {
        switch (constraintType.getTag()) {
            case TypeTags.RECORD_TYPE_TAG:
                Map<String, Field> fieldList = ((BRecordType) constraintType).getFields();
                return (BField) fieldList.get(fieldName);
            case TypeTags.INTERSECTION_TAG:
                Type effectiveType = ((BIntersectionType) constraintType).getEffectiveType();
                return getTableConstraintField(effectiveType, fieldName);
            case TypeTags.UNION_TAG:
                BUnionType unionType = (BUnionType) constraintType;
                List<Type> memTypes = unionType.getMemberTypes();
                List<BField> fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))
                        .filter(Objects::nonNull).collect(Collectors.toList());

                if (fields.size()
// (continuation) the field must be present in every union member, with an identical type.
!= memTypes.size()) {
                    return null;
                }

                if (fields.stream().allMatch(field -> isSameType(field.getFieldType(), fields.get(0).getFieldType()))) {
                    return fields.get(0);
                }
        }
        return null;
    }

    /**
     * Checks whether {@code sourceType} is a subtype of json, using {@code unresolvedTypes} for
     * cycle detection in recursive structures.
     */
    private static boolean checkIsJSONType(Type sourceType, List<TypePair> unresolvedTypes) {
        BJsonType jsonType = (BJsonType) TYPE_JSON;

        // If we encounter two types that we are already trying to resolve, then skip it.
        TypePair pair = new TypePair(sourceType, jsonType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        switch (sourceType.getTag()) {
            // All simple basic types (and their subtypes) other than handle belong to json.
            case TypeTags.STRING_TAG:
            case TypeTags.CHAR_STRING_TAG:
            case TypeTags.INT_TAG:
            case TypeTags.SIGNED32_INT_TAG:
            case TypeTags.SIGNED16_INT_TAG:
            case TypeTags.SIGNED8_INT_TAG:
            case TypeTags.UNSIGNED32_INT_TAG:
            case TypeTags.UNSIGNED16_INT_TAG:
            case TypeTags.UNSIGNED8_INT_TAG:
            case TypeTags.BYTE_TAG:
            case TypeTags.FLOAT_TAG:
            case TypeTags.DECIMAL_TAG:
            case TypeTags.BOOLEAN_TAG:
            case TypeTags.NULL_TAG:
            case TypeTags.JSON_TAG:
                return true;
            case TypeTags.ARRAY_TAG:
                // Element type of the array should be json compatible.
                return checkIsType(((BArrayType) sourceType).getElementType(), jsonType, unresolvedTypes);
            case TypeTags.FINITE_TYPE_TAG:
                return isFiniteTypeMatch((BFiniteType) sourceType, jsonType);
            case TypeTags.MAP_TAG:
                return checkIsType(((BMapType) sourceType).getConstrainedType(), jsonType, unresolvedTypes);
            case TypeTags.RECORD_TYPE_TAG:
                BRecordType recordType = (BRecordType) sourceType;
                for (Field field : recordType.getFields().values()) {
                    if (!checkIsJSONType(field.getFieldType(), unresolvedTypes)) {
                        return false;
                    }
                }
                if (!recordType.sealed) {
                    return checkIsJSONType(recordType.restFieldType, unresolvedTypes);
                }
                return true;
            case TypeTags.TUPLE_TAG:
                BTupleType sourceTupleType = (BTupleType) sourceType;
                for (Type memberType : sourceTupleType.getTupleTypes()) {
                    if (!checkIsJSONType(memberType, unresolvedTypes)) {
                        return false;
                    }
                }
                Type tupleRestType = sourceTupleType.getRestType();
                if (tupleRestType != null) {
                    return checkIsJSONType(tupleRestType, unresolvedTypes);
                }
                return true;
            case TypeTags.UNION_TAG:
                for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                    if (!checkIsJSONType(memberType, unresolvedTypes)) {
                        return false;
                    }
                }
                return true;
            default:
                return false;
        }
    }

    // Type-level dispatch for record targets: only records and maps can qualify.
    private static boolean checkIsRecordType(Type sourceType, BRecordType targetType, List<TypePair> unresolvedTypes) {
        switch (sourceType.getTag()) {
            case TypeTags.RECORD_TYPE_TAG:
                return checkIsRecordType((BRecordType) sourceType, targetType, unresolvedTypes);
            case TypeTags.MAP_TAG:
                return checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);
        }
        return false;
    }

    /**
     * Record-to-record subtyping: field-by-field compatibility (respecting optionality and
     * read-only flags), rest field compatibility, and sealed-ness.
     */
    private static boolean checkIsRecordType(BRecordType sourceRecordType, BRecordType targetType,
                                             List<TypePair> unresolvedTypes) {
        // If we encounter two types that we are already trying to resolve, then skip it (cycle).
        TypePair pair = new TypePair(sourceRecordType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        // Unsealed records are not equivalent to sealed records, and rest field types should
        // match if they are open.
        if (targetType.sealed && !sourceRecordType.sealed) {
            return false;
        }
        if (!sourceRecordType.sealed &&
                !checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {
            return false;
        }

        Map<String, Field> sourceFields = sourceRecordType.getFields();
        Set<String> targetFieldNames = targetType.getFields().keySet();

        for (Map.Entry<String, Field> targetFieldEntry : targetType.getFields().entrySet()) {
            Field targetField = targetFieldEntry.getValue();
            Field sourceField = sourceFields.get(targetFieldEntry.getKey());

            if (sourceField == null) {
                return false;
            }

            if (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {
                return false;
            }

            // If the target field is required, the source field cannot be optional.
            if (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)
                    && SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL)) {
                return false;
            }

            if (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {
                return false;
            }
        }

        // If there are fields remaining in the source record, first check if it's a closed record.
        // Closed records should only have the fields specified by its type.
        if (targetType.sealed) {
            return targetFieldNames.containsAll(sourceFields.keySet());
        }

        // If it's an open record, check if they are compatible with the rest field of the target type.
        for (Map.Entry<String, Field> sourceFieldEntry : sourceFields.entrySet()) {
            if (targetFieldNames.contains(sourceFieldEntry.getKey())) {
                continue;
            }

            if
// (continuation) source-only fields must fit the open target's rest field type.
(!checkIsType(sourceFieldEntry.getValue().getFieldType(), targetType.restFieldType, unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    /**
     * map<T> can only be a subtype of an OPEN record whose fields are all optional with types that
     * T is a subtype of (readonly fields additionally require a readonly map).
     */
    private static boolean checkIsRecordType(BMapType sourceType, BRecordType targetType,
                                             List<TypePair> unresolvedTypes) {
        // If we encounter two types that we are already trying to resolve, then skip it (cycle).
        TypePair pair = new TypePair(sourceType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        if (targetType.sealed) {
            return false;
        }

        Type constraintType = sourceType.getConstrainedType();

        for (Field field : targetType.getFields().values()) {
            var flags = field.getFlags();
            // A map cannot guarantee the presence of any particular key, so required fields fail.
            if (!SymbolFlags.isFlagOn(flags, SymbolFlags.OPTIONAL)) {
                return false;
            }

            if (SymbolFlags.isFlagOn(flags, SymbolFlags.READONLY) && !sourceType.isReadOnly()) {
                return false;
            }

            if (!checkIsType(constraintType, field.getFieldType(), unresolvedTypes)) {
                return false;
            }
        }

        return checkIsType(constraintType, targetType.restFieldType, unresolvedTypes);
    }

    /**
     * Value-aware check of a record value against anydata: mutable fields are checked by type,
     * readonly fields by their current value.
     */
    private static boolean checkRecordBelongsToAnydataType(MapValue sourceVal, BRecordType recordType,
                                                           List<TypePair> unresolvedTypes) {
        Type targetType = TYPE_ANYDATA;
        TypePair pair = new TypePair(recordType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        Map<String, Field> fields = recordType.getFields();

        for (Map.Entry<String, Field> fieldEntry : fields.entrySet()) {
            String fieldName = fieldEntry.getKey();
            Field field = fieldEntry.getValue();

            if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
                BString fieldNameBString = StringUtils.fromString(fieldName);

                // Absent optional readonly fields are trivially fine.
                if (SymbolFlags
                        .isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL) &&
                        !sourceVal.containsKey(fieldNameBString)) {
                    continue;
                }

                if (!checkIsLikeType(sourceVal.get(fieldNameBString), targetType)) {
                    return false;
                }
            } else {
                if (!checkIsType(field.getFieldType(), targetType, unresolvedTypes)) {
                    return false;
                }
            }
        }

        if (recordType.sealed) {
            return true;
        }

        return checkIsType(recordType.restFieldType, targetType, unresolvedTypes);
    }

    // Value-aware dispatch for record targets.
    private static boolean checkIsRecordType(Object sourceVal, Type sourceType, BRecordType targetType,
                                             List<TypePair> unresolvedTypes) {
        switch (sourceType.getTag()) {
            case TypeTags.RECORD_TYPE_TAG:
                return checkIsRecordType((MapValue) sourceVal, (BRecordType) sourceType, targetType,
                        unresolvedTypes);
            case TypeTags.MAP_TAG:
                return checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);
        }
        return false;
    }

    /**
     * Value-aware record-to-record subtyping: like the type-level check, but readonly source
     * fields are validated against the actual stored value rather than the declared type.
     */
    private static boolean checkIsRecordType(MapValue sourceRecordValue, BRecordType sourceRecordType,
                                             BRecordType targetType, List<TypePair> unresolvedTypes) {
        TypePair pair = new TypePair(sourceRecordType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        if (targetType.sealed && !sourceRecordType.sealed) {
            return false;
        }

        if (!sourceRecordType.sealed &&
                !checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {
            return false;
        }

        Map<String, Field> sourceFields = sourceRecordType.getFields();
        Set<String> targetFieldNames = targetType.getFields().keySet();

        for (Map.Entry<String, Field> targetFieldEntry : targetType.getFields().entrySet()) {
            String fieldName = targetFieldEntry.getKey();
            Field targetField = targetFieldEntry.getValue();
            Field sourceField = sourceFields.get(fieldName);

            if (sourceField == null) {
                // A missing source field is acceptable only when the target field is optional.
                if (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)) {
                    return false;
                }
                continue;
            }

            if (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {
                return false;
            }

            boolean optionalTargetField = SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL);
            boolean optionalSourceField = SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL);

            if (SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.READONLY)) {
                BString fieldNameBString = StringUtils.fromString(fieldName);

                if (optionalSourceField && !sourceRecordValue.containsKey(fieldNameBString)) {
                    if (!optionalTargetField) {
                        return false;
                    }
                    continue;
                }

                // Readonly field: check the value actually stored in the record.
                if (!checkIsLikeType(sourceRecordValue.get(fieldNameBString),
// (continuation) readonly source field value must look like the target field type.
targetField.getFieldType())) {
                    return false;
                }
            } else {
                if (!optionalTargetField && optionalSourceField) {
                    return false;
                }

                if (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {
                    return false;
                }
            }
        }

        if (targetType.sealed) {
            // Extra source fields are tolerated only if they can never actually hold a value.
            for (String sourceFieldName : sourceFields.keySet()) {
                if (targetFieldNames.contains(sourceFieldName)) {
                    continue;
                }

                if (!checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(
                        sourceFields.get(sourceFieldName).getFieldType())) {
                    return false;
                }
            }
            return true;
        }

        // Open target: source-only fields must be compatible with the target's rest field type.
        for (Map.Entry<String, Field> targetFieldEntry : sourceFields.entrySet()) {
            String fieldName = targetFieldEntry.getKey();
            Field field = targetFieldEntry.getValue();
            if (targetFieldNames.contains(fieldName)) {
                continue;
            }

            if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
                if (!checkIsLikeType(sourceRecordValue.get(StringUtils.fromString(fieldName)),
                        targetType.restFieldType)) {
                    return false;
                }
            } else if (!checkIsType(field.getFieldType(), targetType.restFieldType, unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    // A readonly target field cannot be satisfied by a non-readonly source field.
    private static boolean hasIncompatibleReadOnlyFlags(Field targetField, Field sourceField) {
        return SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.READONLY) && !SymbolFlags
                .isFlagOn(sourceField.getFlags(), SymbolFlags.READONLY);
    }

    // Array-to-array subtyping: open/closed state and size must be compatible, then element types.
    private static boolean checkIsArrayType(BArrayType sourceType, BArrayType targetType,
                                            List<TypePair> unresolvedTypes) {
        switch (sourceType.getState()) {
            case OPEN:
                // An open (unbounded) array cannot be a subtype of a fixed-length array.
                if (targetType.getState() != ArrayState.OPEN) {
                    return false;
                }
                break;
            case CLOSED:
                if (targetType.getState() == ArrayState.CLOSED &&
                        sourceType.getSize() != targetType.getSize()) {
                    return false;
                }
                break;
        }
        return checkIsType(sourceType.getElementType(), targetType.getElementType(), unresolvedTypes);
    }

    // Tuple-to-array subtyping: every member (and the rest type, if any) must fit the element type.
    private static boolean checkIsArrayType(BTupleType sourceType, BArrayType targetType,
                                            List<TypePair> unresolvedTypes) {
        List<Type> tupleTypes = sourceType.getTupleTypes();
        Type sourceRestType = sourceType.getRestType();
        Type targetElementType = targetType.getElementType();

        if (targetType.getState() == ArrayState.OPEN) {
            for (Type sourceElementType : tupleTypes) {
                if (!checkIsType(sourceElementType, targetElementType, unresolvedTypes)) {
                    return false;
                }
            }
            if (sourceRestType != null) {
                return checkIsType(sourceRestType, targetElementType, unresolvedTypes);
            }
            return true;
        }

        // Fixed-length target: the tuple must have an exactly matching, rest-free shape.
        if (sourceRestType != null) {
            return false;
        }

        if (tupleTypes.size() != targetType.getSize()) {
            return false;
        }

        for (Type sourceElementType : tupleTypes) {
            if (!checkIsType(sourceElementType, targetElementType, unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    // Dispatch for array targets: unions must match member-wise; arrays/tuples have direct checks.
    private static boolean checkIsArrayType(Type sourceType, BArrayType targetType, List<TypePair> unresolvedTypes) {
        int sourceTypeTag = sourceType.getTag();

        if (sourceTypeTag == TypeTags.UNION_TAG) {
            for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                if (!checkIsArrayType(memberType, targetType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        }

        if (sourceTypeTag != TypeTags.ARRAY_TAG && sourceTypeTag != TypeTags.TUPLE_TAG) {
            return false;
        }

        if (sourceTypeTag == TypeTags.ARRAY_TAG) {
            return checkIsArrayType((BArrayType) sourceType, targetType, unresolvedTypes);
        }
        return checkIsArrayType((BTupleType) sourceType, targetType, unresolvedTypes);
    }

    // Array-to-tuple subtyping: mainly possible when the tuple reduces to a rest type.
    private static boolean checkIsTupleType(BArrayType sourceType, BTupleType targetType,
                                            List<TypePair> unresolvedTypes) {
        Type sourceElementType = sourceType.getElementType();
        List<Type> targetTypes = targetType.getTupleTypes();
        Type targetRestType = targetType.getRestType();

        switch (sourceType.getState()) {
            case OPEN:
                // An open array can only match a tuple that is purely a rest type.
                if (targetRestType == null) {
                    return false;
                }
                if (targetTypes.isEmpty()) {
                    return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
                }
                return false;
            case CLOSED:
                if (sourceType.getSize() < targetTypes.size()) {
                    return false;
                }
                if (targetTypes.isEmpty()) {
                    if (targetRestType != null) {
                        return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
                    }
                    return
// (continuation) an empty tuple target only matches a zero-length array.
sourceType.getSize() == 0;
                }

                for (Type targetElementType : targetTypes) {
                    if (!(checkIsType(sourceElementType, targetElementType, unresolvedTypes))) {
                        return false;
                    }
                }

                if (sourceType.getSize() == targetTypes.size()) {
                    return true;
                }

                // Extra array members beyond the fixed tuple members must fit the rest type.
                if (targetRestType != null) {
                    return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
                }
                return false;
            default:
                return false;
        }
    }

    /**
     * Tuple-to-tuple subtyping: positional member compatibility, with trailing source members and
     * the source rest type checked against the target rest type.
     */
    private static boolean checkIsTupleType(BTupleType sourceType, BTupleType targetType,
                                            List<TypePair> unresolvedTypes) {
        List<Type> sourceTypes = sourceType.getTupleTypes();
        Type sourceRestType = sourceType.getRestType();
        List<Type> targetTypes = targetType.getTupleTypes();
        Type targetRestType = targetType.getRestType();

        // A source with a rest type cannot fit a target without one.
        if (sourceRestType != null && targetRestType == null) {
            return false;
        }

        int sourceTypeSize = sourceTypes.size();
        int targetTypeSize = targetTypes.size();

        if (sourceRestType == null && targetRestType == null && sourceTypeSize != targetTypeSize) {
            return false;
        }

        if (sourceTypeSize < targetTypeSize) {
            return false;
        }

        for (int i = 0; i < targetTypeSize; i++) {
            if (!checkIsType(sourceTypes.get(i), targetTypes.get(i), unresolvedTypes)) {
                return false;
            }
        }

        if (sourceTypeSize == targetTypeSize) {
            if (sourceRestType != null) {
                return checkIsType(sourceRestType, targetRestType, unresolvedTypes);
            }
            return true;
        }

        // Trailing source members map onto the target rest type.
        for (int i = targetTypeSize; i < sourceTypeSize; i++) {
            if (!checkIsType(sourceTypes.get(i), targetRestType, unresolvedTypes)) {
                return false;
            }
        }

        if (sourceRestType != null) {
            return checkIsType(sourceRestType, targetRestType, unresolvedTypes);
        }
        return true;
    }

    // Dispatch for tuple targets: unions member-wise; arrays/tuples via direct checks.
    private static boolean checkIsTupleType(Type sourceType, BTupleType targetType, List<TypePair> unresolvedTypes) {
        int sourceTypeTag = sourceType.getTag();

        if (sourceTypeTag == TypeTags.UNION_TAG) {
            for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                if (!checkIsTupleType(memberType, targetType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        }

        if (sourceTypeTag != TypeTags.ARRAY_TAG && sourceTypeTag != TypeTags.TUPLE_TAG) {
            return false;
        }

        if (sourceTypeTag == TypeTags.ARRAY_TAG) {
            return checkIsTupleType((BArrayType) sourceType, targetType, unresolvedTypes);
        }
        return checkIsTupleType((BTupleType) sourceType, targetType, unresolvedTypes);
    }

    // Everything except error and readonly belongs to `any` (union-like sources member-wise).
    private static boolean checkIsAnyType(Type sourceType) {
        switch (sourceType.getTag()) {
            case TypeTags.ERROR_TAG:
            case TypeTags.READONLY_TAG:
                return false;
            case TypeTags.UNION_TAG:
            case TypeTags.ANYDATA_TAG:
            case TypeTags.JSON_TAG:
                for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                    if (!checkIsAnyType(memberType)) {
                        return false;
                    }
                }
                return true;
        }
        return true;
    }

    // Finite-to-finite subtyping via value-space containment (sizes equal => set equality check).
    private static boolean checkIsFiniteType(Type sourceType, BFiniteType targetType) {
        if (sourceType.getTag() != TypeTags.FINITE_TYPE_TAG) {
            return false;
        }

        BFiniteType sourceFiniteType = (BFiniteType) sourceType;
        if (sourceFiniteType.valueSpace.size() != targetType.valueSpace.size()) {
            return false;
        }

        return targetType.valueSpace.containsAll(sourceFiniteType.valueSpace);
    }

    // future<T> is a subtype of future<T'> iff T <: T'.
    private static boolean checkIsFutureType(Type sourceType, BFutureType targetType, List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() != TypeTags.FUTURE_TAG) {
            return false;
        }
        return checkConstraints(((BFutureType) sourceType).getConstrainedType(), targetType.getConstrainedType(),
                unresolvedTypes);
    }

    // Type-only object equivalency; delegates to the value-aware overload with no value.
    private static boolean checkObjectEquivalency(Type sourceType, BObjectType targetType,
                                                  List<TypePair> unresolvedTypes) {
        return checkObjectEquivalency(null, sourceType, targetType, unresolvedTypes);
    }

    /**
     * Structural object subtyping: field and method compatibility plus visibility, isolation and
     * type-id requirements. When {@code sourceVal} is non-null, final field values are re-checked
     * against the target field types.
     */
    private static boolean checkObjectEquivalency(Object sourceVal, Type sourceType, BObjectType targetType,
                                                  List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() != TypeTags.OBJECT_TYPE_TAG && sourceType.getTag() != TypeTags.SERVICE_TAG) {
            return false;
        }
        // If we encounter two types that we are already trying to resolve, then skip it (cycle).
        TypePair pair = new TypePair(sourceType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        BObjectType sourceObjectType = (BObjectType) sourceType;

        if
// (continuation) an isolated target cannot be satisfied by a non-isolated source.
(SymbolFlags.isFlagOn(targetType.flags, SymbolFlags.ISOLATED) &&
                !SymbolFlags.isFlagOn(sourceObjectType.flags, SymbolFlags.ISOLATED)) {
            return false;
        }

        Map<String, Field> targetFields = targetType.getFields();
        Map<String, Field> sourceFields = sourceObjectType.getFields();
        MethodType[] targetFuncs = targetType.getMethods();
        MethodType[] sourceFuncs = sourceObjectType.getMethods();

        // A target with any private member can never be matched structurally.
        if (targetType.getFields().values().stream().anyMatch(field -> SymbolFlags
                .isFlagOn(field.getFlags(), SymbolFlags.PRIVATE))
                || Stream.of(targetFuncs).anyMatch(func -> SymbolFlags.isFlagOn(func.getFlags(),
                SymbolFlags.PRIVATE))) {
            return false;
        }

        if (targetFields.size() > sourceFields.size() || targetFuncs.length > sourceFuncs.length) {
            return false;
        }

        String targetTypeModule = Optional.ofNullable(targetType.getPackage()).map(Module::toString).orElse("");
        String sourceTypeModule = Optional.ofNullable(sourceObjectType.getPackage()).map(Module::toString).orElse("");

        if (sourceVal == null) {
            if (!checkObjectSubTypeForFields(targetFields, sourceFields, targetTypeModule, sourceTypeModule,
                    unresolvedTypes)) {
                return false;
            }
        } else if (!checkObjectSubTypeForFieldsByValue(targetFields, sourceFields, targetTypeModule, sourceTypeModule,
                (BObject) sourceVal, unresolvedTypes)) {
            return false;
        }

        return checkObjectSubTypeForMethods(unresolvedTypes, targetFuncs, sourceFuncs, targetTypeModule,
                sourceTypeModule, sourceObjectType, targetType);
    }

    // Type-level field compatibility for object subtyping.
    private static boolean checkObjectSubTypeForFields(Map<String, Field> targetFields,
                                                       Map<String, Field> sourceFields, String targetTypeModule,
                                                       String sourceTypeModule, List<TypePair> unresolvedTypes) {
        for (Field lhsField : targetFields.values()) {
            Field rhsField = sourceFields.get(lhsField.getFieldName());
            if (rhsField == null ||
                    !isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),
                            rhsField.getFlags()) || hasIncompatibleReadOnlyFlags(lhsField, rhsField) ||
                    !checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    // Value-aware field compatibility: final fields are checked against the value actually held.
    private static boolean checkObjectSubTypeForFieldsByValue(Map<String, Field> targetFields,
                                                              Map<String, Field> sourceFields,
                                                              String targetTypeModule, String sourceTypeModule,
                                                              BObject sourceObjVal,
                                                              List<TypePair> unresolvedTypes) {
        for (Field lhsField : targetFields.values()) {
            String name = lhsField.getFieldName();
            Field rhsField = sourceFields.get(name);
            if (rhsField == null ||
                    !isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),
                            rhsField.getFlags()) || hasIncompatibleReadOnlyFlags(lhsField, rhsField)) {
                return false;
            }

            if (SymbolFlags.isFlagOn(rhsField.getFlags(), SymbolFlags.FINAL)) {
                Object fieldValue = sourceObjVal.get(StringUtils.fromString(name));
                Type fieldValueType = getType(fieldValue);

                if (fieldValueType.isReadOnly()) {
                    // Immutable value: look-alike check against the target field type.
                    if (!checkIsLikeType(fieldValue, lhsField.getFieldType())) {
                        return false;
                    }
                    continue;
                }

                if (!checkIsType(fieldValueType, lhsField.getFieldType(), unresolvedTypes)) {
                    return false;
                }
            } else if (!checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    // Method compatibility for object subtyping, plus type-id set containment.
    private static boolean checkObjectSubTypeForMethods(List<TypePair> unresolvedTypes,
                                                        MethodType[] targetFuncs,
                                                        MethodType[] sourceFuncs,
                                                        String targetTypeModule, String sourceTypeModule,
                                                        BObjectType sourceType, BObjectType targetType) {
        for (MethodType lhsFunc : targetFuncs) {
            // Resource methods are not considered for structural object subtyping here.
            if (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.RESOURCE)) {
                continue;
            }

            MethodType rhsFunc = getMatchingInvokableType(sourceFuncs, lhsFunc, unresolvedTypes);
            if (rhsFunc == null ||
                    !isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsFunc.getFlags(),
                            rhsFunc.getFlags())) {
                return false;
            }
            // `remote`-ness must agree exactly.
            if (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.REMOTE) != SymbolFlags
                    .isFlagOn(rhsFunc.getFlags(), SymbolFlags.REMOTE)) {
                return false;
            }
        }

        // Distinct-type ids: the source must carry every type id the target requires.
        BTypeIdSet targetTypeIdSet = targetType.typeIdSet;
        if (targetTypeIdSet == null) {
            return true;
        }

        BTypeIdSet sourceTypeIdSet = sourceType.typeIdSet;
        if (sourceTypeIdSet == null) {
            return false;
        }

        return sourceTypeIdSet.containsAll(targetTypeIdSet);
    }

    // Visibility compatibility: private needs same module; public needs public; module-private
    // (neither flag) needs module-private in the same module.
    private static boolean isInSameVisibilityRegion(String lhsTypePkg, String rhsTypePkg, long lhsFlags,
                                                    long rhsFlags) {
        if (SymbolFlags.isFlagOn(lhsFlags, SymbolFlags.PRIVATE)) {
            return lhsTypePkg.equals(rhsTypePkg);
        } else if (SymbolFlags.isFlagOn(lhsFlags, SymbolFlags.PUBLIC)) {
            return SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PUBLIC);
        }
        return !SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PRIVATE) && !SymbolFlags
                .isFlagOn(rhsFlags, SymbolFlags.PUBLIC) && lhsTypePkg.equals(rhsTypePkg);
    }

    // Finds a source method with the same name and a compatible function type, or null.
    private static MethodType getMatchingInvokableType(MethodType[] rhsFuncs, MethodType lhsFunc,
                                                       List<TypePair> unresolvedTypes) {
        return Arrays.stream(rhsFuncs)
                .filter(rhsFunc -> lhsFunc.getName().equals(rhsFunc.getName()))
                .filter(rhsFunc -> checkFunctionTypeEqualityForObjectType(rhsFunc.getType(), lhsFunc.getType(),
                        unresolvedTypes))
                .findFirst()
                .orElse(null);
    }

    // Function-type compatibility used for object methods: contravariant params, covariant return.
    private static boolean checkFunctionTypeEqualityForObjectType(FunctionType source, FunctionType target,
                                                                  List<TypePair> unresolvedTypes) {
        if (hasIncompatibleIsolatedFlags(target, source)) {
            return false;
        }

        if (source.getParameterTypes().length != target.getParameterTypes().length) {
            return false;
        }

        for (int i = 0; i < source.getParameterTypes().length; i++) {
            if (!checkIsType(target.getParameterTypes()[i], source.getParameterTypes()[i], unresolvedTypes)) {
                return false;
            }
        }

        if (source.getReturnType() == null && target.getReturnType() == null) {
            return true;
        } else if (source.getReturnType() == null || target.getReturnType() == null) {
            return false;
        }

        return checkIsType(source.getReturnType(), target.getReturnType(), unresolvedTypes);
    }

    // Function-pointer subtyping: contravariant parameters, covariant return, flag compatibility.
    private static boolean checkIsFunctionType(Type sourceType, BFunctionType targetType) {
        if (sourceType.getTag() != TypeTags.FUNCTION_POINTER_TAG) {
            return false;
        }

        BFunctionType source = (BFunctionType) sourceType;
        if (hasIncompatibleIsolatedFlags(targetType, source)
|| hasIncompatibleTransactionalFlags(targetType, source)) { return false; } if (SymbolFlags.isFlagOn(targetType.getFlags(), SymbolFlags.ANY_FUNCTION)) { return true; } if (source.paramTypes.length != targetType.paramTypes.length) { return false; } for (int i = 0; i < source.paramTypes.length; i++) { if (!checkIsType(targetType.paramTypes[i], source.paramTypes[i], new ArrayList<>())) { return false; } } return checkIsType(source.retType, targetType.retType, new ArrayList<>()); } private static boolean hasIncompatibleIsolatedFlags(FunctionType target, FunctionType source) { return SymbolFlags.isFlagOn(target.getFlags(), SymbolFlags.ISOLATED) && !SymbolFlags .isFlagOn(source.getFlags(), SymbolFlags.ISOLATED); } private static boolean hasIncompatibleTransactionalFlags(FunctionType target, FunctionType source) { return SymbolFlags.isFlagOn(source.getFlags(), SymbolFlags.TRANSACTIONAL) && !SymbolFlags .isFlagOn(target.getFlags(), SymbolFlags.TRANSACTIONAL); } private static boolean checkIsServiceType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) { if (sourceType.getTag() == TypeTags.SERVICE_TAG) { return checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes); } if (sourceType.getTag() == TypeTags.OBJECT_TYPE_TAG) { var flags = ((BObjectType) sourceType).flags; return (flags & SymbolFlags.SERVICE) == SymbolFlags.SERVICE; } return false; } public static boolean isInherentlyImmutableType(Type sourceType) { if (isSimpleBasicType(sourceType)) { return true; } switch (sourceType.getTag()) { case TypeTags.XML_TEXT_TAG: case TypeTags.FINITE_TYPE_TAG: case TypeTags.READONLY_TAG: case TypeTags.NULL_TAG: case TypeTags.ERROR_TAG: case TypeTags.INVOKABLE_TAG: case TypeTags.SERVICE_TAG: case TypeTags.TYPEDESC_TAG: case TypeTags.FUNCTION_POINTER_TAG: case TypeTags.HANDLE_TAG: return true; case TypeTags.XML_TAG: return ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG; } return false; } public static boolean 
isSelectivelyImmutableType(Type type, Set<Type> unresolvedTypes) { if (!unresolvedTypes.add(type)) { return true; } switch (type.getTag()) { case TypeTags.ANY_TAG: case TypeTags.ANYDATA_TAG: case TypeTags.JSON_TAG: case TypeTags.XML_TAG: case TypeTags.XML_COMMENT_TAG: case TypeTags.XML_ELEMENT_TAG: case TypeTags.XML_PI_TAG: return true; case TypeTags.ARRAY_TAG: Type elementType = ((BArrayType) type).getElementType(); return isInherentlyImmutableType(elementType) || isSelectivelyImmutableType(elementType, unresolvedTypes); case TypeTags.TUPLE_TAG: BTupleType tupleType = (BTupleType) type; for (Type tupMemType : tupleType.getTupleTypes()) { if (!isInherentlyImmutableType(tupMemType) && !isSelectivelyImmutableType(tupMemType, unresolvedTypes)) { return false; } } Type tupRestType = tupleType.getRestType(); if (tupRestType == null) { return true; } return isInherentlyImmutableType(tupRestType) || isSelectivelyImmutableType(tupRestType, unresolvedTypes); case TypeTags.RECORD_TYPE_TAG: BRecordType recordType = (BRecordType) type; for (Field field : recordType.getFields().values()) { Type fieldType = field.getFieldType(); if (!isInherentlyImmutableType(fieldType) && !isSelectivelyImmutableType(fieldType, unresolvedTypes)) { return false; } } Type recordRestType = recordType.restFieldType; if (recordRestType == null) { return true; } return isInherentlyImmutableType(recordRestType) || isSelectivelyImmutableType(recordRestType, unresolvedTypes); case TypeTags.OBJECT_TYPE_TAG: BObjectType objectType = (BObjectType) type; if (SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.CLASS) && !SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.READONLY)) { return false; } for (Field field : objectType.getFields().values()) { Type fieldType = field.getFieldType(); if (!isInherentlyImmutableType(fieldType) && !isSelectivelyImmutableType(fieldType, unresolvedTypes)) { return false; } } return true; case TypeTags.MAP_TAG: Type constraintType = ((BMapType) type).getConstrainedType(); 
return isInherentlyImmutableType(constraintType) || isSelectivelyImmutableType(constraintType, unresolvedTypes); case TypeTags.TABLE_TAG: Type tableConstraintType = ((BTableType) type).getConstrainedType(); return isInherentlyImmutableType(tableConstraintType) || isSelectivelyImmutableType(tableConstraintType, unresolvedTypes); case TypeTags.UNION_TAG: boolean readonlyIntersectionExists = false; for (Type memberType : ((BUnionType) type).getMemberTypes()) { if (isInherentlyImmutableType(memberType) || isSelectivelyImmutableType(memberType, unresolvedTypes)) { readonlyIntersectionExists = true; break; } } return readonlyIntersectionExists; case TypeTags.INTERSECTION_TAG: return isSelectivelyImmutableType(((BIntersectionType) type).getEffectiveType(), unresolvedTypes); } return false; } private static boolean checkConstraints(Type sourceConstraint, Type targetConstraint, List<TypePair> unresolvedTypes) { if (sourceConstraint == null) { sourceConstraint = TYPE_ANY; } if (targetConstraint == null) { targetConstraint = TYPE_ANY; } return checkIsType(sourceConstraint, targetConstraint, unresolvedTypes); } private static boolean isMutable(Object value, Type sourceType) { if (value == null || sourceType.getTag() < TypeTags.NULL_TAG || sourceType.getTag() == TypeTags.FINITE_TYPE_TAG) { return false; } return !((RefValue) value).isFrozen(); } private static boolean checkArrayEquivalent(Type actualType, Type expType) { if (expType.getTag() == TypeTags.ARRAY_TAG && actualType.getTag() == TypeTags.ARRAY_TAG) { BArrayType lhrArrayType = (BArrayType) expType; BArrayType rhsArrayType = (BArrayType) actualType; return checkIsArrayType(rhsArrayType, lhrArrayType, new ArrayList<>()); } return expType == actualType; } private static boolean checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type) { Set<String> visitedTypeSet = new HashSet<>(); return checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(type, visitedTypeSet); } private static boolean 
checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type, Set<String> visitedTypeSet) { switch (type.getTag()) { case TypeTags.NEVER_TAG: return true; case TypeTags.RECORD_TYPE_TAG: BRecordType recordType = (BRecordType) type; visitedTypeSet.add(recordType.getName()); for (Field field : recordType.getFields().values()) { if ((SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED) || !SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) && !visitedTypeSet.contains(field.getFieldType()) && checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(field.getFieldType(), visitedTypeSet)) { return true; } } return false; case TypeTags.TUPLE_TAG: BTupleType tupleType = (BTupleType) type; visitedTypeSet.add(tupleType.getName()); List<Type> tupleTypes = tupleType.getTupleTypes(); for (Type mem : tupleTypes) { if (!visitedTypeSet.add(mem.getName())) { continue; } if (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(mem, visitedTypeSet)) { return true; } } return false; case TypeTags.ARRAY_TAG: BArrayType arrayType = (BArrayType) type; visitedTypeSet.add(arrayType.getName()); Type elemType = arrayType.getElementType(); visitedTypeSet.add(elemType.getName()); return arrayType.getState() != ArrayState.OPEN && checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(elemType, visitedTypeSet); default: return false; } } /** * Check whether a given value confirms to a given type. First it checks if the type of the value, and * if fails then falls back to checking the value. * * @param sourceValue Value to check * @param targetType Target type * @param unresolvedValues Values that are unresolved so far * @param allowNumericConversion Flag indicating whether to perform numeric conversions * @return True if the value confirms to the provided type. False, otherwise. 
*/
    // "Looks like" check: first tries the static subtype relation, then falls back to a structural
    // inspection of the value itself.
    private static boolean checkIsLikeType(Object sourceValue, Type targetType,
                                           List<TypeValuePair> unresolvedValues,
                                           boolean allowNumericConversion) {
        Type sourceType = getType(sourceValue);
        if (checkIsType(sourceType, targetType, new ArrayList<>())) {
            return true;
        }
        return checkIsLikeOnValue(sourceValue, sourceType, targetType, unresolvedValues,
                allowNumericConversion);
    }

    /**
     * Check whether a given value confirms to a given type. Strictly checks the value only, and does not
     * consider the type of the value for consideration.
     *
     * @param sourceValue Value to check
     * @param sourceType Type of the value
     * @param targetType Target type
     * @param unresolvedValues Values that are unresolved so far
     * @param allowNumericConversion Flag indicating whether to perform numeric conversions
     * @return True if the value confirms to the provided type. False, otherwise.
     */
    private static boolean checkIsLikeOnValue(Object sourceValue, Type sourceType, Type targetType,
                                              List<TypeValuePair> unresolvedValues,
                                              boolean allowNumericConversion) {
        int sourceTypeTag = sourceType.getTag();
        int targetTypeTag = targetType.getTag();
        // Intersections and parameterized types are unwrapped to their effective/param-value types first.
        if (sourceTypeTag == TypeTags.INTERSECTION_TAG) {
            return checkIsLikeOnValue(sourceValue, ((BIntersectionType) sourceType).getEffectiveType(),
                    targetTypeTag != TypeTags.INTERSECTION_TAG ? targetType :
                            ((BIntersectionType) targetType).getEffectiveType(),
                    unresolvedValues, allowNumericConversion);
        }
        if (targetTypeTag == TypeTags.INTERSECTION_TAG) {
            return checkIsLikeOnValue(sourceValue, sourceType,
                    ((BIntersectionType) targetType).getEffectiveType(),
                    unresolvedValues, allowNumericConversion);
        }
        if (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) {
            if (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) {
                return checkIsLikeOnValue(sourceValue,
                        ((BParameterizedType) sourceType).getParamValueType(), targetType, unresolvedValues,
                        allowNumericConversion);
            }
            return checkIsLikeOnValue(sourceValue, ((BParameterizedType) sourceType).getParamValueType(),
                    ((BParameterizedType) targetType).getParamValueType(), unresolvedValues,
                    allowNumericConversion);
        }
        // Dispatch on the target type's tag.
        switch (targetTypeTag) {
            case TypeTags.READONLY_TAG:
                return true;
            case TypeTags.BYTE_TAG:
                if (TypeTags.isIntegerTypeTag(sourceTypeTag)) {
                    return isByteLiteral((Long) sourceValue);
                }
                return allowNumericConversion && TypeConverter.isConvertibleToByte(sourceValue);
            case TypeTags.INT_TAG:
                return allowNumericConversion && TypeConverter.isConvertibleToInt(sourceValue);
            case TypeTags.SIGNED32_INT_TAG:
            case TypeTags.SIGNED16_INT_TAG:
            case TypeTags.SIGNED8_INT_TAG:
            case TypeTags.UNSIGNED32_INT_TAG:
            case TypeTags.UNSIGNED16_INT_TAG:
            case TypeTags.UNSIGNED8_INT_TAG:
                if (TypeTags.isIntegerTypeTag(sourceTypeTag) || targetTypeTag == TypeTags.BYTE_TAG) {
                    return TypeConverter.isConvertibleToIntSubType(sourceValue, targetType);
                }
                return allowNumericConversion && TypeConverter.isConvertibleToIntSubType(sourceValue,
                        targetType);
            case TypeTags.FLOAT_TAG:
            case TypeTags.DECIMAL_TAG:
                return allowNumericConversion && TypeConverter.isConvertibleToFloatingPointTypes(sourceValue);
            case TypeTags.CHAR_STRING_TAG:
                return TypeConverter.isConvertibleToChar(sourceValue);
            case TypeTags.RECORD_TYPE_TAG:
                return checkIsLikeRecordType(sourceValue, (BRecordType) targetType, unresolvedValues,
                        allowNumericConversion);
            case TypeTags.TABLE_TAG:
                return checkIsLikeTableType(sourceValue, (BTableType) targetType, unresolvedValues,
                        allowNumericConversion);
            case TypeTags.JSON_TAG:
                return checkIsLikeJSONType(sourceValue, sourceType, (BJsonType) targetType, unresolvedValues,
                        allowNumericConversion);
            case TypeTags.MAP_TAG:
                return checkIsLikeMapType(sourceValue, (BMapType) targetType, unresolvedValues,
                        allowNumericConversion);
            case TypeTags.STREAM_TAG:
                return checkIsLikeStreamType(sourceValue, (BStreamType) targetType);
            case TypeTags.ARRAY_TAG:
                return checkIsLikeArrayType(sourceValue, (BArrayType) targetType, unresolvedValues,
                        allowNumericConversion);
            case TypeTags.TUPLE_TAG:
                return checkIsLikeTupleType(sourceValue, (BTupleType) targetType, unresolvedValues,
                        allowNumericConversion);
            case TypeTags.ERROR_TAG:
                return checkIsLikeErrorType(sourceValue, (BErrorType) targetType, unresolvedValues,
                        allowNumericConversion);
            case TypeTags.ANYDATA_TAG:
                return checkIsLikeAnydataType(sourceValue, sourceType, unresolvedValues,
                        allowNumericConversion);
            case TypeTags.FINITE_TYPE_TAG:
                return checkFiniteTypeAssignable(sourceValue, sourceType, (BFiniteType) targetType);
            case TypeTags.XML_ELEMENT_TAG:
                if (sourceTypeTag == TypeTags.XML_TAG) {
                    XmlValue xmlSource = (XmlValue) sourceValue;
                    return xmlSource.isSingleton();
                }
                return false;
            case TypeTags.XML_COMMENT_TAG:
            case TypeTags.XML_PI_TAG:
            case TypeTags.XML_TEXT_TAG:
                if (sourceTypeTag == TypeTags.XML_TAG) {
                    return checkIsLikeNonElementSingleton((XmlValue) sourceValue, targetType);
                }
                return false;
            case TypeTags.XML_TAG:
                if (sourceTypeTag == TypeTags.XML_TAG) {
                    return checkIsLikeXMLSequenceType((XmlValue) sourceValue, targetType);
                }
                return false;
            case TypeTags.UNION_TAG:
                if (allowNumericConversion) {
                    // With numeric conversion the value may "look like" several members; it is accepted
                    // only if conversion adds at most one extra compatible member (ambiguity guard).
                    List<Type> compatibleTypesWithNumConversion = new ArrayList<>();
                    List<Type> compatibleTypesWithoutNumConversion = new ArrayList<>();
                    for (Type type : ((BUnionType) targetType).getMemberTypes()) {
                        List<TypeValuePair> tempList = new ArrayList<>(unresolvedValues.size());
                        tempList.addAll(unresolvedValues);
                        if (checkIsLikeType(sourceValue, type, tempList, false)) {
                            compatibleTypesWithoutNumConversion.add(type);
                        }
                        if (checkIsLikeType(sourceValue, type, unresolvedValues, true)) {
                            compatibleTypesWithNumConversion.add(type);
                        }
                    }
                    return compatibleTypesWithNumConversion.size() != 0
                            && compatibleTypesWithNumConversion.size() -
                            compatibleTypesWithoutNumConversion.size() <= 1;
                } else {
                    for (Type type : ((BUnionType) targetType).getMemberTypes()) {
                        if (checkIsLikeType(sourceValue, type, unresolvedValues, false)) {
                            return true;
                        }
                    }
                }
                return false;
            default:
                return false;
        }
    }

    /** Maps a singleton-XML type tag to the corresponding node kind; null for non-XML-singleton types. */
    private static XmlNodeType getXmlNodeType(Type type) {
        XmlNodeType nodeType = null;
        switch (type.getTag()) {
            case TypeTags.XML_ELEMENT_TAG:
                nodeType = XmlNodeType.ELEMENT;
                break;
            case TypeTags.XML_COMMENT_TAG:
                nodeType = XmlNodeType.COMMENT;
                break;
            case TypeTags.XML_PI_TAG:
                nodeType = XmlNodeType.PI;
                break;
            case TypeTags.XML_TEXT_TAG:
                nodeType = XmlNodeType.TEXT;
                break;
            default:
                return null;
        }
        return nodeType;
    }

    // An xml value looks like a non-element singleton type if it is that node kind, or a one-item
    // sequence of that kind (an empty sequence counts as text).
    private static boolean checkIsLikeNonElementSingleton(XmlValue xmlSource, Type targetType) {
        XmlNodeType nodeType = getXmlNodeType(targetType);
        if (nodeType == null) {
            return false;
        }
        if (xmlSource.getNodeType() == nodeType) {
            return true;
        }
        if (xmlSource.getNodeType() == XmlNodeType.SEQUENCE) {
            XmlSequence seq = (XmlSequence) xmlSource;
            return seq.size() == 1 && seq.getChildrenList().get(0).getNodeType() == nodeType ||
                    (nodeType == XmlNodeType.TEXT && seq.isEmpty());
        }
        return false;
    }

    // A sequence looks like xml<C> when every child's node kind is accepted by the constraint C.
    private static boolean checkIsLikeXMLSequenceType(XmlValue xmlSource, Type targetType) {
        if (xmlSource.getNodeType() != XmlNodeType.SEQUENCE) {
            return false;
        }
        Set<XmlNodeType> acceptedNodes = new HashSet<>();
        BXmlType target = (BXmlType) targetType;
        if (target.constraint.getTag() == TypeTags.UNION_TAG) {
            getXMLNodeOnUnion((BUnionType) target.constraint, acceptedNodes);
        } else {
            acceptedNodes.add(getXmlNodeType(((BXmlType) targetType).constraint));
        }
        XmlSequence seq = (XmlSequence) xmlSource;
        for (BXml m : seq.getChildrenList()) {
            if (!acceptedNodes.contains(m.getNodeType())) {
                return false;
            }
        }
        return true;
    }

    // Collects the XML node kinds accepted by a (possibly nested) union; 4 kinds means all accepted.
    private static void getXMLNodeOnUnion(BUnionType unionType, Set<XmlNodeType> nodeTypes) {
        if (nodeTypes.size() == 4) {
            return;
        }
        for (Type memberType : unionType.getMemberTypes()) {
            if (memberType.getTag() == TypeTags.UNION_TAG) {
                getXMLNodeOnUnion((BUnionType) memberType, nodeTypes);
            } else {
                nodeTypes.add(getXmlNodeType(memberType));
            }
        }
    }

    // Numeric = tags below STRING_TAG (int/float/decimal and friends) plus the integer subtypes.
    public static boolean isNumericType(Type type) {
        return type.getTag() < TypeTags.STRING_TAG || TypeTags.isIntegerTypeTag(type.getTag());
    }

    // Structural anydata check over the value's members, dispatched on the value's own type tag.
    private static boolean checkIsLikeAnydataType(Object sourceValue, Type sourceType,
                                                  List<TypeValuePair> unresolvedValues,
                                                  boolean allowNumericConversion) {
        switch (sourceType.getTag()) {
            case TypeTags.RECORD_TYPE_TAG:
            case TypeTags.JSON_TAG:
            case TypeTags.MAP_TAG:
                return isLikeType(((MapValueImpl) sourceValue).values().toArray(), TYPE_ANYDATA,
                        unresolvedValues, allowNumericConversion);
            case TypeTags.ARRAY_TAG:
                ArrayValue arr = (ArrayValue) sourceValue;
                BArrayType arrayType = (BArrayType) arr.getType();
                switch (arrayType.getElementType().getTag()) {
                    // Arrays of simple basic element types are trivially anydata.
                    case TypeTags.INT_TAG:
                    case TypeTags.FLOAT_TAG:
                    case TypeTags.DECIMAL_TAG:
                    case TypeTags.STRING_TAG:
                    case TypeTags.BOOLEAN_TAG:
                    case TypeTags.BYTE_TAG:
                        return true;
                    default:
                        return isLikeType(arr.getValues(), TYPE_ANYDATA, unresolvedValues,
                                allowNumericConversion);
                }
            case TypeTags.TUPLE_TAG:
                return isLikeType(((ArrayValue) sourceValue).getValues(), TYPE_ANYDATA, unresolvedValues,
                        allowNumericConversion);
            case TypeTags.ANYDATA_TAG:
                return true;
            case TypeTags.FINITE_TYPE_TAG:
            case TypeTags.UNION_TAG:
                return checkIsLikeType(sourceValue, TYPE_ANYDATA, unresolvedValues, allowNumericConversion);
            default:
                return false;
        }
    }

    /** All-of helper: every element must look like {@code targetType}. */
    private static boolean isLikeType(Object[] objects, Type targetType,
                                      List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
        for (Object value : objects) {
            if (!checkIsLikeType(value, targetType, unresolvedValues, allowNumericConversion)) {
                return false;
            }
        }
        return true;
    }

    // Tuple check: fixed members positionally, extra members against the rest type (if any).
    private static boolean checkIsLikeTupleType(Object sourceValue, BTupleType targetType,
                                                List<TypeValuePair> unresolvedValues,
                                                boolean allowNumericConversion) {
        if (!(sourceValue instanceof ArrayValue)) {
            return false;
        }
        ArrayValue source = (ArrayValue) sourceValue;
        List<Type> targetTypes = targetType.getTupleTypes();
        int sourceTypeSize = source.size();
        int targetTypeSize = targetTypes.size();
        Type targetRestType = targetType.getRestType();
        if (sourceTypeSize < targetTypeSize) {
            return false;
        }
        if (targetRestType == null && sourceTypeSize > targetTypeSize) {
            return false;
        }
        for (int i = 0; i < targetTypeSize; i++) {
            if (!checkIsLikeType(source.getRefValue(i), targetTypes.get(i), unresolvedValues,
                    allowNumericConversion)) {
                return false;
            }
        }
        for (int i = targetTypeSize; i < sourceTypeSize; i++) {
            if (!checkIsLikeType(source.getRefValue(i), targetRestType, unresolvedValues,
                    allowNumericConversion)) {
                return false;
            }
        }
        return true;
    }

    // Range checks for the built-in integer subtypes.
    static boolean isByteLiteral(long longValue) {
        return (longValue >= BBYTE_MIN_VALUE && longValue <= BBYTE_MAX_VALUE);
    }

    static boolean isSigned32LiteralValue(Long longObject) {
        return (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE);
    }

    static boolean isSigned16LiteralValue(Long longObject) {
        return (longObject.intValue() >= SIGNED16_MIN_VALUE && longObject.intValue() <= SIGNED16_MAX_VALUE);
    }

    static boolean isSigned8LiteralValue(Long longObject) {
        return (longObject.intValue() >= SIGNED8_MIN_VALUE && longObject.intValue() <= SIGNED8_MAX_VALUE);
    }

    static boolean isUnsigned32LiteralValue(Long longObject) {
        return (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE);
    }

    static boolean isUnsigned16LiteralValue(Long longObject) {
        return (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED16_MAX_VALUE);
    }

    static boolean isUnsigned8LiteralValue(Long longObject) {
        return (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED8_MAX_VALUE);
    }

    // A string:Char literal is exactly one Unicode code point.
    static boolean isCharLiteralValue(Object object) {
        String value;
        if (object instanceof BString) {
            value = ((BString) object).getValue();
        } else if (object instanceof String) {
            value = (String) object;
        } else {
            return false;
        }
        return value.codePoints().count() == 1;
    }

    private static boolean checkIsLikeArrayType(Object sourceValue, BArrayType targetType,
                                                List<TypeValuePair> unresolvedValues,
                                                boolean allowNumericConversion) {
        if (!(sourceValue instanceof ArrayValue)) {
            return false;
        }
        ArrayValue source = (ArrayValue) sourceValue;
        Type targetTypeElementType = targetType.getElementType();
        if (source.getType().getTag() == TypeTags.ARRAY_TAG) {
            Type sourceElementType = ((BArrayType) source.getType()).getElementType();
            if (isValueType(sourceElementType)) {
                // Fast path for value-typed element arrays: decide from the element types alone.
                if (checkIsType(sourceElementType, targetTypeElementType, new ArrayList<>())) {
                    return true;
                }
                if (allowNumericConversion && isNumericType(sourceElementType)) {
                    if (isNumericType(targetTypeElementType)) {
                        return true;
                    }
                    if (targetTypeElementType.getTag() != TypeTags.UNION_TAG) {
                        return false;
                    }
                    // Numeric conversion into a union is only unambiguous with a single numeric member.
                    List<Type> targetNumericTypes = new ArrayList<>();
                    for (Type memType : ((BUnionType) targetTypeElementType).getMemberTypes()) {
                        if (isNumericType(memType) && !targetNumericTypes.contains(memType)) {
                            targetNumericTypes.add(memType);
                        }
                    }
                    return targetNumericTypes.size() == 1;
                }
                if (targetTypeElementType.getTag() == TypeTags.FLOAT_TAG ||
                        targetTypeElementType.getTag() == TypeTags.DECIMAL_TAG) {
                    return false;
                }
            }
        }
        for (int i = 0; i < source.size(); i++) {
            if (!checkIsLikeType(source.get(i), targetTypeElementType, unresolvedValues,
                    allowNumericConversion)) {
                return false;
            }
        }
        return true;
    }

    /** Every member value of the map must look like the target's constraint type. */
    private static boolean checkIsLikeMapType(Object sourceValue, BMapType targetType,
                                              List<TypeValuePair> unresolvedValues,
                                              boolean allowNumericConversion) {
        if (!(sourceValue instanceof MapValueImpl)) {
            return false;
        }
        for (Object mapEntry : ((MapValueImpl) sourceValue).values()) {
            if (!checkIsLikeType(mapEntry, targetType.getConstrainedType(), unresolvedValues,
                    allowNumericConversion)) {
                return false;
            }
        }
        return true;
    }

    // Streams are compared by constraint identity (reference equality), not structurally.
    private static boolean checkIsLikeStreamType(Object sourceValue, BStreamType targetType) {
        if (!(sourceValue instanceof StreamValue)) {
            return false;
        }
        BStreamType streamType = (BStreamType) ((StreamValue) sourceValue).getType();
        return streamType.getConstrainedType() == targetType.getConstrainedType();
    }

    private static boolean checkIsLikeJSONType(Object sourceValue, Type sourceType, BJsonType targetType,
                                               List<TypeValuePair> unresolvedValues,
                                               boolean allowNumericConversion) {
        if (sourceType.getTag() == TypeTags.ARRAY_TAG) {
            ArrayValue source = (ArrayValue) sourceValue;
            Type elementType = ((BArrayType) source.getType()).getElementType();
            if (isValueType(elementType)) {
                return checkIsType(elementType, targetType, new ArrayList<>());
            }
            Object[] arrayValues = source.getValues();
            for (int i = 0; i < ((ArrayValue) sourceValue).size(); i++) {
                if (!checkIsLikeType(arrayValues[i], targetType, unresolvedValues,
                        allowNumericConversion)) {
                    return false;
                }
            }
            return true;
        } else if (sourceType.getTag() == TypeTags.MAP_TAG) {
            for (Object value : ((MapValueImpl) sourceValue).values()) {
                if (!checkIsLikeType(value, targetType, unresolvedValues, allowNumericConversion)) {
                    return false;
                }
            }
            return true;
        } else if (sourceType.getTag() == TypeTags.RECORD_TYPE_TAG) {
            // Records may be cyclic; the (value, type) pair guard breaks the recursion.
            TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);
            if (unresolvedValues.contains(typeValuePair)) {
                return true;
            }
            unresolvedValues.add(typeValuePair);
            for (Object object : ((MapValueImpl) sourceValue).values()) {
                if (!checkIsLikeType(object, targetType, unresolvedValues, allowNumericConversion)) {
                    return false;
                }
            }
            return true;
        } else if (sourceType.getTag() == TypeTags.TUPLE_TAG) {
            for (Object obj : ((TupleValueImpl) sourceValue).getValues()) {
                if (!checkIsLikeType(obj, targetType, unresolvedValues, allowNumericConversion)) {
                    return false;
                }
            }
            return true;
        }
        return false;
    }

    private static boolean checkIsLikeRecordType(Object sourceValue, BRecordType targetType,
                                                 List<TypeValuePair> unresolvedValues,
                                                 boolean allowNumericConversion) {
        if (!(sourceValue instanceof MapValueImpl)) {
            return false;
        }
        TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);
        if (unresolvedValues.contains(typeValuePair)) {
            return true;
        }
        unresolvedValues.add(typeValuePair);
        Map<String, Type> targetTypeField = new HashMap<>();
        Type restFieldType = targetType.restFieldType;
        for (Field field : targetType.getFields().values()) {
            targetTypeField.put(field.getFieldName(), field.getFieldType());
        }
        // Every non-optional target field must be present in the source value.
        for (Map.Entry targetTypeEntry : targetTypeField.entrySet()) {
            Object fieldName = StringUtils.fromString(targetTypeEntry.getKey().toString());
            if (!(((MapValueImpl) sourceValue).containsKey(fieldName)) &&
                    !SymbolFlags.isFlagOn(targetType.getFields().get(fieldName.toString()).getFlags(),
                            SymbolFlags.OPTIONAL)) {
                return false;
            }
        }
        // Declared fields are checked against their field types; extras against the rest type (if open).
        for (Object object : ((MapValueImpl) sourceValue).entrySet()) {
            Map.Entry valueEntry = (Map.Entry) object;
            String fieldName = valueEntry.getKey().toString();
            if (targetTypeField.containsKey(fieldName)) {
                if (!checkIsLikeType((valueEntry.getValue()), targetTypeField.get(fieldName),
                        unresolvedValues, allowNumericConversion)) {
                    return false;
                }
            } else {
                if (!targetType.sealed) {
                    if (!checkIsLikeType((valueEntry.getValue()), restFieldType, unresolvedValues,
                            allowNumericConversion)) {
                        return false;
                    }
                } else {
                    return false;
                }
            }
        }
        return true;
    }

    private static boolean checkIsLikeTableType(Object sourceValue, BTableType targetType,
                                                List<TypeValuePair> unresolvedValues,
                                                boolean allowNumericConversion) {
        if (!(sourceValue instanceof TableValueImpl)) {
            return false;
        }
        TableValueImpl tableValue = (TableValueImpl) sourceValue;
        BTableType sourceType = (BTableType) tableValue.getType();
        if (targetType.getKeyType() != null && sourceType.getFieldNames() == null) {
            return false;
        }
        if (sourceType.getKeyType() != null && !checkIsType(tableValue.getKeyType(),
                targetType.getKeyType())) {
            return false;
        }
        // NOTE(review): the pair is checked but never added to unresolvedValues here (unlike the record
        // case), and the per-row check below uses the 3-arg overload which does not carry the guard list
        // forward — confirm whether this guard is intentionally a no-op or a missing add().
        TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);
        if (unresolvedValues.contains(typeValuePair)) {
            return true;
        }
        Object[] objects = tableValue.values().toArray();
        for (Object object : objects) {
            if (!checkIsLikeType(object, targetType.getConstrainedType(), allowNumericConversion)) {
                return false;
            }
        }
        return true;
    }

    /** A value belongs to a finite type if it matches any member of the type's value space. */
    private static boolean checkFiniteTypeAssignable(Object sourceValue, Type sourceType,
                                                     BFiniteType targetType) {
        for (Object valueSpaceItem : targetType.valueSpace) {
            if (isFiniteTypeValue(sourceValue, sourceType, valueSpaceItem)) {
                return true;
            }
        }
        return false;
    }

    // Numeric value-space members compare by numeric value (with matching tag for float/decimal);
    // everything else compares by tag plus equals()/identity.
    protected static boolean isFiniteTypeValue(Object sourceValue, Type sourceType, Object valueSpaceItem) {
        Type valueSpaceItemType = getType(valueSpaceItem);
        if (valueSpaceItemType.getTag() > TypeTags.FLOAT_TAG) {
            return valueSpaceItemType.getTag() == sourceType.getTag() &&
                    (valueSpaceItem == sourceValue || valueSpaceItem.equals(sourceValue));
        }
        switch (sourceType.getTag()) {
            case TypeTags.BYTE_TAG:
            case TypeTags.INT_TAG:
                return ((Number) sourceValue).longValue() == ((Number) valueSpaceItem).longValue();
            case TypeTags.FLOAT_TAG:
                if (sourceType.getTag() != valueSpaceItemType.getTag()) {
                    return false;
                }
                return ((Number) sourceValue).doubleValue() == ((Number) valueSpaceItem).doubleValue();
            case TypeTags.DECIMAL_TAG:
            default:
                if (sourceType.getTag() != valueSpaceItemType.getTag()) {
                    return false;
                }
                return valueSpaceItem.equals(sourceValue);
        }
    }

    // Error subtyping: detail type must be a subtype and the source's type-id set must cover the target's.
    private static boolean checkIsErrorType(Type sourceType, BErrorType targetType,
                                            List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() != TypeTags.ERROR_TAG) {
            return false;
        }
        // Self-referencing error detail types are guarded via the unresolved pair list.
        TypePair pair = new TypePair(sourceType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);
        BErrorType bErrorType = (BErrorType) sourceType;
        if (!checkIsType(bErrorType.detailType, targetType.detailType, unresolvedTypes)) {
            return false;
        }
        if (targetType.typeIdSet == null) {
            return true;
        }
        BTypeIdSet sourceTypeIdSet = bErrorType.typeIdSet;
        if (sourceTypeIdSet == null) {
            return false;
        }
        return sourceTypeIdSet.containsAll(targetType.typeIdSet);
    }

    // Value-level error check: detail value must look like the target detail type, plus type-id coverage.
    private static boolean checkIsLikeErrorType(Object sourceValue, BErrorType targetType,
                                                List<TypeValuePair> unresolvedValues,
                                                boolean allowNumericConversion) {
        Type sourceType = getType(sourceValue);
        if (sourceValue == null || sourceType.getTag() != TypeTags.ERROR_TAG) {
            return false;
        }
        if (!checkIsLikeType(((ErrorValue) sourceValue).getDetails(), targetType.detailType,
                unresolvedValues, allowNumericConversion)) {
            return false;
        }
        if (targetType.typeIdSet == null) {
            return true;
        }
        BTypeIdSet sourceIdSet = ((BErrorType) sourceType).typeIdSet;
        if (sourceIdSet == null) {
            return false;
        }
        return sourceIdSet.containsAll(targetType.typeIdSet);
    }

    /** Simple basic types occupy the tag range below NULL_TAG. */
    private static boolean isSimpleBasicType(Type type) {
        return type.getTag() < TypeTags.NULL_TAG;
    }

    private static boolean isHandleType(Type type) {
        return type.getTag() == TypeTags.HANDLE_TAG;
    }

    /**
     * Deep value equality check for anydata.
     *
     * @param lhsValue The value on the left hand side
     * @param rhsValue The value on the right hand side
     * @param checkedValues Structured value pairs already compared or being compared
     * @return True if values are equal, else false.
*/
    // Deep `==` semantics for anydata: dispatches on the left value's type tag; numeric values compare
    // across int/byte representations, NaN equals NaN for float, structures recurse with a cycle guard.
    private static boolean isEqual(Object lhsValue, Object rhsValue, List<ValuePair> checkedValues) {
        if (lhsValue == rhsValue) {
            return true;
        }
        if (null == lhsValue || null == rhsValue) {
            return false;
        }
        int lhsValTypeTag = getType(lhsValue).getTag();
        int rhsValTypeTag = getType(rhsValue).getTag();
        switch (lhsValTypeTag) {
            case TypeTags.STRING_TAG:
            case TypeTags.BOOLEAN_TAG:
                return lhsValue.equals(rhsValue);
            case TypeTags.INT_TAG:
                if (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {
                    return false;
                }
                return lhsValue.equals(((Number) rhsValue).longValue());
            case TypeTags.BYTE_TAG:
                if (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {
                    return false;
                }
                // NOTE(review): byteValue() truncates an int outside 0-255 before comparing — confirm an
                // int rhs is always within byte range here, otherwise e.g. 44 == 300 would compare true.
                return ((Number) lhsValue).byteValue() == ((Number) rhsValue).byteValue();
            case TypeTags.FLOAT_TAG:
                if (rhsValTypeTag != TypeTags.FLOAT_TAG) {
                    return false;
                }
                if (Double.isNaN((Double) lhsValue) && Double.isNaN((Double) rhsValue)) {
                    // Ballerina float equality treats NaN as equal to NaN (unlike Java's ==).
                    return true;
                }
                return ((Number) lhsValue).doubleValue() == ((Number) rhsValue).doubleValue();
            case TypeTags.DECIMAL_TAG:
                if (rhsValTypeTag != TypeTags.DECIMAL_TAG) {
                    return false;
                }
                return checkDecimalEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);
            case TypeTags.XML_TAG:
                // An XML value tagged XML_TAG may be backed by a text node or a sequence.
                if (lhsValue instanceof XmlText) {
                    return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue,
                            (XmlValue) rhsValue);
                }
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlSequence) lhsValue,
                        (XmlValue) rhsValue);
            case TypeTags.XML_ELEMENT_TAG:
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlItem) lhsValue,
                        (XmlValue) rhsValue);
            case TypeTags.XML_COMMENT_TAG:
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlComment) lhsValue,
                        (XmlValue) rhsValue);
            case TypeTags.XML_TEXT_TAG:
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue,
                        (XmlValue) rhsValue);
            case TypeTags.XML_PI_TAG:
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlPi) lhsValue,
                        (XmlValue) rhsValue);
            case TypeTags.MAP_TAG:
            case TypeTags.JSON_TAG:
            case TypeTags.RECORD_TYPE_TAG:
                return isMappingType(rhsValTypeTag) && isEqual((MapValueImpl) lhsValue,
                        (MapValueImpl) rhsValue, checkedValues);
            case TypeTags.TUPLE_TAG:
            case TypeTags.ARRAY_TAG:
                return isListType(rhsValTypeTag) && isEqual((ArrayValue) lhsValue, (ArrayValue) rhsValue,
                        checkedValues);
            case TypeTags.ERROR_TAG:
                return rhsValTypeTag == TypeTags.ERROR_TAG && isEqual((ErrorValue) lhsValue,
                        (ErrorValue) rhsValue, checkedValues);
            case TypeTags.SERVICE_TAG:
                // Services have no deep-equality semantics; falls through to false.
                break;
            case TypeTags.TABLE_TAG:
                return rhsValTypeTag == TypeTags.TABLE_TAG && isEqual((TableValueImpl) lhsValue,
                        (TableValueImpl) rhsValue, checkedValues);
        }
        return false;
    }

    /** True for the list-shaped tags (array and tuple). */
    private static boolean isListType(int typeTag) {
        return typeTag == TypeTags.ARRAY_TAG || typeTag == TypeTags.TUPLE_TAG;
    }

    /** True for the mapping-shaped tags (map, record and json). */
    private static boolean isMappingType(int typeTag) {
        return typeTag == TypeTags.MAP_TAG || typeTag == TypeTags.RECORD_TYPE_TAG ||
                typeTag == TypeTags.JSON_TAG;
    }

    /**
     * Deep equality check for an array/tuple.
     *
     * @param lhsList The array/tuple on the left hand side
     * @param rhsList The array/tuple on the right hand side
     * @param checkedValues Structured value pairs already compared or being compared
     * @return True if the array/tuple values are equal, else false.
     */
    private static boolean isEqual(ArrayValue lhsList, ArrayValue rhsList, List<ValuePair> checkedValues) {
        // Pair guard: a pair already under comparison is assumed equal (breaks cycles).
        ValuePair compValuePair = new ValuePair(lhsList, rhsList);
        if (checkedValues.contains(compValuePair)) {
            return true;
        }
        checkedValues.add(compValuePair);
        if (lhsList.size() != rhsList.size()) {
            return false;
        }
        for (int i = 0; i < lhsList.size(); i++) {
            if (!isEqual(lhsList.get(i), rhsList.get(i), checkedValues)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Deep equality check for a map.
     *
     * @param lhsMap Map on the left hand side
     * @param rhsMap Map on the right hand side
     * @param checkedValues Structured value pairs already compared or being compared
     * @return True if the map values are equal, else false.
*/ private static boolean isEqual(MapValueImpl lhsMap, MapValueImpl rhsMap, List<ValuePair> checkedValues) { ValuePair compValuePair = new ValuePair(lhsMap, rhsMap); if (checkedValues.contains(compValuePair)) { return true; } checkedValues.add(compValuePair); if (lhsMap.size() != rhsMap.size()) { return false; } if (!lhsMap.keySet().containsAll(rhsMap.keySet())) { return false; } Iterator<Map.Entry<BString, Object>> mapIterator = lhsMap.entrySet().iterator(); while (mapIterator.hasNext()) { Map.Entry<BString, Object> lhsMapEntry = mapIterator.next(); if (!isEqual(lhsMapEntry.getValue(), rhsMap.get(lhsMapEntry.getKey()), checkedValues)) { return false; } } return true; } /** * Deep equality check for a table. * * @param lhsTable Table on the left hand side * @param rhsTable Table on the right hand side * @param checkedValues Structured value pairs already compared or being compared * @return True if the table values are equal, else false. */ private static boolean isEqual(TableValueImpl lhsTable, TableValueImpl rhsTable, List<ValuePair> checkedValues) { ValuePair compValuePair = new ValuePair(lhsTable, rhsTable); if (checkedValues.contains(compValuePair)) { return true; } checkedValues.add(compValuePair); if (lhsTable.size() != rhsTable.size()) { return false; } boolean isLhsKeyedTable = ((BTableType) lhsTable.getType()).getFieldNames() != null && ((BTableType) lhsTable.getType()).getFieldNames().length > 0; boolean isRhsKeyedTable = ((BTableType) rhsTable.getType()).getFieldNames() != null && ((BTableType) rhsTable.getType()).getFieldNames().length > 0; Object[] lhsTableValues = lhsTable.values().toArray(); Object[] rhsTableValues = rhsTable.values().toArray(); if (isLhsKeyedTable == isRhsKeyedTable) { for (int i = 0; i < lhsTableValues.length; i++) { if (!isEqual(lhsTableValues[i], rhsTableValues[i], checkedValues)) { return false; } } return true; } return false; } /** * Deep equality check for error. 
* * @param lhsError The error on the left hand side * @param rhsError The error on the right hand side * @param checkedValues Errors already compared or being compared * @return True if the error values are equal, else false. */ private static boolean isEqual(ErrorValue lhsError, ErrorValue rhsError, List<ValuePair> checkedValues) { ValuePair compValuePair = new ValuePair(lhsError, rhsError); if (checkedValues.contains(compValuePair)) { return true; } checkedValues.add(compValuePair); return isEqual(lhsError.getMessage(), rhsError.getMessage(), checkedValues) && isEqual((MapValueImpl) lhsError.getDetails(), (MapValueImpl) rhsError.getDetails(), checkedValues) && isEqual(lhsError.getCause(), rhsError.getCause(), checkedValues); } /** * Deep equality check for XML Sequence. * * @param lhsXMLSequence The XML sequence on the left hand side * @param rhsXml The XML on the right hand side * @return True if the XML values are equal, else false. */ private static boolean isEqual(XmlSequence lhsXMLSequence, XmlValue rhsXml) { if (rhsXml instanceof XmlSequence) { XmlSequence rhsXMLSequence = (XmlSequence) rhsXml; return isXMLSequenceChildrenEqual(lhsXMLSequence.getChildrenList(), rhsXMLSequence.getChildrenList()); } if (rhsXml instanceof XmlItem) { return lhsXMLSequence.getChildrenList().size() == 1 && isEqual(lhsXMLSequence.getChildrenList().get(0), rhsXml); } return lhsXMLSequence.getChildrenList().isEmpty() && TypeUtils.getType(rhsXml) == PredefinedTypes.TYPE_XML_NEVER; } /** * Deep equality check for XML item. * * @param lhsXMLItem The XML item on the left hand side * @param rhsXml The XML on the right hand side * @return True if the XML values are equal, else false. 
*/ private static boolean isEqual(XmlItem lhsXMLItem, XmlValue rhsXml) { if (rhsXml instanceof XmlItem) { XmlItem rhsXMLItem = (XmlItem) rhsXml; if (!(rhsXMLItem.getQName().equals(lhsXMLItem.getQName()))) { return false; } if (!(rhsXMLItem.getAttributesMap().entrySet().equals(lhsXMLItem.getAttributesMap().entrySet()))) { return false; } return isEqual(rhsXMLItem.getChildrenSeq(), lhsXMLItem.getChildrenSeq()); } if (rhsXml instanceof XmlSequence) { XmlSequence rhsXMLSequence = (XmlSequence) rhsXml; return rhsXMLSequence.getChildrenList().size() == 1 && isEqual(lhsXMLItem, rhsXMLSequence.getChildrenList().get(0)); } return false; } /** * Deep equality check for XML Text. * * @param lhsXMLText The XML text on the left hand side * @param rhsXml The XML on the right hand side * @return True if the XML values are equal, else false. */ private static boolean isEqual(XmlText lhsXMLText, XmlValue rhsXml) { if (rhsXml instanceof XmlText) { XmlText rhsXMLText = (XmlText) rhsXml; return lhsXMLText.getTextValue().equals(rhsXMLText.getTextValue()); } return lhsXMLText.getType() == PredefinedTypes.TYPE_XML_NEVER && rhsXml instanceof XmlSequence && ((XmlSequence) rhsXml).getChildrenList().isEmpty(); } /** * Deep equality check for XML Comment. * * @param lhsXMLComment The XML comment on the left hand side * @param rhsXml The XML on the right hand side * @return True if the XML values are equal, else false. */ private static boolean isEqual(XmlComment lhsXMLComment, XmlValue rhsXml) { if (!(rhsXml instanceof XmlComment)) { return false; } XmlComment rhXMLComment = (XmlComment) rhsXml; return lhsXMLComment.getTextValue().equals(rhXMLComment.getTextValue()); } /** * Deep equality check for XML Processing Instruction. * * @param lhsXMLPi The XML processing instruction on the left hand side * @param rhsXml The XML on the right hand side * @return True if the XML values are equal, else false. 
*/ private static boolean isEqual(XmlPi lhsXMLPi, XmlValue rhsXml) { if (!(rhsXml instanceof XmlPi)) { return false; } XmlPi rhsXMLPi = (XmlPi) rhsXml; return lhsXMLPi.getData().equals(rhsXMLPi.getData()) && lhsXMLPi.getTarget().equals(rhsXMLPi.getTarget()); } private static boolean isXMLSequenceChildrenEqual(List<BXml> lhsList, List<BXml> rhsList) { if (lhsList.size() != rhsList.size()) { return false; } for (int i = 0; i < lhsList.size(); i++) { if (!isEqual(lhsList.get(i), rhsList.get(i))) { return false; } } return true; } /** * Type vector of size two, to hold the source and the target types. * * @since 0.995.0 */ private static class TypePair { Type sourceType; Type targetType; public TypePair(Type sourceType, Type targetType) { this.sourceType = sourceType; this.targetType = targetType; } @Override public boolean equals(Object obj) { if (!(obj instanceof TypePair)) { return false; } TypePair other = (TypePair) obj; return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType); } } /** * Check the reference equality of handle values. * * @param lhsValue The value on the left hand side * @param rhsValue The value on the right hand side * @return True if values are equal, else false. */ private static boolean isHandleValueRefEqual(Object lhsValue, Object rhsValue) { HandleValue lhsHandle = (HandleValue) lhsValue; HandleValue rhsHandle = (HandleValue) rhsValue; return lhsHandle.getValue() == rhsHandle.getValue(); } /** * Unordered value vector of size two, to hold two values being compared. 
* * @since 0.995.0 */ private static class ValuePair { ArrayList<Object> valueList = new ArrayList<>(2); ValuePair(Object valueOne, Object valueTwo) { valueList.add(valueOne); valueList.add(valueTwo); } @Override public boolean equals(Object otherPair) { if (!(otherPair instanceof ValuePair)) { return false; } ArrayList otherList = ((ValuePair) otherPair).valueList; ArrayList currentList = valueList; if (otherList.size() != currentList.size()) { return false; } for (int i = 0; i < otherList.size(); i++) { if (!otherList.get(i).equals(currentList.get(i))) { return false; } } return true; } } /** * Checks whether a given {@link BType} has an implicit initial value or not. * @param type {@link BType} to be analyzed. * @return whether there's an implicit initial value or not. */ public static boolean hasFillerValue(Type type) { return hasFillerValue(type, new ArrayList<>()); } private static boolean hasFillerValue(Type type, List<Type> unanalyzedTypes) { if (type == null) { return true; } if (type.getTag() < TypeTags.RECORD_TYPE_TAG && !(type.getTag() == TypeTags.CHAR_STRING_TAG || type.getTag() == TypeTags.NEVER_TAG)) { return true; } switch (type.getTag()) { case TypeTags.STREAM_TAG: case TypeTags.MAP_TAG: case TypeTags.ANY_TAG: return true; case TypeTags.ARRAY_TAG: return checkFillerValue((BArrayType) type, unanalyzedTypes); case TypeTags.FINITE_TYPE_TAG: return checkFillerValue((BFiniteType) type); case TypeTags.OBJECT_TYPE_TAG: return checkFillerValue((BObjectType) type); case TypeTags.RECORD_TYPE_TAG: return checkFillerValue((BRecordType) type, unanalyzedTypes); case TypeTags.TUPLE_TAG: return checkFillerValue((BTupleType) type, unanalyzedTypes); case TypeTags.UNION_TAG: return checkFillerValue((BUnionType) type, unanalyzedTypes); default: return false; } } private static boolean checkFillerValue(BTupleType tupleType, List<Type> unAnalyzedTypes) { if (unAnalyzedTypes.contains(tupleType)) { return true; } unAnalyzedTypes.add(tupleType); for (Type member : 
tupleType.getTupleTypes()) { if (!hasFillerValue(member, unAnalyzedTypes)) { return false; } } return true; } private static boolean checkFillerValue(BUnionType type, List<Type> unAnalyzedTypes) { if (unAnalyzedTypes.contains(type)) { return true; } unAnalyzedTypes.add(type); if (type.isNullable()) { return true; } Iterator<Type> iterator = type.getMemberTypes().iterator(); Type firstMember; for (firstMember = iterator.next(); iterator.hasNext(); ) { if (!isSameType(firstMember, iterator.next())) { return false; } } return isValueType(firstMember) && hasFillerValue(firstMember); } private static boolean checkFillerValue(BRecordType type, List<Type> unAnalyzedTypes) { if (unAnalyzedTypes.contains(type)) { return true; } unAnalyzedTypes.add(type); for (Field field : type.getFields().values()) { if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) { continue; } if (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED)) { continue; } return false; } return true; } private static boolean checkFillerValue(BArrayType type, List<Type> unAnalyzedTypes) { return type.getState() == ArrayState.OPEN || hasFillerValue(type.getElementType(), unAnalyzedTypes); } private static boolean checkFillerValue(BObjectType type) { if (type.getTag() == TypeTags.SERVICE_TAG) { return false; } else { MethodType generatedInitializer = type.generatedInitializer; if (generatedInitializer == null) { return false; } FunctionType initFuncType = generatedInitializer.getType(); boolean noParams = initFuncType.getParameterTypes().length == 0; boolean nilReturn = initFuncType.getReturnType().getTag() == TypeTags.NULL_TAG; return noParams && nilReturn; } } private static boolean checkFillerValue(BFiniteType type) { for (Object value: type.valueSpace) { if (value == null) { return true; } } if (type.valueSpace.size() == 1) { return true; } Object firstElement = type.valueSpace.iterator().next(); for (Object value : type.valueSpace) { if (value.getClass() != firstElement.getClass()) { 
return false; } } if (firstElement instanceof String) { return containsElement(type.valueSpace, "\"\""); } else if (firstElement instanceof Byte || firstElement instanceof Integer || firstElement instanceof Long) { return containsElement(type.valueSpace, "0"); } else if (firstElement instanceof Float || firstElement instanceof Double || firstElement instanceof BigDecimal) { return containsElement(type.valueSpace, "0.0"); } else if (firstElement instanceof Boolean) { return containsElement(type.valueSpace, "false"); } else { return false; } } private static boolean containsElement(Set<Object> valueSpace, String e) { for (Object value : valueSpace) { if (value != null && value.toString().equals(e)) { return true; } } return false; } private static boolean containsType(Set<Object> valueSpace, Type type) { for (Object value : valueSpace) { if (!isSameType(type, getType(value))) { return false; } } return true; } public static Object handleAnydataValues(Object sourceVal, Type targetType) { if (sourceVal != null && !(sourceVal instanceof Number) && !(sourceVal instanceof BString) && !(sourceVal instanceof Boolean) && !(sourceVal instanceof BValue)) { throw ErrorUtils.createJToBTypeCastError(sourceVal.getClass(), targetType); } return sourceVal; } private TypeChecker() { } }
class TypeChecker {

    /**
     * Casts {@code sourceVal} to {@code targetType}, returning the value unchanged when it
     * already belongs to the target type, otherwise attempting a simple-value conversion.
     *
     * @throws BError (type-cast error) when no conversion applies.
     */
    public static Object checkCast(Object sourceVal, Type targetType) {
        if (checkIsType(sourceVal, targetType)) {
            return sourceVal;
        }
        Type sourceType = getType(sourceVal);
        // Simple basic type -> simple basic type: direct numeric/string conversion.
        if (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() <= TypeTags.BOOLEAN_TAG) {
            return TypeConverter.castValues(targetType, sourceVal);
        }
        // Simple basic type -> union: first member that converts wins.
        if (sourceType.getTag() <= TypeTags.BOOLEAN_TAG && targetType.getTag() == TypeTags.UNION_TAG) {
            for (Type memberType : ((BUnionType) targetType).getMemberTypes()) {
                try {
                    return TypeConverter.castValues(memberType, sourceVal);
                } catch (Exception e) {
                    // Ignored: try the next union member.
                }
            }
        }
        throw ErrorUtils.createTypeCastError(sourceVal, targetType);
    }

    // anyTo*: narrow "any" to a specific Ballerina simple type, raising a
    // type-cast error on mismatch.
    public static long anyToInt(Object sourceVal) {
        return TypeConverter.anyToIntCast(sourceVal,
                () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT));
    }

    public static long anyToSigned32(Object sourceVal) {
        return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_32,
                () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT_SIGNED_32));
    }

    public static long anyToSigned16(Object sourceVal) {
        return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_16,
                () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT_SIGNED_16));
    }

    public static long anyToSigned8(Object sourceVal) {
        return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_SIGNED_8,
                () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT_SIGNED_8));
    }

    public static long anyToUnsigned32(Object sourceVal) {
        return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_32,
                () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT_UNSIGNED_32));
    }

    public static long anyToUnsigned16(Object sourceVal) {
        return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_16,
                () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT_UNSIGNED_16));
    }

    public static long anyToUnsigned8(Object sourceVal) {
        return TypeConverter.anyToIntSubTypeCast(sourceVal, TYPE_INT_UNSIGNED_8,
                () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_INT_UNSIGNED_8));
    }

    public static double anyToFloat(Object sourceVal) {
        return TypeConverter.anyToFloatCast(sourceVal,
                () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_FLOAT));
    }

    public static boolean anyToBoolean(Object sourceVal) {
        return TypeConverter.anyToBooleanCast(sourceVal,
                () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_BOOLEAN));
    }

    public static int anyToByte(Object sourceVal) {
        return TypeConverter.anyToByteCast(sourceVal,
                () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_BYTE));
    }

    public static DecimalValue anyToDecimal(Object sourceVal) {
        return TypeConverter.anyToDecimalCast(sourceVal,
                () -> ErrorUtils.createTypeCastError(sourceVal, TYPE_DECIMAL));
    }

    // anyToJ*: narrow "any" to a Java primitive for interop, raising a B-to-J
    // type-cast error on mismatch.
    public static byte anyToJByte(Object sourceVal) {
        return TypeConverter.anyToJByteCast(sourceVal,
                () -> ErrorUtils.createBToJTypeCastError(sourceVal, "byte"));
    }

    public static char anyToJChar(Object sourceVal) {
        return TypeConverter.anyToJCharCast(sourceVal,
                () -> ErrorUtils.createBToJTypeCastError(sourceVal, "char"));
    }

    public static short anyToJShort(Object sourceVal) {
        return TypeConverter.anyToJShortCast(sourceVal,
                () -> ErrorUtils.createBToJTypeCastError(sourceVal, "short"));
    }

    public static int anyToJInt(Object sourceVal) {
        return TypeConverter.anyToJIntCast(sourceVal,
                () -> ErrorUtils.createBToJTypeCastError(sourceVal, "int"));
    }

    public static long anyToJLong(Object sourceVal) {
        return TypeConverter.anyToJLongCast(sourceVal,
                () -> ErrorUtils.createBToJTypeCastError(sourceVal, "long"));
    }

    public static float anyToJFloat(Object sourceVal) {
        return TypeConverter.anyToJFloatCast(sourceVal,
                () -> ErrorUtils.createBToJTypeCastError(sourceVal, "float"));
    }

    public static double anyToJDouble(Object sourceVal) {
        return TypeConverter.anyToJDoubleCast(sourceVal,
                () -> ErrorUtils.createBToJTypeCastError(sourceVal, "double"));
    }

    public static boolean anyToJBoolean(Object sourceVal) {
        return TypeConverter.anyToJBooleanCast(sourceVal,
                () -> ErrorUtils.createBToJTypeCastError(sourceVal, "boolean"));
    }

    /**
     * Check whether a given value belongs to the given type.
     *
     * @param sourceVal value to check the type
     * @param targetType type to be test against
     * @return true if the value belongs to the given type, false otherwise
     */
    public static boolean checkIsType(Object sourceVal, Type targetType) {
        return checkIsType(sourceVal, getType(sourceVal), targetType);
    }

    /**
     * Check whether a given value belongs to the given type.
     *
     * @param sourceVal value to check the type
     * @param sourceType type of the value
     * @param targetType type to be test against
     * @return true if the value belongs to the given type, false otherwise
     */
    public static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType) {
        if (checkIsType(sourceVal, sourceType, targetType, null)) {
            return true;
        }
        // XML sequences are checked shape-wise even when mutable.
        if (sourceType.getTag() == TypeTags.XML_TAG) {
            XmlValue val = (XmlValue) sourceVal;
            if (val.getNodeType() == XmlNodeType.SEQUENCE) {
                return checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false);
            }
        }
        // Mutable values cannot rely on a shape ("looks like") check.
        if (isMutable(sourceVal, sourceType)) {
            return false;
        }
        return checkIsLikeOnValue(sourceVal, sourceType, targetType, new ArrayList<>(), false);
    }

    /**
     * Check whether a given value has the same shape as the given type.
     *
     * @param sourceValue value to check the shape
     * @param targetType type to check the shape against
     * @return true if the value has the same shape as the given type; false otherwise
     */
    public static boolean checkIsLikeType(Object sourceValue, Type targetType) {
        return checkIsLikeType(sourceValue, targetType, false);
    }

    /**
     * Check whether a given value has the same shape as the given type.
     *
     * @param sourceValue value to check the shape
     * @param targetType type to check the shape against
     * @param allowNumericConversion whether numeric conversion is allowed to change the shape to the target type
     * @return true if the value has the same shape as the given type; false otherwise
     */
    public static boolean checkIsLikeType(Object sourceValue, Type targetType, boolean allowNumericConversion) {
        return checkIsLikeType(sourceValue, targetType, new ArrayList<>(), allowNumericConversion);
    }

    /**
     * Check whether two types are the same.
     *
     * @param sourceType type to test
     * @param targetType type to test against
     * @return true if the two types are same; false otherwise
     */
    public static boolean isSameType(Type sourceType, Type targetType) {
        int sourceTypeTag = sourceType.getTag();
        int targetTypeTag = targetType.getTag();
        if (sourceType == targetType) {
            return true;
        }
        if (sourceTypeTag == targetTypeTag) {
            if (sourceType.equals(targetType)) {
                return true;
            }
            switch (sourceTypeTag) {
                case TypeTags.ARRAY_TAG:
                    return checkArrayEquivalent(sourceType, targetType);
                case TypeTags.FINITE_TYPE_TAG:
                    // Two finite types are the same when value spaces match in size
                    // and every source value's type occurs throughout the target space.
                    Set<Object> sourceValueSpace = ((BFiniteType) sourceType).valueSpace;
                    Set<Object> targetValueSpace = ((BFiniteType) targetType).valueSpace;
                    if (sourceValueSpace.size() != targetValueSpace.size()) {
                        return false;
                    }
                    for (Object sourceVal : sourceValueSpace) {
                        if (!containsType(targetValueSpace, getType(sourceVal))) {
                            return false;
                        }
                    }
                    return true;
                default:
                    break;
            }
        }
        // A finite type equals a non-finite type when every value's type matches it.
        if (sourceTypeTag == TypeTags.FINITE_TYPE_TAG) {
            for (Object value : ((BFiniteType) sourceType).valueSpace) {
                if (!isSameType(getType(value), targetType)) {
                    return false;
                }
            }
            return true;
        }
        if (targetTypeTag == TypeTags.FINITE_TYPE_TAG) {
            for (Object value : ((BFiniteType) targetType).valueSpace) {
                if (!isSameType(getType(value), sourceType)) {
                    return false;
                }
            }
            return true;
        }
        return false;
    }

    /**
     * Maps a runtime value to its Ballerina type. Boxed Java primitives map to the
     * corresponding basic types; everything else is a BValue carrying its own type.
     */
    public static Type getType(Object value) {
        if (value == null) {
            return TYPE_NULL;
        } else if (value instanceof Number) {
            if (value instanceof Long) {
                return TYPE_INT;
            } else if (value instanceof Double) {
                return TYPE_FLOAT;
            } else if (value instanceof Integer || value instanceof Byte) {
                return TYPE_BYTE;
            }
        } else if (value instanceof BString) {
            return TYPE_STRING;
        } else if (value instanceof Boolean) {
            return TYPE_BOOLEAN;
        }
        return ((BValue) value).getType();
    }

    /**
     * Deep value equality check for anydata.
     *
     * @param lhsValue The value on the left hand side
     * @param rhsValue The value on the right hand side
     * @return True if values are equal, else false.
     */
    public static boolean isEqual(Object lhsValue, Object rhsValue) {
        return isEqual(lhsValue, rhsValue, new ArrayList<>());
    }

    /**
     * Check if two decimal values are equal in value.
     *
     * @param lhsValue The value on the left hand side
     * @param rhsValue The value of the right hand side
     * @return True if values are equal, else false.
     */
    public static boolean checkDecimalEqual(DecimalValue lhsValue, DecimalValue rhsValue) {
        // compareTo treats numerically equal values with different scales as equal.
        return isDecimalRealNumber(lhsValue) && isDecimalRealNumber(rhsValue) &&
                lhsValue.decimalValue().compareTo(rhsValue.decimalValue()) == 0;
    }

    /**
     * Check if two decimal values are exactly equal.
     *
     * @param lhsValue The value on the left-hand side
     * @param rhsValue The value of the right-hand side
     * @return True if values are exactly equal, else false.
     */
    public static boolean checkDecimalExactEqual(DecimalValue lhsValue, DecimalValue rhsValue) {
        // equals (unlike compareTo) also requires identical scale.
        return isDecimalRealNumber(lhsValue) && isDecimalRealNumber(rhsValue)
                && lhsValue.decimalValue().equals(rhsValue.decimalValue());
    }

    /**
     * Checks if the given decimal number is a real number.
     *
     * @param decimalValue The decimal value being checked
     * @return True if the decimal value is a real number.
     */
    private static boolean isDecimalRealNumber(DecimalValue decimalValue) {
        return decimalValue.valueKind == DecimalValueKind.ZERO || decimalValue.valueKind == DecimalValueKind.OTHER;
    }

    /**
     * Reference equality check for XML values. Text nodes fall back to deep
     * equality; sequences are compared child-by-child by reference.
     *
     * @param lhsValue The value on the left hand side
     * @param rhsValue The value on the right hand side
     * @return True if values are reference equal (or, for text nodes, value equal), else false.
     */
    private static boolean isXMLValueRefEqual(XmlValue lhsValue, XmlValue rhsValue) {
        if (lhsValue.getNodeType() != rhsValue.getNodeType()) {
            return false;
        }
        if (lhsValue.getNodeType() == XmlNodeType.SEQUENCE && rhsValue.getNodeType() == XmlNodeType.SEQUENCE) {
            return isXMLSequenceRefEqual((XmlSequence) lhsValue, (XmlSequence) rhsValue);
        }
        if (lhsValue.getNodeType() == XmlNodeType.TEXT && rhsValue.getNodeType() == XmlNodeType.TEXT) {
            return isEqual(lhsValue, rhsValue);
        }
        return false;
    }

    // Child-wise reference equality for sequences; also requires equal lengths
    // (final hasNext comparison catches a longer side).
    private static boolean isXMLSequenceRefEqual(XmlSequence lhsValue, XmlSequence rhsValue) {
        Iterator<BXml> lhsIter = lhsValue.getChildrenList().iterator();
        Iterator<BXml> rhsIter = rhsValue.getChildrenList().iterator();
        while (lhsIter.hasNext() && rhsIter.hasNext()) {
            BXml l = lhsIter.next();
            BXml r = rhsIter.next();
            if (!(l == r || isXMLValueRefEqual((XmlValue) l, (XmlValue) r))) {
                return false;
            }
        }
        return lhsIter.hasNext() == rhsIter.hasNext();
    }

    /**
     * Get the typedesc of a value.
     *
     * @param value Value
     * @return type desc associated with the value
     */
    public static TypedescValue getTypedesc(Object value) {
        Type type = TypeChecker.getType(value);
        if (type == null) {
            return null;
        }
        // Simple values are described by a singleton finite type of that value.
        if (isSimpleBasicType(type)) {
            return new TypedescValueImpl(new BFiniteType(value.toString(), Set.of(value), 0));
        }
        if (value instanceof RefValue) {
            return (TypedescValue) ((RefValue) value).getTypedesc();
        }
        return new TypedescValueImpl(type);
    }

    /**
     * Get the annotation value if present.
     *
     * @param typedescValue The typedesc value
     * @param annotTag The annot-tag-reference
     * @return the annotation value if present, nil else
     */
    public static Object getAnnotValue(TypedescValue typedescValue, String annotTag) {
        Type describingType = typedescValue.getDescribingType();
        if (!(describingType instanceof BAnnotatableType)) {
            return null;
        }
        return ((BAnnotatableType) describingType).getAnnotation(StringUtils.fromString(annotTag));
    }

    // BString overload of the above.
    public static Object getAnnotValue(TypedescValue typedescValue, BString annotTag) {
        Type describingType = typedescValue.getDescribingType();
        if (!(describingType instanceof BAnnotatableType)) {
            return null;
        }
        return ((BAnnotatableType) describingType).getAnnotation(annotTag);
    }

    /**
     * Check whether a given type is equivalent to a target type.
     *
     * @param sourceType type to check
     * @param targetType type to compare with
     * @return flag indicating the equivalence of the two types
     */
    public static boolean checkIsType(Type sourceType, Type targetType) {
        return checkIsType(sourceType, targetType, (List<TypePair>) null);
    }

    @Deprecated
    public static boolean checkIsType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
        // Identity or structural equality of same-tagged types short-circuits.
        if (sourceType == targetType ||
                (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) {
            return true;
        }
        // never (and structures with a required never member) is a subtype of anything.
        if (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(sourceType)) {
            return true;
        }
        // A mutable source can never satisfy a readonly target.
        if (targetType.isReadOnly() && !sourceType.isReadOnly()) {
            return false;
        }
        int sourceTypeTag = sourceType.getTag();
        int targetTypeTag = targetType.getTag();
        // Intersections, parameterized and readonly types are normalized to their
        // effective/underlying types before the tag-based dispatch below.
        if (sourceTypeTag == TypeTags.INTERSECTION_TAG) {
            return checkIsType(((BIntersectionType) sourceType).getEffectiveType(),
                    targetTypeTag != TypeTags.INTERSECTION_TAG ? targetType :
                            ((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes);
        }
        if (targetTypeTag == TypeTags.INTERSECTION_TAG) {
            return checkIsType(sourceType, ((BIntersectionType) targetType).getEffectiveType(), unresolvedTypes);
        }
        if (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) {
            if (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) {
                return checkIsType(((BParameterizedType) sourceType).getParamValueType(), targetType, unresolvedTypes);
            }
            return checkIsType(((BParameterizedType) sourceType).getParamValueType(),
                    ((BParameterizedType) targetType).getParamValueType(), unresolvedTypes);
        }
        if (sourceTypeTag == TypeTags.READONLY_TAG) {
            return checkIsType(PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE, targetType, unresolvedTypes);
        }
        if (targetTypeTag == TypeTags.READONLY_TAG) {
            return checkIsType(sourceType, PredefinedTypes.ANY_AND_READONLY_OR_ERROR_TYPE, unresolvedTypes);
        }
        // A union source must have every member be a subtype of the target.
        if (sourceTypeTag == TypeTags.UNION_TAG) {
            return isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes);
        }
        // A finite source matches simple/finite targets when every value belongs.
        if (sourceTypeTag == TypeTags.FINITE_TYPE_TAG && (targetTypeTag == TypeTags.FINITE_TYPE_TAG ||
                targetTypeTag <= TypeTags.NULL_TAG || targetTypeTag == TypeTags.XML_TEXT_TAG)) {
            return isFiniteTypeMatch((BFiniteType) sourceType, targetType);
        }
        switch (targetTypeTag) {
            case TypeTags.BYTE_TAG:
            case TypeTags.SIGNED8_INT_TAG:
            case TypeTags.FLOAT_TAG:
            case TypeTags.DECIMAL_TAG:
            case TypeTags.CHAR_STRING_TAG:
            case TypeTags.BOOLEAN_TAG:
            case TypeTags.NULL_TAG:
                return sourceTypeTag == targetTypeTag;
            case TypeTags.STRING_TAG:
                return TypeTags.isStringTypeTag(sourceTypeTag);
            case TypeTags.XML_TEXT_TAG:
                if (sourceTypeTag == TypeTags.XML_TAG) {
                    // xml<never> is the text-only (empty-constraint) XML type.
                    return ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG;
                }
                return sourceTypeTag == targetTypeTag;
            case TypeTags.INT_TAG:
                // int accepts byte and every signed/unsigned int subtype.
                return sourceTypeTag == TypeTags.INT_TAG || sourceTypeTag == TypeTags.BYTE_TAG ||
                        (sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.UNSIGNED32_INT_TAG);
            case TypeTags.SIGNED16_INT_TAG:
                return sourceTypeTag == TypeTags.BYTE_TAG ||
                        (sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED16_INT_TAG);
            case TypeTags.SIGNED32_INT_TAG:
                return sourceTypeTag == TypeTags.BYTE_TAG ||
                        (sourceTypeTag >= TypeTags.SIGNED8_INT_TAG && sourceTypeTag <= TypeTags.SIGNED32_INT_TAG);
            case TypeTags.UNSIGNED8_INT_TAG:
                return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG;
            case TypeTags.UNSIGNED16_INT_TAG:
                return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG ||
                        sourceTypeTag == TypeTags.UNSIGNED16_INT_TAG;
            case TypeTags.UNSIGNED32_INT_TAG:
                return sourceTypeTag == TypeTags.BYTE_TAG || sourceTypeTag == TypeTags.UNSIGNED8_INT_TAG ||
                        sourceTypeTag == TypeTags.UNSIGNED16_INT_TAG || sourceTypeTag == TypeTags.UNSIGNED32_INT_TAG;
            case TypeTags.ANY_TAG:
                return checkIsAnyType(sourceType);
            case TypeTags.ANYDATA_TAG:
                return sourceType.isAnydata();
            case TypeTags.SERVICE_TAG:
                return checkIsServiceType(sourceType, targetType,
                        unresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);
            case TypeTags.HANDLE_TAG:
                return sourceTypeTag == TypeTags.HANDLE_TAG;
            case TypeTags.READONLY_TAG:
                return isInherentlyImmutableType(sourceType) || sourceType.isReadOnly();
            case TypeTags.XML_ELEMENT_TAG:
            case TypeTags.XML_COMMENT_TAG:
            case TypeTags.XML_PI_TAG:
                return targetTypeTag == sourceTypeTag;
            default:
                // Structured targets (map/record/array/tuple/union/object/...) are
                // handled recursively with cycle tracking via unresolvedTypes.
                return checkIsRecursiveType(sourceType, targetType,
                        unresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);
        }
    }

    // Value-aware variant: only record/object sources need the value itself
    // (optional fields are checked against the actual entries).
    private static boolean checkIsType(Object sourceVal, Type sourceType, Type targetType,
                                       List<TypePair> unresolvedTypes) {
        int sourceTypeTag = sourceType.getTag();
        int targetTypeTag = targetType.getTag();
        if (sourceTypeTag != TypeTags.RECORD_TYPE_TAG && sourceTypeTag != TypeTags.OBJECT_TYPE_TAG) {
            return checkIsType(sourceType, targetType);
        }
        if (targetTypeTag == TypeTags.INTERSECTION_TAG) {
            targetType = ((BIntersectionType) targetType).getEffectiveType();
            targetTypeTag = targetType.getTag();
        }
        if (sourceType == targetType ||
                (sourceType.getTag() == targetType.getTag() && sourceType.equals(targetType))) {
            return true;
        }
        if (targetType.isReadOnly() && !sourceType.isReadOnly()) {
            return false;
        }
        switch (targetTypeTag) {
            case TypeTags.ANY_TAG:
                return checkIsAnyType(sourceType);
            case TypeTags.READONLY_TAG:
                return isInherentlyImmutableType(sourceType) || sourceType.isReadOnly();
            default:
                return checkIsRecursiveTypeOnValue(sourceVal, sourceType, targetType, sourceTypeTag, targetTypeTag,
                        unresolvedTypes == null ? new ArrayList<>() : unresolvedTypes);
        }
    }

    // typedesc<S> is a subtype of typedesc<T> iff S is a subtype of T.
    private static boolean checkTypeDescType(Type sourceType, BTypedescType targetType,
                                             List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() != TypeTags.TYPEDESC_TAG) {
            return false;
        }
        BTypedescType sourceTypedesc = (BTypedescType) sourceType;
        return checkIsType(sourceTypedesc.getConstraint(), targetType.getConstraint(), unresolvedTypes);
    }

    // Dispatch for structured target types; each case delegates to the
    // type-specific subtype check.
    private static boolean checkIsRecursiveType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
        switch (targetType.getTag()) {
            case TypeTags.MAP_TAG:
                return checkIsMapType(sourceType, (BMapType) targetType, unresolvedTypes);
            case TypeTags.STREAM_TAG:
                return checkIsStreamType(sourceType, (BStreamType) targetType, unresolvedTypes);
            case TypeTags.TABLE_TAG:
                return checkIsTableType(sourceType, (BTableType) targetType, unresolvedTypes);
            case TypeTags.JSON_TAG:
                return checkIsJSONType(sourceType, unresolvedTypes);
            case TypeTags.RECORD_TYPE_TAG:
                return checkIsRecordType(sourceType, (BRecordType) targetType, unresolvedTypes);
            case TypeTags.FUNCTION_POINTER_TAG:
                return checkIsFunctionType(sourceType, (BFunctionType) targetType);
            case TypeTags.ARRAY_TAG:
                return checkIsArrayType(sourceType, (BArrayType) targetType, unresolvedTypes);
            case TypeTags.TUPLE_TAG:
                return checkIsTupleType(sourceType, (BTupleType) targetType, unresolvedTypes);
            case TypeTags.UNION_TAG:
                return checkIsUnionType(sourceType, (BUnionType) targetType, unresolvedTypes);
            case TypeTags.OBJECT_TYPE_TAG:
                return checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes);
            case TypeTags.FINITE_TYPE_TAG:
                return checkIsFiniteType(sourceType, (BFiniteType) targetType);
            case TypeTags.FUTURE_TAG:
                return checkIsFutureType(sourceType, (BFutureType) targetType, unresolvedTypes);
            case TypeTags.ERROR_TAG:
                return checkIsErrorType(sourceType, (BErrorType) targetType, unresolvedTypes);
            case TypeTags.TYPEDESC_TAG:
                return checkTypeDescType(sourceType, (BTypedescType) targetType, unresolvedTypes);
            case TypeTags.XML_TAG:
                return checkIsXMLType(sourceType, targetType, unresolvedTypes);
            default:
                return false;
        }
    }

    // Value-aware dispatch for record/object sources against structured targets.
    private static boolean checkIsRecursiveTypeOnValue(Object sourceVal, Type sourceType, Type targetType,
                                                       int sourceTypeTag, int targetTypeTag,
                                                       List<TypePair> unresolvedTypes) {
        switch (targetTypeTag) {
            case TypeTags.ANYDATA_TAG:
                if (sourceTypeTag == TypeTags.OBJECT_TYPE_TAG) {
                    return false;
                }
                return checkRecordBelongsToAnydataType((MapValue) sourceVal, (BRecordType) sourceType,
                        unresolvedTypes);
            case TypeTags.MAP_TAG:
                return checkIsMapType(sourceVal, sourceType, (BMapType) targetType, unresolvedTypes);
            case TypeTags.JSON_TAG:
                return checkIsMapType(sourceVal, sourceType,
                        new BMapType(targetType.isReadOnly() ? TYPE_READONLY_JSON : TYPE_JSON), unresolvedTypes);
            case TypeTags.RECORD_TYPE_TAG:
                return checkIsRecordType(sourceVal, sourceType, (BRecordType) targetType, unresolvedTypes);
            case TypeTags.UNION_TAG:
                for (Type type : ((BUnionType) targetType).getMemberTypes()) {
                    if (checkIsType(sourceVal, sourceType, type, unresolvedTypes)) {
                        return true;
                    }
                }
                return false;
            case TypeTags.OBJECT_TYPE_TAG:
                return checkObjectEquivalency(sourceVal, sourceType, (BObjectType) targetType, unresolvedTypes);
            default:
                return false;
        }
    }

    // A finite type is a subtype when every value in the space belongs to the target.
    private static boolean isFiniteTypeMatch(BFiniteType sourceType, Type targetType) {
        for (Object bValue : sourceType.valueSpace) {
            if (!checkIsType(bValue, targetType)) {
                return false;
            }
        }
        return true;
    }

    // A union is a subtype when every member is a subtype of the target.
    private static boolean isUnionTypeMatch(BUnionType sourceType, Type targetType, List<TypePair> unresolvedTypes) {
        for (Type type : sourceType.getMemberTypes()) {
            if (!checkIsType(type, targetType, unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    private static boolean checkIsUnionType(Type sourceType, BUnionType targetType, List<TypePair> unresolvedTypes) {
        // Cycle guard: an in-progress pair is assumed to hold.
        TypePair pair = new TypePair(sourceType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);
        switch (sourceType.getTag()) {
            case TypeTags.UNION_TAG:
            case TypeTags.JSON_TAG:
            case TypeTags.ANYDATA_TAG:
                return isUnionTypeMatch((BUnionType) sourceType, targetType, unresolvedTypes);
            case TypeTags.FINITE_TYPE_TAG:
                return isFiniteTypeMatch((BFiniteType) sourceType, targetType);
            default:
                // Otherwise, membership in any single target member suffices.
                for (Type type : targetType.getMemberTypes()) {
                    if (checkIsType(sourceType, type, unresolvedTypes)) {
                        return true;
                    }
                }
                return false;
        }
    }

    private static boolean checkIsMapType(Type sourceType, BMapType targetType, List<TypePair> unresolvedTypes) {
        Type targetConstrainedType = targetType.getConstrainedType();
        switch (sourceType.getTag()) {
            case TypeTags.MAP_TAG:
                return checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,
                        unresolvedTypes);
            case TypeTags.RECORD_TYPE_TAG:
                // A record is map-compatible when the union of all its field types
                // (plus the rest type, when open) satisfies the map constraint.
                BRecordType recType = (BRecordType) sourceType;
                BUnionType wideTypeUnion = new BUnionType(getWideTypeComponents(recType));
                return checkConstraints(wideTypeUnion, targetConstrainedType, unresolvedTypes);
            default:
                return false;
        }
    }

    // Value-aware map subtype check for record sources.
    private static boolean checkIsMapType(Object sourceVal, Type sourceType, BMapType targetType,
                                          List<TypePair> unresolvedTypes) {
        Type targetConstrainedType = targetType.getConstrainedType();
        switch (sourceType.getTag()) {
            case TypeTags.MAP_TAG:
                return checkConstraints(((BMapType) sourceType).getConstrainedType(), targetConstrainedType,
                        unresolvedTypes);
            case TypeTags.RECORD_TYPE_TAG:
                return checkIsMapType((MapValue) sourceVal, (BRecordType) sourceType, unresolvedTypes,
                        targetConstrainedType);
            default:
                return false;
        }
    }

    private static boolean checkIsMapType(MapValue sourceVal, BRecordType sourceType, List<TypePair> unresolvedTypes,
                                          Type targetConstrainedType) {
        for (Field field : sourceType.getFields().values()) {
            // Mutable (non-readonly) fields are checked by declared type.
            if (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
                if (!checkIsType(field.getFieldType(), targetConstrainedType, unresolvedTypes)) {
                    return false;
                }
                continue;
            }
            BString name = StringUtils.fromString(field.getFieldName());
            // Absent optional readonly fields impose no constraint.
            if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL) && !sourceVal.containsKey(name)) {
                continue;
            }
            // Readonly fields are checked by the actual (immutable) value's shape.
            if (!checkIsLikeType(sourceVal.get(name), targetConstrainedType)) {
                return false;
            }
        }
        if (sourceType.sealed) {
            return true;
        }
        // Open records must also have a conforming rest field type.
        return checkIsType(sourceType.restFieldType, targetConstrainedType, unresolvedTypes);
    }

    private static boolean checkIsXMLType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
        int sourceTag = sourceType.getTag();
        if (sourceTag == TypeTags.FINITE_TYPE_TAG) {
            return isFiniteTypeMatch((BFiniteType) sourceType, targetType);
        }
        BXmlType target = ((BXmlType) targetType);
        if (sourceTag == TypeTags.XML_TAG) {
            // Unwrap nested xml<xml<...>> constraints to the innermost one.
            Type targetConstraint = target.constraint;
            while (target.constraint.getTag() == TypeTags.XML_TAG) {
                target = (BXmlType) target.constraint;
                targetConstraint = target.constraint;
            }
            BXmlType source = (BXmlType) sourceType;
            if (source.constraint.getTag() == TypeTags.NEVER_TAG) {
                // xml<never> (the empty XML type) fits text/never-constrained targets.
                if (targetConstraint.getTag() == TypeTags.UNION_TAG) {
                    return checkIsUnionType(sourceType, (BUnionType) targetConstraint, unresolvedTypes);
                }
                return targetConstraint.getTag() == TypeTags.XML_TEXT_TAG ||
                        targetConstraint.getTag() == TypeTags.NEVER_TAG;
            }
            return checkIsType(source.constraint, targetConstraint, unresolvedTypes);
        }
        if (TypeTags.isXMLTypeTag(sourceTag)) {
            return checkIsType(sourceType, target.constraint, unresolvedTypes);
        }
        return false;
    }

    // Field types of a record, plus the rest type when the record is open.
    private static List<Type> getWideTypeComponents(BRecordType recType) {
        List<Type> types = new ArrayList<>();
        for (Field f : recType.getFields().values()) {
            types.add(f.getFieldType());
        }
        if (!recType.sealed) {
            types.add(recType.restFieldType);
        }
        return types;
    }

    private static boolean checkIsStreamType(Type sourceType, BStreamType targetType, List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() != TypeTags.STREAM_TAG) {
            return false;
        }
        return checkConstraints(((BStreamType) sourceType).getConstrainedType(), targetType.getConstrainedType(),
                unresolvedTypes)
                && checkConstraints(((BStreamType) sourceType).getCompletionType(), targetType.getCompletionType(),
                                   unresolvedTypes);
    }

    private static boolean checkIsTableType(Type sourceType, BTableType targetType, List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() != TypeTags.TABLE_TAG) {
            return false;
        }

        BTableType srcTableType = (BTableType) sourceType;

        if (!checkConstraints(srcTableType.getConstrainedType(), targetType.getConstrainedType(), unresolvedTypes)) {
            return false;
        }

        if (targetType.getKeyType() == null && targetType.getFieldNames() == null) {
            return true;
        }

        if (targetType.getKeyType() != null) {
            if (srcTableType.getKeyType() != null &&
                    (checkConstraints(srcTableType.getKeyType(), targetType.getKeyType(), unresolvedTypes))) {
                return true;
            }

            if (srcTableType.getFieldNames() == null) {
                return false;
            }

            // Source key is given via field names: derive the effective key type from the
            // constraint record's field types and compare that against the target key type.
            List<Type> fieldTypes = new ArrayList<>();
            Arrays.stream(srcTableType.getFieldNames()).forEach(field -> fieldTypes
                    .add(Objects.requireNonNull(getTableConstraintField(srcTableType.getConstrainedType(), field))
                                 .getFieldType()));

            if (fieldTypes.size() == 1) {
                return checkConstraints(fieldTypes.get(0), targetType.getKeyType(), unresolvedTypes);
            }

            // Multiple key fields form a tuple key type.
            BTupleType tupleType = new BTupleType(fieldTypes);
            return checkConstraints(tupleType, targetType.getKeyType(), unresolvedTypes);
        }

        return Arrays.equals(srcTableType.getFieldNames(), targetType.getFieldNames());
    }

    // Resolves a key field of a table constraint. For a union constraint, the field must exist in
    // every member type with the same field type; otherwise null is returned.
    static BField getTableConstraintField(Type constraintType, String fieldName) {
        switch (constraintType.getTag()) {
            case TypeTags.RECORD_TYPE_TAG:
                Map<String, Field> fieldList = ((BRecordType) constraintType).getFields();
                return (BField) fieldList.get(fieldName);
            case TypeTags.INTERSECTION_TAG:
                Type effectiveType = ((BIntersectionType) constraintType).getEffectiveType();
                return getTableConstraintField(effectiveType, fieldName);
            case TypeTags.UNION_TAG:
                BUnionType unionType = (BUnionType) constraintType;
                List<Type> memTypes = unionType.getMemberTypes();
                List<BField> fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))
                        .filter(Objects::nonNull).collect(Collectors.toList());

                // The field must be present in every member type.
                if (fields.size() != memTypes.size()) {
                    return null;
                }

                if (fields.stream().allMatch(field -> isSameType(field.getFieldType(), fields.get(0).getFieldType()))) {
                    return fields.get(0);
                }
        }
        return null;
    }

    private static boolean checkIsJSONType(Type sourceType, List<TypePair> unresolvedTypes) {
        BJsonType jsonType = (BJsonType) TYPE_JSON;

        // Guard against cyclic types.
        TypePair pair = new TypePair(sourceType, jsonType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        switch (sourceType.getTag()) {
            case TypeTags.STRING_TAG:
            case TypeTags.CHAR_STRING_TAG:
            case TypeTags.INT_TAG:
            case TypeTags.SIGNED32_INT_TAG:
            case TypeTags.SIGNED16_INT_TAG:
            case TypeTags.SIGNED8_INT_TAG:
            case TypeTags.UNSIGNED32_INT_TAG:
            case TypeTags.UNSIGNED16_INT_TAG:
            case TypeTags.UNSIGNED8_INT_TAG:
            case TypeTags.BYTE_TAG:
            case TypeTags.FLOAT_TAG:
            case TypeTags.DECIMAL_TAG:
            case TypeTags.BOOLEAN_TAG:
            case TypeTags.NULL_TAG:
            case TypeTags.JSON_TAG:
                return true;
            case TypeTags.ARRAY_TAG:
                return checkIsType(((BArrayType) sourceType).getElementType(), jsonType, unresolvedTypes);
            case TypeTags.FINITE_TYPE_TAG:
                return isFiniteTypeMatch((BFiniteType) sourceType, jsonType);
            case TypeTags.MAP_TAG:
                return checkIsType(((BMapType) sourceType).getConstrainedType(), jsonType, unresolvedTypes);
            case TypeTags.RECORD_TYPE_TAG:
                // Every field type (and the rest type, for open records) must itself be json.
                BRecordType recordType = (BRecordType) sourceType;
                for (Field field : recordType.getFields().values()) {
                    if (!checkIsJSONType(field.getFieldType(), unresolvedTypes)) {
                        return false;
                    }
                }
                if (!recordType.sealed) {
                    return checkIsJSONType(recordType.restFieldType, unresolvedTypes);
                }
                return true;
            case TypeTags.TUPLE_TAG:
                BTupleType sourceTupleType = (BTupleType) sourceType;
                for (Type memberType : sourceTupleType.getTupleTypes()) {
                    if (!checkIsJSONType(memberType, unresolvedTypes)) {
                        return false;
                    }
                }
                Type tupleRestType = sourceTupleType.getRestType();
                if (tupleRestType != null) {
                    return checkIsJSONType(tupleRestType, unresolvedTypes);
                }
                return true;
            case TypeTags.UNION_TAG:
                for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                    if (!checkIsJSONType(memberType, unresolvedTypes)) {
                        return false;
                    }
                }
                return true;
            default:
                return false;
        }
    }

    private static boolean checkIsRecordType(Type sourceType, BRecordType targetType, List<TypePair> unresolvedTypes) {
        switch (sourceType.getTag()) {
            case TypeTags.RECORD_TYPE_TAG:
                return checkIsRecordType((BRecordType) sourceType, targetType, unresolvedTypes);
            case TypeTags.MAP_TAG:
                return checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);
        }
        return false;
    }

    private static boolean checkIsRecordType(BRecordType sourceRecordType, BRecordType targetType,
                                             List<TypePair> unresolvedTypes) {
        // Guard against cyclic record types.
        TypePair pair = new TypePair(sourceRecordType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        // An open record can never be a subtype of a sealed (closed) record.
        if (targetType.sealed && !sourceRecordType.sealed) {
            return false;
        }
        if (!sourceRecordType.sealed &&
                !checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {
            return false;
        }

        Map<String, Field> sourceFields = sourceRecordType.getFields();
        Set<String> targetFieldNames = targetType.getFields().keySet();

        for (Map.Entry<String, Field> targetFieldEntry : targetType.getFields().entrySet()) {
            Field targetField = targetFieldEntry.getValue();
            Field sourceField = sourceFields.get(targetFieldEntry.getKey());

            if (sourceField == null) {
                return false;
            }

            if (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {
                return false;
            }

            // A required target field cannot be satisfied by an optional source field.
            if (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)
                    && SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL)) {
                return false;
            }

            if (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {
                return false;
            }
        }

        if (targetType.sealed) {
            // A sealed target cannot accept extra source fields.
            return targetFieldNames.containsAll(sourceFields.keySet());
        }

        // Extra source fields must fit the target's rest field type.
        for (Map.Entry<String, Field> sourceFieldEntry : sourceFields.entrySet()) {
            if (targetFieldNames.contains(sourceFieldEntry.getKey())) {
                continue;
            }

            if (!checkIsType(sourceFieldEntry.getValue().getFieldType(), targetType.restFieldType,
                             unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    private static boolean checkIsRecordType(BMapType sourceType, BRecordType targetType,
                                             List<TypePair> unresolvedTypes) {
        // Guard against cyclic types.
        TypePair pair = new TypePair(sourceType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        // A map has no guaranteed keys, so it can only match a record whose fields are all optional.
        if (targetType.sealed) {
            return false;
        }

        Type constraintType = sourceType.getConstrainedType();

        for (Field field : targetType.getFields().values()) {
            var flags = field.getFlags();
            if (!SymbolFlags.isFlagOn(flags, SymbolFlags.OPTIONAL)) {
                return false;
            }

            if (SymbolFlags.isFlagOn(flags, SymbolFlags.READONLY) && !sourceType.isReadOnly()) {
                return false;
            }

            if (!checkIsType(constraintType, field.getFieldType(), unresolvedTypes)) {
                return false;
            }
        }

        return checkIsType(constraintType, targetType.restFieldType, unresolvedTypes);
    }

    private static boolean checkRecordBelongsToAnydataType(MapValue sourceVal, BRecordType recordType,
                                                           List<TypePair> unresolvedTypes) {
        Type targetType = TYPE_ANYDATA;
        TypePair pair = new TypePair(recordType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        Map<String, Field> fields = recordType.getFields();

        for (Map.Entry<String, Field> fieldEntry : fields.entrySet()) {
            String fieldName = fieldEntry.getKey();
            Field field = fieldEntry.getValue();

            if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
                // Readonly field: check the actual value rather than the declared type.
                BString fieldNameBString = StringUtils.fromString(fieldName);

                if (SymbolFlags
                        .isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL) &&
                        !sourceVal.containsKey(fieldNameBString)) {
                    continue;
                }

                if (!checkIsLikeType(sourceVal.get(fieldNameBString), targetType)) {
                    return false;
                }
            } else {
                if (!checkIsType(field.getFieldType(), targetType, unresolvedTypes)) {
                    return false;
                }
            }
        }

        if (recordType.sealed) {
            return true;
        }

        return checkIsType(recordType.restFieldType, targetType, unresolvedTypes);
    }

    private static boolean
    checkIsRecordType(Object sourceVal, Type sourceType, BRecordType targetType,
                      List<TypePair> unresolvedTypes) {
        switch (sourceType.getTag()) {
            case TypeTags.RECORD_TYPE_TAG:
                return checkIsRecordType((MapValue) sourceVal, (BRecordType) sourceType, targetType, unresolvedTypes);
            case TypeTags.MAP_TAG:
                return checkIsRecordType((BMapType) sourceType, targetType, unresolvedTypes);
        }
        return false;
    }

    private static boolean checkIsRecordType(MapValue sourceRecordValue, BRecordType sourceRecordType,
                                             BRecordType targetType, List<TypePair> unresolvedTypes) {
        // Guard against cyclic record types.
        TypePair pair = new TypePair(sourceRecordType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        if (targetType.sealed && !sourceRecordType.sealed) {
            return false;
        }
        if (!sourceRecordType.sealed &&
                !checkIsType(sourceRecordType.restFieldType, targetType.restFieldType, unresolvedTypes)) {
            return false;
        }

        Map<String, Field> sourceFields = sourceRecordType.getFields();
        Set<String> targetFieldNames = targetType.getFields().keySet();

        for (Map.Entry<String, Field> targetFieldEntry : targetType.getFields().entrySet()) {
            String fieldName = targetFieldEntry.getKey();
            Field targetField = targetFieldEntry.getValue();
            Field sourceField = sourceFields.get(fieldName);

            if (sourceField == null) {
                // A missing source field is acceptable only if the target field is optional.
                if (!SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL)) {
                    return false;
                }
                continue;
            }

            if (hasIncompatibleReadOnlyFlags(targetField, sourceField)) {
                return false;
            }

            boolean optionalTargetField = SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.OPTIONAL);
            boolean optionalSourceField = SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.OPTIONAL);

            if (SymbolFlags.isFlagOn(sourceField.getFlags(), SymbolFlags.READONLY)) {
                // Readonly field: check the actual value rather than the declared type.
                BString fieldNameBString = StringUtils.fromString(fieldName);

                if (optionalSourceField && !sourceRecordValue.containsKey(fieldNameBString)) {
                    if (!optionalTargetField) {
                        return false;
                    }
                    continue;
                }

                if (!checkIsLikeType(sourceRecordValue.get(fieldNameBString),
                                     targetField.getFieldType())) {
                    return false;
                }
            } else {
                if (!optionalTargetField && optionalSourceField) {
                    return false;
                }

                if (!checkIsType(sourceField.getFieldType(), targetField.getFieldType(), unresolvedTypes)) {
                    return false;
                }
            }
        }

        if (targetType.sealed) {
            // Extra source fields are tolerated only if their type can never hold a value.
            for (String sourceFieldName : sourceFields.keySet()) {
                if (targetFieldNames.contains(sourceFieldName)) {
                    continue;
                }

                if (!checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(
                        sourceFields.get(sourceFieldName).getFieldType())) {
                    return false;
                }
            }
            return true;
        }

        // Extra source fields must fit the target's rest field type.
        for (Map.Entry<String, Field> targetFieldEntry : sourceFields.entrySet()) {
            String fieldName = targetFieldEntry.getKey();
            Field field = targetFieldEntry.getValue();
            if (targetFieldNames.contains(fieldName)) {
                continue;
            }

            if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.READONLY)) {
                if (!checkIsLikeType(sourceRecordValue.get(StringUtils.fromString(fieldName)),
                                     targetType.restFieldType)) {
                    return false;
                }
            } else if (!checkIsType(field.getFieldType(), targetType.restFieldType, unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    // A readonly target field cannot be satisfied by a mutable source field.
    private static boolean hasIncompatibleReadOnlyFlags(Field targetField, Field sourceField) {
        return SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.READONLY) && !SymbolFlags
                .isFlagOn(sourceField.getFlags(), SymbolFlags.READONLY);
    }

    private static boolean checkIsArrayType(BArrayType sourceType, BArrayType targetType,
                                            List<TypePair> unresolvedTypes) {
        switch (sourceType.getState()) {
            case OPEN:
                // An open (unbounded) array is never a subtype of a fixed-length array.
                if (targetType.getState() != ArrayState.OPEN) {
                    return false;
                }
                break;
            case CLOSED:
                if (targetType.getState() == ArrayState.CLOSED &&
                        sourceType.getSize() != targetType.getSize()) {
                    return false;
                }
                break;
        }
        return checkIsType(sourceType.getElementType(), targetType.getElementType(), unresolvedTypes);
    }

    private static boolean checkIsArrayType(BTupleType sourceType, BArrayType targetType,
                                            List<TypePair> unresolvedTypes) {
        List<Type> tupleTypes = sourceType.getTupleTypes();
        Type sourceRestType = sourceType.getRestType();
        Type targetElementType = targetType.getElementType();

        if (targetType.getState() == ArrayState.OPEN) {
            // Open target: every tuple member (and the rest type, if any) must fit the element type.
            for (Type sourceElementType : tupleTypes) {
                if (!checkIsType(sourceElementType, targetElementType, unresolvedTypes)) {
                    return false;
                }
            }
            if (sourceRestType != null) {
                return checkIsType(sourceRestType, targetElementType, unresolvedTypes);
            }
            return true;
        }

        // Fixed-length target: the tuple must have exactly that many members and no rest type.
        if (sourceRestType != null) {
            return false;
        }
        if (tupleTypes.size() != targetType.getSize()) {
            return false;
        }
        for (Type sourceElementType : tupleTypes) {
            if (!checkIsType(sourceElementType, targetElementType, unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    private static boolean checkIsArrayType(Type sourceType, BArrayType targetType, List<TypePair> unresolvedTypes) {
        int sourceTypeTag = sourceType.getTag();

        if (sourceTypeTag == TypeTags.UNION_TAG) {
            // Every member of the union must be an array-compatible type.
            for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                if (!checkIsArrayType(memberType, targetType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        }

        if (sourceTypeTag != TypeTags.ARRAY_TAG && sourceTypeTag != TypeTags.TUPLE_TAG) {
            return false;
        }

        if (sourceTypeTag == TypeTags.ARRAY_TAG) {
            return checkIsArrayType((BArrayType) sourceType, targetType, unresolvedTypes);
        }
        return checkIsArrayType((BTupleType) sourceType, targetType, unresolvedTypes);
    }

    private static boolean checkIsTupleType(BArrayType sourceType, BTupleType targetType,
                                            List<TypePair> unresolvedTypes) {
        Type sourceElementType = sourceType.getElementType();

        List<Type> targetTypes = targetType.getTupleTypes();
        Type targetRestType = targetType.getRestType();

        switch (sourceType.getState()) {
            case OPEN:
                // An unbounded array can only match a tuple consisting purely of a rest type.
                if (targetRestType == null) {
                    return false;
                }
                if (targetTypes.isEmpty()) {
                    return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
                }
                return false;
            case CLOSED:
                if (sourceType.getSize() < targetTypes.size()) {
                    return false;
                }
                if (targetTypes.isEmpty()) {
                    if (targetRestType != null) {
                        return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
                    }
                    return sourceType.getSize() == 0;
                }

                for (Type targetElementType : targetTypes) {
                    if (!(checkIsType(sourceElementType, targetElementType, unresolvedTypes))) {
                        return false;
                    }
                }

                if (sourceType.getSize() == targetTypes.size()) {
                    return true;
                }

                // Remaining array members must fit the tuple's rest type.
                if (targetRestType != null) {
                    return checkIsType(sourceElementType, targetRestType, unresolvedTypes);
                }
                return false;
            default:
                return false;
        }
    }

    private static boolean checkIsTupleType(BTupleType sourceType, BTupleType targetType,
                                            List<TypePair> unresolvedTypes) {
        List<Type> sourceTypes = sourceType.getTupleTypes();
        Type sourceRestType = sourceType.getRestType();

        List<Type> targetTypes = targetType.getTupleTypes();
        Type targetRestType = targetType.getRestType();

        // A source rest type needs a target rest type to absorb it.
        if (sourceRestType != null && targetRestType == null) {
            return false;
        }

        int sourceTypeSize = sourceTypes.size();
        int targetTypeSize = targetTypes.size();

        if (sourceRestType == null && targetRestType == null && sourceTypeSize != targetTypeSize) {
            return false;
        }

        if (sourceTypeSize < targetTypeSize) {
            return false;
        }

        for (int i = 0; i < targetTypeSize; i++) {
            if (!checkIsType(sourceTypes.get(i), targetTypes.get(i), unresolvedTypes)) {
                return false;
            }
        }

        if (sourceTypeSize == targetTypeSize) {
            if (sourceRestType != null) {
                return checkIsType(sourceRestType, targetRestType, unresolvedTypes);
            }
            return true;
        }

        // Remaining source members must fit the target's rest type.
        for (int i = targetTypeSize; i < sourceTypeSize; i++) {
            if (!checkIsType(sourceTypes.get(i), targetRestType, unresolvedTypes)) {
                return false;
            }
        }
        if (sourceRestType != null) {
            return checkIsType(sourceRestType, targetRestType, unresolvedTypes);
        }
        return true;
    }

    private static boolean checkIsTupleType(Type sourceType, BTupleType targetType, List<TypePair> unresolvedTypes) {
        int sourceTypeTag = sourceType.getTag();

        if (sourceTypeTag == TypeTags.UNION_TAG) {
            for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                if (!checkIsTupleType(memberType, targetType, unresolvedTypes)) {
                    return false;
                }
            }
            return true;
        }

        if (sourceTypeTag != TypeTags.ARRAY_TAG && sourceTypeTag != TypeTags.TUPLE_TAG) {
            return false;
        }

        if (sourceTypeTag == TypeTags.ARRAY_TAG) {
            return checkIsTupleType((BArrayType) sourceType, targetType, unresolvedTypes);
        }
        return checkIsTupleType((BTupleType) sourceType, targetType, unresolvedTypes);
    }

    private static boolean checkIsAnyType(Type sourceType) {
        switch (sourceType.getTag()) {
            case TypeTags.ERROR_TAG:
            case TypeTags.READONLY_TAG:
                return false;
            case TypeTags.UNION_TAG:
            case TypeTags.ANYDATA_TAG:
            case TypeTags.JSON_TAG:
                // A union belongs to 'any' only if every member does.
                for (Type memberType : ((BUnionType) sourceType).getMemberTypes()) {
                    if (!checkIsAnyType(memberType)) {
                        return false;
                    }
                }
                return true;
        }
        return true;
    }

    private static boolean checkIsFiniteType(Type sourceType, BFiniteType targetType) {
        if (sourceType.getTag() != TypeTags.FINITE_TYPE_TAG) {
            return false;
        }

        // Two finite types match only when their value spaces are equal as sets.
        BFiniteType sourceFiniteType = (BFiniteType) sourceType;
        if (sourceFiniteType.valueSpace.size() != targetType.valueSpace.size()) {
            return false;
        }

        return targetType.valueSpace.containsAll(sourceFiniteType.valueSpace);
    }

    private static boolean checkIsFutureType(Type sourceType, BFutureType targetType, List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() != TypeTags.FUTURE_TAG) {
            return false;
        }
        return checkConstraints(((BFutureType) sourceType).getConstrainedType(), targetType.getConstrainedType(),
                                unresolvedTypes);
    }

    // Type-level variant: delegates with no value to inspect.
    private static boolean checkObjectEquivalency(Type sourceType, BObjectType targetType,
                                                  List<TypePair> unresolvedTypes) {
        return checkObjectEquivalency(null, sourceType, targetType, unresolvedTypes);
    }

    private static boolean checkObjectEquivalency(Object sourceVal, Type sourceType, BObjectType targetType,
                                                  List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() != TypeTags.OBJECT_TYPE_TAG && sourceType.getTag() != TypeTags.SERVICE_TAG) {
            return false;
        }
        // Guard against cyclic object types.
        TypePair pair = new TypePair(sourceType, targetType);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);

        BObjectType sourceObjectType = (BObjectType) sourceType;

        if
                (SymbolFlags.isFlagOn(targetType.flags, SymbolFlags.ISOLATED) &&
                        !SymbolFlags.isFlagOn(sourceObjectType.flags, SymbolFlags.ISOLATED)) {
            return false;
        }

        Map<String, Field> targetFields = targetType.getFields();
        Map<String, Field> sourceFields = sourceObjectType.getFields();
        MethodType[] targetFuncs = targetType.getMethods();
        MethodType[] sourceFuncs = sourceObjectType.getMethods();

        // A target type with private members cannot be matched structurally.
        if (targetType.getFields().values().stream().anyMatch(field -> SymbolFlags
                .isFlagOn(field.getFlags(), SymbolFlags.PRIVATE))
                || Stream.of(targetFuncs).anyMatch(func -> SymbolFlags.isFlagOn(func.getFlags(),
                                                                                SymbolFlags.PRIVATE))) {
            return false;
        }

        if (targetFields.size() > sourceFields.size() || targetFuncs.length > sourceFuncs.length) {
            return false;
        }

        String targetTypeModule = Optional.ofNullable(targetType.getPackage()).map(Module::toString).orElse("");
        String sourceTypeModule = Optional.ofNullable(sourceObjectType.getPackage()).map(Module::toString).orElse("");

        if (sourceVal == null) {
            // No value available: compare declared field types only.
            if (!checkObjectSubTypeForFields(targetFields, sourceFields, targetTypeModule, sourceTypeModule,
                                             unresolvedTypes)) {
                return false;
            }
        } else if (!checkObjectSubTypeForFieldsByValue(targetFields, sourceFields, targetTypeModule, sourceTypeModule,
                                                       (BObject) sourceVal, unresolvedTypes)) {
            return false;
        }

        return checkObjectSubTypeForMethods(unresolvedTypes, targetFuncs, sourceFuncs, targetTypeModule,
                                            sourceTypeModule, sourceObjectType, targetType);
    }

    private static boolean checkObjectSubTypeForFields(Map<String, Field> targetFields,
                                                       Map<String, Field> sourceFields, String targetTypeModule,
                                                       String sourceTypeModule, List<TypePair> unresolvedTypes) {
        for (Field lhsField : targetFields.values()) {
            Field rhsField = sourceFields.get(lhsField.getFieldName());
            if (rhsField == null ||
                    !isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),
                                              rhsField.getFlags()) ||
                    hasIncompatibleReadOnlyFlags(lhsField, rhsField) ||
                    !checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    private static boolean checkObjectSubTypeForFieldsByValue(Map<String, Field> targetFields,
                                                              Map<String, Field> sourceFields,
                                                              String targetTypeModule, String sourceTypeModule,
                                                              BObject sourceObjVal,
                                                              List<TypePair> unresolvedTypes) {
        for (Field lhsField : targetFields.values()) {
            String name = lhsField.getFieldName();
            Field rhsField = sourceFields.get(name);
            if (rhsField == null ||
                    !isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsField.getFlags(),
                                              rhsField.getFlags()) ||
                    hasIncompatibleReadOnlyFlags(lhsField, rhsField)) {
                return false;
            }

            if (SymbolFlags.isFlagOn(rhsField.getFlags(), SymbolFlags.FINAL)) {
                // Final field: the runtime value may be narrower than the declared field type.
                Object fieldValue = sourceObjVal.get(StringUtils.fromString(name));
                Type fieldValueType = getType(fieldValue);

                if (fieldValueType.isReadOnly()) {
                    if (!checkIsLikeType(fieldValue, lhsField.getFieldType())) {
                        return false;
                    }
                    continue;
                }

                if (!checkIsType(fieldValueType, lhsField.getFieldType(), unresolvedTypes)) {
                    return false;
                }
            } else if (!checkIsType(rhsField.getFieldType(), lhsField.getFieldType(), unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    private static boolean checkObjectSubTypeForMethods(List<TypePair> unresolvedTypes,
                                                        MethodType[] targetFuncs,
                                                        MethodType[] sourceFuncs,
                                                        String targetTypeModule, String sourceTypeModule,
                                                        BObjectType sourceType, BObjectType targetType) {
        for (MethodType lhsFunc : targetFuncs) {
            if (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.RESOURCE)) {
                continue;
            }

            MethodType rhsFunc = getMatchingInvokableType(sourceFuncs, lhsFunc, unresolvedTypes);
            if (rhsFunc == null ||
                    !isInSameVisibilityRegion(targetTypeModule, sourceTypeModule, lhsFunc.getFlags(),
                                              rhsFunc.getFlags())) {
                return false;
            }
            // 'remote' qualifiers must agree on both sides.
            if (SymbolFlags.isFlagOn(lhsFunc.getFlags(), SymbolFlags.REMOTE) != SymbolFlags
                    .isFlagOn(rhsFunc.getFlags(), SymbolFlags.REMOTE)) {
                return false;
            }
        }

        // Type-id sets must also be compatible: the source must carry every target type id.
        BTypeIdSet targetTypeIdSet = targetType.typeIdSet;
        if (targetTypeIdSet == null) {
            return true;
        }

        BTypeIdSet sourceTypeIdSet = sourceType.typeIdSet;
        if (sourceTypeIdSet == null) {
            return false;
        }

        return sourceTypeIdSet.containsAll(targetTypeIdSet);
    }

    private static boolean isInSameVisibilityRegion(String lhsTypePkg, String rhsTypePkg, long lhsFlags,
                                                    long rhsFlags) {
        if (SymbolFlags.isFlagOn(lhsFlags, SymbolFlags.PRIVATE)) {
            return lhsTypePkg.equals(rhsTypePkg);
        } else if (SymbolFlags.isFlagOn(lhsFlags, SymbolFlags.PUBLIC)) {
            return SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PUBLIC);
        }
        // Neither private nor public: visible only within the same module.
        return !SymbolFlags.isFlagOn(rhsFlags, SymbolFlags.PRIVATE) && !SymbolFlags
                .isFlagOn(rhsFlags, SymbolFlags.PUBLIC) && lhsTypePkg.equals(rhsTypePkg);
    }

    // Finds a source method with the same name and a compatible function type, or null.
    private static MethodType getMatchingInvokableType(MethodType[] rhsFuncs, MethodType lhsFunc,
                                                       List<TypePair> unresolvedTypes) {
        return Arrays.stream(rhsFuncs)
                .filter(rhsFunc -> lhsFunc.getName().equals(rhsFunc.getName()))
                .filter(rhsFunc -> checkFunctionTypeEqualityForObjectType(rhsFunc.getType(), lhsFunc.getType(),
                                                                          unresolvedTypes))
                .findFirst()
                .orElse(null);
    }

    private static boolean checkFunctionTypeEqualityForObjectType(FunctionType source, FunctionType target,
                                                                  List<TypePair> unresolvedTypes) {
        if (hasIncompatibleIsolatedFlags(target, source)) {
            return false;
        }

        if (source.getParameters().length != target.getParameters().length) {
            return false;
        }

        // Parameters are checked contravariantly: each target param type must be a subtype of
        // the corresponding source param type.
        for (int i = 0; i < source.getParameters().length; i++) {
            if (!checkIsType(target.getParameters()[i].type, source.getParameters()[i].type, unresolvedTypes)) {
                return false;
            }
        }

        if (source.getReturnType() == null && target.getReturnType() == null) {
            return true;
        } else if (source.getReturnType() == null || target.getReturnType() == null) {
            return false;
        }

        // Return types are checked covariantly.
        return checkIsType(source.getReturnType(), target.getReturnType(), unresolvedTypes);
    }

    private static boolean checkIsFunctionType(Type sourceType, BFunctionType targetType) {
        if (sourceType.getTag() != TypeTags.FUNCTION_POINTER_TAG) {
            return false;
        }

        BFunctionType source = (BFunctionType) sourceType;
        if (hasIncompatibleIsolatedFlags(targetType, source) ||
                hasIncompatibleTransactionalFlags(targetType, source)) {
            return false;
        }

        // 'function' (any function) accepts every function value with compatible qualifiers.
        if (SymbolFlags.isFlagOn(targetType.getFlags(), SymbolFlags.ANY_FUNCTION)) {
            return true;
        }

        if (source.parameters.length != targetType.parameters.length) {
            return false;
        }

        // Contravariant parameters, covariant return type.
        for (int i = 0; i < source.parameters.length; i++) {
            if (!checkIsType(targetType.parameters[i].type, source.parameters[i].type, new ArrayList<>())) {
                return false;
            }
        }

        return checkIsType(source.retType, targetType.retType, new ArrayList<>());
    }

    // An isolated target cannot be satisfied by a non-isolated source.
    private static boolean hasIncompatibleIsolatedFlags(FunctionType target, FunctionType source) {
        return SymbolFlags.isFlagOn(target.getFlags(), SymbolFlags.ISOLATED) && !SymbolFlags
                .isFlagOn(source.getFlags(), SymbolFlags.ISOLATED);
    }

    // A transactional source cannot be assigned to a non-transactional target.
    private static boolean hasIncompatibleTransactionalFlags(FunctionType target, FunctionType source) {
        return SymbolFlags.isFlagOn(source.getFlags(), SymbolFlags.TRANSACTIONAL) && !SymbolFlags
                .isFlagOn(target.getFlags(), SymbolFlags.TRANSACTIONAL);
    }

    private static boolean checkIsServiceType(Type sourceType, Type targetType, List<TypePair> unresolvedTypes) {
        if (sourceType.getTag() == TypeTags.SERVICE_TAG) {
            return checkObjectEquivalency(sourceType, (BObjectType) targetType, unresolvedTypes);
        }

        if (sourceType.getTag() == TypeTags.OBJECT_TYPE_TAG) {
            // An object type qualifies if it carries the SERVICE flag.
            var flags = ((BObjectType) sourceType).flags;
            return (flags & SymbolFlags.SERVICE) == SymbolFlags.SERVICE;
        }

        return false;
    }

    public static boolean isInherentlyImmutableType(Type sourceType) {
        if (isSimpleBasicType(sourceType)) {
            return true;
        }

        switch (sourceType.getTag()) {
            case TypeTags.XML_TEXT_TAG:
            case TypeTags.FINITE_TYPE_TAG:
            case TypeTags.READONLY_TAG:
            case TypeTags.NULL_TAG:
            case TypeTags.ERROR_TAG:
            case TypeTags.INVOKABLE_TAG:
            case TypeTags.SERVICE_TAG:
            case TypeTags.TYPEDESC_TAG:
            case TypeTags.FUNCTION_POINTER_TAG:
            case TypeTags.HANDLE_TAG:
                return true;
            case TypeTags.XML_TAG:
                // Only xml<never> (the empty xml value) is inherently immutable.
                return ((BXmlType) sourceType).constraint.getTag() == TypeTags.NEVER_TAG;
        }
        return false;
    }

    public static boolean
    isSelectivelyImmutableType(Type type, Set<Type> unresolvedTypes) {
        if (!unresolvedTypes.add(type)) {
            // Already being checked (cyclic type): assume immutable to break the cycle.
            return true;
        }

        switch (type.getTag()) {
            case TypeTags.ANY_TAG:
            case TypeTags.ANYDATA_TAG:
            case TypeTags.JSON_TAG:
            case TypeTags.XML_TAG:
            case TypeTags.XML_COMMENT_TAG:
            case TypeTags.XML_ELEMENT_TAG:
            case TypeTags.XML_PI_TAG:
                return true;
            case TypeTags.ARRAY_TAG:
                Type elementType = ((BArrayType) type).getElementType();
                return isInherentlyImmutableType(elementType) ||
                        isSelectivelyImmutableType(elementType, unresolvedTypes);
            case TypeTags.TUPLE_TAG:
                // Every member (and the rest type, if present) must have an immutable form.
                BTupleType tupleType = (BTupleType) type;
                for (Type tupMemType : tupleType.getTupleTypes()) {
                    if (!isInherentlyImmutableType(tupMemType) &&
                            !isSelectivelyImmutableType(tupMemType, unresolvedTypes)) {
                        return false;
                    }
                }

                Type tupRestType = tupleType.getRestType();
                if (tupRestType == null) {
                    return true;
                }

                return isInherentlyImmutableType(tupRestType) ||
                        isSelectivelyImmutableType(tupRestType, unresolvedTypes);
            case TypeTags.RECORD_TYPE_TAG:
                BRecordType recordType = (BRecordType) type;
                for (Field field : recordType.getFields().values()) {
                    Type fieldType = field.getFieldType();
                    if (!isInherentlyImmutableType(fieldType) &&
                            !isSelectivelyImmutableType(fieldType, unresolvedTypes)) {
                        return false;
                    }
                }

                Type recordRestType = recordType.restFieldType;
                if (recordRestType == null) {
                    return true;
                }

                return isInherentlyImmutableType(recordRestType) ||
                        isSelectivelyImmutableType(recordRestType, unresolvedTypes);
            case TypeTags.OBJECT_TYPE_TAG:
                BObjectType objectType = (BObjectType) type;

                // A class must itself be readonly for its instances to have an immutable form.
                if (SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.CLASS) &&
                        !SymbolFlags.isFlagOn(objectType.flags, SymbolFlags.READONLY)) {
                    return false;
                }

                for (Field field : objectType.getFields().values()) {
                    Type fieldType = field.getFieldType();
                    if (!isInherentlyImmutableType(fieldType) &&
                            !isSelectivelyImmutableType(fieldType, unresolvedTypes)) {
                        return false;
                    }
                }
                return true;
            case TypeTags.MAP_TAG:
                Type constraintType = ((BMapType) type).getConstrainedType();
                return isInherentlyImmutableType(constraintType) ||
                        isSelectivelyImmutableType(constraintType, unresolvedTypes);
            case TypeTags.TABLE_TAG:
                Type tableConstraintType = ((BTableType) type).getConstrainedType();
                return isInherentlyImmutableType(tableConstraintType) ||
                        isSelectivelyImmutableType(tableConstraintType, unresolvedTypes);
            case TypeTags.UNION_TAG:
                // A union has an immutable form if at least one member does.
                boolean readonlyIntersectionExists = false;
                for (Type memberType : ((BUnionType) type).getMemberTypes()) {
                    if (isInherentlyImmutableType(memberType) ||
                            isSelectivelyImmutableType(memberType, unresolvedTypes)) {
                        readonlyIntersectionExists = true;
                        break;
                    }
                }
                return readonlyIntersectionExists;
            case TypeTags.INTERSECTION_TAG:
                return isSelectivelyImmutableType(((BIntersectionType) type).getEffectiveType(), unresolvedTypes);
        }
        return false;
    }

    private static boolean checkConstraints(Type sourceConstraint, Type targetConstraint,
                                            List<TypePair> unresolvedTypes) {
        // A missing constraint is treated as 'any'.
        if (sourceConstraint == null) {
            sourceConstraint = TYPE_ANY;
        }

        if (targetConstraint == null) {
            targetConstraint = TYPE_ANY;
        }

        return checkIsType(sourceConstraint, targetConstraint, unresolvedTypes);
    }

    private static boolean isMutable(Object value, Type sourceType) {
        // Simple basic types (tags below NULL_TAG) and finite-type values are never mutable.
        if (value == null || sourceType.getTag() < TypeTags.NULL_TAG ||
                sourceType.getTag() == TypeTags.FINITE_TYPE_TAG) {
            return false;
        }

        return !((RefValue) value).isFrozen();
    }

    private static boolean checkArrayEquivalent(Type actualType, Type expType) {
        if (expType.getTag() == TypeTags.ARRAY_TAG && actualType.getTag() == TypeTags.ARRAY_TAG) {
            // Both are array types: compare element types and dimensions.
            BArrayType lhrArrayType = (BArrayType) expType;
            BArrayType rhsArrayType = (BArrayType) actualType;
            return checkIsArrayType(rhsArrayType, lhrArrayType, new ArrayList<>());
        }
        // Otherwise the types must be identical.
        return expType == actualType;
    }

    // Convenience overload with a fresh visited set.
    private static boolean checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type) {
        Set<String> visitedTypeSet = new HashSet<>();
        return checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(type, visitedTypeSet);
    }

    private static boolean
    checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(Type type, Set<String> visitedTypeSet) {
        switch (type.getTag()) {
            case TypeTags.NEVER_TAG:
                return true;
            case TypeTags.RECORD_TYPE_TAG:
                BRecordType recordType = (BRecordType) type;
                visitedTypeSet.add(recordType.getName());
                for (Field field : recordType.getFields().values()) {
                    // A required field with an uninhabitable type makes the whole record uninhabitable.
                    if ((SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED) ||
                            !SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) &&
                            !visitedTypeSet.contains(field.getFieldType()) &&
                            checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(field.getFieldType(),
                                                                                    visitedTypeSet)) {
                        return true;
                    }
                }
                return false;
            case TypeTags.TUPLE_TAG:
                BTupleType tupleType = (BTupleType) type;
                visitedTypeSet.add(tupleType.getName());
                List<Type> tupleTypes = tupleType.getTupleTypes();
                for (Type mem : tupleTypes) {
                    if (!visitedTypeSet.add(mem.getName())) {
                        continue;
                    }
                    if (checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(mem, visitedTypeSet)) {
                        return true;
                    }
                }
                return false;
            case TypeTags.ARRAY_TAG:
                BArrayType arrayType = (BArrayType) type;
                visitedTypeSet.add(arrayType.getName());
                Type elemType = arrayType.getElementType();
                visitedTypeSet.add(elemType.getName());
                // Only a fixed-length array of an uninhabitable element type is itself uninhabitable.
                return arrayType.getState() != ArrayState.OPEN &&
                        checkIsNeverTypeOrStructureTypeWithARequiredNeverMember(elemType, visitedTypeSet);
            default:
                return false;
        }
    }

    /**
     * Check whether a given value confirms to a given type. First it checks if the type of the value, and
     * if fails then falls back to checking the value.
     *
     * @param sourceValue            Value to check
     * @param targetType             Target type
     * @param unresolvedValues       Values that are unresolved so far
     * @param allowNumericConversion Flag indicating whether to perform numeric conversions
     * @return True if the value confirms to the provided type. False, otherwise.
     */
    private static boolean checkIsLikeType(Object sourceValue, Type targetType, List<TypeValuePair> unresolvedValues,
                                           boolean allowNumericConversion) {
        Type sourceType = getType(sourceValue);
        if (checkIsType(sourceType, targetType, new ArrayList<>())) {
            return true;
        }

        return checkIsLikeOnValue(sourceValue, sourceType, targetType, unresolvedValues, allowNumericConversion);
    }

    /**
     * Check whether a given value confirms to a given type. Strictly checks the value only, and does not consider the
     * type of the value for consideration.
     *
     * @param sourceValue            Value to check
     * @param sourceType             Type of the value
     * @param targetType             Target type
     * @param unresolvedValues       Values that are unresolved so far
     * @param allowNumericConversion Flag indicating whether to perform numeric conversions
     * @return True if the value confirms to the provided type. False, otherwise.
     */
    private static boolean checkIsLikeOnValue(Object sourceValue, Type sourceType, Type targetType,
                                              List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) {
        int sourceTypeTag = sourceType.getTag();
        int targetTypeTag = targetType.getTag();

        if (sourceTypeTag == TypeTags.INTERSECTION_TAG) {
            return checkIsLikeOnValue(sourceValue, ((BIntersectionType) sourceType).getEffectiveType(),
                                      targetTypeTag != TypeTags.INTERSECTION_TAG ?
targetType : ((BIntersectionType) targetType).getEffectiveType(), unresolvedValues, allowNumericConversion); } if (targetTypeTag == TypeTags.INTERSECTION_TAG) { return checkIsLikeOnValue(sourceValue, sourceType, ((BIntersectionType) targetType).getEffectiveType(), unresolvedValues, allowNumericConversion); } if (sourceTypeTag == TypeTags.PARAMETERIZED_TYPE_TAG) { if (targetTypeTag != TypeTags.PARAMETERIZED_TYPE_TAG) { return checkIsLikeOnValue(sourceValue, ((BParameterizedType) sourceType).getParamValueType(), targetType, unresolvedValues, allowNumericConversion); } return checkIsLikeOnValue(sourceValue, ((BParameterizedType) sourceType).getParamValueType(), ((BParameterizedType) targetType).getParamValueType(), unresolvedValues, allowNumericConversion); } switch (targetTypeTag) { case TypeTags.READONLY_TAG: return true; case TypeTags.BYTE_TAG: if (TypeTags.isIntegerTypeTag(sourceTypeTag)) { return isByteLiteral((Long) sourceValue); } return allowNumericConversion && TypeConverter.isConvertibleToByte(sourceValue); case TypeTags.INT_TAG: return allowNumericConversion && TypeConverter.isConvertibleToInt(sourceValue); case TypeTags.SIGNED32_INT_TAG: case TypeTags.SIGNED16_INT_TAG: case TypeTags.SIGNED8_INT_TAG: case TypeTags.UNSIGNED32_INT_TAG: case TypeTags.UNSIGNED16_INT_TAG: case TypeTags.UNSIGNED8_INT_TAG: if (TypeTags.isIntegerTypeTag(sourceTypeTag) || targetTypeTag == TypeTags.BYTE_TAG) { return TypeConverter.isConvertibleToIntSubType(sourceValue, targetType); } return allowNumericConversion && TypeConverter.isConvertibleToIntSubType(sourceValue, targetType); case TypeTags.FLOAT_TAG: case TypeTags.DECIMAL_TAG: return allowNumericConversion && TypeConverter.isConvertibleToFloatingPointTypes(sourceValue); case TypeTags.CHAR_STRING_TAG: return TypeConverter.isConvertibleToChar(sourceValue); case TypeTags.RECORD_TYPE_TAG: return checkIsLikeRecordType(sourceValue, (BRecordType) targetType, unresolvedValues, allowNumericConversion); case TypeTags.TABLE_TAG: return 
checkIsLikeTableType(sourceValue, (BTableType) targetType, unresolvedValues, allowNumericConversion); case TypeTags.JSON_TAG: return checkIsLikeJSONType(sourceValue, sourceType, (BJsonType) targetType, unresolvedValues, allowNumericConversion); case TypeTags.MAP_TAG: return checkIsLikeMapType(sourceValue, (BMapType) targetType, unresolvedValues, allowNumericConversion); case TypeTags.STREAM_TAG: return checkIsLikeStreamType(sourceValue, (BStreamType) targetType); case TypeTags.ARRAY_TAG: return checkIsLikeArrayType(sourceValue, (BArrayType) targetType, unresolvedValues, allowNumericConversion); case TypeTags.TUPLE_TAG: return checkIsLikeTupleType(sourceValue, (BTupleType) targetType, unresolvedValues, allowNumericConversion); case TypeTags.ERROR_TAG: return checkIsLikeErrorType(sourceValue, (BErrorType) targetType, unresolvedValues, allowNumericConversion); case TypeTags.ANYDATA_TAG: return checkIsLikeAnydataType(sourceValue, sourceType, unresolvedValues, allowNumericConversion); case TypeTags.FINITE_TYPE_TAG: return checkFiniteTypeAssignable(sourceValue, sourceType, (BFiniteType) targetType); case TypeTags.XML_ELEMENT_TAG: if (sourceTypeTag == TypeTags.XML_TAG) { XmlValue xmlSource = (XmlValue) sourceValue; return xmlSource.isSingleton(); } return false; case TypeTags.XML_COMMENT_TAG: case TypeTags.XML_PI_TAG: case TypeTags.XML_TEXT_TAG: if (sourceTypeTag == TypeTags.XML_TAG) { return checkIsLikeNonElementSingleton((XmlValue) sourceValue, targetType); } return false; case TypeTags.XML_TAG: if (sourceTypeTag == TypeTags.XML_TAG) { return checkIsLikeXMLSequenceType((XmlValue) sourceValue, targetType); } return false; case TypeTags.UNION_TAG: if (allowNumericConversion) { List<Type> compatibleTypesWithNumConversion = new ArrayList<>(); List<Type> compatibleTypesWithoutNumConversion = new ArrayList<>(); for (Type type : ((BUnionType) targetType).getMemberTypes()) { List<TypeValuePair> tempList = new ArrayList<>(unresolvedValues.size()); 
tempList.addAll(unresolvedValues); if (checkIsLikeType(sourceValue, type, tempList, false)) { compatibleTypesWithoutNumConversion.add(type); } if (checkIsLikeType(sourceValue, type, unresolvedValues, true)) { compatibleTypesWithNumConversion.add(type); } } return compatibleTypesWithNumConversion.size() != 0 && compatibleTypesWithNumConversion.size() - compatibleTypesWithoutNumConversion.size() <= 1; } else { for (Type type : ((BUnionType) targetType).getMemberTypes()) { if (checkIsLikeType(sourceValue, type, unresolvedValues, false)) { return true; } } } return false; default: return false; } } private static XmlNodeType getXmlNodeType(Type type) { XmlNodeType nodeType = null; switch (type.getTag()) { case TypeTags.XML_ELEMENT_TAG: nodeType = XmlNodeType.ELEMENT; break; case TypeTags.XML_COMMENT_TAG: nodeType = XmlNodeType.COMMENT; break; case TypeTags.XML_PI_TAG: nodeType = XmlNodeType.PI; break; case TypeTags.XML_TEXT_TAG: nodeType = XmlNodeType.TEXT; break; default: return null; } return nodeType; } private static boolean checkIsLikeNonElementSingleton(XmlValue xmlSource, Type targetType) { XmlNodeType nodeType = getXmlNodeType(targetType); if (nodeType == null) { return false; } if (xmlSource.getNodeType() == nodeType) { return true; } if (xmlSource.getNodeType() == XmlNodeType.SEQUENCE) { XmlSequence seq = (XmlSequence) xmlSource; return seq.size() == 1 && seq.getChildrenList().get(0).getNodeType() == nodeType || (nodeType == XmlNodeType.TEXT && seq.isEmpty()); } return false; } private static boolean checkIsLikeXMLSequenceType(XmlValue xmlSource, Type targetType) { if (xmlSource.getNodeType() != XmlNodeType.SEQUENCE) { return false; } Set<XmlNodeType> acceptedNodes = new HashSet<>(); BXmlType target = (BXmlType) targetType; if (target.constraint.getTag() == TypeTags.UNION_TAG) { getXMLNodeOnUnion((BUnionType) target.constraint, acceptedNodes); } else { acceptedNodes.add(getXmlNodeType(((BXmlType) targetType).constraint)); } XmlSequence seq = (XmlSequence) 
xmlSource; for (BXml m : seq.getChildrenList()) { if (!acceptedNodes.contains(m.getNodeType())) { return false; } } return true; } private static void getXMLNodeOnUnion(BUnionType unionType, Set<XmlNodeType> nodeTypes) { if (nodeTypes.size() == 4) { return; } for (Type memberType : unionType.getMemberTypes()) { if (memberType.getTag() == TypeTags.UNION_TAG) { getXMLNodeOnUnion((BUnionType) memberType, nodeTypes); } else { nodeTypes.add(getXmlNodeType(memberType)); } } } public static boolean isNumericType(Type type) { return type.getTag() < TypeTags.STRING_TAG || TypeTags.isIntegerTypeTag(type.getTag()); } private static boolean checkIsLikeAnydataType(Object sourceValue, Type sourceType, List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) { switch (sourceType.getTag()) { case TypeTags.RECORD_TYPE_TAG: case TypeTags.JSON_TAG: case TypeTags.MAP_TAG: return isLikeType(((MapValueImpl) sourceValue).values().toArray(), TYPE_ANYDATA, unresolvedValues, allowNumericConversion); case TypeTags.ARRAY_TAG: ArrayValue arr = (ArrayValue) sourceValue; BArrayType arrayType = (BArrayType) arr.getType(); switch (arrayType.getElementType().getTag()) { case TypeTags.INT_TAG: case TypeTags.FLOAT_TAG: case TypeTags.DECIMAL_TAG: case TypeTags.STRING_TAG: case TypeTags.BOOLEAN_TAG: case TypeTags.BYTE_TAG: return true; default: return isLikeType(arr.getValues(), TYPE_ANYDATA, unresolvedValues, allowNumericConversion); } case TypeTags.TUPLE_TAG: return isLikeType(((ArrayValue) sourceValue).getValues(), TYPE_ANYDATA, unresolvedValues, allowNumericConversion); case TypeTags.ANYDATA_TAG: return true; case TypeTags.FINITE_TYPE_TAG: case TypeTags.UNION_TAG: return checkIsLikeType(sourceValue, TYPE_ANYDATA, unresolvedValues, allowNumericConversion); default: return false; } } private static boolean isLikeType(Object[] objects, Type targetType, List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) { for (Object value : objects) { if (!checkIsLikeType(value, 
targetType, unresolvedValues, allowNumericConversion)) { return false; } } return true; } private static boolean checkIsLikeTupleType(Object sourceValue, BTupleType targetType, List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) { if (!(sourceValue instanceof ArrayValue)) { return false; } ArrayValue source = (ArrayValue) sourceValue; List<Type> targetTypes = targetType.getTupleTypes(); int sourceTypeSize = source.size(); int targetTypeSize = targetTypes.size(); Type targetRestType = targetType.getRestType(); if (sourceTypeSize < targetTypeSize) { return false; } if (targetRestType == null && sourceTypeSize > targetTypeSize) { return false; } for (int i = 0; i < targetTypeSize; i++) { if (!checkIsLikeType(source.getRefValue(i), targetTypes.get(i), unresolvedValues, allowNumericConversion)) { return false; } } for (int i = targetTypeSize; i < sourceTypeSize; i++) { if (!checkIsLikeType(source.getRefValue(i), targetRestType, unresolvedValues, allowNumericConversion)) { return false; } } return true; } static boolean isByteLiteral(long longValue) { return (longValue >= BBYTE_MIN_VALUE && longValue <= BBYTE_MAX_VALUE); } static boolean isSigned32LiteralValue(Long longObject) { return (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE); } static boolean isSigned16LiteralValue(Long longObject) { return (longObject.intValue() >= SIGNED16_MIN_VALUE && longObject.intValue() <= SIGNED16_MAX_VALUE); } static boolean isSigned8LiteralValue(Long longObject) { return (longObject.intValue() >= SIGNED8_MIN_VALUE && longObject.intValue() <= SIGNED8_MAX_VALUE); } static boolean isUnsigned32LiteralValue(Long longObject) { return (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE); } static boolean isUnsigned16LiteralValue(Long longObject) { return (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED16_MAX_VALUE); } static boolean isUnsigned8LiteralValue(Long longObject) { return (longObject.intValue() >= 0 && longObject.intValue() 
<= UNSIGNED8_MAX_VALUE); } static boolean isCharLiteralValue(Object object) { String value; if (object instanceof BString) { value = ((BString) object).getValue(); } else if (object instanceof String) { value = (String) object; } else { return false; } return value.codePoints().count() == 1; } private static boolean checkIsLikeArrayType(Object sourceValue, BArrayType targetType, List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) { if (!(sourceValue instanceof ArrayValue)) { return false; } ArrayValue source = (ArrayValue) sourceValue; Type targetTypeElementType = targetType.getElementType(); if (source.getType().getTag() == TypeTags.ARRAY_TAG) { Type sourceElementType = ((BArrayType) source.getType()).getElementType(); if (isValueType(sourceElementType)) { if (checkIsType(sourceElementType, targetTypeElementType, new ArrayList<>())) { return true; } if (allowNumericConversion && isNumericType(sourceElementType)) { if (isNumericType(targetTypeElementType)) { return true; } if (targetTypeElementType.getTag() != TypeTags.UNION_TAG) { return false; } List<Type> targetNumericTypes = new ArrayList<>(); for (Type memType : ((BUnionType) targetTypeElementType).getMemberTypes()) { if (isNumericType(memType) && !targetNumericTypes.contains(memType)) { targetNumericTypes.add(memType); } } return targetNumericTypes.size() == 1; } if (targetTypeElementType.getTag() == TypeTags.FLOAT_TAG || targetTypeElementType.getTag() == TypeTags.DECIMAL_TAG) { return false; } } } for (int i = 0; i < source.size(); i++) { if (!checkIsLikeType(source.get(i), targetTypeElementType, unresolvedValues, allowNumericConversion)) { return false; } } return true; } private static boolean checkIsLikeMapType(Object sourceValue, BMapType targetType, List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) { if (!(sourceValue instanceof MapValueImpl)) { return false; } for (Object mapEntry : ((MapValueImpl) sourceValue).values()) { if (!checkIsLikeType(mapEntry, 
targetType.getConstrainedType(), unresolvedValues, allowNumericConversion)) { return false; } } return true; } private static boolean checkIsLikeStreamType(Object sourceValue, BStreamType targetType) { if (!(sourceValue instanceof StreamValue)) { return false; } BStreamType streamType = (BStreamType) ((StreamValue) sourceValue).getType(); return streamType.getConstrainedType() == targetType.getConstrainedType(); } private static boolean checkIsLikeJSONType(Object sourceValue, Type sourceType, BJsonType targetType, List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) { if (sourceType.getTag() == TypeTags.ARRAY_TAG) { ArrayValue source = (ArrayValue) sourceValue; Type elementType = ((BArrayType) source.getType()).getElementType(); if (isValueType(elementType)) { return checkIsType(elementType, targetType, new ArrayList<>()); } Object[] arrayValues = source.getValues(); for (int i = 0; i < ((ArrayValue) sourceValue).size(); i++) { if (!checkIsLikeType(arrayValues[i], targetType, unresolvedValues, allowNumericConversion)) { return false; } } return true; } else if (sourceType.getTag() == TypeTags.MAP_TAG) { for (Object value : ((MapValueImpl) sourceValue).values()) { if (!checkIsLikeType(value, targetType, unresolvedValues, allowNumericConversion)) { return false; } } return true; } else if (sourceType.getTag() == TypeTags.RECORD_TYPE_TAG) { TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType); if (unresolvedValues.contains(typeValuePair)) { return true; } unresolvedValues.add(typeValuePair); for (Object object : ((MapValueImpl) sourceValue).values()) { if (!checkIsLikeType(object, targetType, unresolvedValues, allowNumericConversion)) { return false; } } return true; } else if (sourceType.getTag() == TypeTags.TUPLE_TAG) { for (Object obj : ((TupleValueImpl) sourceValue).getValues()) { if (!checkIsLikeType(obj, targetType, unresolvedValues, allowNumericConversion)) { return false; } } return true; } return false; } private static 
boolean checkIsLikeRecordType(Object sourceValue, BRecordType targetType, List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) { if (!(sourceValue instanceof MapValueImpl)) { return false; } TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType); if (unresolvedValues.contains(typeValuePair)) { return true; } unresolvedValues.add(typeValuePair); Map<String, Type> targetTypeField = new HashMap<>(); Type restFieldType = targetType.restFieldType; for (Field field : targetType.getFields().values()) { targetTypeField.put(field.getFieldName(), field.getFieldType()); } for (Map.Entry targetTypeEntry : targetTypeField.entrySet()) { Object fieldName = StringUtils.fromString(targetTypeEntry.getKey().toString()); if (!(((MapValueImpl) sourceValue).containsKey(fieldName)) && !SymbolFlags.isFlagOn(targetType.getFields().get(fieldName.toString()).getFlags(), SymbolFlags.OPTIONAL)) { return false; } } for (Object object : ((MapValueImpl) sourceValue).entrySet()) { Map.Entry valueEntry = (Map.Entry) object; String fieldName = valueEntry.getKey().toString(); if (targetTypeField.containsKey(fieldName)) { if (!checkIsLikeType((valueEntry.getValue()), targetTypeField.get(fieldName), unresolvedValues, allowNumericConversion)) { return false; } } else { if (!targetType.sealed) { if (!checkIsLikeType((valueEntry.getValue()), restFieldType, unresolvedValues, allowNumericConversion)) { return false; } } else { return false; } } } return true; } private static boolean checkIsLikeTableType(Object sourceValue, BTableType targetType, List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) { if (!(sourceValue instanceof TableValueImpl)) { return false; } TableValueImpl tableValue = (TableValueImpl) sourceValue; BTableType sourceType = (BTableType) tableValue.getType(); if (targetType.getKeyType() != null && sourceType.getFieldNames() == null) { return false; } if (sourceType.getKeyType() != null && !checkIsType(tableValue.getKeyType(), 
targetType.getKeyType())) { return false; } TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType); if (unresolvedValues.contains(typeValuePair)) { return true; } Object[] objects = tableValue.values().toArray(); for (Object object : objects) { if (!checkIsLikeType(object, targetType.getConstrainedType(), allowNumericConversion)) { return false; } } return true; } private static boolean checkFiniteTypeAssignable(Object sourceValue, Type sourceType, BFiniteType targetType) { for (Object valueSpaceItem : targetType.valueSpace) { if (isFiniteTypeValue(sourceValue, sourceType, valueSpaceItem)) { return true; } } return false; } protected static boolean isFiniteTypeValue(Object sourceValue, Type sourceType, Object valueSpaceItem) { Type valueSpaceItemType = getType(valueSpaceItem); if (valueSpaceItemType.getTag() > TypeTags.FLOAT_TAG) { return valueSpaceItemType.getTag() == sourceType.getTag() && (valueSpaceItem == sourceValue || valueSpaceItem.equals(sourceValue)); } switch (sourceType.getTag()) { case TypeTags.BYTE_TAG: case TypeTags.INT_TAG: return ((Number) sourceValue).longValue() == ((Number) valueSpaceItem).longValue(); case TypeTags.FLOAT_TAG: if (sourceType.getTag() != valueSpaceItemType.getTag()) { return false; } return ((Number) sourceValue).doubleValue() == ((Number) valueSpaceItem).doubleValue(); case TypeTags.DECIMAL_TAG: default: if (sourceType.getTag() != valueSpaceItemType.getTag()) { return false; } return valueSpaceItem.equals(sourceValue); } } private static boolean checkIsErrorType(Type sourceType, BErrorType targetType, List<TypePair> unresolvedTypes) { if (sourceType.getTag() != TypeTags.ERROR_TAG) { return false; } TypePair pair = new TypePair(sourceType, targetType); if (unresolvedTypes.contains(pair)) { return true; } unresolvedTypes.add(pair); BErrorType bErrorType = (BErrorType) sourceType; if (!checkIsType(bErrorType.detailType, targetType.detailType, unresolvedTypes)) { return false; } if (targetType.typeIdSet == null) { 
return true; } BTypeIdSet sourceTypeIdSet = bErrorType.typeIdSet; if (sourceTypeIdSet == null) { return false; } return sourceTypeIdSet.containsAll(targetType.typeIdSet); } private static boolean checkIsLikeErrorType(Object sourceValue, BErrorType targetType, List<TypeValuePair> unresolvedValues, boolean allowNumericConversion) { Type sourceType = getType(sourceValue); if (sourceValue == null || sourceType.getTag() != TypeTags.ERROR_TAG) { return false; } if (!checkIsLikeType(((ErrorValue) sourceValue).getDetails(), targetType.detailType, unresolvedValues, allowNumericConversion)) { return false; } if (targetType.typeIdSet == null) { return true; } BTypeIdSet sourceIdSet = ((BErrorType) sourceType).typeIdSet; if (sourceIdSet == null) { return false; } return sourceIdSet.containsAll(targetType.typeIdSet); } private static boolean isSimpleBasicType(Type type) { return type.getTag() < TypeTags.NULL_TAG; } private static boolean isHandleType(Type type) { return type.getTag() == TypeTags.HANDLE_TAG; } /** * Deep value equality check for anydata. * * @param lhsValue The value on the left hand side * @param rhsValue The value on the right hand side * @param checkedValues Structured value pairs already compared or being compared * @return True if values are equal, else false. 
*/
    private static boolean isEqual(Object lhsValue, Object rhsValue, List<ValuePair> checkedValues) {
        // Reference-equal values (including both null) are trivially equal.
        if (lhsValue == rhsValue) {
            return true;
        }

        // Exactly one side is null -> not equal.
        if (null == lhsValue || null == rhsValue) {
            return false;
        }

        int lhsValTypeTag = getType(lhsValue).getTag();
        int rhsValTypeTag = getType(rhsValue).getTag();

        // Dispatch on the LHS runtime-type tag; each case also validates the RHS tag.
        switch (lhsValTypeTag) {
            case TypeTags.STRING_TAG:
            case TypeTags.BOOLEAN_TAG:
                return lhsValue.equals(rhsValue);
            case TypeTags.INT_TAG:
                // int compares equal to both int and byte numeric values.
                if (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {
                    return false;
                }
                return lhsValue.equals(((Number) rhsValue).longValue());
            case TypeTags.BYTE_TAG:
                if (rhsValTypeTag != TypeTags.BYTE_TAG && rhsValTypeTag != TypeTags.INT_TAG) {
                    return false;
                }
                return ((Number) lhsValue).byteValue() == ((Number) rhsValue).byteValue();
            case TypeTags.FLOAT_TAG:
                if (rhsValTypeTag != TypeTags.FLOAT_TAG) {
                    return false;
                }
                // Deliberately treats NaN == NaN as equal, unlike IEEE-754 '=='.
                if (Double.isNaN((Double) lhsValue) && Double.isNaN((Double) rhsValue)) {
                    return true;
                }
                return ((Number) lhsValue).doubleValue() == ((Number) rhsValue).doubleValue();
            case TypeTags.DECIMAL_TAG:
                if (rhsValTypeTag != TypeTags.DECIMAL_TAG) {
                    return false;
                }
                return checkDecimalEqual((DecimalValue) lhsValue, (DecimalValue) rhsValue);
            case TypeTags.XML_TAG:
                // An XML value tagged XML_TAG may still be a text singleton; route accordingly.
                if (lhsValue instanceof XmlText) {
                    return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue, (XmlValue) rhsValue);
                }
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlSequence) lhsValue, (XmlValue) rhsValue);
            case TypeTags.XML_ELEMENT_TAG:
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlItem) lhsValue, (XmlValue) rhsValue);
            case TypeTags.XML_COMMENT_TAG:
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlComment) lhsValue, (XmlValue) rhsValue);
            case TypeTags.XML_TEXT_TAG:
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlText) lhsValue, (XmlValue) rhsValue);
            case TypeTags.XML_PI_TAG:
                return TypeTags.isXMLTypeTag(rhsValTypeTag) && isEqual((XmlPi) lhsValue, (XmlValue) rhsValue);
            case TypeTags.MAP_TAG:
            case TypeTags.JSON_TAG:
            case TypeTags.RECORD_TYPE_TAG:
                // Mappings (map/json-object/record) compare structurally with any other mapping kind.
                return isMappingType(rhsValTypeTag) && isEqual((MapValueImpl) lhsValue, (MapValueImpl) rhsValue,
                        checkedValues);
            case TypeTags.TUPLE_TAG:
            case TypeTags.ARRAY_TAG:
                // Lists (array/tuple) compare structurally with any other list kind.
                return isListType(rhsValTypeTag) && isEqual((ArrayValue) lhsValue, (ArrayValue) rhsValue,
                        checkedValues);
            case TypeTags.ERROR_TAG:
                return rhsValTypeTag == TypeTags.ERROR_TAG && isEqual((ErrorValue) lhsValue, (ErrorValue) rhsValue,
                        checkedValues);
            case TypeTags.SERVICE_TAG:
                // Services have no deep-equality semantics; fall through to 'not equal'.
                break;
            case TypeTags.TABLE_TAG:
                return rhsValTypeTag == TypeTags.TABLE_TAG && isEqual((TableValueImpl) lhsValue,
                        (TableValueImpl) rhsValue, checkedValues);
        }
        return false;
    }

    // True for the tags of list-shaped values (array or tuple).
    private static boolean isListType(int typeTag) {
        return typeTag == TypeTags.ARRAY_TAG || typeTag == TypeTags.TUPLE_TAG;
    }

    // True for the tags of mapping-shaped values (map, record, or json).
    private static boolean isMappingType(int typeTag) {
        return typeTag == TypeTags.MAP_TAG || typeTag == TypeTags.RECORD_TYPE_TAG || typeTag == TypeTags.JSON_TAG;
    }

    /**
     * Deep equality check for an array/tuple.
     *
     * @param lhsList       The array/tuple on the left hand side
     * @param rhsList       The array/tuple on the right hand side
     * @param checkedValues Structured value pairs already compared or being compared
     * @return True if the array/tuple values are equal, else false.
     */
    private static boolean isEqual(ArrayValue lhsList, ArrayValue rhsList, List<ValuePair> checkedValues) {
        // Cycle guard: if this pair is already under comparison, treat it as equal.
        ValuePair compValuePair = new ValuePair(lhsList, rhsList);
        if (checkedValues.contains(compValuePair)) {
            return true;
        }
        checkedValues.add(compValuePair);

        if (lhsList.size() != rhsList.size()) {
            return false;
        }

        // Element-wise deep comparison in index order.
        for (int i = 0; i < lhsList.size(); i++) {
            if (!isEqual(lhsList.get(i), rhsList.get(i), checkedValues)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Deep equality check for a map.
     *
     * @param lhsMap        Map on the left hand side
     * @param rhsMap        Map on the right hand side
     * @param checkedValues Structured value pairs already compared or being compared
     * @return True if the map values are equal, else false.
*/ private static boolean isEqual(MapValueImpl lhsMap, MapValueImpl rhsMap, List<ValuePair> checkedValues) { ValuePair compValuePair = new ValuePair(lhsMap, rhsMap); if (checkedValues.contains(compValuePair)) { return true; } checkedValues.add(compValuePair); if (lhsMap.size() != rhsMap.size()) { return false; } if (!lhsMap.keySet().containsAll(rhsMap.keySet())) { return false; } Iterator<Map.Entry<BString, Object>> mapIterator = lhsMap.entrySet().iterator(); while (mapIterator.hasNext()) { Map.Entry<BString, Object> lhsMapEntry = mapIterator.next(); if (!isEqual(lhsMapEntry.getValue(), rhsMap.get(lhsMapEntry.getKey()), checkedValues)) { return false; } } return true; } /** * Deep equality check for a table. * * @param lhsTable Table on the left hand side * @param rhsTable Table on the right hand side * @param checkedValues Structured value pairs already compared or being compared * @return True if the table values are equal, else false. */ private static boolean isEqual(TableValueImpl lhsTable, TableValueImpl rhsTable, List<ValuePair> checkedValues) { ValuePair compValuePair = new ValuePair(lhsTable, rhsTable); if (checkedValues.contains(compValuePair)) { return true; } checkedValues.add(compValuePair); if (lhsTable.size() != rhsTable.size()) { return false; } boolean isLhsKeyedTable = ((BTableType) lhsTable.getType()).getFieldNames() != null && ((BTableType) lhsTable.getType()).getFieldNames().length > 0; boolean isRhsKeyedTable = ((BTableType) rhsTable.getType()).getFieldNames() != null && ((BTableType) rhsTable.getType()).getFieldNames().length > 0; Object[] lhsTableValues = lhsTable.values().toArray(); Object[] rhsTableValues = rhsTable.values().toArray(); if (isLhsKeyedTable == isRhsKeyedTable) { for (int i = 0; i < lhsTableValues.length; i++) { if (!isEqual(lhsTableValues[i], rhsTableValues[i], checkedValues)) { return false; } } return true; } return false; } /** * Deep equality check for error. 
* * @param lhsError The error on the left hand side * @param rhsError The error on the right hand side * @param checkedValues Errors already compared or being compared * @return True if the error values are equal, else false. */ private static boolean isEqual(ErrorValue lhsError, ErrorValue rhsError, List<ValuePair> checkedValues) { ValuePair compValuePair = new ValuePair(lhsError, rhsError); if (checkedValues.contains(compValuePair)) { return true; } checkedValues.add(compValuePair); return isEqual(lhsError.getMessage(), rhsError.getMessage(), checkedValues) && isEqual((MapValueImpl) lhsError.getDetails(), (MapValueImpl) rhsError.getDetails(), checkedValues) && isEqual(lhsError.getCause(), rhsError.getCause(), checkedValues); } /** * Deep equality check for XML Sequence. * * @param lhsXMLSequence The XML sequence on the left hand side * @param rhsXml The XML on the right hand side * @return True if the XML values are equal, else false. */ private static boolean isEqual(XmlSequence lhsXMLSequence, XmlValue rhsXml) { if (rhsXml instanceof XmlSequence) { XmlSequence rhsXMLSequence = (XmlSequence) rhsXml; return isXMLSequenceChildrenEqual(lhsXMLSequence.getChildrenList(), rhsXMLSequence.getChildrenList()); } if (rhsXml instanceof XmlItem) { return lhsXMLSequence.getChildrenList().size() == 1 && isEqual(lhsXMLSequence.getChildrenList().get(0), rhsXml); } return lhsXMLSequence.getChildrenList().isEmpty() && TypeUtils.getType(rhsXml) == PredefinedTypes.TYPE_XML_NEVER; } /** * Deep equality check for XML item. * * @param lhsXMLItem The XML item on the left hand side * @param rhsXml The XML on the right hand side * @return True if the XML values are equal, else false. 
*/ private static boolean isEqual(XmlItem lhsXMLItem, XmlValue rhsXml) { if (rhsXml instanceof XmlItem) { XmlItem rhsXMLItem = (XmlItem) rhsXml; if (!(rhsXMLItem.getQName().equals(lhsXMLItem.getQName()))) { return false; } if (!(rhsXMLItem.getAttributesMap().entrySet().equals(lhsXMLItem.getAttributesMap().entrySet()))) { return false; } return isEqual(rhsXMLItem.getChildrenSeq(), lhsXMLItem.getChildrenSeq()); } if (rhsXml instanceof XmlSequence) { XmlSequence rhsXMLSequence = (XmlSequence) rhsXml; return rhsXMLSequence.getChildrenList().size() == 1 && isEqual(lhsXMLItem, rhsXMLSequence.getChildrenList().get(0)); } return false; } /** * Deep equality check for XML Text. * * @param lhsXMLText The XML text on the left hand side * @param rhsXml The XML on the right hand side * @return True if the XML values are equal, else false. */ private static boolean isEqual(XmlText lhsXMLText, XmlValue rhsXml) { if (rhsXml instanceof XmlText) { XmlText rhsXMLText = (XmlText) rhsXml; return lhsXMLText.getTextValue().equals(rhsXMLText.getTextValue()); } return lhsXMLText.getType() == PredefinedTypes.TYPE_XML_NEVER && rhsXml instanceof XmlSequence && ((XmlSequence) rhsXml).getChildrenList().isEmpty(); } /** * Deep equality check for XML Comment. * * @param lhsXMLComment The XML comment on the left hand side * @param rhsXml The XML on the right hand side * @return True if the XML values are equal, else false. */ private static boolean isEqual(XmlComment lhsXMLComment, XmlValue rhsXml) { if (!(rhsXml instanceof XmlComment)) { return false; } XmlComment rhXMLComment = (XmlComment) rhsXml; return lhsXMLComment.getTextValue().equals(rhXMLComment.getTextValue()); } /** * Deep equality check for XML Processing Instruction. * * @param lhsXMLPi The XML processing instruction on the left hand side * @param rhsXml The XML on the right hand side * @return True if the XML values are equal, else false. 
*/ private static boolean isEqual(XmlPi lhsXMLPi, XmlValue rhsXml) { if (!(rhsXml instanceof XmlPi)) { return false; } XmlPi rhsXMLPi = (XmlPi) rhsXml; return lhsXMLPi.getData().equals(rhsXMLPi.getData()) && lhsXMLPi.getTarget().equals(rhsXMLPi.getTarget()); } private static boolean isXMLSequenceChildrenEqual(List<BXml> lhsList, List<BXml> rhsList) { if (lhsList.size() != rhsList.size()) { return false; } for (int i = 0; i < lhsList.size(); i++) { if (!isEqual(lhsList.get(i), rhsList.get(i))) { return false; } } return true; } /** * Type vector of size two, to hold the source and the target types. * * @since 0.995.0 */ private static class TypePair { Type sourceType; Type targetType; public TypePair(Type sourceType, Type targetType) { this.sourceType = sourceType; this.targetType = targetType; } @Override public boolean equals(Object obj) { if (!(obj instanceof TypePair)) { return false; } TypePair other = (TypePair) obj; return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType); } } /** * Check the reference equality of handle values. * * @param lhsValue The value on the left hand side * @param rhsValue The value on the right hand side * @return True if values are equal, else false. */ private static boolean isHandleValueRefEqual(Object lhsValue, Object rhsValue) { HandleValue lhsHandle = (HandleValue) lhsValue; HandleValue rhsHandle = (HandleValue) rhsValue; return lhsHandle.getValue() == rhsHandle.getValue(); } /** * Unordered value vector of size two, to hold two values being compared. 
* * @since 0.995.0 */ private static class ValuePair { ArrayList<Object> valueList = new ArrayList<>(2); ValuePair(Object valueOne, Object valueTwo) { valueList.add(valueOne); valueList.add(valueTwo); } @Override public boolean equals(Object otherPair) { if (!(otherPair instanceof ValuePair)) { return false; } ArrayList otherList = ((ValuePair) otherPair).valueList; ArrayList currentList = valueList; if (otherList.size() != currentList.size()) { return false; } for (int i = 0; i < otherList.size(); i++) { if (!otherList.get(i).equals(currentList.get(i))) { return false; } } return true; } } /** * Checks whether a given {@link BType} has an implicit initial value or not. * @param type {@link BType} to be analyzed. * @return whether there's an implicit initial value or not. */ public static boolean hasFillerValue(Type type) { return hasFillerValue(type, new ArrayList<>()); } private static boolean hasFillerValue(Type type, List<Type> unanalyzedTypes) { if (type == null) { return true; } if (type.getTag() < TypeTags.RECORD_TYPE_TAG && !(type.getTag() == TypeTags.CHAR_STRING_TAG || type.getTag() == TypeTags.NEVER_TAG)) { return true; } switch (type.getTag()) { case TypeTags.STREAM_TAG: case TypeTags.MAP_TAG: case TypeTags.ANY_TAG: return true; case TypeTags.ARRAY_TAG: return checkFillerValue((BArrayType) type, unanalyzedTypes); case TypeTags.FINITE_TYPE_TAG: return checkFillerValue((BFiniteType) type); case TypeTags.OBJECT_TYPE_TAG: return checkFillerValue((BObjectType) type); case TypeTags.RECORD_TYPE_TAG: return checkFillerValue((BRecordType) type, unanalyzedTypes); case TypeTags.TUPLE_TAG: return checkFillerValue((BTupleType) type, unanalyzedTypes); case TypeTags.UNION_TAG: return checkFillerValue((BUnionType) type, unanalyzedTypes); default: return false; } } private static boolean checkFillerValue(BTupleType tupleType, List<Type> unAnalyzedTypes) { if (unAnalyzedTypes.contains(tupleType)) { return true; } unAnalyzedTypes.add(tupleType); for (Type member : 
tupleType.getTupleTypes()) { if (!hasFillerValue(member, unAnalyzedTypes)) { return false; } } return true; } private static boolean checkFillerValue(BUnionType type, List<Type> unAnalyzedTypes) { if (unAnalyzedTypes.contains(type)) { return true; } unAnalyzedTypes.add(type); if (type.isNullable()) { return true; } Iterator<Type> iterator = type.getMemberTypes().iterator(); Type firstMember; for (firstMember = iterator.next(); iterator.hasNext(); ) { if (!isSameType(firstMember, iterator.next())) { return false; } } return isValueType(firstMember) && hasFillerValue(firstMember); } private static boolean checkFillerValue(BRecordType type, List<Type> unAnalyzedTypes) { if (unAnalyzedTypes.contains(type)) { return true; } unAnalyzedTypes.add(type); for (Field field : type.getFields().values()) { if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.OPTIONAL)) { continue; } if (!SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED)) { continue; } return false; } return true; } private static boolean checkFillerValue(BArrayType type, List<Type> unAnalyzedTypes) { return type.getState() == ArrayState.OPEN || hasFillerValue(type.getElementType(), unAnalyzedTypes); } private static boolean checkFillerValue(BObjectType type) { if (type.getTag() == TypeTags.SERVICE_TAG) { return false; } else { MethodType generatedInitializer = type.generatedInitializer; if (generatedInitializer == null) { return false; } FunctionType initFuncType = generatedInitializer.getType(); boolean noParams = initFuncType.getParameters().length == 0; boolean nilReturn = initFuncType.getReturnType().getTag() == TypeTags.NULL_TAG; return noParams && nilReturn; } } private static boolean checkFillerValue(BFiniteType type) { for (Object value: type.valueSpace) { if (value == null) { return true; } } if (type.valueSpace.size() == 1) { return true; } Object firstElement = type.valueSpace.iterator().next(); for (Object value : type.valueSpace) { if (value.getClass() != firstElement.getClass()) { 
return false; } } if (firstElement instanceof String) { return containsElement(type.valueSpace, "\"\""); } else if (firstElement instanceof Byte || firstElement instanceof Integer || firstElement instanceof Long) { return containsElement(type.valueSpace, "0"); } else if (firstElement instanceof Float || firstElement instanceof Double || firstElement instanceof BigDecimal) { return containsElement(type.valueSpace, "0.0"); } else if (firstElement instanceof Boolean) { return containsElement(type.valueSpace, "false"); } else { return false; } } private static boolean containsElement(Set<Object> valueSpace, String e) { for (Object value : valueSpace) { if (value != null && value.toString().equals(e)) { return true; } } return false; } private static boolean containsType(Set<Object> valueSpace, Type type) { for (Object value : valueSpace) { if (!isSameType(type, getType(value))) { return false; } } return true; } public static Object handleAnydataValues(Object sourceVal, Type targetType) { if (sourceVal != null && !(sourceVal instanceof Number) && !(sourceVal instanceof BString) && !(sourceVal instanceof Boolean) && !(sourceVal instanceof BValue)) { throw ErrorUtils.createJToBTypeCastError(sourceVal.getClass(), targetType); } return sourceVal; } private TypeChecker() { } }
`currentPkgId` is constructed here from the *decoded* org and module names, so it can differ from `pkg.packageID` whenever those identifiers contain encoded characters — worth clarifying which of the two is intended to be used downstream.
/**
 * Rewrites observable async (start) invocations so that the scheduled work is a generated
 * wrapper lambda which calls the actual function synchronously. This lets the observability
 * instrumentation record the real start and end times of the invoked function while
 * preserving the asynchronous behaviour of the original call site.
 *
 * @param func            the function whose basic blocks are scanned for async calls
 * @param attachedTypeDef the type definition the function is attached to, or {@code null}
 *                        for a module-level function
 * @param pkg             the package containing the function
 */
private void rewriteAsyncInvocations(BIRFunction func, BIRTypeDefinition attachedTypeDef, BIRPackage pkg) {
    // Re-build the package ID from the decoded org/module names.
    // NOTE(review): because the identifiers are decoded here, currentPkgId may differ from
    // pkg.packageID when the original names contain encoded characters — TODO confirm intent.
    PackageID packageID = pkg.packageID;
    Name org = new Name(IdentifierUtils.decodeIdentifier(packageID.orgName.getValue()));
    Name module = new Name(IdentifierUtils.decodeIdentifier(packageID.name.getValue()));
    PackageID currentPkgId = new PackageID(org, module, packageID.version);
    // The generated wrapper is registered either at module level or on the attached type.
    BSymbol functionOwner;
    List<BIRFunction> scopeFunctionsList;
    if (attachedTypeDef == null) {
        functionOwner = packageCache.getSymbol(currentPkgId);
        scopeFunctionsList = pkg.functions;
    } else {
        functionOwner = attachedTypeDef.type.tsymbol;
        scopeFunctionsList = attachedTypeDef.attachedFuncs;
    }
    for (BIRBasicBlock currentBB : func.basicBlocks) {
        // Only async calls to observable callees are rewritten.
        if (currentBB.terminator.kind != InstructionKind.ASYNC_CALL
                || !isObservable((AsyncCall) currentBB.terminator)) {
            continue;
        }
        AsyncCall asyncCallIns = (AsyncCall) currentBB.terminator;
        /*
         * The wrapper function generated below invokes the actual function synchronously, allowing the
         * instrumentation to record the actual start and end times of the function. The wrapper function
         * is invoked asynchronously preserving the asynchronous behaviour.
         */
        BType returnType = ((BFutureType) asyncCallIns.lhsOp.variableDcl.type).constraint;
        List<BType> argTypes = asyncCallIns.args.stream()
                .map(arg -> arg.variableDcl.type)
                .collect(Collectors.toList());
        // Unique wrapper name; dots in the callee name are not valid in identifiers.
        Name lambdaName = new Name(String.format("$lambda$observability%d$%s", lambdaIndex++,
                asyncCallIns.name.value.replace(".", "_")));
        BInvokableType bInvokableType = new BInvokableType(argTypes, null, returnType, null);
        BIRFunction desugaredFunc = new BIRFunction(asyncCallIns.pos, lambdaName, 0, bInvokableType,
                func.workerName, 0, null, VIRTUAL);
        desugaredFunc.receiver = func.receiver;
        scopeFunctionsList.add(desugaredFunc);
        // Return variable of the wrapper holds the synchronous call's result.
        BIRVariableDcl funcReturnVariableDcl = new BIRVariableDcl(returnType,
                new Name(String.format("$%s$retVal", lambdaName.value)), VarScope.FUNCTION, VarKind.RETURN);
        BIROperand funcReturnOperand = new BIROperand(funcReturnVariableDcl);
        desugaredFunc.localVars.add(funcReturnVariableDcl);
        desugaredFunc.returnVariable = funcReturnVariableDcl;
        // Symbol for the generated wrapper so it can be resolved like a normal function.
        BInvokableSymbol invokableSymbol = new BInvokableSymbol(SymTag.FUNCTION, 0, lambdaName,
                currentPkgId, bInvokableType, functionOwner, desugaredFunc.pos, VIRTUAL);
        invokableSymbol.retType = funcReturnVariableDcl.type;
        invokableSymbol.kind = SymbolKind.FUNCTION;
        invokableSymbol.params = asyncCallIns.args.stream()
                .map(arg -> new BVarSymbol(0, arg.variableDcl.name, currentPkgId, arg.variableDcl.type,
                        invokableSymbol, arg.pos, VIRTUAL))
                .collect(Collectors.toList());
        invokableSymbol.scope = new Scope(invokableSymbol);
        invokableSymbol.params.forEach(param -> invokableSymbol.scope.define(param.name, param));
        // Attached-function symbols are defined on the type symbol's scope elsewhere,
        // so only module-level wrappers are defined here.
        if (attachedTypeDef == null) {
            functionOwner.scope.define(lambdaName, invokableSymbol);
        }
        // Map each original async-call argument to a parameter of the wrapper.
        List<BIROperand> funcParamOperands = new ArrayList<>();
        Name selfArgName = new Name("%self");
        for (int i = 0; i < asyncCallIns.args.size(); i++) {
            BIROperand arg = asyncCallIns.args.get(i);
            BIRFunctionParameter funcParam;
            if (arg.variableDcl.kind == VarKind.SELF) {
                funcParam = new BIRFunctionParameter(asyncCallIns.pos,
                        arg.variableDcl.type, selfArgName, VarScope.FUNCTION, VarKind.SELF,
                        selfArgName.value, false);
            } else {
                Name argName = new Name(String.format("$funcParam%d", i));
                funcParam = new BIRFunctionParameter(asyncCallIns.pos, arg.variableDcl.type,
                        argName, VarScope.FUNCTION, VarKind.ARG, argName.value, false);
                desugaredFunc.localVars.add(funcParam);
                desugaredFunc.parameters.put(funcParam, Collections.emptyList());
                desugaredFunc.requiredParams.add(new BIRParameter(asyncCallIns.pos, argName, 0));
                desugaredFunc.argsCount++;
            }
            funcParamOperands.add(new BIROperand(funcParam));
        }
        // Wrapper body: a synchronous call to the original callee, then a return.
        BIRBasicBlock callInsBB = insertBasicBlock(desugaredFunc, 0);
        BIRBasicBlock returnInsBB = insertBasicBlock(desugaredFunc, 1);
        callInsBB.terminator = new Call(asyncCallIns.pos, InstructionKind.CALL, asyncCallIns.isVirtual,
                asyncCallIns.calleePkg, asyncCallIns.name, funcParamOperands, funcReturnOperand,
                returnInsBB, asyncCallIns.calleeAnnotAttachments, asyncCallIns.calleeFlags);
        returnInsBB.terminator = new Return(asyncCallIns.pos);
        // Redirect the original async call to target the generated wrapper instead.
        asyncCallIns.name = lambdaName;
        asyncCallIns.calleePkg = currentPkgId;
        asyncCallIns.isVirtual = attachedTypeDef != null;
        if (attachedTypeDef != null) {
            // Attached wrappers receive the receiver explicitly as the first argument.
            asyncCallIns.args.add(0, new BIROperand(new BIRVariableDcl(attachedTypeDef.type, selfArgName,
                    VarScope.FUNCTION, VarKind.SELF)));
        }
    }
}
List<BIRFunction> scopeFunctionsList;
/**
 * Rewrites observable async (start) invocations so that the scheduled work is a generated
 * wrapper lambda which calls the actual function synchronously. This lets the observability
 * instrumentation record the real start and end times of the invoked function while
 * preserving the asynchronous behaviour of the original call site.
 *
 * @param func            the function whose basic blocks are scanned for async calls
 * @param attachedTypeDef the type definition the function is attached to, or {@code null}
 *                        for a module-level function
 * @param pkg             the package containing the function
 */
private void rewriteAsyncInvocations(BIRFunction func, BIRTypeDefinition attachedTypeDef, BIRPackage pkg) {
    // Re-build the package ID from the decoded org/module names.
    // NOTE(review): because the identifiers are decoded here, currentPkgId may differ from
    // pkg.packageID when the original names contain encoded characters — TODO confirm intent.
    PackageID packageID = pkg.packageID;
    Name org = new Name(IdentifierUtils.decodeIdentifier(packageID.orgName.getValue()));
    Name module = new Name(IdentifierUtils.decodeIdentifier(packageID.name.getValue()));
    PackageID currentPkgId = new PackageID(org, module, packageID.version);
    // The generated wrapper is registered either at module level or on the attached type.
    BSymbol functionOwner;
    List<BIRFunction> scopeFunctionsList;
    if (attachedTypeDef == null) {
        functionOwner = packageCache.getSymbol(currentPkgId);
        scopeFunctionsList = pkg.functions;
    } else {
        functionOwner = attachedTypeDef.type.tsymbol;
        scopeFunctionsList = attachedTypeDef.attachedFuncs;
    }
    for (BIRBasicBlock currentBB : func.basicBlocks) {
        // Only async calls to observable callees are rewritten.
        if (currentBB.terminator.kind != InstructionKind.ASYNC_CALL
                || !isObservable((AsyncCall) currentBB.terminator)) {
            continue;
        }
        AsyncCall asyncCallIns = (AsyncCall) currentBB.terminator;
        /*
         * The wrapper function generated below invokes the actual function synchronously, allowing the
         * instrumentation to record the actual start and end times of the function. The wrapper function
         * is invoked asynchronously preserving the asynchronous behaviour.
         */
        BType returnType = ((BFutureType) asyncCallIns.lhsOp.variableDcl.type).constraint;
        List<BType> argTypes = asyncCallIns.args.stream()
                .map(arg -> arg.variableDcl.type)
                .collect(Collectors.toList());
        // Unique wrapper name; dots in the callee name are not valid in identifiers.
        Name lambdaName = new Name(String.format("$lambda$observability%d$%s", lambdaIndex++,
                asyncCallIns.name.value.replace(".", "_")));
        BInvokableType bInvokableType = new BInvokableType(argTypes, null, returnType, null);
        BIRFunction desugaredFunc = new BIRFunction(asyncCallIns.pos, lambdaName, 0, bInvokableType,
                func.workerName, 0, null, VIRTUAL);
        desugaredFunc.receiver = func.receiver;
        scopeFunctionsList.add(desugaredFunc);
        // Return variable of the wrapper holds the synchronous call's result.
    BIRVariableDcl funcReturnVariableDcl = new BIRVariableDcl(returnType,
            new Name(String.format("$%s$retVal", lambdaName.value)), VarScope.FUNCTION, VarKind.RETURN);
        BIROperand funcReturnOperand = new BIROperand(funcReturnVariableDcl);
        desugaredFunc.localVars.add(funcReturnVariableDcl);
        desugaredFunc.returnVariable = funcReturnVariableDcl;
        // Symbol for the generated wrapper so it can be resolved like a normal function.
        BInvokableSymbol invokableSymbol = new BInvokableSymbol(SymTag.FUNCTION, 0, lambdaName,
                currentPkgId, bInvokableType, functionOwner, desugaredFunc.pos, VIRTUAL);
        invokableSymbol.retType = funcReturnVariableDcl.type;
        invokableSymbol.kind = SymbolKind.FUNCTION;
        invokableSymbol.params = asyncCallIns.args.stream()
                .map(arg -> new BVarSymbol(0, arg.variableDcl.name, currentPkgId, arg.variableDcl.type,
                        invokableSymbol, arg.pos, VIRTUAL))
                .collect(Collectors.toList());
        invokableSymbol.scope = new Scope(invokableSymbol);
        invokableSymbol.params.forEach(param -> invokableSymbol.scope.define(param.name, param));
        // Attached-function symbols are defined on the type symbol's scope elsewhere,
        // so only module-level wrappers are defined here.
        if (attachedTypeDef == null) {
            functionOwner.scope.define(lambdaName, invokableSymbol);
        }
        // Map each original async-call argument to a parameter of the wrapper.
        List<BIROperand> funcParamOperands = new ArrayList<>();
        Name selfArgName = new Name("%self");
        for (int i = 0; i < asyncCallIns.args.size(); i++) {
            BIROperand arg = asyncCallIns.args.get(i);
            BIRFunctionParameter funcParam;
            if (arg.variableDcl.kind == VarKind.SELF) {
                funcParam = new BIRFunctionParameter(asyncCallIns.pos,
                        arg.variableDcl.type, selfArgName, VarScope.FUNCTION, VarKind.SELF,
                        selfArgName.value, false);
            } else {
                Name argName = new Name(String.format("$funcParam%d", i));
                funcParam = new BIRFunctionParameter(asyncCallIns.pos, arg.variableDcl.type,
                        argName, VarScope.FUNCTION, VarKind.ARG, argName.value, false);
                desugaredFunc.localVars.add(funcParam);
                desugaredFunc.parameters.put(funcParam, Collections.emptyList());
                desugaredFunc.requiredParams.add(new BIRParameter(asyncCallIns.pos, argName, 0));
                desugaredFunc.argsCount++;
            }
            funcParamOperands.add(new BIROperand(funcParam));
        }
        // Wrapper body: a synchronous call to the original callee, then a return.
        BIRBasicBlock callInsBB = insertBasicBlock(desugaredFunc, 0);
        BIRBasicBlock returnInsBB = insertBasicBlock(desugaredFunc, 1);
        callInsBB.terminator = new Call(asyncCallIns.pos, InstructionKind.CALL, asyncCallIns.isVirtual,
                asyncCallIns.calleePkg, asyncCallIns.name, funcParamOperands, funcReturnOperand,
                returnInsBB, asyncCallIns.calleeAnnotAttachments, asyncCallIns.calleeFlags);
        returnInsBB.terminator = new Return(asyncCallIns.pos);
        // Redirect the original async call to target the generated wrapper instead.
        asyncCallIns.name = lambdaName;
        asyncCallIns.calleePkg = currentPkgId;
        asyncCallIns.isVirtual = attachedTypeDef != null;
        if (attachedTypeDef != null) {
            // Attached wrappers receive the receiver explicitly as the first argument.
            asyncCallIns.args.add(0, new BIROperand(new BIRVariableDcl(attachedTypeDef.type, selfArgName,
                    VarScope.FUNCTION, VarKind.SELF)));
        }
    }
}
class JvmObservabilityGen { private static final String ENTRY_POINT_MAIN_METHOD_NAME = "main"; private static final String NEW_BB_PREFIX = "observabilityDesugaredBB"; private static final String SERVICE_IDENTIFIER = "$$service$"; private static final String ANONYMOUS_SERVICE_IDENTIFIER = "$anonService$"; private static final String INVOCATION_INSTRUMENTATION_TYPE = "invocation"; private static final String FUNC_BODY_INSTRUMENTATION_TYPE = "funcBody"; private static final Location COMPILE_TIME_CONST_POS = new BLangDiagnosticLocation(null, -1, -1, -1, -1); private final PackageCache packageCache; private final SymbolTable symbolTable; private int lambdaIndex; private int desugaredBBIndex; private int constantIndex; private final Map<Object, BIROperand> compileTimeConstants; JvmObservabilityGen(PackageCache packageCache, SymbolTable symbolTable) { this.compileTimeConstants = new HashMap<>(); this.packageCache = packageCache; this.symbolTable = symbolTable; this.lambdaIndex = 0; this.desugaredBBIndex = 0; this.constantIndex = 0; } /** * Instrument the package by rewriting the BIR to add relevant Observability related instructions. 
* * @param pkg The package to instrument */ void instrumentPackage(BIRPackage pkg) { for (int i = 0; i < pkg.functions.size(); i++) { BIRFunction func = pkg.functions.get(i); rewriteAsyncInvocations(func, null, pkg); rewriteObservableFunctionInvocations(func, pkg); if (ENTRY_POINT_MAIN_METHOD_NAME.equals(func.name.value)) { rewriteObservableFunctionBody(func, pkg, false, true, false, StringUtils.EMPTY, func.name.value); } else if ((func.flags & Flags.WORKER) == Flags.WORKER) { rewriteObservableFunctionBody(func, pkg, false, false, true, StringUtils.EMPTY, func.workerName.value); } } for (BIRTypeDefinition typeDef : pkg.typeDefs) { if ((typeDef.flags & Flags.CLASS) != Flags.CLASS && typeDef.type.tag == TypeTags.OBJECT) { continue; } boolean isService = (typeDef.type.flags & Flags.SERVICE) == Flags.SERVICE; for (int i = 0; i < typeDef.attachedFuncs.size(); i++) { BIRFunction func = typeDef.attachedFuncs.get(i); rewriteAsyncInvocations(func, typeDef, pkg); rewriteObservableFunctionInvocations(func, pkg); if (isService && (func.flags & Flags.RESOURCE) == Flags.RESOURCE) { rewriteObservableFunctionBody(func, pkg, true, false, false, cleanUpServiceName(typeDef.name.value), func.name.value); } } } BIRFunction initFunc = pkg.functions.get(0); BIRBasicBlock constInitBB = initFunc.basicBlocks.get(0); for (Map.Entry<Object, BIROperand> entry : compileTimeConstants.entrySet()) { BIROperand operand = entry.getValue(); ConstantLoad constLoadIns = new ConstantLoad(COMPILE_TIME_CONST_POS, entry.getKey(), operand.variableDcl.type, operand); constInitBB.instructions.add(constLoadIns); } } /** * Rewrite the invocations in the function bodies to call a lambda asynchronously which in turn calls the * actual function synchronously. This is done so that the actual invocation can be observed accurately. * * Without this wrapper, the start and end time recorded would only reflect the time it took to give the the async * invocation to the scheduler. 
However, we require the actual time it took for the invocation. * * @param func The function of which the instructions in the body should be rewritten * @param attachedTypeDef The type definition to which the function was attached to or null * @param pkg The package containing the function */ /** * Rewrite a function so that the internal body will be observed. This adds the relevant start and stop calls at * the beginning and return basic blocks of the function. * * This is only to be used in service resource functions, workers and main method. * * This method expects that Observable invocations had already been instrumented properly before this method is * called. This is because the uncaught panics thrown from such observable invocations are reported to the * observation covering the function body by using the re-panic terminators which gets added in * rewriteObservableFunctionInvocations method. * * @param func The function to instrument * @param pkg The package which contains the function * @param isResource True if the function is a service resource * @param isMainEntryPoint True if the function is the main entry point * @param isWorker True if the function was a worker * @param serviceName The name of the service * @param resourceOrAction The name of the resource or action which will be observed */ private void rewriteObservableFunctionBody(BIRFunction func, BIRPackage pkg, boolean isResource, boolean isMainEntryPoint, boolean isWorker, String serviceName, String resourceOrAction) { { BIRBasicBlock startBB = func.basicBlocks.get(0); BIRBasicBlock newStartBB = insertBasicBlock(func, 1); swapBasicBlockContent(startBB, newStartBB); if (isResource) { injectStartResourceObservationCall(startBB, serviceName, resourceOrAction, pkg, func.pos); } else { BIROperand objectTypeOperand = generateGlobalConstantOperand(pkg, symbolTable.nilType, null); injectStartCallableObservationCall(startBB, null, false, isMainEntryPoint, isWorker, objectTypeOperand, resourceOrAction, 
pkg, func.pos); } startBB.terminator.thenBB = newStartBB; } boolean isErrorCheckRequired = isErrorAssignable(func.returnVariable); BIROperand returnValOperand = new BIROperand(func.returnVariable); int i = 1; while (i < func.basicBlocks.size()) { BIRBasicBlock currentBB = func.basicBlocks.get(i); if (currentBB.terminator.kind == InstructionKind.RETURN) { if (isErrorCheckRequired) { BIRBasicBlock errorReportBB = insertBasicBlock(func, i + 1); BIRBasicBlock observeEndBB = insertBasicBlock(func, i + 2); BIRBasicBlock newCurrentBB = insertBasicBlock(func, i + 3); swapBasicBlockTerminator(currentBB, newCurrentBB); injectCheckErrorCalls(currentBB, errorReportBB, observeEndBB, func.localVars, null, returnValOperand, FUNC_BODY_INSTRUMENTATION_TYPE); injectReportErrorCall(errorReportBB, func.localVars, null, returnValOperand, FUNC_BODY_INSTRUMENTATION_TYPE); injectStopObservationCall(observeEndBB, null); observeEndBB.terminator.thenBB = newCurrentBB; errorReportBB.terminator.thenBB = observeEndBB; i += 3; } else { BIRBasicBlock newCurrentBB = insertBasicBlock(func, i + 1); swapBasicBlockTerminator(currentBB, newCurrentBB); injectStopObservationCall(currentBB, null); currentBB.terminator.thenBB = newCurrentBB; i += 1; } } else if (currentBB.terminator.kind == InstructionKind.PANIC) { Panic panicCall = (Panic) currentBB.terminator; BIRBasicBlock observeEndBB = insertBasicBlock(func, i + 1); BIRBasicBlock newCurrentBB = insertBasicBlock(func, i + 2); swapBasicBlockTerminator(currentBB, newCurrentBB); injectReportErrorCall(currentBB, func.localVars, newCurrentBB.terminator.pos, panicCall.errorOp, FUNC_BODY_INSTRUMENTATION_TYPE); injectStopObservationCall(observeEndBB, newCurrentBB.terminator.pos); currentBB.terminator.thenBB = observeEndBB; observeEndBB.terminator.thenBB = newCurrentBB; i += 2; } else if (currentBB.terminator.kind == InstructionKind.CALL || (currentBB.terminator.kind == InstructionKind.FP_CALL && !((FPCall) currentBB.terminator).isAsync)) { /* * Traps for 
errors needs to be injected for each call and fp call separately to avoid messing up the * line numbers in the stack trace shown when a panic is thrown. * * These panic traps are different from the traps added in rewriteObservableFunctionInvocations method, * in the sense that these report the error to the Observation covering the current function this body * belongs to. Also these do not cover the observable calls and fp calls (they are handled using the * panic terminator handling logic) */ Optional<BIRErrorEntry> existingEE = func.errorTable.stream() .filter(errorEntry -> isBBCoveredInErrorEntry(errorEntry, func.basicBlocks, currentBB)) .findAny(); if (existingEE.isEmpty()) { BIRBasicBlock errorCheckBB = insertBasicBlock(func, i + 1); BIRBasicBlock errorReportBB = insertBasicBlock(func, i + 2); BIRBasicBlock observeEndBB = insertBasicBlock(func, i + 3); BIRBasicBlock rePanicBB = insertBasicBlock(func, i + 4); BIRVariableDcl trappedErrorVariableDcl = new BIRVariableDcl(symbolTable.errorType, new Name(String.format("$%s$trappedError", currentBB.id.value)), VarScope.FUNCTION, VarKind.TEMP); func.localVars.add(trappedErrorVariableDcl); BIROperand trappedErrorOperand = new BIROperand(trappedErrorVariableDcl); injectCheckErrorCalls(errorCheckBB, errorReportBB, currentBB.terminator.thenBB, func.localVars, currentBB.terminator.pos, trappedErrorOperand, FUNC_BODY_INSTRUMENTATION_TYPE); injectReportErrorCall(errorReportBB, func.localVars, currentBB.terminator.pos, trappedErrorOperand, FUNC_BODY_INSTRUMENTATION_TYPE); injectStopObservationCall(observeEndBB, currentBB.terminator.pos); rePanicBB.terminator = new Panic(currentBB.terminator.pos, trappedErrorOperand); BIRErrorEntry errorEntry = new BIRErrorEntry(currentBB, currentBB, trappedErrorOperand, errorCheckBB); func.errorTable.add(errorEntry); currentBB.terminator.thenBB = errorCheckBB; errorReportBB.terminator.thenBB = observeEndBB; observeEndBB.terminator.thenBB = rePanicBB; i += 4; } } i++; } } /** * Re-write the 
relevant basic blocks in the list of basic blocks to observe function invocations. * * @param func The function of which the instructions in the body should be instrumented * @param pkg The package which contains the instruction which will be observed */ private void rewriteObservableFunctionInvocations(BIRFunction func, BIRPackage pkg) { int i = 0; while (i < func.basicBlocks.size()) { BIRBasicBlock currentBB = func.basicBlocks.get(i); if (currentBB.terminator.kind == InstructionKind.CALL && isObservable((Call) currentBB.terminator)) { Call callIns = (Call) currentBB.terminator; Location desugaredInsPosition = callIns.pos; BIRBasicBlock observeStartBB = insertBasicBlock(func, i + 1); int newCurrentIndex = i + 2; BIRBasicBlock newCurrentBB = insertBasicBlock(func, newCurrentIndex); swapBasicBlockTerminator(currentBB, newCurrentBB); { BIROperand objectTypeOperand; String action; if (callIns.isVirtual) { objectTypeOperand = callIns.args.get(0); if (callIns.name.value.contains(".")) { String[] split = callIns.name.value.split("\\."); action = split[1]; } else { action = callIns.name.value; } } else { objectTypeOperand = generateGlobalConstantOperand(pkg, symbolTable.nilType, null); action = callIns.name.value; } currentBB.terminator = new GOTO(desugaredInsPosition, observeStartBB); BIRBasicBlock observeEndBB; boolean isRemote = callIns.calleeFlags.contains(Flag.REMOTE); Location originalInsPos = callIns.pos; if (isErrorAssignable(callIns.lhsOp.variableDcl)) { BIRBasicBlock errorCheckBB = insertBasicBlock(func, i + 3); BIRBasicBlock errorReportBB = insertBasicBlock(func, i + 4); observeEndBB = insertBasicBlock(func, i + 5); injectStartCallableObservationCall(observeStartBB, desugaredInsPosition, isRemote, false, false, objectTypeOperand, action, pkg, originalInsPos); injectCheckErrorCalls(errorCheckBB, errorReportBB, observeEndBB, func.localVars, desugaredInsPosition, callIns.lhsOp, INVOCATION_INSTRUMENTATION_TYPE); injectReportErrorCall(errorReportBB, func.localVars, 
desugaredInsPosition, callIns.lhsOp, INVOCATION_INSTRUMENTATION_TYPE); injectStopObservationCall(observeEndBB, desugaredInsPosition); observeEndBB.terminator.thenBB = newCurrentBB.terminator.thenBB; errorReportBB.terminator.thenBB = observeEndBB; newCurrentBB.terminator.thenBB = errorCheckBB; observeStartBB.terminator.thenBB = newCurrentBB; i += 5; } else { observeEndBB = insertBasicBlock(func, i + 3); injectStartCallableObservationCall(observeStartBB, desugaredInsPosition, isRemote, false, false, objectTypeOperand, action, pkg, originalInsPos); injectStopObservationCall(observeEndBB, desugaredInsPosition); observeEndBB.terminator.thenBB = newCurrentBB.terminator.thenBB; newCurrentBB.terminator.thenBB = observeEndBB; observeStartBB.terminator.thenBB = newCurrentBB; i += 3; } fixErrorTable(func, currentBB, observeEndBB); } { /* * Adding panic traps for the invocations. These report the error to the Observation covering * the invocation. */ Optional<BIRErrorEntry> existingEE = func.errorTable.stream() .filter(errorEntry -> isBBCoveredInErrorEntry(errorEntry, func.basicBlocks, newCurrentBB)) .findAny(); Location desugaredInsPos = callIns.pos; if (existingEE.isPresent()) { BIRErrorEntry errorEntry = existingEE.get(); int eeTargetIndex = func.basicBlocks.indexOf(errorEntry.targetBB); if (eeTargetIndex == -1) { throw new BLangCompilerException("Invalid Error Entry pointing to non-existent " + "target Basic Block " + errorEntry.targetBB.id); } BIRBasicBlock errorReportBB = insertBasicBlock(func, eeTargetIndex + 1); BIRBasicBlock observeEndBB = insertBasicBlock(func, eeTargetIndex + 2); BIRBasicBlock newTargetBB = insertBasicBlock(func, eeTargetIndex + 3); swapBasicBlockContent(errorEntry.targetBB, newTargetBB); injectCheckErrorCalls(errorEntry.targetBB, errorReportBB, newTargetBB, func.localVars, desugaredInsPos, errorEntry.errorOp, INVOCATION_INSTRUMENTATION_TYPE); injectReportErrorCall(errorReportBB, func.localVars, desugaredInsPos, errorEntry.errorOp, 
INVOCATION_INSTRUMENTATION_TYPE); injectStopObservationCall(observeEndBB, desugaredInsPos); errorReportBB.terminator.thenBB = observeEndBB; observeEndBB.terminator.thenBB = newTargetBB; fixErrorTable(func, errorEntry.targetBB, newTargetBB); } else { BIRBasicBlock errorCheckBB = insertBasicBlock(func, newCurrentIndex + 1); BIRBasicBlock errorReportBB = insertBasicBlock(func, newCurrentIndex + 2); BIRBasicBlock observeEndBB = insertBasicBlock(func, newCurrentIndex + 3); BIRBasicBlock rePanicBB = insertBasicBlock(func, newCurrentIndex + 4); BIRVariableDcl trappedErrorVariableDcl = new BIRVariableDcl(symbolTable.errorType, new Name(String.format("$%s$trappedError", newCurrentBB.id.value)), VarScope.FUNCTION, VarKind.TEMP); func.localVars.add(trappedErrorVariableDcl); BIROperand trappedErrorOperand = new BIROperand(trappedErrorVariableDcl); injectCheckErrorCalls(errorCheckBB, errorReportBB, newCurrentBB.terminator.thenBB, func.localVars, newCurrentBB.terminator.pos, trappedErrorOperand, INVOCATION_INSTRUMENTATION_TYPE); injectReportErrorCall(errorReportBB, func.localVars, newCurrentBB.terminator.pos, trappedErrorOperand, INVOCATION_INSTRUMENTATION_TYPE); injectStopObservationCall(observeEndBB, newCurrentBB.terminator.pos); rePanicBB.terminator = new Panic(newCurrentBB.terminator.pos, trappedErrorOperand); BIRErrorEntry errorEntry = new BIRErrorEntry(newCurrentBB, newCurrentBB, trappedErrorOperand, errorCheckBB); func.errorTable.add(errorEntry); newCurrentBB.terminator.thenBB = errorCheckBB; errorReportBB.terminator.thenBB = observeEndBB; observeEndBB.terminator.thenBB = rePanicBB; i += 4; } } } i += 1; } } /** * Inject start observation call to a basic block. 
* @param observeStartBB The basic block to which the start observation call should be injected * @param serviceName The service to which the instruction was attached to * @param resource The name of the resource which will be observed * @param pkg The package the invocation belongs to * @param originalInsPosition The source code position of the invocation */ private void injectStartResourceObservationCall(BIRBasicBlock observeStartBB, String serviceName, String resource, BIRPackage pkg, Location originalInsPosition) { String pkgId = generatePackageId(pkg.packageID); String position = generatePositionId(originalInsPosition); BIROperand serviceNameOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, serviceName); BIROperand resourceOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, resource); BIROperand pkgOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, pkgId); BIROperand originalInsPosOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, position); JIMethodCall observeStartCallTerminator = new JIMethodCall(null); observeStartCallTerminator.invocationType = INVOKESTATIC; observeStartCallTerminator.jClassName = OBSERVE_UTILS; observeStartCallTerminator.jMethodVMSig = String.format("(L%s;L%s;L%s;L%s;L%s;)V", BAL_ENV, B_STRING_VALUE, B_STRING_VALUE, B_STRING_VALUE, B_STRING_VALUE); observeStartCallTerminator.name = START_RESOURCE_OBSERVATION_METHOD; observeStartCallTerminator.args = Arrays.asList(serviceNameOperand, resourceOperand, pkgOperand, originalInsPosOperand); observeStartBB.terminator = observeStartCallTerminator; } /** * Inject start observation call to a basic block. 
* * @param observeStartBB The basic block to which the start observation call should be injected * @param desugaredInsLocation The position of all instructions, variables declarations, terminators to be generated * @param isRemote True if a remote function will be observed by the observation * @param isMainEntryPoint True if the main function will be observed by the observation * @param isWorker True if a worker function will be observed by the observation * @param objectOperand The object the function was attached to * @param action The name of the action which will be observed * @param pkg The package the invocation belongs to * @param originalInsPosition The source code position of the invocation */ private void injectStartCallableObservationCall(BIRBasicBlock observeStartBB, Location desugaredInsLocation, boolean isRemote, boolean isMainEntryPoint, boolean isWorker, BIROperand objectOperand, String action, BIRPackage pkg, Location originalInsPosition) { String pkgId = generatePackageId(pkg.packageID); String position = generatePositionId(originalInsPosition); BIROperand isRemoteOperand = generateGlobalConstantOperand(pkg, symbolTable.booleanType, isRemote); BIROperand isMainEntryPointOperand = generateGlobalConstantOperand(pkg, symbolTable.booleanType, isMainEntryPoint); BIROperand isWorkerOperand = generateGlobalConstantOperand(pkg, symbolTable.booleanType, isWorker); BIROperand pkgOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, pkgId); BIROperand originalInsPosOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, position); BIROperand actionOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, action); JIMethodCall observeStartCallTerminator = new JIMethodCall(desugaredInsLocation); observeStartCallTerminator.invocationType = INVOKESTATIC; observeStartCallTerminator.jClassName = OBSERVE_UTILS; observeStartCallTerminator.jMethodVMSig = String.format("(L%s;ZZZL%s;L%s;L%s;L%s;)V", BAL_ENV, B_OBJECT, 
B_STRING_VALUE, B_STRING_VALUE, B_STRING_VALUE); observeStartCallTerminator.name = START_CALLABLE_OBSERVATION_METHOD; observeStartCallTerminator.args = Arrays.asList(isRemoteOperand, isMainEntryPointOperand, isWorkerOperand, objectOperand, actionOperand, pkgOperand, originalInsPosOperand); observeStartBB.terminator = observeStartCallTerminator; } /** * Inject branch condition for checking if a value is an error. * * @param errorCheckBB The basic block to which the error check should be injected * @param isErrorBB The basic block to which errors should go to * @param noErrorBB The basic block to which no errors should go to * @param scopeVarList The variables list in the scope * @param pos The position of all instructions, variables declarations, terminators, etc. * @param valueOperand Operand for passing the value which should be checked if it is an error * @param uniqueId A unique ID to identify the check error call */ private void injectCheckErrorCalls(BIRBasicBlock errorCheckBB, BIRBasicBlock isErrorBB, BIRBasicBlock noErrorBB, Collection<BIRVariableDcl> scopeVarList, Location pos, BIROperand valueOperand, String uniqueId) { BIRVariableDcl isErrorVariableDcl = new BIRVariableDcl(symbolTable.booleanType, new Name(String.format("$%s$%s$isError", uniqueId, errorCheckBB.id.value)), VarScope.FUNCTION, VarKind.TEMP); scopeVarList.add(isErrorVariableDcl); BIROperand isErrorOperand = new BIROperand(isErrorVariableDcl); TypeTest errorTypeTestInstruction = new TypeTest(pos, symbolTable.errorType, isErrorOperand, valueOperand); errorCheckBB.instructions.add(errorTypeTestInstruction); errorCheckBB.terminator = new Branch(pos, isErrorOperand, isErrorBB, noErrorBB); } /** * Inject report error call. * * @param errorReportBB The basic block to which the report error call should be injected * @param scopeVarList The variables list in the scope * @param pos The position of all instructions, variables declarations, terminators, etc. 
* @param errorOperand Operand for passing the error * @param uniqueId A unique ID to identify the check error call */ private void injectReportErrorCall(BIRBasicBlock errorReportBB, Collection<BIRVariableDcl> scopeVarList, Location pos, BIROperand errorOperand, String uniqueId) { BIRVariableDcl castedErrorVariableDcl = new BIRVariableDcl(symbolTable.errorType, new Name(String.format("$%s$%s$castedError", uniqueId, errorReportBB.id.value)), VarScope.FUNCTION, VarKind.TEMP); scopeVarList.add(castedErrorVariableDcl); BIROperand castedErrorOperand = new BIROperand(castedErrorVariableDcl); TypeCast errorCastInstruction = new TypeCast(pos, castedErrorOperand, errorOperand, symbolTable.errorType, false); errorReportBB.instructions.add(errorCastInstruction); JIMethodCall reportErrorCallTerminator = new JIMethodCall(pos); reportErrorCallTerminator.invocationType = INVOKESTATIC; reportErrorCallTerminator.jClassName = OBSERVE_UTILS; reportErrorCallTerminator.jMethodVMSig = String.format("(L%s;L%s;)V", BAL_ENV, ERROR_VALUE); reportErrorCallTerminator.name = REPORT_ERROR_METHOD; reportErrorCallTerminator.args = Collections.singletonList(castedErrorOperand); errorReportBB.terminator = reportErrorCallTerminator; } /** * Inject a stop observation call to a basic block. * * @param observeEndBB The basic block to which the stop observation call should be injected * @param pos The position of all instructions, variables declarations, terminators, etc. 
*/ private void injectStopObservationCall(BIRBasicBlock observeEndBB, Location pos) { JIMethodCall observeEndCallTerminator = new JIMethodCall(pos); observeEndCallTerminator.invocationType = INVOKESTATIC; observeEndCallTerminator.jClassName = OBSERVE_UTILS; observeEndCallTerminator.jMethodVMSig = String.format("(L%s;)V", BAL_ENV); observeEndCallTerminator.name = STOP_OBSERVATION_METHOD; observeEndCallTerminator.args = Collections.emptyList(); observeEndBB.terminator = observeEndCallTerminator; } /** * Generate a constant operand from a compile-time known value. * * @param pkg The package which should contain the constant * @param constantType The type of the constant * @param constantValue The constant value which should end up being passed in the operand * @return The generated operand which will pass the constant */ private BIROperand generateGlobalConstantOperand(BIRPackage pkg, BType constantType, Object constantValue) { return compileTimeConstants.computeIfAbsent(constantValue, k -> { PackageID pkgId = pkg.packageID; BIRGlobalVariableDcl constLoadVariableDcl = new BIRGlobalVariableDcl(COMPILE_TIME_CONST_POS, 0, constantType, pkgId, new Name("$observabilityConst" + constantIndex++), VarScope.GLOBAL, VarKind.CONSTANT, "", VIRTUAL); pkg.globalVars.add(constLoadVariableDcl); return new BIROperand(constLoadVariableDcl); }); } /** * Create and insert a new basic block into a function in the specified index. * * @param func The function to which the basic block should be injected * @param insertIndex The index at which the basic block should be injected * @return The injected new BB */ private BIRBasicBlock insertBasicBlock(BIRFunction func, int insertIndex) { BIRBasicBlock newBB = new BIRBasicBlock(new Name(NEW_BB_PREFIX + desugaredBBIndex++)); func.basicBlocks.add(insertIndex, newBB); return newBB; } /** * Swap the effective content of two basic blocks. 
* * @param firstBB The first BB of which content should end up in second BB * @param secondBB The second BB of which content should end up in first BB */ private void swapBasicBlockContent(BIRBasicBlock firstBB, BIRBasicBlock secondBB) { List<BIRNonTerminator> firstBBInstructions = firstBB.instructions; firstBB.instructions = secondBB.instructions; secondBB.instructions = firstBBInstructions; swapBasicBlockTerminator(firstBB, secondBB); } /** * Swap the terminators of two basic blocks. * * @param firstBB The first BB of which terminator should end up in second BB * @param secondBB The second BB of which terminator should end up in first BB */ private void swapBasicBlockTerminator(BIRBasicBlock firstBB, BIRBasicBlock secondBB) { BIRTerminator firstBBTerminator = firstBB.terminator; firstBB.terminator = secondBB.terminator; secondBB.terminator = firstBBTerminator; } /** * Fix the ending BB of error entries in the error table of a function. * * When desugar instructions were added after the original BB, * where the original BB is a trap ending BB, the new trap ending BBs changes. * This needs to be adjusted properly. * * @param func The function of which the error table should be fixed * @param oldBB The old ending BB of error entries to be fixed * @param newBB The new ending BB which should be updated to in the error entries to be fixed */ private void fixErrorTable(BIRFunction func, BIRBasicBlock oldBB, BIRBasicBlock newBB) { for (BIRErrorEntry errorEntry : func.errorTable) { if (errorEntry.endBB == oldBB) { errorEntry.endBB = newBB; } } } /** * Check if a call instruction is observable. 
* * @param callIns The call instruction to check * @return True if the call instruction is observable */ private boolean isObservable(Call callIns) { boolean isRemote = callIns.calleeFlags.contains(Flag.REMOTE); boolean isObservableAnnotationPresent = false; for (BIRAnnotationAttachment annot : callIns.calleeAnnotAttachments) { if (OBSERVABLE_ANNOTATION.equals( JvmCodeGenUtil.getPackageName( new PackageID(annot.packageID.orgName, annot.packageID.name, Names.EMPTY)) + annot.annotTagRef.value)) { isObservableAnnotationPresent = true; break; } } return isRemote || isObservableAnnotationPresent; } /** * Check is an error is assignable to a variable declaration. * * @param variableDcl The variable declaration which should be checked. * @return True if an error can be assigned and false otherwise */ private boolean isErrorAssignable(BIRVariableDcl variableDcl) { boolean isErrorAssignable = false; if (variableDcl.type instanceof BUnionType) { BUnionType returnUnionType = (BUnionType) variableDcl.type; isErrorAssignable = returnUnionType.getMemberTypes().stream() .anyMatch(type -> type instanceof BErrorType); } else if (variableDcl.type instanceof BErrorType) { isErrorAssignable = true; } return isErrorAssignable; } /** * Check if a basic block is covered in a error entry. 
* * @param errorEntry The error entry from the error table * @param basicBlocksList The basic blocks list which contains the basic block to be checked for * @param basicBlock The basic block which should be checked for * @return True if the basic block is covered in the error entry */ private boolean isBBCoveredInErrorEntry(BIRErrorEntry errorEntry, List<BIRBasicBlock> basicBlocksList, BIRBasicBlock basicBlock) { boolean isCovered = Objects.equals(basicBlock, errorEntry.trapBB) || Objects.equals(basicBlock, errorEntry.endBB); if (!isCovered) { /* * Traverse in the same way MethodGen.generateBasicBlocks traverses through basic blocks to generate * method body to check if the basic block is covered in the error entry. */ int i = 0; for (; i < basicBlocksList.size(); i++) { BIRBasicBlock currentBB = basicBlocksList.get(i); if (currentBB == errorEntry.trapBB) { break; } } for (; i < basicBlocksList.size(); i++) { BIRBasicBlock currentBB = basicBlocksList.get(i); if (currentBB == basicBlock) { isCovered = true; break; } if (currentBB == errorEntry.endBB) { break; } } } return isCovered; } /** * Remove the additional prefixes and postfixes added by the compiler. * This is done to get the original name used by the developer. * * @param serviceName The service name to be cleaned up * @return The cleaned up service name which should be equal to the name given by the developer */ private String cleanUpServiceName(String serviceName) { if (serviceName.contains(SERVICE_IDENTIFIER)) { return serviceName.substring(0, serviceName.indexOf(SERVICE_IDENTIFIER)); } return serviceName; } /** * Generate a ID for a source code position. * * @param pos The position for which the ID should be generated * @return The generated ID */ private String generatePositionId(Location pos) { return String.format("%s:%d:%d", pos.lineRange().filePath(), pos.lineRange().startLine().line() + 1, pos.lineRange().startLine().offset() + 1); } /** * Generate a ID for a ballerina module. 
* * @param pkg The module for which the ID should be generated * @return The generated ID */ private String generatePackageId(PackageID pkg) { return String.format("%s/%s:%s", pkg.orgName.value, pkg.name.value, pkg.version.value); } }
class JvmObservabilityGen { private static final String ENTRY_POINT_MAIN_METHOD_NAME = "main"; private static final String NEW_BB_PREFIX = "observabilityDesugaredBB"; private static final String SERVICE_IDENTIFIER = "$$service$"; private static final String INVOCATION_INSTRUMENTATION_TYPE = "invocation"; private static final String FUNC_BODY_INSTRUMENTATION_TYPE = "funcBody"; private static final Location COMPILE_TIME_CONST_POS = new BLangDiagnosticLocation(null, -1, -1, -1, -1); private final PackageCache packageCache; private final SymbolTable symbolTable; private int lambdaIndex; private int desugaredBBIndex; private int constantIndex; private final Map<Object, BIROperand> compileTimeConstants; JvmObservabilityGen(PackageCache packageCache, SymbolTable symbolTable) { this.compileTimeConstants = new HashMap<>(); this.packageCache = packageCache; this.symbolTable = symbolTable; this.lambdaIndex = 0; this.desugaredBBIndex = 0; this.constantIndex = 0; } /** * Instrument the package by rewriting the BIR to add relevant Observability related instructions. 
* * @param pkg The package to instrument */ void instrumentPackage(BIRPackage pkg) { for (int i = 0; i < pkg.functions.size(); i++) { BIRFunction func = pkg.functions.get(i); rewriteAsyncInvocations(func, null, pkg); rewriteObservableFunctionInvocations(func, pkg); if (ENTRY_POINT_MAIN_METHOD_NAME.equals(func.name.value)) { rewriteObservableFunctionBody(func, pkg, null, func.name.value, false, false, true, false); } else if ((func.flags & Flags.WORKER) == Flags.WORKER) { rewriteObservableFunctionBody(func, pkg, null, func.workerName.value, false, false, false, true); } } for (BIRTypeDefinition typeDef : pkg.typeDefs) { if ((typeDef.flags & Flags.CLASS) != Flags.CLASS && typeDef.type.tag == TypeTags.OBJECT) { continue; } boolean isService = (typeDef.type.flags & Flags.SERVICE) == Flags.SERVICE; for (int i = 0; i < typeDef.attachedFuncs.size(); i++) { BIRFunction func = typeDef.attachedFuncs.get(i); rewriteAsyncInvocations(func, typeDef, pkg); rewriteObservableFunctionInvocations(func, pkg); if (isService) { if ((func.flags & Flags.RESOURCE) == Flags.RESOURCE) { rewriteObservableFunctionBody(func, pkg, typeDef, func.name.value, true, false, false, false); } else if ((func.flags & Flags.REMOTE) == Flags.REMOTE) { rewriteObservableFunctionBody(func, pkg, typeDef, func.name.value, false, true, false, false); } } } } BIRFunction initFunc = pkg.functions.get(0); BIRBasicBlock constInitBB = initFunc.basicBlocks.get(0); for (Map.Entry<Object, BIROperand> entry : compileTimeConstants.entrySet()) { BIROperand operand = entry.getValue(); ConstantLoad constLoadIns = new ConstantLoad(COMPILE_TIME_CONST_POS, entry.getKey(), operand.variableDcl.type, operand); constInitBB.instructions.add(constLoadIns); } } /** * Rewrite the invocations in the function bodies to call a lambda asynchronously which in turn calls the * actual function synchronously. This is done so that the actual invocation can be observed accurately. 
* * Without this wrapper, the start and end time recorded would only reflect the time it took to give the the async * invocation to the scheduler. However, we require the actual time it took for the invocation. * * @param func The function of which the instructions in the body should be rewritten * @param attachedTypeDef The type definition to which the function was attached to or null * @param pkg The package containing the function */ /** * Rewrite a function so that the internal body will be observed. This adds the relevant start and stop calls at * the beginning and return basic blocks of the function. * * This is only to be used in service resource functions, workers and main method. * * This method expects that Observable invocations had already been instrumented properly before this method is * called. This is because the uncaught panics thrown from such observable invocations are reported to the * observation covering the function body by using the re-panic terminators which gets added in * rewriteObservableFunctionInvocations method. 
* * @param func The function to instrument * @param pkg The package which contains the function * @param attachedTypeDef The type definition the function is attached to * @param functionName The name of the function which will be observed * @param isResource True if the function is a resource function * @param isRemote True if the function is a remote function * @param isMainEntryPoint True if the function is the main entry point * @param isWorker True if the function was a worker */ private void rewriteObservableFunctionBody(BIRFunction func, BIRPackage pkg, BIRTypeDefinition attachedTypeDef, String functionName, boolean isResource, boolean isRemote, boolean isMainEntryPoint, boolean isWorker) { { BIRBasicBlock startBB = func.basicBlocks.get(0); BIRBasicBlock newStartBB = insertBasicBlock(func, 1); swapBasicBlockContent(startBB, newStartBB); if (isResource || isRemote) { String serviceName = cleanUpServiceName(attachedTypeDef.name.value); String resourcePathOrFunction = functionName; String resourceAccessor = null; if (isResource) { for (BAttachedFunction attachedFunc : ((BClassSymbol) attachedTypeDef.type.tsymbol).attachedFuncs) { if (Objects.equals(attachedFunc.funcName.value, functionName)) { BResourceFunction resourceFunction = (BResourceFunction) attachedFunc; StringBuilder resourcePathOrFunctionBuilder = new StringBuilder(); for (Name name : resourceFunction.resourcePath) { resourcePathOrFunctionBuilder.append("/").append(name.value); } resourcePathOrFunction = resourcePathOrFunctionBuilder.toString(); resourceAccessor = resourceFunction.accessor.value; break; } } } injectStartResourceObservationCall(startBB, serviceName, resourcePathOrFunction, resourceAccessor, isResource, isRemote, pkg, func.pos); } else { BIROperand objectTypeOperand = generateGlobalConstantOperand(pkg, symbolTable.nilType, null); injectStartCallableObservationCall(startBB, null, false, isMainEntryPoint, isWorker, objectTypeOperand, functionName, pkg, func.pos); } 
startBB.terminator.thenBB = newStartBB; } boolean isErrorCheckRequired = isErrorAssignable(func.returnVariable); BIROperand returnValOperand = new BIROperand(func.returnVariable); int i = 1; while (i < func.basicBlocks.size()) { BIRBasicBlock currentBB = func.basicBlocks.get(i); if (currentBB.terminator.kind == InstructionKind.RETURN) { if (isErrorCheckRequired) { BIRBasicBlock errorReportBB = insertBasicBlock(func, i + 1); BIRBasicBlock observeEndBB = insertBasicBlock(func, i + 2); BIRBasicBlock newCurrentBB = insertBasicBlock(func, i + 3); swapBasicBlockTerminator(currentBB, newCurrentBB); injectCheckErrorCalls(currentBB, errorReportBB, observeEndBB, func.localVars, null, returnValOperand, FUNC_BODY_INSTRUMENTATION_TYPE); injectReportErrorCall(errorReportBB, func.localVars, null, returnValOperand, FUNC_BODY_INSTRUMENTATION_TYPE); injectStopObservationCall(observeEndBB, null); observeEndBB.terminator.thenBB = newCurrentBB; errorReportBB.terminator.thenBB = observeEndBB; i += 3; } else { BIRBasicBlock newCurrentBB = insertBasicBlock(func, i + 1); swapBasicBlockTerminator(currentBB, newCurrentBB); injectStopObservationCall(currentBB, null); currentBB.terminator.thenBB = newCurrentBB; i += 1; } } else if (currentBB.terminator.kind == InstructionKind.PANIC) { Panic panicCall = (Panic) currentBB.terminator; BIRBasicBlock observeEndBB = insertBasicBlock(func, i + 1); BIRBasicBlock newCurrentBB = insertBasicBlock(func, i + 2); swapBasicBlockTerminator(currentBB, newCurrentBB); injectReportErrorCall(currentBB, func.localVars, newCurrentBB.terminator.pos, panicCall.errorOp, FUNC_BODY_INSTRUMENTATION_TYPE); injectStopObservationCall(observeEndBB, newCurrentBB.terminator.pos); currentBB.terminator.thenBB = observeEndBB; observeEndBB.terminator.thenBB = newCurrentBB; i += 2; } else if (currentBB.terminator.kind == InstructionKind.CALL || (currentBB.terminator.kind == InstructionKind.FP_CALL && !((FPCall) currentBB.terminator).isAsync)) { /* * Traps for errors needs to be 
injected for each call and fp call separately to avoid messing up the * line numbers in the stack trace shown when a panic is thrown. * * These panic traps are different from the traps added in rewriteObservableFunctionInvocations method, * in the sense that these report the error to the Observation covering the current function this body * belongs to. Also these do not cover the observable calls and fp calls (they are handled using the * panic terminator handling logic) */ Optional<BIRErrorEntry> existingEE = func.errorTable.stream() .filter(errorEntry -> isBBCoveredInErrorEntry(errorEntry, func.basicBlocks, currentBB)) .findAny(); if (existingEE.isEmpty()) { BIRBasicBlock errorCheckBB = insertBasicBlock(func, i + 1); BIRBasicBlock errorReportBB = insertBasicBlock(func, i + 2); BIRBasicBlock observeEndBB = insertBasicBlock(func, i + 3); BIRBasicBlock rePanicBB = insertBasicBlock(func, i + 4); BIRVariableDcl trappedErrorVariableDcl = new BIRVariableDcl(symbolTable.errorType, new Name(String.format("$%s$trappedError", currentBB.id.value)), VarScope.FUNCTION, VarKind.TEMP); func.localVars.add(trappedErrorVariableDcl); BIROperand trappedErrorOperand = new BIROperand(trappedErrorVariableDcl); injectCheckErrorCalls(errorCheckBB, errorReportBB, currentBB.terminator.thenBB, func.localVars, currentBB.terminator.pos, trappedErrorOperand, FUNC_BODY_INSTRUMENTATION_TYPE); injectReportErrorCall(errorReportBB, func.localVars, currentBB.terminator.pos, trappedErrorOperand, FUNC_BODY_INSTRUMENTATION_TYPE); injectStopObservationCall(observeEndBB, currentBB.terminator.pos); rePanicBB.terminator = new Panic(currentBB.terminator.pos, trappedErrorOperand); BIRErrorEntry errorEntry = new BIRErrorEntry(currentBB, currentBB, trappedErrorOperand, errorCheckBB); func.errorTable.add(errorEntry); currentBB.terminator.thenBB = errorCheckBB; errorReportBB.terminator.thenBB = observeEndBB; observeEndBB.terminator.thenBB = rePanicBB; i += 4; } } i++; } } /** * Re-write the relevant basic blocks 
in the list of basic blocks to observe function invocations. * * @param func The function of which the instructions in the body should be instrumented * @param pkg The package which contains the instruction which will be observed */ private void rewriteObservableFunctionInvocations(BIRFunction func, BIRPackage pkg) { int i = 0; while (i < func.basicBlocks.size()) { BIRBasicBlock currentBB = func.basicBlocks.get(i); if (currentBB.terminator.kind == InstructionKind.CALL && isObservable((Call) currentBB.terminator)) { Call callIns = (Call) currentBB.terminator; Location desugaredInsPosition = callIns.pos; BIRBasicBlock observeStartBB = insertBasicBlock(func, i + 1); int newCurrentIndex = i + 2; BIRBasicBlock newCurrentBB = insertBasicBlock(func, newCurrentIndex); swapBasicBlockTerminator(currentBB, newCurrentBB); { BIROperand objectTypeOperand; String action; if (callIns.isVirtual) { objectTypeOperand = callIns.args.get(0); if (callIns.name.value.contains(".")) { String[] split = callIns.name.value.split("\\."); action = split[1]; } else { action = callIns.name.value; } } else { objectTypeOperand = generateGlobalConstantOperand(pkg, symbolTable.nilType, null); action = callIns.name.value; } currentBB.terminator = new GOTO(desugaredInsPosition, observeStartBB); BIRBasicBlock observeEndBB; boolean isRemote = callIns.calleeFlags.contains(Flag.REMOTE); Location originalInsPos = callIns.pos; if (isErrorAssignable(callIns.lhsOp.variableDcl)) { BIRBasicBlock errorCheckBB = insertBasicBlock(func, i + 3); BIRBasicBlock errorReportBB = insertBasicBlock(func, i + 4); observeEndBB = insertBasicBlock(func, i + 5); injectStartCallableObservationCall(observeStartBB, desugaredInsPosition, isRemote, false, false, objectTypeOperand, action, pkg, originalInsPos); injectCheckErrorCalls(errorCheckBB, errorReportBB, observeEndBB, func.localVars, desugaredInsPosition, callIns.lhsOp, INVOCATION_INSTRUMENTATION_TYPE); injectReportErrorCall(errorReportBB, func.localVars, desugaredInsPosition, 
callIns.lhsOp, INVOCATION_INSTRUMENTATION_TYPE); injectStopObservationCall(observeEndBB, desugaredInsPosition); observeEndBB.terminator.thenBB = newCurrentBB.terminator.thenBB; errorReportBB.terminator.thenBB = observeEndBB; newCurrentBB.terminator.thenBB = errorCheckBB; observeStartBB.terminator.thenBB = newCurrentBB; i += 5; } else { observeEndBB = insertBasicBlock(func, i + 3); injectStartCallableObservationCall(observeStartBB, desugaredInsPosition, isRemote, false, false, objectTypeOperand, action, pkg, originalInsPos); injectStopObservationCall(observeEndBB, desugaredInsPosition); observeEndBB.terminator.thenBB = newCurrentBB.terminator.thenBB; newCurrentBB.terminator.thenBB = observeEndBB; observeStartBB.terminator.thenBB = newCurrentBB; i += 3; } fixErrorTable(func, currentBB, observeEndBB); } { /* * Adding panic traps for the invocations. These report the error to the Observation covering * the invocation. */ Optional<BIRErrorEntry> existingEE = func.errorTable.stream() .filter(errorEntry -> isBBCoveredInErrorEntry(errorEntry, func.basicBlocks, newCurrentBB)) .findAny(); Location desugaredInsPos = callIns.pos; if (existingEE.isPresent()) { BIRErrorEntry errorEntry = existingEE.get(); int eeTargetIndex = func.basicBlocks.indexOf(errorEntry.targetBB); if (eeTargetIndex == -1) { throw new BLangCompilerException("Invalid Error Entry pointing to non-existent " + "target Basic Block " + errorEntry.targetBB.id); } BIRBasicBlock errorReportBB = insertBasicBlock(func, eeTargetIndex + 1); BIRBasicBlock observeEndBB = insertBasicBlock(func, eeTargetIndex + 2); BIRBasicBlock newTargetBB = insertBasicBlock(func, eeTargetIndex + 3); swapBasicBlockContent(errorEntry.targetBB, newTargetBB); injectCheckErrorCalls(errorEntry.targetBB, errorReportBB, newTargetBB, func.localVars, desugaredInsPos, errorEntry.errorOp, INVOCATION_INSTRUMENTATION_TYPE); injectReportErrorCall(errorReportBB, func.localVars, desugaredInsPos, errorEntry.errorOp, INVOCATION_INSTRUMENTATION_TYPE); 
injectStopObservationCall(observeEndBB, desugaredInsPos); errorReportBB.terminator.thenBB = observeEndBB; observeEndBB.terminator.thenBB = newTargetBB; fixErrorTable(func, errorEntry.targetBB, newTargetBB); } else { BIRBasicBlock errorCheckBB = insertBasicBlock(func, newCurrentIndex + 1); BIRBasicBlock errorReportBB = insertBasicBlock(func, newCurrentIndex + 2); BIRBasicBlock observeEndBB = insertBasicBlock(func, newCurrentIndex + 3); BIRBasicBlock rePanicBB = insertBasicBlock(func, newCurrentIndex + 4); BIRVariableDcl trappedErrorVariableDcl = new BIRVariableDcl(symbolTable.errorType, new Name(String.format("$%s$trappedError", newCurrentBB.id.value)), VarScope.FUNCTION, VarKind.TEMP); func.localVars.add(trappedErrorVariableDcl); BIROperand trappedErrorOperand = new BIROperand(trappedErrorVariableDcl); injectCheckErrorCalls(errorCheckBB, errorReportBB, newCurrentBB.terminator.thenBB, func.localVars, newCurrentBB.terminator.pos, trappedErrorOperand, INVOCATION_INSTRUMENTATION_TYPE); injectReportErrorCall(errorReportBB, func.localVars, newCurrentBB.terminator.pos, trappedErrorOperand, INVOCATION_INSTRUMENTATION_TYPE); injectStopObservationCall(observeEndBB, newCurrentBB.terminator.pos); rePanicBB.terminator = new Panic(newCurrentBB.terminator.pos, trappedErrorOperand); BIRErrorEntry errorEntry = new BIRErrorEntry(newCurrentBB, newCurrentBB, trappedErrorOperand, errorCheckBB); func.errorTable.add(errorEntry); newCurrentBB.terminator.thenBB = errorCheckBB; errorReportBB.terminator.thenBB = observeEndBB; observeEndBB.terminator.thenBB = rePanicBB; i += 4; } } } i += 1; } } /** * Inject start observation call to a basic block. 
* @param observeStartBB The basic block to which the start observation call should be injected * @param serviceName The service to which the instruction was attached to * @param resourcePathOrFunction The resource path or function name * @param resourceAccessor The resource accessor if this is a resource * @param isResource True if the function is a resource * @param isRemote True if the function is a remote * @param pkg The package the invocation belongs to * @param originalInsPosition The source code position of the invocation */ private void injectStartResourceObservationCall(BIRBasicBlock observeStartBB, String serviceName, String resourcePathOrFunction, String resourceAccessor, boolean isResource, boolean isRemote, BIRPackage pkg, Location originalInsPosition) { String pkgId = generatePackageId(pkg.packageID); String position = generatePositionId(originalInsPosition); BIROperand pkgOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, pkgId); BIROperand originalInsPosOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, position); BIROperand serviceNameOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, serviceName); BIROperand resourcePathOrFunctionOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, resourcePathOrFunction); BIROperand resourceAccessorOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, resourceAccessor); BIROperand isResourceOperand = generateGlobalConstantOperand(pkg, symbolTable.booleanType, isResource); BIROperand isRemoteOperand = generateGlobalConstantOperand(pkg, symbolTable.booleanType, isRemote); JIMethodCall observeStartCallTerminator = new JIMethodCall(null); observeStartCallTerminator.invocationType = INVOKESTATIC; observeStartCallTerminator.jClassName = OBSERVE_UTILS; observeStartCallTerminator.jMethodVMSig = String.format("(L%s;L%s;L%s;L%s;L%s;L%s;ZZ)V", BAL_ENV, B_STRING_VALUE, B_STRING_VALUE, B_STRING_VALUE, B_STRING_VALUE, B_STRING_VALUE); 
observeStartCallTerminator.name = START_RESOURCE_OBSERVATION_METHOD; observeStartCallTerminator.args = Arrays.asList(pkgOperand, originalInsPosOperand, serviceNameOperand, resourcePathOrFunctionOperand, resourceAccessorOperand, isResourceOperand, isRemoteOperand); observeStartBB.terminator = observeStartCallTerminator; } /** * Inject start observation call to a basic block. * * @param observeStartBB The basic block to which the start observation call should be injected * @param desugaredInsLocation The position of all instructions, variables declarations, terminators to be generated * @param isRemote True if a remote function will be observed by the observation * @param isMainEntryPoint True if the main function will be observed by the observation * @param isWorker True if a worker function will be observed by the observation * @param objectOperand The object the function was attached to * @param action The name of the action which will be observed * @param pkg The package the invocation belongs to * @param originalInsPosition The source code position of the invocation */ private void injectStartCallableObservationCall(BIRBasicBlock observeStartBB, Location desugaredInsLocation, boolean isRemote, boolean isMainEntryPoint, boolean isWorker, BIROperand objectOperand, String action, BIRPackage pkg, Location originalInsPosition) { String pkgId = generatePackageId(pkg.packageID); String position = generatePositionId(originalInsPosition); BIROperand pkgOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, pkgId); BIROperand originalInsPosOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, position); BIROperand actionOperand = generateGlobalConstantOperand(pkg, symbolTable.stringType, action); BIROperand isMainEntryPointOperand = generateGlobalConstantOperand(pkg, symbolTable.booleanType, isMainEntryPoint); BIROperand isRemoteOperand = generateGlobalConstantOperand(pkg, symbolTable.booleanType, isRemote); BIROperand isWorkerOperand = 
generateGlobalConstantOperand(pkg, symbolTable.booleanType, isWorker); JIMethodCall observeStartCallTerminator = new JIMethodCall(desugaredInsLocation); observeStartCallTerminator.invocationType = INVOKESTATIC; observeStartCallTerminator.jClassName = OBSERVE_UTILS; observeStartCallTerminator.jMethodVMSig = String.format("(L%s;L%s;L%s;L%s;L%s;ZZZ)V", BAL_ENV, B_STRING_VALUE, B_STRING_VALUE, B_OBJECT, B_STRING_VALUE); observeStartCallTerminator.name = START_CALLABLE_OBSERVATION_METHOD; observeStartCallTerminator.args = Arrays.asList(pkgOperand, originalInsPosOperand, objectOperand, actionOperand, isMainEntryPointOperand, isRemoteOperand, isWorkerOperand); observeStartBB.terminator = observeStartCallTerminator; } /** * Inject branch condition for checking if a value is an error. * * @param errorCheckBB The basic block to which the error check should be injected * @param isErrorBB The basic block to which errors should go to * @param noErrorBB The basic block to which no errors should go to * @param scopeVarList The variables list in the scope * @param pos The position of all instructions, variables declarations, terminators, etc. 
* @param valueOperand Operand for passing the value which should be checked if it is an error * @param uniqueId A unique ID to identify the check error call */ private void injectCheckErrorCalls(BIRBasicBlock errorCheckBB, BIRBasicBlock isErrorBB, BIRBasicBlock noErrorBB, Collection<BIRVariableDcl> scopeVarList, Location pos, BIROperand valueOperand, String uniqueId) { BIRVariableDcl isErrorVariableDcl = new BIRVariableDcl(symbolTable.booleanType, new Name(String.format("$%s$%s$isError", uniqueId, errorCheckBB.id.value)), VarScope.FUNCTION, VarKind.TEMP); scopeVarList.add(isErrorVariableDcl); BIROperand isErrorOperand = new BIROperand(isErrorVariableDcl); TypeTest errorTypeTestInstruction = new TypeTest(pos, symbolTable.errorType, isErrorOperand, valueOperand); errorCheckBB.instructions.add(errorTypeTestInstruction); errorCheckBB.terminator = new Branch(pos, isErrorOperand, isErrorBB, noErrorBB); } /** * Inject report error call. * * @param errorReportBB The basic block to which the report error call should be injected * @param scopeVarList The variables list in the scope * @param pos The position of all instructions, variables declarations, terminators, etc. 
* @param errorOperand Operand for passing the error * @param uniqueId A unique ID to identify the check error call */ private void injectReportErrorCall(BIRBasicBlock errorReportBB, Collection<BIRVariableDcl> scopeVarList, Location pos, BIROperand errorOperand, String uniqueId) { BIRVariableDcl castedErrorVariableDcl = new BIRVariableDcl(symbolTable.errorType, new Name(String.format("$%s$%s$castedError", uniqueId, errorReportBB.id.value)), VarScope.FUNCTION, VarKind.TEMP); scopeVarList.add(castedErrorVariableDcl); BIROperand castedErrorOperand = new BIROperand(castedErrorVariableDcl); TypeCast errorCastInstruction = new TypeCast(pos, castedErrorOperand, errorOperand, symbolTable.errorType, false); errorReportBB.instructions.add(errorCastInstruction); JIMethodCall reportErrorCallTerminator = new JIMethodCall(pos); reportErrorCallTerminator.invocationType = INVOKESTATIC; reportErrorCallTerminator.jClassName = OBSERVE_UTILS; reportErrorCallTerminator.jMethodVMSig = String.format("(L%s;L%s;)V", BAL_ENV, ERROR_VALUE); reportErrorCallTerminator.name = REPORT_ERROR_METHOD; reportErrorCallTerminator.args = Collections.singletonList(castedErrorOperand); errorReportBB.terminator = reportErrorCallTerminator; } /** * Inject a stop observation call to a basic block. * * @param observeEndBB The basic block to which the stop observation call should be injected * @param pos The position of all instructions, variables declarations, terminators, etc. 
*/ private void injectStopObservationCall(BIRBasicBlock observeEndBB, Location pos) { JIMethodCall observeEndCallTerminator = new JIMethodCall(pos); observeEndCallTerminator.invocationType = INVOKESTATIC; observeEndCallTerminator.jClassName = OBSERVE_UTILS; observeEndCallTerminator.jMethodVMSig = String.format("(L%s;)V", BAL_ENV); observeEndCallTerminator.name = STOP_OBSERVATION_METHOD; observeEndCallTerminator.args = Collections.emptyList(); observeEndBB.terminator = observeEndCallTerminator; } /** * Generate a constant operand from a compile-time known value. * * @param pkg The package which should contain the constant * @param constantType The type of the constant * @param constantValue The constant value which should end up being passed in the operand * @return The generated operand which will pass the constant */ private BIROperand generateGlobalConstantOperand(BIRPackage pkg, BType constantType, Object constantValue) { return compileTimeConstants.computeIfAbsent(constantValue, k -> { PackageID pkgId = pkg.packageID; BIRGlobalVariableDcl constLoadVariableDcl = new BIRGlobalVariableDcl(COMPILE_TIME_CONST_POS, 0, constantType, pkgId, new Name("$observabilityConst" + constantIndex++), VarScope.GLOBAL, VarKind.CONSTANT, "", VIRTUAL); pkg.globalVars.add(constLoadVariableDcl); return new BIROperand(constLoadVariableDcl); }); } /** * Create and insert a new basic block into a function in the specified index. * * @param func The function to which the basic block should be injected * @param insertIndex The index at which the basic block should be injected * @return The injected new BB */ private BIRBasicBlock insertBasicBlock(BIRFunction func, int insertIndex) { BIRBasicBlock newBB = new BIRBasicBlock(new Name(NEW_BB_PREFIX + desugaredBBIndex++)); func.basicBlocks.add(insertIndex, newBB); return newBB; } /** * Swap the effective content of two basic blocks. 
* * @param firstBB The first BB of which content should end up in second BB * @param secondBB The second BB of which content should end up in first BB */ private void swapBasicBlockContent(BIRBasicBlock firstBB, BIRBasicBlock secondBB) { List<BIRNonTerminator> firstBBInstructions = firstBB.instructions; firstBB.instructions = secondBB.instructions; secondBB.instructions = firstBBInstructions; swapBasicBlockTerminator(firstBB, secondBB); } /** * Swap the terminators of two basic blocks. * * @param firstBB The first BB of which terminator should end up in second BB * @param secondBB The second BB of which terminator should end up in first BB */ private void swapBasicBlockTerminator(BIRBasicBlock firstBB, BIRBasicBlock secondBB) { BIRTerminator firstBBTerminator = firstBB.terminator; firstBB.terminator = secondBB.terminator; secondBB.terminator = firstBBTerminator; } /** * Fix the ending BB of error entries in the error table of a function. * * When desugar instructions were added after the original BB, * where the original BB is a trap ending BB, the new trap ending BBs changes. * This needs to be adjusted properly. * * @param func The function of which the error table should be fixed * @param oldBB The old ending BB of error entries to be fixed * @param newBB The new ending BB which should be updated to in the error entries to be fixed */ private void fixErrorTable(BIRFunction func, BIRBasicBlock oldBB, BIRBasicBlock newBB) { for (BIRErrorEntry errorEntry : func.errorTable) { if (errorEntry.endBB == oldBB) { errorEntry.endBB = newBB; } } } /** * Check if a call instruction is observable. 
* * @param callIns The call instruction to check * @return True if the call instruction is observable */ private boolean isObservable(Call callIns) { boolean isRemote = callIns.calleeFlags.contains(Flag.REMOTE); boolean isObservableAnnotationPresent = false; for (BIRAnnotationAttachment annot : callIns.calleeAnnotAttachments) { if (OBSERVABLE_ANNOTATION.equals( JvmCodeGenUtil.getPackageName( new PackageID(annot.packageID.orgName, annot.packageID.name, Names.EMPTY)) + annot.annotTagRef.value)) { isObservableAnnotationPresent = true; break; } } return isRemote || isObservableAnnotationPresent; } /** * Check is an error is assignable to a variable declaration. * * @param variableDcl The variable declaration which should be checked. * @return True if an error can be assigned and false otherwise */ private boolean isErrorAssignable(BIRVariableDcl variableDcl) { boolean isErrorAssignable = false; if (variableDcl.type instanceof BUnionType) { BUnionType returnUnionType = (BUnionType) variableDcl.type; isErrorAssignable = returnUnionType.getMemberTypes().stream() .anyMatch(type -> type instanceof BErrorType); } else if (variableDcl.type instanceof BErrorType) { isErrorAssignable = true; } return isErrorAssignable; } /** * Check if a basic block is covered in a error entry. 
* * @param errorEntry The error entry from the error table * @param basicBlocksList The basic blocks list which contains the basic block to be checked for * @param basicBlock The basic block which should be checked for * @return True if the basic block is covered in the error entry */ private boolean isBBCoveredInErrorEntry(BIRErrorEntry errorEntry, List<BIRBasicBlock> basicBlocksList, BIRBasicBlock basicBlock) { boolean isCovered = Objects.equals(basicBlock, errorEntry.trapBB) || Objects.equals(basicBlock, errorEntry.endBB); if (!isCovered) { /* * Traverse in the same way MethodGen.generateBasicBlocks traverses through basic blocks to generate * method body to check if the basic block is covered in the error entry. */ int i = 0; for (; i < basicBlocksList.size(); i++) { BIRBasicBlock currentBB = basicBlocksList.get(i); if (currentBB == errorEntry.trapBB) { break; } } for (; i < basicBlocksList.size(); i++) { BIRBasicBlock currentBB = basicBlocksList.get(i); if (currentBB == basicBlock) { isCovered = true; break; } if (currentBB == errorEntry.endBB) { break; } } } return isCovered; } /** * Remove the additional prefixes and postfixes added by the compiler. * This is done to get the original name used by the developer. * * @param serviceName The service name to be cleaned up * @return The cleaned up service name which should be equal to the name given by the developer */ private String cleanUpServiceName(String serviceName) { if (serviceName.contains(SERVICE_IDENTIFIER)) { return serviceName.substring(0, serviceName.indexOf(SERVICE_IDENTIFIER)); } return serviceName; } /** * Generate a ID for a source code position. * * @param pos The position for which the ID should be generated * @return The generated ID */ private String generatePositionId(Location pos) { return String.format("%s:%d:%d", pos.lineRange().filePath(), pos.lineRange().startLine().line() + 1, pos.lineRange().startLine().offset() + 1); } /** * Generate a ID for a ballerina module. 
* * @param pkg The module for which the ID should be generated * @return The generated ID */ private String generatePackageId(PackageID pkg) { return String.format("%s/%s:%s", pkg.orgName.value, pkg.name.value, pkg.version.value); } }
Perhaps? I can't say definitively, but it is already in [the `DeprecatedPrimitives` section of the proto](https://github.com/apache/beam/blob/a6897100e34cf2b6f177d261f65678c4ff8c7616/model/pipeline/src/main/proto/beam_runner_api.proto#L238). Is it always implemented as a composite, i.e. in terms of other primitives? We're currently also having a discussion about the other "deprecated primitive", [`CREATE_VIEW`](https://github.com/apache/beam/blob/a6897100e34cf2b6f177d261f65678c4ff8c7616/model/pipeline/src/main/proto/beam_runner_api.proto#L242), which is related to a number of the remaining failing Java PVR tests.
/**
 * Returns the ids of the primitive transforms contained in {@code components}.
 *
 * <p>For every transform whose URN marks it as a primitive, the subtransform tree rooted at it is
 * walked and only the leaves (transforms with no subtransforms) are collected. Insertion order is
 * preserved and duplicates are dropped.
 */
static Collection<String> getPrimitiveTransformIds(RunnerApi.Components components) {
  Collection<String> result = new LinkedHashSet<>();
  for (Map.Entry<String, PTransform> entry : components.getTransformsMap().entrySet()) {
    if (!isPrimitiveTransform(entry.getValue())) {
      continue;
    }
    // Depth-first walk of the subtransform tree rooted at this primitive; the leaf ids are kept.
    Deque<String> pending = new ArrayDeque<>();
    pending.push(entry.getKey());
    while (!pending.isEmpty()) {
      String currentId = pending.pop();
      List<String> children = components.getTransformsMap().get(currentId).getSubtransformsList();
      if (children.isEmpty()) {
        result.add(currentId);
      } else {
        pending.addAll(children);
      }
    }
  }
  return result;
}
if (isPrimitive) {
/**
 * Returns the ids of the primitive transforms contained in {@code components}.
 *
 * <p>For each transform whose URN identifies it as a primitive, the subtransform tree rooted at
 * that transform is expanded and only the leaves (transforms with no subtransforms) are added to
 * the result. A {@link LinkedHashSet} keeps insertion order and removes duplicate ids.
 */
static Collection<String> getPrimitiveTransformIds(RunnerApi.Components components) {
  Collection<String> ids = new LinkedHashSet<>();
  for (Map.Entry<String, PTransform> transformEntry : components.getTransformsMap().entrySet()) {
    PTransform transform = transformEntry.getValue();
    boolean isPrimitive = isPrimitiveTransform(transform);
    if (isPrimitive) {
      // Expand the subtransform tree rooted at this primitive, keeping only leaf ids.
      Deque<String> transforms = new ArrayDeque<>();
      transforms.push(transformEntry.getKey());
      while (!transforms.isEmpty()) {
        String id = transforms.pop();
        PTransform next = components.getTransformsMap().get(id);
        List<String> subtransforms = next.getSubtransformsList();
        if (subtransforms.isEmpty()) {
          ids.add(id);
        } else {
          transforms.addAll(subtransforms);
        }
      }
    }
  }
  return ids;
}
class QueryablePipeline { /** * Create a new {@link QueryablePipeline} based on the provided components. * * <p>The returned {@link QueryablePipeline} will contain only the primitive transforms present * within the provided components. */ public static QueryablePipeline forPrimitivesIn(Components components) { return new QueryablePipeline(getPrimitiveTransformIds(components), components); } /** * Create a new {@link QueryablePipeline} which uses the root transform IDs and components of the * provided {@link Pipeline}. */ public static QueryablePipeline forPipeline(RunnerApi.Pipeline p) { return forTransforms(p.getRootTransformIdsList(), p.getComponents()); } /** * Create a new {@link QueryablePipeline} based on the provided components containing only the * provided {@code transformIds}. */ public static QueryablePipeline forTransforms( Collection<String> transformIds, Components components) { return new QueryablePipeline(transformIds, components); } private final Components components; /** * The {@link Pipeline} represented by a {@link Network}. * * <p>This is a directed bipartite graph consisting of {@link PTransformNode PTransformNodes} and * {@link PCollectionNode PCollectionNodes}. Each {@link PCollectionNode} has exactly one in edge, * and an arbitrary number of out edges. Each {@link PTransformNode} has an arbitrary number of in * and out edges. * * <p>Parallel edges are permitted, as a {@link PCollectionNode} can be consumed by a single * {@link PTransformNode} any number of times with different local names. */ private final Network<PipelineNode, PipelineEdge> pipelineNetwork; private QueryablePipeline(Collection<String> transformIds, Components components) { this.components = components; this.pipelineNetwork = buildNetwork(transformIds, this.components); } /** Produces a {@link RunnerApi.Components} which contains only primitive transforms. 
*/ @VisibleForTesting private static final Set<String> PRIMITIVE_URNS = ImmutableSet.of( PAR_DO_TRANSFORM_URN, FLATTEN_TRANSFORM_URN, GROUP_BY_KEY_TRANSFORM_URN, IMPULSE_TRANSFORM_URN, ASSIGN_WINDOWS_TRANSFORM_URN, TEST_STREAM_TRANSFORM_URN, MAP_WINDOWS_TRANSFORM_URN, READ_TRANSFORM_URN, CREATE_VIEW_TRANSFORM_URN, SPLITTABLE_PROCESS_KEYED_URN, SPLITTABLE_PROCESS_ELEMENTS_URN); /** Returns true if the provided transform is a primitive. */ private static boolean isPrimitiveTransform(PTransform transform) { String urn = PTransformTranslation.urnForTransformOrNull(transform); return PRIMITIVE_URNS.contains(urn) || NativeTransforms.isNative(transform); } private MutableNetwork<PipelineNode, PipelineEdge> buildNetwork( Collection<String> transformIds, Components components) { MutableNetwork<PipelineNode, PipelineEdge> network = NetworkBuilder.directed().allowsParallelEdges(true).allowsSelfLoops(false).build(); Set<PCollectionNode> unproducedCollections = new HashSet<>(); for (String transformId : transformIds) { PTransform transform = components.getTransformsOrThrow(transformId); PTransformNode transformNode = PipelineNode.pTransform(transformId, this.components.getTransformsOrThrow(transformId)); network.addNode(transformNode); for (String produced : transform.getOutputsMap().values()) { PCollectionNode producedNode = PipelineNode.pCollection(produced, components.getPcollectionsOrThrow(produced)); network.addNode(producedNode); network.addEdge(transformNode, producedNode, new PerElementEdge()); checkArgument( network.inDegree(producedNode) == 1, "A %s should have exactly one producing %s, but found %s:\nPCollection:\n%s\nProducers:\n%s", PCollectionNode.class.getSimpleName(), PTransformNode.class.getSimpleName(), network.predecessors(producedNode).size(), producedNode, network.predecessors(producedNode)); unproducedCollections.remove(producedNode); } for (Map.Entry<String, String> consumed : transform.getInputsMap().entrySet()) { String pcollectionId = 
consumed.getValue(); PCollectionNode consumedNode = PipelineNode.pCollection( pcollectionId, this.components.getPcollectionsOrThrow(pcollectionId)); if (network.addNode(consumedNode)) { unproducedCollections.add(consumedNode); } if (getLocalSideInputNames(transform).contains(consumed.getKey())) { network.addEdge(consumedNode, transformNode, new SingletonEdge()); } else { network.addEdge(consumedNode, transformNode, new PerElementEdge()); } } } checkArgument( unproducedCollections.isEmpty(), "%ss %s were consumed but never produced", PCollectionNode.class.getSimpleName(), unproducedCollections); return network; } public Collection<PTransformNode> getTransforms() { return pipelineNetwork .nodes() .stream() .filter(PTransformNode.class::isInstance) .map(PTransformNode.class::cast) .collect(Collectors.toList()); } public Iterable<PTransformNode> getTopologicallyOrderedTransforms() { return StreamSupport.stream( Networks.topologicalOrder(pipelineNetwork, Comparator.comparing(PipelineNode::getId)) .spliterator(), false) .filter(PTransformNode.class::isInstance) .map(PTransformNode.class::cast) .collect(Collectors.toList()); } /** * Get the transforms that are roots of this {@link QueryablePipeline}. These are all nodes which * have no input {@link PCollection}. */ public Set<PTransformNode> getRootTransforms() { return pipelineNetwork .nodes() .stream() .filter(pipelineNode -> pipelineNetwork.inEdges(pipelineNode).isEmpty()) .map(pipelineNode -> (PTransformNode) pipelineNode) .collect(Collectors.toSet()); } public PTransformNode getProducer(PCollectionNode pcollection) { return (PTransformNode) Iterables.getOnlyElement(pipelineNetwork.predecessors(pcollection)); } /** * Get all of the {@link PTransformNode PTransforms} which consume the provided {@link * PCollectionNode} on a per-element basis. * * <p>If a {@link PTransformNode} consumes a {@link PCollectionNode} on a per-element basis one or * more times, it will appear a single time in the result. 
* * <p>In theory, a transform may consume a single {@link PCollectionNode} in both a per-element * and singleton manner. If this is the case, the transform node is included in the result, as it * does consume the {@link PCollectionNode} on a per-element basis. */ public Set<PTransformNode> getPerElementConsumers(PCollectionNode pCollection) { return pipelineNetwork .successors(pCollection) .stream() .filter( consumer -> pipelineNetwork .edgesConnecting(pCollection, consumer) .stream() .anyMatch(PipelineEdge::isPerElement)) .map(pipelineNode -> (PTransformNode) pipelineNode) .collect(Collectors.toSet()); } /** * Same as {@link * the collection as a singleton. */ public Set<PTransformNode> getSingletonConsumers(PCollectionNode pCollection) { return pipelineNetwork .successors(pCollection) .stream() .filter( consumer -> pipelineNetwork .edgesConnecting(pCollection, consumer) .stream() .anyMatch(edge -> !edge.isPerElement())) .map(pipelineNode -> (PTransformNode) pipelineNode) .collect(Collectors.toSet()); } /** * Gets each {@link PCollectionNode} that the provided {@link PTransformNode} consumes on a * per-element basis. */ public Set<PCollectionNode> getPerElementInputPCollections(PTransformNode ptransform) { return pipelineNetwork .inEdges(ptransform) .stream() .filter(PipelineEdge::isPerElement) .map(edge -> (PCollectionNode) pipelineNetwork.incidentNodes(edge).source()) .collect(Collectors.toSet()); } public Set<PCollectionNode> getOutputPCollections(PTransformNode ptransform) { return pipelineNetwork .successors(ptransform) .stream() .map(pipelineNode -> (PCollectionNode) pipelineNode) .collect(Collectors.toSet()); } public Components getComponents() { return components; } /** * Returns the {@link SideInputReference SideInputReferences} that the provided transform consumes * as side inputs. 
*/ public Collection<SideInputReference> getSideInputs(PTransformNode transform) { return getLocalSideInputNames(transform.getTransform()) .stream() .map( localName -> { String transformId = transform.getId(); PTransform transformProto = components.getTransformsOrThrow(transformId); String collectionId = transform.getTransform().getInputsOrThrow(localName); PCollection collection = components.getPcollectionsOrThrow(collectionId); return SideInputReference.of( PipelineNode.pTransform(transformId, transformProto), localName, PipelineNode.pCollection(collectionId, collection)); }) .collect(Collectors.toSet()); } public Collection<UserStateReference> getUserStates(PTransformNode transform) { return getLocalUserStateNames(transform.getTransform()) .stream() .map( localName -> { String transformId = transform.getId(); PTransform transformProto = components.getTransformsOrThrow(transformId); String collectionId = transform .getTransform() .getInputsOrThrow( Iterables.getOnlyElement( Sets.difference( transform.getTransform().getInputsMap().keySet(), getLocalSideInputNames(transformProto)))); PCollection collection = components.getPcollectionsOrThrow(collectionId); return UserStateReference.of( PipelineNode.pTransform(transformId, transformProto), localName, PipelineNode.pCollection(collectionId, collection)); }) .collect(Collectors.toSet()); } public Collection<TimerReference> getTimers(PTransformNode transform) { return getLocalTimerNames(transform.getTransform()) .stream() .map( localName -> { String transformId = transform.getId(); PTransform transformProto = components.getTransformsOrThrow(transformId); String collectionId = transform.getTransform().getInputsOrThrow(localName); PCollection collection = components.getPcollectionsOrThrow(collectionId); return TimerReference.of( PipelineNode.pTransform(transformId, transformProto), localName, PipelineNode.pCollection(collectionId, collection)); }) .collect(Collectors.toSet()); } private Set<String> 
getLocalSideInputNames(PTransform transform) { if (PAR_DO_TRANSFORM_URN.equals(transform.getSpec().getUrn())) { try { return ParDoPayload.parseFrom(transform.getSpec().getPayload()).getSideInputsMap().keySet(); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } } else { return Collections.emptySet(); } } private Set<String> getLocalUserStateNames(PTransform transform) { if (PAR_DO_TRANSFORM_URN.equals(transform.getSpec().getUrn())) { try { return ParDoPayload.parseFrom(transform.getSpec().getPayload()).getStateSpecsMap().keySet(); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } } else { return Collections.emptySet(); } } private Set<String> getLocalTimerNames(PTransform transform) { if (PAR_DO_TRANSFORM_URN.equals(transform.getSpec().getUrn())) { try { return ParDoPayload.parseFrom(transform.getSpec().getPayload()).getTimerSpecsMap().keySet(); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } } else { return Collections.emptySet(); } } public Optional<Environment> getEnvironment(PTransformNode parDo) { return Environments.getEnvironment(parDo.getId(), components); } private interface PipelineEdge { boolean isPerElement(); } private static class PerElementEdge implements PipelineEdge { @Override public boolean isPerElement() { return true; } } private static class SingletonEdge implements PipelineEdge { @Override public boolean isPerElement() { return false; } } }
class QueryablePipeline { /** * Create a new {@link QueryablePipeline} based on the provided components. * * <p>The returned {@link QueryablePipeline} will contain only the primitive transforms present * within the provided components. */ public static QueryablePipeline forPrimitivesIn(Components components) { return new QueryablePipeline(getPrimitiveTransformIds(components), components); } /** * Create a new {@link QueryablePipeline} which uses the root transform IDs and components of the * provided {@link Pipeline}. */ public static QueryablePipeline forPipeline(RunnerApi.Pipeline p) { return forTransforms(p.getRootTransformIdsList(), p.getComponents()); } /** * Create a new {@link QueryablePipeline} based on the provided components containing only the * provided {@code transformIds}. */ public static QueryablePipeline forTransforms( Collection<String> transformIds, Components components) { return new QueryablePipeline(transformIds, components); } private final Components components; /** * The {@link Pipeline} represented by a {@link Network}. * * <p>This is a directed bipartite graph consisting of {@link PTransformNode PTransformNodes} and * {@link PCollectionNode PCollectionNodes}. Each {@link PCollectionNode} has exactly one in edge, * and an arbitrary number of out edges. Each {@link PTransformNode} has an arbitrary number of in * and out edges. * * <p>Parallel edges are permitted, as a {@link PCollectionNode} can be consumed by a single * {@link PTransformNode} any number of times with different local names. */ private final Network<PipelineNode, PipelineEdge> pipelineNetwork; private QueryablePipeline(Collection<String> transformIds, Components components) { this.components = components; this.pipelineNetwork = buildNetwork(transformIds, this.components); } /** Produces a {@link RunnerApi.Components} which contains only primitive transforms. 
*/ @VisibleForTesting private static final Set<String> PRIMITIVE_URNS = ImmutableSet.of( PAR_DO_TRANSFORM_URN, FLATTEN_TRANSFORM_URN, GROUP_BY_KEY_TRANSFORM_URN, IMPULSE_TRANSFORM_URN, ASSIGN_WINDOWS_TRANSFORM_URN, TEST_STREAM_TRANSFORM_URN, MAP_WINDOWS_TRANSFORM_URN, READ_TRANSFORM_URN, CREATE_VIEW_TRANSFORM_URN, SPLITTABLE_PROCESS_KEYED_URN, SPLITTABLE_PROCESS_ELEMENTS_URN); /** Returns true if the provided transform is a primitive. */ private static boolean isPrimitiveTransform(PTransform transform) { String urn = PTransformTranslation.urnForTransformOrNull(transform); return PRIMITIVE_URNS.contains(urn) || NativeTransforms.isNative(transform); } private MutableNetwork<PipelineNode, PipelineEdge> buildNetwork( Collection<String> transformIds, Components components) { MutableNetwork<PipelineNode, PipelineEdge> network = NetworkBuilder.directed().allowsParallelEdges(true).allowsSelfLoops(false).build(); Set<PCollectionNode> unproducedCollections = new HashSet<>(); for (String transformId : transformIds) { PTransform transform = components.getTransformsOrThrow(transformId); PTransformNode transformNode = PipelineNode.pTransform(transformId, this.components.getTransformsOrThrow(transformId)); network.addNode(transformNode); for (String produced : transform.getOutputsMap().values()) { PCollectionNode producedNode = PipelineNode.pCollection(produced, components.getPcollectionsOrThrow(produced)); network.addNode(producedNode); network.addEdge(transformNode, producedNode, new PerElementEdge()); checkArgument( network.inDegree(producedNode) == 1, "A %s should have exactly one producing %s, but found %s:\nPCollection:\n%s\nProducers:\n%s", PCollectionNode.class.getSimpleName(), PTransformNode.class.getSimpleName(), network.predecessors(producedNode).size(), producedNode, network.predecessors(producedNode)); unproducedCollections.remove(producedNode); } for (Map.Entry<String, String> consumed : transform.getInputsMap().entrySet()) { String pcollectionId = 
consumed.getValue(); PCollectionNode consumedNode = PipelineNode.pCollection( pcollectionId, this.components.getPcollectionsOrThrow(pcollectionId)); if (network.addNode(consumedNode)) { unproducedCollections.add(consumedNode); } if (getLocalSideInputNames(transform).contains(consumed.getKey())) { network.addEdge(consumedNode, transformNode, new SingletonEdge()); } else { network.addEdge(consumedNode, transformNode, new PerElementEdge()); } } } checkArgument( unproducedCollections.isEmpty(), "%ss %s were consumed but never produced", PCollectionNode.class.getSimpleName(), unproducedCollections); return network; } public Collection<PTransformNode> getTransforms() { return pipelineNetwork .nodes() .stream() .filter(PTransformNode.class::isInstance) .map(PTransformNode.class::cast) .collect(Collectors.toList()); } public Iterable<PTransformNode> getTopologicallyOrderedTransforms() { return StreamSupport.stream( Networks.topologicalOrder(pipelineNetwork, Comparator.comparing(PipelineNode::getId)) .spliterator(), false) .filter(PTransformNode.class::isInstance) .map(PTransformNode.class::cast) .collect(Collectors.toList()); } /** * Get the transforms that are roots of this {@link QueryablePipeline}. These are all nodes which * have no input {@link PCollection}. */ public Set<PTransformNode> getRootTransforms() { return pipelineNetwork .nodes() .stream() .filter(pipelineNode -> pipelineNetwork.inEdges(pipelineNode).isEmpty()) .map(pipelineNode -> (PTransformNode) pipelineNode) .collect(Collectors.toSet()); } public PTransformNode getProducer(PCollectionNode pcollection) { return (PTransformNode) Iterables.getOnlyElement(pipelineNetwork.predecessors(pcollection)); } /** * Get all of the {@link PTransformNode PTransforms} which consume the provided {@link * PCollectionNode} on a per-element basis. * * <p>If a {@link PTransformNode} consumes a {@link PCollectionNode} on a per-element basis one or * more times, it will appear a single time in the result. 
* * <p>In theory, a transform may consume a single {@link PCollectionNode} in both a per-element * and singleton manner. If this is the case, the transform node is included in the result, as it * does consume the {@link PCollectionNode} on a per-element basis. */ public Set<PTransformNode> getPerElementConsumers(PCollectionNode pCollection) { return pipelineNetwork .successors(pCollection) .stream() .filter( consumer -> pipelineNetwork .edgesConnecting(pCollection, consumer) .stream() .anyMatch(PipelineEdge::isPerElement)) .map(pipelineNode -> (PTransformNode) pipelineNode) .collect(Collectors.toSet()); } /** * Same as {@link * the collection as a singleton. */ public Set<PTransformNode> getSingletonConsumers(PCollectionNode pCollection) { return pipelineNetwork .successors(pCollection) .stream() .filter( consumer -> pipelineNetwork .edgesConnecting(pCollection, consumer) .stream() .anyMatch(edge -> !edge.isPerElement())) .map(pipelineNode -> (PTransformNode) pipelineNode) .collect(Collectors.toSet()); } /** * Gets each {@link PCollectionNode} that the provided {@link PTransformNode} consumes on a * per-element basis. */ public Set<PCollectionNode> getPerElementInputPCollections(PTransformNode ptransform) { return pipelineNetwork .inEdges(ptransform) .stream() .filter(PipelineEdge::isPerElement) .map(edge -> (PCollectionNode) pipelineNetwork.incidentNodes(edge).source()) .collect(Collectors.toSet()); } public Set<PCollectionNode> getOutputPCollections(PTransformNode ptransform) { return pipelineNetwork .successors(ptransform) .stream() .map(pipelineNode -> (PCollectionNode) pipelineNode) .collect(Collectors.toSet()); } public Components getComponents() { return components; } /** * Returns the {@link SideInputReference SideInputReferences} that the provided transform consumes * as side inputs. 
*/ public Collection<SideInputReference> getSideInputs(PTransformNode transform) { return getLocalSideInputNames(transform.getTransform()) .stream() .map( localName -> { String transformId = transform.getId(); PTransform transformProto = components.getTransformsOrThrow(transformId); String collectionId = transform.getTransform().getInputsOrThrow(localName); PCollection collection = components.getPcollectionsOrThrow(collectionId); return SideInputReference.of( PipelineNode.pTransform(transformId, transformProto), localName, PipelineNode.pCollection(collectionId, collection)); }) .collect(Collectors.toSet()); } public Collection<UserStateReference> getUserStates(PTransformNode transform) { return getLocalUserStateNames(transform.getTransform()) .stream() .map( localName -> { String transformId = transform.getId(); PTransform transformProto = components.getTransformsOrThrow(transformId); String collectionId = transform .getTransform() .getInputsOrThrow( Iterables.getOnlyElement( Sets.difference( transform.getTransform().getInputsMap().keySet(), getLocalSideInputNames(transformProto)))); PCollection collection = components.getPcollectionsOrThrow(collectionId); return UserStateReference.of( PipelineNode.pTransform(transformId, transformProto), localName, PipelineNode.pCollection(collectionId, collection)); }) .collect(Collectors.toSet()); } public Collection<TimerReference> getTimers(PTransformNode transform) { return getLocalTimerNames(transform.getTransform()) .stream() .map( localName -> { String transformId = transform.getId(); PTransform transformProto = components.getTransformsOrThrow(transformId); String collectionId = transform.getTransform().getInputsOrThrow(localName); PCollection collection = components.getPcollectionsOrThrow(collectionId); return TimerReference.of( PipelineNode.pTransform(transformId, transformProto), localName, PipelineNode.pCollection(collectionId, collection)); }) .collect(Collectors.toSet()); } private Set<String> 
getLocalSideInputNames(PTransform transform) { if (PAR_DO_TRANSFORM_URN.equals(transform.getSpec().getUrn())) { try { return ParDoPayload.parseFrom(transform.getSpec().getPayload()).getSideInputsMap().keySet(); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } } else { return Collections.emptySet(); } } private Set<String> getLocalUserStateNames(PTransform transform) { if (PAR_DO_TRANSFORM_URN.equals(transform.getSpec().getUrn())) { try { return ParDoPayload.parseFrom(transform.getSpec().getPayload()).getStateSpecsMap().keySet(); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } } else { return Collections.emptySet(); } } private Set<String> getLocalTimerNames(PTransform transform) { if (PAR_DO_TRANSFORM_URN.equals(transform.getSpec().getUrn())) { try { return ParDoPayload.parseFrom(transform.getSpec().getPayload()).getTimerSpecsMap().keySet(); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } } else { return Collections.emptySet(); } } public Optional<Environment> getEnvironment(PTransformNode parDo) { return Environments.getEnvironment(parDo.getId(), components); } private interface PipelineEdge { boolean isPerElement(); } private static class PerElementEdge implements PipelineEdge { @Override public boolean isPerElement() { return true; } } private static class SingletonEdge implements PipelineEdge { @Override public boolean isPerElement() { return false; } } }
This can return any constructor — you first need to look for the `@Inject` constructor and, only if it is not present, fall back to the no-args one.
/**
 * Adds the interceptor bindings declared on the bean's relevant constructor to {@code bindings}.
 * <p>
 * The {@code @Inject} constructor takes precedence; only when no constructor injection exists do
 * we fall back to the no-args constructor. (The previous code asked Jandex for {@code <init>}
 * first, which returns the no-args constructor and silently ignored the {@code @Inject} one.)
 *
 * @param classInfo the bean class to inspect
 * @param bindings  the mutable collection of bindings to extend; already-present binding types
 *                  are not added twice
 */
private void addConstructorLevelBindings(ClassInfo classInfo, Collection<AnnotationInstance> bindings) {
    MethodInfo constructor;
    // Prefer the constructor selected for injection, if any.
    Optional<Injection> constructorWithInject = injections.stream().filter(Injection::isConstructor).findAny();
    if (constructorWithInject.isPresent()) {
        constructor = constructorWithInject.get().target.asMethod();
    } else {
        // No @Inject constructor: fall back to the no-args constructor (may be null).
        constructor = classInfo.method(Methods.INIT);
    }
    if (constructor != null) {
        beanDeployment.getAnnotations(constructor).stream()
                // Keep only interceptor bindings that are not already collected.
                .filter(a -> beanDeployment.getInterceptorBinding(a.name()) != null
                        && bindings.stream().noneMatch(e -> e.name().equals(a.name())))
                .forEach(a -> bindings.add(a));
    }
}
MethodInfo constructor = classInfo.method(Methods.INIT);
/**
 * Collects interceptor bindings declared on the bean's constructor: the {@code @Inject}
 * constructor when one exists, otherwise the no-args constructor. Bindings whose type is
 * already present in {@code bindings} are skipped.
 *
 * @param classInfo the bean class to inspect
 * @param bindings  mutable collection of bindings to extend in place
 */
private void addConstructorLevelBindings(ClassInfo classInfo, Collection<AnnotationInstance> bindings) {
    // Resolve the constructor to read annotations from; may be null if neither exists.
    MethodInfo constructor = getConstructorInjection()
            .map(injection -> injection.target.asMethod())
            .orElseGet(() -> classInfo.method(Methods.INIT));
    if (constructor == null) {
        return;
    }
    for (AnnotationInstance annotation : beanDeployment.getAnnotations(constructor)) {
        // Only annotations registered as interceptor bindings are relevant.
        if (beanDeployment.getInterceptorBinding(annotation.name()) == null) {
            continue;
        }
        boolean alreadyBound = bindings.stream().anyMatch(existing -> existing.name().equals(annotation.name()));
        if (!alreadyBound) {
            bindings.add(annotation);
        }
    }
}
class or null in case of a producer of a primitive type or an array */ public ClassInfo getImplClazz() { return implClazz; }
class or null in case of a producer of a primitive type or an array */ public ClassInfo getImplClazz() { return implClazz; }
nit: it would make me happier if this.feeder was initialized before the RemoteBundle is created so we don't have to worry about NPE.
new BundleProgressHandler() { @Override public void onProgress(ProcessBundleProgressResponse progress) { if (progress.hasSplit()) { feeder.split(progress.getSplit()); } } @Override public void onCompleted(ProcessBundleResponse response) { if (response.hasSplit()) { feeder.split(response.getSplit()); } } });
feeder.split(progress.getSplit());
new BundleProgressHandler() { @Override public void onProgress(ProcessBundleProgressResponse progress) { if (progress.hasSplit()) { feeder.split(progress.getSplit()); } } @Override public void onCompleted(ProcessBundleResponse response) { if (response.hasSplit()) { feeder.split(response.getSplit()); } } }
class SplittableRemoteStageEvaluator<InputT, RestrictionT> implements TransformEvaluator<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>> { private final PTransformNode transform; private final ExecutableStage stage; private final CopyOnAccessInMemoryStateInternals<byte[]> stateInternals; private final DirectTimerInternals timerInternals; private final RemoteBundle<KV<InputT, RestrictionT>> bundle; private final Collection<UncommittedBundle<?>> outputs; private final SDFFeederViaStateAndTimers<InputT, RestrictionT> feeder; private SplittableRemoteStageEvaluator( BundleFactory bundleFactory, JobBundleFactory jobBundleFactory, StepStateAndTimers<byte[]> stp, PTransformNode transform) throws Exception { this.stateInternals = stp.stateInternals(); this.timerInternals = stp.timerInternals(); this.transform = transform; this.stage = ExecutableStage.fromPayload( ExecutableStagePayload.parseFrom(transform.getTransform().getSpec().getPayload())); this.outputs = new ArrayList<>(); this.bundle = jobBundleFactory .<KV<InputT, RestrictionT>>forStage(stage) .getBundle( BundleFactoryOutputReceiverFactory.create( bundleFactory, stage.getComponents(), outputs::add), StateRequestHandler.unsupported(), ); FullWindowedValueCoder<KV<InputT, RestrictionT>> windowedValueCoder = (FullWindowedValueCoder<KV<InputT, RestrictionT>>) WireCoders.<KV<InputT, RestrictionT>>instantiateRunnerWireCoder( stage.getInputPCollection(), stage.getComponents()); KvCoder<InputT, RestrictionT> kvCoder = ((KvCoder<InputT, RestrictionT>) windowedValueCoder.getValueCoder()); this.feeder = new SDFFeederViaStateAndTimers<>( stateInternals, timerInternals, kvCoder.getKeyCoder(), kvCoder.getValueCoder(), (Coder<BoundedWindow>) windowedValueCoder.getWindowCoder()); } @Override public void processElement( WindowedValue<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>> windowedWorkItem) throws Exception { KeyedWorkItem<byte[], KV<InputT, RestrictionT>> kwi = windowedWorkItem.getValue(); WindowedValue<KV<InputT, 
RestrictionT>> elementRestriction = Iterables.getOnlyElement(kwi.elementsIterable(), null); if (elementRestriction != null) { feeder.seed(elementRestriction); } else { elementRestriction = feeder.resume(Iterables.getOnlyElement(kwi.timersIterable())); } bundle.getInputReceiver().accept(elementRestriction); } @Override public TransformResult<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>> finishBundle() throws Exception { bundle.close(); feeder.commit(); CopyOnAccessInMemoryStateInternals<byte[]> state = stateInternals.commit(); StepTransformResult.Builder<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>> result = StepTransformResult.withHold(transform, state.getEarliestWatermarkHold()); return result .addOutput(outputs) .withState(state) .withTimerUpdate(timerInternals.getTimerUpdate()) .build(); } }
class SplittableRemoteStageEvaluator<InputT, RestrictionT> implements TransformEvaluator<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>> { private final PTransformNode transform; private final ExecutableStage stage; private final CopyOnAccessInMemoryStateInternals<byte[]> stateInternals; private final DirectTimerInternals timerInternals; private final RemoteBundle<KV<InputT, RestrictionT>> bundle; private final Collection<UncommittedBundle<?>> outputs; private final SDFFeederViaStateAndTimers<InputT, RestrictionT> feeder; private SplittableRemoteStageEvaluator( BundleFactory bundleFactory, JobBundleFactory jobBundleFactory, StepStateAndTimers<byte[]> stp, PTransformNode transform) throws Exception { this.stateInternals = stp.stateInternals(); this.timerInternals = stp.timerInternals(); this.transform = transform; this.stage = ExecutableStage.fromPayload( ExecutableStagePayload.parseFrom(transform.getTransform().getSpec().getPayload())); this.outputs = new ArrayList<>(); FullWindowedValueCoder<KV<InputT, RestrictionT>> windowedValueCoder = (FullWindowedValueCoder<KV<InputT, RestrictionT>>) WireCoders.<KV<InputT, RestrictionT>>instantiateRunnerWireCoder( stage.getInputPCollection(), stage.getComponents()); KvCoder<InputT, RestrictionT> kvCoder = ((KvCoder<InputT, RestrictionT>) windowedValueCoder.getValueCoder()); this.feeder = new SDFFeederViaStateAndTimers<>( stateInternals, timerInternals, kvCoder.getKeyCoder(), kvCoder.getValueCoder(), (Coder<BoundedWindow>) windowedValueCoder.getWindowCoder()); this.bundle = jobBundleFactory .<KV<InputT, RestrictionT>>forStage(stage) .getBundle( BundleFactoryOutputReceiverFactory.create( bundleFactory, stage.getComponents(), outputs::add), StateRequestHandler.unsupported(), ); } @Override public void processElement( WindowedValue<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>> windowedWorkItem) throws Exception { KeyedWorkItem<byte[], KV<InputT, RestrictionT>> kwi = windowedWorkItem.getValue(); WindowedValue<KV<InputT, 
RestrictionT>> elementRestriction = Iterables.getOnlyElement(kwi.elementsIterable(), null); if (elementRestriction != null) { feeder.seed(elementRestriction); } else { elementRestriction = feeder.resume(Iterables.getOnlyElement(kwi.timersIterable())); } bundle.getInputReceiver().accept(elementRestriction); } @Override public TransformResult<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>> finishBundle() throws Exception { bundle.close(); feeder.commit(); CopyOnAccessInMemoryStateInternals<byte[]> state = stateInternals.commit(); StepTransformResult.Builder<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>> result = StepTransformResult.withHold(transform, state.getEarliestWatermarkHold()); return result .addOutput(outputs) .withState(state) .withTimerUpdate(timerInternals.getTimerUpdate()) .build(); } }
> but small costs pile up You would need a loooot of `return "";` statements to see a very tiny cost but I get your point and the PR is updated now ;-). > Certainly not worth blocking the PR thus why I approved anyway and let you decide what to do :). > (I wouldn't have asked if it made the code worse or more complex) :100:
/**
 * Substitutes every {@code {name}} placeholder in {@code path} with the URL-encoded value
 * registered via {@code pathParam(name, value)}, and normalizes the result to start with "/".
 *
 * @param path the endpoint path template; may be empty
 * @return the resolved path, always starting with "/" unless {@code path} was empty
 * @throws WebSocketClientException if a placeholder has no registered value
 */
String replacePathParameters(String path) {
    if (path.isEmpty()) {
        // Nothing to substitute — return the input itself rather than a fresh literal.
        return path;
    }
    StringBuilder sb = new StringBuilder();
    Matcher m = PATH_PARAM_PATTERN.matcher(path);
    while (m.find()) {
        String match = m.group();
        // Strip the surrounding '{' and '}' to get the parameter name.
        String paramName = match.substring(1, match.length() - 1);
        String val = pathParams.get(paramName);
        if (val == null) {
            throw new WebSocketClientException("Unable to obtain the path param for: " + paramName);
        }
        m.appendReplacement(sb, URLEncoder.encode(val, StandardCharsets.UTF_8));
    }
    m.appendTail(sb);
    return path.startsWith("/") ? sb.toString() : "/" + sb.toString();
}
return "";
/**
 * Resolves the {@code {name}} placeholders in the given path template using the values
 * registered via {@code pathParam(name, value)} (URL-encoded), and guarantees a leading "/".
 *
 * @param path the path template; an empty path is returned as-is
 * @return the resolved path
 * @throws WebSocketClientException if a placeholder has no registered value
 */
String replacePathParameters(String path) {
    if (path.isEmpty()) {
        return path;
    }
    StringBuilder resolved = new StringBuilder();
    Matcher matcher = PATH_PARAM_PATTERN.matcher(path);
    while (matcher.find()) {
        String placeholder = matcher.group();
        // Drop the '{' prefix and '}' suffix.
        String name = placeholder.substring(1, placeholder.length() - 1);
        String value = pathParams.get(name);
        if (value == null) {
            throw new WebSocketClientException("Unable to obtain the path param for: " + name);
        }
        matcher.appendReplacement(resolved, URLEncoder.encode(value, StandardCharsets.UTF_8));
    }
    matcher.appendTail(resolved);
    if (path.startsWith("/")) {
        return resolved.toString();
    }
    return "/" + resolved.toString();
}
class WebSocketConnectorBase<THIS extends WebSocketConnectorBase<THIS>> { protected static final Pattern PATH_PARAM_PATTERN = Pattern.compile("\\{[a-zA-Z0-9_]+\\}"); protected URI baseUri; protected final Map<String, String> pathParams; protected final Map<String, List<String>> headers; protected final Set<String> subprotocols; protected String path; protected Set<String> pathParamNames; protected final Vertx vertx; protected final Codecs codecs; protected final ClientConnectionManager connectionManager; protected final WebSocketsClientRuntimeConfig config; protected final TlsConfigurationRegistry tlsConfigurationRegistry; WebSocketConnectorBase(Vertx vertx, Codecs codecs, ClientConnectionManager connectionManager, WebSocketsClientRuntimeConfig config, TlsConfigurationRegistry tlsConfigurationRegistry) { this.headers = new HashMap<>(); this.subprotocols = new HashSet<>(); this.pathParams = new HashMap<>(); this.vertx = vertx; this.codecs = codecs; this.connectionManager = connectionManager; this.config = config; this.tlsConfigurationRegistry = tlsConfigurationRegistry; this.path = ""; this.pathParamNames = Set.of(); } public THIS baseUri(URI baseUri) { this.baseUri = Objects.requireNonNull(baseUri); return self(); } public THIS addHeader(String name, String value) { Objects.requireNonNull(name); Objects.requireNonNull(value); List<String> values = headers.get(name); if (values == null) { values = new ArrayList<>(); headers.put(name, values); } values.add(value); return self(); } public THIS pathParam(String name, String value) { Objects.requireNonNull(name); Objects.requireNonNull(value); if (!pathParamNames.contains(name)) { throw new IllegalArgumentException( String.format("[%s] is not a valid path parameter in the path %s", name, path)); } pathParams.put(name, value); return self(); } public THIS addSubprotocol(String value) { subprotocols.add(Objects.requireNonNull(value)); return self(); } void setPath(String path) { this.path = Objects.requireNonNull(path); 
this.pathParamNames = getPathParamNames(path); } @SuppressWarnings("unchecked") protected THIS self() { return (THIS) this; } Set<String> getPathParamNames(String path) { Set<String> names = new HashSet<>(); Matcher m = PATH_PARAM_PATTERN.matcher(path); while (m.find()) { String match = m.group(); String paramName = match.substring(1, match.length() - 1); names.add(paramName); } return names; } protected WebSocketClientOptions populateClientOptions() { WebSocketClientOptions clientOptions = new WebSocketClientOptions(); if (config.offerPerMessageCompression()) { clientOptions.setTryUsePerMessageCompression(true); if (config.compressionLevel().isPresent()) { clientOptions.setCompressionLevel(config.compressionLevel().getAsInt()); } } if (config.maxMessageSize().isPresent()) { clientOptions.setMaxMessageSize(config.maxMessageSize().getAsInt()); } Optional<TlsConfiguration> maybeTlsConfiguration = TlsConfiguration.from(tlsConfigurationRegistry, config.tlsConfigurationName()); if (maybeTlsConfiguration.isPresent()) { TlsConfigUtils.configure(clientOptions, maybeTlsConfiguration.get()); } return clientOptions; } protected WebSocketConnectOptions newConnectOptions(URI serverEndpointUri) { WebSocketConnectOptions connectOptions = new WebSocketConnectOptions() .setSsl(isHttps(serverEndpointUri)) .setHost(serverEndpointUri.getHost()); if (serverEndpointUri.getPort() != -1) { connectOptions.setPort(serverEndpointUri.getPort()); } else if (isHttps(serverEndpointUri)) { connectOptions.setPort(443); } return connectOptions; } protected boolean isHttps(URI uri) { return "https".equals(uri.getScheme()); } }
class WebSocketConnectorBase<THIS extends WebSocketConnectorBase<THIS>> { protected static final Pattern PATH_PARAM_PATTERN = Pattern.compile("\\{[a-zA-Z0-9_]+\\}"); protected URI baseUri; protected final Map<String, String> pathParams; protected final Map<String, List<String>> headers; protected final Set<String> subprotocols; protected String path; protected Set<String> pathParamNames; protected final Vertx vertx; protected final Codecs codecs; protected final ClientConnectionManager connectionManager; protected final WebSocketsClientRuntimeConfig config; protected final TlsConfigurationRegistry tlsConfigurationRegistry; WebSocketConnectorBase(Vertx vertx, Codecs codecs, ClientConnectionManager connectionManager, WebSocketsClientRuntimeConfig config, TlsConfigurationRegistry tlsConfigurationRegistry) { this.headers = new HashMap<>(); this.subprotocols = new HashSet<>(); this.pathParams = new HashMap<>(); this.vertx = vertx; this.codecs = codecs; this.connectionManager = connectionManager; this.config = config; this.tlsConfigurationRegistry = tlsConfigurationRegistry; this.path = ""; this.pathParamNames = Set.of(); } public THIS baseUri(URI baseUri) { this.baseUri = Objects.requireNonNull(baseUri); return self(); } public THIS addHeader(String name, String value) { Objects.requireNonNull(name); Objects.requireNonNull(value); List<String> values = headers.get(name); if (values == null) { values = new ArrayList<>(); headers.put(name, values); } values.add(value); return self(); } public THIS pathParam(String name, String value) { Objects.requireNonNull(name); Objects.requireNonNull(value); if (!pathParamNames.contains(name)) { throw new IllegalArgumentException( String.format("[%s] is not a valid path parameter in the path %s", name, path)); } pathParams.put(name, value); return self(); } public THIS addSubprotocol(String value) { subprotocols.add(Objects.requireNonNull(value)); return self(); } void setPath(String path) { this.path = Objects.requireNonNull(path); 
this.pathParamNames = getPathParamNames(path); } @SuppressWarnings("unchecked") protected THIS self() { return (THIS) this; } Set<String> getPathParamNames(String path) { Set<String> names = new HashSet<>(); Matcher m = PATH_PARAM_PATTERN.matcher(path); while (m.find()) { String match = m.group(); String paramName = match.substring(1, match.length() - 1); names.add(paramName); } return names; } protected WebSocketClientOptions populateClientOptions() { WebSocketClientOptions clientOptions = new WebSocketClientOptions(); if (config.offerPerMessageCompression()) { clientOptions.setTryUsePerMessageCompression(true); if (config.compressionLevel().isPresent()) { clientOptions.setCompressionLevel(config.compressionLevel().getAsInt()); } } if (config.maxMessageSize().isPresent()) { clientOptions.setMaxMessageSize(config.maxMessageSize().getAsInt()); } Optional<TlsConfiguration> maybeTlsConfiguration = TlsConfiguration.from(tlsConfigurationRegistry, config.tlsConfigurationName()); if (maybeTlsConfiguration.isPresent()) { TlsConfigUtils.configure(clientOptions, maybeTlsConfiguration.get()); } return clientOptions; } protected WebSocketConnectOptions newConnectOptions(URI serverEndpointUri) { WebSocketConnectOptions connectOptions = new WebSocketConnectOptions() .setSsl(isHttps(serverEndpointUri)) .setHost(serverEndpointUri.getHost()); if (serverEndpointUri.getPort() != -1) { connectOptions.setPort(serverEndpointUri.getPort()); } else if (isHttps(serverEndpointUri)) { connectOptions.setPort(443); } return connectOptions; } protected boolean isHttps(URI uri) { return "https".equals(uri.getScheme()); } }
In order to get the flag value for a particular application, you need to tell the flag which application it is for before calling value(), via `with(Dimension.APPLICATION_ID, anApplicationId.serializedForm())`.
/**
 * Decides whether the logserver should run on a dedicated node.
 * <p>
 * A dedicated node is used only for hosted, default-type applications that requested it,
 * and only when the ENABLE_LOGSERVER flag is enabled <em>for this application</em> — the
 * flag must therefore be bound to the APPLICATION_ID dimension before reading its value,
 * otherwise the default flag value is returned regardless of per-application overrides.
 *
 * @return a dedicated single-node specification, or a non-dedicated one
 */
private NodesSpecification createNodesSpecificationForLogserver() {
    DeployState deployState = context.getDeployState();
    if (deployState.getProperties().useDedicatedNodeForLogserver()
            && context.getApplicationType() == ConfigModelContext.ApplicationType.DEFAULT
            && deployState.isHosted()
            // Bind the application id so we read this application's flag value, not the default.
            && Flags.ENABLE_LOGSERVER.bindTo(deployState.flagSource())
                    .with(FetchVector.Dimension.APPLICATION_ID,
                          deployState.getProperties().applicationId().serializedForm())
                    .value())
        return NodesSpecification.dedicated(1, context);
    else
        return NodesSpecification.nonDedicated(1, context);
}
Flags.ENABLE_LOGSERVER.bindTo(deployState.flagSource()).value())
/**
 * Decides whether the logserver should run on a dedicated node: only for hosted,
 * default-type applications that requested it, and only when the feature flag is
 * enabled for this application.
 *
 * @return a dedicated single-node specification, or a non-dedicated one
 */
private NodesSpecification createNodesSpecificationForLogserver() {
    DeployState deployState = context.getDeployState();
    // Short-circuit order preserved: the flag is only consulted when all cheaper checks pass.
    boolean dedicated = deployState.getProperties().useDedicatedNodeForLogserver()
            && context.getApplicationType() == ConfigModelContext.ApplicationType.DEFAULT
            && deployState.isHosted()
            && logServerFlagValue(deployState);
    return dedicated
            ? NodesSpecification.dedicated(1, context)
            : NodesSpecification.nonDedicated(1, context);
}
class DomAdminV4Builder extends DomAdminBuilderBase { private ApplicationId ZONE_APPLICATION_ID = ApplicationId.from("hosted-vespa", "routing", "default"); private final Collection<ContainerModel> containerModels; private final ConfigModelContext context; public DomAdminV4Builder(ConfigModelContext context, boolean multitenant, List<ConfigServerSpec> configServerSpecs, Collection<ContainerModel> containerModels) { super(context.getApplicationType(), context.getDeployState().getFileRegistry(), multitenant, configServerSpecs); this.containerModels = containerModels; this.context = context; } @Override protected void doBuildAdmin(DeployState deployState, Admin admin, Element w3cAdminElement) { ModelElement adminElement = new ModelElement(w3cAdminElement); admin.addConfigservers(getConfigServersFromSpec(deployState.getDeployLogger(), admin)); Optional<NodesSpecification> requestedSlobroks = NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("slobroks"), context); Optional<NodesSpecification> requestedLogservers = NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("logservers"), context); assignSlobroks(deployState.getDeployLogger(), requestedSlobroks.orElse(NodesSpecification.nonDedicated(3, context)), admin); assignLogserver(deployState, requestedLogservers.orElse(createNodesSpecificationForLogserver()), admin); addLogForwarders(adminElement.getChild("logforwarding"), admin); } private void assignSlobroks(DeployLogger deployLogger, NodesSpecification nodesSpecification, Admin admin) { if (nodesSpecification.isDedicated()) { createSlobroks(deployLogger, admin, allocateHosts(admin.getHostSystem(), "slobroks", nodesSpecification)); } else { createSlobroks(deployLogger, admin, pickContainerHostsForSlobrok(nodesSpecification.count(), 2)); } } private void assignLogserver(DeployState deployState, NodesSpecification nodesSpecification, Admin admin) { if (nodesSpecification.count() > 1) throw new IllegalArgumentException("You can only 
request a single log server"); if (nodesSpecification.isDedicated()) { Collection<HostResource> hosts = allocateHosts(admin.getHostSystem(), "logserver", nodesSpecification); if (hosts.isEmpty()) return; Logserver logserver = createLogserver(deployState.getDeployLogger(), admin, hosts); createAdditionalContainerOnLogserverHost(deployState, admin, logserver.getHostResource()); } else if (containerModels.iterator().hasNext()) { List<HostResource> hosts = sortedContainerHostsFrom(containerModels.iterator().next(), nodesSpecification.count(), false); if (hosts.isEmpty()) return; createLogserver(deployState.getDeployLogger(), admin, hosts); } else { context.getDeployLogger().log(LogLevel.INFO, "No container host available to use for running logserver"); } } private void createAdditionalContainerOnLogserverHost(DeployState deployState, Admin admin, HostResource hostResource) { ContainerCluster logServerCluster = new ContainerCluster(admin, "logserver-cluster", "logserver-cluster", deployState); ContainerModel logserverClusterModel = new ContainerModel(context.withParent(admin).withId(logServerCluster.getSubId())); logServerCluster.addMetricStateHandler(); logServerCluster.addApplicationStatusHandler(); logServerCluster.addDefaultRootHandler(); logServerCluster.addVipHandler(); addLogHandler(logServerCluster); logserverClusterModel.setCluster(logServerCluster); Container container = new Container(logServerCluster, "" + 0, 0, deployState.isHosted()); container.setHostResource(hostResource); container.initService(deployState.getDeployLogger()); logServerCluster.addContainer(container); admin.addAndInitializeService(deployState.getDeployLogger(), hostResource, container); admin.setLogserverContainerCluster(logServerCluster); } private void addLogHandler(ContainerCluster cluster) { Handler<?> logHandler = Handler.fromClassName("com.yahoo.container.handler.LogHandler"); logHandler.addServerBindings("http: cluster.addComponent(logHandler); } private Collection<HostResource> 
allocateHosts(HostSystem hostSystem, String clusterId, NodesSpecification nodesSpecification) { return nodesSpecification.provision(hostSystem, ClusterSpec.Type.admin, ClusterSpec.Id.from(clusterId), context.getDeployLogger()).keySet(); } /** * Returns a list of container hosts to use for an auxiliary cluster. * The list returns the same nodes on each invocation given the same available nodes. * * @param count the desired number of nodes. More nodes may be returned to ensure a smooth transition * on topology changes, and less nodes may be returned if fewer are available * @param minHostsPerContainerCluster the desired number of hosts per cluster */ private List<HostResource> pickContainerHostsForSlobrok(int count, int minHostsPerContainerCluster) { Collection<ContainerModel> containerModelsWithSlobrok = containerModels.stream() .filter(this::shouldHaveSlobrok) .collect(Collectors.toList()); int hostsPerCluster = (int) Math.max(minHostsPerContainerCluster, Math.ceil((double) count / containerModelsWithSlobrok.size())); List<HostResource> picked = new ArrayList<>(); for (ContainerModel containerModel : containerModelsWithSlobrok) picked.addAll(pickContainerHostsFrom(containerModel, hostsPerCluster)); return picked; } private boolean shouldHaveSlobrok(ContainerModel containerModel) { ApplicationId applicationId = context.getDeployState().getProperties().applicationId(); if (!applicationId.equals(ZONE_APPLICATION_ID)) { return true; } String clustername = containerModel.getCluster().getName(); return !Objects.equals(clustername, "node-admin"); } private List<HostResource> pickContainerHostsFrom(ContainerModel model, int count) { boolean retired = true; List<HostResource> picked = sortedContainerHostsFrom(model, count, !retired); picked.addAll(sortedContainerHostsFrom(model, count, retired)); return picked; } /** Returns the count first containers in the current model having isRetired set to the given value */ private List<HostResource> 
sortedContainerHostsFrom(ContainerModel model, int count, boolean retired) { List<HostResource> hosts = model.getCluster().getContainers().stream() .filter(container -> retired == container.isRetired()) .map(Container::getHostResource) .collect(Collectors.toList()); return HostResource.pickHosts(hosts, count, 1); } private Logserver createLogserver(DeployLogger deployLogger, Admin admin, Collection<HostResource> hosts) { Logserver logserver = new Logserver(admin); logserver.setHostResource(hosts.iterator().next()); admin.setLogserver(logserver); logserver.initService(deployLogger); return logserver; } private void createSlobroks(DeployLogger deployLogger, Admin admin, Collection<HostResource> hosts) { if (hosts.isEmpty()) return; List<Slobrok> slobroks = new ArrayList<>(); int index = 0; for (HostResource host : hosts) { Slobrok slobrok = new Slobrok(admin, index++); slobrok.setHostResource(host); slobroks.add(slobrok); slobrok.initService(deployLogger); } admin.addSlobroks(slobroks); } }
class DomAdminV4Builder extends DomAdminBuilderBase { private ApplicationId ZONE_APPLICATION_ID = ApplicationId.from("hosted-vespa", "routing", "default"); private final Collection<ContainerModel> containerModels; private final ConfigModelContext context; public DomAdminV4Builder(ConfigModelContext context, boolean multitenant, List<ConfigServerSpec> configServerSpecs, Collection<ContainerModel> containerModels) { super(context.getApplicationType(), context.getDeployState().getFileRegistry(), multitenant, configServerSpecs); this.containerModels = containerModels; this.context = context; } @Override protected void doBuildAdmin(DeployState deployState, Admin admin, Element w3cAdminElement) { ModelElement adminElement = new ModelElement(w3cAdminElement); admin.addConfigservers(getConfigServersFromSpec(deployState.getDeployLogger(), admin)); Optional<NodesSpecification> requestedSlobroks = NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("slobroks"), context); Optional<NodesSpecification> requestedLogservers = NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("logservers"), context); assignSlobroks(deployState.getDeployLogger(), requestedSlobroks.orElse(NodesSpecification.nonDedicated(3, context)), admin); assignLogserver(deployState, requestedLogservers.orElse(createNodesSpecificationForLogserver()), admin); addLogForwarders(adminElement.getChild("logforwarding"), admin); } private void assignSlobroks(DeployLogger deployLogger, NodesSpecification nodesSpecification, Admin admin) { if (nodesSpecification.isDedicated()) { createSlobroks(deployLogger, admin, allocateHosts(admin.getHostSystem(), "slobroks", nodesSpecification)); } else { createSlobroks(deployLogger, admin, pickContainerHostsForSlobrok(nodesSpecification.count(), 2)); } } private void assignLogserver(DeployState deployState, NodesSpecification nodesSpecification, Admin admin) { if (nodesSpecification.count() > 1) throw new IllegalArgumentException("You can only 
request a single log server"); if (nodesSpecification.isDedicated()) { Collection<HostResource> hosts = allocateHosts(admin.getHostSystem(), "logserver", nodesSpecification); if (hosts.isEmpty()) return; Logserver logserver = createLogserver(deployState.getDeployLogger(), admin, hosts); createAdditionalContainerOnLogserverHost(deployState, admin, logserver.getHostResource()); } else if (containerModels.iterator().hasNext()) { List<HostResource> hosts = sortedContainerHostsFrom(containerModels.iterator().next(), nodesSpecification.count(), false); if (hosts.isEmpty()) return; createLogserver(deployState.getDeployLogger(), admin, hosts); } else { context.getDeployLogger().log(LogLevel.INFO, "No container host available to use for running logserver"); } } private void createAdditionalContainerOnLogserverHost(DeployState deployState, Admin admin, HostResource hostResource) { ContainerCluster logServerCluster = new ContainerCluster(admin, "logserver-cluster", "logserver-cluster", deployState); ContainerModel logserverClusterModel = new ContainerModel(context.withParent(admin).withId(logServerCluster.getSubId())); logServerCluster.addMetricStateHandler(); logServerCluster.addApplicationStatusHandler(); logServerCluster.addDefaultRootHandler(); logServerCluster.addVipHandler(); addLogHandler(logServerCluster); logserverClusterModel.setCluster(logServerCluster); Container container = new Container(logServerCluster, "" + 0, 0, deployState.isHosted()); container.setHostResource(hostResource); container.initService(deployState.getDeployLogger()); logServerCluster.addContainer(container); admin.addAndInitializeService(deployState.getDeployLogger(), hostResource, container); admin.setLogserverContainerCluster(logServerCluster); } private void addLogHandler(ContainerCluster cluster) { Handler<?> logHandler = Handler.fromClassName("com.yahoo.container.handler.LogHandler"); logHandler.addServerBindings("http: cluster.addComponent(logHandler); } private Collection<HostResource> 
allocateHosts(HostSystem hostSystem, String clusterId, NodesSpecification nodesSpecification) { return nodesSpecification.provision(hostSystem, ClusterSpec.Type.admin, ClusterSpec.Id.from(clusterId), context.getDeployLogger()).keySet(); } /** * Returns a list of container hosts to use for an auxiliary cluster. * The list returns the same nodes on each invocation given the same available nodes. * * @param count the desired number of nodes. More nodes may be returned to ensure a smooth transition * on topology changes, and less nodes may be returned if fewer are available * @param minHostsPerContainerCluster the desired number of hosts per cluster */ private List<HostResource> pickContainerHostsForSlobrok(int count, int minHostsPerContainerCluster) { Collection<ContainerModel> containerModelsWithSlobrok = containerModels.stream() .filter(this::shouldHaveSlobrok) .collect(Collectors.toList()); int hostsPerCluster = (int) Math.max(minHostsPerContainerCluster, Math.ceil((double) count / containerModelsWithSlobrok.size())); List<HostResource> picked = new ArrayList<>(); for (ContainerModel containerModel : containerModelsWithSlobrok) picked.addAll(pickContainerHostsFrom(containerModel, hostsPerCluster)); return picked; } private boolean shouldHaveSlobrok(ContainerModel containerModel) { ApplicationId applicationId = context.getDeployState().getProperties().applicationId(); if (!applicationId.equals(ZONE_APPLICATION_ID)) { return true; } String clustername = containerModel.getCluster().getName(); return !Objects.equals(clustername, "node-admin"); } private List<HostResource> pickContainerHostsFrom(ContainerModel model, int count) { boolean retired = true; List<HostResource> picked = sortedContainerHostsFrom(model, count, !retired); picked.addAll(sortedContainerHostsFrom(model, count, retired)); return picked; } /** Returns the count first containers in the current model having isRetired set to the given value */ private List<HostResource> 
sortedContainerHostsFrom(ContainerModel model, int count, boolean retired) { List<HostResource> hosts = model.getCluster().getContainers().stream() .filter(container -> retired == container.isRetired()) .map(Container::getHostResource) .collect(Collectors.toList()); return HostResource.pickHosts(hosts, count, 1); } private Logserver createLogserver(DeployLogger deployLogger, Admin admin, Collection<HostResource> hosts) { Logserver logserver = new Logserver(admin); logserver.setHostResource(hosts.iterator().next()); admin.setLogserver(logserver); logserver.initService(deployLogger); return logserver; } private void createSlobroks(DeployLogger deployLogger, Admin admin, Collection<HostResource> hosts) { if (hosts.isEmpty()) return; List<Slobrok> slobroks = new ArrayList<>(); int index = 0; for (HostResource host : hosts) { Slobrok slobrok = new Slobrok(admin, index++); slobrok.setHostResource(host); slobroks.add(slobrok); slobrok.initService(deployLogger); } admin.addSlobroks(slobroks); } private boolean logServerFlagValue(DeployState deployState) { return Flags.ENABLE_LOGSERVER.bindTo(deployState.flagSource()) .with(FetchVector.Dimension.APPLICATION_ID, deployState.getProperties().applicationId().serializedForm()) .value(); } }
If `databases` is a `Stream<String>` you wouldn't need to collect the filtering results. ``` Stream<String> databases = ctx.getCatalogManager().getCatalogOrThrowException(cName).listDatabases().stream(); if (likeType != null) { databases = databases .filter( row -> { if (likeType == LikeType.ILIKE) { return notLike != SqlLikeUtils.ilike(row, likePattern, "\\"); } else if (likeType == LikeType.LIKE) { return notLike != SqlLikeUtils.like(row, likePattern, "\\"); } return false; }); } ```
public TableResultInternal execute(Context ctx) { String cName = catalogName == null ? ctx.getCatalogManager().getCurrentCatalog() : catalogName; List<String> databases = ctx.getCatalogManager().getCatalogOrThrowException(cName).listDatabases(); if (likeType != null) { databases = databases.stream() .filter( row -> { if (likeType == LikeType.ILIKE) { return notLike != SqlLikeUtils.ilike(row, likePattern, "\\"); } else if (likeType == LikeType.LIKE) { return notLike != SqlLikeUtils.like(row, likePattern, "\\"); } return false; }) .collect(Collectors.toList()); } return buildStringArrayResult( "database name", databases.stream().sorted().toArray(String[]::new)); }
"database name", databases.stream().sorted().toArray(String[]::new));
public TableResultInternal execute(Context ctx) { String cName = catalogName == null ? ctx.getCatalogManager().getCurrentCatalog() : catalogName; Stream<String> databases = ctx.getCatalogManager().getCatalogOrThrowException(cName).listDatabases().stream(); if (likeType != null) { databases = databases.filter( row -> { if (likeType == LikeType.ILIKE) { return notLike != SqlLikeUtils.ilike(row, likePattern, "\\"); } else if (likeType == LikeType.LIKE) { return notLike != SqlLikeUtils.like(row, likePattern, "\\"); } return false; }); } return buildStringArrayResult("database name", databases.sorted().toArray(String[]::new)); }
class ShowDatabasesOperation implements ShowOperation { private final String preposition; private final String catalogName; private final LikeType likeType; private final String likePattern; private final boolean notLike; public ShowDatabasesOperation() { this.preposition = null; this.catalogName = null; this.likeType = null; this.likePattern = null; this.notLike = false; } public ShowDatabasesOperation(String likeType, String likePattern, boolean notLike) { this.preposition = null; this.catalogName = null; if (likeType != null) { this.likeType = LikeType.of(likeType); this.likePattern = requireNonNull(likePattern, "Like pattern must not be null"); this.notLike = notLike; } else { this.likeType = null; this.likePattern = null; this.notLike = false; } } public ShowDatabasesOperation( String preposition, String catalogName, String likeType, String likePattern, boolean notLike) { this.preposition = preposition; this.catalogName = catalogName; if (likeType != null) { this.likeType = LikeType.of(likeType); this.likePattern = requireNonNull(likePattern, "Like pattern must not be null"); this.notLike = notLike; } else { this.likeType = null; this.likePattern = null; this.notLike = false; } } @Override public String asSummaryString() { StringBuilder builder = new StringBuilder(); builder.append("SHOW DATABASES"); if (preposition != null) { builder.append(String.format(" %s %s", preposition, catalogName)); } if (likeType != null) { if (notLike) { builder.append(String.format(" NOT %s '%s'", likeType.name(), likePattern)); } else { builder.append(String.format(" %s '%s'", likeType.name(), likePattern)); } } return builder.toString(); } @Override }
class ShowDatabasesOperation implements ShowOperation { private final String catalogName; private final LikeType likeType; private final String likePattern; private final boolean notLike; public ShowDatabasesOperation() { this(null, null, null, false); } public ShowDatabasesOperation(String likeType, String likePattern, boolean notLike) { this(null, likeType, likePattern, notLike); } public ShowDatabasesOperation( String catalogName, String likeType, String likePattern, boolean notLike) { this.catalogName = catalogName; if (likeType != null) { this.likeType = LikeType.of(likeType); this.likePattern = requireNonNull(likePattern, "Like pattern must not be null"); this.notLike = notLike; } else { this.likeType = null; this.likePattern = null; this.notLike = false; } } @Override public String asSummaryString() { StringBuilder builder = new StringBuilder(); builder.append("SHOW DATABASES"); if (catalogName != null) { builder.append(String.format(" FROM/IN %s", catalogName)); } if (likeType != null) { if (notLike) { builder.append(String.format(" NOT %s '%s'", likeType.name(), likePattern)); } else { builder.append(String.format(" %s '%s'", likeType.name(), likePattern)); } } return builder.toString(); } @Override }
BELatencyInMs is just the JSON output so it doesn't need to follow Java or c# convention. I don't see a reason to spell out BackendEnd and bloat the diagnostics even further when BE is easily understandable. I would argue that we should use latency because that is the name of the header. If anything the backend implementation should be modified to include the everything rather than just the process time.
public String toString() { int statusCode = 0; int subStatusCode = HttpConstants.SubStatusCodes.UNKNOWN; if (this.storeResponse != null) { statusCode = this.storeResponse.getStatus(); subStatusCode = this.storeResponse.getSubStatusCode(); } else if (this.exception != null) { statusCode = this.exception.getStatusCode(); subStatusCode = this.exception.getSubStatusCode(); } return "storePhysicalAddress: " + this.storePhysicalAddress + ", lsn: " + this.lsn + ", globalCommittedLsn: " + this.globalCommittedLSN + ", partitionKeyRangeId: " + this.partitionKeyRangeId + ", isValid: " + this.isValid + ", statusCode: " + statusCode + ", subStatusCode: " + subStatusCode + ", isGone: " + this.isGoneException + ", isNotFound: " + this.isNotFoundException + ", isThroughputControlRequestRateTooLarge: " + this.isThroughputControlRequestRateTooLargeException + ", isInvalidPartition: " + this.isInvalidPartitionException + ", requestCharge: " + this.requestCharge + ", itemLSN: " + this.itemLSN + ", sessionToken: " + (this.sessionToken != null ? this.sessionToken.convertToString() : null) + ", backendLatencyInMs: " + this.backendLatencyInMs + ", exception: " + BridgeInternal.getInnerErrorMessage(this.exception); }
", backendLatencyInMs: " + this.backendLatencyInMs +
public String toString() { int statusCode = 0; int subStatusCode = HttpConstants.SubStatusCodes.UNKNOWN; if (this.storeResponse != null) { statusCode = this.storeResponse.getStatus(); subStatusCode = this.storeResponse.getSubStatusCode(); } else if (this.exception != null) { statusCode = this.exception.getStatusCode(); subStatusCode = this.exception.getSubStatusCode(); } return "storePhysicalAddress: " + this.storePhysicalAddress + ", lsn: " + this.lsn + ", globalCommittedLsn: " + this.globalCommittedLSN + ", partitionKeyRangeId: " + this.partitionKeyRangeId + ", isValid: " + this.isValid + ", statusCode: " + statusCode + ", subStatusCode: " + subStatusCode + ", isGone: " + this.isGoneException + ", isNotFound: " + this.isNotFoundException + ", isThroughputControlRequestRateTooLarge: " + this.isThroughputControlRequestRateTooLargeException + ", isInvalidPartition: " + this.isInvalidPartitionException + ", requestCharge: " + this.requestCharge + ", itemLSN: " + this.itemLSN + ", sessionToken: " + (this.sessionToken != null ? this.sessionToken.convertToString() : null) + ", backendLatencyInMs: " + this.backendLatencyInMs + ", exception: " + BridgeInternal.getInnerErrorMessage(this.exception); }
class StoreResult { private final static Logger logger = LoggerFactory.getLogger(StoreResult.class); private final StoreResponse storeResponse; private final CosmosException exception; final public long lsn; final public String partitionKeyRangeId; final public long quorumAckedLSN; final public long globalCommittedLSN; final public long numberOfReadRegions; final public long itemLSN; final public ISessionToken sessionToken; final public double requestCharge; final public int currentReplicaSetSize; final public int currentWriteQuorum; final public boolean isValid; final public boolean isGoneException; final public boolean isNotFoundException; final public boolean isInvalidPartitionException; final public Uri storePhysicalAddress; final public boolean isThroughputControlRequestRateTooLargeException; final public Double backendLatencyInMs; public StoreResult( StoreResponse storeResponse, CosmosException exception, String partitionKeyRangeId, long lsn, long quorumAckedLsn, double requestCharge, int currentReplicaSetSize, int currentWriteQuorum, boolean isValid, Uri storePhysicalAddress, long globalCommittedLSN, int numberOfReadRegions, long itemLSN, ISessionToken sessionToken, Double backendLatencyInMs) { this.storeResponse = storeResponse; this.exception = exception; this.partitionKeyRangeId = partitionKeyRangeId; this.lsn = lsn; this.quorumAckedLSN = quorumAckedLsn; this.requestCharge = requestCharge; this.currentReplicaSetSize = currentReplicaSetSize; this.currentWriteQuorum = currentWriteQuorum; this.isValid = isValid; this.isGoneException = this.exception != null && this.exception.getStatusCode() == HttpConstants.StatusCodes.GONE; this.isNotFoundException = this.exception != null && this.exception.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND; this.isInvalidPartitionException = this.exception != null && Exceptions.isNameCacheStale(this.exception); this.storePhysicalAddress = storePhysicalAddress; this.globalCommittedLSN = globalCommittedLSN; 
this.numberOfReadRegions = numberOfReadRegions; this.itemLSN = itemLSN; this.sessionToken = sessionToken; this.isThroughputControlRequestRateTooLargeException = this.exception != null && Exceptions.isThroughputControlRequestRateTooLargeException(this.exception); this.backendLatencyInMs = backendLatencyInMs; } public CosmosException getException() throws InternalServerErrorException { if (this.exception == null) { String message = "Exception should be available but found none"; assert false : message; logger.error(message); throw new InternalServerErrorException(RMResources.InternalServerError); } return exception; } public StoreResponse toResponse() { return toResponse(null); } public StoreResponse toResponse(RequestChargeTracker requestChargeTracker) { if (!this.isValid) { if (this.exception == null) { logger.error("Exception not set for invalid response"); throw new InternalServerErrorException(RMResources.InternalServerError); } throw this.exception; } if (requestChargeTracker != null) { StoreResult.setRequestCharge(this.storeResponse, this.exception, requestChargeTracker.getTotalRequestCharge()); } if (this.exception != null) { throw exception; } return this.storeResponse; } private static void setRequestCharge(StoreResponse response, CosmosException cosmosException, double totalRequestCharge) { if (cosmosException != null) { cosmosException.getResponseHeaders().put(HttpConstants.HttpHeaders.REQUEST_CHARGE, Double.toString(totalRequestCharge)); } else if (response.getResponseHeaderNames() != null) { for (int i = 0; i < response.getResponseHeaderNames().length; ++i) { if (Strings.areEqualIgnoreCase( response.getResponseHeaderNames()[i], HttpConstants.HttpHeaders.REQUEST_CHARGE)) { response.getResponseHeaderValues()[i] = Double.toString(totalRequestCharge); break; } } } } @Override public static class StoreResultSerializer extends StdSerializer<StoreResult> { private static final long serialVersionUID = 5315472126043077905L; public StoreResultSerializer(){ 
super(StoreResult.class); } @Override public void serialize(StoreResult storeResult, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException { int statusCode = 0; int subStatusCode = HttpConstants.SubStatusCodes.UNKNOWN; if (storeResult.storeResponse != null) { statusCode = storeResult.storeResponse.getStatus(); subStatusCode = storeResult.storeResponse.getSubStatusCode(); } else if (storeResult.exception != null) { statusCode = storeResult.exception.getStatusCode(); subStatusCode = storeResult.exception.getSubStatusCode(); } jsonGenerator.writeStartObject(); jsonGenerator.writeObjectField("storePhysicalAddress", storeResult.storePhysicalAddress == null ? null : storeResult.storePhysicalAddress.getURIAsString()); jsonGenerator.writeNumberField("lsn", storeResult.lsn); jsonGenerator.writeNumberField("globalCommittedLsn", storeResult.globalCommittedLSN); jsonGenerator.writeStringField("partitionKeyRangeId", storeResult.partitionKeyRangeId); jsonGenerator.writeBooleanField("isValid", storeResult.isValid); jsonGenerator.writeNumberField("statusCode", statusCode); jsonGenerator.writeNumberField("subStatusCode", subStatusCode); jsonGenerator.writeBooleanField("isGone", storeResult.isGoneException); jsonGenerator.writeBooleanField("isNotFound", storeResult.isNotFoundException); jsonGenerator.writeBooleanField("isInvalidPartition", storeResult.isInvalidPartitionException); jsonGenerator.writeBooleanField("isThroughputControlRequestRateTooLarge", storeResult.isThroughputControlRequestRateTooLargeException); jsonGenerator.writeNumberField("requestCharge", storeResult.requestCharge); jsonGenerator.writeNumberField("itemLSN", storeResult.itemLSN); jsonGenerator.writeStringField("sessionToken", (storeResult.sessionToken != null ? 
storeResult.sessionToken.convertToString() : null)); jsonGenerator.writeObjectField("backendLatencyInMs", storeResult.backendLatencyInMs); jsonGenerator.writeStringField("exception", BridgeInternal.getInnerErrorMessage(storeResult.exception)); jsonGenerator.writeObjectField("transportRequestTimeline", storeResult.storeResponse != null ? storeResult.storeResponse.getRequestTimeline() : storeResult.exception != null ? BridgeInternal.getRequestTimeline(storeResult.exception) : null); jsonGenerator.writeObjectField("rntbdRequestLengthInBytes", storeResult.storeResponse != null ? storeResult.storeResponse.getRntbdRequestLength() : BridgeInternal.getRntbdRequestLength(storeResult.exception)); jsonGenerator.writeObjectField("rntbdResponseLengthInBytes", storeResult.storeResponse != null ? storeResult.storeResponse.getRntbdResponseLength() : BridgeInternal.getRntbdResponseLength(storeResult.exception)); jsonGenerator.writeObjectField("requestPayloadLengthInBytes", storeResult.storeResponse != null ? storeResult.storeResponse.getRequestPayloadLength() : BridgeInternal.getRequestBodyLength(storeResult.exception)); jsonGenerator.writeObjectField("responsePayloadLengthInBytes", storeResult.storeResponse != null ? storeResult.storeResponse.getResponseBodyLength() : null); jsonGenerator.writeObjectField("channelTaskQueueSize", storeResult.storeResponse != null ? storeResult.storeResponse.getRntbdChannelTaskQueueSize() : BridgeInternal.getChannelTaskQueueSize(storeResult.exception)); jsonGenerator.writeObjectField("pendingRequestsCount", storeResult.storeResponse != null ? storeResult.storeResponse.getPendingRequestQueueSize() : BridgeInternal.getRntbdPendingRequestQueueSize(storeResult.exception)); jsonGenerator.writeObjectField("serviceEndpointStatistics", storeResult.storeResponse != null ? storeResult.storeResponse.getEndpointStsts() : storeResult.exception != null ? 
BridgeInternal.getServiceEndpointStatistics(storeResult.exception) : null); jsonGenerator.writeEndObject(); } } }
class StoreResult { private final static Logger logger = LoggerFactory.getLogger(StoreResult.class); private final StoreResponse storeResponse; private final CosmosException exception; final public long lsn; final public String partitionKeyRangeId; final public long quorumAckedLSN; final public long globalCommittedLSN; final public long numberOfReadRegions; final public long itemLSN; final public ISessionToken sessionToken; final public double requestCharge; final public int currentReplicaSetSize; final public int currentWriteQuorum; final public boolean isValid; final public boolean isGoneException; final public boolean isNotFoundException; final public boolean isInvalidPartitionException; final public Uri storePhysicalAddress; final public boolean isThroughputControlRequestRateTooLargeException; final public Double backendLatencyInMs; public StoreResult( StoreResponse storeResponse, CosmosException exception, String partitionKeyRangeId, long lsn, long quorumAckedLsn, double requestCharge, int currentReplicaSetSize, int currentWriteQuorum, boolean isValid, Uri storePhysicalAddress, long globalCommittedLSN, int numberOfReadRegions, long itemLSN, ISessionToken sessionToken, Double backendLatencyInMs) { this.storeResponse = storeResponse; this.exception = exception; this.partitionKeyRangeId = partitionKeyRangeId; this.lsn = lsn; this.quorumAckedLSN = quorumAckedLsn; this.requestCharge = requestCharge; this.currentReplicaSetSize = currentReplicaSetSize; this.currentWriteQuorum = currentWriteQuorum; this.isValid = isValid; this.isGoneException = this.exception != null && this.exception.getStatusCode() == HttpConstants.StatusCodes.GONE; this.isNotFoundException = this.exception != null && this.exception.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND; this.isInvalidPartitionException = this.exception != null && Exceptions.isNameCacheStale(this.exception); this.storePhysicalAddress = storePhysicalAddress; this.globalCommittedLSN = globalCommittedLSN; 
this.numberOfReadRegions = numberOfReadRegions; this.itemLSN = itemLSN; this.sessionToken = sessionToken; this.isThroughputControlRequestRateTooLargeException = this.exception != null && Exceptions.isThroughputControlRequestRateTooLargeException(this.exception); this.backendLatencyInMs = backendLatencyInMs; } public CosmosException getException() throws InternalServerErrorException { if (this.exception == null) { String message = "Exception should be available but found none"; assert false : message; logger.error(message); throw new InternalServerErrorException(RMResources.InternalServerError); } return exception; } public StoreResponse toResponse() { return toResponse(null); } public StoreResponse toResponse(RequestChargeTracker requestChargeTracker) { if (!this.isValid) { if (this.exception == null) { logger.error("Exception not set for invalid response"); throw new InternalServerErrorException(RMResources.InternalServerError); } throw this.exception; } if (requestChargeTracker != null) { StoreResult.setRequestCharge(this.storeResponse, this.exception, requestChargeTracker.getTotalRequestCharge()); } if (this.exception != null) { throw exception; } return this.storeResponse; } private static void setRequestCharge(StoreResponse response, CosmosException cosmosException, double totalRequestCharge) { if (cosmosException != null) { cosmosException.getResponseHeaders().put(HttpConstants.HttpHeaders.REQUEST_CHARGE, Double.toString(totalRequestCharge)); } else if (response.getResponseHeaderNames() != null) { for (int i = 0; i < response.getResponseHeaderNames().length; ++i) { if (Strings.areEqualIgnoreCase( response.getResponseHeaderNames()[i], HttpConstants.HttpHeaders.REQUEST_CHARGE)) { response.getResponseHeaderValues()[i] = Double.toString(totalRequestCharge); break; } } } } @Override public static class StoreResultSerializer extends StdSerializer<StoreResult> { private static final long serialVersionUID = 5315472126043077905L; public StoreResultSerializer(){ 
super(StoreResult.class); } @Override public void serialize(StoreResult storeResult, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException { int statusCode = 0; int subStatusCode = HttpConstants.SubStatusCodes.UNKNOWN; if (storeResult.storeResponse != null) { statusCode = storeResult.storeResponse.getStatus(); subStatusCode = storeResult.storeResponse.getSubStatusCode(); } else if (storeResult.exception != null) { statusCode = storeResult.exception.getStatusCode(); subStatusCode = storeResult.exception.getSubStatusCode(); } jsonGenerator.writeStartObject(); jsonGenerator.writeObjectField("storePhysicalAddress", storeResult.storePhysicalAddress == null ? null : storeResult.storePhysicalAddress.getURIAsString()); jsonGenerator.writeNumberField("lsn", storeResult.lsn); jsonGenerator.writeNumberField("globalCommittedLsn", storeResult.globalCommittedLSN); jsonGenerator.writeStringField("partitionKeyRangeId", storeResult.partitionKeyRangeId); jsonGenerator.writeBooleanField("isValid", storeResult.isValid); jsonGenerator.writeNumberField("statusCode", statusCode); jsonGenerator.writeNumberField("subStatusCode", subStatusCode); jsonGenerator.writeBooleanField("isGone", storeResult.isGoneException); jsonGenerator.writeBooleanField("isNotFound", storeResult.isNotFoundException); jsonGenerator.writeBooleanField("isInvalidPartition", storeResult.isInvalidPartitionException); jsonGenerator.writeBooleanField("isThroughputControlRequestRateTooLarge", storeResult.isThroughputControlRequestRateTooLargeException); jsonGenerator.writeNumberField("requestCharge", storeResult.requestCharge); jsonGenerator.writeNumberField("itemLSN", storeResult.itemLSN); jsonGenerator.writeStringField("sessionToken", (storeResult.sessionToken != null ? 
storeResult.sessionToken.convertToString() : null)); jsonGenerator.writeObjectField("backendLatencyInMs", storeResult.backendLatencyInMs); jsonGenerator.writeStringField("exception", BridgeInternal.getInnerErrorMessage(storeResult.exception)); jsonGenerator.writeObjectField("transportRequestTimeline", storeResult.storeResponse != null ? storeResult.storeResponse.getRequestTimeline() : storeResult.exception != null ? BridgeInternal.getRequestTimeline(storeResult.exception) : null); jsonGenerator.writeObjectField("rntbdRequestLengthInBytes", storeResult.storeResponse != null ? storeResult.storeResponse.getRntbdRequestLength() : BridgeInternal.getRntbdRequestLength(storeResult.exception)); jsonGenerator.writeObjectField("rntbdResponseLengthInBytes", storeResult.storeResponse != null ? storeResult.storeResponse.getRntbdResponseLength() : BridgeInternal.getRntbdResponseLength(storeResult.exception)); jsonGenerator.writeObjectField("requestPayloadLengthInBytes", storeResult.storeResponse != null ? storeResult.storeResponse.getRequestPayloadLength() : BridgeInternal.getRequestBodyLength(storeResult.exception)); jsonGenerator.writeObjectField("responsePayloadLengthInBytes", storeResult.storeResponse != null ? storeResult.storeResponse.getResponseBodyLength() : null); jsonGenerator.writeObjectField("channelTaskQueueSize", storeResult.storeResponse != null ? storeResult.storeResponse.getRntbdChannelTaskQueueSize() : BridgeInternal.getChannelTaskQueueSize(storeResult.exception)); jsonGenerator.writeObjectField("pendingRequestsCount", storeResult.storeResponse != null ? storeResult.storeResponse.getPendingRequestQueueSize() : BridgeInternal.getRntbdPendingRequestQueueSize(storeResult.exception)); jsonGenerator.writeObjectField("serviceEndpointStatistics", storeResult.storeResponse != null ? storeResult.storeResponse.getEndpointStsts() : storeResult.exception != null ? 
BridgeInternal.getServiceEndpointStatistics(storeResult.exception) : null); jsonGenerator.writeEndObject(); } } }
Use user who create this job?
public void before() throws JobException { if (isCanceled.get()) { throw new JobException("Export executor has been canceled, task id: {}", getTaskId()); } ctx = new ConnectContext(); ctx.setEnv(Env.getCurrentEnv()); ctx.setCluster(SystemInfoService.DEFAULT_CLUSTER); ctx.setQualifiedUser(Auth.ADMIN_USER); ctx.setCurrentUserIdentity(UserIdentity.ADMIN); ctx.getState().reset(); ctx.setThreadLocalInfo(); ctx.setDatabase(currentDb); TUniqueId queryId = generateQueryId(UUID.randomUUID().toString()); ctx.getSessionVariable().enableFallbackToOriginalPlanner = false; stmtExecutor = new StmtExecutor(ctx, (String) null); ctx.setQueryId(queryId); NereidsParser parser = new NereidsParser(); this.command = (InsertIntoTableCommand) parser.parseSingle(sql); this.command.setLabelName(Optional.of(getJobId() + "_" + getTaskId())); this.command.setJobId(getTaskId()); super.before(); }
ctx.setQualifiedUser(Auth.ADMIN_USER);
public void before() throws JobException { if (isCanceled.get()) { throw new JobException("Export executor has been canceled, task id: {}", getTaskId()); } ctx = new ConnectContext(); ctx.setEnv(Env.getCurrentEnv()); ctx.setCluster(SystemInfoService.DEFAULT_CLUSTER); ctx.setQualifiedUser(userIdentity.getQualifiedUser()); ctx.setCurrentUserIdentity(userIdentity); ctx.getState().reset(); ctx.setThreadLocalInfo(); ctx.setDatabase(currentDb); TUniqueId queryId = generateQueryId(UUID.randomUUID().toString()); ctx.getSessionVariable().enableFallbackToOriginalPlanner = false; stmtExecutor = new StmtExecutor(ctx, (String) null); ctx.setQueryId(queryId); NereidsParser parser = new NereidsParser(); this.command = (InsertIntoTableCommand) parser.parseSingle(sql); this.command.setLabelName(Optional.of(getJobId() + "_" + getTaskId())); this.command.setJobId(getTaskId()); super.before(); }
class InsertTask extends AbstractTask { private String labelName; private InsertIntoTableCommand command; private StmtExecutor stmtExecutor; private ConnectContext ctx; private String sql; private String currentDb; private AtomicBoolean isCanceled = new AtomicBoolean(false); private AtomicBoolean isFinished = new AtomicBoolean(false); @Getter @Setter private LoadJob loadJob; @Override protected TUniqueId generateQueryId(String taskIdString) { UUID taskId = UUID.fromString(taskIdString); return new TUniqueId(taskId.getMostSignificantBits(), taskId.getLeastSignificantBits()); } public InsertTask(String labelName, String currentDb, String sql) { this.labelName = labelName; this.sql = sql; this.currentDb = currentDb; } @Override public void run() throws JobException { try { command.run(ctx, stmtExecutor); } catch (Exception e) { throw new JobException(e); } } @Override public void onFail() throws JobException { isFinished.set(true); super.onFail(); } @Override public void onSuccess() throws JobException { isFinished.set(true); super.onSuccess(); } @Override public void cancel() throws JobException { if (isFinished.get() || isCanceled.get()) { return; } isCanceled.getAndSet(true); if (null != stmtExecutor) { stmtExecutor.cancel(); } super.cancel(); } @Override public List<String> getShowInfo() { if (null == loadJob) { return new ArrayList<>(); } List<String> jobInfo = Lists.newArrayList(); jobInfo.add(String.valueOf(loadJob.getId())); jobInfo.add(loadJob.getLabel()); jobInfo.add(loadJob.getState().name()); if (loadJob.getLoadingStatus().getCounters().isEmpty()) { jobInfo.add(FeConstants.null_string); } else { jobInfo.add(Joiner.on("; ").withKeyValueSeparator("=").join(loadJob.getLoadingStatus().getCounters())); } jobInfo.add("cluster:" + loadJob.getResourceName() + "; timeout(s):" + loadJob.getTimeout() + "; max_filter_ratio:" + loadJob.getMaxFilterRatio() + "; priority:" + loadJob.getPriority()); if (loadJob.getFailMsg() == null) { jobInfo.add(FeConstants.null_string); 
} else { jobInfo.add("type:" + loadJob.getFailMsg().getCancelType() + "; msg:" + loadJob.getFailMsg().getMsg()); } jobInfo.add(TimeUtils.longToTimeString(loadJob.getCreateTimestamp())); jobInfo.add(TimeUtils.longToTimeString(loadJob.getFinishTimestamp())); jobInfo.add(loadJob.getLoadingStatus().getTrackingUrl()); jobInfo.add(loadJob.getLoadStatistic().toJson()); jobInfo.add(loadJob.getUserInfo().getQualifiedUser()); return jobInfo; } }
class InsertTask extends AbstractTask { private String labelName; private InsertIntoTableCommand command; private StmtExecutor stmtExecutor; private ConnectContext ctx; private String sql; private String currentDb; private UserIdentity userIdentity; private AtomicBoolean isCanceled = new AtomicBoolean(false); private AtomicBoolean isFinished = new AtomicBoolean(false); @Getter @Setter private LoadJob loadJob; @Override protected TUniqueId generateQueryId(String taskIdString) { UUID taskId = UUID.fromString(taskIdString); return new TUniqueId(taskId.getMostSignificantBits(), taskId.getLeastSignificantBits()); } public InsertTask(String labelName, String currentDb, String sql, UserIdentity userIdentity) { this.labelName = labelName; this.sql = sql; this.currentDb = currentDb; this.userIdentity = userIdentity; } @Override public void run() throws JobException { try { command.run(ctx, stmtExecutor); } catch (Exception e) { throw new JobException(e); } } @Override public void onFail() throws JobException { isFinished.set(true); super.onFail(); } @Override public void onSuccess() throws JobException { isFinished.set(true); super.onSuccess(); } @Override public void cancel() throws JobException { if (isFinished.get() || isCanceled.get()) { return; } isCanceled.getAndSet(true); if (null != stmtExecutor) { stmtExecutor.cancel(); } super.cancel(); } @Override public List<String> getShowInfo() { if (null == loadJob) { return new ArrayList<>(); } List<String> jobInfo = Lists.newArrayList(); jobInfo.add(String.valueOf(loadJob.getId())); jobInfo.add(loadJob.getLabel()); jobInfo.add(loadJob.getState().name()); if (loadJob.getLoadingStatus().getCounters().isEmpty()) { jobInfo.add(FeConstants.null_string); } else { jobInfo.add(Joiner.on("; ").withKeyValueSeparator("=").join(loadJob.getLoadingStatus().getCounters())); } jobInfo.add("cluster:" + loadJob.getResourceName() + "; timeout(s):" + loadJob.getTimeout() + "; max_filter_ratio:" + loadJob.getMaxFilterRatio() + "; priority:" + 
loadJob.getPriority()); if (loadJob.getFailMsg() == null) { jobInfo.add(FeConstants.null_string); } else { jobInfo.add("type:" + loadJob.getFailMsg().getCancelType() + "; msg:" + loadJob.getFailMsg().getMsg()); } jobInfo.add(TimeUtils.longToTimeString(loadJob.getCreateTimestamp())); jobInfo.add(TimeUtils.longToTimeString(loadJob.getFinishTimestamp())); jobInfo.add(loadJob.getLoadingStatus().getTrackingUrl()); jobInfo.add(loadJob.getLoadStatistic().toJson()); jobInfo.add(loadJob.getUserInfo().getQualifiedUser()); return jobInfo; } }
I used this format to avoid codecov warnings for the last line in this method.
private BType getMapType(BType type) { BType refType = Types.getReferredType(type); BType resultantType = symTable.mapType; if (refType.tag == TypeTags.UNION) { for (BType memberType : ((BUnionType) type).getMemberTypes()) { BType resultType = getMapType(memberType); if (resultType != symTable.mapType) { resultantType = resultType; break; } } } else if (refType.tag == TypeTags.INTERSECTION) { resultantType = getMapType(((BIntersectionType) refType).effectiveType); } else if (refType.tag == TypeTags.MAP) { resultantType = refType; } return resultantType; }
resultantType = getMapType(((BIntersectionType) refType).effectiveType);
private BType getMapType(BType type) { BType resultantType = types.getSafeType(Types.getReferredType(type), false, true); if (resultantType.tag == TypeTags.INTERSECTION) { return getMapType(((BIntersectionType) resultantType).effectiveType); } return resultantType; }
class QueryDesugar extends BLangNodeVisitor { private static final Name QUERY_CREATE_PIPELINE_FUNCTION = new Name("createPipeline"); private static final Name QUERY_CREATE_INPUT_FUNCTION = new Name("createInputFunction"); private static final Name QUERY_CREATE_NESTED_FROM_FUNCTION = new Name("createNestedFromFunction"); private static final Name QUERY_CREATE_LET_FUNCTION = new Name("createLetFunction"); private static final Name QUERY_CREATE_INNER_JOIN_FUNCTION = new Name("createInnerJoinFunction"); private static final Name QUERY_CREATE_OUTER_JOIN_FUNCTION = new Name("createOuterJoinFunction"); private static final Name QUERY_CREATE_FILTER_FUNCTION = new Name("createFilterFunction"); private static final Name QUERY_CREATE_ORDER_BY_FUNCTION = new Name("createOrderByFunction"); private static final Name QUERY_CREATE_SELECT_FUNCTION = new Name("createSelectFunction"); private static final Name QUERY_CREATE_DO_FUNCTION = new Name("createDoFunction"); private static final Name QUERY_CREATE_LIMIT_FUNCTION = new Name("createLimitFunction"); private static final Name QUERY_ADD_STREAM_FUNCTION = new Name("addStreamFunction"); private static final Name QUERY_CONSUME_STREAM_FUNCTION = new Name("consumeStream"); private static final Name QUERY_TO_ARRAY_FUNCTION = new Name("toArray"); private static final Name QUERY_TO_STRING_FUNCTION = new Name("toString"); private static final Name QUERY_TO_XML_FUNCTION = new Name("toXML"); private static final Name QUERY_ADD_TO_TABLE_FUNCTION = new Name("addToTable"); private static final Name QUERY_ADD_TO_MAP_FUNCTION = new Name("addToMap"); private static final Name QUERY_GET_STREAM_FROM_PIPELINE_FUNCTION = new Name("getStreamFromPipeline"); private static final String FRAME_PARAMETER_NAME = "$frame$"; private static final CompilerContext.Key<QueryDesugar> QUERY_DESUGAR_KEY = new CompilerContext.Key<>(); private BLangExpression onConflictExpr; private BVarSymbol currentFrameSymbol; private BLangBlockFunctionBody currentQueryLambdaBody; 
    // Identifiers seen within the current query lambda, keyed by name.
    private Map<String, BSymbol> identifiers;
    // Counter used to generate unique `$streamElement$_<n>` variable names.
    private int streamElementCount = 0;
    private final Desugar desugar;
    private final SymbolTable symTable;
    private final SymbolResolver symResolver;
    private final Names names;
    private final Types types;
    // Enclosing symbol environment of the query being desugared.
    private SymbolEnv env;
    private SymbolEnv queryEnv;
    private boolean withinLambdaFunc = false;
    // Error types observed via checked expressions inside the current query;
    // saved/restored around nested queries.
    private HashSet<BType> checkedErrorList;

    private QueryDesugar(CompilerContext context) {
        // Register in the context first so reentrant getInstance calls see this instance.
        context.put(QUERY_DESUGAR_KEY, this);
        this.symTable = SymbolTable.getInstance(context);
        this.symResolver = SymbolResolver.getInstance(context);
        this.names = Names.getInstance(context);
        this.types = Types.getInstance(context);
        this.desugar = Desugar.getInstance(context);
    }

    /**
     * Returns the per-compilation singleton instance, creating it on first use.
     *
     * @param context compiler context.
     * @return the QueryDesugar instance for this context.
     */
    public static QueryDesugar getInstance(CompilerContext context) {
        QueryDesugar desugar = context.get(QUERY_DESUGAR_KEY);
        if (desugar == null) {
            desugar = new QueryDesugar(context);
        }
        return desugar;
    }

    /**
     * Desugar query expression.
     *
     * @param queryExpr query expression to be desugared.
     * @param env symbol env.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return desugared query expression.
     */
    BLangStatementExpression desugar(BLangQueryExpr queryExpr, SymbolEnv env,
                                     List<BLangStatement> stmtsToBePropagated) {
        // Save/restore the checked-error set so nested queries don't leak errors outward.
        HashSet<BType> prevCheckedErrorList = this.checkedErrorList;
        this.checkedErrorList = new HashSet<>();
        List<BLangNode> clauses = queryExpr.getQueryClauses();
        Location pos = clauses.get(0).pos;
        // Build the stream pipeline into queryBlock; streamRef refers to the result stream.
        BLangBlockStmt queryBlock = ASTBuilderUtil.createBlockStmt(pos);
        BLangVariableReference streamRef =
                buildStream(clauses, queryExpr.getBType(), env, queryBlock, stmtsToBePropagated);
        BLangStatementExpression streamStmtExpr;
        BLangLiteral isReadonly = ASTBuilderUtil.createLiteral(pos, symTable.booleanType,
                Symbols.isFlagOn(queryExpr.getBType().flags, Flags.READONLY));
        if (queryExpr.isStream) {
            // `stream` result: the pipeline's stream is the result itself.
            streamStmtExpr = ASTBuilderUtil.createStatementExpression(queryBlock, streamRef);
            streamStmtExpr.setBType(streamRef.getBType());
        } else if (queryExpr.isTable) {
            // `table` result: collect into a table constructor via addToTable,
            // passing the on-conflict handler (nil when absent).
            onConflictExpr = (onConflictExpr == null)
                    ? ASTBuilderUtil.createLiteral(pos, symTable.nilType, Names.NIL_VALUE)
                    : onConflictExpr;
            BLangVariableReference tableRef = addTableConstructor(queryExpr, queryBlock);
            BLangVariableReference result = getStreamFunctionVariableRef(queryBlock,
                    QUERY_ADD_TO_TABLE_FUNCTION,
                    Lists.of(streamRef, tableRef, onConflictExpr, isReadonly), pos);
            streamStmtExpr = ASTBuilderUtil.createStatementExpression(queryBlock,
                    addTypeConversionExpr(result, queryExpr.getBType()));
            streamStmtExpr.setBType(tableRef.getBType());
            onConflictExpr = null;  // consumed; reset for the next query
        } else if (queryExpr.isMap) {
            // `map` result: collect into a map literal via addToMap.
            onConflictExpr = (onConflictExpr == null)
                    ? ASTBuilderUtil.createLiteral(pos, symTable.nilType, Names.NIL_VALUE)
                    : onConflictExpr;
            BMapType mapType = (BMapType) getMapType(queryExpr.getBType());
            BLangRecordLiteral.BLangMapLiteral mapLiteral =
                    new BLangRecordLiteral.BLangMapLiteral(queryExpr.pos, mapType, new ArrayList<>());
            BLangVariableReference result = getStreamFunctionVariableRef(queryBlock,
                    QUERY_ADD_TO_MAP_FUNCTION,
                    Lists.of(streamRef, mapLiteral, onConflictExpr, isReadonly), pos);
            streamStmtExpr = ASTBuilderUtil.createStatementExpression(queryBlock,
                    addTypeConversionExpr(result, queryExpr.getBType()));
            streamStmtExpr.setBType(queryExpr.getBType());
            onConflictExpr = null;  // consumed; reset for the next query
        } else {
            // xml / string / array result: pick the matching collector function.
            BLangVariableReference result;
            BType refType = Types.getReferredType(queryExpr.getBType());
            if (isXml(refType)) {
                if (types.isSubTypeOfReadOnly(refType, env)) {
                    isReadonly.value = true;
                }
                result = getStreamFunctionVariableRef(queryBlock, QUERY_TO_XML_FUNCTION,
                        Lists.of(streamRef, isReadonly), pos);
            } else if (TypeTags.isStringTypeTag(refType.tag)) {
                result = getStreamFunctionVariableRef(queryBlock, QUERY_TO_STRING_FUNCTION,
                        Lists.of(streamRef), pos);
            } else {
                BType arrayType = refType;
                if (refType.tag == TypeTags.UNION) {
                    // Pick the first array member of the union as the collection type.
                    arrayType = ((BUnionType) refType).getMemberTypes()
                            .stream().filter(m -> Types.getReferredType(m).tag == TypeTags.ARRAY)
                            .findFirst().orElse(symTable.arrayType);
                }
                BLangArrayLiteral arr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
                arr.exprs = new ArrayList<>();
                arr.setBType(arrayType);
                result = getStreamFunctionVariableRef(queryBlock, QUERY_TO_ARRAY_FUNCTION,
                        Lists.of(streamRef, arr, isReadonly), pos);
            }
            streamStmtExpr = ASTBuilderUtil.createStatementExpression(queryBlock,
                    addTypeConversionExpr(result, queryExpr.getBType()));
            streamStmtExpr.setBType(queryExpr.getBType());
        }
        this.checkedErrorList = prevCheckedErrorList;
        return streamStmtExpr;
    }

    /**
     * Returns true if the given type is an xml type, a union whose members are all xml,
     * or an intersection whose effective type is xml.
     */
    private boolean isXml(BType type) {
        BType refType = Types.getReferredType(type);
        if (TypeTags.isXMLTypeTag(refType.tag)) {
            return true;
        }
        switch (refType.tag) {
            case TypeTags.UNION:
                // A union is xml only when every member is xml.
                for (BType memberType : ((BUnionType) refType).getMemberTypes()) {
                    if (!isXml(memberType)) {
                        return false;
                    }
                }
                return true;
            case TypeTags.INTERSECTION:
                return isXml(((BIntersectionType) refType).getEffectiveType());
            default:
                return false;
        }
    }

    /**
     * Desugar query action.
     *
     * @param queryAction query action to be desugared.
     * @param env symbol env.
     * @param stmtsToBePropagated statements to be propagated to do clause.
     * @return desugared query action.
     */
    BLangStatementExpression desugar(BLangQueryAction queryAction, SymbolEnv env,
                                     List<BLangStatement> stmtsToBePropagated) {
        // Save/restore the checked-error set so nested queries don't leak errors outward.
        HashSet<BType> prevCheckedErrorList = this.checkedErrorList;
        this.checkedErrorList = new HashSet<>();
        List<BLangNode> clauses = queryAction.getQueryClauses();
        Location pos = clauses.get(0).pos;
        BType returnType = symTable.errorOrNilType;
        if (queryAction.returnsWithinDoClause) {
            // A `return` inside the do-clause must produce the enclosing function's return type.
            BInvokableSymbol invokableSymbol = env.enclInvokable.symbol;
            returnType = ((BInvokableType) invokableSymbol.type).retType;
        }
        BLangBlockStmt queryBlock = ASTBuilderUtil.createBlockStmt(pos);
        BLangVariableReference streamRef =
                buildStream(clauses, returnType, env, queryBlock, stmtsToBePropagated);
        BLangVariableReference result = getStreamFunctionVariableRef(queryBlock,
                QUERY_CONSUME_STREAM_FUNCTION, returnType, Lists.of(streamRef), pos);
        BLangStatementExpression stmtExpr;
        if (queryAction.returnsWithinDoClause) {
            // Emit: if !(result is ()) { return result; }
            BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, result);
            BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
            ifBody.stmts.add(returnStmt);
            BLangTypeTestExpr nilTypeTestExpr =
                    desugar.createTypeCheckExpr(pos, result, desugar.getNillTypeNode());
            nilTypeTestExpr.setBType(symTable.booleanType);
            BLangGroupExpr nilCheckGroupExpr = new BLangGroupExpr();
            nilCheckGroupExpr.setBType(symTable.booleanType);
            nilCheckGroupExpr.expression = desugar.createNotBinaryExpression(pos, nilTypeTestExpr);
            BLangIf ifStatement = ASTBuilderUtil.createIfStmt(pos, queryBlock);
            ifStatement.expr = nilCheckGroupExpr;
            ifStatement.body = ifBody;
        }
        stmtExpr = ASTBuilderUtil.createStatementExpression(queryBlock,
                addTypeConversionExpr(result, returnType));
        stmtExpr.setBType(returnType);
        this.checkedErrorList = prevCheckedErrorList;
        return stmtExpr;
    }

    /**
     * Write the pipeline to the given `block` and return the reference to the resulting stream.
     *
     * @param clauses list of query clauses.
     * @param resultType result type of the query output.
     * @param env symbol env.
     * @param block parent block to write to.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return variableReference to created _StreamPipeline.
     */
    BLangVariableReference buildStream(List<BLangNode> clauses, BType resultType, SymbolEnv env,
                                       BLangBlockStmt block, List<BLangStatement> stmtsToBePropagated) {
        this.env = env;
        // The first clause is always the initial from-clause; it seeds the pipeline.
        BLangFromClause initFromClause = (BLangFromClause) clauses.get(0);
        final BLangVariableReference initPipeline = addPipeline(block, initFromClause.pos,
                initFromClause.collection, resultType, true);
        BLangVariableReference initFrom = addInputFunction(block, initFromClause, stmtsToBePropagated);
        addStreamFunction(block, initPipeline, initFrom);
        // Chain each subsequent clause as a stream function onto the initial pipeline.
        for (BLangNode clause : clauses.subList(1, clauses.size())) {
            switch (clause.getKind()) {
                case FROM:
                    BLangFromClause fromClause = (BLangFromClause) clause;
                    BLangVariableReference nestedFromFunc =
                            addNestedFromFunction(block, fromClause, stmtsToBePropagated);
                    addStreamFunction(block, initPipeline, nestedFromFunc);
                    BLangVariableReference fromInputFunc =
                            addInputFunction(block, fromClause, stmtsToBePropagated);
                    addStreamFunction(block, initPipeline, fromInputFunc);
                    break;
                case JOIN:
                    // A join builds its own pipeline over the joined collection, then
                    // merges it into the main pipeline via the join stream function.
                    BLangJoinClause joinClause = (BLangJoinClause) clause;
                    BLangVariableReference joinPipeline = addPipeline(block, joinClause.pos,
                            joinClause.collection, resultType, false);
                    BLangVariableReference joinInputFunc =
                            addInputFunction(block, joinClause, stmtsToBePropagated);
                    addStreamFunction(block, joinPipeline, joinInputFunc);
                    BLangVariableReference joinFunc =
                            addJoinFunction(block, joinClause, joinPipeline, stmtsToBePropagated);
                    addStreamFunction(block, initPipeline, joinFunc);
                    break;
                case LET_CLAUSE:
                    BLangVariableReference letFunc =
                            addLetFunction(block, (BLangLetClause) clause, stmtsToBePropagated);
                    addStreamFunction(block, initPipeline, letFunc);
                    break;
                case WHERE:
                    BLangVariableReference whereFunc =
                            addWhereFunction(block, (BLangWhereClause) clause, stmtsToBePropagated);
                    addStreamFunction(block, initPipeline, whereFunc);
                    break;
                case ORDER_BY:
                    BLangVariableReference orderFunc =
                            addOrderByFunction(block, (BLangOrderByClause) clause, stmtsToBePropagated);
                    addStreamFunction(block, initPipeline, orderFunc);
                    break;
                case SELECT:
                    BLangVariableReference selectFunc =
                            addSelectFunction(block, (BLangSelectClause) clause, stmtsToBePropagated);
                    addStreamFunction(block, initPipeline, selectFunc);
                    break;
                case DO:
                    BLangVariableReference doFunc =
                            addDoFunction(block, (BLangDoClause) clause, stmtsToBePropagated);
                    addStreamFunction(block, initPipeline, doFunc);
                    break;
                case LIMIT:
                    BLangVariableReference limitFunc =
                            addLimitFunction(block, (BLangLimitClause) clause);
                    addStreamFunction(block, initPipeline, limitFunc);
                    break;
                case ON_CONFLICT:
                    // Not a stream function: stash the expression for the table/map collector.
                    final BLangOnConflictClause onConflict = (BLangOnConflictClause) clause;
                    onConflictExpr = onConflict.expression;
                    break;
            }
        }
        return addGetStreamFromPipeline(block, initPipeline);
    }

    /**
     * Desugar fromClause/joinClause to below and return a reference to created join _StreamPipeline.
     *      _StreamPipeline pipeline = createPipeline(collection);
     *
     * @param blockStmt parent block to write to.
     * @param pos diagnostic pos of the collection.
     * @param collection reference to the collection.
     * @param resultType constraint type of the collection.
     * @param assignErrorToResult should the error be assigned to result.
     * @return variableReference to created _StreamPipeline.
     */
    BLangVariableReference addPipeline(BLangBlockStmt blockStmt, Location pos,
                                       BLangExpression collection, BType resultType,
                                       boolean assignErrorToResult) {
        String name = getNewVarName();
        if (assignErrorToResult) {
            // Strip the check/checkpanic wrapper so the error flows into the result stream.
            collection = unwrapCheckedExpression(collection);
        }
        // Materialize the collection into a temp variable so it is evaluated once.
        BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString(name), env.scope.owner.pkgID,
                collection.getBType(), this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(pos, name,
                collection.getBType(), addTypeConversionExpr(collection, collection.getBType()),
                dataSymbol);
        BLangSimpleVariableDef dataVarDef = ASTBuilderUtil.createVariableDef(pos, dataVariable);
        BLangVariableReference valueVarRef = ASTBuilderUtil.createVariableRef(pos, dataSymbol);
        blockStmt.addStatement(dataVarDef);
        // Derive the element (constraint) and completion types from the result type.
        BType constraintType = resultType;
        BType completionType = symTable.errorOrNilType;
        BType refType = Types.getReferredType(resultType);
        if (refType.tag == TypeTags.ARRAY) {
            constraintType = ((BArrayType) refType).eType;
        } else if (refType.tag == TypeTags.STREAM) {
            constraintType = ((BStreamType) refType).constraint;
            completionType = ((BStreamType) refType).completionType;
        }
        // Pass both as typedesc arguments to createPipeline.
        BType constraintTdType = new BTypedescType(constraintType, symTable.typeDesc.tsymbol);
        BLangTypedescExpr constraintTdExpr = new BLangTypedescExpr();
        constraintTdExpr.resolvedType = constraintType;
        constraintTdExpr.setBType(constraintTdType);
        BType completionTdType = new BTypedescType(completionType, symTable.typeDesc.tsymbol);
        BLangTypedescExpr completionTdExpr = new BLangTypedescExpr();
        completionTdExpr.resolvedType = completionType;
        completionTdExpr.setBType(completionTdType);
        return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_PIPELINE_FUNCTION,
                Lists.of(valueVarRef, constraintTdExpr, completionTdExpr), pos);
    }

    /**
     * Desugar inputClause to below and return a reference to created from _StreamFunction.
     *      _StreamFunction xsFrom = createFromFunction(function(_Frame frame) returns _Frame|error?
     *      {
     *          int x = <int> frame["value"];
     *          frame["x"] = x;
     *          return frame;
     *      });
     *
     * @param blockStmt parent block to write to.
     * @param inputClause to be desugared.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return variableReference to created from _StreamFunction.
     */
    BLangVariableReference addInputFunction(BLangBlockStmt blockStmt, BLangInputClause inputClause,
                                            List<BLangStatement> stmtsToBePropagated) {
        Location pos = inputClause.pos;
        BLangLambdaFunction lambda = createPassthroughLambda(pos);
        BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body;
        body.stmts.addAll(0, stmtsToBePropagated);
        BVarSymbol frameSymbol = lambda.function.requiredParams.get(0).symbol;
        // Symbols bound by the input clause are moved out of the enclosing scope
        // and written into the frame instead.
        List<BVarSymbol> symbols =
                getIntroducedSymbols((BLangVariable) inputClause.variableDefinitionNode.getVariable());
        shadowSymbolScope(pos, body, ASTBuilderUtil.createVariableRef(pos, frameSymbol), symbols);
        // Read the element as frame["value"] and bind it to the clause variable.
        BLangFieldBasedAccess valueAccessExpr =
                desugar.getValueAccessExpression(inputClause.pos, symTable.anyOrErrorType, frameSymbol);
        valueAccessExpr.expr = desugar.addConversionExprIfRequired(valueAccessExpr.expr,
                types.getSafeType(valueAccessExpr.expr.getBType(), true, false));
        VariableDefinitionNode variableDefinitionNode = inputClause.variableDefinitionNode;
        BLangVariable variable = (BLangVariable) variableDefinitionNode.getVariable();
        setSymbolOwner(variable, env.scope.owner);
        variable.setInitialExpression(
                desugar.addConversionExprIfRequired(valueAccessExpr, inputClause.varType));
        // The binding must run before any propagated/shadowing statements.
        body.stmts.add(0, (BLangStatement) variableDefinitionNode);
        lambda.accept(this);
        return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_INPUT_FUNCTION,
                Lists.of(lambda), pos);
    }

    /**
     * Desugar fromClause to below and return a reference to created from _StreamFunction.
     *      _StreamFunction xnFrom = createNestedFromFunction(function(_Frame frame) returns any|error? {
     *          any collection = frame["collection"]
     *          return collection;
     *      });
     *
     * @param blockStmt parent block to write to.
     * @param fromClause to be desugared.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return variableReference to created from _StreamFunction.
     */
    BLangVariableReference addNestedFromFunction(BLangBlockStmt blockStmt, BLangFromClause fromClause,
                                                 List<BLangStatement> stmtsToBePropagated) {
        Location pos = fromClause.pos;
        // Lambda that simply returns the nested collection expression.
        BLangUnionTypeNode returnType = getAnyAndErrorTypeNode();
        BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
        returnNode.expr = fromClause.collection;
        returnNode.pos = pos;
        BLangLambdaFunction lambda = createLambdaFunction(pos, returnType, returnNode, false);
        ((BLangBlockFunctionBody) lambda.function.body).stmts.addAll(0, stmtsToBePropagated);
        lambda.accept(this);
        return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_NESTED_FROM_FUNCTION,
                Lists.of(lambda), pos);
    }

    /**
     * Desugar joinClauses to below and return a reference to created join _StreamFunction.
     *      _StreamFunction joinFunc = createJoinFunction(joinPipeline);
     *
     * @param blockStmt parent block to write to.
     * @param joinClause to be desugared.
     * @param joinPipeline previously created _StreamPipeline reference to be joined.
     * @return variableReference to created join _StreamFunction.
     */
    BLangVariableReference addJoinFunction(BLangBlockStmt blockStmt, BLangJoinClause joinClause,
                                           BLangVariableReference joinPipeline,
                                           List<BLangStatement> stmtsToBePropagated) {
        // Key functions compute the lhs/rhs equality keys of the on-clause per frame.
        BLangExpression lhsExpr = (BLangExpression) joinClause.onClause.getLeftExpression();
        BLangExpression rhsExpr = (BLangExpression) joinClause.onClause.getRightExpression();
        BLangLambdaFunction lhsKeyFunction = createKeyFunction(lhsExpr, stmtsToBePropagated);
        BLangLambdaFunction rhsKeyFunction = createKeyFunction(rhsExpr, stmtsToBePropagated);
        if (joinClause.isOuterJoin) {
            // Outer join needs a nil-frame to emit for unmatched lhs elements.
            List<BVarSymbol> symbols =
                    getIntroducedSymbols((BLangVariable) joinClause.variableDefinitionNode.getVariable());
            final BLangSimpleVarRef nilFrame = defineNilFrameForType(symbols, blockStmt, rhsExpr.pos);
            return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_OUTER_JOIN_FUNCTION,
                    Lists.of(joinPipeline, lhsKeyFunction, rhsKeyFunction, nilFrame), joinClause.pos);
        } else {
            return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_INNER_JOIN_FUNCTION,
                    Lists.of(joinPipeline, lhsKeyFunction, rhsKeyFunction), joinClause.pos);
        }
    }

    /**
     * Desugar letClause to below and return a reference to created let _StreamFunction.
     *      _StreamFunction ysLet = createLetFunction(function(_Frame frame) returns _Frame|error? {
     *          frame["y2"] = <int> frame["y"] * <int> frame["y"];
     *          return frame;
     *      });
     *
     * @param blockStmt parent block to write to.
     * @param letClause to be desugared.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return variableReference to created let _StreamFunction.
     */
    BLangVariableReference addLetFunction(BLangBlockStmt blockStmt, BLangLetClause letClause,
                                          List<BLangStatement> stmtsToBePropagated) {
        Location pos = letClause.pos;
        BLangLambdaFunction lambda = createPassthroughLambda(pos);
        BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body;
        BVarSymbol frameSymbol = lambda.function.requiredParams.get(0).symbol;
        // Move let-bound symbols out of the enclosing scope and into the frame.
        List<BVarSymbol> symbols = getIntroducedSymbols(letClause);
        shadowSymbolScope(pos, body, ASTBuilderUtil.createVariableRef(pos, frameSymbol), symbols);
        // NOTE: reverses the clause's declaration list in place so that prepending at
        // index 0 restores the original declaration order inside the lambda body.
        Collections.reverse(letClause.letVarDeclarations);
        for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
            body.stmts.add(0, (BLangStatement) letVariable.definitionNode);
            setSymbolOwner((BLangVariable) letVariable.definitionNode.getVariable(), env.scope.owner);
        }
        body.stmts.addAll(0, stmtsToBePropagated);
        lambda.accept(this);
        return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_LET_FUNCTION,
                Lists.of(lambda), pos);
    }

    /**
     * Desugar whereClause to below and return a reference to created filter _StreamFunction.
     *      _StreamFunction xsFilter = createFilterFunction(function(_Frame frame) returns boolean {
     *          return <int>frame["x"] > 0;
     *      });
     *
     * @param blockStmt parent block to write to.
     * @param whereClause to be desugared.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return variableReference to created filter _StreamFunction.
     */
    BLangVariableReference addWhereFunction(BLangBlockStmt blockStmt, BLangWhereClause whereClause,
                                            List<BLangStatement> stmtsToBePropagated) {
        Location pos = whereClause.pos;
        BLangLambdaFunction lambda = createFilterLambda(pos);
        BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body;
        BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
        returnNode.pos = pos;
        body.stmts.addAll(0, stmtsToBePropagated);
        // Return the where-condition, converted to the filter lambda's return type.
        returnNode.expr = desugar.addConversionExprIfRequired(whereClause.expression,
                lambda.function.returnTypeNode.getBType());
        body.addStatement(returnNode);
        lambda.accept(this);
        return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_FILTER_FUNCTION,
                Lists.of(lambda), pos);
    }

    /**
     * Desugar orderByClause to below and return a reference to created orderBy _StreamFunction.
     *      _StreamFunction orderByFunc = createOrderByFunction(function(_Frame frame) {
     *          _Frame frame = {"orderKey": frame["x2"] + frame["y2"], $orderDirection$: true + false"};
     *      });
     *
     * @param blockStmt parent block to write to.
     * @param orderByClause to be desugared.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return variableReference to created orderBy _StreamFunction.
     */
    BLangVariableReference addOrderByFunction(BLangBlockStmt blockStmt,
                                              BLangOrderByClause orderByClause,
                                              List<BLangStatement> stmtsToBePropagated) {
        Location pos = orderByClause.pos;
        BLangLambdaFunction lambda = createActionLambda(pos);
        BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body;
        body.stmts.addAll(0, stmtsToBePropagated);
        BVarSymbol frameSymbol = lambda.function.requiredParams.get(0).symbol;
        BLangSimpleVarRef frame = ASTBuilderUtil.createVariableRef(pos, frameSymbol);
        // Collect the order-key expressions and their ascending/descending flags
        // into two parallel arrays written onto the frame.
        BLangArrayLiteral sortFieldsArrayExpr =
                (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        sortFieldsArrayExpr.exprs = new ArrayList<>();
        sortFieldsArrayExpr.setBType(new BArrayType(symTable.anydataType));
        BLangArrayLiteral sortModesArrayExpr =
                (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        sortModesArrayExpr.exprs = new ArrayList<>();
        sortModesArrayExpr.setBType(new BArrayType(symTable.booleanType));
        for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) {
            BLangOrderKey orderKey = (BLangOrderKey) orderKeyNode;
            sortFieldsArrayExpr.exprs.add(orderKey.expression);
            sortModesArrayExpr.exprs.add(ASTBuilderUtil.createLiteral(orderKey.pos,
                    symTable.booleanType, orderKey.getOrderDirection()));
        }
        // frame["$orderKey$"] = sortFields; frame["$orderDirection$"] = sortModes;
        BLangStatement orderKeyStmt = getAddToFrameStmt(pos, frame, "$orderKey$", sortFieldsArrayExpr);
        body.stmts.add(orderKeyStmt);
        BLangStatement orderDirectionStmt =
                getAddToFrameStmt(pos, frame, "$orderDirection$", sortModesArrayExpr);
        body.stmts.add(orderDirectionStmt);
        lambda.accept(this);
        return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_ORDER_BY_FUNCTION,
                Lists.of(lambda), pos);
    }

    /**
     * Desugar selectClause to below and return a reference to created select _StreamFunction.
     *      _StreamFunction selectFunc = createSelectFunction(function(_Frame frame) returns _Frame|error?
     *      {
     *          int x2 = <int> frame["x2"];
     *          int y2 = <int> frame["y2"];
     *          _Frame frame = {"value": x2 + y2};
     *          return frame;
     *      });
     *
     * @param blockStmt parent block to write to.
     * @param selectClause to be desugared.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return variableReference to created select _StreamFunction.
     */
    BLangVariableReference addSelectFunction(BLangBlockStmt blockStmt, BLangSelectClause selectClause,
                                             List<BLangStatement> stmtsToBePropagated) {
        Location pos = selectClause.pos;
        BLangLambdaFunction lambda = createPassthroughLambda(pos);
        BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body;
        body.stmts.addAll(0, stmtsToBePropagated);
        BVarSymbol oldFrameSymbol = lambda.function.requiredParams.get(0).symbol;
        BLangSimpleVarRef frame = ASTBuilderUtil.createVariableRef(pos, oldFrameSymbol);
        // Write the select result as frame["$value$"], just before the trailing return.
        BLangStatement assignment = getAddToFrameStmt(pos, frame, "$value$", selectClause.expression);
        body.stmts.add(body.stmts.size() - 1, assignment);
        lambda.accept(this);
        return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_SELECT_FUNCTION,
                Lists.of(lambda), pos);
    }

    /**
     * Desugar doClause to below and return a reference to created do _StreamFunction.
     *      _StreamFunction doFunc = createDoFunction(function(_Frame frame) {
     *          int x2 = <int> frame["x2"];
     *          int y2 = <int> frame["y2"];
     *      });
     *
     * @param blockStmt parent block to write to.
     * @param doClause to be desugared.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return variableReference to created do _StreamFunction.
     */
    BLangVariableReference addDoFunction(BLangBlockStmt blockStmt, BLangDoClause doClause,
                                         List<BLangStatement> stmtsToBePropagated) {
        Location pos = doClause.pos;
        BLangLambdaFunction lambda = createActionLambda(pos);
        BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body;
        body.stmts.addAll(0, stmtsToBePropagated);
        // Copy the do-clause body verbatim into the action lambda.
        for (BLangStatement stmt : doClause.body.stmts) {
            body.addStatement(stmt);
        }
        lambda.accept(this);
        return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_DO_FUNCTION,
                Lists.of(lambda), pos);
    }

    /**
     * Desugar limit clause and return a reference to created limit _StreamFunction.
     *
     * @param blockStmt parent block to write to.
     * @param limitClause to be desugared.
     * @return variableReference to created limit _StreamFunction.
     */
    BLangVariableReference addLimitFunction(BLangBlockStmt blockStmt, BLangLimitClause limitClause) {
        Location pos = limitClause.pos;
        // Lambda returning the limit count (int|error).
        BLangUnionTypeNode returnTypeNode = getIntErrorTypeNode();
        BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
        returnNode.expr = desugar.addConversionExprIfRequired(limitClause.expression,
                returnTypeNode.getBType());
        returnNode.pos = pos;
        BLangLambdaFunction limitFunction = createLambdaFunction(pos, returnTypeNode, returnNode, false);
        limitFunction.accept(this);
        return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_LIMIT_FUNCTION,
                Lists.of(limitFunction), pos);
    }

    /**
     * Desugar to following invocation.
     *      stream:addStreamFunction(pipeline, streamFunction);
     *
     * @param blockStmt parent block to write to.
     * @param pipelineRef variableReference to pipeline.
     * @param functionRef variableReference to stream function.
     */
    void addStreamFunction(BLangBlockStmt blockStmt, BLangVariableReference pipelineRef,
                           BLangVariableReference functionRef) {
        BLangInvocation addStreamFunctionInvocation =
                createQueryLibInvocation(QUERY_ADD_STREAM_FUNCTION,
                        Lists.of(pipelineRef, functionRef), pipelineRef.pos);
        // Result is discarded; emitted purely for its side effect on the pipeline.
        BLangExpressionStmt stmt = ASTBuilderUtil.createExpressionStmt(pipelineRef.pos, blockStmt);
        stmt.expr = addStreamFunctionInvocation;
    }

    /**
     * Desugar to following invocation.
     *      stream<any|error, error?> result = xsPipeline.getStream();
     *
     * @param blockStmt parent block to write to.
     * @param pipelineRef variableReference to pipeline.
     * @return variableReference to stream.
     */
    BLangVariableReference addGetStreamFromPipeline(BLangBlockStmt blockStmt,
                                                    BLangVariableReference pipelineRef) {
        Location pos = pipelineRef.pos;
        return getStreamFunctionVariableRef(blockStmt, QUERY_GET_STREAM_FROM_PIPELINE_FUNCTION,
                null, Lists.of(pipelineRef), pos);
    }

    /**
     * Create a table constructor expression.
     *
     * @param queryExpr query expression.
     * @param queryBlock parent block to write to.
     * @return reference to updated table.
     */
    BLangVariableReference addTableConstructor(BLangQueryExpr queryExpr, BLangBlockStmt queryBlock) {
        Location pos = queryExpr.pos;
        final BType type = queryExpr.getBType();
        String name = getNewVarName();
        BType tableType = type;
        BType refType = Types.getReferredType(type);
        if (refType.tag == TypeTags.UNION) {
            // Pick the table member (or the effective table of an intersection member)
            // from the union; fall back to the generic table type.
            tableType = symTable.tableType;
            for (BType memberType : ((BUnionType) refType).getMemberTypes()) {
                int memberTypeTag = Types.getReferredType(memberType).tag;
                if (memberTypeTag == TypeTags.TABLE) {
                    tableType = memberType;
                } else if (memberTypeTag == TypeTags.INTERSECTION
                        && ((BIntersectionType) memberType).effectiveType.tag == TypeTags.TABLE) {
                    tableType = ((BIntersectionType) memberType).effectiveType;
                }
            }
        }
        final List<IdentifierNode> keyFieldIdentifiers = queryExpr.fieldNameIdentifierList;
        BLangTableConstructorExpr tableConstructorExpr =
                (BLangTableConstructorExpr) TreeBuilder.createTableConstructorExpressionNode();
        tableConstructorExpr.pos = pos;
        tableConstructorExpr.setBType(tableType);
        if (!keyFieldIdentifiers.isEmpty()) {
            // Carry over the `table key(...)` specifier fields.
            BLangTableKeySpecifier keySpecifier =
                    (BLangTableKeySpecifier) TreeBuilder.createTableKeySpecifierNode();
            keySpecifier.pos = pos;
            for (IdentifierNode identifier : keyFieldIdentifiers) {
                keySpecifier.addFieldNameIdentifier(identifier);
            }
            tableConstructorExpr.tableKeySpecifier = keySpecifier;
        }
        BVarSymbol tableSymbol = new BVarSymbol(0, names.fromString(name), env.scope.owner.pkgID,
                tableType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVariable tableVariable = ASTBuilderUtil.createVariable(pos, name, tableType,
                tableConstructorExpr, tableSymbol);
        queryBlock.addStatement(ASTBuilderUtil.createVariableDef(pos, tableVariable));
        return ASTBuilderUtil.createVariableRef(pos, tableSymbol);
    }

    /**
     * Adds a type cast expression to given expression.
     *
     * @param expr to be casted.
     * @param type to be casted into.
     * @return expression with the type cast.
     */
    private BLangExpression addTypeConversionExpr(BLangExpression expr, BType type) {
        BLangTypeConversionExpr conversionExpr =
                (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
        conversionExpr.expr = expr;
        conversionExpr.targetType = type;
        conversionExpr.setBType(type);
        conversionExpr.pos = expr.pos;
        // Skip the type checker for this synthesized cast.
        conversionExpr.checkTypes = false;
        return conversionExpr;
    }

    /**
     * Create and return a lambda `function(_Frame frame) returns _Frame|error? {...; return frame;}`
     *
     * @param pos of the lambda.
     * @return created lambda function.
     */
    private BLangLambdaFunction createPassthroughLambda(Location pos) {
        BLangUnionTypeNode returnType = getFrameErrorNilTypeNode();
        BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
        returnNode.pos = pos;
        return createLambdaFunction(pos, returnType, returnNode, true);
    }

    /**
     * Create and return a lambda `function(_Frame frame) returns boolean {...}`.
     *
     * @param pos of the lambda.
     * @return created lambda function.
     */
    private BLangLambdaFunction createFilterLambda(Location pos) {
        BLangUnionTypeNode returnType = getBooleanErrorTypeNode();
        return createLambdaFunction(pos, returnType, null, false);
    }

    /**
     * Create and return a lambda `function(_Frame frame) {...}`.
     *
     * @param pos of the lambda.
     * @return created lambda function.
     */
    private BLangLambdaFunction createActionLambda(Location pos) {
        BLangUnionTypeNode returnType = getAnyAndErrorTypeNode();
        return createLambdaFunction(pos, returnType, null, false);
    }

    /**
     * Creates and return a lambda function without body.
     *
     * @param pos of the lambda.
     * @return created lambda function.
     */
    private BLangLambdaFunction createLambdaFunction(Location pos, TypeNode returnType,
                                                     BLangReturn returnNode, boolean isPassthrough) {
        // Every query lambda takes a single `_Frame` parameter.
        BType frameType = getFrameTypeSymbol().type;
        BVarSymbol frameSymbol = new BVarSymbol(0, names.fromString(FRAME_PARAMETER_NAME),
                this.env.scope.owner.pkgID, frameType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVariable frameVariable =
                ASTBuilderUtil.createVariable(pos, null, frameSymbol.type, null, frameSymbol);
        BLangVariableReference frameVarRef = ASTBuilderUtil.createVariableRef(pos, frameSymbol);
        BLangBlockFunctionBody body =
                (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
        if (returnNode != null) {
            if (isPassthrough) {
                // Passthrough lambdas return the frame they received.
                returnNode.setExpression(frameVarRef);
            }
            body.addStatement(returnNode);
        }
        return createLambdaFunction(pos, Lists.of(frameVariable), returnType, body);
    }

    /**
     * Creates and returns a lambda function.
     *
     * @param pos diagnostic pos.
     * @param requiredParams required parameters.
     * @param returnType return type of the lambda function.
     * @param lambdaBody body of the lambda function.
     * @return created lambda function.
     */
    private BLangLambdaFunction createLambdaFunction(Location pos,
                                                     List<BLangSimpleVariable> requiredParams,
                                                     TypeNode returnType, BLangFunctionBody lambdaBody) {
        BLangLambdaFunction lambdaFunction = desugar.createLambdaFunction(pos, "$streamLambda$",
                requiredParams, returnType, lambdaBody);
        // Marked so later phases recognize query lambdas; closures capture this env.
        lambdaFunction.function.addFlag(Flag.QUERY_LAMBDA);
        lambdaFunction.capturedClosureEnv = env;
        return lambdaFunction;
    }

    /**
     * Creates a variable to hold what function invocation returns,
     * and then return a varRef to that variable.
     *
     * @param blockStmt parent block to write the varDef into.
     * @param functionName function name.
     * @param requiredArgs required args.
     * @param pos pos diagnostic pos.
     * @return varRef to the created variable.
     */
    private BLangVariableReference getStreamFunctionVariableRef(BLangBlockStmt blockStmt,
                                                                Name functionName,
                                                                List<BLangExpression> requiredArgs,
                                                                Location pos) {
        // Type defaults to the invoked function's return type.
        return getStreamFunctionVariableRef(blockStmt, functionName, null, requiredArgs, pos);
    }

    /**
     * Creates a variable to hold what function invocation returns,
     * and then return a varRef to that variable.
     *
     * @param blockStmt parent block to write the varDef into.
     * @param functionName function name.
     * @param type expected type of the variable.
     * @param requiredArgs required args.
     * @param pos pos diagnostic pos.
     * @return varRef to the created variable.
     */
    private BLangVariableReference getStreamFunctionVariableRef(BLangBlockStmt blockStmt,
                                                                Name functionName, BType type,
                                                                List<BLangExpression> requiredArgs,
                                                                Location pos) {
        String name = getNewVarName();
        BLangInvocation queryLibInvocation = createQueryLibInvocation(functionName, requiredArgs, pos);
        type = (type == null) ? queryLibInvocation.getBType() : type;
        BVarSymbol varSymbol = new BVarSymbol(0, new Name(name), env.scope.owner.pkgID, type,
                env.scope.owner, pos, VIRTUAL);
        BLangSimpleVariable variable = ASTBuilderUtil.createVariable(pos, name, type,
                desugar.addConversionExprIfRequired(queryLibInvocation, type), varSymbol);
        BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(pos, variable);
        blockStmt.addStatement(variableDef);
        return ASTBuilderUtil.createVariableRef(pos, variable.symbol);
    }

    /**
     * Get unique variable name.
     *
     * @return new variable name.
     */
    private String getNewVarName() {
        return "$streamElement$" + UNDERSCORE + streamElementCount++;
    }

    /**
     * Load a function invokable symbol and return a invocation for that function.
     *
     * @param functionName function name.
     * @param requiredArgs list of required args.
     * @param pos diagnostic pos.
     * @return created invocation.
*/ private BLangInvocation createQueryLibInvocation(Name functionName, List<BLangExpression> requiredArgs, Location pos) { BInvokableSymbol symbol = getQueryLibInvokableSymbol(functionName); BLangInvocation bLangInvocation = ASTBuilderUtil .createInvocationExprForMethod(pos, symbol, requiredArgs, symResolver); bLangInvocation.setBType(symbol.retType); return bLangInvocation; } /** * Load and return symbol for given functionName in query lib. * * @param functionName of the function. * @return symbol for the function. */ private BInvokableSymbol getQueryLibInvokableSymbol(Name functionName) { return (BInvokableSymbol) symTable.langQueryModuleSymbol.scope .lookup(functionName).symbol; } private BLangStatement getAddToFrameStmt(Location pos, BLangVariableReference frame, String key, BLangExpression value) { BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, key); BLangFieldBasedAccess valueAccess = ASTBuilderUtil.createFieldAccessExpr(frame, valueIdentifier); valueAccess.pos = pos; valueAccess.setBType(symTable.anyOrErrorType); valueAccess.originalType = valueAccess.getBType(); return ASTBuilderUtil.createAssignmentStmt(pos, valueAccess, value); } private void shadowSymbolScope(Location pos, BLangBlockFunctionBody lambdaBody, BLangSimpleVarRef frameRef, List<BVarSymbol> symbols) { Collections.reverse(symbols); for (BVarSymbol symbol : symbols) { env.scope.entries.remove(symbol.name); env.enclPkg.globalVariableDependencies.values().forEach(d -> d.remove(symbol)); BLangStatement addToFrameStmt = getAddToFrameStmt(pos, frameRef, symbol.name.value, ASTBuilderUtil.createVariableRef(pos, symbol)); lambdaBody.stmts.add(0, addToFrameStmt); } } private void setSymbolOwner(BLangVariable variable, BSymbol owner) { if (variable == null) { return; } switch (variable.getKind()) { case VARIABLE: if (variable.symbol == null) { return; } variable.symbol.owner = owner; break; case TUPLE_VARIABLE: BLangTupleVariable tupleVariable = (BLangTupleVariable) variable; 
tupleVariable.memberVariables.forEach(v -> setSymbolOwner(v, owner)); setSymbolOwner(tupleVariable.restVariable, owner); break; case RECORD_VARIABLE: BLangRecordVariable recordVariable = (BLangRecordVariable) variable; recordVariable.variableList.forEach(value -> setSymbolOwner(value.valueBindingPattern, owner)); setSymbolOwner(recordVariable.restParam, owner); break; case ERROR_VARIABLE: BLangErrorVariable errorVariable = (BLangErrorVariable) variable; setSymbolOwner(errorVariable.message, owner); setSymbolOwner(errorVariable.restDetail, owner); errorVariable.detail.forEach(bLangErrorDetailEntry -> setSymbolOwner(bLangErrorDetailEntry.valueBindingPattern, owner)); break; } } private List<BVarSymbol> getIntroducedSymbols(BLangLetClause letClause) { List<BVarSymbol> symbols = new ArrayList<>(); for (BLangLetVariable letVariable : letClause.letVarDeclarations) { symbols.addAll(getIntroducedSymbols(letVariable)); } return symbols; } private List<BVarSymbol> getIntroducedSymbols(BLangLetVariable variable) { return getIntroducedSymbols((BLangVariable) variable.definitionNode.getVariable()); } private List<BVarSymbol> getIntroducedSymbols(BLangVariable variable) { if (variable != null) { List<BVarSymbol> symbols = new ArrayList<>(); if (variable.getKind() == NodeKind.RECORD_VARIABLE) { BLangRecordVariable record = (BLangRecordVariable) variable; for (BLangRecordVariable.BLangRecordVariableKeyValue keyValue : record.variableList) { symbols.addAll(getIntroducedSymbols(keyValue.valueBindingPattern)); } if (record.hasRestParam()) { symbols.addAll(getIntroducedSymbols(record.restParam)); } } else if (variable.getKind() == NodeKind.TUPLE_VARIABLE) { BLangTupleVariable tuple = (BLangTupleVariable) variable; for (BLangVariable memberVariable : tuple.memberVariables) { symbols.addAll(getIntroducedSymbols(memberVariable)); } if (tuple.restVariable != null) { symbols.addAll(getIntroducedSymbols(tuple.restVariable)); } } else if (variable.getKind() == NodeKind.ERROR_VARIABLE) { 
BLangErrorVariable error = (BLangErrorVariable) variable; if (error.message != null) { symbols.addAll(getIntroducedSymbols(error.message)); } if (error.restDetail != null) { symbols.addAll(getIntroducedSymbols(error.restDetail)); } for (BLangErrorVariable.BLangErrorDetailEntry entry : error.detail) { symbols.addAll(getIntroducedSymbols(entry.valueBindingPattern)); } } else { if (variable.symbol != null) { symbols.add(((BLangSimpleVariable) variable).symbol); } } return symbols; } return Collections.emptyList(); } /** * Creates a lambda key function for a given expression. * function (_Frame _frame) returns any { * returns keyExpr; * } * * @param expr key function expression. * @param stmtsToBePropagated list of statements to be propagated. * @return created key function lambda. */ private BLangLambdaFunction createKeyFunction(BLangExpression expr, List<BLangStatement> stmtsToBePropagated) { BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode(); returnNode.expr = desugar.addConversionExprIfRequired(expr, symTable.anyOrErrorType); returnNode.pos = expr.pos; BLangLambdaFunction keyFunction = createLambdaFunction(expr.pos, getAnyAndErrorTypeNode(), returnNode, false); ((BLangBlockFunctionBody) keyFunction.function.body).stmts.addAll(0, stmtsToBePropagated); keyFunction.accept(this); return keyFunction; } /** * Defines a _Frame with nil value fields for given symbols. * * @param symbols list to be added to the _Frame. * @param blockStmt parent block to write to. * @param pos diagnostic position. * @return variableReference to created _Frame. 
*/ private BLangSimpleVarRef defineNilFrameForType(List<BVarSymbol> symbols, BLangBlockStmt blockStmt, Location pos) { BLangSimpleVarRef frame = defineFrameVariable(blockStmt, pos); for (BVarSymbol symbol : symbols) { BType type = symbol.type; String key = symbol.name.value; BType structureType = Types.getReferredType(type); if (structureType.tag == TypeTags.RECORD || structureType.tag == TypeTags.OBJECT) { List<BVarSymbol> nestedSymbols = new ArrayList<>(); for (BField field : ((BStructureType) structureType).fields.values()) { nestedSymbols.add(field.symbol); } addFrameValueToFrame(frame, key, defineNilFrameForType(nestedSymbols, blockStmt, pos), blockStmt, pos); } else { addNilValueToFrame(frame, key, blockStmt, pos); } } return frame; } /** * Adds nil value fields to a given _Frame. * * @param frameToAddValueTo _Frame to add nil values to. * @param key field name. * @param blockStmt parent block to write to. * @param pos diagnostic position. */ private void addNilValueToFrame(BLangSimpleVarRef frameToAddValueTo, String key, BLangBlockStmt blockStmt, Location pos) { BLangStatement addToFrameStmt = getAddToFrameStmt(pos, frameToAddValueTo, key, ASTBuilderUtil.createLiteral(pos, symTable.nilType, Names.NIL_VALUE)); blockStmt.addStatement(addToFrameStmt); } /** * Adds _Frame value fields to a given _Frame. * * @param frameToAddValueTo _Frame to add values to. * @param key field name. * @param frameValue frame value to be added. * @param blockStmt parent block to write to. * @param pos diagnostic position. */ private void addFrameValueToFrame(BLangSimpleVarRef frameToAddValueTo, String key, BLangSimpleVarRef frameValue, BLangBlockStmt blockStmt, Location pos) { BLangStatement addToFrameStmt = getAddToFrameStmt(pos, frameToAddValueTo, key, frameValue); blockStmt.addStatement(addToFrameStmt); } /** * Creates _Frame $frame$ = new; variable definition and return a reference to the created frame. * * @param pos diagnostic position. 
* @return reference to the defined frame. */ private BLangSimpleVarRef defineFrameVariable(BLangBlockStmt blockStmt, Location pos) { BSymbol frameTypeSymbol = getFrameTypeSymbol(); BRecordType frameType = (BRecordType) frameTypeSymbol.type; String frameName = getNewVarName(); BVarSymbol frameSymbol = new BVarSymbol(0, names.fromString(frameName), env.scope.owner.pkgID, frameType, this.env.scope.owner, pos, VIRTUAL); BLangRecordLiteral frameInit = ASTBuilderUtil.createEmptyRecordLiteral(pos, frameType); BLangSimpleVariable frameVariable = ASTBuilderUtil.createVariable( pos, frameName, frameType, frameInit, frameSymbol); blockStmt.addStatement(ASTBuilderUtil.createVariableDef(pos, frameVariable)); return ASTBuilderUtil.createVariableRef(pos, frameSymbol); } /** * Return BLangValueType of a nil `()` type. * * @return a nil type node. */ BLangValueType getNilTypeNode() { BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); nilTypeNode.typeKind = TypeKind.NIL; nilTypeNode.setBType(symTable.nilType); return nilTypeNode; } /** * Return BLangValueType of a any type. * * @return a any type node. */ BLangValueType getAnyTypeNode() { BLangValueType anyTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); anyTypeNode.typeKind = TypeKind.ANY; anyTypeNode.setBType(symTable.anyType); return anyTypeNode; } /** * Return BLangValueType of a int type. * * @return a int type node. */ BLangValueType getIntTypeNode() { BLangValueType intTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); intTypeNode.typeKind = TypeKind.INT; intTypeNode.setBType(symTable.intType); return intTypeNode; } /** * Return BLangErrorType node. * * @return a error type node. */ BLangErrorType getErrorTypeNode() { BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode(); errorTypeNode.setBType(symTable.errorType); return errorTypeNode; } /** * Return BLangValueType of a boolean type. * * @return a boolean type node. 
*/ private BLangValueType getBooleanTypeNode() { BLangValueType booleanTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); booleanTypeNode.typeKind = TypeKind.BOOLEAN; booleanTypeNode.setBType(symTable.booleanType); return booleanTypeNode; } /** * Return union type node consists of _Frame & error & (). * * @return a union type node. */ private BLangUnionTypeNode getFrameErrorNilTypeNode() { BType frameType = getFrameTypeSymbol().type; BUnionType unionType = BUnionType.create(null, frameType, symTable.errorType, symTable.nilType); BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode(); unionTypeNode.setBType(unionType); unionTypeNode.memberTypeNodes.add(getFrameTypeNode()); unionTypeNode.memberTypeNodes.add(getErrorTypeNode()); unionTypeNode.memberTypeNodes.add(getNilTypeNode()); unionTypeNode.desugared = true; return unionTypeNode; } private BLangUnionTypeNode getBooleanErrorTypeNode() { BUnionType unionType = BUnionType.create(null, symTable.errorType, symTable.booleanType); BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode(); unionTypeNode.setBType(unionType); unionTypeNode.memberTypeNodes.add(getErrorTypeNode()); unionTypeNode.memberTypeNodes.add(getBooleanTypeNode()); unionTypeNode.desugared = true; return unionTypeNode; } private BLangUnionTypeNode getIntErrorTypeNode() { BUnionType unionType = BUnionType.create(null, symTable.errorType, symTable.intType); BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode(); unionTypeNode.setBType(unionType); unionTypeNode.memberTypeNodes.add(getErrorTypeNode()); unionTypeNode.memberTypeNodes.add(getIntTypeNode()); unionTypeNode.desugared = true; return unionTypeNode; } /** * Return union type node consists of any & error. * * @return a any & error type node. 
*/ private BLangUnionTypeNode getAnyAndErrorTypeNode() { BUnionType unionType = BUnionType.create(null, symTable.anyType, symTable.errorType); BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode(); unionTypeNode.memberTypeNodes.add(getAnyTypeNode()); unionTypeNode.memberTypeNodes.add(getErrorTypeNode()); unionTypeNode.setBType(unionType); unionTypeNode.desugared = true; return unionTypeNode; } /** * Return _Frame type node. * * @return a _Frame type node. */ private BLangRecordTypeNode getFrameTypeNode() { BSymbol frameTypeSymbol = getFrameTypeSymbol(); BRecordType frameType = (BRecordType) frameTypeSymbol.type; BLangUnionTypeNode restFieldType = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode(); restFieldType.setBType(frameType.restFieldType); restFieldType.memberTypeNodes.add(getErrorTypeNode()); restFieldType.memberTypeNodes.add(getAnyTypeNode()); BLangRecordTypeNode frameTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode(); frameTypeNode.setBType(frameType); frameTypeNode.restFieldType = restFieldType; frameTypeNode.symbol = frameType.tsymbol; frameTypeNode.desugared = true; return frameTypeNode; } private BLangExpression unwrapCheckedExpression(BLangExpression collectionExp) { BLangExpression expression = unwrapGroupExpression(collectionExp); if (expression.getKind() == NodeKind.CHECK_EXPR) { return ((BLangCheckedExpr) expression).expr; } return collectionExp; } private BLangExpression unwrapGroupExpression(BLangExpression exp) { if (exp.getKind() == NodeKind.GROUP_EXPR) { return unwrapGroupExpression(((BLangGroupExpr) exp).expression); } return exp; } /** * Load and return symbol for _Frame. * * @return _Frame type symbol. 
*/ private BSymbol getFrameTypeSymbol() { return symTable.langQueryModuleSymbol .scope.lookup(names.fromString("_Frame")).symbol; } @Override public void visit(BLangLambdaFunction lambda) { lambda.function.accept(this); lambda.function = desugar.rewrite(lambda.function, env); env.enclPkg.lambdaFunctions.add(lambda); } @Override public void visit(BLangFunction function) { if (function.flagSet.contains(Flag.QUERY_LAMBDA)) { BLangBlockFunctionBody prevQueryLambdaBody = currentQueryLambdaBody; BVarSymbol prevFrameSymbol = currentFrameSymbol; Map<String, BSymbol> prevIdentifiers = identifiers; currentFrameSymbol = function.requiredParams.get(0).symbol; identifiers = new HashMap<>(); currentQueryLambdaBody = (BLangBlockFunctionBody) function.getBody(); currentQueryLambdaBody.accept(this); currentFrameSymbol = prevFrameSymbol; identifiers = prevIdentifiers; currentQueryLambdaBody = prevQueryLambdaBody; } else { boolean prevWithinLambdaFunc = withinLambdaFunc; withinLambdaFunc = true; function.getBody().accept(this); withinLambdaFunc = prevWithinLambdaFunc; } } @Override public void visit(BLangBlockFunctionBody body) { List<BLangStatement> stmts = new ArrayList<>(body.getStatements()); stmts.forEach(stmt -> stmt.accept(this)); } @Override public void visit(BLangExprFunctionBody exprBody) { exprBody.expr.accept(this); } @Override public void visit(BLangSimpleVariableDef bLangSimpleVariableDef) { bLangSimpleVariableDef.getVariable().accept(this); } @Override public void visit(BLangRecordVariableDef bLangRecordVariableDef) { bLangRecordVariableDef.var.accept(this); } @Override public void visit(BLangRecordVariable bLangRecordVariable) { bLangRecordVariable.variableList.forEach(v -> v.getValue().accept(this)); this.acceptNode(bLangRecordVariable.expr); if (bLangRecordVariable.hasRestParam()) { bLangRecordVariable.restParam.accept(this); } } @Override public void visit(BLangSimpleVariable bLangSimpleVariable) { identifiers.putIfAbsent(bLangSimpleVariable.name.value, 
bLangSimpleVariable.symbol); this.acceptNode(bLangSimpleVariable.expr); } @Override public void visit(BLangTypeConversionExpr conversionExpr) { conversionExpr.expr.accept(this); } @Override public void visit(BLangFieldBasedAccess fieldAccessExpr) { fieldAccessExpr.expr.accept(this); } @Override public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) { nsPrefixedFieldBasedAccess.expr.accept(this); } @Override public void visit(BLangFieldBasedAccess.BLangStructFunctionVarRef structFunctionVarRef) { structFunctionVarRef.expr.accept(this); } @Override public void visit(BLangExpressionStmt exprStmtNode) { exprStmtNode.expr.accept(this); } @Override public void visit(BLangInvocation invocationExpr) { List<BLangExpression> requiredArgs = invocationExpr.requiredArgs; if (invocationExpr.langLibInvocation && !requiredArgs.isEmpty()) { requiredArgs = requiredArgs.subList(1, requiredArgs.size()); } requiredArgs.forEach(this::acceptNode); invocationExpr.restArgs.forEach(this::acceptNode); this.acceptNode(invocationExpr.expr); } @Override public void visit(BLangInvocation.BFunctionPointerInvocation functionPointerInvocationExpr) { visit((BLangInvocation) functionPointerInvocationExpr); } @Override public void visit(BLangInvocation.BLangAttachedFunctionInvocation attachedFunctionInvocation) { visit((BLangInvocation) attachedFunctionInvocation); } @Override public void visit(BLangLiteral literalExpr) { } @Override public void visit(BLangReturn bLangReturn) { this.acceptNode(bLangReturn.expr); } @Override public void visit(BLangBinaryExpr bLangBinaryExpr) { this.acceptNode(bLangBinaryExpr.lhsExpr); this.acceptNode(bLangBinaryExpr.rhsExpr); } @Override public void visit(BLangCommitExpr commitExpr) { } @Override public void visit(BLangAssignment bLangAssignment) { this.acceptNode(bLangAssignment.varRef); this.acceptNode(bLangAssignment.expr); } @Override public void visit(BLangRecordLiteral bLangRecordLiteral) { 
bLangRecordLiteral.fields.forEach(field -> this.acceptNode((BLangNode) field)); } @Override public void visit(BLangRecordLiteral.BLangStructLiteral structLiteral) { visit((BLangRecordLiteral) structLiteral); } @Override public void visit(BLangRecordLiteral.BLangMapLiteral mapLiteral) { visit((BLangRecordLiteral) mapLiteral); } @Override public void visit(BLangRecordKeyValueField recordKeyValue) { this.acceptNode(recordKeyValue.key.expr); this.acceptNode(recordKeyValue.valueExpr); } @Override public void visit(BLangRecordSpreadOperatorField spreadOperatorField) { this.acceptNode(spreadOperatorField.expr); } @Override public void visit(BLangConstRef constRef) { } @Override public void visit(BLangNumericLiteral literalExpr) { } @Override public void visit(BLangTupleVarRef varRefExpr) { varRefExpr.expressions.forEach(this::acceptNode); this.acceptNode((BLangNode) varRefExpr.restParam); } @Override public void visit(BLangRecordVarRef varRefExpr) { varRefExpr.recordRefFields.forEach(recordVarRefKeyValue -> this.acceptNode(recordVarRefKeyValue.variableReference)); this.acceptNode((BLangNode) varRefExpr.restParam); } @Override public void visit(BLangErrorVarRef varRefExpr) { this.acceptNode(varRefExpr.message); this.acceptNode(varRefExpr.restVar); varRefExpr.detail.forEach(this::acceptNode); } @Override public void visit(BLangSimpleVarRef bLangSimpleVarRef) { BSymbol symbol = bLangSimpleVarRef.symbol; String identifier = bLangSimpleVarRef.variableName == null ? 
String.valueOf(bLangSimpleVarRef.varSymbol.name) : String.valueOf(bLangSimpleVarRef.variableName); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(env, names.fromString(identifier), SymTag.VARIABLE); if (symbol != null && symbol != resolvedSymbol && !FRAME_PARAMETER_NAME.equals(identifier)) { if ((withinLambdaFunc || queryEnv == null || !queryEnv.scope.entries.containsKey(symbol.name)) && !identifiers.containsKey(identifier)) { Location pos = currentQueryLambdaBody.pos; BLangFieldBasedAccess frameAccessExpr = desugar.getFieldAccessExpression(pos, identifier, symTable.anyOrErrorType, currentFrameSymbol); frameAccessExpr.expr = desugar.addConversionExprIfRequired(frameAccessExpr.expr, types.getSafeType(frameAccessExpr.expr.getBType(), true, false)); if (symbol instanceof BVarSymbol) { ((BVarSymbol) symbol).originalSymbol = null; if (withinLambdaFunc && symbol.closure) { symbol.closure = false; symbol = new BVarSymbol(0, symbol.name, env.scope.owner.pkgID, symbol.type, env.scope.owner, pos, VIRTUAL); symbol.closure = true; bLangSimpleVarRef.symbol = symbol; bLangSimpleVarRef.varSymbol = symbol; } BLangSimpleVariable variable = ASTBuilderUtil.createVariable(pos, identifier, symbol.type, desugar.addConversionExprIfRequired(frameAccessExpr, symbol.type), (BVarSymbol) symbol); BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(pos, variable); currentQueryLambdaBody.stmts.add(0, variableDef); SymbolEnv queryLambdaEnv = SymbolEnv.createFuncBodyEnv(currentQueryLambdaBody, env); queryLambdaEnv.scope.define(symbol.name, symbol); } identifiers.put(identifier, symbol); } else if (identifiers.containsKey(identifier) && withinLambdaFunc) { symbol = identifiers.get(identifier); bLangSimpleVarRef.symbol = symbol; bLangSimpleVarRef.varSymbol = symbol; } } else if (resolvedSymbol != symTable.notFoundSymbol) { resolvedSymbol.closure = true; BSymbol enclSymbol = symResolver.lookupClosureVarSymbol(env.enclEnv, names.fromString(identifier), SymTag.VARIABLE); 
if (enclSymbol != null && enclSymbol != symTable.notFoundSymbol) { enclSymbol.closure = true; } } } @Override public void visit(BLangSimpleVarRef.BLangPackageVarRef bLangPackageVarRef) { visit((BLangSimpleVarRef) bLangPackageVarRef); } @Override public void visit(BLangSimpleVarRef.BLangLocalVarRef localVarRef) { visit(((BLangSimpleVarRef) localVarRef)); } @Override public void visit(BLangSimpleVarRef.BLangFieldVarRef fieldVarRef) { visit(((BLangSimpleVarRef) fieldVarRef)); } @Override public void visit(BLangSimpleVarRef.BLangFunctionVarRef functionVarRef) { visit(((BLangSimpleVarRef) functionVarRef)); } @Override public void visit(BLangIndexBasedAccess indexAccessExpr) { indexAccessExpr.indexExpr.accept(this); indexAccessExpr.expr.accept(this); } @Override public void visit(BLangIndexBasedAccess.BLangStructFieldAccessExpr structFieldAccessExpr) { visit((BLangIndexBasedAccess) structFieldAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangMapAccessExpr mapAccessExpr) { visit((BLangIndexBasedAccess) mapAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangArrayAccessExpr arrayAccessExpr) { visit((BLangIndexBasedAccess) arrayAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangTableAccessExpr tableAccessExpr) { visit((BLangIndexBasedAccess) tableAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangTupleAccessExpr tupleAccessExpr) { visit((BLangIndexBasedAccess) tupleAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangStringAccessExpr stringAccessExpr) { visit((BLangIndexBasedAccess) stringAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangXMLAccessExpr xmlAccessExpr) { visit((BLangIndexBasedAccess) xmlAccessExpr); } @Override public void visit(BLangTypeInit connectorInitExpr) { connectorInitExpr.argsExpr.forEach(this::acceptNode); connectorInitExpr.initInvocation.accept(this); } @Override public void visit(BLangInvocation.BLangActionInvocation actionInvocationExpr) 
{ actionInvocationExpr.argExprs.forEach(this::acceptNode); this.acceptNode(actionInvocationExpr.expr); } @Override public void visit(BLangErrorConstructorExpr errorConstructorExpr) { this.acceptNode(errorConstructorExpr.errorTypeRef); if (errorConstructorExpr.namedArgs != null) { errorConstructorExpr.namedArgs.forEach(this::acceptNode); } this.acceptNode(errorConstructorExpr.errorDetail); } @Override public void visit(BLangTernaryExpr ternaryExpr) { ternaryExpr.expr.accept(this); ternaryExpr.elseExpr.accept(this); ternaryExpr.thenExpr.accept(this); } @Override public void visit(BLangWaitExpr awaitExpr) { awaitExpr.exprList.forEach(this::acceptNode); } @Override public void visit(BLangTrapExpr trapExpr) { this.acceptNode(trapExpr.expr); } @Override public void visit(BLangElvisExpr elvisExpr) { this.acceptNode(elvisExpr.lhsExpr); this.acceptNode(elvisExpr.rhsExpr); } @Override public void visit(BLangGroupExpr groupExpr) { this.acceptNode(groupExpr.expression); } @Override public void visit(BLangLetExpression letExpr) { this.acceptNode(letExpr.expr); letExpr.letVarDeclarations.forEach(var -> this.acceptNode((BLangNode) var.definitionNode)); } @Override public void visit(BLangListConstructorExpr listConstructorExpr) { listConstructorExpr.exprs.forEach(this::acceptNode); } @Override public void visit(BLangListConstructorExpr.BLangListConstructorSpreadOpExpr spreadOpExpr) { this.acceptNode(spreadOpExpr.expr); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { } @Override public void visit(BLangListConstructorExpr.BLangTupleLiteral tupleLiteral) { tupleLiteral.exprs.forEach(this::acceptNode); } @Override public void visit(BLangListConstructorExpr.BLangJSONArrayLiteral jsonArrayLiteral) { jsonArrayLiteral.exprs.forEach(expression -> expression.accept(this)); } @Override public void visit(BLangArrayLiteral arrayLiteral) { arrayLiteral.exprs.forEach(this::acceptNode); } @Override public void visit(BLangUnaryExpr unaryExpr) { 
this.acceptNode(unaryExpr.expr); } @Override public void visit(BLangTypedescExpr accessExpr) { } @Override public void visit(BLangXMLQName xmlQName) { } @Override public void visit(BLangXMLAttribute xmlAttribute) { this.acceptNode(xmlAttribute.name); this.acceptNode(xmlAttribute.value); } @Override public void visit(BLangXMLElementLiteral xmlElementLiteral) { this.acceptNode(xmlElementLiteral.startTagName); this.acceptNode(xmlElementLiteral.endTagName); xmlElementLiteral.attributes.forEach(this::acceptNode); xmlElementLiteral.children.forEach(this::acceptNode); } @Override public void visit(BLangXMLTextLiteral xmlTextLiteral) { xmlTextLiteral.textFragments.forEach(this::acceptNode); this.acceptNode(xmlTextLiteral.concatExpr); } @Override public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { xmlCommentLiteral.textFragments.forEach(this::acceptNode); this.acceptNode(xmlCommentLiteral.concatExpr); } @Override public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { xmlProcInsLiteral.dataFragments.forEach(this::acceptNode); this.acceptNode(xmlProcInsLiteral.dataConcatExpr); } @Override public void visit(BLangXMLQuotedString xmlQuotedString) { xmlQuotedString.textFragments.forEach(this::acceptNode); this.acceptNode(xmlQuotedString.concatExpr); } @Override public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { stringTemplateLiteral.exprs.forEach(this::acceptNode); } @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { rawTemplateLiteral.strings.forEach(this::acceptNode); rawTemplateLiteral.insertions.forEach(this::acceptNode); } @Override public void visit(BLangArrowFunction bLangArrowFunction) { bLangArrowFunction.params.forEach(this::acceptNode); this.acceptNode(bLangArrowFunction.body); } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { this.acceptNode(bLangVarArgsExpression.expr); } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { 
this.acceptNode(bLangNamedArgsExpression.expr); } @Override public void visit(BLangIsAssignableExpr assignableExpr) { this.acceptNode(assignableExpr.lhsExpr); } @Override public void visit(BLangCheckedExpr checkedExpr) { if (checkedExpr.equivalentErrorTypeList != null) { this.checkedErrorList.addAll(checkedExpr.equivalentErrorTypeList); } this.acceptNode(checkedExpr.expr); } @Override public void visit(BLangCheckPanickedExpr checkPanickedExpr) { this.acceptNode(checkPanickedExpr.expr); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { this.acceptNode(serviceConstructorExpr.serviceNode); } @Override public void visit(BLangTypeTestExpr typeTestExpr) { this.acceptNode(typeTestExpr.expr); } @Override public void visit(BLangIsLikeExpr typeTestExpr) { this.acceptNode(typeTestExpr.expr); } @Override public void visit(BLangIgnoreExpr ignoreExpr) { } @Override public void visit(BLangAnnotAccessExpr annotAccessExpr) { } @Override public void visit(BLangXMLNS.BLangLocalXMLNS xmlnsNode) { } @Override public void visit(BLangXMLNS.BLangPackageXMLNS xmlnsNode) { } @Override public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) { bLangXMLSequenceLiteral.xmlItems.forEach(this::acceptNode); } @Override public void visit(BLangStatementExpression bLangStatementExpression) { this.acceptNode(bLangStatementExpression.expr); this.acceptNode(bLangStatementExpression.stmt); } @Override public void visit(BLangTupleVariable bLangTupleVariable) { this.acceptNode(bLangTupleVariable.restVariable); bLangTupleVariable.memberVariables.forEach(this::acceptNode); } @Override public void visit(BLangTupleVariableDef bLangTupleVariableDef) { this.acceptNode(bLangTupleVariableDef.var.restVariable); this.acceptNode(bLangTupleVariableDef.var.expr); if (bLangTupleVariableDef.var.memberVariables != null) { bLangTupleVariableDef.var.memberVariables.forEach(this::acceptNode); } } @Override public void visit(BLangErrorVariable bLangErrorVariable) { 
this.acceptNode(bLangErrorVariable.message); bLangErrorVariable.detail.forEach(var -> this.acceptNode(var.valueBindingPattern)); this.acceptNode(bLangErrorVariable.restDetail); this.acceptNode(bLangErrorVariable.detailExpr); } @Override public void visit(BLangErrorVariableDef bLangErrorVariableDef) { this.acceptNode(bLangErrorVariableDef.errorVariable); } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { } @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { } @Override public void visit(BLangWaitForAllExpr waitForAllExpr) { waitForAllExpr.keyValuePairs.forEach(pair -> this.acceptNode(pair)); } @Override public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) { } @Override public void visit(BLangMarkdownReferenceDocumentation bLangMarkdownReferenceDocumentation) { } @Override public void visit(BLangWaitForAllExpr.BLangWaitKeyValue waitKeyValue) { this.acceptNode(waitKeyValue.key); this.acceptNode(waitKeyValue.valueExpr); } @Override public void visit(BLangXMLElementFilter xmlElementFilter) { this.acceptNode(xmlElementFilter.impConversionExpr); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { this.acceptNode(xmlElementAccess.expr); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { this.acceptNode(xmlNavigation.expr); this.acceptNode(xmlNavigation.childIndex); } @Override public void visit(BLangBlockStmt blockNode) { blockNode.stmts.forEach(statement -> this.acceptNode(statement)); } @Override public void visit(BLangLock.BLangLockStmt lockStmtNode) { } @Override public void visit(BLangLock.BLangUnLockStmt unLockNode) { } @Override public void visit(BLangCompoundAssignment compoundAssignNode) { this.acceptNode(compoundAssignNode.expr); this.acceptNode(compoundAssignNode.modifiedExpr); this.acceptNode(compoundAssignNode.varRef); } @Override public void visit(BLangRetry retryNode) { } @Override public void visit(BLangContinue continueNode) { } @Override public void visit(BLangBreak 
breakNode) { } @Override public void visit(BLangPanic panicNode) { this.acceptNode(panicNode.expr); } @Override public void visit(BLangXMLNSStatement xmlnsStmtNode) { this.acceptNode(xmlnsStmtNode.xmlnsDecl); } @Override public void visit(BLangIf ifNode) { this.acceptNode(ifNode.expr); this.acceptNode(ifNode.body); this.acceptNode(ifNode.elseStmt); } @Override public void visit(BLangQueryAction queryAction) { SymbolEnv prevQueryEnv = this.queryEnv; queryAction.getQueryClauses().forEach(clause -> this.acceptNode(clause)); this.queryEnv = prevQueryEnv; } @Override public void visit(BLangQueryExpr queryExpr) { SymbolEnv prevQueryEnv = this.queryEnv; queryExpr.getQueryClauses().forEach(clause -> this.acceptNode(clause)); this.queryEnv = prevQueryEnv; } @Override public void visit(BLangForeach foreach) { this.acceptNode(foreach.collection); } @Override public void visit(BLangFromClause fromClause) { this.queryEnv = fromClause.env; this.acceptNode(fromClause.collection); } @Override public void visit(BLangJoinClause joinClause) { this.acceptNode(joinClause.collection); joinClause.collection.accept(this); this.acceptNode((BLangNode) joinClause.onClause.getLeftExpression()); this.acceptNode((BLangNode) joinClause.onClause.getRightExpression()); } @Override public void visit(BLangLetClause letClause) { } @Override public void visit(BLangSelectClause selectClause) { this.acceptNode(selectClause.expression); } @Override public void visit(BLangWhereClause whereClause) { this.acceptNode(whereClause.expression); } @Override public void visit(BLangDoClause doClause) { doClause.body.getStatements().forEach(statement -> this.acceptNode(statement)); } @Override public void visit(BLangOnConflictClause onConflictClause) { this.acceptNode(onConflictClause.expression); } @Override public void visit(BLangLimitClause limitClause) { this.acceptNode(limitClause.expression); } @Override public void visit(BLangOrderByClause orderByClause) { orderByClause.orderByKeyList.forEach(key -> 
this.acceptNode(((BLangOrderKey) key).expression)); } @Override public void visit(BLangWhile whileNode) { this.acceptNode(whileNode.expr); this.acceptNode(whileNode.body); } @Override public void visit(BLangDo doNode) { doNode.body.stmts.forEach(stmt -> this.acceptNode(stmt)); } @Override public void visit(BLangOnFailClause onFailClause) { onFailClause.body.stmts.forEach(stmt -> this.acceptNode(stmt)); } @Override public void visit(BLangFail failNode) { this.acceptNode(failNode.expr); } @Override public void visit(BLangLock lockNode) { this.acceptNode(lockNode.body); } @Override public void visit(BLangTransaction transactionNode) { this.acceptNode(transactionNode.transactionBody); } @Override public void visit(BLangTupleDestructure stmt) { this.acceptNode(stmt.varRef); this.acceptNode(stmt.expr); } @Override public void visit(BLangRecordDestructure stmt) { this.acceptNode(stmt.expr); this.acceptNode(stmt.varRef); } @Override public void visit(BLangErrorDestructure stmt) { this.acceptNode(stmt.expr); this.acceptNode(stmt.varRef); } @Override public void visit(BLangForkJoin forkJoin) { forkJoin.workers.forEach(worker -> this.acceptNode(worker)); } @Override public void visit(BLangWorkerSend workerSendNode) { this.acceptNode(workerSendNode.expr); } @Override public void visit(BLangWorkerReceive workerReceiveNode) { this.acceptNode(workerReceiveNode.sendExpression); } private void acceptNode(BLangNode node) { if (node == null) { return; } node.accept(this); } }
/**
 * Desugars Ballerina query expressions and query actions into calls to the
 * {@code lang.query} runtime pipeline (createPipeline / create*Function /
 * addStreamFunction / getStreamFromPipeline).
 */
class QueryDesugar extends BLangNodeVisitor {
    // Names of the lang.query runtime helper functions this desugar emits calls to.
    private static final Name QUERY_CREATE_PIPELINE_FUNCTION = new Name("createPipeline");
    private static final Name QUERY_CREATE_INPUT_FUNCTION = new Name("createInputFunction");
    private static final Name QUERY_CREATE_NESTED_FROM_FUNCTION = new Name("createNestedFromFunction");
    private static final Name QUERY_CREATE_LET_FUNCTION = new Name("createLetFunction");
    private static final Name QUERY_CREATE_INNER_JOIN_FUNCTION = new Name("createInnerJoinFunction");
    private static final Name QUERY_CREATE_OUTER_JOIN_FUNCTION = new Name("createOuterJoinFunction");
    private static final Name QUERY_CREATE_FILTER_FUNCTION = new Name("createFilterFunction");
    private static final Name QUERY_CREATE_ORDER_BY_FUNCTION = new Name("createOrderByFunction");
    private static final Name QUERY_CREATE_SELECT_FUNCTION = new Name("createSelectFunction");
    private static final Name QUERY_CREATE_DO_FUNCTION = new Name("createDoFunction");
    private static final Name QUERY_CREATE_LIMIT_FUNCTION = new Name("createLimitFunction");
    private static final Name QUERY_ADD_STREAM_FUNCTION = new Name("addStreamFunction");
    private static final Name QUERY_CONSUME_STREAM_FUNCTION = new Name("consumeStream");
    private static final Name QUERY_TO_ARRAY_FUNCTION = new Name("toArray");
    private static final Name QUERY_TO_STRING_FUNCTION = new Name("toString");
    private static final Name QUERY_TO_XML_FUNCTION = new Name("toXML");
    private static final Name QUERY_ADD_TO_TABLE_FUNCTION = new Name("addToTable");
    private static final Name QUERY_ADD_TO_MAP_FUNCTION = new Name("addToMap");
    private static final Name QUERY_GET_STREAM_FROM_PIPELINE_FUNCTION = new Name("getStreamFromPipeline");
    // Name of the synthetic `_Frame` parameter given to every generated lambda.
    private static final String FRAME_PARAMETER_NAME = "$frame$";
    private static final CompilerContext.Key<QueryDesugar> QUERY_DESUGAR_KEY = new CompilerContext.Key<>();
    // Expression of the most recent `on conflict` clause; consumed (and reset
    // to null) when the table/map result is built.
    private BLangExpression onConflictExpr;
    // State for the lambda currently being rewritten.
    // NOTE(review): populated/used in parts of this class beyond this chunk — confirm exact roles.
    private BVarSymbol currentFrameSymbol;
    private BLangBlockFunctionBody currentQueryLambdaBody;
    private Map<String, BSymbol> identifiers;
    // Monotonic counter used to generate unique `$streamElement$_N` variable names.
    private int streamElementCount = 0;
    private final Desugar desugar;
    private final SymbolTable symTable;
    private final SymbolResolver symResolver;
    private final Names names;
    private final Types types;
    private SymbolEnv env;
    private SymbolEnv queryEnv;
    // True when the query body contains a `check` expression; drives wrapping
    // the result in a BLangCheckedExpr in desugar(BLangQueryExpr...).
    private boolean containsCheckExpr;
    private boolean withinLambdaFunc = false;
    // Error types equivalent to the checked errors seen in the current query.
    private HashSet<BType> checkedErrorList;

    private QueryDesugar(CompilerContext context) {
        context.put(QUERY_DESUGAR_KEY, this);
        this.symTable = SymbolTable.getInstance(context);
        this.symResolver = SymbolResolver.getInstance(context);
        this.names = Names.getInstance(context);
        this.types = Types.getInstance(context);
        this.desugar = Desugar.getInstance(context);
    }

    /** Returns the per-compilation singleton, creating it on first use. */
    public static QueryDesugar getInstance(CompilerContext context) {
        QueryDesugar desugar = context.get(QUERY_DESUGAR_KEY);
        if (desugar == null) {
            desugar = new QueryDesugar(context);
        }
        return desugar;
    }

    /**
     * Desugar query expression.
     *
     * @param queryExpr query expression to be desugared.
     * @param env symbol env.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return desugared query expression.
     */
    BLangStatementExpression desugar(BLangQueryExpr queryExpr, SymbolEnv env,
                                     List<BLangStatement> stmtsToBePropagated) {
        containsCheckExpr = false;
        // Save/restore checkedErrorList so nested query desugaring does not
        // pollute the enclosing query's error set.
        HashSet<BType> prevCheckedErrorList = this.checkedErrorList;
        this.checkedErrorList = new HashSet<>();
        List<BLangNode> clauses = queryExpr.getQueryClauses();
        Location pos = clauses.get(0).pos;
        BLangBlockStmt queryBlock = ASTBuilderUtil.createBlockStmt(pos);
        // Build the pipeline statements into queryBlock; streamRef is the resulting stream.
        BLangVariableReference streamRef = buildStream(clauses, queryExpr.getBType(), env, queryBlock,
                stmtsToBePropagated);
        BLangStatementExpression streamStmtExpr;
        BLangLiteral isReadonly = ASTBuilderUtil.createLiteral(pos, symTable.booleanType,
                Symbols.isFlagOn(queryExpr.getBType().flags, Flags.READONLY));
        if (queryExpr.isStream) {
            // stream result: the pipeline stream itself is the value.
            streamStmtExpr = ASTBuilderUtil.createStatementExpression(queryBlock, streamRef);
            streamStmtExpr.setBType(streamRef.getBType());
        } else if (queryExpr.isTable) {
            // table result: construct a table and fold the stream into it via addToTable.
            onConflictExpr = (onConflictExpr == null)
                    ? ASTBuilderUtil.createLiteral(pos, symTable.nilType, Names.NIL_VALUE)
                    : onConflictExpr;
            BLangVariableReference tableRef = addTableConstructor(queryExpr, queryBlock);
            BLangVariableReference result = getStreamFunctionVariableRef(queryBlock,
                    QUERY_ADD_TO_TABLE_FUNCTION, Lists.of(streamRef, tableRef, onConflictExpr, isReadonly), pos);
            streamStmtExpr = ASTBuilderUtil.createStatementExpression(queryBlock,
                    addTypeConversionExpr(result, queryExpr.getBType()));
            streamStmtExpr.setBType(tableRef.getBType());
            onConflictExpr = null;
        } else if (queryExpr.isMap) {
            // map result: fold the stream into a fresh map literal via addToMap.
            onConflictExpr = (onConflictExpr == null) ?
                    ASTBuilderUtil.createLiteral(pos, symTable.nilType, Names.NIL_VALUE) : onConflictExpr;
            BMapType mapType = (BMapType) getMapType(queryExpr.getBType());
            BLangRecordLiteral.BLangMapLiteral mapLiteral = new BLangRecordLiteral.BLangMapLiteral(queryExpr.pos,
                    mapType, new ArrayList<>());
            BLangVariableReference result = getStreamFunctionVariableRef(queryBlock, QUERY_ADD_TO_MAP_FUNCTION,
                    Lists.of(streamRef, mapLiteral, onConflictExpr, isReadonly), pos);
            streamStmtExpr = ASTBuilderUtil.createStatementExpression(queryBlock,
                    addTypeConversionExpr(result, queryExpr.getBType()));
            streamStmtExpr.setBType(queryExpr.getBType());
            onConflictExpr = null;
        } else {
            // xml / string / array (default) results.
            BLangVariableReference result;
            BType refType = Types.getReferredType(queryExpr.getBType());
            if (isXml(refType)) {
                if (types.isSubTypeOfReadOnly(refType, env)) {
                    isReadonly.value = true;
                }
                result = getStreamFunctionVariableRef(queryBlock, QUERY_TO_XML_FUNCTION,
                        Lists.of(streamRef, isReadonly), pos);
            } else if (TypeTags.isStringTypeTag(refType.tag)) {
                result = getStreamFunctionVariableRef(queryBlock, QUERY_TO_STRING_FUNCTION,
                        Lists.of(streamRef), pos);
            } else {
                // For a union result, pick the first array member as the target
                // array type, defaulting to the generic array type.
                BType arrayType = refType;
                if (refType.tag == TypeTags.UNION) {
                    arrayType = ((BUnionType) refType).getMemberTypes()
                            .stream().filter(m -> Types.getReferredType(m).tag == TypeTags.ARRAY)
                            .findFirst().orElse(symTable.arrayType);
                }
                BLangArrayLiteral arr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
                arr.exprs = new ArrayList<>();
                arr.setBType(arrayType);
                result = getStreamFunctionVariableRef(queryBlock, QUERY_TO_ARRAY_FUNCTION,
                        Lists.of(streamRef, arr, isReadonly), pos);
            }
            if (containsCheckExpr) {
                // The query body used `check`: wrap the result so errors propagate.
                desugar.resetSkipFailStmtRewrite();
                BLangCheckedExpr checkedExpr = ASTBuilderUtil.createCheckExpr(pos, result, queryExpr.getBType());
                checkedExpr.equivalentErrorTypeList.addAll(this.checkedErrorList);
                streamStmtExpr = ASTBuilderUtil.createStatementExpression(queryBlock, checkedExpr);
                streamStmtExpr.setBType(checkedExpr.getBType());
            } else {
                streamStmtExpr = ASTBuilderUtil.createStatementExpression(queryBlock,
                        addTypeConversionExpr(result, queryExpr.getBType()));
                streamStmtExpr.setBType(queryExpr.getBType());
            }
        }
        this.checkedErrorList = prevCheckedErrorList;
        return streamStmtExpr;
    }

    /**
     * Returns true when {@code type} (after resolving type references) is an
     * XML type, a union of XML types, or an intersection whose effective type is XML.
     */
    private boolean isXml(BType type) {
        BType refType = Types.getReferredType(type);
        if (TypeTags.isXMLTypeTag(refType.tag)) {
            return true;
        }
        switch (refType.tag) {
            case TypeTags.UNION:
                // Every member must itself be XML for the union to count as XML.
                for (BType memberType : ((BUnionType) refType).getMemberTypes()) {
                    if (!isXml(memberType)) {
                        return false;
                    }
                }
                return true;
            case TypeTags.INTERSECTION:
                return isXml(((BIntersectionType) refType).getEffectiveType());
            default:
                return false;
        }
    }

    /**
     * Desugar query action.
     *
     * @param queryAction query action to be desugared.
     * @param env symbol env.
     * @param stmtsToBePropagated statements to be propagated to do clause.
     * @return desugared query action.
     */
    BLangStatementExpression desugar(BLangQueryAction queryAction, SymbolEnv env,
                                     List<BLangStatement> stmtsToBePropagated) {
        containsCheckExpr = false;
        // Save/restore checkedErrorList around this desugaring (see desugar(BLangQueryExpr...)).
        HashSet<BType> prevCheckedErrorList = this.checkedErrorList;
        this.checkedErrorList = new HashSet<>();
        List<BLangNode> clauses = queryAction.getQueryClauses();
        Location pos = clauses.get(0).pos;
        BType returnType = symTable.errorOrNilType;
        if (queryAction.returnsWithinDoClause) {
            // A `return` inside the do-clause makes the enclosing invokable's
            // return type the result type of the consumed stream.
            BInvokableSymbol invokableSymbol = env.enclInvokable.symbol;
            returnType = ((BInvokableType) invokableSymbol.type).retType;
        }
        BLangBlockStmt queryBlock = ASTBuilderUtil.createBlockStmt(pos);
        BLangVariableReference streamRef = buildStream(clauses, returnType, env, queryBlock, stmtsToBePropagated);
        BLangVariableReference result = getStreamFunctionVariableRef(queryBlock, QUERY_CONSUME_STREAM_FUNCTION,
                returnType, Lists.of(streamRef), pos);
        BLangStatementExpression stmtExpr;
        if (queryAction.returnsWithinDoClause) {
            // Generate: if (!(result is ())) { return result; }
            BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, result);
            BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
            ifBody.stmts.add(returnStmt);
            BLangTypeTestExpr nilTypeTestExpr = desugar.createTypeCheckExpr(pos, result, desugar.getNillTypeNode());
            nilTypeTestExpr.setBType(symTable.booleanType);
            BLangGroupExpr nilCheckGroupExpr = new BLangGroupExpr();
            nilCheckGroupExpr.setBType(symTable.booleanType);
            nilCheckGroupExpr.expression = desugar.createNotBinaryExpression(pos, nilTypeTestExpr);
            // NOTE(review): createIfStmt(pos, queryBlock) appears to attach the
            // if-statement to queryBlock — confirm against ASTBuilderUtil.
            BLangIf ifStatement = ASTBuilderUtil.createIfStmt(pos, queryBlock);
            ifStatement.expr = nilCheckGroupExpr;
            ifStatement.body = ifBody;
        }
        stmtExpr = ASTBuilderUtil.createStatementExpression(queryBlock, addTypeConversionExpr(result, returnType));
        stmtExpr.setBType(returnType);
        this.checkedErrorList = prevCheckedErrorList;
        return stmtExpr;
    }

    /**
     * Write the pipeline to the given `block` and return the reference to the resulting stream.
     *
     * @param clauses list of query clauses.
     * @param resultType result type of the query output.
     * @param env symbol env.
     * @param block parent block to write to.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return variableReference to created _StreamPipeline.
*/ BLangVariableReference buildStream(List<BLangNode> clauses, BType resultType, SymbolEnv env, BLangBlockStmt block, List<BLangStatement> stmtsToBePropagated) { this.env = env; BLangFromClause initFromClause = (BLangFromClause) clauses.get(0); final BLangVariableReference initPipeline = addPipeline(block, initFromClause.pos, initFromClause.collection, resultType); BLangVariableReference initFrom = addInputFunction(block, initFromClause, stmtsToBePropagated); addStreamFunction(block, initPipeline, initFrom); for (BLangNode clause : clauses.subList(1, clauses.size())) { switch (clause.getKind()) { case FROM: BLangFromClause fromClause = (BLangFromClause) clause; BLangVariableReference nestedFromFunc = addNestedFromFunction(block, fromClause, stmtsToBePropagated); addStreamFunction(block, initPipeline, nestedFromFunc); BLangVariableReference fromInputFunc = addInputFunction(block, fromClause, stmtsToBePropagated); addStreamFunction(block, initPipeline, fromInputFunc); break; case JOIN: BLangJoinClause joinClause = (BLangJoinClause) clause; BLangVariableReference joinPipeline = addPipeline(block, joinClause.pos, joinClause.collection, resultType); BLangVariableReference joinInputFunc = addInputFunction(block, joinClause, stmtsToBePropagated); addStreamFunction(block, joinPipeline, joinInputFunc); BLangVariableReference joinFunc = addJoinFunction(block, joinClause, joinPipeline, stmtsToBePropagated); addStreamFunction(block, initPipeline, joinFunc); break; case LET_CLAUSE: BLangVariableReference letFunc = addLetFunction(block, (BLangLetClause) clause, stmtsToBePropagated); addStreamFunction(block, initPipeline, letFunc); break; case WHERE: BLangVariableReference whereFunc = addWhereFunction(block, (BLangWhereClause) clause, stmtsToBePropagated); addStreamFunction(block, initPipeline, whereFunc); break; case ORDER_BY: BLangVariableReference orderFunc = addOrderByFunction(block, (BLangOrderByClause) clause, stmtsToBePropagated); addStreamFunction(block, initPipeline, 
orderFunc); break; case SELECT: BLangVariableReference selectFunc = addSelectFunction(block, (BLangSelectClause) clause, stmtsToBePropagated); addStreamFunction(block, initPipeline, selectFunc); break; case DO: BLangVariableReference doFunc = addDoFunction(block, (BLangDoClause) clause, stmtsToBePropagated); addStreamFunction(block, initPipeline, doFunc); break; case LIMIT: BLangVariableReference limitFunc = addLimitFunction(block, (BLangLimitClause) clause); addStreamFunction(block, initPipeline, limitFunc); break; case ON_CONFLICT: final BLangOnConflictClause onConflict = (BLangOnConflictClause) clause; onConflictExpr = onConflict.expression; break; } } return addGetStreamFromPipeline(block, initPipeline); } /** * Desugar fromClause/joinClause to below and return a reference to created join _StreamPipeline. * _StreamPipeline pipeline = createPipeline(collection); * * @param blockStmt parent block to write to. * @param pos diagnostic pos of the collection. * @param collection reference to the collection. * @param resultType constraint type of the collection. * @return variableReference to created _StreamPipeline. 
*/ BLangVariableReference addPipeline(BLangBlockStmt blockStmt, Location pos, BLangExpression collection, BType resultType) { String name = getNewVarName(); BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString(name), env.scope.owner.pkgID, collection.getBType(), this.env.scope.owner, pos, VIRTUAL); BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(pos, name, collection.getBType(), addTypeConversionExpr(collection, collection.getBType()), dataSymbol); BLangSimpleVariableDef dataVarDef = ASTBuilderUtil.createVariableDef(pos, dataVariable); BLangVariableReference valueVarRef = ASTBuilderUtil.createVariableRef(pos, dataSymbol); blockStmt.addStatement(dataVarDef); BType constraintType = resultType; BType completionType = symTable.errorOrNilType; BType refType = Types.getReferredType(resultType); if (refType.tag == TypeTags.ARRAY) { constraintType = ((BArrayType) refType).eType; } else if (refType.tag == TypeTags.STREAM) { constraintType = ((BStreamType) refType).constraint; completionType = ((BStreamType) refType).completionType; } BType constraintTdType = new BTypedescType(constraintType, symTable.typeDesc.tsymbol); BLangTypedescExpr constraintTdExpr = new BLangTypedescExpr(); constraintTdExpr.resolvedType = constraintType; constraintTdExpr.setBType(constraintTdType); BType completionTdType = new BTypedescType(completionType, symTable.typeDesc.tsymbol); BLangTypedescExpr completionTdExpr = new BLangTypedescExpr(); completionTdExpr.resolvedType = completionType; completionTdExpr.setBType(completionTdType); return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_PIPELINE_FUNCTION, Lists.of(valueVarRef, constraintTdExpr, completionTdExpr), pos); } /** * Desugar inputClause to below and return a reference to created from _StreamFunction. * _StreamFunction xsFrom = createFromFunction(function(_Frame frame) returns _Frame|error? { * int x = <int> frame["value"]; * frame["x"] = x; * return frame; * }); * * @param blockStmt parent block to write to. 
* @param inputClause to be desugared. * @param stmtsToBePropagated list of statements to be propagated. * @return variableReference to created from _StreamFunction. */ BLangVariableReference addInputFunction(BLangBlockStmt blockStmt, BLangInputClause inputClause, List<BLangStatement> stmtsToBePropagated) { Location pos = inputClause.pos; BLangLambdaFunction lambda = createPassthroughLambda(pos); BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body; body.stmts.addAll(0, stmtsToBePropagated); BVarSymbol frameSymbol = lambda.function.requiredParams.get(0).symbol; List<BVarSymbol> symbols = getIntroducedSymbols((BLangVariable) inputClause.variableDefinitionNode.getVariable()); shadowSymbolScope(pos, body, ASTBuilderUtil.createVariableRef(pos, frameSymbol), symbols); BLangFieldBasedAccess valueAccessExpr = desugar.getValueAccessExpression(inputClause.pos, symTable.anyOrErrorType, frameSymbol); valueAccessExpr.expr = desugar.addConversionExprIfRequired(valueAccessExpr.expr, types.getSafeType(valueAccessExpr.expr.getBType(), true, false)); VariableDefinitionNode variableDefinitionNode = inputClause.variableDefinitionNode; BLangVariable variable = (BLangVariable) variableDefinitionNode.getVariable(); setSymbolOwner(variable, env.scope.owner); variable.setInitialExpression(desugar.addConversionExprIfRequired(valueAccessExpr, inputClause.varType)); body.stmts.add(0, (BLangStatement) variableDefinitionNode); lambda.accept(this); return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_INPUT_FUNCTION, Lists.of(lambda), pos); } /** * Desugar fromClause to below and return a reference to created from _StreamFunction. * _StreamFunction xnFrom = createNestedFromFunction(function(_Frame frame) returns any|error? { * any collection = frame["collection"] * return collection; * }); * * @param blockStmt parent block to write to. * @param fromClause to be desugared. * @param stmtsToBePropagated list of statements to be propagated. 
* @return variableReference to created from _StreamFunction. */ BLangVariableReference addNestedFromFunction(BLangBlockStmt blockStmt, BLangFromClause fromClause, List<BLangStatement> stmtsToBePropagated) { Location pos = fromClause.pos; BLangUnionTypeNode returnType = getAnyAndErrorTypeNode(); BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode(); returnNode.expr = fromClause.collection; returnNode.pos = pos; BLangLambdaFunction lambda = createLambdaFunction(pos, returnType, returnNode, false); ((BLangBlockFunctionBody) lambda.function.body).stmts.addAll(0, stmtsToBePropagated); lambda.accept(this); return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_NESTED_FROM_FUNCTION, Lists.of(lambda), pos); } /** * Desugar joinClauses to below and return a reference to created join _StreamFunction. * _StreamFunction joinFunc = createJoinFunction(joinPipeline); * * @param blockStmt parent block to write to. * @param joinClause to be desugared. * @param joinPipeline previously created _StreamPipeline reference to be joined. * @return variableReference to created join _StreamFunction. 
*/ BLangVariableReference addJoinFunction(BLangBlockStmt blockStmt, BLangJoinClause joinClause, BLangVariableReference joinPipeline, List<BLangStatement> stmtsToBePropagated) { BLangExpression lhsExpr = (BLangExpression) joinClause.onClause.getLeftExpression(); BLangExpression rhsExpr = (BLangExpression) joinClause.onClause.getRightExpression(); BLangLambdaFunction lhsKeyFunction = createKeyFunction(lhsExpr, stmtsToBePropagated); BLangLambdaFunction rhsKeyFunction = createKeyFunction(rhsExpr, stmtsToBePropagated); if (joinClause.isOuterJoin) { List<BVarSymbol> symbols = getIntroducedSymbols((BLangVariable) joinClause.variableDefinitionNode.getVariable()); final BLangSimpleVarRef nilFrame = defineNilFrameForType(symbols, blockStmt, rhsExpr.pos); return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_OUTER_JOIN_FUNCTION, Lists.of(joinPipeline, lhsKeyFunction, rhsKeyFunction, nilFrame), joinClause.pos); } else { return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_INNER_JOIN_FUNCTION, Lists.of(joinPipeline, lhsKeyFunction, rhsKeyFunction), joinClause.pos); } } /** * Desugar letClause to below and return a reference to created let _StreamFunction. * _StreamFunction ysLet = createLetFunction(function(_Frame frame) returns _Frame|error? { * frame["y2"] = <int> frame["y"] * <int> frame["y"]; * return frame; * }); * * @param blockStmt parent block to write to. * @param letClause to be desugared. * @param stmtsToBePropagated list of statements to be propagated. * @return variableReference to created let _StreamFunction. 
*/ BLangVariableReference addLetFunction(BLangBlockStmt blockStmt, BLangLetClause letClause, List<BLangStatement> stmtsToBePropagated) { Location pos = letClause.pos; BLangLambdaFunction lambda = createPassthroughLambda(pos); BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body; BVarSymbol frameSymbol = lambda.function.requiredParams.get(0).symbol; List<BVarSymbol> symbols = getIntroducedSymbols(letClause); shadowSymbolScope(pos, body, ASTBuilderUtil.createVariableRef(pos, frameSymbol), symbols); Collections.reverse(letClause.letVarDeclarations); for (BLangLetVariable letVariable : letClause.letVarDeclarations) { body.stmts.add(0, (BLangStatement) letVariable.definitionNode); setSymbolOwner((BLangVariable) letVariable.definitionNode.getVariable(), env.scope.owner); } body.stmts.addAll(0, stmtsToBePropagated); lambda.accept(this); return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_LET_FUNCTION, Lists.of(lambda), pos); } /** * Desugar whereClause to below and return a reference to created filter _StreamFunction. * _StreamFunction xsFilter = createFilterFunction(function(_Frame frame) returns boolean { * return <int>frame["x"] > 0; * }); * * @param blockStmt parent block to write to. * @param whereClause to be desugared. * @param stmtsToBePropagated list of statements to be propagated. * @return variableReference to created filter _StreamFunction. 
*/ BLangVariableReference addWhereFunction(BLangBlockStmt blockStmt, BLangWhereClause whereClause, List<BLangStatement> stmtsToBePropagated) { Location pos = whereClause.pos; BLangLambdaFunction lambda = createFilterLambda(pos); BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body; BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode(); returnNode.pos = pos; body.stmts.addAll(0, stmtsToBePropagated); returnNode.expr = desugar.addConversionExprIfRequired(whereClause.expression, lambda.function.returnTypeNode.getBType()); body.addStatement(returnNode); lambda.accept(this); return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_FILTER_FUNCTION, Lists.of(lambda), pos); } /** * Desugar orderByClause to below and return a reference to created orderBy _StreamFunction. * _StreamFunction orderByFunc = createOrderByFunction(function(_Frame frame) { * _Frame frame = {"orderKey": frame["x2"] + frame["y2"], $orderDirection$: true + false"}; * }); * * @param blockStmt parent block to write to. * @param orderByClause to be desugared. * @param stmtsToBePropagated list of statements to be propagated. * @return variableReference to created orderBy _StreamFunction. 
*/ BLangVariableReference addOrderByFunction(BLangBlockStmt blockStmt, BLangOrderByClause orderByClause, List<BLangStatement> stmtsToBePropagated) { Location pos = orderByClause.pos; BLangLambdaFunction lambda = createActionLambda(pos); BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body; body.stmts.addAll(0, stmtsToBePropagated); BVarSymbol frameSymbol = lambda.function.requiredParams.get(0).symbol; BLangSimpleVarRef frame = ASTBuilderUtil.createVariableRef(pos, frameSymbol); BLangArrayLiteral sortFieldsArrayExpr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); sortFieldsArrayExpr.exprs = new ArrayList<>(); sortFieldsArrayExpr.setBType(new BArrayType(symTable.anydataType)); BLangArrayLiteral sortModesArrayExpr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); sortModesArrayExpr.exprs = new ArrayList<>(); sortModesArrayExpr.setBType(new BArrayType(symTable.booleanType)); for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) { BLangOrderKey orderKey = (BLangOrderKey) orderKeyNode; sortFieldsArrayExpr.exprs.add(orderKey.expression); sortModesArrayExpr.exprs.add(ASTBuilderUtil.createLiteral(orderKey.pos, symTable.booleanType, orderKey.getOrderDirection())); } BLangStatement orderKeyStmt = getAddToFrameStmt(pos, frame, "$orderKey$", sortFieldsArrayExpr); body.stmts.add(orderKeyStmt); BLangStatement orderDirectionStmt = getAddToFrameStmt(pos, frame, "$orderDirection$", sortModesArrayExpr); body.stmts.add(orderDirectionStmt); lambda.accept(this); return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_ORDER_BY_FUNCTION, Lists.of(lambda), pos); } /** * Desugar selectClause to below and return a reference to created select _StreamFunction. * _StreamFunction selectFunc = createSelectFunction(function(_Frame frame) returns _Frame|error? 
{ * int x2 = <int> frame["x2"]; * int y2 = <int> frame["y2"]; * _Frame frame = {"value": x2 + y2}; * return frame; * }); * * @param blockStmt parent block to write to. * @param selectClause to be desugared. * @param stmtsToBePropagated list of statements to be propagated. * @return variableReference to created select _StreamFunction. */ BLangVariableReference addSelectFunction(BLangBlockStmt blockStmt, BLangSelectClause selectClause, List<BLangStatement> stmtsToBePropagated) { Location pos = selectClause.pos; BLangLambdaFunction lambda = createPassthroughLambda(pos); BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body; body.stmts.addAll(0, stmtsToBePropagated); BVarSymbol oldFrameSymbol = lambda.function.requiredParams.get(0).symbol; BLangSimpleVarRef frame = ASTBuilderUtil.createVariableRef(pos, oldFrameSymbol); BLangStatement assignment = getAddToFrameStmt(pos, frame, "$value$", selectClause.expression); body.stmts.add(body.stmts.size() - 1, assignment); lambda.accept(this); return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_SELECT_FUNCTION, Lists.of(lambda), pos); } /** * Desugar doClause to below and return a reference to created do _StreamFunction. * _StreamFunction doFunc = createDoFunction(function(_Frame frame) { * int x2 = <int> frame["x2"]; * int y2 = <int> frame["y2"]; * }); * * @param blockStmt parent block to write to. * @param doClause to be desugared. * @param stmtsToBePropagated list of statements to be propagated. * @return variableReference to created do _StreamFunction. 
*/ BLangVariableReference addDoFunction(BLangBlockStmt blockStmt, BLangDoClause doClause, List<BLangStatement> stmtsToBePropagated) { Location pos = doClause.pos; BLangLambdaFunction lambda = createActionLambda(pos); BLangBlockFunctionBody body = (BLangBlockFunctionBody) lambda.function.body; body.stmts.addAll(0, stmtsToBePropagated); for (BLangStatement stmt : doClause.body.stmts) { body.addStatement(stmt); } lambda.accept(this); return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_DO_FUNCTION, Lists.of(lambda), pos); } /** * Desugar limit clause and return a reference to created limit _StreamFunction. * * @param blockStmt parent block to write to. * @param limitClause to be desugared. * @return variableReference to created do _StreamFunction. */ BLangVariableReference addLimitFunction(BLangBlockStmt blockStmt, BLangLimitClause limitClause) { Location pos = limitClause.pos; BLangUnionTypeNode returnTypeNode = getIntErrorTypeNode(); BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode(); returnNode.expr = desugar.addConversionExprIfRequired(limitClause.expression, returnTypeNode.getBType()); returnNode.pos = pos; BLangLambdaFunction limitFunction = createLambdaFunction(pos, returnTypeNode, returnNode, false); limitFunction.accept(this); return getStreamFunctionVariableRef(blockStmt, QUERY_CREATE_LIMIT_FUNCTION, Lists.of(limitFunction), pos); } /** * Desugar to following invocation. * stream:addStreamFunction(pipeline, streamFunction); * * @param blockStmt parent block to write to. * @param pipelineRef variableReference to pipeline. * @param functionRef variableReference to stream function. 
*/ void addStreamFunction(BLangBlockStmt blockStmt, BLangVariableReference pipelineRef, BLangVariableReference functionRef) { BLangInvocation addStreamFunctionInvocation = createQueryLibInvocation(QUERY_ADD_STREAM_FUNCTION, Lists.of(pipelineRef, functionRef), pipelineRef.pos); BLangExpressionStmt stmt = ASTBuilderUtil.createExpressionStmt(pipelineRef.pos, blockStmt); stmt.expr = addStreamFunctionInvocation; } /** * Desugar to following invocation. * stream<any|error, error?> result = xsPipeline.getStream(); * * @param blockStmt parent block to write to. * @param pipelineRef variableReference to pipeline. * @return variableReference to stream. */ BLangVariableReference addGetStreamFromPipeline(BLangBlockStmt blockStmt, BLangVariableReference pipelineRef) { Location pos = pipelineRef.pos; return getStreamFunctionVariableRef(blockStmt, QUERY_GET_STREAM_FROM_PIPELINE_FUNCTION, null, Lists.of(pipelineRef), pos); } /** * Create a table constructor expression. * * @param queryExpr query expression. * @param queryBlock parent block to write to. * @return reference to updated table. 
*/ BLangVariableReference addTableConstructor(BLangQueryExpr queryExpr, BLangBlockStmt queryBlock) { Location pos = queryExpr.pos; final BType type = queryExpr.getBType(); String name = getNewVarName(); BType tableType = type; BType refType = Types.getReferredType(type); if (refType.tag == TypeTags.UNION) { tableType = symTable.tableType; for (BType memberType : ((BUnionType) refType).getMemberTypes()) { int memberTypeTag = Types.getReferredType(memberType).tag; if (memberTypeTag == TypeTags.TABLE) { tableType = memberType; } else if (memberTypeTag == TypeTags.INTERSECTION && ((BIntersectionType) memberType).effectiveType.tag == TypeTags.TABLE) { tableType = ((BIntersectionType) memberType).effectiveType; } } } final List<IdentifierNode> keyFieldIdentifiers = queryExpr.fieldNameIdentifierList; BLangTableConstructorExpr tableConstructorExpr = (BLangTableConstructorExpr) TreeBuilder.createTableConstructorExpressionNode(); tableConstructorExpr.pos = pos; tableConstructorExpr.setBType(tableType); if (!keyFieldIdentifiers.isEmpty()) { BLangTableKeySpecifier keySpecifier = (BLangTableKeySpecifier) TreeBuilder.createTableKeySpecifierNode(); keySpecifier.pos = pos; for (IdentifierNode identifier : keyFieldIdentifiers) { keySpecifier.addFieldNameIdentifier(identifier); } tableConstructorExpr.tableKeySpecifier = keySpecifier; } BVarSymbol tableSymbol = new BVarSymbol(0, names.fromString(name), env.scope.owner.pkgID, tableType, this.env.scope.owner, pos, VIRTUAL); BLangSimpleVariable tableVariable = ASTBuilderUtil.createVariable(pos, name, tableType, tableConstructorExpr, tableSymbol); queryBlock.addStatement(ASTBuilderUtil.createVariableDef(pos, tableVariable)); return ASTBuilderUtil.createVariableRef(pos, tableSymbol); } /** * Adds a type cast expression to given expression. * @param expr to be casted. * @param type to be casted into. * @return expression with the type cast. 
*/ private BLangExpression addTypeConversionExpr(BLangExpression expr, BType type) { BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); conversionExpr.expr = expr; conversionExpr.targetType = type; conversionExpr.setBType(type); conversionExpr.pos = expr.pos; conversionExpr.checkTypes = false; return conversionExpr; } /** * Create and return a lambda `function(_Frame frame) returns _Frame|error? {...; return frame;}` * * @param pos of the lambda. * @return created lambda function. */ private BLangLambdaFunction createPassthroughLambda(Location pos) { BLangUnionTypeNode returnType = getFrameErrorNilTypeNode(); BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode(); returnNode.pos = pos; return createLambdaFunction(pos, returnType, returnNode, true); } /** * Create and return a lambda `function(_Frame frame) returns boolean {...}`. * * @param pos of the lambda. * @return created lambda function. */ private BLangLambdaFunction createFilterLambda(Location pos) { BLangUnionTypeNode returnType = getBooleanErrorTypeNode(); return createLambdaFunction(pos, returnType, null, false); } /** * Create and return a lambda `function(_Frame frame) {...}`. * * @param pos of the lambda. * @return created lambda function. */ private BLangLambdaFunction createActionLambda(Location pos) { BLangUnionTypeNode returnType = getAnyAndErrorTypeNode(); return createLambdaFunction(pos, returnType, null, false); } /** * Creates and return a lambda function without body. * * @param pos of the lambda. * @return created lambda function. 
*/ private BLangLambdaFunction createLambdaFunction(Location pos, TypeNode returnType, BLangReturn returnNode, boolean isPassthrough) { BType frameType = getFrameTypeSymbol().type; BVarSymbol frameSymbol = new BVarSymbol(0, names.fromString(FRAME_PARAMETER_NAME), this.env.scope.owner.pkgID, frameType, this.env.scope.owner, pos, VIRTUAL); BLangSimpleVariable frameVariable = ASTBuilderUtil.createVariable(pos, null, frameSymbol.type, null, frameSymbol); BLangVariableReference frameVarRef = ASTBuilderUtil.createVariableRef(pos, frameSymbol); BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode(); if (returnNode != null) { if (isPassthrough) { returnNode.setExpression(frameVarRef); } body.addStatement(returnNode); } return createLambdaFunction(pos, Lists.of(frameVariable), returnType, body); } /** * Creates and returns a lambda function. * * @param pos diagnostic pos. * @param requiredParams required parameters. * @param returnType return type of the lambda function. * @param lambdaBody body of the lambda function. * @return created lambda function. */ private BLangLambdaFunction createLambdaFunction(Location pos, List<BLangSimpleVariable> requiredParams, TypeNode returnType, BLangFunctionBody lambdaBody) { BLangLambdaFunction lambdaFunction = desugar.createLambdaFunction(pos, "$streamLambda$", requiredParams, returnType, lambdaBody); lambdaFunction.function.addFlag(Flag.QUERY_LAMBDA); lambdaFunction.capturedClosureEnv = env; return lambdaFunction; } /** * Creates a variable to hold what function invocation returns, * and then return a varRef to that variable. * * @param blockStmt parent block to write the varDef into. * @param functionName function name. * @param requiredArgs required args. * @param pos pos diagnostic pos. * @return varRef to the created variable. 
*/ private BLangVariableReference getStreamFunctionVariableRef(BLangBlockStmt blockStmt, Name functionName, List<BLangExpression> requiredArgs, Location pos) { return getStreamFunctionVariableRef(blockStmt, functionName, null, requiredArgs, pos); } /** * Creates a variable to hold what function invocation returns, * and then return a varRef to that variable. * * @param blockStmt parent block to write the varDef into. * @param functionName function name. * @param type expected type of the variable. * @param requiredArgs required args. * @param pos pos diagnostic pos. * @return varRef to the created variable. */ private BLangVariableReference getStreamFunctionVariableRef(BLangBlockStmt blockStmt, Name functionName, BType type, List<BLangExpression> requiredArgs, Location pos) { String name = getNewVarName(); BLangInvocation queryLibInvocation = createQueryLibInvocation(functionName, requiredArgs, pos); type = (type == null) ? queryLibInvocation.getBType() : type; BVarSymbol varSymbol = new BVarSymbol(0, new Name(name), env.scope.owner.pkgID, type, env.scope.owner, pos, VIRTUAL); BLangSimpleVariable variable = ASTBuilderUtil.createVariable(pos, name, type, desugar.addConversionExprIfRequired(queryLibInvocation, type), varSymbol); BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(pos, variable); blockStmt.addStatement(variableDef); return ASTBuilderUtil.createVariableRef(pos, variable.symbol); } /** * Get unique variable name. * * @return new variable name. */ private String getNewVarName() { return "$streamElement$" + UNDERSCORE + streamElementCount++; } /** * Load a function invokable symbol and return a invocation for that function. * * @param functionName function name. * @param requiredArgs list of required args. * @param pos diagnostic pos. * @return created invocation. 
     */
    private BLangInvocation createQueryLibInvocation(Name functionName,
                                                     List<BLangExpression> requiredArgs,
                                                     Location pos) {
        BInvokableSymbol symbol = getQueryLibInvokableSymbol(functionName);
        BLangInvocation bLangInvocation = ASTBuilderUtil
                .createInvocationExprForMethod(pos, symbol, requiredArgs, symResolver);
        bLangInvocation.setBType(symbol.retType);
        return bLangInvocation;
    }

    /**
     * Load and return symbol for given functionName in query lib.
     *
     * @param functionName of the function.
     * @return symbol for the function.
     */
    private BInvokableSymbol getQueryLibInvokableSymbol(Name functionName) {
        return (BInvokableSymbol) symTable.langQueryModuleSymbol.scope
                .lookup(functionName).symbol;
    }

    // Builds the assignment statement `frame.key = value;` used to store a binding in a _Frame.
    private BLangStatement getAddToFrameStmt(Location pos,
                                             BLangVariableReference frame,
                                             String key,
                                             BLangExpression value) {
        BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, key);
        BLangFieldBasedAccess valueAccess = ASTBuilderUtil.createFieldAccessExpr(frame, valueIdentifier);
        valueAccess.pos = pos;
        valueAccess.setBType(symTable.anyOrErrorType);
        valueAccess.originalType = valueAccess.getBType();
        return ASTBuilderUtil.createAssignmentStmt(pos, valueAccess, value);
    }

    // Removes the given symbols from the enclosing scope and instead seeds their values into the
    // lambda's frame, so the bindings travel with the _Frame rather than the outer scope.
    private void shadowSymbolScope(Location pos,
                                   BLangBlockFunctionBody lambdaBody,
                                   BLangSimpleVarRef frameRef,
                                   List<BVarSymbol> symbols) {
        // Reverse first: each statement is inserted at index 0, so reversing preserves
        // the original declaration order in the lambda body.
        Collections.reverse(symbols);
        for (BVarSymbol symbol : symbols) {
            // Hide the symbol from the enclosing scope and drop its global-variable dependency edges.
            env.scope.entries.remove(symbol.name);
            env.enclPkg.globalVariableDependencies.values().forEach(d -> d.remove(symbol));
            BLangStatement addToFrameStmt = getAddToFrameStmt(pos, frameRef, symbol.name.value,
                    ASTBuilderUtil.createVariableRef(pos, symbol));
            lambdaBody.stmts.add(0, addToFrameStmt);
        }
    }

    // Recursively re-parents the symbols introduced by a binding pattern to the given owner.
    private void setSymbolOwner(BLangVariable variable, BSymbol owner) {
        if (variable == null) {
            return;
        }
        switch (variable.getKind()) {
            case VARIABLE:
                if (variable.symbol == null) {
                    return;
                }
                variable.symbol.owner = owner;
                break;
            case TUPLE_VARIABLE:
                BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
                tupleVariable.memberVariables.forEach(v -> setSymbolOwner(v, owner));
                setSymbolOwner(tupleVariable.restVariable, owner);
                break;
            case RECORD_VARIABLE:
                BLangRecordVariable recordVariable = (BLangRecordVariable) variable;
                recordVariable.variableList.forEach(value -> setSymbolOwner(value.valueBindingPattern, owner));
                setSymbolOwner(recordVariable.restParam, owner);
                break;
            case ERROR_VARIABLE:
                BLangErrorVariable errorVariable = (BLangErrorVariable) variable;
                setSymbolOwner(errorVariable.message, owner);
                setSymbolOwner(errorVariable.restDetail, owner);
                errorVariable.detail.forEach(bLangErrorDetailEntry ->
                        setSymbolOwner(bLangErrorDetailEntry.valueBindingPattern, owner));
                break;
        }
    }

    // Collects every symbol introduced by the let clause's variable declarations.
    private List<BVarSymbol> getIntroducedSymbols(BLangLetClause letClause) {
        List<BVarSymbol> symbols = new ArrayList<>();
        for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
            symbols.addAll(getIntroducedSymbols(letVariable));
        }
        return symbols;
    }

    private List<BVarSymbol> getIntroducedSymbols(BLangLetVariable variable) {
        return getIntroducedSymbols((BLangVariable) variable.definitionNode.getVariable());
    }

    // Recursively collects the symbols a binding pattern introduces
    // (record / tuple / error / simple variable).
    private List<BVarSymbol> getIntroducedSymbols(BLangVariable variable) {
        if (variable != null) {
            List<BVarSymbol> symbols = new ArrayList<>();
            if (variable.getKind() == NodeKind.RECORD_VARIABLE) {
                BLangRecordVariable record = (BLangRecordVariable) variable;
                for (BLangRecordVariable.BLangRecordVariableKeyValue keyValue : record.variableList) {
                    symbols.addAll(getIntroducedSymbols(keyValue.valueBindingPattern));
                }
                if (record.hasRestParam()) {
                    symbols.addAll(getIntroducedSymbols(record.restParam));
                }
            } else if (variable.getKind() == NodeKind.TUPLE_VARIABLE) {
                BLangTupleVariable tuple = (BLangTupleVariable) variable;
                for (BLangVariable memberVariable : tuple.memberVariables) {
                    symbols.addAll(getIntroducedSymbols(memberVariable));
                }
                if (tuple.restVariable != null) {
                    symbols.addAll(getIntroducedSymbols(tuple.restVariable));
                }
            } else if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
                BLangErrorVariable error = (BLangErrorVariable) variable;
                if (error.message != null) {
                    symbols.addAll(getIntroducedSymbols(error.message));
                }
                if (error.restDetail != null) {
                    symbols.addAll(getIntroducedSymbols(error.restDetail));
                }
                for (BLangErrorVariable.BLangErrorDetailEntry entry : error.detail) {
                    symbols.addAll(getIntroducedSymbols(entry.valueBindingPattern));
                }
            } else {
                // Simple variable: contributes its single symbol (when resolved).
                if (variable.symbol != null) {
                    symbols.add(((BLangSimpleVariable) variable).symbol);
                }
            }
            return symbols;
        }
        return Collections.emptyList();
    }

    /**
     * Creates a lambda key function for a given expression.
     * function (_Frame _frame) returns any {
     *     returns keyExpr;
     * }
     *
     * @param expr key function expression.
     * @param stmtsToBePropagated list of statements to be propagated.
     * @return created key function lambda.
     */
    private BLangLambdaFunction createKeyFunction(BLangExpression expr, List<BLangStatement> stmtsToBePropagated) {
        BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
        returnNode.expr = desugar.addConversionExprIfRequired(expr, symTable.anyOrErrorType);
        returnNode.pos = expr.pos;
        BLangLambdaFunction keyFunction = createLambdaFunction(expr.pos, getAnyAndErrorTypeNode(), returnNode, false);
        // Propagate the given statements to the top of the key lambda's body.
        ((BLangBlockFunctionBody) keyFunction.function.body).stmts.addAll(0, stmtsToBePropagated);
        // Visit the new lambda with this visitor before returning it.
        keyFunction.accept(this);
        return keyFunction;
    }

    /**
     * Defines a _Frame with nil value fields for given symbols.
     *
     * @param symbols list to be added to the _Frame.
     * @param blockStmt parent block to write to.
     * @param pos diagnostic position.
     * @return variableReference to created _Frame.
     */
    private BLangSimpleVarRef defineNilFrameForType(List<BVarSymbol> symbols, BLangBlockStmt blockStmt,
                                                    Location pos) {
        BLangSimpleVarRef frame = defineFrameVariable(blockStmt, pos);
        for (BVarSymbol symbol : symbols) {
            BType type = symbol.type;
            String key = symbol.name.value;
            BType structureType = Types.getReferredType(type);
            if (structureType.tag == TypeTags.RECORD || structureType.tag == TypeTags.OBJECT) {
                // Structured field: recursively build a nested nil-initialized frame for its fields.
                List<BVarSymbol> nestedSymbols = new ArrayList<>();
                for (BField field : ((BStructureType) structureType).fields.values()) {
                    nestedSymbols.add(field.symbol);
                }
                addFrameValueToFrame(frame, key, defineNilFrameForType(nestedSymbols, blockStmt, pos),
                        blockStmt, pos);
            } else {
                addNilValueToFrame(frame, key, blockStmt, pos);
            }
        }
        return frame;
    }

    /**
     * Adds nil value fields to a given _Frame.
     *
     * @param frameToAddValueTo _Frame to add nil values to.
     * @param key field name.
     * @param blockStmt parent block to write to.
     * @param pos diagnostic position.
     */
    private void addNilValueToFrame(BLangSimpleVarRef frameToAddValueTo, String key,
                                    BLangBlockStmt blockStmt, Location pos) {
        BLangStatement addToFrameStmt = getAddToFrameStmt(pos, frameToAddValueTo, key,
                ASTBuilderUtil.createLiteral(pos, symTable.nilType, Names.NIL_VALUE));
        blockStmt.addStatement(addToFrameStmt);
    }

    /**
     * Adds _Frame value fields to a given _Frame.
     *
     * @param frameToAddValueTo _Frame to add values to.
     * @param key field name.
     * @param frameValue frame value to be added.
     * @param blockStmt parent block to write to.
     * @param pos diagnostic position.
     */
    private void addFrameValueToFrame(BLangSimpleVarRef frameToAddValueTo, String key,
                                      BLangSimpleVarRef frameValue, BLangBlockStmt blockStmt,
                                      Location pos) {
        BLangStatement addToFrameStmt = getAddToFrameStmt(pos, frameToAddValueTo, key, frameValue);
        blockStmt.addStatement(addToFrameStmt);
    }

    /**
     * Creates a `_Frame $frame$ = {};` variable definition (initialized with an empty record
     * literal) and return a reference to the created frame.
     *
     * @param pos diagnostic position.
     * @return reference to the defined frame.
     */
    private BLangSimpleVarRef defineFrameVariable(BLangBlockStmt blockStmt, Location pos) {
        BSymbol frameTypeSymbol = getFrameTypeSymbol();
        BRecordType frameType = (BRecordType) frameTypeSymbol.type;
        String frameName = getNewVarName();
        BVarSymbol frameSymbol = new BVarSymbol(0, names.fromString(frameName), env.scope.owner.pkgID,
                frameType, this.env.scope.owner, pos, VIRTUAL);
        BLangRecordLiteral frameInit = ASTBuilderUtil.createEmptyRecordLiteral(pos, frameType);
        BLangSimpleVariable frameVariable = ASTBuilderUtil.createVariable(
                pos, frameName, frameType, frameInit, frameSymbol);
        blockStmt.addStatement(ASTBuilderUtil.createVariableDef(pos, frameVariable));
        return ASTBuilderUtil.createVariableRef(pos, frameSymbol);
    }

    /**
     * Return BLangValueType of a nil `()` type.
     *
     * @return a nil type node.
     */
    BLangValueType getNilTypeNode() {
        BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        nilTypeNode.typeKind = TypeKind.NIL;
        nilTypeNode.setBType(symTable.nilType);
        return nilTypeNode;
    }

    /**
     * Return BLangValueType of a any type.
     *
     * @return a any type node.
     */
    BLangValueType getAnyTypeNode() {
        BLangValueType anyTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        anyTypeNode.typeKind = TypeKind.ANY;
        anyTypeNode.setBType(symTable.anyType);
        return anyTypeNode;
    }

    /**
     * Return BLangValueType of a int type.
     *
     * @return a int type node.
     */
    BLangValueType getIntTypeNode() {
        BLangValueType intTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        intTypeNode.typeKind = TypeKind.INT;
        intTypeNode.setBType(symTable.intType);
        return intTypeNode;
    }

    /**
     * Return BLangErrorType node.
     *
     * @return a error type node.
     */
    BLangErrorType getErrorTypeNode() {
        BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
        errorTypeNode.setBType(symTable.errorType);
        return errorTypeNode;
    }

    /**
     * Return BLangValueType of a boolean type.
     *
     * @return a boolean type node.
     */
    private BLangValueType getBooleanTypeNode() {
        BLangValueType booleanTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        booleanTypeNode.typeKind = TypeKind.BOOLEAN;
        booleanTypeNode.setBType(symTable.booleanType);
        return booleanTypeNode;
    }

    /**
     * Return union type node consists of _Frame & error & ().
     *
     * @return a union type node.
     */
    private BLangUnionTypeNode getFrameErrorNilTypeNode() {
        BType frameType = getFrameTypeSymbol().type;
        BUnionType unionType = BUnionType.create(null, frameType, symTable.errorType, symTable.nilType);
        BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
        unionTypeNode.setBType(unionType);
        unionTypeNode.memberTypeNodes.add(getFrameTypeNode());
        unionTypeNode.memberTypeNodes.add(getErrorTypeNode());
        unionTypeNode.memberTypeNodes.add(getNilTypeNode());
        // Synthetic node introduced by desugaring.
        unionTypeNode.desugared = true;
        return unionTypeNode;
    }

    /**
     * Return union type node consists of boolean & error.
     *
     * @return a union type node.
     */
    private BLangUnionTypeNode getBooleanErrorTypeNode() {
        BUnionType unionType = BUnionType.create(null, symTable.errorType, symTable.booleanType);
        BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
        unionTypeNode.setBType(unionType);
        unionTypeNode.memberTypeNodes.add(getErrorTypeNode());
        unionTypeNode.memberTypeNodes.add(getBooleanTypeNode());
        unionTypeNode.desugared = true;
        return unionTypeNode;
    }

    /**
     * Return union type node consists of int & error.
     *
     * @return a union type node.
     */
    private BLangUnionTypeNode getIntErrorTypeNode() {
        BUnionType unionType = BUnionType.create(null, symTable.errorType, symTable.intType);
        BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
        unionTypeNode.setBType(unionType);
        unionTypeNode.memberTypeNodes.add(getErrorTypeNode());
        unionTypeNode.memberTypeNodes.add(getIntTypeNode());
        unionTypeNode.desugared = true;
        return unionTypeNode;
    }

    /**
     * Return union type node consists of any & error.
     *
     * @return a any & error type node.
     */
    private BLangUnionTypeNode getAnyAndErrorTypeNode() {
        BUnionType unionType = BUnionType.create(null, symTable.anyType, symTable.errorType);
        BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
        unionTypeNode.memberTypeNodes.add(getAnyTypeNode());
        unionTypeNode.memberTypeNodes.add(getErrorTypeNode());
        unionTypeNode.setBType(unionType);
        unionTypeNode.desugared = true;
        return unionTypeNode;
    }

    /**
     * Return _Frame type node.
     *
     * @return a _Frame type node.
     */
    private BLangRecordTypeNode getFrameTypeNode() {
        BSymbol frameTypeSymbol = getFrameTypeSymbol();
        BRecordType frameType = (BRecordType) frameTypeSymbol.type;
        // Rest field type node built from the error and any member type nodes.
        BLangUnionTypeNode restFieldType = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
        restFieldType.setBType(frameType.restFieldType);
        restFieldType.memberTypeNodes.add(getErrorTypeNode());
        restFieldType.memberTypeNodes.add(getAnyTypeNode());
        BLangRecordTypeNode frameTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();
        frameTypeNode.setBType(frameType);
        frameTypeNode.restFieldType = restFieldType;
        frameTypeNode.symbol = frameType.tsymbol;
        frameTypeNode.desugared = true;
        return frameTypeNode;
    }

    /**
     * Load and return symbol for _Frame.
     *
     * @return _Frame type symbol.
*/ private BSymbol getFrameTypeSymbol() { return symTable.langQueryModuleSymbol .scope.lookup(names.fromString("_Frame")).symbol; } @Override public void visit(BLangLambdaFunction lambda) { lambda.function.accept(this); lambda.function = desugar.rewrite(lambda.function, env); env.enclPkg.lambdaFunctions.add(lambda); } @Override public void visit(BLangFunction function) { if (function.flagSet.contains(Flag.QUERY_LAMBDA)) { BLangBlockFunctionBody prevQueryLambdaBody = currentQueryLambdaBody; BVarSymbol prevFrameSymbol = currentFrameSymbol; Map<String, BSymbol> prevIdentifiers = identifiers; currentFrameSymbol = function.requiredParams.get(0).symbol; identifiers = new HashMap<>(); currentQueryLambdaBody = (BLangBlockFunctionBody) function.getBody(); currentQueryLambdaBody.accept(this); currentFrameSymbol = prevFrameSymbol; identifiers = prevIdentifiers; currentQueryLambdaBody = prevQueryLambdaBody; } else { boolean prevWithinLambdaFunc = withinLambdaFunc; withinLambdaFunc = true; function.getBody().accept(this); withinLambdaFunc = prevWithinLambdaFunc; } } @Override public void visit(BLangBlockFunctionBody body) { List<BLangStatement> stmts = new ArrayList<>(body.getStatements()); stmts.forEach(stmt -> stmt.accept(this)); } @Override public void visit(BLangExprFunctionBody exprBody) { exprBody.expr.accept(this); } @Override public void visit(BLangSimpleVariableDef bLangSimpleVariableDef) { bLangSimpleVariableDef.getVariable().accept(this); } @Override public void visit(BLangRecordVariableDef bLangRecordVariableDef) { bLangRecordVariableDef.var.accept(this); } @Override public void visit(BLangRecordVariable bLangRecordVariable) { bLangRecordVariable.variableList.forEach(v -> v.getValue().accept(this)); this.acceptNode(bLangRecordVariable.expr); if (bLangRecordVariable.hasRestParam()) { bLangRecordVariable.restParam.accept(this); } } @Override public void visit(BLangSimpleVariable bLangSimpleVariable) { identifiers.putIfAbsent(bLangSimpleVariable.name.value, 
bLangSimpleVariable.symbol); this.acceptNode(bLangSimpleVariable.expr); } @Override public void visit(BLangTypeConversionExpr conversionExpr) { conversionExpr.expr.accept(this); } @Override public void visit(BLangFieldBasedAccess fieldAccessExpr) { fieldAccessExpr.expr.accept(this); } @Override public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) { nsPrefixedFieldBasedAccess.expr.accept(this); } @Override public void visit(BLangFieldBasedAccess.BLangStructFunctionVarRef structFunctionVarRef) { structFunctionVarRef.expr.accept(this); } @Override public void visit(BLangExpressionStmt exprStmtNode) { exprStmtNode.expr.accept(this); } @Override public void visit(BLangInvocation invocationExpr) { List<BLangExpression> requiredArgs = invocationExpr.requiredArgs; if (invocationExpr.langLibInvocation && !requiredArgs.isEmpty()) { requiredArgs = requiredArgs.subList(1, requiredArgs.size()); } requiredArgs.forEach(this::acceptNode); invocationExpr.restArgs.forEach(this::acceptNode); this.acceptNode(invocationExpr.expr); } @Override public void visit(BLangInvocation.BFunctionPointerInvocation functionPointerInvocationExpr) { visit((BLangInvocation) functionPointerInvocationExpr); } @Override public void visit(BLangInvocation.BLangAttachedFunctionInvocation attachedFunctionInvocation) { visit((BLangInvocation) attachedFunctionInvocation); } @Override public void visit(BLangLiteral literalExpr) { } @Override public void visit(BLangReturn bLangReturn) { this.acceptNode(bLangReturn.expr); } @Override public void visit(BLangBinaryExpr bLangBinaryExpr) { this.acceptNode(bLangBinaryExpr.lhsExpr); this.acceptNode(bLangBinaryExpr.rhsExpr); } @Override public void visit(BLangCommitExpr commitExpr) { } @Override public void visit(BLangAssignment bLangAssignment) { this.acceptNode(bLangAssignment.varRef); this.acceptNode(bLangAssignment.expr); } @Override public void visit(BLangRecordLiteral bLangRecordLiteral) { 
bLangRecordLiteral.fields.forEach(field -> this.acceptNode((BLangNode) field)); } @Override public void visit(BLangRecordLiteral.BLangStructLiteral structLiteral) { visit((BLangRecordLiteral) structLiteral); } @Override public void visit(BLangRecordLiteral.BLangMapLiteral mapLiteral) { visit((BLangRecordLiteral) mapLiteral); } @Override public void visit(BLangRecordKeyValueField recordKeyValue) { this.acceptNode(recordKeyValue.key.expr); this.acceptNode(recordKeyValue.valueExpr); } @Override public void visit(BLangRecordSpreadOperatorField spreadOperatorField) { this.acceptNode(spreadOperatorField.expr); } @Override public void visit(BLangConstRef constRef) { } @Override public void visit(BLangNumericLiteral literalExpr) { } @Override public void visit(BLangTupleVarRef varRefExpr) { varRefExpr.expressions.forEach(this::acceptNode); this.acceptNode((BLangNode) varRefExpr.restParam); } @Override public void visit(BLangRecordVarRef varRefExpr) { varRefExpr.recordRefFields.forEach(recordVarRefKeyValue -> this.acceptNode(recordVarRefKeyValue.variableReference)); this.acceptNode((BLangNode) varRefExpr.restParam); } @Override public void visit(BLangErrorVarRef varRefExpr) { this.acceptNode(varRefExpr.message); this.acceptNode(varRefExpr.restVar); varRefExpr.detail.forEach(this::acceptNode); } @Override public void visit(BLangSimpleVarRef bLangSimpleVarRef) { BSymbol symbol = bLangSimpleVarRef.symbol; String identifier = bLangSimpleVarRef.variableName == null ? 
String.valueOf(bLangSimpleVarRef.varSymbol.name) : String.valueOf(bLangSimpleVarRef.variableName); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(env, names.fromString(identifier), SymTag.VARIABLE); if (symbol != null && symbol != resolvedSymbol && !FRAME_PARAMETER_NAME.equals(identifier)) { if (symbol instanceof BVarSymbol) { BVarSymbol originalSymbol = ((BVarSymbol) symbol).originalSymbol; if (originalSymbol != null) { symbol = originalSymbol; } } if ((withinLambdaFunc || queryEnv == null || !queryEnv.scope.entries.containsKey(symbol.name)) && !identifiers.containsKey(identifier)) { Location pos = currentQueryLambdaBody.pos; BLangFieldBasedAccess frameAccessExpr = desugar.getFieldAccessExpression(pos, identifier, symTable.anyOrErrorType, currentFrameSymbol); frameAccessExpr.expr = desugar.addConversionExprIfRequired(frameAccessExpr.expr, types.getSafeType(frameAccessExpr.expr.getBType(), true, false)); if (symbol instanceof BVarSymbol) { ((BVarSymbol) symbol).originalSymbol = null; if (withinLambdaFunc && symbol.closure) { symbol.closure = false; symbol = new BVarSymbol(0, symbol.name, env.scope.owner.pkgID, symbol.type, env.scope.owner, pos, VIRTUAL); symbol.closure = true; bLangSimpleVarRef.symbol = symbol; bLangSimpleVarRef.varSymbol = symbol; } BLangSimpleVariable variable = ASTBuilderUtil.createVariable(pos, identifier, symbol.type, desugar.addConversionExprIfRequired(frameAccessExpr, symbol.type), (BVarSymbol) symbol); BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(pos, variable); currentQueryLambdaBody.stmts.add(0, variableDef); SymbolEnv queryLambdaEnv = SymbolEnv.createFuncBodyEnv(currentQueryLambdaBody, env); queryLambdaEnv.scope.define(symbol.name, symbol); } identifiers.put(identifier, symbol); } else if (identifiers.containsKey(identifier) && withinLambdaFunc) { symbol = identifiers.get(identifier); bLangSimpleVarRef.symbol = symbol; bLangSimpleVarRef.varSymbol = symbol; } } else if (resolvedSymbol != 
symTable.notFoundSymbol) { resolvedSymbol.closure = true; BSymbol enclSymbol = symResolver.lookupClosureVarSymbol(env.enclEnv, names.fromString(identifier), SymTag.VARIABLE); if (enclSymbol != null && enclSymbol != symTable.notFoundSymbol) { enclSymbol.closure = true; } } } @Override public void visit(BLangSimpleVarRef.BLangPackageVarRef bLangPackageVarRef) { visit((BLangSimpleVarRef) bLangPackageVarRef); } @Override public void visit(BLangSimpleVarRef.BLangLocalVarRef localVarRef) { visit(((BLangSimpleVarRef) localVarRef)); } @Override public void visit(BLangSimpleVarRef.BLangFieldVarRef fieldVarRef) { visit(((BLangSimpleVarRef) fieldVarRef)); } @Override public void visit(BLangSimpleVarRef.BLangFunctionVarRef functionVarRef) { visit(((BLangSimpleVarRef) functionVarRef)); } @Override public void visit(BLangIndexBasedAccess indexAccessExpr) { indexAccessExpr.indexExpr.accept(this); indexAccessExpr.expr.accept(this); } @Override public void visit(BLangIndexBasedAccess.BLangStructFieldAccessExpr structFieldAccessExpr) { visit((BLangIndexBasedAccess) structFieldAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangMapAccessExpr mapAccessExpr) { visit((BLangIndexBasedAccess) mapAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangArrayAccessExpr arrayAccessExpr) { visit((BLangIndexBasedAccess) arrayAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangTableAccessExpr tableAccessExpr) { visit((BLangIndexBasedAccess) tableAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangTupleAccessExpr tupleAccessExpr) { visit((BLangIndexBasedAccess) tupleAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangStringAccessExpr stringAccessExpr) { visit((BLangIndexBasedAccess) stringAccessExpr); } @Override public void visit(BLangIndexBasedAccess.BLangXMLAccessExpr xmlAccessExpr) { visit((BLangIndexBasedAccess) xmlAccessExpr); } @Override public void visit(BLangTypeInit connectorInitExpr) { 
connectorInitExpr.argsExpr.forEach(this::acceptNode); connectorInitExpr.initInvocation.accept(this); } @Override public void visit(BLangInvocation.BLangActionInvocation actionInvocationExpr) { actionInvocationExpr.argExprs.forEach(this::acceptNode); this.acceptNode(actionInvocationExpr.expr); } @Override public void visit(BLangErrorConstructorExpr errorConstructorExpr) { this.acceptNode(errorConstructorExpr.errorTypeRef); if (errorConstructorExpr.namedArgs != null) { errorConstructorExpr.namedArgs.forEach(this::acceptNode); } this.acceptNode(errorConstructorExpr.errorDetail); } @Override public void visit(BLangTernaryExpr ternaryExpr) { ternaryExpr.expr.accept(this); ternaryExpr.elseExpr.accept(this); ternaryExpr.thenExpr.accept(this); } @Override public void visit(BLangWaitExpr awaitExpr) { awaitExpr.exprList.forEach(this::acceptNode); } @Override public void visit(BLangTrapExpr trapExpr) { this.acceptNode(trapExpr.expr); } @Override public void visit(BLangElvisExpr elvisExpr) { this.acceptNode(elvisExpr.lhsExpr); this.acceptNode(elvisExpr.rhsExpr); } @Override public void visit(BLangGroupExpr groupExpr) { this.acceptNode(groupExpr.expression); } @Override public void visit(BLangLetExpression letExpr) { this.acceptNode(letExpr.expr); letExpr.letVarDeclarations.forEach(var -> this.acceptNode((BLangNode) var.definitionNode)); } @Override public void visit(BLangListConstructorExpr listConstructorExpr) { listConstructorExpr.exprs.forEach(this::acceptNode); } @Override public void visit(BLangListConstructorExpr.BLangListConstructorSpreadOpExpr spreadOpExpr) { this.acceptNode(spreadOpExpr.expr); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { } @Override public void visit(BLangListConstructorExpr.BLangTupleLiteral tupleLiteral) { tupleLiteral.exprs.forEach(this::acceptNode); } @Override public void visit(BLangListConstructorExpr.BLangJSONArrayLiteral jsonArrayLiteral) { jsonArrayLiteral.exprs.forEach(expression -> 
expression.accept(this)); } @Override public void visit(BLangArrayLiteral arrayLiteral) { arrayLiteral.exprs.forEach(this::acceptNode); } @Override public void visit(BLangUnaryExpr unaryExpr) { this.acceptNode(unaryExpr.expr); } @Override public void visit(BLangTypedescExpr accessExpr) { } @Override public void visit(BLangXMLQName xmlQName) { } @Override public void visit(BLangXMLAttribute xmlAttribute) { this.acceptNode(xmlAttribute.name); this.acceptNode(xmlAttribute.value); } @Override public void visit(BLangXMLElementLiteral xmlElementLiteral) { this.acceptNode(xmlElementLiteral.startTagName); this.acceptNode(xmlElementLiteral.endTagName); xmlElementLiteral.attributes.forEach(this::acceptNode); xmlElementLiteral.children.forEach(this::acceptNode); } @Override public void visit(BLangXMLTextLiteral xmlTextLiteral) { xmlTextLiteral.textFragments.forEach(this::acceptNode); this.acceptNode(xmlTextLiteral.concatExpr); } @Override public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { xmlCommentLiteral.textFragments.forEach(this::acceptNode); this.acceptNode(xmlCommentLiteral.concatExpr); } @Override public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { xmlProcInsLiteral.dataFragments.forEach(this::acceptNode); this.acceptNode(xmlProcInsLiteral.dataConcatExpr); } @Override public void visit(BLangXMLQuotedString xmlQuotedString) { xmlQuotedString.textFragments.forEach(this::acceptNode); this.acceptNode(xmlQuotedString.concatExpr); } @Override public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { stringTemplateLiteral.exprs.forEach(this::acceptNode); } @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { rawTemplateLiteral.strings.forEach(this::acceptNode); rawTemplateLiteral.insertions.forEach(this::acceptNode); } @Override public void visit(BLangArrowFunction bLangArrowFunction) { bLangArrowFunction.params.forEach(this::acceptNode); this.acceptNode(bLangArrowFunction.body); } @Override public void 
visit(BLangRestArgsExpression bLangVarArgsExpression) { this.acceptNode(bLangVarArgsExpression.expr); } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { this.acceptNode(bLangNamedArgsExpression.expr); } @Override public void visit(BLangIsAssignableExpr assignableExpr) { this.acceptNode(assignableExpr.lhsExpr); } @Override public void visit(BLangCheckedExpr checkedExpr) { containsCheckExpr = true; if (this.checkedErrorList != null && checkedExpr.equivalentErrorTypeList != null) { this.checkedErrorList.addAll(checkedExpr.equivalentErrorTypeList); } this.acceptNode(checkedExpr.expr); } @Override public void visit(BLangCheckPanickedExpr checkPanickedExpr) { this.acceptNode(checkPanickedExpr.expr); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { this.acceptNode(serviceConstructorExpr.serviceNode); } @Override public void visit(BLangTypeTestExpr typeTestExpr) { this.acceptNode(typeTestExpr.expr); } @Override public void visit(BLangIsLikeExpr typeTestExpr) { this.acceptNode(typeTestExpr.expr); } @Override public void visit(BLangIgnoreExpr ignoreExpr) { } @Override public void visit(BLangAnnotAccessExpr annotAccessExpr) { } @Override public void visit(BLangXMLNS.BLangLocalXMLNS xmlnsNode) { } @Override public void visit(BLangXMLNS.BLangPackageXMLNS xmlnsNode) { } @Override public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) { bLangXMLSequenceLiteral.xmlItems.forEach(this::acceptNode); } @Override public void visit(BLangStatementExpression bLangStatementExpression) { this.acceptNode(bLangStatementExpression.expr); this.acceptNode(bLangStatementExpression.stmt); } @Override public void visit(BLangTupleVariable bLangTupleVariable) { this.acceptNode(bLangTupleVariable.restVariable); bLangTupleVariable.memberVariables.forEach(this::acceptNode); } @Override public void visit(BLangTupleVariableDef bLangTupleVariableDef) { this.acceptNode(bLangTupleVariableDef.var.restVariable); 
this.acceptNode(bLangTupleVariableDef.var.expr); if (bLangTupleVariableDef.var.memberVariables != null) { bLangTupleVariableDef.var.memberVariables.forEach(this::acceptNode); } } @Override public void visit(BLangErrorVariable bLangErrorVariable) { this.acceptNode(bLangErrorVariable.message); bLangErrorVariable.detail.forEach(var -> this.acceptNode(var.valueBindingPattern)); this.acceptNode(bLangErrorVariable.restDetail); this.acceptNode(bLangErrorVariable.detailExpr); } @Override public void visit(BLangErrorVariableDef bLangErrorVariableDef) { this.acceptNode(bLangErrorVariableDef.errorVariable); } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { } @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { } @Override public void visit(BLangWaitForAllExpr waitForAllExpr) { waitForAllExpr.keyValuePairs.forEach(pair -> this.acceptNode(pair)); } @Override public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) { } @Override public void visit(BLangMarkdownReferenceDocumentation bLangMarkdownReferenceDocumentation) { } @Override public void visit(BLangWaitForAllExpr.BLangWaitKeyValue waitKeyValue) { this.acceptNode(waitKeyValue.key); this.acceptNode(waitKeyValue.valueExpr); } @Override public void visit(BLangXMLElementFilter xmlElementFilter) { this.acceptNode(xmlElementFilter.impConversionExpr); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { this.acceptNode(xmlElementAccess.expr); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { this.acceptNode(xmlNavigation.expr); this.acceptNode(xmlNavigation.childIndex); } @Override public void visit(BLangBlockStmt blockNode) { blockNode.stmts.forEach(statement -> this.acceptNode(statement)); } @Override public void visit(BLangLock.BLangLockStmt lockStmtNode) { } @Override public void visit(BLangLock.BLangUnLockStmt unLockNode) { } @Override public void visit(BLangCompoundAssignment compoundAssignNode) { this.acceptNode(compoundAssignNode.expr); 
this.acceptNode(compoundAssignNode.modifiedExpr); this.acceptNode(compoundAssignNode.varRef); } @Override public void visit(BLangRetry retryNode) { } @Override public void visit(BLangContinue continueNode) { } @Override public void visit(BLangBreak breakNode) { } @Override public void visit(BLangPanic panicNode) { this.acceptNode(panicNode.expr); } @Override public void visit(BLangXMLNSStatement xmlnsStmtNode) { this.acceptNode(xmlnsStmtNode.xmlnsDecl); } @Override public void visit(BLangIf ifNode) { this.acceptNode(ifNode.expr); this.acceptNode(ifNode.body); this.acceptNode(ifNode.elseStmt); } @Override public void visit(BLangQueryAction queryAction) { SymbolEnv prevQueryEnv = this.queryEnv; queryAction.getQueryClauses().forEach(clause -> this.acceptNode(clause)); this.queryEnv = prevQueryEnv; } @Override public void visit(BLangQueryExpr queryExpr) { SymbolEnv prevQueryEnv = this.queryEnv; queryExpr.getQueryClauses().forEach(clause -> this.acceptNode(clause)); this.queryEnv = prevQueryEnv; } @Override public void visit(BLangForeach foreach) { this.acceptNode(foreach.collection); this.acceptNode(foreach.body); } @Override public void visit(BLangFromClause fromClause) { this.queryEnv = fromClause.env; this.acceptNode(fromClause.collection); } @Override public void visit(BLangJoinClause joinClause) { this.acceptNode(joinClause.collection); joinClause.collection.accept(this); this.acceptNode((BLangNode) joinClause.onClause.getLeftExpression()); this.acceptNode((BLangNode) joinClause.onClause.getRightExpression()); } @Override public void visit(BLangLetClause letClause) { } @Override public void visit(BLangSelectClause selectClause) { this.acceptNode(selectClause.expression); } @Override public void visit(BLangWhereClause whereClause) { this.acceptNode(whereClause.expression); } @Override public void visit(BLangDoClause doClause) { doClause.body.getStatements().forEach(statement -> this.acceptNode(statement)); } @Override public void visit(BLangOnConflictClause 
onConflictClause) { this.acceptNode(onConflictClause.expression); } @Override public void visit(BLangLimitClause limitClause) { this.acceptNode(limitClause.expression); } @Override public void visit(BLangOrderByClause orderByClause) { orderByClause.orderByKeyList.forEach(key -> this.acceptNode(((BLangOrderKey) key).expression)); } @Override public void visit(BLangWhile whileNode) { this.acceptNode(whileNode.expr); this.acceptNode(whileNode.body); } @Override public void visit(BLangDo doNode) { doNode.body.stmts.forEach(stmt -> this.acceptNode(stmt)); } @Override public void visit(BLangOnFailClause onFailClause) { onFailClause.body.stmts.forEach(stmt -> this.acceptNode(stmt)); } @Override public void visit(BLangFail failNode) { this.acceptNode(failNode.expr); } @Override public void visit(BLangLock lockNode) { this.acceptNode(lockNode.body); } @Override public void visit(BLangTransaction transactionNode) { this.acceptNode(transactionNode.transactionBody); } @Override public void visit(BLangTupleDestructure stmt) { this.acceptNode(stmt.varRef); this.acceptNode(stmt.expr); } @Override public void visit(BLangRecordDestructure stmt) { this.acceptNode(stmt.expr); this.acceptNode(stmt.varRef); } @Override public void visit(BLangErrorDestructure stmt) { this.acceptNode(stmt.expr); this.acceptNode(stmt.varRef); } @Override public void visit(BLangForkJoin forkJoin) { forkJoin.workers.forEach(worker -> this.acceptNode(worker)); } @Override public void visit(BLangWorkerSend workerSendNode) { this.acceptNode(workerSendNode.expr); } @Override public void visit(BLangWorkerReceive workerReceiveNode) { this.acceptNode(workerReceiveNode.sendExpression); } private void acceptNode(BLangNode node) { if (node == null) { return; } node.accept(this); } }
Two main comments: 1) Let's filter out tables from the schema that are not tracked by change streams. We don't want to cache tables in the schema if they are not tracked by change streams. 2) Let's filter out columns from a change stream-tracked table if they are not tracked by change streams. Three potential cases: CREATE CHANGE STREAM changeStream FOR table(); // This statement will track only primary key columns. CREATE CHANGE STREAM changeStream FOR table(column1); this statement will track only primary key columns and column1. CREATE CHANGE STREAM changeStream FOR table; // This statement will track all columns
public PCollectionRowTuple expand(PCollectionRowTuple input) { Pipeline p = input.getPipeline(); Schema tableChangesSchema = getTableSchema(configuration); SpannerIO.ReadChangeStream readChangeStream = SpannerIO.readChangeStream() .withSpannerConfig( SpannerConfig.create() .withProjectId(configuration.getProjectId()) .withInstanceId(configuration.getInstanceId()) .withDatabaseId(configuration.getDatabaseId())) .withChangeStreamName(configuration.getChangeStreamName()) .withInclusiveStartAt( Timestamp.parseTimestamp(configuration.getStartAtTimestamp())) .withDatabaseId(configuration.getDatabaseId()) .withProjectId(configuration.getProjectId()) .withInstanceId(configuration.getInstanceId()); if (configuration.getEndAtTimestamp() != null) { String endTs = Objects.requireNonNull(Objects.requireNonNull(configuration.getEndAtTimestamp())); readChangeStream = readChangeStream.withInclusiveEndAt(Timestamp.parseTimestamp(endTs)); } return PCollectionRowTuple.of( "output", p.apply(readChangeStream) .apply( ParDo.of( new DataChangeRecordToRow( configuration.getTable(), tableChangesSchema))) .setRowSchema(tableChangesSchema)); }
Schema tableChangesSchema = getTableSchema(configuration);
public PCollectionRowTuple expand(PCollectionRowTuple input) { Pipeline p = input.getPipeline(); Schema tableChangesSchema = getTableSchema(configuration); SpannerIO.ReadChangeStream readChangeStream = SpannerIO.readChangeStream() .withSpannerConfig( SpannerConfig.create() .withProjectId(configuration.getProjectId()) .withInstanceId(configuration.getInstanceId()) .withDatabaseId(configuration.getDatabaseId())) .withChangeStreamName(configuration.getChangeStreamName()) .withInclusiveStartAt( Timestamp.parseTimestamp(configuration.getStartAtTimestamp())) .withDatabaseId(configuration.getDatabaseId()) .withProjectId(configuration.getProjectId()) .withInstanceId(configuration.getInstanceId()); if (configuration.getEndAtTimestamp() != null) { String endTs = Objects.requireNonNull(Objects.requireNonNull(configuration.getEndAtTimestamp())); readChangeStream = readChangeStream.withInclusiveEndAt(Timestamp.parseTimestamp(endTs)); } return PCollectionRowTuple.of( "output", p.apply(readChangeStream) .apply( ParDo.of( new DataChangeRecordToRow( configuration.getTable(), tableChangesSchema))) .setRowSchema(tableChangesSchema)); }
class SpannerChangestreamsReadSchemaTransformProvider extends TypedSchemaTransformProvider< SpannerChangestreamsReadSchemaTransformProvider.SpannerChangestreamsReadConfiguration> { @Override protected @UnknownKeyFor @NonNull @Initialized Class<SpannerChangestreamsReadConfiguration> configurationClass() { return SpannerChangestreamsReadConfiguration.class; } @Override public @UnknownKeyFor @NonNull @Initialized SchemaTransform from( SpannerChangestreamsReadSchemaTransformProvider.SpannerChangestreamsReadConfiguration configuration) { return new SchemaTransform() { @Override public @UnknownKeyFor @NonNull @Initialized PTransform< @UnknownKeyFor @NonNull @Initialized PCollectionRowTuple, @UnknownKeyFor @NonNull @Initialized PCollectionRowTuple> buildTransform() { return new PTransform<PCollectionRowTuple, PCollectionRowTuple>() { @Override }; } }; } @Override public @UnknownKeyFor @NonNull @Initialized String identifier() { return "beam:schematransform:org.apache.beam:spanner_cdc_read:v1"; } @Override public @UnknownKeyFor @NonNull @Initialized List<@UnknownKeyFor @NonNull @Initialized String> inputCollectionNames() { return Collections.emptyList(); } @Override public @UnknownKeyFor @NonNull @Initialized List<@UnknownKeyFor @NonNull @Initialized String> outputCollectionNames() { return Collections.singletonList("output"); } @DefaultSchema(AutoValueSchema.class) @AutoValue public abstract static class SpannerChangestreamsReadConfiguration implements Serializable { public abstract String getDatabaseId(); public abstract String getProjectId(); public abstract String getInstanceId(); public abstract String getTable(); public abstract String getStartAtTimestamp(); public abstract @Nullable String getEndAtTimestamp(); public abstract String getChangeStreamName(); public static Builder builder() { return new AutoValue_SpannerChangestreamsReadSchemaTransformProvider_SpannerChangestreamsReadConfiguration .Builder(); } @AutoValue.Builder public abstract static class Builder { 
public abstract Builder setDatabaseId(String databaseId); public abstract Builder setProjectId(String projectId); public abstract Builder setInstanceId(String instanceId); public abstract Builder setTable(String table); public abstract Builder setStartAtTimestamp(String isoTimestamp); public abstract Builder setEndAtTimestamp(String isoTimestamp); public abstract Builder setChangeStreamName(String changeStreamName); public abstract SpannerChangestreamsReadConfiguration build(); } } private static final class DataChangeRecordToRow extends DoFn<DataChangeRecord, Row> { private final Schema tableChangeRecordSchema; private final String tableName; private transient Gson gson; DataChangeRecordToRow(String tableName, Schema tableChangeRecordSchema) { this.tableName = tableName; this.tableChangeRecordSchema = tableChangeRecordSchema; this.gson = new Gson(); } public Gson getGson() { if (gson == null) { gson = new Gson(); } return gson; } @ProcessElement public void process(@DoFn.Element DataChangeRecord record, OutputReceiver<Row> receiver) { if (!record.getTableName().equalsIgnoreCase(tableName)) { return; } final Instant timestamp = new Instant(record.getRecordTimestamp().toSqlTimestamp()); for (Mod mod : record.getMods()) { Row.FieldValueBuilder rowBuilder = Row.fromRow(Row.nullRow(tableChangeRecordSchema)); final Map<String, String> newValues = Optional.ofNullable(mod.getNewValuesJson()) .map(nonNullValues -> getGson().fromJson(nonNullValues, Map.class)) .orElseGet(Collections::emptyMap); final Map<String, String> keyValues = Optional.ofNullable(mod.getKeysJson()) .map(nonNullValues -> getGson().fromJson(nonNullValues, Map.class)) .orElseGet(Collections::emptyMap); for (Map.Entry<String, String> valueEntry : newValues.entrySet()) { if (valueEntry.getValue() == null) { continue; } rowBuilder = rowBuilder.withFieldValue( valueEntry.getKey().toLowerCase(), stringToParsedValue( tableChangeRecordSchema.getField(valueEntry.getKey().toLowerCase()).getType(), 
valueEntry.getValue())); } for (Map.Entry<String, String> pkEntry : keyValues.entrySet()) { if (pkEntry.getValue() == null) { continue; } rowBuilder = rowBuilder.withFieldValue( pkEntry.getKey().toLowerCase(), stringToParsedValue( tableChangeRecordSchema.getField(pkEntry.getKey().toLowerCase()).getType(), pkEntry.getValue())); } receiver.outputWithTimestamp(rowBuilder.build(), timestamp); } } } private static final HashMap<String, SpannerSchema> TABLE_SCHEMAS = new HashMap<>(); private static Schema getTableSchema(SpannerChangestreamsReadConfiguration config) { Pipeline miniPipeline = Pipeline.create(); PCollectionView<Dialect> sqlDialectView = miniPipeline .apply("Create Dialect", Create.of(Dialect.GOOGLE_STANDARD_SQL)) .apply("Dialect to View", View.asSingleton()); miniPipeline .apply(Create.of((Void) null)) .apply( ParDo.of( new ReadSpannerSchema( SpannerConfig.create() .withDatabaseId(config.getDatabaseId()) .withInstanceId(config.getInstanceId()) .withProjectId(config.getProjectId()), sqlDialectView)) .withSideInput("dialect", sqlDialectView)) .apply( ParDo.of( new DoFn<SpannerSchema, String>() { @ProcessElement public void process(@DoFn.Element SpannerSchema schema) { TABLE_SCHEMAS.put(config.getTable(), schema); } })) .setCoder(StringUtf8Coder.of()); miniPipeline.run().waitUntilFinish(); SpannerSchema finalSchemaObj = TABLE_SCHEMAS.remove(config.getTable()); if (finalSchemaObj == null) { throw new RuntimeException( String.format("Could not get schema for configuration %s", config)); } return spannerSchemaToBeamSchema(finalSchemaObj, config.getTable()); } private static Schema spannerSchemaToBeamSchema( SpannerSchema spannerSchema, final String tableName) { OptionalInt optionalIdx = IntStream.range(0, spannerSchema.getTables().size()) .filter(idx -> spannerSchema.getTables().get(idx).equalsIgnoreCase(tableName)) .findAny(); if (!optionalIdx.isPresent()) { throw new IllegalArgumentException( String.format( "Unable to retrieve schema for table %s. 
Found only tables: [%s]", tableName, String.join(", ", spannerSchema.getTables()))); } Schema.Builder schemaBuilder = Schema.builder(); String spannerTableName = spannerSchema.getTables().get(optionalIdx.getAsInt()); for (SpannerSchema.Column col : spannerSchema.getColumns(spannerTableName)) { schemaBuilder = schemaBuilder.addNullableField(col.getName(), spannerTypeToBeamType(col.getType())); } schemaBuilder = schemaBuilder.setOptions( Schema.Options.builder() .setOption( "primaryKeyColumns", Schema.FieldType.array(Schema.FieldType.STRING), spannerSchema.getKeyParts(spannerTableName).stream() .map(SpannerSchema.KeyPart::getField) .collect(Collectors.toList()))); return schemaBuilder.build(); } private static Object stringToParsedValue(Schema.FieldType fieldType, String fieldValue) { switch (fieldType.getTypeName()) { case STRING: return fieldValue; case INT64: return Long.valueOf(fieldValue); case INT16: case INT32: return Integer.valueOf(fieldValue); case FLOAT: return Float.parseFloat(fieldValue); case DOUBLE: return Double.parseDouble(fieldValue); case BOOLEAN: return Boolean.parseBoolean(fieldValue); case BYTES: return fieldValue.getBytes(StandardCharsets.UTF_8); case DATETIME: return new DateTime(fieldValue); case DECIMAL: return new BigDecimal(fieldValue); default: throw new IllegalArgumentException( String.format("Unable to parse field with type %s", fieldType)); } } private static Schema.FieldType spannerTypeToBeamType(Type spannerType) { switch (spannerType.getCode()) { case BOOL: return Schema.FieldType.BOOLEAN; case BYTES: return Schema.FieldType.BYTES; case STRING: return Schema.FieldType.STRING; case INT64: return Schema.FieldType.INT64; case NUMERIC: return Schema.FieldType.DECIMAL; case FLOAT64: return Schema.FieldType.DOUBLE; case TIMESTAMP: case DATE: return Schema.FieldType.DATETIME; case ARRAY: return Schema.FieldType.array(spannerTypeToBeamType(spannerType.getArrayElementType())); case JSON: case STRUCT: default: throw new 
IllegalArgumentException( String.format("Unsupported spanner type: %s", spannerType)); } } }
class SpannerChangestreamsReadSchemaTransformProvider extends TypedSchemaTransformProvider< SpannerChangestreamsReadSchemaTransformProvider.SpannerChangestreamsReadConfiguration> { @Override protected @UnknownKeyFor @NonNull @Initialized Class<SpannerChangestreamsReadConfiguration> configurationClass() { return SpannerChangestreamsReadConfiguration.class; } @Override public @UnknownKeyFor @NonNull @Initialized SchemaTransform from( SpannerChangestreamsReadSchemaTransformProvider.SpannerChangestreamsReadConfiguration configuration) { return new SchemaTransform() { @Override public @UnknownKeyFor @NonNull @Initialized PTransform< @UnknownKeyFor @NonNull @Initialized PCollectionRowTuple, @UnknownKeyFor @NonNull @Initialized PCollectionRowTuple> buildTransform() { return new PTransform<PCollectionRowTuple, PCollectionRowTuple>() { @Override }; } }; } @Override public @UnknownKeyFor @NonNull @Initialized String identifier() { return "beam:schematransform:org.apache.beam:spanner_cdc_read:v1"; } @Override public @UnknownKeyFor @NonNull @Initialized List<@UnknownKeyFor @NonNull @Initialized String> inputCollectionNames() { return Collections.emptyList(); } @Override public @UnknownKeyFor @NonNull @Initialized List<@UnknownKeyFor @NonNull @Initialized String> outputCollectionNames() { return Collections.singletonList("output"); } @DefaultSchema(AutoValueSchema.class) @AutoValue public abstract static class SpannerChangestreamsReadConfiguration implements Serializable { public abstract String getDatabaseId(); public abstract String getProjectId(); public abstract String getInstanceId(); public abstract String getTable(); public abstract String getStartAtTimestamp(); public abstract @Nullable String getEndAtTimestamp(); public abstract String getChangeStreamName(); public static Builder builder() { return new AutoValue_SpannerChangestreamsReadSchemaTransformProvider_SpannerChangestreamsReadConfiguration .Builder(); } @AutoValue.Builder public abstract static class Builder { 
public abstract Builder setDatabaseId(String databaseId); public abstract Builder setProjectId(String projectId); public abstract Builder setInstanceId(String instanceId); public abstract Builder setTable(String table); public abstract Builder setStartAtTimestamp(String isoTimestamp); public abstract Builder setEndAtTimestamp(String isoTimestamp); public abstract Builder setChangeStreamName(String changeStreamName); public abstract SpannerChangestreamsReadConfiguration build(); } } private static final class DataChangeRecordToRow extends DoFn<DataChangeRecord, Row> { private final Schema tableChangeRecordSchema; private final String tableName; private transient Gson gson; DataChangeRecordToRow(String tableName, Schema tableChangeRecordSchema) { this.tableName = tableName; this.tableChangeRecordSchema = tableChangeRecordSchema; this.gson = new Gson(); } public Gson getGson() { if (gson == null) { gson = new Gson(); } return gson; } @ProcessElement public void process(@DoFn.Element DataChangeRecord record, OutputReceiver<Row> receiver) { if (!record.getTableName().equalsIgnoreCase(tableName)) { return; } final Instant timestamp = new Instant(record.getRecordTimestamp().toSqlTimestamp()); for (Mod mod : record.getMods()) { Schema internalRowSchema = tableChangeRecordSchema.getField("rowValues").getType().getRowSchema(); if (internalRowSchema == null) { throw new RuntimeException("Row schema for internal row is null and cannot be utilized."); } Row.FieldValueBuilder rowBuilder = Row.fromRow(Row.nullRow(internalRowSchema)); final Map<String, String> newValues = Optional.ofNullable(mod.getNewValuesJson()) .map(nonNullValues -> getGson().fromJson(nonNullValues, Map.class)) .orElseGet(Collections::emptyMap); final Map<String, String> keyValues = Optional.ofNullable(mod.getKeysJson()) .map(nonNullValues -> getGson().fromJson(nonNullValues, Map.class)) .orElseGet(Collections::emptyMap); for (Map.Entry<String, String> valueEntry : newValues.entrySet()) { if 
(valueEntry.getValue() == null) { continue; } rowBuilder = rowBuilder.withFieldValue( valueEntry.getKey().toLowerCase(), stringToParsedValue( internalRowSchema.getField(valueEntry.getKey().toLowerCase()).getType(), valueEntry.getValue())); } for (Map.Entry<String, String> pkEntry : keyValues.entrySet()) { if (pkEntry.getValue() == null) { continue; } rowBuilder = rowBuilder.withFieldValue( pkEntry.getKey().toLowerCase(), stringToParsedValue( internalRowSchema.getField(pkEntry.getKey().toLowerCase()).getType(), pkEntry.getValue())); } receiver.outputWithTimestamp( Row.withSchema(tableChangeRecordSchema) .addValue(record.getModType().toString()) .addValue(record.getCommitTimestamp().toString()) .addValue(Long.parseLong(record.getRecordSequence())) .addValue(rowBuilder.build()) .build(), timestamp); } } } private static final HashMap<String, SpannerSchema> TABLE_SCHEMAS = new HashMap<>(); private static Schema getTableSchema(SpannerChangestreamsReadConfiguration config) { Pipeline miniPipeline = Pipeline.create(); PCollectionView<Dialect> sqlDialectView = miniPipeline .apply("Create Dialect", Create.of(Dialect.GOOGLE_STANDARD_SQL)) .apply("Dialect to View", View.asSingleton()); miniPipeline .apply(Create.of((Void) null)) .apply( ParDo.of( new ReadSpannerSchema( SpannerConfig.create() .withDatabaseId(config.getDatabaseId()) .withInstanceId(config.getInstanceId()) .withProjectId(config.getProjectId()), sqlDialectView, Sets.newHashSet(config.getTable()))) .withSideInput("dialect", sqlDialectView)) .apply( ParDo.of( new DoFn<SpannerSchema, String>() { @ProcessElement public void process(@DoFn.Element SpannerSchema schema) { TABLE_SCHEMAS.put(config.getTable(), schema); } })) .setCoder(StringUtf8Coder.of()); miniPipeline.run().waitUntilFinish(); SpannerSchema finalSchemaObj = TABLE_SCHEMAS.remove(config.getTable()); if (finalSchemaObj == null) { throw new RuntimeException( String.format("Could not get schema for configuration %s", config)); } return 
spannerSchemaToBeamSchema(finalSchemaObj, config.getTable()); } private static Schema spannerSchemaToBeamSchema( SpannerSchema spannerSchema, final String tableName) { OptionalInt optionalIdx = IntStream.range(0, spannerSchema.getTables().size()) .filter(idx -> spannerSchema.getTables().get(idx).equalsIgnoreCase(tableName)) .findAny(); if (!optionalIdx.isPresent()) { throw new IllegalArgumentException( String.format( "Unable to retrieve schema for table %s. Found only tables: [%s]", tableName, String.join(", ", spannerSchema.getTables()))); } Schema.Builder schemaBuilder = Schema.builder(); String spannerTableName = spannerSchema.getTables().get(optionalIdx.getAsInt()); for (SpannerSchema.Column col : spannerSchema.getColumns(spannerTableName)) { schemaBuilder = schemaBuilder.addNullableField(col.getName(), spannerTypeToBeamType(col.getType())); } schemaBuilder = schemaBuilder.setOptions( Schema.Options.builder() .setOption( "primaryKeyColumns", Schema.FieldType.array(Schema.FieldType.STRING), spannerSchema.getKeyParts(spannerTableName).stream() .map(SpannerSchema.KeyPart::getField) .collect(Collectors.toList()))); return Schema.builder() .addStringField("operation") .addStringField("commitTimestamp") .addInt64Field("recordSequence") .addRowField("rowValues", schemaBuilder.build()) .build(); } private static Object stringToParsedValue(Schema.FieldType fieldType, String fieldValue) { switch (fieldType.getTypeName()) { case STRING: return fieldValue; case INT64: return Long.valueOf(fieldValue); case INT16: case INT32: return Integer.valueOf(fieldValue); case FLOAT: return Float.parseFloat(fieldValue); case DOUBLE: return Double.parseDouble(fieldValue); case BOOLEAN: return Boolean.parseBoolean(fieldValue); case BYTES: return fieldValue.getBytes(StandardCharsets.UTF_8); case DATETIME: return new DateTime(fieldValue); case DECIMAL: return new BigDecimal(fieldValue); default: throw new IllegalArgumentException( String.format("Unable to parse field with type %s", 
fieldType)); } } private static Schema.FieldType spannerTypeToBeamType(Type spannerType) { switch (spannerType.getCode()) { case BOOL: return Schema.FieldType.BOOLEAN; case BYTES: return Schema.FieldType.BYTES; case STRING: return Schema.FieldType.STRING; case INT64: return Schema.FieldType.INT64; case NUMERIC: return Schema.FieldType.DECIMAL; case FLOAT64: return Schema.FieldType.DOUBLE; case TIMESTAMP: case DATE: return Schema.FieldType.DATETIME; case ARRAY: return Schema.FieldType.array(spannerTypeToBeamType(spannerType.getArrayElementType())); case JSON: case STRUCT: default: throw new IllegalArgumentException( String.format("Unsupported spanner type: %s", spannerType)); } } }
```suggestion String maxDate = DateLiteral.createMaxValue(Type.DATE).getStringValue(); ``` minInt -> maxInt? minDate -> maxDate?
public List<PartitionInfo> getPartitions(Table table, List<String> partitionNames) { try (Connection connection = getConnection()) { List<Partition> partitions = schemaResolver.getPartitions(connection, table); String minInt = IntLiteral.createMaxValue(Type.INT).getStringValue(); String minDate = DateLiteral.createMaxValue(Type.DATE).getStringValue(); ImmutableList.Builder<PartitionInfo> list = ImmutableList.builder(); if (!partitions.isEmpty()) { for (Partition partition : partitions) { String partitionName = partition.getPartitionName(); if (partitionNames.contains(partitionName)) { list.add(partition); } if (partitionName.equalsIgnoreCase(PartitionUtil.MYSQL_PARTITION_MAXVALUE)) { if (partitionNames.contains(minInt) || partitionNames.contains(minDate)) { list.add(partition); } } } return list.build(); } else { return Lists.newArrayList(); } } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } }
String minDate = DateLiteral.createMaxValue(Type.DATE).getStringValue();
public List<PartitionInfo> getPartitions(Table table, List<String> partitionNames) { try (Connection connection = getConnection()) { List<Partition> partitions = schemaResolver.getPartitions(connection, table); String maxInt = IntLiteral.createMaxValue(Type.INT).getStringValue(); String maxDate = DateLiteral.createMaxValue(Type.DATE).getStringValue(); ImmutableList.Builder<PartitionInfo> list = ImmutableList.builder(); if (partitions.isEmpty()) { return Lists.newArrayList(); } for (Partition partition : partitions) { String partitionName = partition.getPartitionName(); if (partitionNames.contains(partitionName)) { list.add(partition); } if (partitionName.equalsIgnoreCase(PartitionUtil.MYSQL_PARTITION_MAXVALUE)) { if (partitionNames.contains(maxInt) || partitionNames.contains(maxDate)) { list.add(partition); } } } return list.build(); } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } }
class JDBCMetadata implements ConnectorMetadata { private static Logger LOG = LogManager.getLogger(JDBCMetadata.class); private Map<String, String> properties; private String catalogName; private JDBCSchemaResolver schemaResolver; public JDBCMetadata(Map<String, String> properties, String catalogName) { this.properties = properties; this.catalogName = catalogName; try { Class.forName(properties.get(JDBCResource.DRIVER_CLASS)); } catch (ClassNotFoundException e) { LOG.warn(e.getMessage()); throw new StarRocksConnectorException("doesn't find class: " + e.getMessage()); } if (properties.get(JDBCResource.DRIVER_CLASS).toLowerCase().contains("mysql")) { schemaResolver = new MysqlSchemaResolver(); } else if (properties.get(JDBCResource.DRIVER_CLASS).toLowerCase().contains("postgresql")) { schemaResolver = new PostgresSchemaResolver(); } else { LOG.warn("{} not support yet", properties.get(JDBCResource.DRIVER_CLASS)); throw new StarRocksConnectorException(properties.get(JDBCResource.DRIVER_CLASS) + " not support yet"); } } public Connection getConnection() throws SQLException { return DriverManager.getConnection(properties.get(JDBCResource.URI), properties.get(JDBCResource.USER), properties.get(JDBCResource.PASSWORD)); } @Override public List<String> listDbNames() { try (Connection connection = getConnection()) { return Lists.newArrayList(schemaResolver.listSchemas(connection)); } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override public Database getDb(String name) { try { if (listDbNames().contains(name)) { return new Database(0, name); } else { return null; } } catch (StarRocksConnectorException e) { return null; } } @Override public List<String> listTableNames(String dbName) { try (Connection connection = getConnection()) { try (ResultSet resultSet = schemaResolver.getTables(connection, dbName)) { ImmutableList.Builder<String> list = ImmutableList.builder(); while (resultSet.next()) { String tableName = 
resultSet.getString("TABLE_NAME"); list.add(tableName); } return list.build(); } } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override public Table getTable(String dbName, String tblName) { try (Connection connection = getConnection()) { ResultSet columnSet = schemaResolver.getColumns(connection, dbName, tblName); List<Column> fullSchema = schemaResolver.convertToSRTable(columnSet); List<Column> partitionColumns = listPartitionColumns(dbName, tblName, fullSchema); if (fullSchema.isEmpty()) { return null; } JDBCTableName tableKey = JDBCTableName.of(catalogName, dbName, tblName); if (JDBCTableIdCache.containsTableId(tableKey)) { return schemaResolver.getTable(JDBCTableIdCache.getTableId(tableKey), tblName, fullSchema, partitionColumns, dbName, catalogName, properties); } else { Integer tableId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt(); JDBCTableIdCache.putTableId(tableKey, tableId); return schemaResolver.getTable(tableId, tblName, fullSchema, partitionColumns, dbName, catalogName, properties); } } catch (SQLException | DdlException e) { LOG.warn(e.getMessage()); return null; } } @Override public List<String> listPartitionNames(String databaseName, String tableName) { try (Connection connection = getConnection()) { return schemaResolver.listPartitionNames(connection, databaseName, tableName); } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } public List<Column> listPartitionColumns(String databaseName, String tableName, List<Column> fullSchema) { try (Connection connection = getConnection()) { Set<String> partitionColumnNames = schemaResolver.listPartitionColumns(connection, databaseName, tableName) .stream().map(columnName -> columnName.toLowerCase()).collect(Collectors.toSet()); if (partitionColumnNames.size() > 0) { return fullSchema.stream().filter(column -> partitionColumnNames.contains(column.getName().toLowerCase())) .collect(Collectors.toList()); } else { return 
Lists.newArrayList(); } } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override }
class JDBCMetadata implements ConnectorMetadata { private static Logger LOG = LogManager.getLogger(JDBCMetadata.class); private Map<String, String> properties; private String catalogName; private JDBCSchemaResolver schemaResolver; public JDBCMetadata(Map<String, String> properties, String catalogName) { this.properties = properties; this.catalogName = catalogName; try { Class.forName(properties.get(JDBCResource.DRIVER_CLASS)); } catch (ClassNotFoundException e) { LOG.warn(e.getMessage()); throw new StarRocksConnectorException("doesn't find class: " + e.getMessage()); } if (properties.get(JDBCResource.DRIVER_CLASS).toLowerCase().contains("mysql")) { schemaResolver = new MysqlSchemaResolver(); } else if (properties.get(JDBCResource.DRIVER_CLASS).toLowerCase().contains("postgresql")) { schemaResolver = new PostgresSchemaResolver(); } else { LOG.warn("{} not support yet", properties.get(JDBCResource.DRIVER_CLASS)); throw new StarRocksConnectorException(properties.get(JDBCResource.DRIVER_CLASS) + " not support yet"); } } public Connection getConnection() throws SQLException { return DriverManager.getConnection(properties.get(JDBCResource.URI), properties.get(JDBCResource.USER), properties.get(JDBCResource.PASSWORD)); } @Override public List<String> listDbNames() { try (Connection connection = getConnection()) { return Lists.newArrayList(schemaResolver.listSchemas(connection)); } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override public Database getDb(String name) { try { if (listDbNames().contains(name)) { return new Database(0, name); } else { return null; } } catch (StarRocksConnectorException e) { return null; } } @Override public List<String> listTableNames(String dbName) { try (Connection connection = getConnection()) { try (ResultSet resultSet = schemaResolver.getTables(connection, dbName)) { ImmutableList.Builder<String> list = ImmutableList.builder(); while (resultSet.next()) { String tableName = 
resultSet.getString("TABLE_NAME"); list.add(tableName); } return list.build(); } } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override public Table getTable(String dbName, String tblName) { try (Connection connection = getConnection()) { ResultSet columnSet = schemaResolver.getColumns(connection, dbName, tblName); List<Column> fullSchema = schemaResolver.convertToSRTable(columnSet); List<Column> partitionColumns = listPartitionColumns(dbName, tblName, fullSchema); if (fullSchema.isEmpty()) { return null; } JDBCTableName tableKey = JDBCTableName.of(catalogName, dbName, tblName); if (JDBCTableIdCache.containsTableId(tableKey)) { return schemaResolver.getTable(JDBCTableIdCache.getTableId(tableKey), tblName, fullSchema, partitionColumns, dbName, catalogName, properties); } else { Integer tableId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt(); JDBCTableIdCache.putTableId(tableKey, tableId); return schemaResolver.getTable(tableId, tblName, fullSchema, partitionColumns, dbName, catalogName, properties); } } catch (SQLException | DdlException e) { LOG.warn(e.getMessage()); return null; } } @Override public List<String> listPartitionNames(String databaseName, String tableName) { try (Connection connection = getConnection()) { return schemaResolver.listPartitionNames(connection, databaseName, tableName); } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } public List<Column> listPartitionColumns(String databaseName, String tableName, List<Column> fullSchema) { try (Connection connection = getConnection()) { Set<String> partitionColumnNames = schemaResolver.listPartitionColumns(connection, databaseName, tableName) .stream().map(columnName -> columnName.toLowerCase()).collect(Collectors.toSet()); if (partitionColumnNames.size() > 0) { return fullSchema.stream().filter(column -> partitionColumnNames.contains(column.getName().toLowerCase())) .collect(Collectors.toList()); } else { return 
Lists.newArrayList(); } } catch (SQLException e) { throw new StarRocksConnectorException(e.getMessage()); } } @Override }
this can be simplified now right? you don't need an intermediate list, but we can start writing to the buf directly from the loop?
private void writeFunctionsGlobalVarDependency(ByteBuf buf, BIRNode.BIRFunction birFunction) { List<Integer> globalVarBuf = new LinkedList<>(); for (BIRNode.BIRVariableDcl var : birFunction.dependentGlobalVars) { globalVarBuf.add(addStringCPEntry(var.name.value)); } buf.writeInt(globalVarBuf.size()); globalVarBuf.forEach(buf::writeInt); }
globalVarBuf.forEach(buf::writeInt);
private void writeFunctionsGlobalVarDependency(ByteBuf buf, BIRNode.BIRFunction birFunction) { buf.writeInt(birFunction.dependentGlobalVars.size()); for (BIRNode.BIRVariableDcl var : birFunction.dependentGlobalVars) { buf.writeInt(addStringCPEntry(var.name.value)); } }
class BIRBinaryWriter { private final ConstantPool cp = new ConstantPool(); private final BIRNode.BIRPackage birPackage; public BIRBinaryWriter(BIRNode.BIRPackage birPackage) { this.birPackage = birPackage; } public byte[] serialize() { ByteBuf birbuf = Unpooled.buffer(); BIRTypeWriter typeWriter = new BIRTypeWriter(birbuf, cp); BIRInstructionWriter insWriter = new BIRInstructionWriter(birbuf, cp, this); int orgCPIndex = addStringCPEntry(birPackage.org.value); int nameCPIndex = addStringCPEntry(birPackage.name.value); int versionCPIndex = addStringCPEntry(birPackage.version.value); int pkgIndex = cp.addCPEntry(new PackageCPEntry(orgCPIndex, nameCPIndex, versionCPIndex)); birbuf.writeInt(pkgIndex); writeImportModuleDecls(birbuf, birPackage.importModules); writeConstants(birbuf, birPackage.constants); writeTypeDefs(birbuf, typeWriter, insWriter, birPackage.typeDefs); writeGlobalVars(birbuf, typeWriter, birPackage.globalVars); writeTypeDefBodies(birbuf, typeWriter, insWriter, birPackage.typeDefs); writeFunctions(birbuf, typeWriter, insWriter, birPackage.functions); writeAnnotations(birbuf, typeWriter, birPackage.annotations); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (DataOutputStream dataOut = new DataOutputStream(baos)) { dataOut.write(cp.serialize()); dataOut.write(birbuf.nioBuffer().array(), 0, birbuf.nioBuffer().limit()); return baos.toByteArray(); } catch (IOException e) { throw new BLangCompilerException("failed to serialize the bir", e); } } private void writeImportModuleDecls(ByteBuf buf, List<BIRNode.BIRImportModule> birImpModList) { buf.writeInt(birImpModList.size()); birImpModList.forEach(impMod -> { buf.writeInt(addStringCPEntry(impMod.org.value)); buf.writeInt(addStringCPEntry(impMod.name.value)); buf.writeInt(addStringCPEntry(impMod.version.value)); }); } /** * Write the type definitions. Only the container will be written, to avoid * cyclic dependencies with global vars. 
* * @param buf ByteBuf * @param typeWriter Type writer * @param insWriter Instruction writer * @param birTypeDefList Type definitions list */ private void writeTypeDefs(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, List<BIRTypeDefinition> birTypeDefList) { buf.writeInt(birTypeDefList.size()); birTypeDefList.forEach(typeDef -> writeType(buf, typeWriter, insWriter, typeDef)); } /** * Write the body of the type definitions. * * @param buf ByteBuf * @param typeWriter Type writer * @param birTypeDefList Type definitions list */ private void writeTypeDefBodies(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, List<BIRTypeDefinition> birTypeDefList) { List<BIRTypeDefinition> filtered = birTypeDefList.stream().filter(t -> t.type.tag == TypeTags.OBJECT || t.type.tag == TypeTags.RECORD).collect(Collectors.toList()); buf.writeInt(filtered.size()); filtered.forEach(typeDef -> { writeFunctions(buf, typeWriter, insWriter, typeDef.attachedFuncs); writeReferencedTypes(buf, typeDef.referencedTypes); }); } private void writeReferencedTypes(ByteBuf buf, List<BType> referencedTypes) { buf.writeInt(referencedTypes.size()); referencedTypes.forEach(type -> writeType(buf, type)); } private void writeGlobalVars(ByteBuf buf, BIRTypeWriter typeWriter, List<BIRGlobalVariableDcl> birGlobalVars) { buf.writeInt(birGlobalVars.size()); for (BIRGlobalVariableDcl birGlobalVar : birGlobalVars) { buf.writeByte(birGlobalVar.kind.getValue()); buf.writeInt(addStringCPEntry(birGlobalVar.name.value)); buf.writeInt(birGlobalVar.flags); buf.writeByte(birGlobalVar.origin.value()); typeWriter.writeMarkdownDocAttachment(buf, birGlobalVar.markdownDocAttachment); writeType(buf, birGlobalVar.type); } } private void writeType(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, BIRTypeDefinition typeDef) { insWriter.writePosition(typeDef.pos); buf.writeInt(addStringCPEntry(typeDef.name.value)); buf.writeInt(typeDef.flags); buf.writeByte(typeDef.isLabel 
? 1 : 0); buf.writeByte(typeDef.origin.value()); typeWriter.writeMarkdownDocAttachment(buf, typeDef.markdownDocAttachment); writeType(buf, typeDef.type); } private void writeFunctions(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, List<BIRNode.BIRFunction> birFunctionList) { buf.writeInt(birFunctionList.size()); birFunctionList.forEach(func -> writeFunction(buf, typeWriter, insWriter, func)); } private void writeFunction(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, BIRNode.BIRFunction birFunction) { insWriter.writePosition(birFunction.pos); buf.writeInt(addStringCPEntry(birFunction.name.value)); buf.writeInt(addStringCPEntry(birFunction.workerName.value)); buf.writeInt(birFunction.flags); buf.writeByte(birFunction.origin.value()); writeType(buf, birFunction.type); writeAnnotAttachments(buf, insWriter, birFunction.annotAttachments); buf.writeInt(birFunction.requiredParams.size()); for (BIRParameter parameter : birFunction.requiredParams) { buf.writeInt(addStringCPEntry(parameter.name.value)); buf.writeInt(parameter.flags); } boolean restParamExist = birFunction.restParam != null; buf.writeBoolean(restParamExist); if (restParamExist) { buf.writeInt(addStringCPEntry(birFunction.restParam.name.value)); } boolean hasReceiverType = birFunction.receiver != null; buf.writeBoolean(hasReceiverType); if (hasReceiverType) { buf.writeByte(birFunction.receiver.kind.getValue()); writeType(buf, birFunction.receiver.type); buf.writeInt(addStringCPEntry(birFunction.receiver.name.value)); } writeTaintTable(buf, birFunction.taintTable); typeWriter.writeMarkdownDocAttachment(buf, birFunction.markdownDocAttachment); writeFunctionsGlobalVarDependency(buf, birFunction); ByteBuf birbuf = Unpooled.buffer(); BIRInstructionWriter funcInsWriter = new BIRInstructionWriter(birbuf, cp, this); birbuf.writeInt(birFunction.argsCount); birbuf.writeBoolean(birFunction.returnVariable != null); if (birFunction.returnVariable != null) { 
birbuf.writeByte(birFunction.returnVariable.kind.getValue()); writeType(birbuf, birFunction.returnVariable.type); birbuf.writeInt(addStringCPEntry(birFunction.returnVariable.name.value)); } birbuf.writeInt(birFunction.parameters.size()); for (BIRNode.BIRFunctionParameter param : birFunction.parameters.keySet()) { birbuf.writeByte(param.kind.getValue()); writeType(birbuf, param.type); birbuf.writeInt(addStringCPEntry(param.name.value)); if (param.kind.equals(VarKind.ARG)) { birbuf.writeInt(addStringCPEntry(param.metaVarName != null ? param.metaVarName : "")); } birbuf.writeBoolean(param.hasDefaultExpr); } birbuf.writeInt(birFunction.localVars.size()); for (BIRNode.BIRVariableDcl localVar : birFunction.localVars) { birbuf.writeByte(localVar.kind.getValue()); writeType(birbuf, localVar.type); birbuf.writeInt(addStringCPEntry(localVar.name.value)); if (localVar.kind.equals(VarKind.ARG)) { birbuf.writeInt(addStringCPEntry(localVar.metaVarName != null ? localVar.metaVarName : "")); } if (localVar.kind.equals(VarKind.LOCAL)) { birbuf.writeInt(addStringCPEntry(localVar.metaVarName != null ? localVar.metaVarName : "")); birbuf.writeInt(addStringCPEntry(localVar.endBB != null ? localVar.endBB.id.value : "")); birbuf.writeInt(addStringCPEntry(localVar.startBB != null ? 
localVar.startBB.id.value : "")); birbuf.writeInt(localVar.insOffset); } } birFunction.parameters.values().forEach(funcInsWriter::writeBBs); funcInsWriter.writeBBs(birFunction.basicBlocks); funcInsWriter.writeErrorTable(birFunction.errorTable); birbuf.writeInt(birFunction.workerChannels.length); for (BIRNode.ChannelDetails details : birFunction.workerChannels) { birbuf.writeInt(addStringCPEntry(details.name)); birbuf.writeBoolean(details.channelInSameStrand); birbuf.writeBoolean(details.send); } int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); } private void writeTaintTable(ByteBuf buf, TaintTable taintTable) { ByteBuf birbuf = Unpooled.buffer(); birbuf.writeShort(taintTable.rowCount); birbuf.writeShort(taintTable.columnCount); birbuf.writeInt(taintTable.taintTable.size()); for (Integer paramIndex : taintTable.taintTable.keySet()) { birbuf.writeShort(paramIndex); List<Byte> taintRecord = taintTable.taintTable.get(paramIndex); birbuf.writeInt(taintRecord.size()); for (Byte taintStatus : taintRecord) { birbuf.writeByte(taintStatus); } } int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); } private void writeAnnotations(ByteBuf buf, BIRTypeWriter typeWriter, List<BIRNode.BIRAnnotation> birAnnotationList) { buf.writeInt(birAnnotationList.size()); birAnnotationList.forEach(annotation -> writeAnnotation(buf, typeWriter, annotation)); } private void writeAnnotation(ByteBuf buf, BIRTypeWriter typeWriter, BIRNode.BIRAnnotation birAnnotation) { buf.writeInt(addStringCPEntry(birAnnotation.name.value)); buf.writeInt(birAnnotation.flags); buf.writeByte(birAnnotation.origin.value()); writePosition(buf, birAnnotation.pos); buf.writeInt(birAnnotation.attachPoints.size()); for (AttachPoint attachPoint : birAnnotation.attachPoints) { buf.writeInt(addStringCPEntry(attachPoint.point.getValue())); buf.writeBoolean(attachPoint.source); } writeType(buf, 
birAnnotation.annotationType); typeWriter.writeMarkdownDocAttachment(buf, birAnnotation.markdownDocAttachment); } private void writeConstants(ByteBuf buf, List<BIRNode.BIRConstant> birConstList) { BIRTypeWriter constTypeWriter = new BIRTypeWriter(buf, cp); buf.writeInt(birConstList.size()); birConstList.forEach(constant -> writeConstant(buf, constTypeWriter, constant)); } private void writeConstant(ByteBuf buf, BIRTypeWriter typeWriter, BIRNode.BIRConstant birConstant) { buf.writeInt(addStringCPEntry(birConstant.name.value)); buf.writeInt(birConstant.flags); buf.writeByte(birConstant.origin.value()); writePosition(buf, birConstant.pos); typeWriter.writeMarkdownDocAttachment(buf, birConstant.markdownDocAttachment); writeType(buf, birConstant.type); ByteBuf birbuf = Unpooled.buffer(); writeConstValue(birbuf, birConstant.constValue); int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); } private void writeConstValue(ByteBuf buf, ConstValue constValue) { writeType(buf, constValue.type); switch (constValue.type.tag) { case TypeTags.INT: case TypeTags.SIGNED32_INT: case TypeTags.SIGNED16_INT: case TypeTags.SIGNED8_INT: case TypeTags.UNSIGNED32_INT: case TypeTags.UNSIGNED16_INT: case TypeTags.UNSIGNED8_INT: buf.writeInt(addIntCPEntry((Long) constValue.value)); break; case TypeTags.BYTE: int byteValue = ((Number) constValue.value).intValue(); buf.writeInt(addByteCPEntry(byteValue)); break; case TypeTags.FLOAT: double doubleVal = constValue.value instanceof String ? 
Double.parseDouble((String) constValue.value) : (Double) constValue.value; buf.writeInt(addFloatCPEntry(doubleVal)); break; case TypeTags.STRING: case TypeTags.CHAR_STRING: case TypeTags.DECIMAL: buf.writeInt(addStringCPEntry((String) constValue.value)); break; case TypeTags.BOOLEAN: buf.writeBoolean((Boolean) constValue.value); break; case TypeTags.NIL: break; case TypeTags.MAP: Map<String, ConstValue> mapConstVal = (Map<String, ConstValue>) constValue.value; buf.writeInt(mapConstVal.size()); mapConstVal.forEach((key, value) -> { buf.writeInt(addStringCPEntry(key)); writeConstValue(buf, value); }); break; default: throw new UnsupportedOperationException( "finite type value is not supported for type: " + constValue.type); } } private int addIntCPEntry(long value) { return cp.addCPEntry(new IntegerCPEntry(value)); } private int addFloatCPEntry(double value) { return cp.addCPEntry(new FloatCPEntry(value)); } private int addStringCPEntry(String value) { return cp.addCPEntry(new StringCPEntry(value)); } private int addByteCPEntry(int value) { return cp.addCPEntry(new ByteCPEntry(value)); } private void writeType(ByteBuf buf, BType type) { buf.writeInt(cp.addShapeCPEntry(type)); } void writeAnnotAttachments(ByteBuf buff, BIRInstructionWriter insWriter, List<BIRAnnotationAttachment> annotAttachments) { ByteBuf annotBuf = Unpooled.buffer(); annotBuf.writeInt(annotAttachments.size()); for (BIRAnnotationAttachment annotAttachment : annotAttachments) { writeAnnotAttachment(annotBuf, insWriter, annotAttachment); } int length = annotBuf.nioBuffer().limit(); buff.writeLong(length); buff.writeBytes(annotBuf.nioBuffer().array(), 0, length); } private void writeAnnotAttachment(ByteBuf annotBuf, BIRInstructionWriter insWriter, BIRAnnotationAttachment annotAttachment) { annotBuf.writeInt(insWriter.addPkgCPEntry(annotAttachment.packageID)); insWriter.writePosition(annotBuf, annotAttachment.pos); annotBuf.writeInt(addStringCPEntry(annotAttachment.annotTagRef.value)); 
writeAnnotAttachValues(annotBuf, annotAttachment.annotValues); } private void writeAnnotAttachValues(ByteBuf annotBuf, List<BIRAnnotationValue> annotValues) { annotBuf.writeInt(annotValues.size()); for (BIRAnnotationValue annotValue : annotValues) { writeAnnotAttachValue(annotBuf, annotValue); } } private void writeAnnotAttachValue(ByteBuf annotBuf, BIRAnnotationValue annotValue) { if (annotValue.type.tag == TypeTags.ARRAY) { writeType(annotBuf, annotValue.type); BIRAnnotationArrayValue annotArrayValue = (BIRAnnotationArrayValue) annotValue; annotBuf.writeInt(annotArrayValue.annotArrayValue.length); for (BIRAnnotationValue annotValueEntry : annotArrayValue.annotArrayValue) { writeAnnotAttachValue(annotBuf, annotValueEntry); } } else if (annotValue.type.tag == TypeTags.RECORD || annotValue.type.tag == TypeTags.MAP) { writeType(annotBuf, annotValue.type); BIRAnnotationRecordValue annotRecValue = (BIRAnnotationRecordValue) annotValue; annotBuf.writeInt(annotRecValue.annotValueEntryMap.size()); for (Map.Entry<String, BIRAnnotationValue> annotValueEntry : annotRecValue.annotValueEntryMap.entrySet()) { annotBuf.writeInt(addStringCPEntry(annotValueEntry.getKey())); writeAnnotAttachValue(annotBuf, annotValueEntry.getValue()); } } else { BIRAnnotationLiteralValue annotLiteralValue = (BIRAnnotationLiteralValue) annotValue; writeConstValue(annotBuf, new ConstValue(annotLiteralValue.value, annotLiteralValue.type)); } } private void writePosition(ByteBuf buf, DiagnosticPos pos) { buf.writeInt(addStringCPEntry(pos.src.getCompilationUnitName())); buf.writeInt(pos.sLine); buf.writeInt(pos.sCol); buf.writeInt(pos.eLine); buf.writeInt(pos.eCol); } }
class BIRBinaryWriter { private final ConstantPool cp = new ConstantPool(); private final BIRNode.BIRPackage birPackage; public BIRBinaryWriter(BIRNode.BIRPackage birPackage) { this.birPackage = birPackage; } public byte[] serialize() { ByteBuf birbuf = Unpooled.buffer(); BIRTypeWriter typeWriter = new BIRTypeWriter(birbuf, cp); BIRInstructionWriter insWriter = new BIRInstructionWriter(birbuf, cp, this); int orgCPIndex = addStringCPEntry(birPackage.org.value); int nameCPIndex = addStringCPEntry(birPackage.name.value); int versionCPIndex = addStringCPEntry(birPackage.version.value); int pkgIndex = cp.addCPEntry(new PackageCPEntry(orgCPIndex, nameCPIndex, versionCPIndex)); birbuf.writeInt(pkgIndex); writeImportModuleDecls(birbuf, birPackage.importModules); writeConstants(birbuf, birPackage.constants); writeTypeDefs(birbuf, typeWriter, insWriter, birPackage.typeDefs); writeGlobalVars(birbuf, typeWriter, birPackage.globalVars); writeTypeDefBodies(birbuf, typeWriter, insWriter, birPackage.typeDefs); writeFunctions(birbuf, typeWriter, insWriter, birPackage.functions); writeAnnotations(birbuf, typeWriter, birPackage.annotations); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (DataOutputStream dataOut = new DataOutputStream(baos)) { dataOut.write(cp.serialize()); dataOut.write(birbuf.nioBuffer().array(), 0, birbuf.nioBuffer().limit()); return baos.toByteArray(); } catch (IOException e) { throw new BLangCompilerException("failed to serialize the bir", e); } } private void writeImportModuleDecls(ByteBuf buf, List<BIRNode.BIRImportModule> birImpModList) { buf.writeInt(birImpModList.size()); birImpModList.forEach(impMod -> { buf.writeInt(addStringCPEntry(impMod.org.value)); buf.writeInt(addStringCPEntry(impMod.name.value)); buf.writeInt(addStringCPEntry(impMod.version.value)); }); } /** * Write the type definitions. Only the container will be written, to avoid * cyclic dependencies with global vars. 
* * @param buf ByteBuf * @param typeWriter Type writer * @param insWriter Instruction writer * @param birTypeDefList Type definitions list */ private void writeTypeDefs(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, List<BIRTypeDefinition> birTypeDefList) { buf.writeInt(birTypeDefList.size()); birTypeDefList.forEach(typeDef -> writeType(buf, typeWriter, insWriter, typeDef)); } /** * Write the body of the type definitions. * * @param buf ByteBuf * @param typeWriter Type writer * @param birTypeDefList Type definitions list */ private void writeTypeDefBodies(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, List<BIRTypeDefinition> birTypeDefList) { List<BIRTypeDefinition> filtered = birTypeDefList.stream().filter(t -> t.type.tag == TypeTags.OBJECT || t.type.tag == TypeTags.RECORD).collect(Collectors.toList()); buf.writeInt(filtered.size()); filtered.forEach(typeDef -> { writeFunctions(buf, typeWriter, insWriter, typeDef.attachedFuncs); writeReferencedTypes(buf, typeDef.referencedTypes); }); } private void writeReferencedTypes(ByteBuf buf, List<BType> referencedTypes) { buf.writeInt(referencedTypes.size()); referencedTypes.forEach(type -> writeType(buf, type)); } private void writeGlobalVars(ByteBuf buf, BIRTypeWriter typeWriter, List<BIRGlobalVariableDcl> birGlobalVars) { buf.writeInt(birGlobalVars.size()); for (BIRGlobalVariableDcl birGlobalVar : birGlobalVars) { buf.writeByte(birGlobalVar.kind.getValue()); buf.writeInt(addStringCPEntry(birGlobalVar.name.value)); buf.writeInt(birGlobalVar.flags); buf.writeByte(birGlobalVar.origin.value()); typeWriter.writeMarkdownDocAttachment(buf, birGlobalVar.markdownDocAttachment); writeType(buf, birGlobalVar.type); } } private void writeType(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, BIRTypeDefinition typeDef) { insWriter.writePosition(typeDef.pos); buf.writeInt(addStringCPEntry(typeDef.name.value)); buf.writeInt(typeDef.flags); buf.writeByte(typeDef.isLabel 
? 1 : 0); buf.writeByte(typeDef.origin.value()); typeWriter.writeMarkdownDocAttachment(buf, typeDef.markdownDocAttachment); writeType(buf, typeDef.type); } private void writeFunctions(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, List<BIRNode.BIRFunction> birFunctionList) { buf.writeInt(birFunctionList.size()); birFunctionList.forEach(func -> writeFunction(buf, typeWriter, insWriter, func)); } private void writeFunction(ByteBuf buf, BIRTypeWriter typeWriter, BIRInstructionWriter insWriter, BIRNode.BIRFunction birFunction) { insWriter.writePosition(birFunction.pos); buf.writeInt(addStringCPEntry(birFunction.name.value)); buf.writeInt(addStringCPEntry(birFunction.workerName.value)); buf.writeInt(birFunction.flags); buf.writeByte(birFunction.origin.value()); writeType(buf, birFunction.type); writeAnnotAttachments(buf, insWriter, birFunction.annotAttachments); buf.writeInt(birFunction.requiredParams.size()); for (BIRParameter parameter : birFunction.requiredParams) { buf.writeInt(addStringCPEntry(parameter.name.value)); buf.writeInt(parameter.flags); } boolean restParamExist = birFunction.restParam != null; buf.writeBoolean(restParamExist); if (restParamExist) { buf.writeInt(addStringCPEntry(birFunction.restParam.name.value)); } boolean hasReceiverType = birFunction.receiver != null; buf.writeBoolean(hasReceiverType); if (hasReceiverType) { buf.writeByte(birFunction.receiver.kind.getValue()); writeType(buf, birFunction.receiver.type); buf.writeInt(addStringCPEntry(birFunction.receiver.name.value)); } writeTaintTable(buf, birFunction.taintTable); typeWriter.writeMarkdownDocAttachment(buf, birFunction.markdownDocAttachment); writeFunctionsGlobalVarDependency(buf, birFunction); ByteBuf birbuf = Unpooled.buffer(); BIRInstructionWriter funcInsWriter = new BIRInstructionWriter(birbuf, cp, this); birbuf.writeInt(birFunction.argsCount); birbuf.writeBoolean(birFunction.returnVariable != null); if (birFunction.returnVariable != null) { 
birbuf.writeByte(birFunction.returnVariable.kind.getValue()); writeType(birbuf, birFunction.returnVariable.type); birbuf.writeInt(addStringCPEntry(birFunction.returnVariable.name.value)); } birbuf.writeInt(birFunction.parameters.size()); for (BIRNode.BIRFunctionParameter param : birFunction.parameters.keySet()) { birbuf.writeByte(param.kind.getValue()); writeType(birbuf, param.type); birbuf.writeInt(addStringCPEntry(param.name.value)); if (param.kind.equals(VarKind.ARG)) { birbuf.writeInt(addStringCPEntry(param.metaVarName != null ? param.metaVarName : "")); } birbuf.writeBoolean(param.hasDefaultExpr); } birbuf.writeInt(birFunction.localVars.size()); for (BIRNode.BIRVariableDcl localVar : birFunction.localVars) { birbuf.writeByte(localVar.kind.getValue()); writeType(birbuf, localVar.type); birbuf.writeInt(addStringCPEntry(localVar.name.value)); if (localVar.kind.equals(VarKind.ARG)) { birbuf.writeInt(addStringCPEntry(localVar.metaVarName != null ? localVar.metaVarName : "")); } if (localVar.kind.equals(VarKind.LOCAL)) { birbuf.writeInt(addStringCPEntry(localVar.metaVarName != null ? localVar.metaVarName : "")); birbuf.writeInt(addStringCPEntry(localVar.endBB != null ? localVar.endBB.id.value : "")); birbuf.writeInt(addStringCPEntry(localVar.startBB != null ? 
localVar.startBB.id.value : "")); birbuf.writeInt(localVar.insOffset); } } birFunction.parameters.values().forEach(funcInsWriter::writeBBs); funcInsWriter.writeBBs(birFunction.basicBlocks); funcInsWriter.writeErrorTable(birFunction.errorTable); birbuf.writeInt(birFunction.workerChannels.length); for (BIRNode.ChannelDetails details : birFunction.workerChannels) { birbuf.writeInt(addStringCPEntry(details.name)); birbuf.writeBoolean(details.channelInSameStrand); birbuf.writeBoolean(details.send); } int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); } private void writeTaintTable(ByteBuf buf, TaintTable taintTable) { ByteBuf birbuf = Unpooled.buffer(); birbuf.writeShort(taintTable.rowCount); birbuf.writeShort(taintTable.columnCount); birbuf.writeInt(taintTable.taintTable.size()); for (Integer paramIndex : taintTable.taintTable.keySet()) { birbuf.writeShort(paramIndex); List<Byte> taintRecord = taintTable.taintTable.get(paramIndex); birbuf.writeInt(taintRecord.size()); for (Byte taintStatus : taintRecord) { birbuf.writeByte(taintStatus); } } int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); } private void writeAnnotations(ByteBuf buf, BIRTypeWriter typeWriter, List<BIRNode.BIRAnnotation> birAnnotationList) { buf.writeInt(birAnnotationList.size()); birAnnotationList.forEach(annotation -> writeAnnotation(buf, typeWriter, annotation)); } private void writeAnnotation(ByteBuf buf, BIRTypeWriter typeWriter, BIRNode.BIRAnnotation birAnnotation) { buf.writeInt(addStringCPEntry(birAnnotation.name.value)); buf.writeInt(birAnnotation.flags); buf.writeByte(birAnnotation.origin.value()); writePosition(buf, birAnnotation.pos); buf.writeInt(birAnnotation.attachPoints.size()); for (AttachPoint attachPoint : birAnnotation.attachPoints) { buf.writeInt(addStringCPEntry(attachPoint.point.getValue())); buf.writeBoolean(attachPoint.source); } writeType(buf, 
birAnnotation.annotationType); typeWriter.writeMarkdownDocAttachment(buf, birAnnotation.markdownDocAttachment); } private void writeConstants(ByteBuf buf, List<BIRNode.BIRConstant> birConstList) { BIRTypeWriter constTypeWriter = new BIRTypeWriter(buf, cp); buf.writeInt(birConstList.size()); birConstList.forEach(constant -> writeConstant(buf, constTypeWriter, constant)); } private void writeConstant(ByteBuf buf, BIRTypeWriter typeWriter, BIRNode.BIRConstant birConstant) { buf.writeInt(addStringCPEntry(birConstant.name.value)); buf.writeInt(birConstant.flags); buf.writeByte(birConstant.origin.value()); writePosition(buf, birConstant.pos); typeWriter.writeMarkdownDocAttachment(buf, birConstant.markdownDocAttachment); writeType(buf, birConstant.type); ByteBuf birbuf = Unpooled.buffer(); writeConstValue(birbuf, birConstant.constValue); int length = birbuf.nioBuffer().limit(); buf.writeLong(length); buf.writeBytes(birbuf.nioBuffer().array(), 0, length); } private void writeConstValue(ByteBuf buf, ConstValue constValue) { writeType(buf, constValue.type); switch (constValue.type.tag) { case TypeTags.INT: case TypeTags.SIGNED32_INT: case TypeTags.SIGNED16_INT: case TypeTags.SIGNED8_INT: case TypeTags.UNSIGNED32_INT: case TypeTags.UNSIGNED16_INT: case TypeTags.UNSIGNED8_INT: buf.writeInt(addIntCPEntry((Long) constValue.value)); break; case TypeTags.BYTE: int byteValue = ((Number) constValue.value).intValue(); buf.writeInt(addByteCPEntry(byteValue)); break; case TypeTags.FLOAT: double doubleVal = constValue.value instanceof String ? 
Double.parseDouble((String) constValue.value) : (Double) constValue.value; buf.writeInt(addFloatCPEntry(doubleVal)); break; case TypeTags.STRING: case TypeTags.CHAR_STRING: case TypeTags.DECIMAL: buf.writeInt(addStringCPEntry((String) constValue.value)); break; case TypeTags.BOOLEAN: buf.writeBoolean((Boolean) constValue.value); break; case TypeTags.NIL: break; case TypeTags.MAP: Map<String, ConstValue> mapConstVal = (Map<String, ConstValue>) constValue.value; buf.writeInt(mapConstVal.size()); mapConstVal.forEach((key, value) -> { buf.writeInt(addStringCPEntry(key)); writeConstValue(buf, value); }); break; default: throw new UnsupportedOperationException( "finite type value is not supported for type: " + constValue.type); } } private int addIntCPEntry(long value) { return cp.addCPEntry(new IntegerCPEntry(value)); } private int addFloatCPEntry(double value) { return cp.addCPEntry(new FloatCPEntry(value)); } private int addStringCPEntry(String value) { return cp.addCPEntry(new StringCPEntry(value)); } private int addByteCPEntry(int value) { return cp.addCPEntry(new ByteCPEntry(value)); } private void writeType(ByteBuf buf, BType type) { buf.writeInt(cp.addShapeCPEntry(type)); } void writeAnnotAttachments(ByteBuf buff, BIRInstructionWriter insWriter, List<BIRAnnotationAttachment> annotAttachments) { ByteBuf annotBuf = Unpooled.buffer(); annotBuf.writeInt(annotAttachments.size()); for (BIRAnnotationAttachment annotAttachment : annotAttachments) { writeAnnotAttachment(annotBuf, insWriter, annotAttachment); } int length = annotBuf.nioBuffer().limit(); buff.writeLong(length); buff.writeBytes(annotBuf.nioBuffer().array(), 0, length); } private void writeAnnotAttachment(ByteBuf annotBuf, BIRInstructionWriter insWriter, BIRAnnotationAttachment annotAttachment) { annotBuf.writeInt(insWriter.addPkgCPEntry(annotAttachment.packageID)); insWriter.writePosition(annotBuf, annotAttachment.pos); annotBuf.writeInt(addStringCPEntry(annotAttachment.annotTagRef.value)); 
writeAnnotAttachValues(annotBuf, annotAttachment.annotValues); } private void writeAnnotAttachValues(ByteBuf annotBuf, List<BIRAnnotationValue> annotValues) { annotBuf.writeInt(annotValues.size()); for (BIRAnnotationValue annotValue : annotValues) { writeAnnotAttachValue(annotBuf, annotValue); } } private void writeAnnotAttachValue(ByteBuf annotBuf, BIRAnnotationValue annotValue) { if (annotValue.type.tag == TypeTags.ARRAY) { writeType(annotBuf, annotValue.type); BIRAnnotationArrayValue annotArrayValue = (BIRAnnotationArrayValue) annotValue; annotBuf.writeInt(annotArrayValue.annotArrayValue.length); for (BIRAnnotationValue annotValueEntry : annotArrayValue.annotArrayValue) { writeAnnotAttachValue(annotBuf, annotValueEntry); } } else if (annotValue.type.tag == TypeTags.RECORD || annotValue.type.tag == TypeTags.MAP) { writeType(annotBuf, annotValue.type); BIRAnnotationRecordValue annotRecValue = (BIRAnnotationRecordValue) annotValue; annotBuf.writeInt(annotRecValue.annotValueEntryMap.size()); for (Map.Entry<String, BIRAnnotationValue> annotValueEntry : annotRecValue.annotValueEntryMap.entrySet()) { annotBuf.writeInt(addStringCPEntry(annotValueEntry.getKey())); writeAnnotAttachValue(annotBuf, annotValueEntry.getValue()); } } else { BIRAnnotationLiteralValue annotLiteralValue = (BIRAnnotationLiteralValue) annotValue; writeConstValue(annotBuf, new ConstValue(annotLiteralValue.value, annotLiteralValue.type)); } } private void writePosition(ByteBuf buf, DiagnosticPos pos) { buf.writeInt(addStringCPEntry(pos.src.getCompilationUnitName())); buf.writeInt(pos.sLine); buf.writeInt(pos.sCol); buf.writeInt(pos.eLine); buf.writeInt(pos.eCol); } }
`writeCallMetric` and `readCallMetric` look almost identical except `METHOD` field. Is it possible to refactor the common part into a separate method?
public static ServiceCallMetric writeCallMetric(TableReference tableReference) { if (tableReference != null) { HashMap<String, String> baseLabels = new HashMap<String, String>(); baseLabels.put(MonitoringInfoConstants.Labels.PTRANSFORM, ""); baseLabels.put(MonitoringInfoConstants.Labels.SERVICE, "BigQuery"); baseLabels.put(MonitoringInfoConstants.Labels.METHOD, "BigQueryBatchWrite"); baseLabels.put( MonitoringInfoConstants.Labels.RESOURCE, GcpResourceIdentifiers.bigQueryTable( tableReference.getProjectId(), tableReference.getDatasetId(), tableReference.getTableId())); baseLabels.put( MonitoringInfoConstants.Labels.BIGQUERY_PROJECT_ID, tableReference.getProjectId()); baseLabels.put( MonitoringInfoConstants.Labels.BIGQUERY_DATASET, tableReference.getDatasetId()); baseLabels.put(MonitoringInfoConstants.Labels.BIGQUERY_TABLE, tableReference.getTableId()); return new ServiceCallMetric(MonitoringInfoConstants.Urns.API_REQUEST_COUNT, baseLabels); } return null; }
baseLabels.put(MonitoringInfoConstants.Labels.PTRANSFORM, "");
public static ServiceCallMetric writeCallMetric(TableReference tableReference) { return callMetricForMethod(tableReference, "BigQueryBatchWrite"); }
class ToTableRow<T> implements SerializableFunction<T, TableRow> { private final SerializableFunction<T, Row> toRow; ToTableRow(SerializableFunction<T, Row> toRow) { this.toRow = toRow; } @Override public TableRow apply(T input) { return toTableRow(toRow.apply(input)); } }
class ToTableRow<T> implements SerializableFunction<T, TableRow> { private final SerializableFunction<T, Row> toRow; ToTableRow(SerializableFunction<T, Row> toRow) { this.toRow = toRow; } @Override public TableRow apply(T input) { return toTableRow(toRow.apply(input)); } }
I'm still not convinced about this test. It's reimplementing production code in many aspects. For example exception handling (this exception ignoring during cancelation/closing) and threading model from `Task`/`StreamTask`, so at any time we modify those, this test has a chance to fail. Also I'm pretty sure there are `ITCase`s testing for that and the logic there is simpler and more stable manner.
/**
 * Creates a task that reads the recovered state into the given channel.
 *
 * <p>An exception thrown by {@code readRecoveredState} is acceptable only when the
 * channel was concurrently released (and {@code verifyRelease} is set); any other
 * exception is rethrown as an {@link AssertionError} that preserves the original
 * throwable as its cause. The previous {@code assertTrue(...)} discarded {@code t},
 * so an unexpected failure surfaced without its stack trace.
 *
 * @param inputChannel the channel to recover state into
 * @param reader the state reader supplying the recovered buffers
 * @param verifyRelease whether a concurrent release of the channel is expected
 */
private Callable<Void> readRecoveredStateTask(RecoveredInputChannel inputChannel, ChannelStateReader reader, boolean verifyRelease) {
    return () -> {
        try {
            inputChannel.readRecoveredState(reader);
        } catch (Throwable t) {
            if (!(verifyRelease && inputChannel.isReleased())) {
                throw new AssertionError("Exceptions are expected here only if the input channel was released", t);
            }
        }
        return null;
    };
}
assertTrue("The expected exception should only happen in the case of released channel.", verifyRelease && inputChannel.isReleased());
/**
 * Creates a task that reads the recovered state into the given channel.
 *
 * <p>An exception is tolerated only when the channel was concurrently released and
 * {@code verifyRelease} is set; otherwise it is rethrown as an {@link AssertionError}
 * carrying the original throwable as its cause.
 *
 * @param inputChannel the channel to recover state into
 * @param reader the state reader supplying the recovered buffers
 * @param verifyRelease whether a concurrent release of the channel is expected
 */
private Callable<Void> readRecoveredStateTask(RecoveredInputChannel inputChannel, ChannelStateReader reader, boolean verifyRelease) {
    return () -> {
        try {
            inputChannel.readRecoveredState(reader);
        } catch (Throwable t) {
            // Unexpected failure: surface it with the cause attached.
            if (!(verifyRelease && inputChannel.isReleased())) {
                throw new AssertionError("Exceptions are expected here only if the input channel was released", t);
            }
        }
        return null;
    };
}
class RecoveredInputChannelTest { private final boolean isRemote; @Parameterized.Parameters(name = "isRemote = {0}") public static Collection<Object[]> parameters() { return Arrays.asList(new Object[][] { {true}, {false}, }); } public RecoveredInputChannelTest(boolean isRemote) { this.isRemote = isRemote; } @Test public void testConcurrentReadStateAndProcess() throws Exception { testConcurrentReadStateAndProcess(isRemote); } @Test public void testConcurrentReadStateAndRelease() throws Exception { testConcurrentReadStateAndRelease(isRemote); } /** * Tests that there are no potential deadlock and buffer leak issues while the following actions happen concurrently: * 1. Task thread processes the recovered state buffer from RecoveredInputChannel. * 2. Unspilling IO thread reads the recovered state and queues the buffer into RecoveredInputChannel. * 3. Canceler thread closes the input gate and releases the RecoveredInputChannel. */ @Test public void testConcurrentReadStateAndProcessAndRelease() throws Exception { testConcurrentReadStateAndProcessAndRelease(isRemote); } /** * Tests that there are no buffers leak while recovering the empty input channel state. */ @Test public void testReadEmptyState() throws Exception { testReadEmptyStateOrThrowException(isRemote, ChannelStateReader.NO_OP); } /** * Tests that there are no buffers leak while throwing exception during state recovery. 
*/ @Test(expected = IOException.class) public void testReadStateWithException() throws Exception { testReadEmptyStateOrThrowException(isRemote, new ChannelStateReaderWithException()); } private void testReadEmptyStateOrThrowException(boolean isRemote, ChannelStateReader reader) throws Exception { final int totalBuffers = 10; final NetworkBufferPool globalPool = new NetworkBufferPool(totalBuffers, 32, 2); final SingleInputGate inputGate = createInputGate(globalPool); final RecoveredInputChannel inputChannel = createRecoveredChannel(isRemote, inputGate); try { inputGate.setInputChannels(inputChannel); inputGate.setup(); inputChannel.readRecoveredState(reader); assertEquals(1, inputChannel.getNumberOfQueuedBuffers()); assertFalse(inputChannel.getNextBuffer().isPresent()); assertTrue(inputChannel.getStateConsumedFuture().isDone()); } finally { inputGate.close(); globalPool.destroyAllBufferPools(); assertEquals(totalBuffers, globalPool.getNumberOfAvailableMemorySegments()); globalPool.destroy(); } } /** * Tests that the process of reading recovered state executes concurrently with channel * buffer processing, based on the condition of the total number of states is more that * the total buffer amount, to confirm that the lifecycle(recycle) of exclusive/floating * buffers works well. 
*/ private void testConcurrentReadStateAndProcess(boolean isRemote) throws Exception { final int totalBuffers = 10; final NetworkBufferPool globalPool = new NetworkBufferPool(totalBuffers, 32, 2); final SingleInputGate inputGate = createInputGate(globalPool); final RecoveredInputChannel inputChannel = createRecoveredChannel(isRemote, inputGate); final int totalStates = 15; final int[] states = {1, 2, 3, 4}; final ChannelStateReader reader = new ResultPartitionTest.FiniteChannelStateReader(totalStates, states); final ExecutorService executor = Executors.newFixedThreadPool(2); Throwable thrown = null; try { inputGate.setInputChannels(inputChannel); inputGate.setup(); final Callable<Void> processTask = processRecoveredBufferTask(inputChannel, totalStates, states, false); final Callable<Void> readStateTask = readRecoveredStateTask(inputChannel, reader, false); submitTasksAndWaitForResults(executor, new Callable[] {readStateTask, processTask}); } catch (Throwable t) { thrown = t; } finally { cleanup(globalPool, executor, null, thrown, inputChannel); } } private void testConcurrentReadStateAndRelease(boolean isRemote) throws Exception { final int totalBuffers = 10; final NetworkBufferPool globalPool = new NetworkBufferPool(totalBuffers, 32, 2); final SingleInputGate inputGate = createInputGate(globalPool); final RecoveredInputChannel inputChannel = createRecoveredChannel(isRemote, inputGate); final int totalStates = 15; final int[] states = {1, 2, 3, 4}; final ChannelStateReader reader = new ResultPartitionTest.FiniteChannelStateReader(totalStates, states); final ExecutorService executor = Executors.newFixedThreadPool(2); Throwable thrown = null; try { inputGate.setInputChannels(inputChannel); inputGate.setup(); submitTasksAndWaitForResults( executor, new Callable[] {readRecoveredStateTask(inputChannel, reader, true), releaseChannelTask(inputChannel)}); } catch (Throwable t) { thrown = t; } finally { cleanup(globalPool, executor, null, thrown, inputChannel); } } private 
void testConcurrentReadStateAndProcessAndRelease(boolean isRemote) throws Exception { final int totalBuffers = 10; final NetworkBufferPool globalPool = new NetworkBufferPool(totalBuffers, 32, 2); final SingleInputGate inputGate = createInputGate(globalPool); final RecoveredInputChannel inputChannel = createRecoveredChannel(isRemote, inputGate); final int totalStates = 15; final int[] states = {1, 2, 3, 4}; final ChannelStateReader reader = new ResultPartitionTest.FiniteChannelStateReader(totalStates, states); final ExecutorService executor = Executors.newFixedThreadPool(2); Throwable thrown = null; try { inputGate.setInputChannels(inputChannel); inputGate.setup(); final Callable<Void> processTask = processRecoveredBufferTask(inputChannel, totalStates, states, true); final Callable<Void> readStateTask = readRecoveredStateTask(inputChannel, reader, true); final Callable<Void> releaseTask = releaseChannelTask(inputChannel); submitTasksAndWaitForResults(executor, new Callable[] {readStateTask, processTask, releaseTask}); } catch (Throwable t) { thrown = t; } finally { cleanup(globalPool, executor, null, thrown, inputChannel); } } private Callable<Void> processRecoveredBufferTask(RecoveredInputChannel inputChannel, int totalStates, int[] states, boolean verifyRelease) { return () -> { int numProcessedStates = 0; while (numProcessedStates < totalStates) { if (verifyRelease && inputChannel.isReleased()) { break; } if (inputChannel.getNumberOfQueuedBuffers() == 0) { Thread.sleep(1); continue; } try { Optional<BufferAndAvailability> bufferAndAvailability = inputChannel.getNextBuffer(); if (bufferAndAvailability.isPresent()) { Buffer buffer = bufferAndAvailability.get().buffer(); BufferBuilderAndConsumerTest.assertContent(buffer, null, states); buffer.recycleBuffer(); numProcessedStates++; } } catch (Throwable t) { assertTrue("The expected exception should only happen in the case of released channel.", verifyRelease && inputChannel.isReleased()); } } return null; }; } 
private Callable<Void> releaseChannelTask(RecoveredInputChannel inputChannel) { return () -> { inputChannel.releaseAllResources(); return null; }; } private RecoveredInputChannel createRecoveredChannel(boolean isRemote, SingleInputGate gate) { if (isRemote) { return new InputChannelBuilder().buildRemoteRecoveredChannel(gate); } else { return new InputChannelBuilder().buildLocalRecoveredChannel(gate); } } private SingleInputGate createInputGate(NetworkBufferPool globalPool) throws Exception { return new SingleInputGateBuilder() .setBufferPoolFactory(globalPool.createBufferPool(8, 8)) .setSegmentProvider(globalPool) .build(); } }
/*
 * Parameterized (local vs. remote channel) tests for RecoveredInputChannel covering the
 * buffer lifecycle during state recovery: empty state, reader failure, and concurrent
 * read/process/release interleavings. Unexpected exceptions in the worker Callables are
 * rethrown as AssertionError with the original throwable as cause; exceptions are
 * tolerated only when the channel was concurrently released. Relies on sibling helpers
 * (readRecoveredStateTask, submitTasksAndWaitForResults, cleanup) defined elsewhere in
 * this file.
 */
class RecoveredInputChannelTest { private final boolean isRemote; @Parameterized.Parameters(name = "isRemote = {0}") public static Collection<Object[]> parameters() { return Arrays.asList(new Object[][] { {true}, {false}, }); } public RecoveredInputChannelTest(boolean isRemote) { this.isRemote = isRemote; } @Test public void testConcurrentReadStateAndProcess() throws Exception { testConcurrentReadStateAndProcess(isRemote); } @Test public void testConcurrentReadStateAndRelease() throws Exception { testConcurrentReadStateAndRelease(isRemote); } /** * Tests that there are no potential deadlock and buffer leak issues while the following actions happen concurrently: * 1. Task thread processes the recovered state buffer from RecoveredInputChannel. * 2. Unspilling IO thread reads the recovered state and queues the buffer into RecoveredInputChannel. * 3. Canceler thread closes the input gate and releases the RecoveredInputChannel. */ @Test public void testConcurrentReadStateAndProcessAndRelease() throws Exception { testConcurrentReadStateAndProcessAndRelease(isRemote); } /** * Tests that there are no buffers leak while recovering the empty input channel state. */ @Test public void testReadEmptyState() throws Exception { testReadEmptyStateOrThrowException(isRemote, ChannelStateReader.NO_OP); } /** * Tests that there are no buffers leak while throwing exception during state recovery. 
*/ @Test(expected = IOException.class) public void testReadStateWithException() throws Exception { testReadEmptyStateOrThrowException(isRemote, new ChannelStateReaderWithException()); } private void testReadEmptyStateOrThrowException(boolean isRemote, ChannelStateReader reader) throws Exception { final int totalBuffers = 10; final NetworkBufferPool globalPool = new NetworkBufferPool(totalBuffers, 32, 2); final SingleInputGate inputGate = createInputGate(globalPool); final RecoveredInputChannel inputChannel = createRecoveredChannel(isRemote, inputGate); try { inputGate.setInputChannels(inputChannel); inputGate.setup(); inputChannel.readRecoveredState(reader); assertEquals(1, inputChannel.getNumberOfQueuedBuffers()); assertFalse(inputChannel.getNextBuffer().isPresent()); assertTrue(inputChannel.getStateConsumedFuture().isDone()); } finally { inputGate.close(); globalPool.destroyAllBufferPools(); assertEquals(totalBuffers, globalPool.getNumberOfAvailableMemorySegments()); globalPool.destroy(); } } /** * Tests that the process of reading recovered state executes concurrently with channel * buffer processing, based on the condition of the total number of states is more that * the total buffer amount, to confirm that the lifecycle(recycle) of exclusive/floating * buffers works well. 
*/ private void testConcurrentReadStateAndProcess(boolean isRemote) throws Exception { final int totalBuffers = 10; final NetworkBufferPool globalPool = new NetworkBufferPool(totalBuffers, 32, 2); final SingleInputGate inputGate = createInputGate(globalPool); final RecoveredInputChannel inputChannel = createRecoveredChannel(isRemote, inputGate); final int totalStates = 15; final int[] states = {1, 2, 3, 4}; final ChannelStateReader reader = new ResultPartitionTest.FiniteChannelStateReader(totalStates, states); final ExecutorService executor = Executors.newFixedThreadPool(2); Throwable thrown = null; try { inputGate.setInputChannels(inputChannel); inputGate.setup(); final Callable<Void> processTask = processRecoveredBufferTask(inputChannel, totalStates, states, false); final Callable<Void> readStateTask = readRecoveredStateTask(inputChannel, reader, false); submitTasksAndWaitForResults(executor, new Callable[] {readStateTask, processTask}); } catch (Throwable t) { thrown = t; } finally { cleanup(globalPool, executor, null, thrown, inputChannel); } } private void testConcurrentReadStateAndRelease(boolean isRemote) throws Exception { final int totalBuffers = 10; final NetworkBufferPool globalPool = new NetworkBufferPool(totalBuffers, 32, 2); final SingleInputGate inputGate = createInputGate(globalPool); final RecoveredInputChannel inputChannel = createRecoveredChannel(isRemote, inputGate); final int totalStates = 15; final int[] states = {1, 2, 3, 4}; final ChannelStateReader reader = new ResultPartitionTest.FiniteChannelStateReader(totalStates, states); final ExecutorService executor = Executors.newFixedThreadPool(2); Throwable thrown = null; try { inputGate.setInputChannels(inputChannel); inputGate.setup(); submitTasksAndWaitForResults( executor, new Callable[] {readRecoveredStateTask(inputChannel, reader, true), releaseChannelTask(inputChannel)}); } catch (Throwable t) { thrown = t; } finally { cleanup(globalPool, executor, null, thrown, inputChannel); } } private 
void testConcurrentReadStateAndProcessAndRelease(boolean isRemote) throws Exception { final int totalBuffers = 10; final NetworkBufferPool globalPool = new NetworkBufferPool(totalBuffers, 32, 2); final SingleInputGate inputGate = createInputGate(globalPool); final RecoveredInputChannel inputChannel = createRecoveredChannel(isRemote, inputGate); final int totalStates = 15; final int[] states = {1, 2, 3, 4}; final ChannelStateReader reader = new ResultPartitionTest.FiniteChannelStateReader(totalStates, states); final ExecutorService executor = Executors.newFixedThreadPool(2); Throwable thrown = null; try { inputGate.setInputChannels(inputChannel); inputGate.setup(); final Callable<Void> processTask = processRecoveredBufferTask(inputChannel, totalStates, states, true); final Callable<Void> readStateTask = readRecoveredStateTask(inputChannel, reader, true); final Callable<Void> releaseTask = releaseChannelTask(inputChannel); submitTasksAndWaitForResults(executor, new Callable[] {readStateTask, processTask, releaseTask}); } catch (Throwable t) { thrown = t; } finally { cleanup(globalPool, executor, null, thrown, inputChannel); } } private Callable<Void> processRecoveredBufferTask(RecoveredInputChannel inputChannel, int totalStates, int[] states, boolean verifyRelease) { return () -> { int numProcessedStates = 0; while (numProcessedStates < totalStates) { if (verifyRelease && inputChannel.isReleased()) { break; } if (inputChannel.getNumberOfQueuedBuffers() == 0) { Thread.sleep(1); continue; } try { Optional<BufferAndAvailability> bufferAndAvailability = inputChannel.getNextBuffer(); if (bufferAndAvailability.isPresent()) { Buffer buffer = bufferAndAvailability.get().buffer(); BufferBuilderAndConsumerTest.assertContent(buffer, null, states); buffer.recycleBuffer(); numProcessedStates++; } } catch (Throwable t) { if (!(verifyRelease && inputChannel.isReleased())) { throw new AssertionError("Exceptions are expected here only if the input channel was released", t); } } } 
return null; }; } private Callable<Void> releaseChannelTask(RecoveredInputChannel inputChannel) { return () -> { inputChannel.releaseAllResources(); return null; }; } private RecoveredInputChannel createRecoveredChannel(boolean isRemote, SingleInputGate gate) { if (isRemote) { return new InputChannelBuilder().buildRemoteRecoveredChannel(gate); } else { return new InputChannelBuilder().buildLocalRecoveredChannel(gate); } } private SingleInputGate createInputGate(NetworkBufferPool globalPool) throws Exception { return new SingleInputGateBuilder() .setBufferPoolFactory(globalPool.createBufferPool(8, 8)) .setSegmentProvider(globalPool) .build(); } }
True. Maybe we need some more methods :)
/*
 * Assembles the complete native-image command line from the build-time configuration,
 * system-property build items, GraalVM version checks and feature/exclude-config items.
 * Ordering is significant: every argument added before handleAdditionalProperties(...)
 * can be overridden via quarkus.native.additional-build-args, while arguments added
 * afterwards are forced over user-provided ones.
 * NOTE(review): version gates mix isOlderThan/isNewerThan/is/compareTo(...) >= 0;
 * consider a single "at least" helper on GraalVM.Version for consistency — confirm
 * the exact comparison semantics before changing.
 */
public NativeImageInvokerInfo build() { List<String> nativeImageArgs = new ArrayList<>(); boolean enableSslNative = false; boolean inlineBeforeAnalysis = nativeConfig.inlineBeforeAnalysis; boolean addAllCharsets = nativeConfig.addAllCharsets; boolean enableHttpsUrlHandler = nativeConfig.enableHttpsUrlHandler; for (NativeImageSystemPropertyBuildItem prop : nativeImageProperties) { if (prop.getKey().equals("quarkus.ssl.native") && prop.getValue() != null) { enableSslNative = Boolean.parseBoolean(prop.getValue()); } else if (prop.getKey().equals("quarkus.jni.enable") && prop.getValue().equals("false")) { log.warn("Your application is setting the deprecated 'quarkus.jni.enable' configuration key to false." + " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it" + " will be removed in a future Quarkus version."); } else if (prop.getKey().equals("quarkus.native.enable-all-security-services") && prop.getValue() != null) { log.warn( "Your application is setting the deprecated 'quarkus.native.enable-all-security-services' configuration key." 
+ " Please consider removing this configuration key as it is ignored and it" + " will be removed in a future Quarkus version."); } else if (prop.getKey().equals("quarkus.native.enable-all-charsets") && prop.getValue() != null) { addAllCharsets |= Boolean.parseBoolean(prop.getValue()); } else if (prop.getKey().equals("quarkus.native.inline-before-analysis") && prop.getValue() != null) { inlineBeforeAnalysis = Boolean.parseBoolean(prop.getValue()); } else { if (prop.getValue() == null) { nativeImageArgs.add("-J-D" + prop.getKey()); } else { nativeImageArgs.add("-J-D" + prop.getKey() + "=" + prop.getValue()); } } } final String userLanguage = LocaleProcessor.nativeImageUserLanguage(nativeConfig, localesBuildTimeConfig); if (!userLanguage.isEmpty()) { nativeImageArgs.add("-J-Duser.language=" + userLanguage); } final String userCountry = LocaleProcessor.nativeImageUserCountry(nativeConfig, localesBuildTimeConfig); if (!userCountry.isEmpty()) { nativeImageArgs.add("-J-Duser.country=" + userCountry); } final String includeLocales = LocaleProcessor.nativeImageIncludeLocales(nativeConfig, localesBuildTimeConfig); if (!includeLocales.isEmpty()) { nativeImageArgs.add("-H:IncludeLocales=" + includeLocales); } nativeImageArgs.add("-J-Dfile.encoding=" + nativeConfig.fileEncoding); if (enableSslNative) { enableHttpsUrlHandler = true; } if (nativeImageFeatures == null || nativeImageFeatures.isEmpty()) { throw new IllegalStateException("GraalVM features can't be empty, quarkus core is using some."); } List<String> featuresList = new ArrayList<>(nativeImageFeatures.size()); for (NativeImageFeatureBuildItem nativeImageFeature : nativeImageFeatures) { featuresList.add(nativeImageFeature.getQualifiedName()); } nativeImageArgs.add("--features=" + String.join(",", featuresList)); /* * Instruct GraalVM / Mandrel parse compiler graphs twice, once for the static analysis and once again * for the AOT compilation. 
* * We do this because single parsing significantly increases memory usage at build time * see https: * https: * * Note: This option must come before the invocation of * {@code handleAdditionalProperties(nativeImageArgs)} to ensure that devs and advanced users can * override it by passing -Dquarkus.native.additional-build-args=-H:+ParseOnce */ nativeImageArgs.add("-H:-ParseOnce"); /** * This makes sure the Kerberos integration module is made available in case any library * refers to it (e.g. the PostgreSQL JDBC requires it, seems plausible that many others will as well): * the module is not available by default on Java 17. * No flag was introduced as this merely exposes the visibility of the module, it doesn't * control its actual inclusion which will depend on the usual analysis. */ nativeImageArgs.add("-J--add-exports=java.security.jgss/sun.security.krb5=ALL-UNNAMED"); nativeImageArgs.add("-J--add-opens=java.base/java.text=ALL-UNNAMED"); if (nativeConfig.enableReports) { if (graalVMVersion.isOlderThan(GraalVM.Version.VERSION_21_3_2)) { nativeImageArgs.add("-H:+PrintAnalysisCallTree"); } else { nativeImageArgs.add("-H:PrintAnalysisCallTreeType=CSV"); } } if (graalVMVersion.compareTo(GraalVM.Version.VERSION_22_3_0) >= 0) { nativeImageArgs.add("-H:+CollectImageBuildStatistics"); nativeImageArgs.add("-H:ImageBuildStatisticsFile=" + nativeImageName + "-timing-stats.json"); nativeImageArgs.add("-H:BuildOutputJSONFile=" + nativeImageName + "-build-output-stats.json"); } /* * Any parameters following this call are forced over the user provided parameters in * quarkus.native.additional-build-args. So if you need a parameter to be overridable through * quarkus.native.additional-build-args please make sure to add it before this call. 
*/ handleAdditionalProperties(nativeImageArgs); nativeImageArgs.add( "-H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime"); nativeImageArgs.add("-H:+JNI"); nativeImageArgs.add("-H:+AllowFoldMethods"); if (nativeConfig.headless) { nativeImageArgs.add("-J-Djava.awt.headless=true"); } if (nativeConfig.enableFallbackImages) { nativeImageArgs.add("-H:FallbackThreshold=5"); } else { nativeImageArgs.add("-H:FallbackThreshold=0"); } if (graalVMVersion.isNewerThan(GraalVM.Version.VERSION_22_0_0_2) && !classpathIsBroken) { nativeImageArgs.add("--link-at-build-time"); } else if (!graalVMVersion.isNewerThan(GraalVM.Version.VERSION_22_0_0_2) && classpathIsBroken) { nativeImageArgs.add("--allow-incomplete-classpath"); } if (nativeConfig.reportErrorsAtRuntime) { nativeImageArgs.add("-H:+ReportUnsupportedElementsAtRuntime"); } if (nativeConfig.reportExceptionStackTraces) { nativeImageArgs.add("-H:+ReportExceptionStackTraces"); } if (nativeConfig.debug.enabled) { nativeImageArgs.add("-g"); nativeImageArgs.add("-H:DebugInfoSourceSearchPath=" + APP_SOURCES); } if (nativeConfig.debugBuildProcess) { String debugBuildProcessHost; if (nativeConfig.isContainerBuild()) { debugBuildProcessHost = "0.0.0.0"; } else { debugBuildProcessHost = "localhost"; } nativeImageArgs .add("-J-Xrunjdwp:transport=dt_socket,address=" + debugBuildProcessHost + ":" + DEBUG_BUILD_PROCESS_PORT + ",server=y,suspend=y"); } if (nativeConfig.dumpProxies) { nativeImageArgs.add("-Dsun.misc.ProxyGenerator.saveGeneratedFiles=true"); } if (nativeConfig.nativeImageXmx.isPresent()) { nativeImageArgs.add("-J-Xmx" + nativeConfig.nativeImageXmx.get()); } List<String> protocols = new ArrayList<>(2); if (nativeConfig.enableHttpUrlHandler) { protocols.add("http"); } if (enableHttpsUrlHandler) { protocols.add("https"); } if (addAllCharsets) { nativeImageArgs.add("-H:+AddAllCharsets"); } else { nativeImageArgs.add("-H:-AddAllCharsets"); } if (!protocols.isEmpty()) { 
nativeImageArgs.add("-H:EnableURLProtocols=" + String.join(",", protocols)); } if (inlineBeforeAnalysis) { if (graalVMVersion.isOlderThan(GraalVM.Version.VERSION_21_3)) { nativeImageArgs.add("-H:+InlineBeforeAnalysis"); } } else { nativeImageArgs.add("-H:-InlineBeforeAnalysis"); } if (!noPIE.isEmpty()) { nativeImageArgs.add("-H:NativeLinkerOption=" + noPIE); } if (!nativeConfig.enableIsolates) { nativeImageArgs.add("-H:-SpawnIsolates"); } if (!nativeConfig.enableJni) { log.warn( "Your application is setting the deprecated 'quarkus.native.enable-jni' configuration key to false." + " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it" + " will be removed in a future Quarkus version."); } if (nativeConfig.enableServer) { log.warn( "Your application is setting the deprecated 'quarkus.native.enable-server' configuration key to true." + " Please consider removing this configuration key as it is ignored" + " (The Native image build server is always disabled) and it" + " will be removed in a future Quarkus version."); } if (nativeConfig.enableVmInspection) { nativeImageArgs.add("-H:+AllowVMInspection"); } if (nativeConfig.autoServiceLoaderRegistration) { nativeImageArgs.add("-H:+UseServiceLoaderFeature"); nativeImageArgs.add("-H:+TraceServiceLoaderFeature"); } else { nativeImageArgs.add("-H:-UseServiceLoaderFeature"); } if (nativeConfig.fullStackTraces) { nativeImageArgs.add("-H:+StackTrace"); } else { nativeImageArgs.add("-H:-StackTrace"); } if (nativeConfig.enableDashboardDump) { nativeImageArgs.add("-H:DashboardDump=" + outputTargetBuildItem.getBaseName() + "_dashboard.dump"); nativeImageArgs.add("-H:+DashboardAll"); } if (nativeImageSecurityProviders != null && !nativeImageSecurityProviders.isEmpty()) { String additionalSecurityProviders = nativeImageSecurityProviders.stream() .map(p -> p.getSecurityProvider()) .collect(Collectors.joining(",")); nativeImageArgs.add("-H:AdditionalSecurityProviders=" + 
additionalSecurityProviders); } if (jpmsExports != null) { HashSet<JPMSExportBuildItem> deduplicatedJpmsExport = new HashSet<>(jpmsExports); for (JPMSExportBuildItem jpmsExport : deduplicatedJpmsExport) { if (graalVMVersion.isNewerThan(jpmsExport.getExportAfter())) { nativeImageArgs.add( "-J--add-exports=" + jpmsExport.getModule() + "/" + jpmsExport.getPackage() + "=ALL-UNNAMED"); } } } if (nativeMinimalJavaVersions != null && !nativeMinimalJavaVersions.isEmpty()) { if (graalVMVersion.javaUpdateVersion == GraalVM.Version.UNDEFINED) { log.warnf( "Unable to parse used Java version from native-image version string `%s'. Java version checks will be skipped.", graalVMVersion.fullVersion); } else { nativeMinimalJavaVersions.stream() .filter(a -> !graalVMVersion.jdkVersionGreaterOrEqualTo(a.minFeature, a.minUpdate)) .forEach(a -> log.warnf("Expected: Java %d, update %d, Actual: Java %d, update %d. %s", a.minFeature, a.minUpdate, graalVMVersion.javaFeatureVersion, graalVMVersion.javaUpdateVersion, a.warning)); } } if (unsupportedOSes != null && !unsupportedOSes.isEmpty()) { final String errs = unsupportedOSes.stream().filter(o -> o.os.active).map(o -> o.error) .collect(Collectors.joining(", ")); if (!errs.isEmpty()) { throw new UnsupportedOperationException(errs); } } for (ExcludeConfigBuildItem excludeConfig : excludeConfigs) { nativeImageArgs.add("--exclude-config"); nativeImageArgs.add(excludeConfig.getJarFile()); nativeImageArgs.add(excludeConfig.getResourceName()); } if (graalVMVersion.is(GraalVM.Version.VERSION_21_3_0) && graalVMVersion.isJava17()) { nativeImageArgs.add("-J--add-exports=java.management/sun.management=ALL-UNNAMED"); } nativeImageArgs.add(nativeImageName); nativeImageArgs.add("-jar"); nativeImageArgs.add(runnerJarName); return new NativeImageInvokerInfo(nativeImageArgs); }
if (graalVMVersion.compareTo(GraalVM.Version.VERSION_22_3_0) >= 0) {
/*
 * Assembles the complete native-image command line from the build-time configuration,
 * system-property build items, GraalVM version checks and feature/exclude-config items.
 * Ordering is significant: every argument added before handleAdditionalProperties(...)
 * can be overridden via quarkus.native.additional-build-args, while arguments added
 * afterwards are forced over user-provided ones.
 * NOTE(review): version gates mix isOlderThan/isNewerThan/is/compareTo(...) >= 0;
 * consider a single "at least" helper on GraalVM.Version for consistency — confirm
 * the exact comparison semantics before changing.
 */
public NativeImageInvokerInfo build() { List<String> nativeImageArgs = new ArrayList<>(); boolean enableSslNative = false; boolean inlineBeforeAnalysis = nativeConfig.inlineBeforeAnalysis; boolean addAllCharsets = nativeConfig.addAllCharsets; boolean enableHttpsUrlHandler = nativeConfig.enableHttpsUrlHandler; for (NativeImageSystemPropertyBuildItem prop : nativeImageProperties) { if (prop.getKey().equals("quarkus.ssl.native") && prop.getValue() != null) { enableSslNative = Boolean.parseBoolean(prop.getValue()); } else if (prop.getKey().equals("quarkus.jni.enable") && prop.getValue().equals("false")) { log.warn("Your application is setting the deprecated 'quarkus.jni.enable' configuration key to false." + " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it" + " will be removed in a future Quarkus version."); } else if (prop.getKey().equals("quarkus.native.enable-all-security-services") && prop.getValue() != null) { log.warn( "Your application is setting the deprecated 'quarkus.native.enable-all-security-services' configuration key." 
+ " Please consider removing this configuration key as it is ignored and it" + " will be removed in a future Quarkus version."); } else if (prop.getKey().equals("quarkus.native.enable-all-charsets") && prop.getValue() != null) { addAllCharsets |= Boolean.parseBoolean(prop.getValue()); } else if (prop.getKey().equals("quarkus.native.inline-before-analysis") && prop.getValue() != null) { inlineBeforeAnalysis = Boolean.parseBoolean(prop.getValue()); } else { if (prop.getValue() == null) { nativeImageArgs.add("-J-D" + prop.getKey()); } else { nativeImageArgs.add("-J-D" + prop.getKey() + "=" + prop.getValue()); } } } final String userLanguage = LocaleProcessor.nativeImageUserLanguage(nativeConfig, localesBuildTimeConfig); if (!userLanguage.isEmpty()) { nativeImageArgs.add("-J-Duser.language=" + userLanguage); } final String userCountry = LocaleProcessor.nativeImageUserCountry(nativeConfig, localesBuildTimeConfig); if (!userCountry.isEmpty()) { nativeImageArgs.add("-J-Duser.country=" + userCountry); } final String includeLocales = LocaleProcessor.nativeImageIncludeLocales(nativeConfig, localesBuildTimeConfig); if (!includeLocales.isEmpty()) { nativeImageArgs.add("-H:IncludeLocales=" + includeLocales); } nativeImageArgs.add("-J-Dfile.encoding=" + nativeConfig.fileEncoding); if (enableSslNative) { enableHttpsUrlHandler = true; } if (nativeImageFeatures == null || nativeImageFeatures.isEmpty()) { throw new IllegalStateException("GraalVM features can't be empty, quarkus core is using some."); } List<String> featuresList = new ArrayList<>(nativeImageFeatures.size()); for (NativeImageFeatureBuildItem nativeImageFeature : nativeImageFeatures) { featuresList.add(nativeImageFeature.getQualifiedName()); } nativeImageArgs.add("--features=" + String.join(",", featuresList)); /* * Instruct GraalVM / Mandrel parse compiler graphs twice, once for the static analysis and once again * for the AOT compilation. 
* * We do this because single parsing significantly increases memory usage at build time * see https: * https: * * Note: This option must come before the invocation of * {@code handleAdditionalProperties(nativeImageArgs)} to ensure that devs and advanced users can * override it by passing -Dquarkus.native.additional-build-args=-H:+ParseOnce */ nativeImageArgs.add("-H:-ParseOnce"); /** * This makes sure the Kerberos integration module is made available in case any library * refers to it (e.g. the PostgreSQL JDBC requires it, seems plausible that many others will as well): * the module is not available by default on Java 17. * No flag was introduced as this merely exposes the visibility of the module, it doesn't * control its actual inclusion which will depend on the usual analysis. */ nativeImageArgs.add("-J--add-exports=java.security.jgss/sun.security.krb5=ALL-UNNAMED"); nativeImageArgs.add("-J--add-opens=java.base/java.text=ALL-UNNAMED"); if (nativeConfig.enableReports) { if (graalVMVersion.isOlderThan(GraalVM.Version.VERSION_21_3_2)) { nativeImageArgs.add("-H:+PrintAnalysisCallTree"); } else { nativeImageArgs.add("-H:PrintAnalysisCallTreeType=CSV"); } } if (graalVMVersion.compareTo(GraalVM.Version.VERSION_22_3_0) >= 0) { nativeImageArgs.add("-H:+CollectImageBuildStatistics"); nativeImageArgs.add("-H:ImageBuildStatisticsFile=" + nativeImageName + "-timing-stats.json"); nativeImageArgs.add("-H:BuildOutputJSONFile=" + nativeImageName + "-build-output-stats.json"); } /* * Any parameters following this call are forced over the user provided parameters in * quarkus.native.additional-build-args. So if you need a parameter to be overridable through * quarkus.native.additional-build-args please make sure to add it before this call. 
*/ handleAdditionalProperties(nativeImageArgs); nativeImageArgs.add( "-H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime"); nativeImageArgs.add("-H:+JNI"); nativeImageArgs.add("-H:+AllowFoldMethods"); if (nativeConfig.headless) { nativeImageArgs.add("-J-Djava.awt.headless=true"); } if (nativeConfig.enableFallbackImages) { nativeImageArgs.add("-H:FallbackThreshold=5"); } else { nativeImageArgs.add("-H:FallbackThreshold=0"); } if (graalVMVersion.isNewerThan(GraalVM.Version.VERSION_22_0_0_2) && !classpathIsBroken) { nativeImageArgs.add("--link-at-build-time"); } else if (!graalVMVersion.isNewerThan(GraalVM.Version.VERSION_22_0_0_2) && classpathIsBroken) { nativeImageArgs.add("--allow-incomplete-classpath"); } if (nativeConfig.reportErrorsAtRuntime) { nativeImageArgs.add("-H:+ReportUnsupportedElementsAtRuntime"); } if (nativeConfig.reportExceptionStackTraces) { nativeImageArgs.add("-H:+ReportExceptionStackTraces"); } if (nativeConfig.debug.enabled) { nativeImageArgs.add("-g"); nativeImageArgs.add("-H:DebugInfoSourceSearchPath=" + APP_SOURCES); } if (nativeConfig.debugBuildProcess) { String debugBuildProcessHost; if (nativeConfig.isContainerBuild()) { debugBuildProcessHost = "0.0.0.0"; } else { debugBuildProcessHost = "localhost"; } nativeImageArgs .add("-J-Xrunjdwp:transport=dt_socket,address=" + debugBuildProcessHost + ":" + DEBUG_BUILD_PROCESS_PORT + ",server=y,suspend=y"); } if (nativeConfig.dumpProxies) { nativeImageArgs.add("-Dsun.misc.ProxyGenerator.saveGeneratedFiles=true"); } if (nativeConfig.nativeImageXmx.isPresent()) { nativeImageArgs.add("-J-Xmx" + nativeConfig.nativeImageXmx.get()); } List<String> protocols = new ArrayList<>(2); if (nativeConfig.enableHttpUrlHandler) { protocols.add("http"); } if (enableHttpsUrlHandler) { protocols.add("https"); } if (addAllCharsets) { nativeImageArgs.add("-H:+AddAllCharsets"); } else { nativeImageArgs.add("-H:-AddAllCharsets"); } if (!protocols.isEmpty()) { 
nativeImageArgs.add("-H:EnableURLProtocols=" + String.join(",", protocols)); } if (inlineBeforeAnalysis) { if (graalVMVersion.isOlderThan(GraalVM.Version.VERSION_21_3)) { nativeImageArgs.add("-H:+InlineBeforeAnalysis"); } } else { nativeImageArgs.add("-H:-InlineBeforeAnalysis"); } if (!noPIE.isEmpty()) { nativeImageArgs.add("-H:NativeLinkerOption=" + noPIE); } if (!nativeConfig.enableIsolates) { nativeImageArgs.add("-H:-SpawnIsolates"); } if (!nativeConfig.enableJni) { log.warn( "Your application is setting the deprecated 'quarkus.native.enable-jni' configuration key to false." + " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it" + " will be removed in a future Quarkus version."); } if (nativeConfig.enableServer) { log.warn( "Your application is setting the deprecated 'quarkus.native.enable-server' configuration key to true." + " Please consider removing this configuration key as it is ignored" + " (The Native image build server is always disabled) and it" + " will be removed in a future Quarkus version."); } if (nativeConfig.enableVmInspection) { nativeImageArgs.add("-H:+AllowVMInspection"); } if (nativeConfig.autoServiceLoaderRegistration) { nativeImageArgs.add("-H:+UseServiceLoaderFeature"); nativeImageArgs.add("-H:+TraceServiceLoaderFeature"); } else { nativeImageArgs.add("-H:-UseServiceLoaderFeature"); } if (nativeConfig.fullStackTraces) { nativeImageArgs.add("-H:+StackTrace"); } else { nativeImageArgs.add("-H:-StackTrace"); } if (nativeConfig.enableDashboardDump) { nativeImageArgs.add("-H:DashboardDump=" + outputTargetBuildItem.getBaseName() + "_dashboard.dump"); nativeImageArgs.add("-H:+DashboardAll"); } if (nativeImageSecurityProviders != null && !nativeImageSecurityProviders.isEmpty()) { String additionalSecurityProviders = nativeImageSecurityProviders.stream() .map(p -> p.getSecurityProvider()) .collect(Collectors.joining(",")); nativeImageArgs.add("-H:AdditionalSecurityProviders=" + 
additionalSecurityProviders); } if (jpmsExports != null) { HashSet<JPMSExportBuildItem> deduplicatedJpmsExport = new HashSet<>(jpmsExports); for (JPMSExportBuildItem jpmsExport : deduplicatedJpmsExport) { if (graalVMVersion.isNewerThan(jpmsExport.getExportAfter())) { nativeImageArgs.add( "-J--add-exports=" + jpmsExport.getModule() + "/" + jpmsExport.getPackage() + "=ALL-UNNAMED"); } } } if (nativeMinimalJavaVersions != null && !nativeMinimalJavaVersions.isEmpty()) { if (graalVMVersion.javaUpdateVersion == GraalVM.Version.UNDEFINED) { log.warnf( "Unable to parse used Java version from native-image version string `%s'. Java version checks will be skipped.", graalVMVersion.fullVersion); } else { nativeMinimalJavaVersions.stream() .filter(a -> !graalVMVersion.jdkVersionGreaterOrEqualTo(a.minFeature, a.minUpdate)) .forEach(a -> log.warnf("Expected: Java %d, update %d, Actual: Java %d, update %d. %s", a.minFeature, a.minUpdate, graalVMVersion.javaFeatureVersion, graalVMVersion.javaUpdateVersion, a.warning)); } } if (unsupportedOSes != null && !unsupportedOSes.isEmpty()) { final String errs = unsupportedOSes.stream().filter(o -> o.os.active).map(o -> o.error) .collect(Collectors.joining(", ")); if (!errs.isEmpty()) { throw new UnsupportedOperationException(errs); } } for (ExcludeConfigBuildItem excludeConfig : excludeConfigs) { nativeImageArgs.add("--exclude-config"); nativeImageArgs.add(excludeConfig.getJarFile()); nativeImageArgs.add(excludeConfig.getResourceName()); } if (graalVMVersion.is(GraalVM.Version.VERSION_21_3_0) && graalVMVersion.isJava17()) { nativeImageArgs.add("-J--add-exports=java.management/sun.management=ALL-UNNAMED"); } nativeImageArgs.add(nativeImageName); nativeImageArgs.add("-jar"); nativeImageArgs.add(runnerJarName); return new NativeImageInvokerInfo(nativeImageArgs); }
class Builder { private NativeConfig nativeConfig; private LocalesBuildTimeConfig localesBuildTimeConfig; private OutputTargetBuildItem outputTargetBuildItem; private List<NativeImageSystemPropertyBuildItem> nativeImageProperties; private List<ExcludeConfigBuildItem> excludeConfigs; private List<NativeImageSecurityProviderBuildItem> nativeImageSecurityProviders; private List<JPMSExportBuildItem> jpmsExports; private List<NativeMinimalJavaVersionBuildItem> nativeMinimalJavaVersions; private List<UnsupportedOSBuildItem> unsupportedOSes; private List<NativeImageFeatureBuildItem> nativeImageFeatures; private Path outputDir; private String runnerJarName; private String noPIE = ""; private GraalVM.Version graalVMVersion = GraalVM.Version.UNVERSIONED; private String nativeImageName; private boolean classpathIsBroken; public Builder setNativeConfig(NativeConfig nativeConfig) { this.nativeConfig = nativeConfig; return this; } public Builder setLocalesBuildTimeConfig(LocalesBuildTimeConfig localesBuildTimeConfig) { this.localesBuildTimeConfig = localesBuildTimeConfig; return this; } public Builder setOutputTargetBuildItem(OutputTargetBuildItem outputTargetBuildItem) { this.outputTargetBuildItem = outputTargetBuildItem; return this; } public Builder setNativeImageProperties(List<NativeImageSystemPropertyBuildItem> nativeImageProperties) { this.nativeImageProperties = nativeImageProperties; return this; } public Builder setBrokenClasspath(boolean classpathIsBroken) { this.classpathIsBroken = classpathIsBroken; return this; } public Builder setExcludeConfigs(List<ExcludeConfigBuildItem> excludeConfigs) { this.excludeConfigs = excludeConfigs; return this; } public Builder setNativeImageSecurityProviders( List<NativeImageSecurityProviderBuildItem> nativeImageSecurityProviders) { this.nativeImageSecurityProviders = nativeImageSecurityProviders; return this; } public Builder setJPMSExportBuildItems(List<JPMSExportBuildItem> JPMSExportBuildItems) { this.jpmsExports = 
JPMSExportBuildItems; return this; } public Builder setNativeMinimalJavaVersions( List<NativeMinimalJavaVersionBuildItem> nativeMinimalJavaVersions) { this.nativeMinimalJavaVersions = nativeMinimalJavaVersions; return this; } public Builder setUnsupportedOSes( List<UnsupportedOSBuildItem> unsupportedOSes) { this.unsupportedOSes = unsupportedOSes; return this; } public Builder setNativeImageFeatures(List<NativeImageFeatureBuildItem> nativeImageFeatures) { this.nativeImageFeatures = nativeImageFeatures; return this; } public Builder setOutputDir(Path outputDir) { this.outputDir = outputDir; return this; } public Builder setRunnerJarName(String runnerJarName) { this.runnerJarName = runnerJarName; return this; } public Builder setNoPIE(String noPIE) { this.noPIE = noPIE; return this; } public Builder setGraalVMVersion(GraalVM.Version graalVMVersion) { this.graalVMVersion = graalVMVersion; return this; } public Builder setNativeImageName(String nativeImageName) { this.nativeImageName = nativeImageName; return this; } private void handleAdditionalProperties(List<String> command) { if (nativeConfig.additionalBuildArgs.isPresent()) { List<String> strings = nativeConfig.additionalBuildArgs.get(); for (String buildArg : strings) { String trimmedBuildArg = buildArg.trim(); if (trimmedBuildArg.contains(TRUST_STORE_SYSTEM_PROPERTY_MARKER) && nativeConfig.isContainerBuild()) { /* * When the native binary is being built with a docker container, because a volume is created, * we need to copy the trustStore file into the output directory (which is the root of volume) * and change the value of 'javax.net.ssl.trustStore' property to point to this value * * TODO: we might want to introduce a dedicated property in order to overcome this ugliness */ int index = trimmedBuildArg.indexOf(TRUST_STORE_SYSTEM_PROPERTY_MARKER); if (trimmedBuildArg.length() > index + 2) { String configuredTrustStorePath = trimmedBuildArg .substring(index + TRUST_STORE_SYSTEM_PROPERTY_MARKER.length()); try { 
IoUtils.copy(Paths.get(configuredTrustStorePath), outputDir.resolve(MOVED_TRUST_STORE_NAME)); command.add(trimmedBuildArg.substring(0, index) + TRUST_STORE_SYSTEM_PROPERTY_MARKER + CONTAINER_BUILD_VOLUME_PATH + "/" + MOVED_TRUST_STORE_NAME); } catch (IOException e) { throw new UncheckedIOException("Unable to copy trustStore file '" + configuredTrustStorePath + "' to volume root directory '" + outputDir.toAbsolutePath().toString() + "'", e); } } } else { command.add(trimmedBuildArg); } } } } }
class Builder { private NativeConfig nativeConfig; private LocalesBuildTimeConfig localesBuildTimeConfig; private OutputTargetBuildItem outputTargetBuildItem; private List<NativeImageSystemPropertyBuildItem> nativeImageProperties; private List<ExcludeConfigBuildItem> excludeConfigs; private List<NativeImageSecurityProviderBuildItem> nativeImageSecurityProviders; private List<JPMSExportBuildItem> jpmsExports; private List<NativeMinimalJavaVersionBuildItem> nativeMinimalJavaVersions; private List<UnsupportedOSBuildItem> unsupportedOSes; private List<NativeImageFeatureBuildItem> nativeImageFeatures; private Path outputDir; private String runnerJarName; private String noPIE = ""; private GraalVM.Version graalVMVersion = GraalVM.Version.UNVERSIONED; private String nativeImageName; private boolean classpathIsBroken; public Builder setNativeConfig(NativeConfig nativeConfig) { this.nativeConfig = nativeConfig; return this; } public Builder setLocalesBuildTimeConfig(LocalesBuildTimeConfig localesBuildTimeConfig) { this.localesBuildTimeConfig = localesBuildTimeConfig; return this; } public Builder setOutputTargetBuildItem(OutputTargetBuildItem outputTargetBuildItem) { this.outputTargetBuildItem = outputTargetBuildItem; return this; } public Builder setNativeImageProperties(List<NativeImageSystemPropertyBuildItem> nativeImageProperties) { this.nativeImageProperties = nativeImageProperties; return this; } public Builder setBrokenClasspath(boolean classpathIsBroken) { this.classpathIsBroken = classpathIsBroken; return this; } public Builder setExcludeConfigs(List<ExcludeConfigBuildItem> excludeConfigs) { this.excludeConfigs = excludeConfigs; return this; } public Builder setNativeImageSecurityProviders( List<NativeImageSecurityProviderBuildItem> nativeImageSecurityProviders) { this.nativeImageSecurityProviders = nativeImageSecurityProviders; return this; } public Builder setJPMSExportBuildItems(List<JPMSExportBuildItem> JPMSExportBuildItems) { this.jpmsExports = 
JPMSExportBuildItems; return this; } public Builder setNativeMinimalJavaVersions( List<NativeMinimalJavaVersionBuildItem> nativeMinimalJavaVersions) { this.nativeMinimalJavaVersions = nativeMinimalJavaVersions; return this; } public Builder setUnsupportedOSes( List<UnsupportedOSBuildItem> unsupportedOSes) { this.unsupportedOSes = unsupportedOSes; return this; } public Builder setNativeImageFeatures(List<NativeImageFeatureBuildItem> nativeImageFeatures) { this.nativeImageFeatures = nativeImageFeatures; return this; } public Builder setOutputDir(Path outputDir) { this.outputDir = outputDir; return this; } public Builder setRunnerJarName(String runnerJarName) { this.runnerJarName = runnerJarName; return this; } public Builder setNoPIE(String noPIE) { this.noPIE = noPIE; return this; } public Builder setGraalVMVersion(GraalVM.Version graalVMVersion) { this.graalVMVersion = graalVMVersion; return this; } public Builder setNativeImageName(String nativeImageName) { this.nativeImageName = nativeImageName; return this; } private void handleAdditionalProperties(List<String> command) { if (nativeConfig.additionalBuildArgs.isPresent()) { List<String> strings = nativeConfig.additionalBuildArgs.get(); for (String buildArg : strings) { String trimmedBuildArg = buildArg.trim(); if (trimmedBuildArg.contains(TRUST_STORE_SYSTEM_PROPERTY_MARKER) && nativeConfig.isContainerBuild()) { /* * When the native binary is being built with a docker container, because a volume is created, * we need to copy the trustStore file into the output directory (which is the root of volume) * and change the value of 'javax.net.ssl.trustStore' property to point to this value * * TODO: we might want to introduce a dedicated property in order to overcome this ugliness */ int index = trimmedBuildArg.indexOf(TRUST_STORE_SYSTEM_PROPERTY_MARKER); if (trimmedBuildArg.length() > index + 2) { String configuredTrustStorePath = trimmedBuildArg .substring(index + TRUST_STORE_SYSTEM_PROPERTY_MARKER.length()); try { 
IoUtils.copy(Paths.get(configuredTrustStorePath), outputDir.resolve(MOVED_TRUST_STORE_NAME)); command.add(trimmedBuildArg.substring(0, index) + TRUST_STORE_SYSTEM_PROPERTY_MARKER + CONTAINER_BUILD_VOLUME_PATH + "/" + MOVED_TRUST_STORE_NAME); } catch (IOException e) { throw new UncheckedIOException("Unable to copy trustStore file '" + configuredTrustStorePath + "' to volume root directory '" + outputDir.toAbsolutePath().toString() + "'", e); } } } else { command.add(trimmedBuildArg); } } } } }
If possible, collections (Set, List, ...) in ShardingSphere are declared as Collection<x>
private void processSuccess() throws SQLException { final List<Long> orderIds = insertData(); assertThat(this.selectAll(), equalTo(Arrays.asList( new Order(1, 0, 2, 2, "INSERT_TEST"), new Order(2, 0, 4, 4, "INSERT_TEST"), new Order(3, 0, 6, 6, "INSERT_TEST"), new Order(4, 0, 8, 8, "INSERT_TEST"), new Order(5, 0, 10, 10, "INSERT_TEST"), new Order(1, 1, 1, 1, "INSERT_TEST"), new Order(2, 1, 3, 3, "INSERT_TEST"), new Order(3, 1, 5, 5, "INSERT_TEST"), new Order(4, 1, 7, 7, "INSERT_TEST"), new Order(5, 1, 9, 9, "INSERT_TEST")))); assertThat(orderItemRepository.selectAll(), equalTo(Arrays.asList( new OrderItem(1, 1, 1, "13800000001", "INSERT_TEST"), new OrderItem(2, 1, 2, "13800000001", "INSERT_TEST"), new OrderItem(3, 2, 3, "13800000001", "INSERT_TEST"), new OrderItem(4, 2, 4, "13800000001", "INSERT_TEST"), new OrderItem(5, 3, 5, "13800000001", "INSERT_TEST"), new OrderItem(6, 3, 6, "13800000001", "INSERT_TEST"), new OrderItem(7, 4, 7, "13800000001", "INSERT_TEST"), new OrderItem(8, 4, 8, "13800000001", "INSERT_TEST"), new OrderItem(9, 5, 9, "13800000001", "INSERT_TEST"), new OrderItem(10, 5, 10, "13800000001", "INSERT_TEST")))); assertThat(addressRepository.selectAll(), equalTo(LongStream.range(1, 11).mapToObj(i -> new Address(i, "address_test_" + i)).collect(Collectors.toList()))); deleteData(orderIds); assertThat(this.selectAll(), equalTo(Collections.singletonList(new Order(1, 0, 2, 2, "INSERT_TEST")))); assertThat(orderItemRepository.selectAll(), equalTo(new ArrayList<>())); assertThat(addressRepository.selectAll(), equalTo(new ArrayList<>())); }
final List<Long> orderIds = insertData();
private void processSuccess() throws SQLException { final Collection<Long> orderIds = insertData(); assertThat(this.selectAll(), equalTo(Arrays.asList( new Order(1, 0, 2, 2, "INSERT_TEST"), new Order(2, 0, 4, 4, "INSERT_TEST"), new Order(3, 0, 6, 6, "INSERT_TEST"), new Order(4, 0, 8, 8, "INSERT_TEST"), new Order(5, 0, 10, 10, "INSERT_TEST"), new Order(1, 1, 1, 1, "INSERT_TEST"), new Order(2, 1, 3, 3, "INSERT_TEST"), new Order(3, 1, 5, 5, "INSERT_TEST"), new Order(4, 1, 7, 7, "INSERT_TEST"), new Order(5, 1, 9, 9, "INSERT_TEST")))); assertThat(orderItemRepository.selectAll(), equalTo(Arrays.asList( new OrderItem(1, 1, 1, "13800000001", "INSERT_TEST"), new OrderItem(2, 1, 2, "13800000001", "INSERT_TEST"), new OrderItem(3, 2, 3, "13800000001", "INSERT_TEST"), new OrderItem(4, 2, 4, "13800000001", "INSERT_TEST"), new OrderItem(5, 3, 5, "13800000001", "INSERT_TEST"), new OrderItem(6, 3, 6, "13800000001", "INSERT_TEST"), new OrderItem(7, 4, 7, "13800000001", "INSERT_TEST"), new OrderItem(8, 4, 8, "13800000001", "INSERT_TEST"), new OrderItem(9, 5, 9, "13800000001", "INSERT_TEST"), new OrderItem(10, 5, 10, "13800000001", "INSERT_TEST")))); assertThat(addressRepository.selectAll(), equalTo(LongStream.range(1, 11).mapToObj(i -> new Address(i, "address_test_" + i)).collect(Collectors.toList()))); deleteData(orderIds); assertThat(this.selectAll(), equalTo(Collections.singletonList(new Order(1, 0, 2, 2, "INSERT_TEST")))); assertThat(orderItemRepository.selectAll(), equalTo(new ArrayList<>())); assertThat(addressRepository.selectAll(), equalTo(new ArrayList<>())); }
class ShadowTest { private OrderRepository orderRepository; private OrderItemRepository orderItemRepository; private AddressRepository addressRepository; @Test void testShadowInLocalTransactions() throws SQLException, IOException { DataSource dataSource = YamlShardingSphereDataSourceFactory.createDataSource(FileTestUtils.readFromFileURLString("yaml/shadow.yaml")); orderRepository = new OrderRepository(dataSource); orderItemRepository = new OrderItemRepository(dataSource); addressRepository = new AddressRepository(dataSource); this.initEnvironment(); this.processSuccess(); this.cleanEnvironment(); } private void initEnvironment() throws SQLException { orderRepository.createTableIfNotExists(); orderItemRepository.createTableIfNotExists(); addressRepository.createTableIfNotExists(); orderRepository.truncateTable(); orderItemRepository.truncateTable(); addressRepository.truncateTable(); orderRepository.createTableIfNotExistsShadow(); orderRepository.truncateTableShadow(); } private List<Long> insertData() throws SQLException { List<Long> result = new ArrayList<>(10); for (int i = 1; i <= 10; i++) { Order order = new Order(); order.setUserId(i); order.setOrderType(i % 2); order.setAddressId(i); order.setStatus("INSERT_TEST"); orderRepository.insert(order); OrderItem orderItem = new OrderItem(); orderItem.setOrderId(order.getOrderId()); orderItem.setUserId(i); orderItem.setPhone("13800000001"); orderItem.setStatus("INSERT_TEST"); orderItemRepository.insert(orderItem); Address address = new Address((long) i, "address_test_" + i); addressRepository.insert(address); result.add(order.getOrderId()); } return result; } private void deleteData(final List<Long> orderIds) throws SQLException { long count = 1; for (Long each : orderIds) { orderRepository.deleteShadow(each); orderRepository.delete(each); orderItemRepository.delete(each); addressRepository.delete(count++); } } private List<Order> selectAll() throws SQLException { List<Order> result = orderRepository.selectAll(); 
result.addAll(orderRepository.selectShadowOrder()); return result; } private void cleanEnvironment() throws SQLException { orderRepository.dropTableShadow(); orderRepository.dropTable(); orderItemRepository.dropTable(); addressRepository.dropTable(); } }
class ShadowTest { private OrderRepository orderRepository; private OrderItemRepository orderItemRepository; private AddressRepository addressRepository; @Test void testShadowInLocalTransactions() throws SQLException, IOException { DataSource dataSource = YamlShardingSphereDataSourceFactory.createDataSource(FileTestUtils.readFromFileURLString("yaml/shadow.yaml")); orderRepository = new OrderRepository(dataSource); orderItemRepository = new OrderItemRepository(dataSource); addressRepository = new AddressRepository(dataSource); this.initEnvironment(); this.processSuccess(); this.cleanEnvironment(); } private void initEnvironment() throws SQLException { orderRepository.createTableIfNotExists(); orderItemRepository.createTableIfNotExists(); addressRepository.createTableIfNotExists(); orderRepository.truncateTable(); orderItemRepository.truncateTable(); addressRepository.truncateTable(); orderRepository.createTableIfNotExistsShadow(); orderRepository.truncateTableShadow(); } private Collection<Long> insertData() throws SQLException { Collection<Long> result = new ArrayList<>(10); for (int i = 1; i <= 10; i++) { Order order = new Order(); order.setUserId(i); order.setOrderType(i % 2); order.setAddressId(i); order.setStatus("INSERT_TEST"); orderRepository.insert(order); OrderItem orderItem = new OrderItem(); orderItem.setOrderId(order.getOrderId()); orderItem.setUserId(i); orderItem.setPhone("13800000001"); orderItem.setStatus("INSERT_TEST"); orderItemRepository.insert(orderItem); Address address = new Address((long) i, "address_test_" + i); addressRepository.insert(address); result.add(order.getOrderId()); } return result; } private void deleteData(final Collection<Long> orderIds) throws SQLException { long count = 1; for (Long each : orderIds) { orderRepository.deleteShadow(each); orderRepository.delete(each); orderItemRepository.delete(each); addressRepository.delete(count++); } } private Collection<Order> selectAll() throws SQLException { Collection<Order> result = 
orderRepository.selectAll(); result.addAll(orderRepository.selectShadowOrder()); return result; } private void cleanEnvironment() throws SQLException { orderRepository.dropTableShadow(); orderRepository.dropTable(); orderItemRepository.dropTable(); addressRepository.dropTable(); } }
Use the sendDropTabletTasks directly, we are just in the Catalog class.
public void dropDb(DropDbStmt stmt) throws DdlException { String dbName = stmt.getDbName(); if (!tryLock(false)) { throw new DdlException("Failed to acquire catalog lock. Try again"); } try { if (!fullNameToDb.containsKey(dbName)) { if (stmt.isSetIfExists()) { LOG.info("drop database[{}] which does not exist", dbName); return; } else { ErrorReport.reportDdlException(ErrorCode.ERR_DB_DROP_EXISTS, dbName); } } Database db = this.fullNameToDb.get(dbName); HashMap<Long, AgentBatchTask> batchTaskMap; db.writeLock(); try { if (!stmt.isForceDrop()) { if (Catalog.getCurrentCatalog().getGlobalTransactionMgr() .existCommittedTxns(db.getId(), null, null)) { throw new DdlException( "There are still some transactions in the COMMITTED state waiting to be completed. " + "The database [" + dbName + "] cannot be dropped. If you want to forcibly drop(cannot be recovered)," + " please use \"DROP database FORCE\"."); } } if (db.getDbState() == DbState.LINK && dbName.equals(db.getAttachDb())) { final DropLinkDbAndUpdateDbInfo info = new DropLinkDbAndUpdateDbInfo(); fullNameToDb.remove(db.getAttachDb()); db.setDbState(DbState.NORMAL); info.setUpdateDbState(DbState.NORMAL); final Cluster cluster = nameToCluster .get(ClusterNamespace.getClusterNameFromFullName(db.getAttachDb())); final BaseParam param = new BaseParam(); param.addStringParam(db.getAttachDb()); param.addLongParam(db.getId()); cluster.removeLinkDb(param); info.setDropDbCluster(cluster.getName()); info.setDropDbId(db.getId()); info.setDropDbName(db.getAttachDb()); editLog.logDropLinkDb(info); return; } if (db.getDbState() == DbState.LINK && dbName.equals(db.getFullName())) { ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DB_STATE_LINK_OR_MIGRATE, ClusterNamespace.getNameFromFullName(dbName)); return; } if (dbName.equals(db.getAttachDb()) && db.getDbState() == DbState.MOVE) { ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DB_STATE_LINK_OR_MIGRATE, ClusterNamespace.getNameFromFullName(dbName)); return; } Set<String> 
tableNames = db.getTableNamesWithLock(); batchTaskMap = unprotectDropDb(db, stmt.isForceDrop(), false); if (!stmt.isForceDrop()) { Catalog.getCurrentRecycleBin().recycleDatabase(db, tableNames); } else { Catalog.getCurrentCatalog().onEraseDatabase(db.getId()); } } finally { db.writeUnlock(); } Catalog.getCurrentCatalog().sendDropTabletTasks(batchTaskMap); idToDb.remove(db.getId()); fullNameToDb.remove(db.getFullName()); final Cluster cluster = nameToCluster.get(db.getClusterName()); cluster.removeDb(dbName, db.getId()); DropDbInfo info = new DropDbInfo(dbName, stmt.isForceDrop()); editLog.logDropDb(info); LOG.info("finish drop database[{}], id: {}, is force : {}", dbName, db.getId(), stmt.isForceDrop()); } finally { unlock(); } }
Catalog.getCurrentCatalog().sendDropTabletTasks(batchTaskMap);
public void dropDb(DropDbStmt stmt) throws DdlException { String dbName = stmt.getDbName(); if (!tryLock(false)) { throw new DdlException("Failed to acquire catalog lock. Try again"); } try { if (!fullNameToDb.containsKey(dbName)) { if (stmt.isSetIfExists()) { LOG.info("drop database[{}] which does not exist", dbName); return; } else { ErrorReport.reportDdlException(ErrorCode.ERR_DB_DROP_EXISTS, dbName); } } Database db = this.fullNameToDb.get(dbName); HashMap<Long, AgentBatchTask> batchTaskMap; db.writeLock(); try { if (!stmt.isForceDrop()) { if (Catalog.getCurrentCatalog().getGlobalTransactionMgr() .existCommittedTxns(db.getId(), null, null)) { throw new DdlException( "There are still some transactions in the COMMITTED state waiting to be completed. " + "The database [" + dbName + "] cannot be dropped. If you want to forcibly drop(cannot be recovered)," + " please use \"DROP database FORCE\"."); } } if (db.getDbState() == DbState.LINK && dbName.equals(db.getAttachDb())) { final DropLinkDbAndUpdateDbInfo info = new DropLinkDbAndUpdateDbInfo(); fullNameToDb.remove(db.getAttachDb()); db.setDbState(DbState.NORMAL); info.setUpdateDbState(DbState.NORMAL); final Cluster cluster = nameToCluster .get(ClusterNamespace.getClusterNameFromFullName(db.getAttachDb())); final BaseParam param = new BaseParam(); param.addStringParam(db.getAttachDb()); param.addLongParam(db.getId()); cluster.removeLinkDb(param); info.setDropDbCluster(cluster.getName()); info.setDropDbId(db.getId()); info.setDropDbName(db.getAttachDb()); editLog.logDropLinkDb(info); return; } if (db.getDbState() == DbState.LINK && dbName.equals(db.getFullName())) { ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DB_STATE_LINK_OR_MIGRATE, ClusterNamespace.getNameFromFullName(dbName)); return; } if (dbName.equals(db.getAttachDb()) && db.getDbState() == DbState.MOVE) { ErrorReport.reportDdlException(ErrorCode.ERR_CLUSTER_DB_STATE_LINK_OR_MIGRATE, ClusterNamespace.getNameFromFullName(dbName)); return; } Set<String> 
tableNames = db.getTableNamesWithLock(); batchTaskMap = unprotectDropDb(db, stmt.isForceDrop(), false); if (!stmt.isForceDrop()) { Catalog.getCurrentRecycleBin().recycleDatabase(db, tableNames); } else { Catalog.getCurrentCatalog().onEraseDatabase(db.getId()); } } finally { db.writeUnlock(); } sendDropTabletTasks(batchTaskMap); idToDb.remove(db.getId()); fullNameToDb.remove(db.getFullName()); final Cluster cluster = nameToCluster.get(db.getClusterName()); cluster.removeDb(dbName, db.getId()); DropDbInfo info = new DropDbInfo(dbName, stmt.isForceDrop()); editLog.logDropDb(info); LOG.info("finish drop database[{}], id: {}, is force : {}", dbName, db.getId(), stmt.isForceDrop()); } finally { unlock(); } }
class SingletonHolder { private static final Catalog INSTANCE = new Catalog(); }
class SingletonHolder { private static final Catalog INSTANCE = new Catalog(); }
Sorry for the bad wording in the question. What I meant to ask was: If an application has no global service id and no rotations, why log here? I.e. shouldn't you only log when rotation set is empty?
void writeContainerEndpointsZK(Optional<String> globalServiceId) { if (!params.containerEndpoints().isEmpty()) { containerEndpoints.write(applicationId, params.containerEndpoints()); } else { if (globalServiceId.isEmpty()) { log.log(LogLevel.WARNING, "Want to write rotations " + rotationsSet + " as container endpoints, but " + applicationId + " has no global-service-id. This should not happen"); return; } containerEndpoints.write(applicationId, toContainerEndpoints(globalServiceId.get(), rotationsSet)); } checkTimeout("write container endpoints to zookeeper"); }
log.log(LogLevel.WARNING, "Want to write rotations " + rotationsSet + " as container endpoints, but " + applicationId + " has no global-service-id. This should not happen");
void writeContainerEndpointsZK(Optional<String> globalServiceId) { if (!params.containerEndpoints().isEmpty()) { containerEndpoints.write(applicationId, params.containerEndpoints()); } else { if (!rotationsSet.isEmpty()) { if (globalServiceId.isEmpty()) { log.log(LogLevel.WARNING, "Want to write rotations " + rotationsSet + " as container endpoints, but " + applicationId + " has no global-service-id. This should not happen"); return; } containerEndpoints.write(applicationId, toContainerEndpoints(globalServiceId.get(), rotationsSet)); } } checkTimeout("write container endpoints to zookeeper"); }
class Preparation { final SessionContext context; final DeployLogger logger; final PrepareParams params; final Optional<ApplicationSet> currentActiveApplicationSet; final Path tenantPath; final ApplicationId applicationId; /** The version of Vespa the application to be prepared specifies for its nodes */ final com.yahoo.component.Version vespaVersion; final Rotations rotations; final ContainerEndpointsCache containerEndpoints; final Set<Rotation> rotationsSet; final ModelContext.Properties properties; private ApplicationPackage applicationPackage; private List<PreparedModelsBuilder.PreparedModelResult> modelResultList; private PrepareResult prepareResult; private final PreparedModelsBuilder preparedModelsBuilder; Preparation(SessionContext context, DeployLogger logger, PrepareParams params, Optional<ApplicationSet> currentActiveApplicationSet, Path tenantPath) { this.context = context; this.logger = logger; this.params = params; this.currentActiveApplicationSet = currentActiveApplicationSet; this.tenantPath = tenantPath; this.applicationId = params.getApplicationId(); this.vespaVersion = params.vespaVersion().orElse(Vtag.currentVersion); this.rotations = new Rotations(curator, tenantPath); this.containerEndpoints = new ContainerEndpointsCache(tenantPath, curator); this.rotationsSet = getRotations(params.rotations()); this.properties = new ModelContextImpl.Properties(params.getApplicationId(), configserverConfig.multitenant(), ConfigServerSpec.fromConfig(configserverConfig), HostName.from(configserverConfig.loadBalancerAddress()), configserverConfig.ztsUrl() != null ? URI.create(configserverConfig.ztsUrl()) : null, configserverConfig.athenzDnsSuffix(), configserverConfig.hostedVespa(), zone, rotationsSet, params.isBootstrap(), ! 
currentActiveApplicationSet.isPresent(), context.getFlagSource()); this.preparedModelsBuilder = new PreparedModelsBuilder(modelFactoryRegistry, permanentApplicationPackage, configDefinitionRepo, fileDistributionFactory, hostProvisionerProvider, context, logger, params, currentActiveApplicationSet, properties, configserverConfig); } void checkTimeout(String step) { if (! params.getTimeoutBudget().hasTimeLeft()) { String used = params.getTimeoutBudget().timesUsed(); throw new RuntimeException("prepare timed out "+used+" after "+step+" step: " + applicationId); } } void preprocess() { try { this.applicationPackage = context.getApplicationPackage().preprocess(properties.zone(), logger); } catch (IOException | TransformerException | ParserConfigurationException | SAXException e) { throw new IllegalArgumentException("Error preprocessing application package for " + applicationId, e); } checkTimeout("preprocess"); } AllocatedHosts buildModels(Instant now) { SettableOptional<AllocatedHosts> allocatedHosts = new SettableOptional<>(); this.modelResultList = preparedModelsBuilder.buildModels(applicationId, vespaVersion, applicationPackage, allocatedHosts, now); checkTimeout("build models"); return allocatedHosts.get(); } void makeResult(AllocatedHosts allocatedHosts) { this.prepareResult = new PrepareResult(allocatedHosts, modelResultList); checkTimeout("making result from models"); } void writeStateZK() { log.log(LogLevel.DEBUG, "Writing application package state to zookeeper"); writeStateToZooKeeper(context.getSessionZooKeeperClient(), applicationPackage, applicationId, vespaVersion, logger, prepareResult.getFileRegistries(), prepareResult.allocatedHosts()); checkTimeout("write state to zookeeper"); } void writeRotZK() { rotations.writeRotationsToZooKeeper(applicationId, rotationsSet); checkTimeout("write rotations to zookeeper"); } void distribute() { prepareResult.asList().forEach(modelResult -> modelResult.model 
.distributeFiles(modelResult.fileDistributionProvider.getFileDistribution())); checkTimeout("distribute files"); } ConfigChangeActions result() { return prepareResult.getConfigChangeActions(); } private Set<Rotation> getRotations(Set<Rotation> rotations) { if (rotations == null || rotations.isEmpty()) { rotations = this.rotations.readRotationsFromZooKeeper(applicationId); } return rotations; } }
class Preparation { final SessionContext context; final DeployLogger logger; final PrepareParams params; final Optional<ApplicationSet> currentActiveApplicationSet; final Path tenantPath; final ApplicationId applicationId; /** The version of Vespa the application to be prepared specifies for its nodes */ final com.yahoo.component.Version vespaVersion; final Rotations rotations; final ContainerEndpointsCache containerEndpoints; final Set<Rotation> rotationsSet; final ModelContext.Properties properties; private ApplicationPackage applicationPackage; private List<PreparedModelsBuilder.PreparedModelResult> modelResultList; private PrepareResult prepareResult; private final PreparedModelsBuilder preparedModelsBuilder; Preparation(SessionContext context, DeployLogger logger, PrepareParams params, Optional<ApplicationSet> currentActiveApplicationSet, Path tenantPath) { this.context = context; this.logger = logger; this.params = params; this.currentActiveApplicationSet = currentActiveApplicationSet; this.tenantPath = tenantPath; this.applicationId = params.getApplicationId(); this.vespaVersion = params.vespaVersion().orElse(Vtag.currentVersion); this.rotations = new Rotations(curator, tenantPath); this.containerEndpoints = new ContainerEndpointsCache(tenantPath, curator); this.rotationsSet = getRotations(params.rotations()); this.properties = new ModelContextImpl.Properties(params.getApplicationId(), configserverConfig.multitenant(), ConfigServerSpec.fromConfig(configserverConfig), HostName.from(configserverConfig.loadBalancerAddress()), configserverConfig.ztsUrl() != null ? URI.create(configserverConfig.ztsUrl()) : null, configserverConfig.athenzDnsSuffix(), configserverConfig.hostedVespa(), zone, rotationsSet, params.isBootstrap(), ! 
currentActiveApplicationSet.isPresent(), context.getFlagSource()); this.preparedModelsBuilder = new PreparedModelsBuilder(modelFactoryRegistry, permanentApplicationPackage, configDefinitionRepo, fileDistributionFactory, hostProvisionerProvider, context, logger, params, currentActiveApplicationSet, properties, configserverConfig); } void checkTimeout(String step) { if (! params.getTimeoutBudget().hasTimeLeft()) { String used = params.getTimeoutBudget().timesUsed(); throw new RuntimeException("prepare timed out "+used+" after "+step+" step: " + applicationId); } } void preprocess() { try { this.applicationPackage = context.getApplicationPackage().preprocess(properties.zone(), logger); } catch (IOException | TransformerException | ParserConfigurationException | SAXException e) { throw new IllegalArgumentException("Error preprocessing application package for " + applicationId, e); } checkTimeout("preprocess"); } AllocatedHosts buildModels(Instant now) { SettableOptional<AllocatedHosts> allocatedHosts = new SettableOptional<>(); this.modelResultList = preparedModelsBuilder.buildModels(applicationId, vespaVersion, applicationPackage, allocatedHosts, now); checkTimeout("build models"); return allocatedHosts.get(); } void makeResult(AllocatedHosts allocatedHosts) { this.prepareResult = new PrepareResult(allocatedHosts, modelResultList); checkTimeout("making result from models"); } void writeStateZK() { log.log(LogLevel.DEBUG, "Writing application package state to zookeeper"); writeStateToZooKeeper(context.getSessionZooKeeperClient(), applicationPackage, applicationId, vespaVersion, logger, prepareResult.getFileRegistries(), prepareResult.allocatedHosts()); checkTimeout("write state to zookeeper"); } void writeRotZK() { rotations.writeRotationsToZooKeeper(applicationId, rotationsSet); checkTimeout("write rotations to zookeeper"); } void distribute() { prepareResult.asList().forEach(modelResult -> modelResult.model 
.distributeFiles(modelResult.fileDistributionProvider.getFileDistribution())); checkTimeout("distribute files"); } ConfigChangeActions result() { return prepareResult.getConfigChangeActions(); } private Set<Rotation> getRotations(Set<Rotation> rotations) { if (rotations == null || rotations.isEmpty()) { rotations = this.rotations.readRotationsFromZooKeeper(applicationId); } return rotations; } }
shall we test when it is a `const` as in the issue?
public void testBitwiseUnsignedRightShiftOperator3() { int a = 0xff; long i = -23445834; long j = 5; invokeUnsignedRightShiftOperatorTestFunction(a, i, j); }
invokeUnsignedRightShiftOperatorTestFunction(a, i, j);
public void testBitwiseUnsignedRightShiftOperator3() { int a = 0xff; long i = -23445834; long j = 5; invokeUnsignedRightShiftOperatorTestFunction(a, i, j); }
class BByteValueTest { private CompileResult result; @BeforeClass(alwaysRun = true) public void setup() { result = BCompileUtil.compile("test-src/types/byte/byte-value.bal"); } @Test(description = "Test byte value assignment") public void testByteValue() { BValue[] returns = BRunUtil.invoke(result, "testByteValue", new BValue[]{}); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte byteValue = (BByte) returns[0]; Assert.assertEquals(byteValue.byteValue(), 34, "Invalid byte value returned."); } @Test(description = "Test byte value space") public void testByteValueSpace() { BValue[] returns = BRunUtil.invoke(result, "testByteValueSpace", new BValue[]{}); BByte byteValue = (BByte) returns[0]; Assert.assertEquals(byteValue.byteValue(), 234, "Invalid byte value returned."); } @Test(description = "Test byte default value") public void testByteDefaultValue() { BValue[] returns = BRunUtil.invoke(result, "testByteDefaultValue"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte byteValue = (BByte) returns[0]; Assert.assertEquals(byteValue.byteValue(), 0, "Invalid byte value returned."); } @Test(description = "Test byte function parameter") public void testByteParameter() { invokeByteInputFunction("testByteParam"); } @Test(description = "Test global byte value assignment") public void testGlobalByte() { invokeByteInputFunction("testGlobalByte"); } private void invokeByteInputFunction(String functionName) { long input = 34; BValue[] args = {new BByte(input)}; BValue[] returns = BRunUtil.invoke(result, functionName, args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte byteValue = (BByte) returns[0]; Assert.assertEquals(byteValue.byteValue(), input, "Invalid byte value returned."); } @Test(description = "Test byte to integer cast") public void testByteToIntCast() { long input = 12; BValue[] args = {new BByte(input)}; 
BValue[] returns = BRunUtil.invoke(result, "testByteToIntCast", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger intValue = (BInteger) returns[0]; Assert.assertEquals(intValue.intValue(), input, "Invalid integer value returned."); } @Test(description = "Test integer to byte cast") public void testIntToByteCast() { int input = 123; BValue[] args = {new BInteger(input)}; BValue[] returns = BRunUtil.invoke(result, "testIntToByteCast", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.byteValue(), (long) input, "Invalid byte value returned."); } @Test(description = "Test integer to byte explicit cast") public void testIntToByteExplicitCast() { int input = 123; BValue[] args = {new BInteger(input)}; BValue[] returns = BRunUtil.invoke(result, "testIntToByteExplicitCast", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.byteValue(), (long) input, "Invalid byte value returned."); } @Test(description = "Test integer to byte conversion") public void testIntToByteConversion() { int input = 123; BValue[] args = {new BInteger(input)}; BValue[] returns = BRunUtil.invoke(result, "testIntToByteConversion", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.byteValue(), (long) input, "Invalid byte value returned."); } @Test(description = "Test byte to integer conversion") public void testByteToIntConversion() { long input = 45; BValue[] args = {new BByte(input)}; BValue[] returns = BRunUtil.invoke(result, "testByteToIntConversion", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; 
Assert.assertEquals(bInteger.intValue(), input, "Invalid integer value returned."); } @Test(description = "Test byte to int safe conversion") public void testSafeCasting() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testSafeCasting", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), 6, "Invalid integer value returned."); } @Test(description = "Test byte to integer conversion") public void testAnyToByteCasting() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testAnyToByteCasting", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.byteValue(), 45, "Invalid byte value returned."); } @Test(description = "Test byte array value") public void testByteArrayValue() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteArray", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BValueArray.class); } @Test(description = "Test byte array assignment") public void testByteArrayAssignment() { byte input1 = 2; byte input2 = 56; byte input3 = 89; byte input4 = 23; BValueArray bByteArrayIn = new BValueArray(BTypes.typeByte); bByteArrayIn.add(0, input1); bByteArrayIn.add(1, input2); bByteArrayIn.add(2, input3); bByteArrayIn.add(3, input4); BValue[] args = {bByteArrayIn}; BValue[] returns = BRunUtil.invoke(result, "testByteArrayAssignment", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BValueArray.class); BValueArray bByteArrayOut = (BValueArray) returns[0]; Assert.assertEquals(bByteArrayOut.getByte(0), input1); Assert.assertEquals(bByteArrayOut.getByte(1), input2); Assert.assertEquals(bByteArrayOut.getByte(2), input3); Assert.assertEquals(bByteArrayOut.getByte(3), input4); } @Test(description = "Test 
byte array length") public void testByteArrayLength() { invokeArrayLengthFunction("testByteArrayLength", 4); } @Test(description = "Test byte array zero length") public void testByteArrayZeroLength() { invokeArrayLengthFunction("testByteArrayZeroLength", 0); } private void invokeArrayLengthFunction(String functionName, int length) { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, functionName, args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(length, bInteger.intValue(), "Invalid array size"); } @Test(description = "Test byte array size increase") public void testByteArrayIncreaseSize() { invokeArrayLengthFunction("testByteArrayIncreaseSize", 10); } @Test(description = "Test byte array of array") public void testByteArrayOfArray() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteArrayOfArray", args); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BInteger.class); Assert.assertSame(returns[1].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; BInteger bInteger1 = (BInteger) returns[1]; Assert.assertEquals(3, bInteger.intValue(), "Invalid array size"); Assert.assertEquals(4, bInteger1.intValue(), "Invalid array size"); } @Test(description = "Test byte array iteration") public void testByteArrayIteration() { byte input1 = 1; byte input2 = 2; byte input3 = 3; byte input4 = 4; byte input5 = 5; BValueArray bByteArrayIn = new BValueArray(BTypes.typeByte); bByteArrayIn.add(0, input1); bByteArrayIn.add(1, input2); bByteArrayIn.add(2, input3); bByteArrayIn.add(3, input4); bByteArrayIn.add(4, input5); BValue[] args = {bByteArrayIn}; BValue[] returns = BRunUtil.invoke(result, "testByteArrayIteration", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BValueArray.class); BValueArray bByteArrayOut = (BValueArray) returns[0]; 
Assert.assertEquals(bByteArrayOut.getByte(0), input1); Assert.assertEquals(bByteArrayOut.getByte(1), input2); Assert.assertEquals(bByteArrayOut.getByte(2), input3); Assert.assertEquals(bByteArrayOut.getByte(3), input4); Assert.assertEquals(bByteArrayOut.getByte(4), input5); } @Test(description = "Test byte equal operation") public void testByteEqual() { byte b1 = 2; byte b2 = 3; BValue[] args = {new BByte(b1), new BByte(b2), new BByte(b1)}; BValue[] returns = BRunUtil.invoke(result, "testByteBinaryOperation", args); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertSame(returns[1].getClass(), BBoolean.class); BBoolean boolean1 = (BBoolean) returns[0]; BBoolean boolean2 = (BBoolean) returns[1]; Assert.assertFalse(boolean1.booleanValue(), "Invalid result"); Assert.assertTrue(boolean2.booleanValue(), "Invalid result"); } @Test(description = "Test byte not equal operation") public void testByteNotEqual() { byte b1 = 12; byte b2 = 32; BValue[] args = {new BByte(b1), new BByte(b2), new BByte(b1)}; BValue[] returns = BRunUtil.invoke(result, "testByteBinaryNotEqualOperation", args); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertSame(returns[1].getClass(), BBoolean.class); BBoolean boolean1 = (BBoolean) returns[0]; BBoolean boolean2 = (BBoolean) returns[1]; Assert.assertTrue(boolean1.booleanValue(), "Invalid result"); Assert.assertFalse(boolean2.booleanValue(), "Invalid result"); } @Test public void simpleWorkerMessagePassingTest() { BRunUtil.invoke(result, "testWorkerWithByteVariable", new BValue[0]); } @Test(description = "Test byte to int safe conversion") public void testByteOrIntMatch1() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteOrIntMatch1", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.byteValue(), 
12, "Invalid byte value returned."); } @Test(description = "Test byte to int safe conversion") public void testByteOrIntMatch2() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteOrIntMatch2", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), 266, "Invalid integer value returned."); } @Test(description = "Test byte to int safe conversion") public void testByteOrIntMatch3() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteOrIntMatch3", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), 456, "Invalid byte value returned."); } @Test(description = "Test byte to int safe conversion") public void testByteOrIntMatch4() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteOrIntMatch4", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), -123, "Invalid integer value returned."); } @Test(description = "Test bitwise and operator 1") public void testBitwiseAndOperator1() { byte a = 12; byte b = 34; int i = 234; int j = -456; invokeBitwiseAndTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 2") public void testBitwiseAndOperator2() { byte a = -97; byte b = 12; int i = -456832; int j = 34; invokeBitwiseAndTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 3") public void testBitwiseAndOperator3() { byte a = -97; byte b = -23; int i = -456832; int j = -3456; invokeBitwiseAndTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 4") public void testBitwiseAndOperator4() { byte a = (byte) 159; byte b = (byte) 233; int i = -456832; int j = 
-3456; invokeBitwiseAndTestFunction(a, b, i, j); } private void invokeBitwiseAndTestFunction(byte a, byte b, int i, int j) { BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseAndOperator", args); Assert.assertEquals(returns.length, 7); BByte bByte1 = (BByte) returns[0]; BByte bByte2 = (BByte) returns[1]; BByte bByte3 = (BByte) returns[2]; BInteger bInteger1 = (BInteger) returns[3]; BInteger bInteger2 = (BInteger) returns[4]; BInteger bInteger3 = (BInteger) returns[5]; BInteger bInteger4 = (BInteger) returns[6]; Assert.assertEquals(bByte1.value().byteValue(), a & b, "Invalid result"); Assert.assertEquals(bByte2.value().byteValue(), Byte.valueOf("7F", 16) & i, "Invalid result"); Assert.assertEquals(bByte3.value().byteValue(), i & Byte.valueOf("7F", 16), "Invalid result"); Assert.assertEquals(bInteger1.intValue(), Byte.toUnsignedInt(a) & Byte.toUnsignedInt(b), "Invalid result"); Assert.assertEquals(bInteger2.intValue(), Byte.toUnsignedInt(a) & i, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), i & j, "Invalid result"); Assert.assertEquals(bInteger4.intValue(), Byte.toUnsignedInt(a) & i & Byte.toUnsignedInt(b) & j, "Invalid result"); } @Test(description = "Test bitwise and operator 1") public void testBitwiseOrOperator1() { byte a = 12; byte b = 34; int i = 234; int j = -456; testBitwiseOrTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 2") public void testBitwiseOrOperator2() { byte a = -97; byte b = 12; int i = -456832; int j = 34; testBitwiseOrTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 3") public void testBitwiseOrOperator3() { byte a = -97; byte b = -23; int i = -456832; int j = -3456; testBitwiseOrTestFunction(a, b, i, j); } private void testBitwiseOrTestFunction(byte a, byte b, int i, int j) { BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = 
BRunUtil.invoke(result, "testBitwiseOrOperator", args); Assert.assertEquals(returns.length, 5); BByte bByte = (BByte) returns[0]; BInteger bInteger1 = (BInteger) returns[1]; BInteger bInteger2 = (BInteger) returns[2]; BInteger bInteger3 = (BInteger) returns[3]; BInteger bInteger4 = (BInteger) returns[4]; Assert.assertEquals(bByte.value().byteValue(), a | b, "Invalid result"); Assert.assertEquals(bInteger1.intValue(), Byte.toUnsignedInt(a) | Byte.toUnsignedInt(b), "Invalid result"); Assert.assertEquals(bInteger2.intValue(), Byte.toUnsignedInt(a) | i, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), i | j, "Invalid result"); Assert.assertEquals(bInteger4.intValue(), Byte.toUnsignedInt(a) | i | Byte.toUnsignedInt(b) | j, "Invalid result"); } @Test(description = "Test bitwise and operator 1") public void testBitwiseXorOperator1() { byte a = 12; byte b = 34; int i = 234; int j = -456; testBitwiseXorTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 2") public void testBitwiseXorOperator2() { byte a = -97; byte b = 12; int i = -456832; int j = 34; testBitwiseXorTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 3") public void testBitwiseXorOperator3() { byte a = -97; byte b = -23; int i = -456832; int j = -3456; testBitwiseXorTestFunction(a, b, i, j); } private void testBitwiseXorTestFunction(byte a, byte b, int i, int j) { BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseXorOperator", args); Assert.assertEquals(returns.length, 5); BByte bByte = (BByte) returns[0]; BInteger bInteger1 = (BInteger) returns[1]; BInteger bInteger2 = (BInteger) returns[2]; BInteger bInteger3 = (BInteger) returns[3]; BInteger bInteger4 = (BInteger) returns[4]; Assert.assertEquals(bByte.value().byteValue(), a ^ b, "Invalid result"); Assert.assertEquals(bInteger1.intValue(), Byte.toUnsignedInt(a) ^ Byte.toUnsignedInt(b), "Invalid result"); 
Assert.assertEquals(bInteger2.intValue(), Byte.toUnsignedInt(a) ^ i, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), i ^ j, "Invalid result"); Assert.assertEquals(bInteger4.intValue(), Byte.toUnsignedInt(a) ^ i ^ Byte.toUnsignedInt(b) ^ j, "Invalid result"); } @Test(description = "Test bitwise right shift operator") public void testBitwiseRightShiftOperator1() { int a = 123; int b = 4; long i = 234; long j = 3; BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseRightShiftOperator1", args); Assert.assertEquals(returns.length, 3); BByte bByte1 = (BByte) returns[0]; BInteger bInteger2 = (BInteger) returns[1]; BByte bByte2 = (BByte) returns[2]; Assert.assertEquals(bByte1.value().intValue(), 7, "Invalid result"); Assert.assertEquals(bByte1.intValue(), 7, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), (i >> j), "Invalid result"); Assert.assertEquals(bByte2.intValue(), 15, "Invalid result"); BValue[] args2 = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; returns = BRunUtil.invoke(result, "testBitwiseRightShiftOperator2", args2); Assert.assertEquals(returns.length, 3); BInteger bInteger1 = (BInteger) returns[0]; bInteger2 = (BInteger) returns[1]; BInteger bInteger3 = (BInteger) returns[2]; Assert.assertEquals(bInteger1.intValue(), 7, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), i >> j, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), 15, "Invalid result"); } @Test(description = "Test bitwise right shift operator") public void testBitwiseRightShiftOperator2() { int a = 228; int b = 6; long i = -45678776; long j = 4; BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseRightShiftOperator1", args); Assert.assertEquals(returns.length, 3); BByte bByte1 = (BByte) returns[0]; BInteger bInteger2 = (BInteger) returns[1]; BByte bByte2 = (BByte) 
returns[2]; Assert.assertEquals(bByte1.value().intValue(), 3, "Invalid result"); Assert.assertEquals(bByte1.intValue(), 3, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), (i >> j), "Invalid result"); Assert.assertEquals(bByte2.intValue(), 14, "Invalid result"); BValue[] args2 = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; returns = BRunUtil.invoke(result, "testBitwiseRightShiftOperator2", args2); Assert.assertEquals(returns.length, 3); BInteger bInteger1 = (BInteger) returns[0]; bInteger2 = (BInteger) returns[1]; BInteger bInteger3 = (BInteger) returns[2]; Assert.assertEquals(bInteger1.intValue(), 3, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), i >> j, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), 14, "Invalid result"); } @Test(description = "Test bitwise unsigned right shift operator 1") public void testBitwiseUnsignedRightShiftOperator1() { byte a = 12; int i = 234736486; int j = 6; invokeUnsignedRightShiftOperatorTestFunction(a, i, j); } @Test(description = "Test bitwise unsigned right shift operator 2") public void testBitwiseUnsignedRightShiftOperator2() { byte a = 12; long i = -23445834; long j = 5; invokeUnsignedRightShiftOperatorTestFunction(a, i, j); } @Test(description = "Test bitwise unsigned right shift operator 2") private void invokeUnsignedRightShiftOperatorTestFunction(int a, long i, long j) { BValue[] args = {new BByte(a), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseUnsignedRightShiftOperator", args); Assert.assertEquals(returns.length, 2); BInteger bInteger1 = (BInteger) returns[0]; BInteger bInteger2 = (BInteger) returns[1]; Assert.assertEquals(bInteger1.intValue(), i >>> j, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), i >>> a, "Invalid result"); } @Test(description = "Test bitwise left shift operator 1") public void testBitwiseLeftShiftOperator1() { int a = 23; int b = 6; long i = 4567; long j = 7; 
invokeLeftShiftOperatorTestFunction1(a, b, i, j); invokeLeftShiftOperatorTestFunction2(a, b, i, j); invokeLeftShiftOperatorTestFunction1(0xff, 56, 0xff, 56); invokeLeftShiftOperatorTestFunction2(0xff, 56, 0xff, 56); } @Test(description = "Test bitwise left shift operator 2") public void testBitwiseLeftShiftOperator2() { int a = 228; int b = 6; long i = -45678776; long j = 4; invokeLeftShiftOperatorTestFunction1(a, b, i, j); invokeLeftShiftOperatorTestFunction2(a, b, i, j); invokeLeftShiftOperatorTestFunction1(0xff, 56, 0xff, 56); invokeLeftShiftOperatorTestFunction2(0xff, 56, 0xff, 56); } private void invokeLeftShiftOperatorTestFunction1(int a, int b, long i, long j) { BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseLeftShiftOperator1", args); Assert.assertEquals(returns.length, 3); BInteger bInteger1 = (BInteger) returns[0]; BInteger bInteger2 = (BInteger) returns[1]; BInteger bInteger3 = (BInteger) returns[2]; Assert.assertEquals(bInteger1.intValue(), (long) a << b, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), i << j, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), (long) a << j, "Invalid result"); } private void invokeLeftShiftOperatorTestFunction2(int a, int b, long i, long j) { BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseLeftShiftOperator2", args); Assert.assertEquals(returns.length, 3); BInteger bInteger1 = (BInteger) returns[0]; BInteger bInteger2 = (BInteger) returns[1]; BInteger bInteger3 = (BInteger) returns[2]; Assert.assertEquals(bInteger1.intValue(), (long) a << b, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), i << j, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), (long) a << j, "Invalid result"); } @Test(description = "Test byte shift") public void testByteShift() { int a = 129; int c = (a << 1); int d = (c >> 1); 
BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteShift", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), d, "Invalid byte value returned."); } @Test(description = "Test bitwise Complement operator 1") public void testBitwiseComplementOperator1() { byte a = 34; int b = 234; invokeBitwiseComplementOperatorFunction(a, b); } @Test(description = "Test bitwise Complement operator 2") public void testBitwiseComplementOperator2() { byte a = (byte) 156; int b = -232224; invokeBitwiseComplementOperatorFunction(a, b); } @Test(description = "Test bitwise Complement operator 3") public void testBitwiseComplementOperator3() { byte a = -112; int b = 567849302; invokeBitwiseComplementOperatorFunction(a, b); } private void invokeBitwiseComplementOperatorFunction(byte a, int b) { BValue[] args = {new BByte(a), new BInteger(b)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseNotOperator", args); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BByte.class); Assert.assertSame(returns[1].getClass(), BInteger.class); BByte bByte = (BByte) returns[0]; BInteger bInteger = (BInteger) returns[1]; byte a1 = (byte) ~a; Assert.assertEquals(bByte.value().byteValue(), a1, "Invalid byte value returned."); Assert.assertEquals(bInteger.intValue(), ~b, "Invalid int value returned."); } @Test(description = "Test bitwise operator precedence 1") public void testBitwiseOperatorPrecedence1() { byte a = 127; byte b = 4; byte c = 5; byte expected = (byte) (~a & b >> c); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, "testBitwiseOperatorPrecedence1", expected); } @Test(description = "Test bitwise operator precedence 2") public void testBitwiseOperatorPrecedence2() { byte a = (byte) 233; byte b = 23; byte c = 3; byte expected = (byte) (~a & b >> c); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, 
"testBitwiseOperatorPrecedence1", expected); } @Test(description = "Test bitwise operator precedence 3") public void testBitwiseOperatorPrecedence3() { byte a = 23; byte b = 4; byte c = 5; byte expected = (byte) (b & ~a >> c); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, "testBitwiseOperatorPrecedence2", expected); } @Test(description = "Test bitwise operator precedence 4") public void testBitwiseOperatorPrecedence4() { byte a = (byte) 233; byte b = 23; byte c = 3; byte expected = (byte) (b & ~a >> c); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, "testBitwiseOperatorPrecedence2", expected); } @Test(description = "Test bitwise operator precedence 5") public void testBitwiseOperatorPrecedence5() { byte a = 23; byte b = 4; byte c = 5; byte expected = (byte) (b >> c & ~a); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, "testBitwiseOperatorPrecedence3", expected); } @Test(description = "Test bitwise operator precedence 6") public void testBitwiseOperatorPrecedence6() { byte a = (byte) 233; byte b = 23; byte c = 3; byte expected = (byte) (b >> c & ~a); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, "testBitwiseOperatorPrecedence3", expected); } @Test(description = "Test bitwise operator precedence 7") public void testBitwiseOperatorPrecedence7() { int a = 3546782; int b = 4; int c = 5; int expected = ~a & b >> c; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence4", expected); } @Test(description = "Test bitwise operator precedence 8") public void testBitwiseOperatorPrecedence8() { int a = -2334353; int b = 23; int c = -3; int expected = ~a & b >> c; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence4", expected); } @Test(description = "Test bitwise operator precedence 9") public void testBitwiseOperatorPrecedence9() { int a = 245623; int b = 4; int c = -5; int expected = b & ~a >> c; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence5", expected); } 
@Test(description = "Test bitwise operator precedence 10") public void testBitwiseOperatorPrecedence10() { int a = -2667533; int b = 23; int c = 3; int expected = b & ~a >> c; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence5", expected); } @Test(description = "Test bitwise operator precedence 11") public void testBitwiseOperatorPrecedence11() { int a = 23; int b = 4; int c = 5; int expected = b >> c & ~a; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence6", expected); } @Test(description = "Test bitwise operator precedence 12") public void testBitwiseOperatorPrecedence12() { int a = 23334233; int b = 23; int c = 3; int expected = b >> c & ~a; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence6", expected); } private void invokeBitwisePrecedenceTestFunctionForByte(byte a, byte b, byte c, String functionName, byte expected) { BValue[] args = {new BByte(a), new BByte(b), new BByte(c)}; BValue[] returns = BRunUtil.invoke(result, functionName, args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.value().byteValue(), expected, "Invalid byte value returned."); } private void invokeBitwisePrecedenceTestFunctionForInt(int a, int b, int c, String functionName, int expected) { BValue[] args = {new BInteger(a), new BInteger(b), new BInteger(c)}; BValue[] returns = BRunUtil.invoke(result, functionName, args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), expected, "Invalid byte value returned."); } @Test(description = "Test byte value return as int in lambda") public void testByteReturnAsIntInLambda() { BRunUtil.invoke(result, "testByteReturnAsIntInLambda1"); BRunUtil.invoke(result, "testByteReturnAsIntInLambda2"); BRunUtil.invoke(result, 
"testByteReturnAsIntInLambda3"); } @AfterClass public void tearDown() { result = null; } }
class BByteValueTest { private CompileResult result; @BeforeClass(alwaysRun = true) public void setup() { result = BCompileUtil.compile("test-src/types/byte/byte-value.bal"); } @Test(description = "Test byte value assignment") public void testByteValue() { BValue[] returns = BRunUtil.invoke(result, "testByteValue", new BValue[]{}); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte byteValue = (BByte) returns[0]; Assert.assertEquals(byteValue.byteValue(), 34, "Invalid byte value returned."); } @Test(description = "Test byte value space") public void testByteValueSpace() { BValue[] returns = BRunUtil.invoke(result, "testByteValueSpace", new BValue[]{}); BByte byteValue = (BByte) returns[0]; Assert.assertEquals(byteValue.byteValue(), 234, "Invalid byte value returned."); } @Test(description = "Test byte default value") public void testByteDefaultValue() { BValue[] returns = BRunUtil.invoke(result, "testByteDefaultValue"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte byteValue = (BByte) returns[0]; Assert.assertEquals(byteValue.byteValue(), 0, "Invalid byte value returned."); } @Test(description = "Test byte function parameter") public void testByteParameter() { invokeByteInputFunction("testByteParam"); } @Test(description = "Test global byte value assignment") public void testGlobalByte() { invokeByteInputFunction("testGlobalByte"); } private void invokeByteInputFunction(String functionName) { long input = 34; BValue[] args = {new BByte(input)}; BValue[] returns = BRunUtil.invoke(result, functionName, args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte byteValue = (BByte) returns[0]; Assert.assertEquals(byteValue.byteValue(), input, "Invalid byte value returned."); } @Test(description = "Test byte to integer cast") public void testByteToIntCast() { long input = 12; BValue[] args = {new BByte(input)}; 
BValue[] returns = BRunUtil.invoke(result, "testByteToIntCast", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger intValue = (BInteger) returns[0]; Assert.assertEquals(intValue.intValue(), input, "Invalid integer value returned."); } @Test(description = "Test integer to byte cast") public void testIntToByteCast() { int input = 123; BValue[] args = {new BInteger(input)}; BValue[] returns = BRunUtil.invoke(result, "testIntToByteCast", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.byteValue(), (long) input, "Invalid byte value returned."); } @Test(description = "Test integer to byte explicit cast") public void testIntToByteExplicitCast() { int input = 123; BValue[] args = {new BInteger(input)}; BValue[] returns = BRunUtil.invoke(result, "testIntToByteExplicitCast", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.byteValue(), (long) input, "Invalid byte value returned."); } @Test(description = "Test integer to byte conversion") public void testIntToByteConversion() { int input = 123; BValue[] args = {new BInteger(input)}; BValue[] returns = BRunUtil.invoke(result, "testIntToByteConversion", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.byteValue(), (long) input, "Invalid byte value returned."); } @Test(description = "Test byte to integer conversion") public void testByteToIntConversion() { long input = 45; BValue[] args = {new BByte(input)}; BValue[] returns = BRunUtil.invoke(result, "testByteToIntConversion", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; 
Assert.assertEquals(bInteger.intValue(), input, "Invalid integer value returned."); } @Test(description = "Test byte to int safe conversion") public void testSafeCasting() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testSafeCasting", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), 6, "Invalid integer value returned."); } @Test(description = "Test byte to integer conversion") public void testAnyToByteCasting() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testAnyToByteCasting", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.byteValue(), 45, "Invalid byte value returned."); } @Test(description = "Test byte array value") public void testByteArrayValue() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteArray", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BValueArray.class); } @Test(description = "Test byte array assignment") public void testByteArrayAssignment() { byte input1 = 2; byte input2 = 56; byte input3 = 89; byte input4 = 23; BValueArray bByteArrayIn = new BValueArray(BTypes.typeByte); bByteArrayIn.add(0, input1); bByteArrayIn.add(1, input2); bByteArrayIn.add(2, input3); bByteArrayIn.add(3, input4); BValue[] args = {bByteArrayIn}; BValue[] returns = BRunUtil.invoke(result, "testByteArrayAssignment", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BValueArray.class); BValueArray bByteArrayOut = (BValueArray) returns[0]; Assert.assertEquals(bByteArrayOut.getByte(0), input1); Assert.assertEquals(bByteArrayOut.getByte(1), input2); Assert.assertEquals(bByteArrayOut.getByte(2), input3); Assert.assertEquals(bByteArrayOut.getByte(3), input4); } @Test(description = "Test 
byte array length") public void testByteArrayLength() { invokeArrayLengthFunction("testByteArrayLength", 4); } @Test(description = "Test byte array zero length") public void testByteArrayZeroLength() { invokeArrayLengthFunction("testByteArrayZeroLength", 0); } private void invokeArrayLengthFunction(String functionName, int length) { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, functionName, args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(length, bInteger.intValue(), "Invalid array size"); } @Test(description = "Test byte array size increase") public void testByteArrayIncreaseSize() { invokeArrayLengthFunction("testByteArrayIncreaseSize", 10); } @Test(description = "Test byte array of array") public void testByteArrayOfArray() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteArrayOfArray", args); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BInteger.class); Assert.assertSame(returns[1].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; BInteger bInteger1 = (BInteger) returns[1]; Assert.assertEquals(3, bInteger.intValue(), "Invalid array size"); Assert.assertEquals(4, bInteger1.intValue(), "Invalid array size"); } @Test(description = "Test byte array iteration") public void testByteArrayIteration() { byte input1 = 1; byte input2 = 2; byte input3 = 3; byte input4 = 4; byte input5 = 5; BValueArray bByteArrayIn = new BValueArray(BTypes.typeByte); bByteArrayIn.add(0, input1); bByteArrayIn.add(1, input2); bByteArrayIn.add(2, input3); bByteArrayIn.add(3, input4); bByteArrayIn.add(4, input5); BValue[] args = {bByteArrayIn}; BValue[] returns = BRunUtil.invoke(result, "testByteArrayIteration", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BValueArray.class); BValueArray bByteArrayOut = (BValueArray) returns[0]; 
Assert.assertEquals(bByteArrayOut.getByte(0), input1); Assert.assertEquals(bByteArrayOut.getByte(1), input2); Assert.assertEquals(bByteArrayOut.getByte(2), input3); Assert.assertEquals(bByteArrayOut.getByte(3), input4); Assert.assertEquals(bByteArrayOut.getByte(4), input5); } @Test(description = "Test byte equal operation") public void testByteEqual() { byte b1 = 2; byte b2 = 3; BValue[] args = {new BByte(b1), new BByte(b2), new BByte(b1)}; BValue[] returns = BRunUtil.invoke(result, "testByteBinaryOperation", args); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertSame(returns[1].getClass(), BBoolean.class); BBoolean boolean1 = (BBoolean) returns[0]; BBoolean boolean2 = (BBoolean) returns[1]; Assert.assertFalse(boolean1.booleanValue(), "Invalid result"); Assert.assertTrue(boolean2.booleanValue(), "Invalid result"); } @Test(description = "Test byte not equal operation") public void testByteNotEqual() { byte b1 = 12; byte b2 = 32; BValue[] args = {new BByte(b1), new BByte(b2), new BByte(b1)}; BValue[] returns = BRunUtil.invoke(result, "testByteBinaryNotEqualOperation", args); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BBoolean.class); Assert.assertSame(returns[1].getClass(), BBoolean.class); BBoolean boolean1 = (BBoolean) returns[0]; BBoolean boolean2 = (BBoolean) returns[1]; Assert.assertTrue(boolean1.booleanValue(), "Invalid result"); Assert.assertFalse(boolean2.booleanValue(), "Invalid result"); } @Test public void simpleWorkerMessagePassingTest() { BRunUtil.invoke(result, "testWorkerWithByteVariable", new BValue[0]); } @Test(description = "Test byte to int safe conversion") public void testByteOrIntMatch1() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteOrIntMatch1", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.byteValue(), 
12, "Invalid byte value returned."); } @Test(description = "Test byte to int safe conversion") public void testByteOrIntMatch2() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteOrIntMatch2", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), 266, "Invalid integer value returned."); } @Test(description = "Test byte to int safe conversion") public void testByteOrIntMatch3() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteOrIntMatch3", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), 456, "Invalid byte value returned."); } @Test(description = "Test byte to int safe conversion") public void testByteOrIntMatch4() { BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteOrIntMatch4", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), -123, "Invalid integer value returned."); } @Test(description = "Test bitwise and operator 1") public void testBitwiseAndOperator1() { byte a = 12; byte b = 34; int i = 234; int j = -456; invokeBitwiseAndTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 2") public void testBitwiseAndOperator2() { byte a = -97; byte b = 12; int i = -456832; int j = 34; invokeBitwiseAndTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 3") public void testBitwiseAndOperator3() { byte a = -97; byte b = -23; int i = -456832; int j = -3456; invokeBitwiseAndTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 4") public void testBitwiseAndOperator4() { byte a = (byte) 159; byte b = (byte) 233; int i = -456832; int j = 
-3456; invokeBitwiseAndTestFunction(a, b, i, j); } private void invokeBitwiseAndTestFunction(byte a, byte b, int i, int j) { BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseAndOperator", args); Assert.assertEquals(returns.length, 7); BByte bByte1 = (BByte) returns[0]; BByte bByte2 = (BByte) returns[1]; BByte bByte3 = (BByte) returns[2]; BInteger bInteger1 = (BInteger) returns[3]; BInteger bInteger2 = (BInteger) returns[4]; BInteger bInteger3 = (BInteger) returns[5]; BInteger bInteger4 = (BInteger) returns[6]; Assert.assertEquals(bByte1.value().byteValue(), a & b, "Invalid result"); Assert.assertEquals(bByte2.value().byteValue(), Byte.valueOf("7F", 16) & i, "Invalid result"); Assert.assertEquals(bByte3.value().byteValue(), i & Byte.valueOf("7F", 16), "Invalid result"); Assert.assertEquals(bInteger1.intValue(), Byte.toUnsignedInt(a) & Byte.toUnsignedInt(b), "Invalid result"); Assert.assertEquals(bInteger2.intValue(), Byte.toUnsignedInt(a) & i, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), i & j, "Invalid result"); Assert.assertEquals(bInteger4.intValue(), Byte.toUnsignedInt(a) & i & Byte.toUnsignedInt(b) & j, "Invalid result"); } @Test(description = "Test bitwise and operator 1") public void testBitwiseOrOperator1() { byte a = 12; byte b = 34; int i = 234; int j = -456; testBitwiseOrTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 2") public void testBitwiseOrOperator2() { byte a = -97; byte b = 12; int i = -456832; int j = 34; testBitwiseOrTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 3") public void testBitwiseOrOperator3() { byte a = -97; byte b = -23; int i = -456832; int j = -3456; testBitwiseOrTestFunction(a, b, i, j); } private void testBitwiseOrTestFunction(byte a, byte b, int i, int j) { BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = 
BRunUtil.invoke(result, "testBitwiseOrOperator", args); Assert.assertEquals(returns.length, 5); BByte bByte = (BByte) returns[0]; BInteger bInteger1 = (BInteger) returns[1]; BInteger bInteger2 = (BInteger) returns[2]; BInteger bInteger3 = (BInteger) returns[3]; BInteger bInteger4 = (BInteger) returns[4]; Assert.assertEquals(bByte.value().byteValue(), a | b, "Invalid result"); Assert.assertEquals(bInteger1.intValue(), Byte.toUnsignedInt(a) | Byte.toUnsignedInt(b), "Invalid result"); Assert.assertEquals(bInteger2.intValue(), Byte.toUnsignedInt(a) | i, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), i | j, "Invalid result"); Assert.assertEquals(bInteger4.intValue(), Byte.toUnsignedInt(a) | i | Byte.toUnsignedInt(b) | j, "Invalid result"); } @Test(description = "Test bitwise and operator 1") public void testBitwiseXorOperator1() { byte a = 12; byte b = 34; int i = 234; int j = -456; testBitwiseXorTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 2") public void testBitwiseXorOperator2() { byte a = -97; byte b = 12; int i = -456832; int j = 34; testBitwiseXorTestFunction(a, b, i, j); } @Test(description = "Test bitwise and operator 3") public void testBitwiseXorOperator3() { byte a = -97; byte b = -23; int i = -456832; int j = -3456; testBitwiseXorTestFunction(a, b, i, j); } private void testBitwiseXorTestFunction(byte a, byte b, int i, int j) { BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseXorOperator", args); Assert.assertEquals(returns.length, 5); BByte bByte = (BByte) returns[0]; BInteger bInteger1 = (BInteger) returns[1]; BInteger bInteger2 = (BInteger) returns[2]; BInteger bInteger3 = (BInteger) returns[3]; BInteger bInteger4 = (BInteger) returns[4]; Assert.assertEquals(bByte.value().byteValue(), a ^ b, "Invalid result"); Assert.assertEquals(bInteger1.intValue(), Byte.toUnsignedInt(a) ^ Byte.toUnsignedInt(b), "Invalid result"); 
Assert.assertEquals(bInteger2.intValue(), Byte.toUnsignedInt(a) ^ i, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), i ^ j, "Invalid result"); Assert.assertEquals(bInteger4.intValue(), Byte.toUnsignedInt(a) ^ i ^ Byte.toUnsignedInt(b) ^ j, "Invalid result"); } @Test(description = "Test bitwise right shift operator") public void testBitwiseRightShiftOperator1() { int a = 123; int b = 4; long i = 234; long j = 3; BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseRightShiftOperator1", args); Assert.assertEquals(returns.length, 3); BByte bByte1 = (BByte) returns[0]; BInteger bInteger2 = (BInteger) returns[1]; BByte bByte2 = (BByte) returns[2]; Assert.assertEquals(bByte1.value().intValue(), 7, "Invalid result"); Assert.assertEquals(bByte1.intValue(), 7, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), (i >> j), "Invalid result"); Assert.assertEquals(bByte2.intValue(), 15, "Invalid result"); BValue[] args2 = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; returns = BRunUtil.invoke(result, "testBitwiseRightShiftOperator2", args2); Assert.assertEquals(returns.length, 3); BInteger bInteger1 = (BInteger) returns[0]; bInteger2 = (BInteger) returns[1]; BInteger bInteger3 = (BInteger) returns[2]; Assert.assertEquals(bInteger1.intValue(), 7, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), i >> j, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), 15, "Invalid result"); } @Test(description = "Test bitwise right shift operator") public void testBitwiseRightShiftOperator2() { int a = 228; int b = 6; long i = -45678776; long j = 4; BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseRightShiftOperator1", args); Assert.assertEquals(returns.length, 3); BByte bByte1 = (BByte) returns[0]; BInteger bInteger2 = (BInteger) returns[1]; BByte bByte2 = (BByte) 
returns[2]; Assert.assertEquals(bByte1.value().intValue(), 3, "Invalid result"); Assert.assertEquals(bByte1.intValue(), 3, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), (i >> j), "Invalid result"); Assert.assertEquals(bByte2.intValue(), 14, "Invalid result"); BValue[] args2 = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; returns = BRunUtil.invoke(result, "testBitwiseRightShiftOperator2", args2); Assert.assertEquals(returns.length, 3); BInteger bInteger1 = (BInteger) returns[0]; bInteger2 = (BInteger) returns[1]; BInteger bInteger3 = (BInteger) returns[2]; Assert.assertEquals(bInteger1.intValue(), 3, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), i >> j, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), 14, "Invalid result"); } @Test(description = "Test bitwise unsigned right shift operator 1") public void testBitwiseUnsignedRightShiftOperator1() { byte a = 12; int i = 234736486; int j = 6; invokeUnsignedRightShiftOperatorTestFunction(a, i, j); } @Test(description = "Test bitwise unsigned right shift operator 2") public void testBitwiseUnsignedRightShiftOperator2() { byte a = 12; long i = -23445834; long j = 5; invokeUnsignedRightShiftOperatorTestFunction(a, i, j); } @Test(description = "Test bitwise unsigned right shift operator 2") private void invokeUnsignedRightShiftOperatorTestFunction(int a, long i, long j) { BValue[] args = {new BByte(a), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseUnsignedRightShiftOperator", args); Assert.assertEquals(returns.length, 2); BInteger bInteger1 = (BInteger) returns[0]; BInteger bInteger2 = (BInteger) returns[1]; Assert.assertEquals(bInteger1.intValue(), i >>> j, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), i >>> a, "Invalid result"); } @Test(description = "Test bitwise left shift operator 1") public void testBitwiseLeftShiftOperator1() { int a = 23; int b = 6; long i = 4567; long j = 7; 
invokeLeftShiftOperatorTestFunction1(a, b, i, j); invokeLeftShiftOperatorTestFunction2(a, b, i, j); invokeLeftShiftOperatorTestFunction1(0xff, 56, 0xff, 56); invokeLeftShiftOperatorTestFunction2(0xff, 56, 0xff, 56); } @Test(description = "Test bitwise left shift operator 2") public void testBitwiseLeftShiftOperator2() { int a = 228; int b = 6; long i = -45678776; long j = 4; invokeLeftShiftOperatorTestFunction1(a, b, i, j); invokeLeftShiftOperatorTestFunction2(a, b, i, j); invokeLeftShiftOperatorTestFunction1(0xff, 56, 0xff, 56); invokeLeftShiftOperatorTestFunction2(0xff, 56, 0xff, 56); } private void invokeLeftShiftOperatorTestFunction1(int a, int b, long i, long j) { BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseLeftShiftOperator1", args); Assert.assertEquals(returns.length, 3); BInteger bInteger1 = (BInteger) returns[0]; BInteger bInteger2 = (BInteger) returns[1]; BInteger bInteger3 = (BInteger) returns[2]; Assert.assertEquals(bInteger1.intValue(), (long) a << b, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), i << j, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), (long) a << j, "Invalid result"); } private void invokeLeftShiftOperatorTestFunction2(int a, int b, long i, long j) { BValue[] args = {new BByte(a), new BByte(b), new BInteger(i), new BInteger(j)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseLeftShiftOperator2", args); Assert.assertEquals(returns.length, 3); BInteger bInteger1 = (BInteger) returns[0]; BInteger bInteger2 = (BInteger) returns[1]; BInteger bInteger3 = (BInteger) returns[2]; Assert.assertEquals(bInteger1.intValue(), (long) a << b, "Invalid result"); Assert.assertEquals(bInteger2.intValue(), i << j, "Invalid result"); Assert.assertEquals(bInteger3.intValue(), (long) a << j, "Invalid result"); } @Test(description = "Test byte shift") public void testByteShift() { int a = 129; int c = (a << 1); int d = (c >> 1); 
BValue[] args = {}; BValue[] returns = BRunUtil.invoke(result, "testByteShift", args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), d, "Invalid byte value returned."); } @Test(description = "Test byte shift by large value") public void testByteShiftWithLargeValue() { BRunUtil.invoke(result, "testByteShiftWithLargeValue"); } @Test(description = "Test bitwise Complement operator 1") public void testBitwiseComplementOperator1() { byte a = 34; int b = 234; invokeBitwiseComplementOperatorFunction(a, b); } @Test(description = "Test bitwise Complement operator 2") public void testBitwiseComplementOperator2() { byte a = (byte) 156; int b = -232224; invokeBitwiseComplementOperatorFunction(a, b); } @Test(description = "Test bitwise Complement operator 3") public void testBitwiseComplementOperator3() { byte a = -112; int b = 567849302; invokeBitwiseComplementOperatorFunction(a, b); } private void invokeBitwiseComplementOperatorFunction(byte a, int b) { BValue[] args = {new BByte(a), new BInteger(b)}; BValue[] returns = BRunUtil.invoke(result, "testBitwiseNotOperator", args); Assert.assertEquals(returns.length, 2); Assert.assertSame(returns[0].getClass(), BByte.class); Assert.assertSame(returns[1].getClass(), BInteger.class); BByte bByte = (BByte) returns[0]; BInteger bInteger = (BInteger) returns[1]; byte a1 = (byte) ~a; Assert.assertEquals(bByte.value().byteValue(), a1, "Invalid byte value returned."); Assert.assertEquals(bInteger.intValue(), ~b, "Invalid int value returned."); } @Test(description = "Test bitwise operator precedence 1") public void testBitwiseOperatorPrecedence1() { byte a = 127; byte b = 4; byte c = 5; byte expected = (byte) (~a & b >> c); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, "testBitwiseOperatorPrecedence1", expected); } @Test(description = "Test bitwise operator precedence 2") public void 
testBitwiseOperatorPrecedence2() { byte a = (byte) 233; byte b = 23; byte c = 3; byte expected = (byte) (~a & b >> c); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, "testBitwiseOperatorPrecedence1", expected); } @Test(description = "Test bitwise operator precedence 3") public void testBitwiseOperatorPrecedence3() { byte a = 23; byte b = 4; byte c = 5; byte expected = (byte) (b & ~a >> c); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, "testBitwiseOperatorPrecedence2", expected); } @Test(description = "Test bitwise operator precedence 4") public void testBitwiseOperatorPrecedence4() { byte a = (byte) 233; byte b = 23; byte c = 3; byte expected = (byte) (b & ~a >> c); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, "testBitwiseOperatorPrecedence2", expected); } @Test(description = "Test bitwise operator precedence 5") public void testBitwiseOperatorPrecedence5() { byte a = 23; byte b = 4; byte c = 5; byte expected = (byte) (b >> c & ~a); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, "testBitwiseOperatorPrecedence3", expected); } @Test(description = "Test bitwise operator precedence 6") public void testBitwiseOperatorPrecedence6() { byte a = (byte) 233; byte b = 23; byte c = 3; byte expected = (byte) (b >> c & ~a); invokeBitwisePrecedenceTestFunctionForByte(a, b, c, "testBitwiseOperatorPrecedence3", expected); } @Test(description = "Test bitwise operator precedence 7") public void testBitwiseOperatorPrecedence7() { int a = 3546782; int b = 4; int c = 5; int expected = ~a & b >> c; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence4", expected); } @Test(description = "Test bitwise operator precedence 8") public void testBitwiseOperatorPrecedence8() { int a = -2334353; int b = 23; int c = -3; int expected = ~a & b >> c; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence4", expected); } @Test(description = "Test bitwise operator precedence 9") public void testBitwiseOperatorPrecedence9() { 
int a = 245623; int b = 4; int c = -5; int expected = b & ~a >> c; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence5", expected); } @Test(description = "Test bitwise operator precedence 10") public void testBitwiseOperatorPrecedence10() { int a = -2667533; int b = 23; int c = 3; int expected = b & ~a >> c; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence5", expected); } @Test(description = "Test bitwise operator precedence 11") public void testBitwiseOperatorPrecedence11() { int a = 23; int b = 4; int c = 5; int expected = b >> c & ~a; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence6", expected); } @Test(description = "Test bitwise operator precedence 12") public void testBitwiseOperatorPrecedence12() { int a = 23334233; int b = 23; int c = 3; int expected = b >> c & ~a; invokeBitwisePrecedenceTestFunctionForInt(a, b, c, "testBitwiseOperatorPrecedence6", expected); } private void invokeBitwisePrecedenceTestFunctionForByte(byte a, byte b, byte c, String functionName, byte expected) { BValue[] args = {new BByte(a), new BByte(b), new BByte(c)}; BValue[] returns = BRunUtil.invoke(result, functionName, args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BByte.class); BByte bByte = (BByte) returns[0]; Assert.assertEquals(bByte.value().byteValue(), expected, "Invalid byte value returned."); } private void invokeBitwisePrecedenceTestFunctionForInt(int a, int b, int c, String functionName, int expected) { BValue[] args = {new BInteger(a), new BInteger(b), new BInteger(c)}; BValue[] returns = BRunUtil.invoke(result, functionName, args); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); BInteger bInteger = (BInteger) returns[0]; Assert.assertEquals(bInteger.intValue(), expected, "Invalid byte value returned."); } @Test(description = "Test byte value return as int in lambda") public void 
testByteReturnAsIntInLambda() { BRunUtil.invoke(result, "testByteReturnAsIntInLambda1"); BRunUtil.invoke(result, "testByteReturnAsIntInLambda2"); BRunUtil.invoke(result, "testByteReturnAsIntInLambda3"); } @AfterClass public void tearDown() { result = null; } }
I saw significantly lower system cpu when hardcoding it yesterday. However I did a lot of different tests then, and I am not putting more into it than it is a dimension that should be tested further. And for that to happen I need config control over it.
public Connection(TransportThread parent, Supervisor owner, Spec spec, Object context, boolean tcpNoDelay) { super(context); this.parent = parent; this.owner = owner; this.spec = spec; this.tcpNoDelay = tcpNoDelay; server = false; owner.sessionInit(this); }
this.tcpNoDelay = tcpNoDelay;
public Connection(TransportThread parent, Supervisor owner, Spec spec, Object context, boolean tcpNoDelay) { super(context); this.parent = parent; this.owner = owner; this.spec = spec; this.tcpNoDelay = tcpNoDelay; server = false; owner.sessionInit(this); }
class Connection extends Target { private static final Logger log = Logger.getLogger(Connection.class.getName()); private static final int READ_SIZE = 32768; private static final int READ_REDO = 10; private static final int WRITE_SIZE = 32768; private static final int WRITE_REDO = 10; private static final int INITIAL = 0; private static final int CONNECTING = 1; private static final int CONNECTED = 2; private static final int CLOSED = 3; private int state = INITIAL; private final Queue queue = new Queue(); private final Queue myQueue = new Queue(); private final Buffer input = new Buffer(READ_SIZE * 2); private final Buffer output = new Buffer(WRITE_SIZE * 2); private int maxInputSize = 64*1024; private int maxOutputSize = 64*1024; private final boolean tcpNoDelay; private final Map<Integer, ReplyHandler> replyMap = new HashMap<>(); private final Map<TargetWatcher, TargetWatcher> watchers = new IdentityHashMap<>(); private int activeReqs = 0; private int writeWork = 0; private boolean pendingHandshakeWork = false; private final TransportThread parent; private final Supervisor owner; private final Spec spec; private CryptoSocket socket; private int readSize = READ_SIZE; private final boolean server; private final AtomicLong requestId = new AtomicLong(0); private SelectionKey selectionKey; private Exception lostReason = null; private void setState(int state) { if (state <= this.state) { log.log(Level.WARNING, "Bogus state transition: " + this.state + "->" + state); return; } boolean live = (state == CONNECTED); boolean down = (state == CLOSED); boolean fini; boolean pendingWrite; synchronized (this) { this.state = state; fini = down && (activeReqs == 0); pendingWrite = (writeWork > 0); } if (live) { enableRead(); if (pendingWrite) { enableWrite(); } else { disableWrite(); } owner.sessionLive(this); } if (down) { for (ReplyHandler rh : replyMap.values()) { rh.handleConnectionDown(); } for (TargetWatcher watcher : watchers.values()) { watcher.notifyTargetInvalid(this); 
} owner.sessionDown(this); } if (fini) { owner.sessionFini(this); } } public Connection(TransportThread parent, Supervisor owner, SocketChannel channel, boolean tcpNoDelay) { this.parent = parent; this.owner = owner; this.socket = parent.transport().createServerCryptoSocket(channel); this.spec = null; this.tcpNoDelay = tcpNoDelay; server = true; owner.sessionInit(this); } public void setMaxInputSize(int bytes) { maxInputSize = bytes; } public void setMaxOutputSize(int bytes) { maxOutputSize = bytes; } public TransportThread transportThread() { return parent; } public int allocateKey() { long v = requestId.getAndIncrement(); v = v*2 + (server ? 1 : 0); int i = (int)(v & 0x7fffffff); return i; } public synchronized boolean cancelReply(ReplyHandler handler) { if (state == CLOSED) { return false; } ReplyHandler stored = replyMap.remove(handler.key()); if (stored != handler) { if (stored != null) { replyMap.put(handler.key(), stored); } return false; } return true; } public boolean postPacket(Packet packet, ReplyHandler handler) { boolean accepted = false; boolean enableWrite = false; synchronized (this) { if (state <= CONNECTED) { enableWrite = (writeWork == 0 && state == CONNECTED); queue.enqueue(packet); writeWork++; accepted = true; if (handler != null) { replyMap.put(handler.key(), handler); } } } if (enableWrite) { parent.enableWrite(this); } return accepted; } public boolean postPacket(Packet packet) { return postPacket(packet, null); } public Connection connect() { if (spec == null || spec.malformed()) { setLostReason(new IllegalArgumentException("jrt: malformed or missing spec")); return this; } try { socket = parent.transport().createClientCryptoSocket(SocketChannel.open(spec.resolveAddress()), spec); } catch (Exception e) { setLostReason(e); } return this; } public boolean init(Selector selector) { if (!hasSocket()) { return false; } try { socket.channel().configureBlocking(false); socket.channel().socket().setTcpNoDelay(tcpNoDelay); selectionKey = 
socket.channel().register(selector, SelectionKey.OP_READ | SelectionKey.OP_WRITE, this); } catch (Exception e) { log.log(Level.WARNING, "Error initializing connection", e); setLostReason(e); return false; } setState(CONNECTING); return true; } public void enableRead() { selectionKey.interestOps(selectionKey.interestOps() | SelectionKey.OP_READ); } public void disableRead() { selectionKey.interestOps(selectionKey.interestOps() & ~SelectionKey.OP_READ); } public void enableWrite() { selectionKey.interestOps(selectionKey.interestOps() | SelectionKey.OP_WRITE); } public void disableWrite() { selectionKey.interestOps(selectionKey.interestOps() & ~SelectionKey.OP_WRITE); } private void handshake() throws IOException { if (pendingHandshakeWork) { return; } switch (socket.handshake()) { case DONE: if (socket.getMinimumReadBufferSize() > readSize) { readSize = socket.getMinimumReadBufferSize(); } setState(CONNECTED); while (socket.drain(input.getChannelWritable(readSize)) > 0) { handlePackets(); } break; case NEED_READ: enableRead(); disableWrite(); break; case NEED_WRITE: disableRead(); enableWrite(); break; case NEED_WORK: disableRead(); disableWrite(); pendingHandshakeWork = true; parent.transport().doHandshakeWork(this); break; } } public void doHandshakeWork() { socket.doHandshakeWork(); } public void handleHandshakeWorkDone() throws IOException { if (!pendingHandshakeWork) { throw new IllegalStateException("jrt: got unwanted handshake work done event"); } pendingHandshakeWork = false; if (state == CONNECTING) { handshake(); } else { throw new IOException("jrt: got handshake work done event in incompatible state: " + state); } } private void handlePackets() throws IOException { ByteBuffer rb = input.getReadable(); while (true) { PacketInfo info = PacketInfo.getPacketInfo(rb); if (info == null || info.packetLength() > rb.remaining()) { break; } owner.readPacket(info); Packet packet; try { packet = info.decodePacket(rb); } catch (RuntimeException e) { 
log.log(Level.WARNING, "got garbage; closing connection: " + toString()); throw new IOException("jrt: decode error", e); } ReplyHandler handler; synchronized (this) { handler = replyMap.remove(packet.requestId()); } if (handler != null) { handler.handleReply(packet); } else { owner.handlePacket(this, packet); } } } private void read() throws IOException { boolean doneRead = false; for (int i = 0; !doneRead && i < READ_REDO; i++) { ByteBuffer wb = input.getChannelWritable(readSize); if (socket.read(wb) == -1) { throw new IOException("jrt: Connection closed by peer"); } doneRead = (wb.remaining() > 0); handlePackets(); } while (socket.drain(input.getChannelWritable(readSize)) > 0) { handlePackets(); } if (maxInputSize > 0) { input.shrink(maxInputSize); } } public void handleReadEvent() throws IOException { if (state == CONNECTED) { read(); } else if (state == CONNECTING) { handshake(); } else { throw new IOException("jrt: got read event in incompatible state: " + state); } } private void write() throws IOException { synchronized (this) { queue.flush(myQueue); } for (int i = 0; i < WRITE_REDO; i++) { while (output.bytes() < WRITE_SIZE) { Packet packet = (Packet) myQueue.dequeue(); if (packet == null) { break; } PacketInfo info = packet.getPacketInfo(); ByteBuffer wb = output.getWritable(info.packetLength()); owner.writePacket(info); info.encodePacket(packet, wb); } ByteBuffer rb = output.getChannelReadable(); if (rb.remaining() == 0) { break; } socket.write(rb); if (rb.remaining() > 0) { break; } } int myWriteWork = 0; if (output.bytes() > 0) { myWriteWork++; } if (socket.flush() == CryptoSocket.FlushResult.NEED_WRITE) { myWriteWork++; } boolean disableWrite; synchronized (this) { writeWork = queue.size() + myQueue.size() + myWriteWork; disableWrite = (writeWork == 0); } if (disableWrite) { disableWrite(); } if (maxOutputSize > 0) { output.shrink(maxOutputSize); } } public void handleWriteEvent() throws IOException { if (state == CONNECTED) { write(); } else if (state 
== CONNECTING) { handshake(); } else { throw new IOException("jrt: got write event in incompatible state: " + state); } } public void fini() { setState(CLOSED); if (selectionKey != null) { selectionKey.cancel(); } } public boolean isClosed() { return (state == CLOSED); } public boolean hasSocket() { return ((socket != null) && (socket.channel() != null)); } public void closeSocket() { if (hasSocket()) { try { socket.channel().socket().close(); } catch (Exception e) { log.log(Level.WARNING, "Error closing connection", e); } } } public void setLostReason(Exception e) { if (lostReason == null) { lostReason = e; } } public TieBreaker startRequest() { synchronized (this) { activeReqs++; } return new TieBreaker(); } public boolean completeRequest(TieBreaker done) { boolean signalFini = false; synchronized (this) { if (!done.first()) { return false; } if (--activeReqs == 0 && state == CLOSED) { signalFini = true; } } if (signalFini) { owner.sessionFini(this); } return true; } public boolean isValid() { return (state != CLOSED); } public Exception getConnectionLostReason() { return lostReason; } @Override public Optional<SecurityContext> getSecurityContext() { return Optional.ofNullable(socket) .flatMap(CryptoSocket::getSecurityContext); } public boolean isClient() { return !server; } public boolean isServer() { return server; } public void invokeSync(Request req, double timeout) { SingleRequestWaiter waiter = new SingleRequestWaiter(); invokeAsync(req, timeout, waiter); waiter.waitDone(); } public void invokeAsync(Request req, double timeout, RequestWaiter waiter) { if (timeout < 0.0) { timeout = 0.0; } new InvocationClient(this, req, timeout, waiter).invoke(); } public boolean invokeVoid(Request req) { return postPacket(new RequestPacket(Packet.FLAG_NOREPLY, allocateKey(), req.methodName(), req.parameters())); } public synchronized boolean addWatcher(TargetWatcher watcher) { if (state == CLOSED) { return false; } watchers.put(watcher, watcher); return true; } public 
synchronized boolean removeWatcher(TargetWatcher watcher) { if (state == CLOSED) { return false; } watchers.remove(watcher); return true; } public void close() { parent.closeConnection(this); } public String toString() { if (hasSocket()) { return "Connection { " + socket.channel().socket() + " }"; } return "Connection { no socket, spec " + spec + " }"; } }
class Connection extends Target { private static final Logger log = Logger.getLogger(Connection.class.getName()); private static final int READ_SIZE = 32768; private static final int READ_REDO = 10; private static final int WRITE_SIZE = 32768; private static final int WRITE_REDO = 10; private static final int INITIAL = 0; private static final int CONNECTING = 1; private static final int CONNECTED = 2; private static final int CLOSED = 3; private int state = INITIAL; private final Queue queue = new Queue(); private final Queue myQueue = new Queue(); private final Buffer input = new Buffer(READ_SIZE * 2); private final Buffer output = new Buffer(WRITE_SIZE * 2); private int maxInputSize = 64*1024; private int maxOutputSize = 64*1024; private final boolean tcpNoDelay; private final Map<Integer, ReplyHandler> replyMap = new HashMap<>(); private final Map<TargetWatcher, TargetWatcher> watchers = new IdentityHashMap<>(); private int activeReqs = 0; private int writeWork = 0; private boolean pendingHandshakeWork = false; private final TransportThread parent; private final Supervisor owner; private final Spec spec; private CryptoSocket socket; private int readSize = READ_SIZE; private final boolean server; private final AtomicLong requestId = new AtomicLong(0); private SelectionKey selectionKey; private Exception lostReason = null; private void setState(int state) { if (state <= this.state) { log.log(Level.WARNING, "Bogus state transition: " + this.state + "->" + state); return; } boolean live = (state == CONNECTED); boolean down = (state == CLOSED); boolean fini; boolean pendingWrite; synchronized (this) { this.state = state; fini = down && (activeReqs == 0); pendingWrite = (writeWork > 0); } if (live) { enableRead(); if (pendingWrite) { enableWrite(); } else { disableWrite(); } owner.sessionLive(this); } if (down) { for (ReplyHandler rh : replyMap.values()) { rh.handleConnectionDown(); } for (TargetWatcher watcher : watchers.values()) { watcher.notifyTargetInvalid(this); 
} owner.sessionDown(this); } if (fini) { owner.sessionFini(this); } } public Connection(TransportThread parent, Supervisor owner, SocketChannel channel, boolean tcpNoDelay) { this.parent = parent; this.owner = owner; this.socket = parent.transport().createServerCryptoSocket(channel); this.spec = null; this.tcpNoDelay = tcpNoDelay; server = true; owner.sessionInit(this); } public void setMaxInputSize(int bytes) { maxInputSize = bytes; } public void setMaxOutputSize(int bytes) { maxOutputSize = bytes; } public TransportThread transportThread() { return parent; } public int allocateKey() { long v = requestId.getAndIncrement(); v = v*2 + (server ? 1 : 0); int i = (int)(v & 0x7fffffff); return i; } public synchronized boolean cancelReply(ReplyHandler handler) { if (state == CLOSED) { return false; } ReplyHandler stored = replyMap.remove(handler.key()); if (stored != handler) { if (stored != null) { replyMap.put(handler.key(), stored); } return false; } return true; } public boolean postPacket(Packet packet, ReplyHandler handler) { boolean accepted = false; boolean enableWrite = false; synchronized (this) { if (state <= CONNECTED) { enableWrite = (writeWork == 0 && state == CONNECTED); queue.enqueue(packet); writeWork++; accepted = true; if (handler != null) { replyMap.put(handler.key(), handler); } } } if (enableWrite) { parent.enableWrite(this); } return accepted; } public boolean postPacket(Packet packet) { return postPacket(packet, null); } public Connection connect() { if (spec == null || spec.malformed()) { setLostReason(new IllegalArgumentException("jrt: malformed or missing spec")); return this; } try { socket = parent.transport().createClientCryptoSocket(SocketChannel.open(spec.resolveAddress()), spec); } catch (Exception e) { setLostReason(e); } return this; } public boolean init(Selector selector) { if (!hasSocket()) { return false; } try { socket.channel().configureBlocking(false); socket.channel().socket().setTcpNoDelay(tcpNoDelay); selectionKey = 
socket.channel().register(selector, SelectionKey.OP_READ | SelectionKey.OP_WRITE, this); } catch (Exception e) { log.log(Level.WARNING, "Error initializing connection", e); setLostReason(e); return false; } setState(CONNECTING); return true; } public void enableRead() { selectionKey.interestOps(selectionKey.interestOps() | SelectionKey.OP_READ); } public void disableRead() { selectionKey.interestOps(selectionKey.interestOps() & ~SelectionKey.OP_READ); } public void enableWrite() { selectionKey.interestOps(selectionKey.interestOps() | SelectionKey.OP_WRITE); } public void disableWrite() { selectionKey.interestOps(selectionKey.interestOps() & ~SelectionKey.OP_WRITE); } private void handshake() throws IOException { if (pendingHandshakeWork) { return; } switch (socket.handshake()) { case DONE: if (socket.getMinimumReadBufferSize() > readSize) { readSize = socket.getMinimumReadBufferSize(); } setState(CONNECTED); while (socket.drain(input.getChannelWritable(readSize)) > 0) { handlePackets(); } break; case NEED_READ: enableRead(); disableWrite(); break; case NEED_WRITE: disableRead(); enableWrite(); break; case NEED_WORK: disableRead(); disableWrite(); pendingHandshakeWork = true; parent.transport().doHandshakeWork(this); break; } } public void doHandshakeWork() { socket.doHandshakeWork(); } public void handleHandshakeWorkDone() throws IOException { if (!pendingHandshakeWork) { throw new IllegalStateException("jrt: got unwanted handshake work done event"); } pendingHandshakeWork = false; if (state == CONNECTING) { handshake(); } else { throw new IOException("jrt: got handshake work done event in incompatible state: " + state); } } private void handlePackets() throws IOException { ByteBuffer rb = input.getReadable(); while (true) { PacketInfo info = PacketInfo.getPacketInfo(rb); if (info == null || info.packetLength() > rb.remaining()) { break; } owner.readPacket(info); Packet packet; try { packet = info.decodePacket(rb); } catch (RuntimeException e) { 
log.log(Level.WARNING, "got garbage; closing connection: " + toString()); throw new IOException("jrt: decode error", e); } ReplyHandler handler; synchronized (this) { handler = replyMap.remove(packet.requestId()); } if (handler != null) { handler.handleReply(packet); } else { owner.handlePacket(this, packet); } } } private void read() throws IOException { boolean doneRead = false; for (int i = 0; !doneRead && i < READ_REDO; i++) { ByteBuffer wb = input.getChannelWritable(readSize); if (socket.read(wb) == -1) { throw new IOException("jrt: Connection closed by peer"); } doneRead = (wb.remaining() > 0); handlePackets(); } while (socket.drain(input.getChannelWritable(readSize)) > 0) { handlePackets(); } if (maxInputSize > 0) { input.shrink(maxInputSize); } } public void handleReadEvent() throws IOException { if (state == CONNECTED) { read(); } else if (state == CONNECTING) { handshake(); } else { throw new IOException("jrt: got read event in incompatible state: " + state); } } private void write() throws IOException { synchronized (this) { queue.flush(myQueue); } for (int i = 0; i < WRITE_REDO; i++) { while (output.bytes() < WRITE_SIZE) { Packet packet = (Packet) myQueue.dequeue(); if (packet == null) { break; } PacketInfo info = packet.getPacketInfo(); ByteBuffer wb = output.getWritable(info.packetLength()); owner.writePacket(info); info.encodePacket(packet, wb); } ByteBuffer rb = output.getChannelReadable(); if (rb.remaining() == 0) { break; } socket.write(rb); if (rb.remaining() > 0) { break; } } int myWriteWork = 0; if (output.bytes() > 0) { myWriteWork++; } if (socket.flush() == CryptoSocket.FlushResult.NEED_WRITE) { myWriteWork++; } boolean disableWrite; synchronized (this) { writeWork = queue.size() + myQueue.size() + myWriteWork; disableWrite = (writeWork == 0); } if (disableWrite) { disableWrite(); } if (maxOutputSize > 0) { output.shrink(maxOutputSize); } } public void handleWriteEvent() throws IOException { if (state == CONNECTED) { write(); } else if (state 
== CONNECTING) { handshake(); } else { throw new IOException("jrt: got write event in incompatible state: " + state); } } public void fini() { setState(CLOSED); if (selectionKey != null) { selectionKey.cancel(); } } public boolean isClosed() { return (state == CLOSED); } public boolean hasSocket() { return ((socket != null) && (socket.channel() != null)); } public void closeSocket() { if (hasSocket()) { try { socket.channel().socket().close(); } catch (Exception e) { log.log(Level.WARNING, "Error closing connection", e); } } } public void setLostReason(Exception e) { if (lostReason == null) { lostReason = e; } } public TieBreaker startRequest() { synchronized (this) { activeReqs++; } return new TieBreaker(); } public boolean completeRequest(TieBreaker done) { boolean signalFini = false; synchronized (this) { if (!done.first()) { return false; } if (--activeReqs == 0 && state == CLOSED) { signalFini = true; } } if (signalFini) { owner.sessionFini(this); } return true; } public boolean isValid() { return (state != CLOSED); } public Exception getConnectionLostReason() { return lostReason; } @Override public Optional<SecurityContext> getSecurityContext() { return Optional.ofNullable(socket) .flatMap(CryptoSocket::getSecurityContext); } public boolean isClient() { return !server; } public boolean isServer() { return server; } public void invokeSync(Request req, double timeout) { SingleRequestWaiter waiter = new SingleRequestWaiter(); invokeAsync(req, timeout, waiter); waiter.waitDone(); } public void invokeAsync(Request req, double timeout, RequestWaiter waiter) { if (timeout < 0.0) { timeout = 0.0; } new InvocationClient(this, req, timeout, waiter).invoke(); } public boolean invokeVoid(Request req) { return postPacket(new RequestPacket(Packet.FLAG_NOREPLY, allocateKey(), req.methodName(), req.parameters())); } public synchronized boolean addWatcher(TargetWatcher watcher) { if (state == CLOSED) { return false; } watchers.put(watcher, watcher); return true; } public 
synchronized boolean removeWatcher(TargetWatcher watcher) { if (state == CLOSED) { return false; } watchers.remove(watcher); return true; } public void close() { parent.closeConnection(this); } public String toString() { if (hasSocket()) { return "Connection { " + socket.channel().socket() + " }"; } return "Connection { no socket, spec " + spec + " }"; } }
I think this is subjective. Also, I will be adding more false statements to the method for this change. Think code is obvious anyway. I prefer to keep the code as it is if there is no objection.
private boolean isReadonlyType(BType sourceType) { if (isValueType(sourceType)) { return true; } switch (sourceType.tag) { case TypeTags.NIL: case TypeTags.ERROR: case TypeTags.INVOKABLE: case TypeTags.SERVICE: case TypeTags.TYPEDESC: case TypeTags.HANDLE: return true; } return false; }
return true;
private boolean isReadonlyType(BType sourceType) { if (isValueType(sourceType)) { return true; } switch (sourceType.tag) { case TypeTags.NIL: case TypeTags.ERROR: case TypeTags.INVOKABLE: case TypeTags.SERVICE: case TypeTags.TYPEDESC: case TypeTags.HANDLE: return true; } return false; }
class Types { private static final CompilerContext.Key<Types> TYPES_KEY = new CompilerContext.Key<>(); private SymbolTable symTable; private SymbolResolver symResolver; private BLangDiagnosticLogHelper dlogHelper; private Names names; private int finiteTypeCount = 0; private BUnionType expandedXMLBuiltinSubtypes; public static Types getInstance(CompilerContext context) { Types types = context.get(TYPES_KEY); if (types == null) { types = new Types(context); } return types; } public Types(CompilerContext context) { context.put(TYPES_KEY, this); this.symTable = SymbolTable.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.dlogHelper = BLangDiagnosticLogHelper.getInstance(context); this.names = Names.getInstance(context); this.expandedXMLBuiltinSubtypes = BUnionType.create(null, symTable.xmlElementType, symTable.xmlCommentType, symTable.xmlPIType, symTable.xmlTextType); } public List<BType> checkTypes(BLangExpression node, List<BType> actualTypes, List<BType> expTypes) { List<BType> resTypes = new ArrayList<>(); for (int i = 0; i < actualTypes.size(); i++) { resTypes.add(checkType(node, actualTypes.get(i), expTypes.size() > i ? 
expTypes.get(i) : symTable.noType)); } return resTypes; } public BType checkType(BLangExpression node, BType actualType, BType expType) { return checkType(node, actualType, expType, DiagnosticCode.INCOMPATIBLE_TYPES); } public BType checkType(BLangExpression expr, BType actualType, BType expType, DiagnosticCode diagCode) { expr.type = checkType(expr.pos, actualType, expType, diagCode); if (expr.type.tag == TypeTags.SEMANTIC_ERROR) { return expr.type; } setImplicitCastExpr(expr, actualType, expType); return expr.type; } public BType checkType(DiagnosticPos pos, BType actualType, BType expType, DiagnosticCode diagCode) { if (expType.tag == TypeTags.SEMANTIC_ERROR) { return expType; } else if (expType.tag == TypeTags.NONE) { return actualType; } else if (actualType.tag == TypeTags.SEMANTIC_ERROR) { return actualType; } else if (isAssignable(actualType, expType)) { return actualType; } dlogHelper.error(pos, diagCode, expType, actualType); return symTable.semanticError; } public boolean isJSONContext(BType type) { if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().stream().anyMatch(memType -> memType.tag == TypeTags.JSON); } return type.tag == TypeTags.JSON; } public boolean isLax(BType type) { switch (type.tag) { case TypeTags.JSON: case TypeTags.XML: case TypeTags.XML_ELEMENT: return true; case TypeTags.MAP: return isLax(((BMapType) type).constraint); case TypeTags.UNION: return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isLax); } return false; } public boolean isSameType(BType source, BType target) { return isSameType(source, target, new HashSet<>()); } private boolean isSameType(BType source, BType target, Set<TypePair> unresolvedTypes) { TypePair pair = new TypePair(source, target); if (unresolvedTypes.contains(pair)) { return true; } unresolvedTypes.add(pair); BTypeVisitor<BType, Boolean> sameTypeVisitor = new BSameTypeVisitor(unresolvedTypes); return target.accept(sameTypeVisitor, source); } public boolean 
isValueType(BType type) { switch (type.tag) { case TypeTags.BOOLEAN: case TypeTags.BYTE: case TypeTags.DECIMAL: case TypeTags.FLOAT: case TypeTags.INT: case TypeTags.STRING: case TypeTags.SIGNED32_INT: case TypeTags.SIGNED16_INT: case TypeTags.SIGNED8_INT: case TypeTags.UNSIGNED32_INT: case TypeTags.UNSIGNED16_INT: case TypeTags.UNSIGNED8_INT: case TypeTags.CHAR_STRING: return true; default: return false; } } boolean isBasicNumericType(BType type) { return type.tag < TypeTags.STRING || TypeTags.isIntegerTypeTag(type.tag); } boolean finiteTypeContainsNumericTypeValues(BFiniteType finiteType) { return finiteType.getValueSpace().stream().anyMatch(valueExpr -> isBasicNumericType(valueExpr.type)); } public boolean containsErrorType(BType type) { if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().stream() .anyMatch(this::containsErrorType); } return type.tag == TypeTags.ERROR; } public boolean isSubTypeOfList(BType type) { if (type.tag != TypeTags.UNION) { return isSubTypeOfBaseType(type, TypeTags.ARRAY) || isSubTypeOfBaseType(type, TypeTags.TUPLE); } return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfList); } public boolean isSubTypeOfMapping(BType type) { if (type.tag != TypeTags.UNION) { return isSubTypeOfBaseType(type, TypeTags.MAP) || isSubTypeOfBaseType(type, TypeTags.RECORD); } return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfMapping); } public boolean isSubTypeOfBaseType(BType type, int baseTypeTag) { if (type.tag != TypeTags.UNION) { return type.tag == baseTypeTag; } if (TypeTags.isXMLTypeTag(baseTypeTag)) { return true; } return ((BUnionType) type).getMemberTypes().stream().allMatch(memType -> memType.tag == baseTypeTag); } /** * Checks whether source type is assignable to the target type. * <p> * Source type is assignable to the target type if, * 1) the target type is any and the source type is not a value type. * 2) there exists an implicit cast symbol from source to target. 
* 3) both types are JSON and the target constraint is no type. * 4) both types are array type and both array types are assignable. * 5) both types are MAP and the target constraint is any type or constraints are structurally equivalent. * * @param source type. * @param target type. * @return true if source type is assignable to the target type. */ public boolean isAssignable(BType source, BType target) { return isAssignable(source, target, new HashSet<>()); } boolean isStampingAllowed(BType source, BType target) { return (isAssignable(source, target) || isAssignable(target, source) || checkTypeEquivalencyForStamping(source, target) || checkTypeEquivalencyForStamping(target, source)); } private boolean checkTypeEquivalencyForStamping(BType source, BType target) { if (target.tag == TypeTags.RECORD) { if (source.tag == TypeTags.RECORD) { TypePair pair = new TypePair(source, target); Set<TypePair> unresolvedTypes = new HashSet<>(); unresolvedTypes.add(pair); return checkRecordEquivalencyForStamping((BRecordType) source, (BRecordType) target, unresolvedTypes); } else if (source.tag == TypeTags.MAP) { int mapConstraintTypeTag = ((BMapType) source).constraint.tag; if ((!(mapConstraintTypeTag == TypeTags.ANY || mapConstraintTypeTag == TypeTags.ANYDATA)) && ((BRecordType) target).sealed) { for (BField field : ((BStructureType) target).getFields()) { if (field.getType().tag != mapConstraintTypeTag) { return false; } } } return true; } } else if (target.tag == TypeTags.JSON) { return source.tag == TypeTags.JSON || source.tag == TypeTags.RECORD || source.tag == TypeTags.MAP; } else if (target.tag == TypeTags.MAP) { if (source.tag == TypeTags.MAP) { return isStampingAllowed(((BMapType) source).getConstraint(), ((BMapType) target).getConstraint()); } else if (source.tag == TypeTags.UNION) { return checkUnionEquivalencyForStamping(source, target); } } else if (target.tag == TypeTags.ARRAY) { if (source.tag == TypeTags.JSON) { return true; } else if (source.tag == TypeTags.TUPLE) 
{ BType arrayElementType = ((BArrayType) target).eType; for (BType tupleMemberType : ((BTupleType) source).getTupleTypes()) { if (!isStampingAllowed(tupleMemberType, arrayElementType)) { return false; } } return true; } else if (source.tag == TypeTags.ARRAY) { return checkTypeEquivalencyForStamping(((BArrayType) source).eType, ((BArrayType) target).eType); } } else if (target.tag == TypeTags.UNION) { return checkUnionEquivalencyForStamping(source, target); } else if (target.tag == TypeTags.TUPLE && source.tag == TypeTags.TUPLE) { return checkTupleEquivalencyForStamping(source, target); } return false; } private boolean checkRecordEquivalencyForStamping(BRecordType rhsType, BRecordType lhsType, Set<TypePair> unresolvedTypes) { if (Symbols.isFlagOn(lhsType.tsymbol.flags ^ rhsType.tsymbol.flags, Flags.PUBLIC)) { return false; } if (Symbols.isPrivate(lhsType.tsymbol) && rhsType.tsymbol.pkgID != lhsType.tsymbol.pkgID) { return false; } if (lhsType.fields.size() > rhsType.fields.size()) { return false; } if (lhsType.sealed && !rhsType.sealed) { return false; } return checkFieldEquivalencyForStamping(lhsType, rhsType, unresolvedTypes); } private boolean checkFieldEquivalencyForStamping(BStructureType lhsType, BStructureType rhsType, Set<TypePair> unresolvedTypes) { Map<Name, BField> rhsFields = rhsType.fields.stream().collect( Collectors.toMap(BField::getName, field -> field)); for (BField lhsField : lhsType.fields) { BField rhsField = rhsFields.get(lhsField.name); if (rhsField == null || !isStampingAllowed(rhsField.type, lhsField.type)) { return false; } } Map<Name, BField> lhsFields = lhsType.fields.stream().collect( Collectors.toMap(BField::getName, field -> field)); for (BField rhsField : rhsType.fields) { BField lhsField = lhsFields.get(rhsField.name); if (lhsField == null && !isStampingAllowed(rhsField.type, ((BRecordType) lhsType).restFieldType)) { return false; } } return true; } private boolean checkUnionEquivalencyForStamping(BType source, BType target) { 
Set<BType> sourceTypes = new LinkedHashSet<>(); Set<BType> targetTypes = new LinkedHashSet<>(); if (source.tag == TypeTags.UNION) { BUnionType sourceUnionType = (BUnionType) source; sourceTypes.addAll(sourceUnionType.getMemberTypes()); } else { sourceTypes.add(source); } if (target.tag == TypeTags.UNION) { BUnionType targetUnionType = (BUnionType) target; targetTypes.addAll(targetUnionType.getMemberTypes()); } else { targetTypes.add(target); } boolean notAssignable = sourceTypes .stream() .map(s -> targetTypes .stream() .anyMatch(t -> isStampingAllowed(s, t))) .anyMatch(assignable -> !assignable); return !notAssignable; } private boolean checkTupleEquivalencyForStamping(BType source, BType target) { if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) { return false; } BTupleType lhsTupleType = (BTupleType) target; BTupleType rhsTupleType = (BTupleType) source; if (lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) { return false; } for (int i = 0; i < lhsTupleType.tupleTypes.size(); i++) { if (!isStampingAllowed(rhsTupleType.tupleTypes.get(i), lhsTupleType.tupleTypes.get(i))) { return false; } } return true; } private boolean isAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) { if (isSameType(source, target)) { return true; } int sourceTag = source.tag; int targetTag = target.tag; if (sourceTag == TypeTags.BYTE && targetTag == TypeTags.INT) { return true; } if (TypeTags.isXMLTypeTag(sourceTag) && TypeTags.isXMLTypeTag(targetTag)) { return isXMLTypeAssignable(source, target, unresolvedTypes); } if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) { return true; } if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.XML_TEXT) { return true; } if (sourceTag == TypeTags.STRING && targetTag == TypeTags.XML_TEXT) { return true; } if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.STRING) { return true; } if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.CHAR_STRING) { return 
true; } if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ERROR) { return isErrorTypeAssignable((BErrorType) source, (BErrorType) target, unresolvedTypes); } else if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ANY) { return false; } if (sourceTag == TypeTags.NIL && (isNullable(target) || targetTag == TypeTags.JSON)) { return true; } if (targetTag == TypeTags.ANY && !containsErrorType(source) && !isValueType(source)) { return true; } if (targetTag == TypeTags.ANYDATA && !containsErrorType(source) && source.isAnydata()) { return true; } if (targetTag == TypeTags.READONLY && isReadonlyType(source)) { return true; } if (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) { BRecordType recordType = (BRecordType) source; return isAssignableRecordType(recordType, target); } if (targetTag == TypeTags.RECORD && sourceTag == TypeTags.MAP) { return isAssignableMapType((BMapType) source, (BRecordType) target); } if (target.getKind() == TypeKind.SERVICE && source.getKind() == TypeKind.SERVICE) { return true; } if (targetTag == TypeTags.TYPEDESC && sourceTag == TypeTags.TYPEDESC) { return isAssignable(((BTypedescType) source).constraint, (((BTypedescType) target).constraint), unresolvedTypes); } if (targetTag == TypeTags.STREAM && sourceTag == TypeTags.STREAM) { return isAssignable(((BStreamType) source).constraint, ((BStreamType) target).constraint, unresolvedTypes); } if (isBuiltInTypeWidenPossible(source, target) == TypeTestResult.TRUE) { return true; } if (sourceTag == TypeTags.FINITE) { return isFiniteTypeAssignable((BFiniteType) source, target, unresolvedTypes); } if ((targetTag == TypeTags.UNION || sourceTag == TypeTags.UNION) && isAssignableToUnionType(source, target, unresolvedTypes)) { return true; } if (targetTag == TypeTags.JSON) { if (sourceTag == TypeTags.ARRAY) { return isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes); } if (sourceTag == TypeTags.MAP) { return isAssignable(((BMapType) source).constraint, target, 
unresolvedTypes); } if (sourceTag == TypeTags.RECORD) { return isAssignableRecordType((BRecordType) source, target); } } if (targetTag == TypeTags.FUTURE && sourceTag == TypeTags.FUTURE) { if (((BFutureType) target).constraint.tag == TypeTags.NONE) { return true; } return isAssignable(((BFutureType) source).constraint, ((BFutureType) target).constraint, unresolvedTypes); } if (targetTag == TypeTags.MAP && sourceTag == TypeTags.MAP) { if (((BMapType) target).constraint.tag == TypeTags.ANY && ((BMapType) source).constraint.tag != TypeTags.UNION) { return true; } return isAssignable(((BMapType) source).constraint, ((BMapType) target).constraint, unresolvedTypes); } if ((sourceTag == TypeTags.OBJECT || sourceTag == TypeTags.RECORD) && (targetTag == TypeTags.OBJECT || targetTag == TypeTags.RECORD)) { return checkStructEquivalency(source, target, unresolvedTypes); } if (sourceTag == TypeTags.TUPLE && targetTag == TypeTags.ARRAY) { return isTupleTypeAssignableToArrayType((BTupleType) source, (BArrayType) target, unresolvedTypes); } if (sourceTag == TypeTags.ARRAY && targetTag == TypeTags.TUPLE) { return isArrayTypeAssignableToTupleType((BArrayType) source, (BTupleType) target, unresolvedTypes); } if (sourceTag == TypeTags.TUPLE || targetTag == TypeTags.TUPLE) { return isTupleTypeAssignable(source, target, unresolvedTypes); } if (sourceTag == TypeTags.INVOKABLE && targetTag == TypeTags.INVOKABLE) { return isFunctionTypeAssignable((BInvokableType) source, (BInvokableType) target, new HashSet<>()); } return sourceTag == TypeTags.ARRAY && targetTag == TypeTags.ARRAY && isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes); } private boolean isAssignableRecordType(BRecordType recordType, BType type) { BType targetType; switch (type.tag) { case TypeTags.MAP: targetType = ((BMapType) type).constraint; break; case TypeTags.JSON: targetType = type; break; default: throw new IllegalArgumentException("Incompatible target type: " + type.toString()); } return 
recordFieldsAssignableToType(recordType, targetType); } private boolean recordFieldsAssignableToType(BRecordType recordType, BType targetType) { for (BField field : recordType.fields) { if (!isAssignable(field.type, targetType)) { return false; } } if (!recordType.sealed) { return isAssignable(recordType.restFieldType, targetType); } return true; } private boolean isAssignableMapType(BMapType sourceMapType, BRecordType targetRecType) { if (targetRecType.sealed) { return false; } for (BField field : targetRecType.fields) { if (!(Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) && isAssignable(sourceMapType.constraint, field.type))) { return false; } } return isAssignable(sourceMapType.constraint, targetRecType.restFieldType); } private boolean isErrorTypeAssignable(BErrorType source, BErrorType target, Set<TypePair> unresolvedTypes) { if (target == symTable.errorType) { return true; } TypePair pair = new TypePair(source, target); if (unresolvedTypes.contains(pair)) { return true; } unresolvedTypes.add(pair); return isAssignable(source.reasonType, target.reasonType, unresolvedTypes) && isAssignable(source.detailType, target.detailType, unresolvedTypes); } private boolean isXMLTypeAssignable(BType sourceType, BType targetType, Set<TypePair> unresolvedTypes) { int sourceTag = sourceType.tag; int targetTag = targetType.tag; if (targetTag == TypeTags.XML) { BXMLType target = (BXMLType) targetType; if (target.constraint != null) { if (TypeTags.isXMLNonSequenceType(sourceTag)) { return isAssignable(sourceType, target.constraint, unresolvedTypes); } BXMLType source = (BXMLType) sourceType; return isAssignable(source.constraint, target.constraint, unresolvedTypes); } return true; } return sourceTag == targetTag; } private boolean isTupleTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) { if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) { return false; } BTupleType lhsTupleType = (BTupleType) target; BTupleType rhsTupleType = 
(BTupleType) source; if (lhsTupleType.restType == null && rhsTupleType.restType != null) { return false; } if (lhsTupleType.restType == null && lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) { return false; } if (lhsTupleType.restType != null && rhsTupleType.restType != null) { if (!isAssignable(rhsTupleType.restType, lhsTupleType.restType, unresolvedTypes)) { return false; } } for (int i = 0; i < rhsTupleType.tupleTypes.size(); i++) { BType lhsType = (lhsTupleType.tupleTypes.size() > i) ? lhsTupleType.tupleTypes.get(i) : lhsTupleType.restType; if (!isAssignable(rhsTupleType.tupleTypes.get(i), lhsType, unresolvedTypes)) { return false; } } return true; } private boolean isTupleTypeAssignableToArrayType(BTupleType source, BArrayType target, Set<TypePair> unresolvedTypes) { if (target.state != BArrayState.UNSEALED && (source.restType != null || source.tupleTypes.size() != target.size)) { return false; } List<BType> sourceTypes = new ArrayList<>(source.tupleTypes); if (source.restType != null) { sourceTypes.add(source.restType); } return sourceTypes.stream() .allMatch(tupleElemType -> isAssignable(tupleElemType, target.eType, unresolvedTypes)); } private boolean isArrayTypeAssignableToTupleType(BArrayType source, BTupleType target, Set<TypePair> unresolvedTypes) { if (!target.tupleTypes.isEmpty()) { if (source.state == BArrayState.UNSEALED) { return false; } if (target.restType != null && target.tupleTypes.size() > source.size) { return false; } if (target.restType == null && target.tupleTypes.size() != source.size) { return false; } } List<BType> targetTypes = new ArrayList<>(target.tupleTypes); if (target.restType != null) { targetTypes.add(target.restType); } return targetTypes.stream() .allMatch(tupleElemType -> isAssignable(source.eType, tupleElemType, unresolvedTypes)); } private boolean isArrayTypesAssignable(BArrayType source, BType target, Set<TypePair> unresolvedTypes) { BType sourceElementType = source.getElementType(); if (target.tag == 
TypeTags.ARRAY) {
            BArrayType targetArrayType = (BArrayType) target;
            BType targetElementType = targetArrayType.getElementType();
            if (targetArrayType.state == BArrayState.UNSEALED) {
                return isAssignable(sourceElementType, targetElementType, unresolvedTypes);
            }
            if (targetArrayType.size == source.size) {
                return isAssignable(sourceElementType, targetElementType, unresolvedTypes);
            }
        } else if (target.tag == TypeTags.JSON) {
            return isAssignable(sourceElementType, target, unresolvedTypes);
        } else if (target.tag == TypeTags.UNION) {
            for (BType memberType : ((BUnionType) target).getMemberTypes()) {
                if (isArrayTypesAssignable(source, memberType, unresolvedTypes)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Checks assignability between function types. When the target carries type
     * parameters, type-param positions are checked covariantly and the remaining
     * parameter positions contravariantly; otherwise the regular function-type
     * equality check (with contravariant parameters) applies.
     */
    private boolean isFunctionTypeAssignable(BInvokableType source, BInvokableType target,
                                             Set<TypePair> unresolvedTypes) {
        if (!containsTypeParams(target)) {
            return checkFunctionTypeEquality(source, target, unresolvedTypes, (s, t, ut) -> isAssignable(t, s, ut));
        }
        if (source.paramTypes.size() != target.paramTypes.size()) {
            return false;
        }
        for (int i = 0; i < source.paramTypes.size(); i++) {
            BType sourceParam = source.paramTypes.get(i);
            BType targetParam = target.paramTypes.get(i);
            if (TypeParamAnalyzer.isTypeParam(targetParam)) {
                if (!isAssignable(sourceParam, targetParam)) {
                    return false;
                }
            } else if (!isAssignable(targetParam, sourceParam)) {
                // Non-type-param positions are checked contravariantly.
                return false;
            }
        }
        if (source.retType == null && target.retType == null) {
            return true;
        }
        if (source.retType == null || target.retType == null) {
            return false;
        }
        return isAssignable(source.retType, target.retType, unresolvedTypes);
    }

    // True if any parameter (including nested function-pointer parameters) or the
    // return type of the given function type is a type parameter.
    private boolean containsTypeParams(BInvokableType type) {
        boolean hasParameterizedTypes = type.paramTypes.stream()
                .anyMatch(t -> {
                    if (t.tag == TypeTags.FUNCTION_POINTER) {
                        return containsTypeParams((BInvokableType) t);
                    }
                    return TypeParamAnalyzer.isTypeParam(t);
                });
        if (hasParameterizedTypes) {
            return hasParameterizedTypes;
        }
        if (type.retType.tag ==
TypeTags.FUNCTION_POINTER) { return containsTypeParams((BInvokableType) type.retType); } return TypeParamAnalyzer.isTypeParam(type.retType); } private boolean isSameFunctionType(BInvokableType source, BInvokableType target, Set<TypePair> unresolvedTypes) { return checkFunctionTypeEquality(source, target, unresolvedTypes, this::isSameType); } private boolean checkFunctionTypeEquality(BInvokableType source, BInvokableType target, Set<TypePair> unresolvedTypes, TypeEqualityPredicate equality) { if (source.paramTypes.size() != target.paramTypes.size()) { return false; } for (int i = 0; i < source.paramTypes.size(); i++) { if (!equality.test(source.paramTypes.get(i), target.paramTypes.get(i), unresolvedTypes)) { return false; } } if ((source.restType != null && target.restType == null) || target.restType != null && source.restType == null) { return false; } else if (source.restType != null && !equality.test(source.restType, target.restType, unresolvedTypes)) { return false; } if (source.retType == null && target.retType == null) { return true; } else if (source.retType == null || target.retType == null) { return false; } return isAssignable(source.retType, target.retType, unresolvedTypes); } public boolean checkArrayEquality(BType source, BType target, Set<TypePair> unresolvedTypes) { if (target.tag != TypeTags.ARRAY || source.tag != TypeTags.ARRAY) { return false; } BArrayType lhsArrayType = (BArrayType) target; BArrayType rhsArrayType = (BArrayType) source; boolean hasSameTypeElements = isSameType(lhsArrayType.eType, rhsArrayType.eType, unresolvedTypes); if (lhsArrayType.state == BArrayState.UNSEALED) { return (rhsArrayType.state == BArrayState.UNSEALED) && hasSameTypeElements; } return checkSealedArraySizeEquality(rhsArrayType, lhsArrayType) && hasSameTypeElements; } public boolean checkSealedArraySizeEquality(BArrayType rhsArrayType, BArrayType lhsArrayType) { return lhsArrayType.size == rhsArrayType.size; } public boolean checkStructEquivalency(BType rhsType, BType 
lhsType) { return checkStructEquivalency(rhsType, lhsType, new HashSet<>()); } private boolean checkStructEquivalency(BType rhsType, BType lhsType, Set<TypePair> unresolvedTypes) { TypePair pair = new TypePair(rhsType, lhsType); if (unresolvedTypes.contains(pair)) { return true; } unresolvedTypes.add(pair); if (rhsType.tag == TypeTags.OBJECT && lhsType.tag == TypeTags.OBJECT) { return checkObjectEquivalency((BObjectType) rhsType, (BObjectType) lhsType, unresolvedTypes); } if (rhsType.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) { return checkRecordEquivalency((BRecordType) rhsType, (BRecordType) lhsType, unresolvedTypes); } return false; } public boolean checkObjectEquivalency(BObjectType rhsType, BObjectType lhsType, Set<TypePair> unresolvedTypes) { BObjectTypeSymbol lhsStructSymbol = (BObjectTypeSymbol) lhsType.tsymbol; BObjectTypeSymbol rhsStructSymbol = (BObjectTypeSymbol) rhsType.tsymbol; List<BAttachedFunction> lhsFuncs = lhsStructSymbol.attachedFuncs; List<BAttachedFunction> rhsFuncs = ((BObjectTypeSymbol) rhsType.tsymbol).attachedFuncs; int lhsAttachedFuncCount = getObjectFuncCount(lhsStructSymbol); int rhsAttachedFuncCount = getObjectFuncCount(rhsStructSymbol); if (lhsType.fields.size() > rhsType.fields.size() || lhsAttachedFuncCount > rhsAttachedFuncCount) { return false; } if (lhsType.getFields().stream().anyMatch(field -> Symbols.isPrivate(field.symbol)) || lhsFuncs.stream().anyMatch(func -> Symbols.isPrivate(func.symbol))) { return false; } Map<Name, BField> rhsFields = rhsType.fields.stream().collect(Collectors.toMap(BField::getName, field -> field)); for (BField lhsField : lhsType.fields) { BField rhsField = rhsFields.get(lhsField.name); if (rhsField == null || !isInSameVisibilityRegion(lhsField.symbol, rhsField.symbol) || !isAssignable(rhsField.type, lhsField.type)) { return false; } } for (BAttachedFunction lhsFunc : lhsFuncs) { if (lhsFunc == lhsStructSymbol.initializerFunc) { continue; } BAttachedFunction rhsFunc = 
getMatchingInvokableType(rhsFuncs, lhsFunc, unresolvedTypes); if (rhsFunc == null || !isInSameVisibilityRegion(lhsFunc.symbol, rhsFunc.symbol)) { return false; } } return true; } private int getObjectFuncCount(BObjectTypeSymbol sym) { if (sym.initializerFunc != null && sym.attachedFuncs.contains(sym.initializerFunc)) { return sym.attachedFuncs.size() - 1; } return sym.attachedFuncs.size(); } public boolean checkRecordEquivalency(BRecordType rhsType, BRecordType lhsType, Set<TypePair> unresolvedTypes) { if (lhsType.sealed && !rhsType.sealed) { return false; } if (!rhsType.sealed && !isAssignable(rhsType.restFieldType, lhsType.restFieldType, unresolvedTypes)) { return false; } return checkFieldEquivalency(lhsType, rhsType, unresolvedTypes); } public void setForeachTypedBindingPatternType(BLangForeach foreachNode) { BType collectionType = foreachNode.collection.type; BType varType; switch (collectionType.tag) { case TypeTags.STRING: varType = symTable.stringType; break; case TypeTags.ARRAY: BArrayType arrayType = (BArrayType) collectionType; varType = arrayType.eType; break; case TypeTags.TUPLE: BTupleType tupleType = (BTupleType) collectionType; LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes); if (tupleType.restType != null) { tupleTypes.add(tupleType.restType); } varType = tupleTypes.size() == 1 ? 
tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes); break; case TypeTags.MAP: BMapType bMapType = (BMapType) collectionType; varType = bMapType.constraint; break; case TypeTags.RECORD: BRecordType recordType = (BRecordType) collectionType; varType = inferRecordFieldType(recordType); break; case TypeTags.XML: varType = BUnionType.create(null, symTable.xmlType, symTable.stringType); break; case TypeTags.STREAM: BStreamType streamType = (BStreamType) collectionType; if (streamType.constraint.tag == TypeTags.NONE) { varType = symTable.anydataType; break; } varType = streamType.constraint; if (streamType.error != null) { BType actualType = BUnionType.create(null, varType, streamType.error); dlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_TYPES, varType, actualType); } break; case TypeTags.OBJECT: BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType); if (nextMethodReturnType != null) { foreachNode.resultType = getRecordType(nextMethodReturnType); BType valueType = (foreachNode.resultType != null) ? 
((BRecordType) foreachNode.resultType).fields.get(0).type : null; BType errorType = getErrorType(nextMethodReturnType); if (errorType != null) { BType actualType = BUnionType.create(null, valueType, errorType); dlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_TYPES, valueType, actualType); } foreachNode.nillableResultType = nextMethodReturnType; foreachNode.varType = valueType; return; } dlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE); case TypeTags.SEMANTIC_ERROR: foreachNode.varType = symTable.semanticError; foreachNode.resultType = symTable.semanticError; foreachNode.nillableResultType = symTable.semanticError; return; default: foreachNode.varType = symTable.semanticError; foreachNode.resultType = symTable.semanticError; foreachNode.nillableResultType = symTable.semanticError; dlogHelper.error(foreachNode.collection.pos, DiagnosticCode.ITERABLE_NOT_SUPPORTED_COLLECTION, collectionType); return; } BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType, names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)); BUnionType nextMethodReturnType = (BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType); foreachNode.varType = varType; foreachNode.resultType = getRecordType(nextMethodReturnType); foreachNode.nillableResultType = nextMethodReturnType; } public void setFromClauseTypedBindingPatternType(BLangFromClause fromClause) { if (fromClause.collection == null) { return; } BType collectionType = fromClause.collection.type; BType varType; switch (collectionType.tag) { case TypeTags.STRING: varType = symTable.stringType; break; case TypeTags.ARRAY: BArrayType arrayType = (BArrayType) collectionType; varType = arrayType.eType; break; case TypeTags.TUPLE: BTupleType tupleType = (BTupleType) collectionType; LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes); if (tupleType.restType != 
null) { tupleTypes.add(tupleType.restType); } varType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes); break; case TypeTags.MAP: BMapType bMapType = (BMapType) collectionType; varType = bMapType.constraint; break; case TypeTags.RECORD: BRecordType recordType = (BRecordType) collectionType; varType = inferRecordFieldType(recordType); break; case TypeTags.XML: varType = BUnionType.create(null, symTable.xmlType, symTable.stringType); break; case TypeTags.STREAM: BStreamType streamType = (BStreamType) collectionType; if (streamType.constraint.tag == TypeTags.NONE) { varType = symTable.anydataType; break; } varType = streamType.constraint; break; case TypeTags.OBJECT: BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType); if (nextMethodReturnType != null) { fromClause.resultType = getRecordType(nextMethodReturnType); fromClause.nillableResultType = nextMethodReturnType; fromClause.varType = ((BRecordType) fromClause.resultType).fields.get(0).type; return; } dlogHelper.error(fromClause.collection.pos, DiagnosticCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE); case TypeTags.SEMANTIC_ERROR: fromClause.varType = symTable.semanticError; fromClause.resultType = symTable.semanticError; fromClause.nillableResultType = symTable.semanticError; return; default: fromClause.varType = symTable.semanticError; fromClause.resultType = symTable.semanticError; fromClause.nillableResultType = symTable.semanticError; dlogHelper.error(fromClause.collection.pos, DiagnosticCode.ITERABLE_NOT_SUPPORTED_COLLECTION, collectionType); return; } BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType, names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)); BUnionType nextMethodReturnType = (BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType); fromClause.varType = varType; fromClause.resultType = 
getRecordType(nextMethodReturnType); fromClause.nillableResultType = nextMethodReturnType; } public BUnionType getVarTypeFromIterableObject(BObjectType collectionType) { BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionType.tsymbol; for (BAttachedFunction func : objectTypeSymbol.attachedFuncs) { if (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) { return getVarTypeFromIteratorFunc(func); } } return null; } private BUnionType getVarTypeFromIteratorFunc(BAttachedFunction candidateIteratorFunc) { if (!candidateIteratorFunc.type.paramTypes.isEmpty()) { return null; } BType returnType = candidateIteratorFunc.type.retType; return getVarTypeFromIteratorFuncReturnType(returnType); } public BUnionType getVarTypeFromIteratorFuncReturnType(BType returnType) { BObjectTypeSymbol objectTypeSymbol; if (returnType.tag != TypeTags.OBJECT) { return null; } objectTypeSymbol = (BObjectTypeSymbol) returnType.tsymbol; for (BAttachedFunction func : objectTypeSymbol.attachedFuncs) { if (func.funcName.value.equals(BLangCompilerConstants.NEXT_FUNC)) { return getVarTypeFromNextFunc(func); } } return null; } private BUnionType getVarTypeFromNextFunc(BAttachedFunction nextFunc) { BType returnType; if (!nextFunc.type.paramTypes.isEmpty()) { return null; } returnType = nextFunc.type.retType; if (checkNextFuncReturnType(returnType)) { return (BUnionType) returnType; } return null; } private boolean checkNextFuncReturnType(BType returnType) { if (returnType.tag != TypeTags.UNION) { return false; } List<BType> types = new ArrayList<>(((BUnionType) returnType).getMemberTypes()); if (!types.removeIf(type -> type.tag == TypeTags.NIL)) { return false; } types.removeIf(type -> type.tag == TypeTags.ERROR); if (types.size() != 1) { return false; } if (types.get(0).tag != TypeTags.RECORD) { return false; } BRecordType recordType = (BRecordType) types.get(0); return checkRecordTypeInNextFuncReturnType(recordType); } private boolean 
checkRecordTypeInNextFuncReturnType(BRecordType recordType) { if (!recordType.sealed) { return false; } if (recordType.fields.size() != 1) { return false; } for (BField field : recordType.fields) { if (field.name.value.equals(BLangCompilerConstants.VALUE_FIELD)) { return true; } } return false; } private BRecordType getRecordType(BUnionType type) { for (BType member : type.getMemberTypes()) { if (member.tag == TypeTags.RECORD) { return (BRecordType) member; } } return null; } public BErrorType getErrorType(BUnionType type) { for (BType member : type.getMemberTypes()) { if (member.tag == TypeTags.ERROR) { return (BErrorType) member; } else if (member.tag == TypeTags.UNION) { BErrorType e = getErrorType((BUnionType) member); if (e != null) { return e; } } } return null; } public BType getResultTypeOfNextInvocation(BObjectType iteratorType) { BAttachedFunction nextFunc = getNextFunc(iteratorType); return Objects.requireNonNull(nextFunc).type.retType; } private BAttachedFunction getNextFunc(BObjectType iteratorType) { BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol; for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) { if (bAttachedFunction.funcName.value .equals(BLangCompilerConstants.NEXT_FUNC)) { return bAttachedFunction; } } return null; } public BType inferRecordFieldType(BRecordType recordType) { List<BField> fields = recordType.fields; BUnionType unionType = BUnionType.create(null); if (!recordType.sealed) { unionType.add(recordType.restFieldType); } for (BField field : fields) { if (isAssignable(field.type, unionType)) { continue; } if (isAssignable(unionType, field.type)) { unionType = BUnionType.create(null); } unionType.add(field.type); } if (unionType.getMemberTypes().size() > 1) { unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, recordType.tsymbol.pkgID, null, recordType.tsymbol.owner); return unionType; } return 
unionType.getMemberTypes().iterator().next(); } /** * Enum to represent type test result. * * @since 1.2.0 */ enum TypeTestResult { NOT_FOUND, TRUE, FALSE } TypeTestResult isBuiltInTypeWidenPossible(BType actualType, BType targetType) { int targetTag = targetType.tag; int actualTag = actualType.tag; if (actualTag < TypeTags.JSON && targetTag < TypeTags.JSON) { switch (actualTag) { case TypeTags.INT: case TypeTags.BYTE: case TypeTags.FLOAT: case TypeTags.DECIMAL: if (targetTag == TypeTags.BOOLEAN || targetTag == TypeTags.STRING) { return TypeTestResult.FALSE; } break; case TypeTags.BOOLEAN: if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT || targetTag == TypeTags.DECIMAL || targetTag == TypeTags.STRING) { return TypeTestResult.FALSE; } break; case TypeTags.STRING: if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT || targetTag == TypeTags.DECIMAL || targetTag == TypeTags.BOOLEAN) { return TypeTestResult.FALSE; } break; } } switch (actualTag) { case TypeTags.INT: case TypeTags.BYTE: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.BOOLEAN: case TypeTags.STRING: case TypeTags.SIGNED32_INT: case TypeTags.SIGNED16_INT: case TypeTags.SIGNED8_INT: case TypeTags.UNSIGNED32_INT: case TypeTags.UNSIGNED16_INT: case TypeTags.UNSIGNED8_INT: case TypeTags.CHAR_STRING: if (targetTag == TypeTags.JSON || targetTag == TypeTags.ANYDATA || targetTag == TypeTags.ANY || targetTag == TypeTags.READONLY) { return TypeTestResult.TRUE; } break; case TypeTags.ANYDATA: case TypeTags.TYPEDESC: if (targetTag == TypeTags.ANY) { return TypeTestResult.TRUE; } break; default: } if (TypeTags.isIntegerTypeTag(targetTag) && actualTag == targetTag) { return TypeTestResult.FALSE; } if ((TypeTags.isIntegerTypeTag(actualTag) || actualTag == TypeTags.BYTE) && (TypeTags.isIntegerTypeTag(targetTag) || targetTag == TypeTags.BYTE)) { return checkBuiltInIntSubtypeWidenPossible(actualType, targetType); } if (actualTag == 
TypeTags.CHAR_STRING && TypeTags.STRING == targetTag) { return TypeTestResult.TRUE; } return TypeTestResult.NOT_FOUND; } private TypeTestResult checkBuiltInIntSubtypeWidenPossible(BType actualType, BType targetType) { int actualTag = actualType.tag; switch (targetType.tag) { case TypeTags.INT: if (actualTag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(actualTag)) { return TypeTestResult.TRUE; } break; case TypeTags.SIGNED32_INT: if (actualTag == TypeTags.SIGNED16_INT || actualTag == TypeTags.SIGNED8_INT || actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) { return TypeTestResult.TRUE; } break; case TypeTags.SIGNED16_INT: if (actualTag == TypeTags.SIGNED8_INT || actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) { return TypeTestResult.TRUE; } break; case TypeTags.UNSIGNED32_INT: if (actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) { return TypeTestResult.TRUE; } break; case TypeTags.UNSIGNED16_INT: if (actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) { return TypeTestResult.TRUE; } break; case TypeTags.BYTE: if (actualTag == TypeTags.UNSIGNED8_INT) { return TypeTestResult.TRUE; } break; case TypeTags.UNSIGNED8_INT: if (actualTag == TypeTags.BYTE) { return TypeTestResult.TRUE; } break; } return TypeTestResult.NOT_FOUND; } public boolean isImplicityCastable(BType actualType, BType targetType) { /* The word Builtin refers for Compiler known types. 
*/ BType newTargetType = targetType; if ((targetType.tag == TypeTags.UNION || targetType.tag == TypeTags.FINITE) && isValueType(actualType)) { newTargetType = symTable.anyType; } TypeTestResult result = isBuiltInTypeWidenPossible(actualType, newTargetType); if (result != TypeTestResult.NOT_FOUND) { return result == TypeTestResult.TRUE; } if (isValueType(targetType) && (actualType.tag == TypeTags.FINITE || (actualType.tag == TypeTags.UNION && ((BUnionType) actualType).getMemberTypes().stream() .anyMatch(type -> type.tag == TypeTags.FINITE && isAssignable(type, targetType))))) { return targetType.tag == TypeTags.INT || targetType.tag == TypeTags.BYTE || targetType.tag == TypeTags.FLOAT || targetType.tag == TypeTags.STRING || targetType.tag == TypeTags.BOOLEAN; } else if (targetType.tag == TypeTags.ERROR && (actualType.tag == TypeTags.UNION && isAllErrorMembers((BUnionType) actualType))) { return true; } return false; } public boolean isTypeCastable(BLangExpression expr, BType sourceType, BType targetType) { if (sourceType.tag == TypeTags.SEMANTIC_ERROR || targetType.tag == TypeTags.SEMANTIC_ERROR || sourceType == targetType) { return true; } if (isAssignable(sourceType, targetType) || isAssignable(targetType, sourceType)) { return true; } if (isNumericConversionPossible(expr, sourceType, targetType)) { return true; } boolean validTypeCast = false; if (sourceType.tag == TypeTags.UNION) { if (getTypeForUnionTypeMembersAssignableToType((BUnionType) sourceType, targetType) != symTable.semanticError) { validTypeCast = true; } } if (targetType.tag == TypeTags.UNION) { if (getTypeForUnionTypeMembersAssignableToType((BUnionType) targetType, sourceType) != symTable.semanticError) { validTypeCast = true; } } if (sourceType.tag == TypeTags.FINITE) { if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) sourceType, targetType) != symTable.semanticError) { validTypeCast = true; } } if (targetType.tag == TypeTags.FINITE) { if 
(getTypeForFiniteTypeValuesAssignableToType((BFiniteType) targetType, sourceType) != symTable.semanticError) { validTypeCast = true; } } if (validTypeCast) { if (isValueType(sourceType)) { setImplicitCastExpr(expr, sourceType, symTable.anyType); } return true; } return false; } boolean isNumericConversionPossible(BLangExpression expr, BType sourceType, BType targetType) { final boolean isSourceNumericType = isBasicNumericType(sourceType); final boolean isTargetNumericType = isBasicNumericType(targetType); if (isSourceNumericType && isTargetNumericType) { return true; } if (targetType.tag == TypeTags.UNION) { HashSet<Integer> typeTags = new HashSet<>(); for (BType bType : ((BUnionType) targetType).getMemberTypes()) { if (isBasicNumericType(bType)) { typeTags.add(bType.tag); if (typeTags.size() > 1) { return false; } } } } if (!isTargetNumericType && targetType.tag != TypeTags.UNION) { return false; } if (isSourceNumericType) { setImplicitCastExpr(expr, sourceType, symTable.anyType); return true; } switch (sourceType.tag) { case TypeTags.ANY: case TypeTags.ANYDATA: case TypeTags.JSON: return true; case TypeTags.UNION: for (BType memType : ((BUnionType) sourceType).getMemberTypes()) { if (isBasicNumericType(memType) || (memType.tag == TypeTags.FINITE && finiteTypeContainsNumericTypeValues((BFiniteType) memType))) { return true; } } break; case TypeTags.FINITE: if (finiteTypeContainsNumericTypeValues((BFiniteType) sourceType)) { return true; } break; } return false; } private boolean isAllErrorMembers(BUnionType actualType) { return actualType.getMemberTypes().stream().allMatch(t -> isAssignable(t, symTable.errorType)); } public void setImplicitCastExpr(BLangExpression expr, BType actualType, BType expType) { if (!isImplicityCastable(actualType, expType)) { return; } BLangTypeConversionExpr implicitConversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); implicitConversionExpr.pos = expr.pos; implicitConversionExpr.expr = 
expr.impConversionExpr == null ? expr : expr.impConversionExpr; implicitConversionExpr.type = expType; implicitConversionExpr.targetType = expType; expr.impConversionExpr = implicitConversionExpr; } public BType getElementType(BType type) { if (type.tag != TypeTags.ARRAY) { return type; } return getElementType(((BArrayType) type).getElementType()); } public boolean checkListenerCompatibility(BType type) { if (type.tag != TypeTags.OBJECT) { return false; } final BSymbol bSymbol = symTable.langObjectModuleSymbol.scope.lookup(Names.LISTENER).symbol; if (bSymbol == symTable.notFoundSymbol || bSymbol.type.tag != TypeTags.OBJECT) { throw new AssertionError("Listener object not defined."); } BObjectType rhsType = (BObjectType) type; BObjectType lhsType = (BObjectType) bSymbol.type; BStructureTypeSymbol lhsStructSymbol = (BStructureTypeSymbol) lhsType.tsymbol; List<BAttachedFunction> lhsFuncs = lhsStructSymbol.attachedFuncs; List<BAttachedFunction> rhsFuncs = ((BStructureTypeSymbol) rhsType.tsymbol).attachedFuncs; int lhsAttachedFuncCount = lhsStructSymbol.initializerFunc != null ? 
lhsFuncs.size() - 1 : lhsFuncs.size(); if (lhsAttachedFuncCount > rhsFuncs.size()) { return false; } for (BAttachedFunction lhsFunc : lhsFuncs) { if (lhsFunc == lhsStructSymbol.initializerFunc) { continue; } if (!Symbols.isPublic(lhsFunc.symbol)) { return false; } BAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, new HashSet<>()); if (rhsFunc == null || !Symbols.isPublic(rhsFunc.symbol)) { return false; } } return true; } public boolean isValidErrorDetailType(BType detailType) { switch (detailType.tag) { case TypeTags.MAP: case TypeTags.RECORD: return isAssignable(detailType, symTable.detailType); } return false; } private boolean isNullable(BType fieldType) { return fieldType.isNullable(); } private class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> { Set<TypePair> unresolvedTypes; BSameTypeVisitor(Set<TypePair> unresolvedTypes) { this.unresolvedTypes = unresolvedTypes; } @Override public Boolean visit(BType t, BType s) { if (t == s) { return true; } switch (t.tag) { case TypeTags.INT: case TypeTags.BYTE: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.STRING: case TypeTags.BOOLEAN: case TypeTags.ANY: case TypeTags.ANYDATA: return t.tag == s.tag && (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s)); default: break; } return false; } @Override public Boolean visit(BBuiltInRefType t, BType s) { return t == s; } @Override public Boolean visit(BAnyType t, BType s) { return t == s; } @Override public Boolean visit(BAnydataType t, BType s) { return t == s; } @Override public Boolean visit(BMapType t, BType s) { if (s.tag != TypeTags.MAP) { return false; } BMapType sType = ((BMapType) s); return isSameType(sType.constraint, t.constraint, this.unresolvedTypes); } @Override public Boolean visit(BFutureType t, BType s) { return s.tag == TypeTags.FUTURE && t.constraint.tag == ((BFutureType) s).constraint.tag; } @Override public Boolean visit(BXMLType t, BType s) { return visit((BBuiltInRefType) t, s); } 
@Override public Boolean visit(BJSONType t, BType s) { return s.tag == TypeTags.JSON; } @Override public Boolean visit(BArrayType t, BType s) { return s.tag == TypeTags.ARRAY && checkArrayEquality(s, t, new HashSet<>()); } @Override public Boolean visit(BObjectType t, BType s) { if (t == s) { return true; } if (s.tag != TypeTags.OBJECT) { return false; } return t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name); } @Override public Boolean visit(BRecordType t, BType s) { if (t == s) { return true; } if (s.tag != TypeTags.RECORD) { return false; } BRecordType source = (BRecordType) s; if (source.fields.size() != t.fields.size()) { return false; } boolean notSameType = source.fields .stream() .map(fs -> t.fields.stream() .anyMatch(ft -> fs.name.equals(ft.name) && isSameType(fs.type, ft.type, this.unresolvedTypes) && hasSameOptionalFlag(fs.symbol, ft.symbol))) .anyMatch(foundSameType -> !foundSameType); if (notSameType) { return false; } return isSameType(source.restFieldType, t.restFieldType, unresolvedTypes); } private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) { return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL; } public Boolean visit(BTupleType t, BType s) { if (s.tag != TypeTags.TUPLE) { return false; } BTupleType source = (BTupleType) s; if (source.tupleTypes.size() != t.tupleTypes.size()) { return false; } for (int i = 0; i < source.tupleTypes.size(); i++) { if (t.getTupleTypes().get(i) == symTable.noType) { continue; } if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) { return false; } } return true; } @Override public Boolean visit(BStreamType t, BType s) { return t == s; } @Override public Boolean visit(BInvokableType t, BType s) { return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, new HashSet<>()); } @Override public Boolean visit(BUnionType tUnionType, BType s) { if (s.tag != TypeTags.UNION) { return false; } 
BUnionType sUnionType = (BUnionType) s; if (sUnionType.getMemberTypes().size() != tUnionType.getMemberTypes().size()) { return false; } Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes()); Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes()); boolean notSameType = sourceTypes .stream() .map(sT -> targetTypes .stream() .anyMatch(it -> isSameType(it, sT, this.unresolvedTypes))) .anyMatch(foundSameType -> !foundSameType); return !notSameType; } @Override public Boolean visit(BErrorType t, BType s) { if (s.tag != TypeTags.ERROR) { return false; } BErrorType source = (BErrorType) s; if (!isSameType(source.reasonType, t.reasonType, this.unresolvedTypes)) { return false; } if (source.detailType == t.detailType) { return true; } return isSameType(source.detailType, t.detailType, this.unresolvedTypes); } @Override public Boolean visit(BServiceType t, BType s) { return t == s || t.tag == s.tag; } @Override public Boolean visit(BTypedescType t, BType s) { if (s.tag != TypeTags.TYPEDESC) { return false; } BTypedescType sType = ((BTypedescType) s); return isSameType(sType.constraint, t.constraint, this.unresolvedTypes); } @Override public Boolean visit(BFiniteType t, BType s) { return s == t; } }; private boolean checkFieldEquivalency(BRecordType lhsType, BRecordType rhsType, Set<TypePair> unresolvedTypes) { Map<Name, BField> rhsFields = rhsType.fields.stream().collect(Collectors.toMap(BField::getName, f -> f)); for (BField lhsField : lhsType.fields) { BField rhsField = rhsFields.get(lhsField.name); if (rhsField == null) { return false; } if (!Symbols.isOptional(lhsField.symbol) && Symbols.isOptional(rhsField.symbol)) { return false; } if (!isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) { return false; } rhsFields.remove(lhsField.name); } return rhsFields.entrySet().stream().allMatch( fieldEntry -> isAssignable(fieldEntry.getValue().type, lhsType.restFieldType, unresolvedTypes)); } private BAttachedFunction 
getMatchingInvokableType(List<BAttachedFunction> rhsFuncList, BAttachedFunction lhsFunc, Set<TypePair> unresolvedTypes) { return rhsFuncList.stream() .filter(rhsFunc -> lhsFunc.funcName.equals(rhsFunc.funcName)) .filter(rhsFunc -> isFunctionTypeAssignable(rhsFunc.type, lhsFunc.type, unresolvedTypes)) .findFirst() .orElse(null); } private boolean isInSameVisibilityRegion(BSymbol lhsSym, BSymbol rhsSym) { if (Symbols.isPrivate(lhsSym)) { return Symbols.isPrivate(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID) && lhsSym.owner.name.equals(rhsSym.owner.name); } else if (Symbols.isPublic(lhsSym)) { return Symbols.isPublic(rhsSym); } return !Symbols.isPrivate(rhsSym) && !Symbols.isPublic(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID); } private boolean isAssignableToUnionType(BType source, BType target, Set<TypePair> unresolvedTypes) { Set<BType> sourceTypes = new LinkedHashSet<>(); Set<BType> targetTypes = new LinkedHashSet<>(); if (source.tag == TypeTags.UNION) { BUnionType sourceUnionType = (BUnionType) source; sourceTypes.addAll(sourceUnionType.getMemberTypes()); } else { sourceTypes.add(source); } if (target.tag == TypeTags.UNION) { BUnionType targetUnionType = (BUnionType) target; targetTypes.addAll(targetUnionType.getMemberTypes()); } else { targetTypes.add(target); } return sourceTypes.stream() .allMatch(s -> (targetTypes.stream().anyMatch(t -> isAssignable(s, t, unresolvedTypes))) || (s.tag == TypeTags.FINITE && isAssignable(s, target, unresolvedTypes)) || (s.tag == TypeTags.XML && isAssignableToUnionType(expandedXMLBuiltinSubtypes, target, unresolvedTypes))); } private boolean isFiniteTypeAssignable(BFiniteType finiteType, BType targetType, Set<TypePair> unresolvedTypes) { if (targetType.tag == TypeTags.FINITE) { return finiteType.getValueSpace().stream() .allMatch(expression -> isAssignableToFiniteType(targetType, (BLangLiteral) expression)); } if (targetType.tag == TypeTags.UNION) { List<BType> unionMemberTypes = getAllTypes(targetType); return 
finiteType.getValueSpace().stream() .allMatch(valueExpr -> unionMemberTypes.stream() .anyMatch(targetMemType -> targetMemType.tag == TypeTags.FINITE ? isAssignableToFiniteType(targetMemType, (BLangLiteral) valueExpr) : isAssignable(valueExpr.type, targetType, unresolvedTypes))); } return finiteType.getValueSpace().stream() .allMatch(expression -> isAssignable(expression.type, targetType, unresolvedTypes)); } boolean isAssignableToFiniteType(BType type, BLangLiteral literalExpr) { if (type.tag != TypeTags.FINITE) { return false; } BFiniteType expType = (BFiniteType) type; return expType.getValueSpace().stream().anyMatch(memberLiteral -> { if (((BLangLiteral) memberLiteral).value == null) { return literalExpr.value == null; } return checkLiteralAssignabilityBasedOnType((BLangLiteral) memberLiteral, literalExpr); }); } /** * Method to check the literal assignability based on the types of the literals. For numeric literals the * assignability depends on the equivalency of the literals. If the candidate literal could either be a simple * literal or a constant. In case of a constant, it is assignable to the base literal if and only if both * literals have same type and equivalent values. * * @param baseLiteral Literal based on which we check the assignability. * @param candidateLiteral Literal to be tested whether it is assignable to the base literal or not. * @return true if assignable; false otherwise. 
*/ boolean checkLiteralAssignabilityBasedOnType(BLangLiteral baseLiteral, BLangLiteral candidateLiteral) { if (baseLiteral.getKind() != candidateLiteral.getKind()) { return false; } Object baseValue = baseLiteral.value; Object candidateValue = candidateLiteral.value; int candidateTypeTag = candidateLiteral.type.tag; switch (baseLiteral.type.tag) { case TypeTags.BYTE: if (candidateTypeTag == TypeTags.BYTE || (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant && isByteLiteralValue((Long) candidateValue))) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.INT: if (candidateTypeTag == TypeTags.INT) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.SIGNED32_INT: if (candidateTypeTag == TypeTags.INT && isSigned32LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.SIGNED16_INT: if (candidateTypeTag == TypeTags.INT && isSigned16LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.SIGNED8_INT: if (candidateTypeTag == TypeTags.INT && isSigned8LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.UNSIGNED32_INT: if (candidateTypeTag == TypeTags.INT && isUnsigned32LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.UNSIGNED16_INT: if (candidateTypeTag == TypeTags.INT && isUnsigned16LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.UNSIGNED8_INT: if (candidateTypeTag == TypeTags.INT && isUnsigned8LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) 
candidateValue).longValue(); } break; case TypeTags.FLOAT: String baseValueStr = String.valueOf(baseValue); String originalValue = baseLiteral.originalValue != null ? baseLiteral.originalValue : baseValueStr; if (NumericLiteralSupport.isDecimalDiscriminated(originalValue)) { return false; } double baseDoubleVal = Double.parseDouble(baseValueStr); double candidateDoubleVal; if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) { candidateDoubleVal = ((Long) candidateValue).doubleValue(); return baseDoubleVal == candidateDoubleVal; } else if (candidateTypeTag == TypeTags.FLOAT) { candidateDoubleVal = Double.parseDouble(String.valueOf(candidateValue)); return baseDoubleVal == candidateDoubleVal; } break; case TypeTags.DECIMAL: BigDecimal baseDecimalVal = NumericLiteralSupport.parseBigDecimal(baseValue); BigDecimal candidateDecimalVal; if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) { candidateDecimalVal = new BigDecimal((long) candidateValue, MathContext.DECIMAL128); return baseDecimalVal.compareTo(candidateDecimalVal) == 0; } else if (candidateTypeTag == TypeTags.FLOAT && !candidateLiteral.isConstant || candidateTypeTag == TypeTags.DECIMAL) { if (NumericLiteralSupport.isFloatDiscriminated(String.valueOf(candidateValue))) { return false; } candidateDecimalVal = NumericLiteralSupport.parseBigDecimal(candidateValue); return baseDecimalVal.compareTo(candidateDecimalVal) == 0; } break; default: return baseValue.equals(candidateValue); } return false; } boolean isByteLiteralValue(Long longObject) { return (longObject.intValue() >= BBYTE_MIN_VALUE && longObject.intValue() <= BBYTE_MAX_VALUE); } boolean isSigned32LiteralValue(Long longObject) { return (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE); } boolean isSigned16LiteralValue(Long longObject) { return (longObject.intValue() >= SIGNED16_MIN_VALUE && longObject.intValue() <= SIGNED16_MAX_VALUE); } boolean isSigned8LiteralValue(Long longObject) { return 
(longObject.intValue() >= SIGNED8_MIN_VALUE && longObject.intValue() <= SIGNED8_MAX_VALUE);
}

// Range check for the int:Unsigned32 subtype (0 .. UNSIGNED32_MAX_VALUE, inclusive).
boolean isUnsigned32LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE);
}

// Range check for the int:Unsigned16 subtype (0 .. UNSIGNED16_MAX_VALUE, inclusive).
boolean isUnsigned16LiteralValue(Long longObject) {
    return (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED16_MAX_VALUE);
}

// Range check for the int:Unsigned8 subtype (0 .. UNSIGNED8_MAX_VALUE, inclusive).
boolean isUnsigned8LiteralValue(Long longObject) {
    return (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED8_MAX_VALUE);
}

// A string literal is a valid single-character value only when it contains exactly
// one Unicode code point (not one UTF-16 char, hence codePoints()).
boolean isCharLiteralValue(String literal) {
    return (literal.codePoints().count() == 1);
}

/**
 * Method to retrieve a type representing all the values in the value space of a finite type that are assignable to
 * the target type.
 *
 * @param finiteType the finite type
 * @param targetType the target type
 * @return a new finite type if at least one value in the value space of the specified finiteType is
 *         assignable to targetType (the same if all are assignable), else semanticError
 */
BType getTypeForFiniteTypeValuesAssignableToType(BFiniteType finiteType, BType targetType) {
    // Fast path: the whole finite type is assignable, so no filtering is needed.
    if (isAssignable(finiteType, targetType)) {
        return finiteType;
    }
    // Keep only the values assignable to the target: directly by static type, via the
    // target finite type's value space, or via a finite member of a target union.
    Set<BLangExpression> matchingValues = finiteType.getValueSpace().stream()
            .filter(
                    expr -> isAssignable(expr.type, targetType) ||
                            isAssignableToFiniteType(targetType, (BLangLiteral) expr) ||
                            (targetType.tag == TypeTags.UNION &&
                                    ((BUnionType) targetType).getMemberTypes().stream()
                                            .filter(memType -> memType.tag == TypeTags.FINITE)
                                            .anyMatch(filteredType -> isAssignableToFiniteType(
                                                    filteredType, (BLangLiteral) expr))))
            .collect(Collectors.toSet());
    if (matchingValues.isEmpty()) {
        return symTable.semanticError;
    }
    // Synthesize a fresh anonymous finite type holding only the matching values.
    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteType.tsymbol.flags,
            names.fromString("$anonType$" + finiteTypeCount++),
            finiteType.tsymbol.pkgID, null, finiteType.tsymbol.owner);
    BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, matchingValues);
    finiteTypeSymbol.type = intersectingFiniteType;
return intersectingFiniteType; } /** * Method to retrieve a type representing all the member types of a union type that are assignable to * the target type. * * @param unionType the union type * @param targetType the target type * @return a single type or a new union type if at least one member type of the union type is * assignable to targetType, else semanticError */ BType getTypeForUnionTypeMembersAssignableToType(BUnionType unionType, BType targetType) { List<BType> intersection = new LinkedList<>(); unionType.getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { BType finiteTypeWithMatches = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) memType, targetType); if (finiteTypeWithMatches != symTable.semanticError) { intersection.add(finiteTypeWithMatches); } } else { if (isAssignable(memType, targetType)) { intersection.add(memType); } } }); if (intersection.isEmpty()) { return symTable.semanticError; } if (intersection.size() == 1) { return intersection.get(0); } else { return BUnionType.create(null, new LinkedHashSet<>(intersection)); } } boolean validEqualityIntersectionExists(BType lhsType, BType rhsType) { if (!lhsType.isPureType() || !rhsType.isPureType()) { return false; } if (isAssignable(lhsType, rhsType) || isAssignable(rhsType, lhsType)) { return true; } Set<BType> lhsTypes = expandAndGetMemberTypesRecursive(lhsType); Set<BType> rhsTypes = expandAndGetMemberTypesRecursive(rhsType); return equalityIntersectionExists(lhsTypes, rhsTypes); } private boolean equalityIntersectionExists(Set<BType> lhsTypes, Set<BType> rhsTypes) { if ((lhsTypes.contains(symTable.anydataType) && rhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR)) || (rhsTypes.contains(symTable.anydataType) && lhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR))) { return true; } boolean matchFound = lhsTypes .stream() .anyMatch(s -> rhsTypes .stream() .anyMatch(t -> isSameType(s, t))); if (!matchFound) { matchFound = 
equalityIntersectionExistsForComplexTypes(lhsTypes, rhsTypes); } return matchFound; } /** * Retrieves member types of the specified type, expanding maps/arrays of/constrained by unions types to individual * maps/arrays. * * e.g., (string|int)[] would cause three entries as string[], int[], (string|int)[] * * @param bType the type for which member types needs to be identified * @return a set containing all the retrieved member types */ public Set<BType> expandAndGetMemberTypesRecursive(BType bType) { Set<BType> memberTypes = new LinkedHashSet<>(); switch (bType.tag) { case TypeTags.BYTE: case TypeTags.INT: memberTypes.add(symTable.intType); memberTypes.add(symTable.byteType); break; case TypeTags.FINITE: BFiniteType expType = (BFiniteType) bType; expType.getValueSpace().forEach(value -> { memberTypes.add(value.type); }); break; case TypeTags.UNION: BUnionType unionType = (BUnionType) bType; unionType.getMemberTypes().forEach(member -> { memberTypes.addAll(expandAndGetMemberTypesRecursive(member)); }); break; case TypeTags.ARRAY: BType arrayElementType = ((BArrayType) bType).getElementType(); if (((BArrayType) bType).getSize() != -1) { memberTypes.add(new BArrayType(arrayElementType)); } if (arrayElementType.tag == TypeTags.UNION) { Set<BType> elementUnionTypes = expandAndGetMemberTypesRecursive(arrayElementType); elementUnionTypes.forEach(elementUnionType -> { memberTypes.add(new BArrayType(elementUnionType)); }); } memberTypes.add(bType); break; case TypeTags.MAP: BType mapConstraintType = ((BMapType) bType).getConstraint(); if (mapConstraintType.tag == TypeTags.UNION) { Set<BType> constraintUnionTypes = expandAndGetMemberTypesRecursive(mapConstraintType); constraintUnionTypes.forEach(constraintUnionType -> { memberTypes.add(new BMapType(TypeTags.MAP, constraintUnionType, symTable.mapType.tsymbol)); }); } memberTypes.add(bType); break; default: memberTypes.add(bType); } return memberTypes; } private boolean tupleIntersectionExists(BTupleType lhsType, BTupleType 
rhsType) { if (lhsType.getTupleTypes().size() != rhsType.getTupleTypes().size()) { return false; } List<BType> lhsMemberTypes = lhsType.getTupleTypes(); List<BType> rhsMemberTypes = rhsType.getTupleTypes(); for (int i = 0; i < lhsType.getTupleTypes().size(); i++) { if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberTypes.get(i)), expandAndGetMemberTypesRecursive(rhsMemberTypes.get(i)))) { return false; } } return true; } private boolean equalityIntersectionExistsForComplexTypes(Set<BType> lhsTypes, Set<BType> rhsTypes) { for (BType lhsMemberType : lhsTypes) { switch (lhsMemberType.tag) { case TypeTags.INT: case TypeTags.STRING: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.BOOLEAN: case TypeTags.NIL: if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) { return true; } break; case TypeTags.JSON: if (jsonEqualityIntersectionExists(rhsTypes)) { return true; } break; case TypeTags.TUPLE: if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE && tupleIntersectionExists((BTupleType) lhsMemberType, (BTupleType) rhsMemberType))) { return true; } if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY && arrayTupleEqualityIntersectionExists((BArrayType) rhsMemberType, (BTupleType) lhsMemberType))) { return true; } break; case TypeTags.ARRAY: if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY && equalityIntersectionExists( expandAndGetMemberTypesRecursive(((BArrayType) lhsMemberType).eType), expandAndGetMemberTypesRecursive(((BArrayType) rhsMemberType).eType)))) { return true; } if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE && arrayTupleEqualityIntersectionExists((BArrayType) lhsMemberType, (BTupleType) rhsMemberType))) { return true; } break; case TypeTags.MAP: if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.MAP && equalityIntersectionExists( 
expandAndGetMemberTypesRecursive(((BMapType) lhsMemberType).constraint), expandAndGetMemberTypesRecursive(((BMapType) rhsMemberType).constraint)))) { return true; } if (!isAssignable(((BMapType) lhsMemberType).constraint, symTable.errorType) && rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) { return true; } if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD && mapRecordEqualityIntersectionExists((BMapType) lhsMemberType, (BRecordType) rhsMemberType))) { return true; } break; case TypeTags.OBJECT: case TypeTags.RECORD: if (rhsTypes.stream().anyMatch( rhsMemberType -> checkStructEquivalency(rhsMemberType, lhsMemberType) || checkStructEquivalency(lhsMemberType, rhsMemberType))) { return true; } if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD && recordEqualityIntersectionExists((BRecordType) lhsMemberType, (BRecordType) rhsMemberType))) { return true; } if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON) && jsonEqualityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberType))) { return true; } if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.MAP && mapRecordEqualityIntersectionExists((BMapType) rhsMemberType, (BRecordType) lhsMemberType))) { return true; } break; } } return false; } private boolean arrayTupleEqualityIntersectionExists(BArrayType arrayType, BTupleType tupleType) { Set<BType> elementTypes = expandAndGetMemberTypesRecursive(arrayType.eType); return tupleType.tupleTypes.stream() .allMatch(tupleMemType -> equalityIntersectionExists(elementTypes, expandAndGetMemberTypesRecursive(tupleMemType))); } private boolean recordEqualityIntersectionExists(BRecordType lhsType, BRecordType rhsType) { List<BField> lhsFields = lhsType.fields; List<BField> rhsFields = rhsType.fields; List<Name> matchedFieldNames = new ArrayList<>(); for (BField lhsField : lhsFields) { Optional<BField> match = 
rhsFields.stream().filter(rhsField -> lhsField.name.equals(rhsField.name)).findFirst(); if (match.isPresent()) { if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type), expandAndGetMemberTypesRecursive(match.get().type))) { return false; } matchedFieldNames.add(lhsField.getName()); } else { if (Symbols.isFlagOn(lhsField.symbol.flags, Flags.OPTIONAL)) { break; } if (rhsType.sealed) { return false; } if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type), expandAndGetMemberTypesRecursive(rhsType.restFieldType))) { return false; } } } for (BField rhsField : rhsFields) { if (matchedFieldNames.contains(rhsField.getName())) { continue; } if (!Symbols.isFlagOn(rhsField.symbol.flags, Flags.OPTIONAL)) { if (lhsType.sealed) { return false; } if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(rhsField.type), expandAndGetMemberTypesRecursive(lhsType.restFieldType))) { return false; } } } return true; } private boolean mapRecordEqualityIntersectionExists(BMapType mapType, BRecordType recordType) { Set<BType> mapConstrTypes = expandAndGetMemberTypesRecursive(mapType.getConstraint()); return recordType.fields.stream() .allMatch(field -> Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) || equalityIntersectionExists(mapConstrTypes, expandAndGetMemberTypesRecursive(field.type))); } private boolean jsonEqualityIntersectionExists(Set<BType> typeSet) { for (BType type : typeSet) { switch (type.tag) { case TypeTags.MAP: if (!isAssignable(((BMapType) type).constraint, symTable.errorType)) { return true; } break; case TypeTags.RECORD: BRecordType recordType = (BRecordType) type; if (recordType.fields.stream() .allMatch(field -> Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) || !isAssignable(field.type, symTable.errorType))) { return true; } break; default: if (isAssignable(type, symTable.jsonType)) { return true; } } } return false; } public BType getRemainingType(BType originalType, BType typeToRemove) { switch 
(originalType.tag) { case TypeTags.UNION: return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove)); case TypeTags.FINITE: return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove)); default: return originalType; } } private BType getRemainingType(BUnionType originalType, List<BType> removeTypes) { List<BType> remainingTypes = getAllTypes(originalType); removeTypes.forEach(removeType -> remainingTypes.removeIf(type -> isAssignable(type, removeType))); List<BType> finiteTypesToRemove = new ArrayList<>(); List<BType> finiteTypesToAdd = new ArrayList<>(); for (BType remainingType : remainingTypes) { if (remainingType.tag == TypeTags.FINITE) { BFiniteType finiteType = (BFiniteType) remainingType; finiteTypesToRemove.add(finiteType); BType remainingTypeWithMatchesRemoved = getRemainingType(finiteType, removeTypes); if (remainingTypeWithMatchesRemoved != symTable.semanticError) { finiteTypesToAdd.add(remainingTypeWithMatchesRemoved); } } } remainingTypes.removeAll(finiteTypesToRemove); remainingTypes.addAll(finiteTypesToAdd); if (remainingTypes.size() == 1) { return remainingTypes.get(0); } if (remainingTypes.isEmpty()) { return symTable.nullSet; } return BUnionType.create(null, new LinkedHashSet<>(remainingTypes)); } private BType getRemainingType(BFiniteType originalType, List<BType> removeTypes) { Set<BLangExpression> remainingValueSpace = new LinkedHashSet<>(); for (BLangExpression valueExpr : originalType.getValueSpace()) { boolean matchExists = false; for (BType remType : removeTypes) { if (isAssignable(valueExpr.type, remType) || isAssignableToFiniteType(remType, (BLangLiteral) valueExpr)) { matchExists = true; break; } } if (!matchExists) { remainingValueSpace.add(valueExpr); } } if (remainingValueSpace.isEmpty()) { return symTable.semanticError; } BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, originalType.tsymbol.flags, names.fromString("$anonType$" + finiteTypeCount++), 
originalType.tsymbol.pkgID, null, originalType.tsymbol.owner); BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, remainingValueSpace); finiteTypeSymbol.type = intersectingFiniteType; return intersectingFiniteType; } public BType getSafeType(BType type, boolean liftNil, boolean liftError) { switch (type.tag) { case TypeTags.JSON: BJSONType jsonType = (BJSONType) type; return new BJSONType(jsonType.tag, jsonType.tsymbol, false); case TypeTags.ANY: return new BAnyType(type.tag, type.tsymbol, false); case TypeTags.ANYDATA: return new BAnydataType(type.tag, type.tsymbol, false); case TypeTags.READONLY: return new BReadonlyType(type.tag, type.tsymbol, false); } if (type.tag != TypeTags.UNION) { return type; } BUnionType unionType = (BUnionType) type; LinkedHashSet<BType> memTypes = new LinkedHashSet<>(unionType.getMemberTypes()); BUnionType errorLiftedType = BUnionType.create(null, memTypes); if (liftNil) { errorLiftedType.remove(symTable.nilType); } if (liftError) { errorLiftedType.remove(symTable.errorType); } if (errorLiftedType.getMemberTypes().size() == 1) { return errorLiftedType.getMemberTypes().toArray(new BType[0])[0]; } return errorLiftedType; } public List<BType> getAllTypes(BType type) { if (type.tag != TypeTags.UNION) { return Lists.of(type); } List<BType> memberTypes = new ArrayList<>(); ((BUnionType) type).getMemberTypes().forEach(memberType -> memberTypes.addAll(getAllTypes(memberType))); return memberTypes; } public boolean isAllowedConstantType(BType type) { switch (type.tag) { case TypeTags.BOOLEAN: case TypeTags.INT: case TypeTags.BYTE: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.STRING: case TypeTags.NIL: return true; case TypeTags.MAP: return isAllowedConstantType(((BMapType) type).constraint); case TypeTags.FINITE: BLangExpression finiteValue = ((BFiniteType) type).getValueSpace().toArray(new BLangExpression[0])[0]; return isAllowedConstantType(finiteValue.type); default: return false; } } public boolean 
isValidLiteral(BLangLiteral literal, BType targetType) { BType literalType = literal.type; if (literalType.tag == targetType.tag) { return true; } switch (targetType.tag) { case TypeTags.BYTE: return literalType.tag == TypeTags.INT && isByteLiteralValue((Long) literal.value); case TypeTags.DECIMAL: return literalType.tag == TypeTags.FLOAT || literalType.tag == TypeTags.INT; case TypeTags.FLOAT: return literalType.tag == TypeTags.INT; case TypeTags.SIGNED32_INT: return literalType.tag == TypeTags.INT && isSigned32LiteralValue((Long) literal.value); case TypeTags.SIGNED16_INT: return literalType.tag == TypeTags.INT && isSigned16LiteralValue((Long) literal.value); case TypeTags.SIGNED8_INT: return literalType.tag == TypeTags.INT && isSigned8LiteralValue((Long) literal.value); case TypeTags.UNSIGNED32_INT: return literalType.tag == TypeTags.INT && isUnsigned32LiteralValue((Long) literal.value); case TypeTags.UNSIGNED16_INT: return literalType.tag == TypeTags.INT && isUnsigned16LiteralValue((Long) literal.value); case TypeTags.UNSIGNED8_INT: return literalType.tag == TypeTags.INT && isUnsigned8LiteralValue((Long) literal.value); case TypeTags.CHAR_STRING: return literalType.tag == TypeTags.STRING && isCharLiteralValue((String) literal.value); default: return false; } } /** * Validate if the return type of the given function is a subtype of `error?`, containing `()`. 
* * @param function The function of which the return type should be validated * @param diagnosticCode The code to log if the return type is invalid */ public void validateErrorOrNilReturn(BLangFunction function, DiagnosticCode diagnosticCode) { BType returnType = function.returnTypeNode.type; if (returnType.tag == TypeTags.NIL) { return; } if (returnType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) returnType).getMemberTypes(); if (returnType.isNullable() && memberTypes.stream().allMatch(type -> type.tag == TypeTags.NIL || type.tag == TypeTags.ERROR)) { return; } } dlogHelper.error(function.returnTypeNode.pos, diagnosticCode, function.returnTypeNode.type.toString()); } /** * Type vector of size two, to hold the source and the target types. * * @since 0.982.0 */ private static class TypePair { BType sourceType; BType targetType; public TypePair(BType sourceType, BType targetType) { this.sourceType = sourceType; this.targetType = targetType; } @Override public boolean equals(Object obj) { if (!(obj instanceof TypePair)) { return false; } TypePair other = (TypePair) obj; return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType); } @Override public int hashCode() { return Objects.hash(sourceType, targetType); } } /** * A functional interface for parameterizing the type of type checking that needs to be done on the source and * target types. 
 *
 * @since 0.995.0
 */
private interface TypeEqualityPredicate {
    // Parameterizes the relation being checked between source and target;
    // unresolvedTypes records in-flight pairs to break recursive type cycles.
    boolean test(BType source, BType target, Set<TypePair> unresolvedTypes);
}

/**
 * Checks whether the given type has an implicit filler value, i.e. whether a member of
 * this type can be implicitly initialized without an explicit value.
 *
 * @param type the type to check
 * @return true if the type has a filler value
 */
public boolean hasFillerValue(BType type) {
    switch (type.tag) {
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.STRING:
        case TypeTags.BOOLEAN:
        case TypeTags.JSON:
        case TypeTags.XML:
        case TypeTags.NIL:
        case TypeTags.ANYDATA:
        case TypeTags.MAP:
        case TypeTags.ANY:
            return true;
        case TypeTags.ARRAY:
            return checkFillerValue((BArrayType) type);
        case TypeTags.FINITE:
            return checkFillerValue((BFiniteType) type);
        case TypeTags.UNION:
            return checkFillerValue((BUnionType) type);
        case TypeTags.OBJECT:
            return checkFillerValue((BObjectType) type);
        case TypeTags.RECORD:
            return checkFillerValue((BRecordType) type);
        case TypeTags.TUPLE:
            // A tuple has a filler value only when every member type does.
            BTupleType tupleType = (BTupleType) type;
            return tupleType.getTupleTypes().stream().allMatch(eleType -> hasFillerValue(eleType));
        default:
            // All builtin integer subtypes are treated as having a filler value.
            if (TypeTags.isIntegerTypeTag(type.tag)) {
                return true;
            }
            return false;
    }
}

// An object type has a filler value when it is non-abstract and its initializer
// (if any) returns nil and takes only defaultable parameters.
private boolean checkFillerValue(BObjectType type) {
    if ((type.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) {
        return false;
    }
    BAttachedFunction initFunction = ((BObjectTypeSymbol) type.tsymbol).initializerFunc;
    if (initFunction == null) {
        return true;
    }
    if (initFunction.symbol.getReturnType().getKind() != TypeKind.NIL) {
        return false;
    }
    for (BVarSymbol bVarSymbol : initFunction.symbol.getParameters()) {
        if (!bVarSymbol.defaultableParam) {
            return false;
        }
    }
    return true;
}

/**
 * This will handle two types. Singleton : As singleton can have one value that value should it self be a valid fill
 * value Union : 1. if nil is a member it is the fill values 2.
else all the values should belong to same type and * the default value for that type should be a member of the union precondition : value space should have at least * one element * * @param type BFiniteType union or finite * @return boolean whether type has a valid filler value or not */ private boolean checkFillerValue(BFiniteType type) { if (type.isNullable()) { return true; } if (type.getValueSpace().size() == 1) { return true; } Iterator iterator = type.getValueSpace().iterator(); BLangExpression firstElement = (BLangExpression) iterator.next(); boolean defaultFillValuePresent = isImplicitDefaultValue(firstElement); while (iterator.hasNext()) { BLangExpression value = (BLangExpression) iterator.next(); if (!isSameBasicType(value.type, firstElement.type)) { return false; } if (!defaultFillValuePresent && isImplicitDefaultValue(value)) { defaultFillValuePresent = true; } } return defaultFillValuePresent; } private boolean hasImplicitDefaultValue(Set<BLangExpression> valueSpace) { for (BLangExpression expression : valueSpace) { if (isImplicitDefaultValue(expression)) { return true; } } return false; } private boolean checkFillerValue(BUnionType type) { if (type.isNullable()) { return true; } Set<BType> memberTypes = new HashSet<>(); boolean hasFillerValue = false; boolean defaultValuePresent = false; boolean finiteTypePresent = false; for (BType member : type.getMemberTypes()) { if (member.tag == TypeTags.FINITE) { Set<BType> uniqueValues = getValueTypes(((BFiniteType) member).getValueSpace()); memberTypes.addAll(uniqueValues); if (!defaultValuePresent && hasImplicitDefaultValue(((BFiniteType) member).getValueSpace())) { defaultValuePresent = true; } finiteTypePresent = true; } else { memberTypes.add(member); } if (!hasFillerValue && hasFillerValue(member)) { hasFillerValue = true; } } if (!hasFillerValue) { return false; } Iterator<BType> iterator = memberTypes.iterator(); BType firstMember = iterator.next(); while (iterator.hasNext()) { if 
(!isSameBasicType(firstMember, iterator.next())) { return false; } } if (finiteTypePresent) { return defaultValuePresent; } return true; } private boolean isSameBasicType(BType source, BType target) { if (isSameType(source, target)) { return true; } if (TypeTags.isIntegerTypeTag(source.tag) && TypeTags.isIntegerTypeTag(target.tag)) { return true; } return false; } private Set<BType> getValueTypes(Set<BLangExpression> valueSpace) { Set<BType> uniqueType = new HashSet<>(); for (BLangExpression expression : valueSpace) { uniqueType.add(expression.type); } return uniqueType; } private boolean isImplicitDefaultValue(BLangExpression expression) { if ((expression.getKind() == NodeKind.LITERAL) || (expression.getKind() == NodeKind.NUMERIC_LITERAL)) { BLangLiteral literalExpression = (BLangLiteral) expression; BType literalExprType = literalExpression.type; Object value = literalExpression.getValue(); switch (literalExprType.getKind()) { case INT: case BYTE: return value.equals(Long.valueOf(0)); case STRING: return value == null || value.equals(""); case DECIMAL: case FLOAT: return value.equals(String.valueOf(0.0)); case BOOLEAN: return value.equals(Boolean.valueOf(false)); case NIL: return true; default: return false; } } return false; } private boolean checkFillerValue(BRecordType type) { for (BField field : type.fields) { if (Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)) { continue; } if (Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { return false; } } return true; } private boolean checkFillerValue(BArrayType type) { if (type.size == -1) { return true; } return hasFillerValue(type.eType); } }
class Types {

    private static final CompilerContext.Key<Types> TYPES_KEY = new CompilerContext.Key<>();

    private SymbolTable symTable;
    private SymbolResolver symResolver;
    private BLangDiagnosticLogHelper dlogHelper;
    private Names names;
    // NOTE(review): appears unused within this span — presumably used elsewhere in the class.
    private int finiteTypeCount = 0;
    // Union of the built-in xml subtypes: element|comment|processing-instruction|text.
    private BUnionType expandedXMLBuiltinSubtypes;

    /**
     * Returns the singleton {@code Types} instance for the given compiler context,
     * creating it on first access.
     *
     * @param context compiler context
     * @return the shared Types instance
     */
    public static Types getInstance(CompilerContext context) {
        Types types = context.get(TYPES_KEY);
        if (types == null) {
            types = new Types(context);
        }
        return types;
    }

    public Types(CompilerContext context) {
        // Register this instance in the context before resolving dependencies.
        context.put(TYPES_KEY, this);
        this.symTable = SymbolTable.getInstance(context);
        this.symResolver = SymbolResolver.getInstance(context);
        this.dlogHelper = BLangDiagnosticLogHelper.getInstance(context);
        this.names = Names.getInstance(context);
        this.expandedXMLBuiltinSubtypes = BUnionType.create(null,
                symTable.xmlElementType, symTable.xmlCommentType, symTable.xmlPIType, symTable.xmlTextType);
    }

    /**
     * Checks each actual type against the corresponding expected type, padding missing
     * expected types with {@code noType}.
     *
     * @param node        expression being checked (used for diagnostics)
     * @param actualTypes actual types
     * @param expTypes    expected types (may be shorter than actualTypes)
     * @return the resolved types, one per actual type
     */
    public List<BType> checkTypes(BLangExpression node,
                                  List<BType> actualTypes,
                                  List<BType> expTypes) {
        List<BType> resTypes = new ArrayList<>();
        for (int i = 0; i < actualTypes.size(); i++) {
            resTypes.add(checkType(node, actualTypes.get(i), expTypes.size() > i ? expTypes.get(i) : symTable.noType));
        }
        return resTypes;
    }

    /**
     * Checks an actual type against an expected type, reporting INCOMPATIBLE_TYPES on failure.
     */
    public BType checkType(BLangExpression node,
                           BType actualType,
                           BType expType) {
        return checkType(node, actualType, expType, DiagnosticCode.INCOMPATIBLE_TYPES);
    }

    /**
     * Checks an expression's actual type against the expected type, sets the expression
     * type, and inserts an implicit cast when required.
     *
     * @param expr       expression to type-check
     * @param actualType the actual type of the expression
     * @param expType    the expected type
     * @param diagCode   diagnostic code reported on mismatch
     * @return the resolved expression type (semanticError on failure)
     */
    public BType checkType(BLangExpression expr,
                           BType actualType,
                           BType expType,
                           DiagnosticCode diagCode) {
        expr.type = checkType(expr.pos, actualType, expType, diagCode);
        if (expr.type.tag == TypeTags.SEMANTIC_ERROR) {
            return expr.type;
        }
        // Insert implicit cast/conversion if actual and expected types differ but are compatible.
        setImplicitCastExpr(expr, actualType, expType);
        return expr.type;
    }

    /**
     * Core type check: returns the actual type if assignable to the expected type,
     * otherwise logs the diagnostic and returns semanticError. Propagates existing errors.
     */
    public BType checkType(DiagnosticPos pos,
                           BType actualType,
                           BType expType,
                           DiagnosticCode diagCode) {
        if (expType.tag == TypeTags.SEMANTIC_ERROR) {
            return expType;
        } else if (expType.tag == TypeTags.NONE) {
            // No expectation — accept the actual type as-is.
            return actualType;
        } else if (actualType.tag == TypeTags.SEMANTIC_ERROR) {
            return actualType;
        } else if (isAssignable(actualType, expType)) {
            return actualType;
        }

        // Neither type is an error, and the actual type is not assignable: report.
        dlogHelper.error(pos, diagCode, expType, actualType);
        return symTable.semanticError;
    }

    /**
     * Returns true when the (possibly union) type involves json — i.e. member access
     * should use lax typing rules.
     */
    public boolean isJSONContext(BType type) {
        if (type.tag == TypeTags.UNION) {
            return ((BUnionType) type).getMemberTypes().stream().anyMatch(memType -> memType.tag == TypeTags.JSON);
        }
        return type.tag == TypeTags.JSON;
    }

    /**
     * Returns true when the type is lax (json, xml, xml element, map of lax, or a union
     * whose members are all lax).
     */
    public boolean isLax(BType type) {
        switch (type.tag) {
            case TypeTags.JSON:
            case TypeTags.XML:
            case TypeTags.XML_ELEMENT:
                return true;
            case TypeTags.MAP:
                return isLax(((BMapType) type).constraint);
            case TypeTags.UNION:
                return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isLax);
        }
        return false;
    }

    /**
     * Checks structural same-type equality between two types.
     */
    public boolean isSameType(BType source, BType target) {
        return isSameType(source, target, new HashSet<>());
    }

    private boolean isSameType(BType source, BType target, Set<TypePair> unresolvedTypes) {
        // If the pair is already in flight, assume same to terminate recursive types.
        TypePair pair = new TypePair(source, target);
        if (unresolvedTypes.contains(pair)) {
            return true;
        }
        unresolvedTypes.add(pair);
        // Delegate the structural comparison to the visitor, dispatched on the target type.
        BTypeVisitor<BType, Boolean> sameTypeVisitor = new BSameTypeVisitor(unresolvedTypes);
        return target.accept(sameTypeVisitor, source);
    }

    /**
     * Returns true for simple value types: boolean, the integer subtypes, float,
     * decimal, and the string subtypes.
     */
    public boolean isValueType(BType type) {
        switch (type.tag) {
            case TypeTags.BOOLEAN:
            case TypeTags.BYTE:
            case TypeTags.DECIMAL:
            case TypeTags.FLOAT:
            case TypeTags.INT:
            case TypeTags.STRING:
            case TypeTags.SIGNED32_INT:
            case TypeTags.SIGNED16_INT:
            case TypeTags.SIGNED8_INT:
            case TypeTags.UNSIGNED32_INT:
            case TypeTags.UNSIGNED16_INT:
            case TypeTags.UNSIGNED8_INT:
            case TypeTags.CHAR_STRING:
                return true;
            default:
                return false;
        }
    }

    // Basic numeric types: tags ordered before STRING, plus the integer subtypes.
    boolean isBasicNumericType(BType type) {
        return type.tag < TypeTags.STRING || TypeTags.isIntegerTypeTag(type.tag);
    }

    // True when any value in the finite type's value space has a basic numeric type.
    boolean finiteTypeContainsNumericTypeValues(BFiniteType finiteType) {
        return finiteType.getValueSpace().stream().anyMatch(valueExpr -> isBasicNumericType(valueExpr.type));
    }

    /**
     * Returns true when the type is error, or a union containing an error member
     * (checked recursively through nested unions).
     */
    public boolean containsErrorType(BType type) {
        if (type.tag == TypeTags.UNION) {
            return ((BUnionType) type).getMemberTypes().stream()
                    .anyMatch(this::containsErrorType);
        }

        return type.tag == TypeTags.ERROR;
    }

    // True when the type (or every union member) is an array or tuple type.
    public boolean isSubTypeOfList(BType type) {
        if (type.tag != TypeTags.UNION) {
            return isSubTypeOfBaseType(type, TypeTags.ARRAY) || isSubTypeOfBaseType(type, TypeTags.TUPLE);
        }

        return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfList);
    }

    // True when the type (or every union member) is a map or record type.
    public boolean isSubTypeOfMapping(BType type) {
        if (type.tag != TypeTags.UNION) {
            return isSubTypeOfBaseType(type, TypeTags.MAP) || isSubTypeOfBaseType(type, TypeTags.RECORD);
        }

        return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfMapping);
    }

    /**
     * Checks whether the type (or every member of a union) has the given base type tag.
     * NOTE(review): for an xml base-type tag this returns true for any union without
     * inspecting members — presumably relying on prior xml-subtype expansion; confirm.
     */
    public boolean isSubTypeOfBaseType(BType type, int baseTypeTag) {
        if (type.tag != TypeTags.UNION) {
            return type.tag == baseTypeTag;
        }

        if (TypeTags.isXMLTypeTag(baseTypeTag)) {
            return true;
        }

        return ((BUnionType) type).getMemberTypes().stream().allMatch(memType -> memType.tag == baseTypeTag);
    }

    /**
     * Checks whether source type is assignable to the target type.
     * <p>
     * Source type is assignable to the target type if,
     * 1) the target type is any and the source type is not a value type.
     * 2) there exists an implicit cast symbol from source to target.
* 3) both types are JSON and the target constraint is no type. * 4) both types are array type and both array types are assignable. * 5) both types are MAP and the target constraint is any type or constraints are structurally equivalent. * * @param source type. * @param target type. * @return true if source type is assignable to the target type. */ public boolean isAssignable(BType source, BType target) { return isAssignable(source, target, new HashSet<>()); } boolean isStampingAllowed(BType source, BType target) { return (isAssignable(source, target) || isAssignable(target, source) || checkTypeEquivalencyForStamping(source, target) || checkTypeEquivalencyForStamping(target, source)); } private boolean checkTypeEquivalencyForStamping(BType source, BType target) { if (target.tag == TypeTags.RECORD) { if (source.tag == TypeTags.RECORD) { TypePair pair = new TypePair(source, target); Set<TypePair> unresolvedTypes = new HashSet<>(); unresolvedTypes.add(pair); return checkRecordEquivalencyForStamping((BRecordType) source, (BRecordType) target, unresolvedTypes); } else if (source.tag == TypeTags.MAP) { int mapConstraintTypeTag = ((BMapType) source).constraint.tag; if ((!(mapConstraintTypeTag == TypeTags.ANY || mapConstraintTypeTag == TypeTags.ANYDATA)) && ((BRecordType) target).sealed) { for (BField field : ((BStructureType) target).getFields()) { if (field.getType().tag != mapConstraintTypeTag) { return false; } } } return true; } } else if (target.tag == TypeTags.JSON) { return source.tag == TypeTags.JSON || source.tag == TypeTags.RECORD || source.tag == TypeTags.MAP; } else if (target.tag == TypeTags.MAP) { if (source.tag == TypeTags.MAP) { return isStampingAllowed(((BMapType) source).getConstraint(), ((BMapType) target).getConstraint()); } else if (source.tag == TypeTags.UNION) { return checkUnionEquivalencyForStamping(source, target); } } else if (target.tag == TypeTags.ARRAY) { if (source.tag == TypeTags.JSON) { return true; } else if (source.tag == TypeTags.TUPLE) 
{ BType arrayElementType = ((BArrayType) target).eType; for (BType tupleMemberType : ((BTupleType) source).getTupleTypes()) { if (!isStampingAllowed(tupleMemberType, arrayElementType)) { return false; } } return true; } else if (source.tag == TypeTags.ARRAY) { return checkTypeEquivalencyForStamping(((BArrayType) source).eType, ((BArrayType) target).eType); } } else if (target.tag == TypeTags.UNION) { return checkUnionEquivalencyForStamping(source, target); } else if (target.tag == TypeTags.TUPLE && source.tag == TypeTags.TUPLE) { return checkTupleEquivalencyForStamping(source, target); } return false; } private boolean checkRecordEquivalencyForStamping(BRecordType rhsType, BRecordType lhsType, Set<TypePair> unresolvedTypes) { if (Symbols.isFlagOn(lhsType.tsymbol.flags ^ rhsType.tsymbol.flags, Flags.PUBLIC)) { return false; } if (Symbols.isPrivate(lhsType.tsymbol) && rhsType.tsymbol.pkgID != lhsType.tsymbol.pkgID) { return false; } if (lhsType.fields.size() > rhsType.fields.size()) { return false; } if (lhsType.sealed && !rhsType.sealed) { return false; } return checkFieldEquivalencyForStamping(lhsType, rhsType, unresolvedTypes); } private boolean checkFieldEquivalencyForStamping(BStructureType lhsType, BStructureType rhsType, Set<TypePair> unresolvedTypes) { Map<Name, BField> rhsFields = rhsType.fields.stream().collect( Collectors.toMap(BField::getName, field -> field)); for (BField lhsField : lhsType.fields) { BField rhsField = rhsFields.get(lhsField.name); if (rhsField == null || !isStampingAllowed(rhsField.type, lhsField.type)) { return false; } } Map<Name, BField> lhsFields = lhsType.fields.stream().collect( Collectors.toMap(BField::getName, field -> field)); for (BField rhsField : rhsType.fields) { BField lhsField = lhsFields.get(rhsField.name); if (lhsField == null && !isStampingAllowed(rhsField.type, ((BRecordType) lhsType).restFieldType)) { return false; } } return true; } private boolean checkUnionEquivalencyForStamping(BType source, BType target) { 
Set<BType> sourceTypes = new LinkedHashSet<>(); Set<BType> targetTypes = new LinkedHashSet<>(); if (source.tag == TypeTags.UNION) { BUnionType sourceUnionType = (BUnionType) source; sourceTypes.addAll(sourceUnionType.getMemberTypes()); } else { sourceTypes.add(source); } if (target.tag == TypeTags.UNION) { BUnionType targetUnionType = (BUnionType) target; targetTypes.addAll(targetUnionType.getMemberTypes()); } else { targetTypes.add(target); } boolean notAssignable = sourceTypes .stream() .map(s -> targetTypes .stream() .anyMatch(t -> isStampingAllowed(s, t))) .anyMatch(assignable -> !assignable); return !notAssignable; } private boolean checkTupleEquivalencyForStamping(BType source, BType target) { if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) { return false; } BTupleType lhsTupleType = (BTupleType) target; BTupleType rhsTupleType = (BTupleType) source; if (lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) { return false; } for (int i = 0; i < lhsTupleType.tupleTypes.size(); i++) { if (!isStampingAllowed(rhsTupleType.tupleTypes.get(i), lhsTupleType.tupleTypes.get(i))) { return false; } } return true; } private boolean isAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) { if (isSameType(source, target)) { return true; } int sourceTag = source.tag; int targetTag = target.tag; if (sourceTag == TypeTags.BYTE && targetTag == TypeTags.INT) { return true; } if (TypeTags.isXMLTypeTag(sourceTag) && TypeTags.isXMLTypeTag(targetTag)) { return isXMLTypeAssignable(source, target, unresolvedTypes); } if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) { return true; } if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.XML_TEXT) { return true; } if (sourceTag == TypeTags.STRING && targetTag == TypeTags.XML_TEXT) { return true; } if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.STRING) { return true; } if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.CHAR_STRING) { return 
true; } if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ERROR) { return isErrorTypeAssignable((BErrorType) source, (BErrorType) target, unresolvedTypes); } else if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ANY) { return false; } if (sourceTag == TypeTags.NIL && (isNullable(target) || targetTag == TypeTags.JSON)) { return true; } if (targetTag == TypeTags.ANY && !containsErrorType(source) && !isValueType(source)) { return true; } if (targetTag == TypeTags.ANYDATA && !containsErrorType(source) && source.isAnydata()) { return true; } if (targetTag == TypeTags.READONLY && isReadonlyType(source)) { return true; } if (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) { BRecordType recordType = (BRecordType) source; return isAssignableRecordType(recordType, target); } if (targetTag == TypeTags.RECORD && sourceTag == TypeTags.MAP) { return isAssignableMapType((BMapType) source, (BRecordType) target); } if (target.getKind() == TypeKind.SERVICE && source.getKind() == TypeKind.SERVICE) { return true; } if (targetTag == TypeTags.TYPEDESC && sourceTag == TypeTags.TYPEDESC) { return isAssignable(((BTypedescType) source).constraint, (((BTypedescType) target).constraint), unresolvedTypes); } if (targetTag == TypeTags.TABLE && sourceTag == TypeTags.TABLE) { return isAssignableTableType((BTableType) source, (BTableType) target); } if (targetTag == TypeTags.STREAM && sourceTag == TypeTags.STREAM) { return isAssignable(((BStreamType) source).constraint, ((BStreamType) target).constraint, unresolvedTypes); } if (isBuiltInTypeWidenPossible(source, target) == TypeTestResult.TRUE) { return true; } if (sourceTag == TypeTags.FINITE) { return isFiniteTypeAssignable((BFiniteType) source, target, unresolvedTypes); } if ((targetTag == TypeTags.UNION || sourceTag == TypeTags.UNION) && isAssignableToUnionType(source, target, unresolvedTypes)) { return true; } if (targetTag == TypeTags.JSON) { if (sourceTag == TypeTags.ARRAY) { return 
isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes); } if (sourceTag == TypeTags.MAP) { return isAssignable(((BMapType) source).constraint, target, unresolvedTypes); } if (sourceTag == TypeTags.RECORD) { return isAssignableRecordType((BRecordType) source, target); } } if (targetTag == TypeTags.FUTURE && sourceTag == TypeTags.FUTURE) { if (((BFutureType) target).constraint.tag == TypeTags.NONE) { return true; } return isAssignable(((BFutureType) source).constraint, ((BFutureType) target).constraint, unresolvedTypes); } if (targetTag == TypeTags.MAP && sourceTag == TypeTags.MAP) { if (((BMapType) target).constraint.tag == TypeTags.ANY && ((BMapType) source).constraint.tag != TypeTags.UNION) { return true; } return isAssignable(((BMapType) source).constraint, ((BMapType) target).constraint, unresolvedTypes); } if ((sourceTag == TypeTags.OBJECT || sourceTag == TypeTags.RECORD) && (targetTag == TypeTags.OBJECT || targetTag == TypeTags.RECORD)) { return checkStructEquivalency(source, target, unresolvedTypes); } if (sourceTag == TypeTags.TUPLE && targetTag == TypeTags.ARRAY) { return isTupleTypeAssignableToArrayType((BTupleType) source, (BArrayType) target, unresolvedTypes); } if (sourceTag == TypeTags.ARRAY && targetTag == TypeTags.TUPLE) { return isArrayTypeAssignableToTupleType((BArrayType) source, (BTupleType) target, unresolvedTypes); } if (sourceTag == TypeTags.TUPLE || targetTag == TypeTags.TUPLE) { return isTupleTypeAssignable(source, target, unresolvedTypes); } if (sourceTag == TypeTags.INVOKABLE && targetTag == TypeTags.INVOKABLE) { return isFunctionTypeAssignable((BInvokableType) source, (BInvokableType) target, new HashSet<>()); } return sourceTag == TypeTags.ARRAY && targetTag == TypeTags.ARRAY && isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes); } private boolean isAssignableRecordType(BRecordType recordType, BType type) { BType targetType; switch (type.tag) { case TypeTags.MAP: targetType = ((BMapType) type).constraint; 
break; case TypeTags.JSON: targetType = type; break; default: throw new IllegalArgumentException("Incompatible target type: " + type.toString()); } return recordFieldsAssignableToType(recordType, targetType); } private boolean recordFieldsAssignableToType(BRecordType recordType, BType targetType) { for (BField field : recordType.fields) { if (!isAssignable(field.type, targetType)) { return false; } } if (!recordType.sealed) { return isAssignable(recordType.restFieldType, targetType); } return true; } private boolean isAssignableTableType(BTableType sourceTableType, BTableType targetTableType) { if (!isAssignable(sourceTableType.constraint, targetTableType.constraint)) { return false; } if (targetTableType.keyTypeConstraint == null && targetTableType.fieldNameList == null) { return true; } if (targetTableType.keyTypeConstraint != null) { if (sourceTableType.keyTypeConstraint != null && (isAssignable(sourceTableType.keyTypeConstraint, targetTableType.keyTypeConstraint))) { return true; } if (sourceTableType.fieldNameList == null) { return false; } List<BType> fieldTypes = new ArrayList<>(); sourceTableType.fieldNameList.forEach(field -> fieldTypes .add(getTableConstraintField(sourceTableType.constraint, field).type)); if (fieldTypes.size() == 1) { return isAssignable(fieldTypes.get(0), targetTableType.keyTypeConstraint); } BTupleType tupleType = new BTupleType(fieldTypes); return isAssignable(tupleType, targetTableType.keyTypeConstraint); } return targetTableType.fieldNameList.equals(sourceTableType.fieldNameList); } BField getTableConstraintField(BType constraintType, String fieldName) { List<BField> fieldList = ((BRecordType) constraintType).getFields(); for (BField field : fieldList) { if (field.name.toString().equals(fieldName)) { return field; } } return null; } private boolean isAssignableMapType(BMapType sourceMapType, BRecordType targetRecType) { if (targetRecType.sealed) { return false; } for (BField field : targetRecType.fields) { if 
(!(Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) && isAssignable(sourceMapType.constraint, field.type))) { return false; } } return isAssignable(sourceMapType.constraint, targetRecType.restFieldType); } private boolean isErrorTypeAssignable(BErrorType source, BErrorType target, Set<TypePair> unresolvedTypes) { if (target == symTable.errorType) { return true; } TypePair pair = new TypePair(source, target); if (unresolvedTypes.contains(pair)) { return true; } unresolvedTypes.add(pair); return isAssignable(source.reasonType, target.reasonType, unresolvedTypes) && isAssignable(source.detailType, target.detailType, unresolvedTypes); } private boolean isXMLTypeAssignable(BType sourceType, BType targetType, Set<TypePair> unresolvedTypes) { int sourceTag = sourceType.tag; int targetTag = targetType.tag; if (targetTag == TypeTags.XML) { BXMLType target = (BXMLType) targetType; if (target.constraint != null) { if (TypeTags.isXMLNonSequenceType(sourceTag)) { return isAssignable(sourceType, target.constraint, unresolvedTypes); } BXMLType source = (BXMLType) sourceType; return isAssignable(source.constraint, target.constraint, unresolvedTypes); } return true; } return sourceTag == targetTag; } private boolean isTupleTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) { if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) { return false; } BTupleType lhsTupleType = (BTupleType) target; BTupleType rhsTupleType = (BTupleType) source; if (lhsTupleType.restType == null && rhsTupleType.restType != null) { return false; } if (lhsTupleType.restType == null && lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) { return false; } if (lhsTupleType.restType != null && rhsTupleType.restType != null) { if (!isAssignable(rhsTupleType.restType, lhsTupleType.restType, unresolvedTypes)) { return false; } } if (lhsTupleType.tupleTypes.size() > rhsTupleType.tupleTypes.size()) { return false; } for (int i = 0; i < 
rhsTupleType.tupleTypes.size(); i++) { BType lhsType = (lhsTupleType.tupleTypes.size() > i) ? lhsTupleType.tupleTypes.get(i) : lhsTupleType.restType; if (!isAssignable(rhsTupleType.tupleTypes.get(i), lhsType, unresolvedTypes)) { return false; } } return true; } private boolean isTupleTypeAssignableToArrayType(BTupleType source, BArrayType target, Set<TypePair> unresolvedTypes) { if (target.state != BArrayState.UNSEALED && (source.restType != null || source.tupleTypes.size() != target.size)) { return false; } List<BType> sourceTypes = new ArrayList<>(source.tupleTypes); if (source.restType != null) { sourceTypes.add(source.restType); } return sourceTypes.stream() .allMatch(tupleElemType -> isAssignable(tupleElemType, target.eType, unresolvedTypes)); } private boolean isArrayTypeAssignableToTupleType(BArrayType source, BTupleType target, Set<TypePair> unresolvedTypes) { if (!target.tupleTypes.isEmpty()) { if (source.state == BArrayState.UNSEALED) { return false; } if (target.restType != null && target.tupleTypes.size() > source.size) { return false; } if (target.restType == null && target.tupleTypes.size() != source.size) { return false; } } List<BType> targetTypes = new ArrayList<>(target.tupleTypes); if (target.restType != null) { targetTypes.add(target.restType); } return targetTypes.stream() .allMatch(tupleElemType -> isAssignable(source.eType, tupleElemType, unresolvedTypes)); } private boolean isArrayTypesAssignable(BArrayType source, BType target, Set<TypePair> unresolvedTypes) { BType sourceElementType = source.getElementType(); if (target.tag == TypeTags.ARRAY) { BArrayType targetArrayType = (BArrayType) target; BType targetElementType = targetArrayType.getElementType(); if (targetArrayType.state == BArrayState.UNSEALED) { return isAssignable(sourceElementType, targetElementType, unresolvedTypes); } if (targetArrayType.size != source.size) { return false; } return isAssignable(sourceElementType, targetElementType, unresolvedTypes); } else if (target.tag == 
TypeTags.JSON) { return isAssignable(sourceElementType, target, unresolvedTypes); } return false; } private boolean isFunctionTypeAssignable(BInvokableType source, BInvokableType target, Set<TypePair> unresolvedTypes) { if (containsTypeParams(target)) { if (source.paramTypes.size() != target.paramTypes.size()) { return false; } for (int i = 0; i < source.paramTypes.size(); i++) { BType sourceParam = source.paramTypes.get(i); BType targetParam = target.paramTypes.get(i); boolean isTypeParam = TypeParamAnalyzer.isTypeParam(targetParam); if (isTypeParam) { if (!isAssignable(sourceParam, targetParam)) { return false; } } else { if (!isAssignable(targetParam, sourceParam)) { return false; } } } if (source.retType == null && target.retType == null) { return true; } else if (source.retType == null || target.retType == null) { return false; } return isAssignable(source.retType, target.retType, unresolvedTypes); } return checkFunctionTypeEquality(source, target, unresolvedTypes, (s, t, ut) -> isAssignable(t, s, ut)); } private boolean containsTypeParams(BInvokableType type) { boolean hasParameterizedTypes = type.paramTypes.stream() .anyMatch(t -> { if (t.tag == TypeTags.FUNCTION_POINTER) { return containsTypeParams((BInvokableType) t); } return TypeParamAnalyzer.isTypeParam(t); }); if (hasParameterizedTypes) { return hasParameterizedTypes; } if (type.retType.tag == TypeTags.FUNCTION_POINTER) { return containsTypeParams((BInvokableType) type.retType); } return TypeParamAnalyzer.isTypeParam(type.retType); } private boolean isSameFunctionType(BInvokableType source, BInvokableType target, Set<TypePair> unresolvedTypes) { return checkFunctionTypeEquality(source, target, unresolvedTypes, this::isSameType); } private boolean checkFunctionTypeEquality(BInvokableType source, BInvokableType target, Set<TypePair> unresolvedTypes, TypeEqualityPredicate equality) { if (source.paramTypes.size() != target.paramTypes.size()) { return false; } for (int i = 0; i < source.paramTypes.size(); 
i++) { if (!equality.test(source.paramTypes.get(i), target.paramTypes.get(i), unresolvedTypes)) { return false; } } if ((source.restType != null && target.restType == null) || target.restType != null && source.restType == null) { return false; } else if (source.restType != null && !equality.test(source.restType, target.restType, unresolvedTypes)) { return false; } if (source.retType == null && target.retType == null) { return true; } else if (source.retType == null || target.retType == null) { return false; } return isAssignable(source.retType, target.retType, unresolvedTypes); } public boolean checkArrayEquality(BType source, BType target, Set<TypePair> unresolvedTypes) { if (target.tag != TypeTags.ARRAY || source.tag != TypeTags.ARRAY) { return false; } BArrayType lhsArrayType = (BArrayType) target; BArrayType rhsArrayType = (BArrayType) source; boolean hasSameTypeElements = isSameType(lhsArrayType.eType, rhsArrayType.eType, unresolvedTypes); if (lhsArrayType.state == BArrayState.UNSEALED) { return (rhsArrayType.state == BArrayState.UNSEALED) && hasSameTypeElements; } return checkSealedArraySizeEquality(rhsArrayType, lhsArrayType) && hasSameTypeElements; } public boolean checkSealedArraySizeEquality(BArrayType rhsArrayType, BArrayType lhsArrayType) { return lhsArrayType.size == rhsArrayType.size; } public boolean checkStructEquivalency(BType rhsType, BType lhsType) { return checkStructEquivalency(rhsType, lhsType, new HashSet<>()); } private boolean checkStructEquivalency(BType rhsType, BType lhsType, Set<TypePair> unresolvedTypes) { TypePair pair = new TypePair(rhsType, lhsType); if (unresolvedTypes.contains(pair)) { return true; } unresolvedTypes.add(pair); if (rhsType.tag == TypeTags.OBJECT && lhsType.tag == TypeTags.OBJECT) { return checkObjectEquivalency((BObjectType) rhsType, (BObjectType) lhsType, unresolvedTypes); } if (rhsType.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) { return checkRecordEquivalency((BRecordType) rhsType, (BRecordType) 
lhsType, unresolvedTypes); } return false; } public boolean checkObjectEquivalency(BObjectType rhsType, BObjectType lhsType, Set<TypePair> unresolvedTypes) { BObjectTypeSymbol lhsStructSymbol = (BObjectTypeSymbol) lhsType.tsymbol; BObjectTypeSymbol rhsStructSymbol = (BObjectTypeSymbol) rhsType.tsymbol; List<BAttachedFunction> lhsFuncs = lhsStructSymbol.attachedFuncs; List<BAttachedFunction> rhsFuncs = ((BObjectTypeSymbol) rhsType.tsymbol).attachedFuncs; int lhsAttachedFuncCount = getObjectFuncCount(lhsStructSymbol); int rhsAttachedFuncCount = getObjectFuncCount(rhsStructSymbol); if (lhsType.fields.size() > rhsType.fields.size() || lhsAttachedFuncCount > rhsAttachedFuncCount) { return false; } if (lhsType.getFields().stream().anyMatch(field -> Symbols.isPrivate(field.symbol)) || lhsFuncs.stream().anyMatch(func -> Symbols.isPrivate(func.symbol))) { return false; } Map<Name, BField> rhsFields = rhsType.fields.stream().collect(Collectors.toMap(BField::getName, field -> field)); for (BField lhsField : lhsType.fields) { BField rhsField = rhsFields.get(lhsField.name); if (rhsField == null || !isInSameVisibilityRegion(lhsField.symbol, rhsField.symbol) || !isAssignable(rhsField.type, lhsField.type)) { return false; } } for (BAttachedFunction lhsFunc : lhsFuncs) { if (lhsFunc == lhsStructSymbol.initializerFunc) { continue; } BAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, unresolvedTypes); if (rhsFunc == null || !isInSameVisibilityRegion(lhsFunc.symbol, rhsFunc.symbol)) { return false; } } return true; } private int getObjectFuncCount(BObjectTypeSymbol sym) { if (sym.initializerFunc != null && sym.attachedFuncs.contains(sym.initializerFunc)) { return sym.attachedFuncs.size() - 1; } return sym.attachedFuncs.size(); } public boolean checkRecordEquivalency(BRecordType rhsType, BRecordType lhsType, Set<TypePair> unresolvedTypes) { if (lhsType.sealed && !rhsType.sealed) { return false; } if (!rhsType.sealed && !isAssignable(rhsType.restFieldType, 
lhsType.restFieldType, unresolvedTypes)) { return false; } return checkFieldEquivalency(lhsType, rhsType, unresolvedTypes); } public void setForeachTypedBindingPatternType(BLangForeach foreachNode) { BType collectionType = foreachNode.collection.type; BType varType; switch (collectionType.tag) { case TypeTags.STRING: varType = symTable.stringType; break; case TypeTags.ARRAY: BArrayType arrayType = (BArrayType) collectionType; varType = arrayType.eType; break; case TypeTags.TUPLE: BTupleType tupleType = (BTupleType) collectionType; LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes); if (tupleType.restType != null) { tupleTypes.add(tupleType.restType); } varType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes); break; case TypeTags.MAP: BMapType bMapType = (BMapType) collectionType; varType = bMapType.constraint; break; case TypeTags.RECORD: BRecordType recordType = (BRecordType) collectionType; varType = inferRecordFieldType(recordType); break; case TypeTags.XML: varType = BUnionType.create(null, symTable.xmlType, symTable.stringType); break; case TypeTags.STREAM: BStreamType streamType = (BStreamType) collectionType; if (streamType.constraint.tag == TypeTags.NONE) { varType = symTable.anydataType; break; } varType = streamType.constraint; if (streamType.error != null) { BType actualType = BUnionType.create(null, varType, streamType.error); dlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_TYPES, varType, actualType); } break; case TypeTags.OBJECT: BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType); if (nextMethodReturnType != null) { foreachNode.resultType = getRecordType(nextMethodReturnType); BType valueType = (foreachNode.resultType != null) ? 
((BRecordType) foreachNode.resultType).fields.get(0).type : null; BType errorType = getErrorType(nextMethodReturnType); if (errorType != null) { BType actualType = BUnionType.create(null, valueType, errorType); dlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_TYPES, valueType, actualType); } foreachNode.nillableResultType = nextMethodReturnType; foreachNode.varType = valueType; return; } dlogHelper.error(foreachNode.collection.pos, DiagnosticCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE); case TypeTags.SEMANTIC_ERROR: foreachNode.varType = symTable.semanticError; foreachNode.resultType = symTable.semanticError; foreachNode.nillableResultType = symTable.semanticError; return; default: foreachNode.varType = symTable.semanticError; foreachNode.resultType = symTable.semanticError; foreachNode.nillableResultType = symTable.semanticError; dlogHelper.error(foreachNode.collection.pos, DiagnosticCode.ITERABLE_NOT_SUPPORTED_COLLECTION, collectionType); return; } BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType, names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)); BUnionType nextMethodReturnType = (BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType); foreachNode.varType = varType; foreachNode.resultType = getRecordType(nextMethodReturnType); foreachNode.nillableResultType = nextMethodReturnType; } public void setFromClauseTypedBindingPatternType(BLangFromClause fromClause) { if (fromClause.collection == null) { return; } BType collectionType = fromClause.collection.type; BType varType; switch (collectionType.tag) { case TypeTags.STRING: varType = symTable.stringType; break; case TypeTags.ARRAY: BArrayType arrayType = (BArrayType) collectionType; varType = arrayType.eType; break; case TypeTags.TUPLE: BTupleType tupleType = (BTupleType) collectionType; LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes); if (tupleType.restType != 
null) { tupleTypes.add(tupleType.restType); } varType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes); break; case TypeTags.MAP: BMapType bMapType = (BMapType) collectionType; varType = bMapType.constraint; break; case TypeTags.RECORD: BRecordType recordType = (BRecordType) collectionType; varType = inferRecordFieldType(recordType); break; case TypeTags.XML: varType = BUnionType.create(null, symTable.xmlType, symTable.stringType); break; case TypeTags.STREAM: BStreamType streamType = (BStreamType) collectionType; if (streamType.constraint.tag == TypeTags.NONE) { varType = symTable.anydataType; break; } varType = streamType.constraint; break; case TypeTags.OBJECT: BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType); if (nextMethodReturnType != null) { fromClause.resultType = getRecordType(nextMethodReturnType); fromClause.nillableResultType = nextMethodReturnType; fromClause.varType = ((BRecordType) fromClause.resultType).fields.get(0).type; return; } dlogHelper.error(fromClause.collection.pos, DiagnosticCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE); case TypeTags.SEMANTIC_ERROR: fromClause.varType = symTable.semanticError; fromClause.resultType = symTable.semanticError; fromClause.nillableResultType = symTable.semanticError; return; default: fromClause.varType = symTable.semanticError; fromClause.resultType = symTable.semanticError; fromClause.nillableResultType = symTable.semanticError; dlogHelper.error(fromClause.collection.pos, DiagnosticCode.ITERABLE_NOT_SUPPORTED_COLLECTION, collectionType); return; } BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType, names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)); BUnionType nextMethodReturnType = (BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType); fromClause.varType = varType; fromClause.resultType = 
getRecordType(nextMethodReturnType); fromClause.nillableResultType = nextMethodReturnType;
    }

    /**
     * Looks up the object's iterator function (the iterable-object iterator method) and, when the
     * iterator conforms to the expected shape, returns the union type returned by its next() method.
     *
     * @param collectionType the object type being iterated
     * @return the next() return union type, or {@code null} if the object is not a valid iterable
     */
    public BUnionType getVarTypeFromIterableObject(BObjectType collectionType) {
        BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionType.tsymbol;
        for (BAttachedFunction func : objectTypeSymbol.attachedFuncs) {
            if (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
                return getVarTypeFromIteratorFunc(func);
            }
        }
        return null;
    }

    // Validates that the candidate iterator function takes no parameters, then inspects its
    // return type for the iterator-object shape. Returns null when the shape does not match.
    private BUnionType getVarTypeFromIteratorFunc(BAttachedFunction candidateIteratorFunc) {
        if (!candidateIteratorFunc.type.paramTypes.isEmpty()) {
            return null;
        }
        BType returnType = candidateIteratorFunc.type.retType;
        return getVarTypeFromIteratorFuncReturnType(returnType);
    }

    /**
     * Given the return type of an iterator function, finds the attached next() function on that
     * object and extracts its return union.
     *
     * @param returnType the iterator function's return type; must be an object type
     * @return the next() return union type, or {@code null} if the shape does not match
     */
    public BUnionType getVarTypeFromIteratorFuncReturnType(BType returnType) {
        BObjectTypeSymbol objectTypeSymbol;
        if (returnType.tag != TypeTags.OBJECT) {
            return null;
        }
        objectTypeSymbol = (BObjectTypeSymbol) returnType.tsymbol;
        for (BAttachedFunction func : objectTypeSymbol.attachedFuncs) {
            if (func.funcName.value.equals(BLangCompilerConstants.NEXT_FUNC)) {
                return getVarTypeFromNextFunc(func);
            }
        }
        return null;
    }

    // Validates that next() takes no parameters and returns the expected union shape;
    // returns that union, or null when the shape does not match.
    private BUnionType getVarTypeFromNextFunc(BAttachedFunction nextFunc) {
        BType returnType;
        if (!nextFunc.type.paramTypes.isEmpty()) {
            return null;
        }
        returnType = nextFunc.type.retType;
        if (checkNextFuncReturnType(returnType)) {
            return (BUnionType) returnType;
        }
        return null;
    }

    // The next() return type must be a union that (a) contains nil, (b) may contain errors, and
    // (c) after removing nil and error members, has exactly one record member of the right shape.
    private boolean checkNextFuncReturnType(BType returnType) {
        if (returnType.tag != TypeTags.UNION) {
            return false;
        }
        List<BType> types = new ArrayList<>(((BUnionType) returnType).getMemberTypes());
        if (!types.removeIf(type -> type.tag == TypeTags.NIL)) {
            // Nil member is mandatory: it signals iteration completion.
            return false;
        }
        types.removeIf(type -> type.tag == TypeTags.ERROR);
        if (types.size() != 1) {
            return false;
        }
        if (types.get(0).tag != TypeTags.RECORD) {
            return false;
        }
        BRecordType recordType = (BRecordType) types.get(0);
        return checkRecordTypeInNextFuncReturnType(recordType);
    }

    private boolean
checkRecordTypeInNextFuncReturnType(BRecordType recordType) {
        // The iterator contract requires a sealed record with exactly one field named "value".
        if (!recordType.sealed) {
            return false;
        }
        if (recordType.fields.size() != 1) {
            return false;
        }
        for (BField field : recordType.fields) {
            if (field.name.value.equals(BLangCompilerConstants.VALUE_FIELD)) {
                return true;
            }
        }
        return false;
    }

    // Returns the first RECORD member of the union, or null if it has none.
    private BRecordType getRecordType(BUnionType type) {
        for (BType member : type.getMemberTypes()) {
            if (member.tag == TypeTags.RECORD) {
                return (BRecordType) member;
            }
        }
        return null;
    }

    /**
     * Finds an error member of the given union, searching nested unions recursively.
     *
     * @param type the union type to search
     * @return the first error type found, or {@code null} if the union has no error member
     */
    public BErrorType getErrorType(BUnionType type) {
        for (BType member : type.getMemberTypes()) {
            if (member.tag == TypeTags.ERROR) {
                return (BErrorType) member;
            } else if (member.tag == TypeTags.UNION) {
                BErrorType e = getErrorType((BUnionType) member);
                if (e != null) {
                    return e;
                }
            }
        }
        return null;
    }

    // Returns the declared return type of the iterator's next() method.
    // NOTE(review): throws NullPointerException when no next() method exists — presumably callers
    // only invoke this after the iterator shape has been validated; confirm at call sites.
    public BType getResultTypeOfNextInvocation(BObjectType iteratorType) {
        BAttachedFunction nextFunc = getNextFunc(iteratorType);
        return Objects.requireNonNull(nextFunc).type.retType;
    }

    // Looks up the attached function named "next" on the iterator object type, or null if absent.
    private BAttachedFunction getNextFunc(BObjectType iteratorType) {
        BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
        for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
            if (bAttachedFunction.funcName.value
                    .equals(BLangCompilerConstants.NEXT_FUNC)) {
                return bAttachedFunction;
            }
        }
        return null;
    }

    /**
     * Infers the type a value read from a record's fields can take: a union of the distinct field
     * types (plus the rest field type for open records), collapsed to the single member type when
     * only one member remains.
     *
     * @param recordType the record whose field types are combined
     * @return the inferred union type, or the sole member type when the union has one member
     */
    public BType inferRecordFieldType(BRecordType recordType) {
        List<BField> fields = recordType.fields;
        BUnionType unionType = BUnionType.create(null);
        if (!recordType.sealed) {
            // Open records may carry additional fields of the rest field type.
            unionType.add(recordType.restFieldType);
        }
        for (BField field : fields) {
            if (isAssignable(field.type, unionType)) {
                // Already covered by the union built so far.
                continue;
            }
            if (isAssignable(unionType, field.type)) {
                // The new field type subsumes everything collected so far; restart with it alone.
                unionType = BUnionType.create(null);
            }
            unionType.add(field.type);
        }
        if (unionType.getMemberTypes().size() > 1) {
            unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
                    Names.EMPTY, recordType.tsymbol.pkgID, null, recordType.tsymbol.owner);
            return unionType;
        }
        return
unionType.getMemberTypes().iterator().next();
    }

    /**
     * Enum to represent type test result.
     *
     * @since 1.2.0
     */
    enum TypeTestResult {
        NOT_FOUND,
        TRUE,
        FALSE
    }

    /**
     * Decides whether a value of {@code actualType} can be implicitly widened to
     * {@code targetType} among compiler-known (built-in) types.
     *
     * @param actualType the type of the value being widened
     * @param targetType the type widened to
     * @return {@code TRUE} when widening is possible, {@code FALSE} when it is known to be
     *         impossible, {@code NOT_FOUND} when this check cannot decide and other rules apply
     */
    TypeTestResult isBuiltInTypeWidenPossible(BType actualType, BType targetType) {
        int targetTag = targetType.tag;
        int actualTag = actualType.tag;

        if (actualTag < TypeTags.JSON && targetTag < TypeTags.JSON) {
            // Both tags are basic types: rule out conversions between disjoint basic kinds.
            switch (actualTag) {
                case TypeTags.INT:
                case TypeTags.BYTE:
                case TypeTags.FLOAT:
                case TypeTags.DECIMAL:
                    if (targetTag == TypeTags.BOOLEAN || targetTag == TypeTags.STRING) {
                        return TypeTestResult.FALSE;
                    }
                    break;
                case TypeTags.BOOLEAN:
                    if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
                            || targetTag == TypeTags.DECIMAL || targetTag == TypeTags.STRING) {
                        return TypeTestResult.FALSE;
                    }
                    break;
                case TypeTags.STRING:
                    if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
                            || targetTag == TypeTags.DECIMAL || targetTag == TypeTags.BOOLEAN) {
                        return TypeTestResult.FALSE;
                    }
                    break;
            }
        }
        switch (actualTag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
            case TypeTags.FLOAT:
            case TypeTags.DECIMAL:
            case TypeTags.BOOLEAN:
            case TypeTags.STRING:
            case TypeTags.SIGNED32_INT:
            case TypeTags.SIGNED16_INT:
            case TypeTags.SIGNED8_INT:
            case TypeTags.UNSIGNED32_INT:
            case TypeTags.UNSIGNED16_INT:
            case TypeTags.UNSIGNED8_INT:
            case TypeTags.CHAR_STRING:
                // Any simple basic type widens to the top-like types.
                if (targetTag == TypeTags.JSON || targetTag == TypeTags.ANYDATA || targetTag == TypeTags.ANY
                        || targetTag == TypeTags.READONLY) {
                    return TypeTestResult.TRUE;
                }
                break;
            case TypeTags.ANYDATA:
            case TypeTags.TYPEDESC:
                if (targetTag == TypeTags.ANY) {
                    return TypeTestResult.TRUE;
                }
                break;
            default:
        }
        if (TypeTags.isIntegerTypeTag(targetTag) && actualTag == targetTag) {
            // Same integer subtype tag: not treated as a widening.
            return TypeTestResult.FALSE;
        }
        if ((TypeTags.isIntegerTypeTag(actualTag) || actualTag == TypeTags.BYTE)
                && (TypeTags.isIntegerTypeTag(targetTag) || targetTag == TypeTags.BYTE)) {
            // Both sides are integer subtypes; delegate to the subtype-lattice check.
            return checkBuiltInIntSubtypeWidenPossible(actualType, targetType);
        }
        if (actualTag ==
TypeTags.CHAR_STRING && TypeTags.STRING == targetTag) {
            return TypeTestResult.TRUE;
        }
        return TypeTestResult.NOT_FOUND;
    }

    /**
     * Checks widening between the built-in integer subtypes (int, byte and the
     * Signed/Unsigned N-bit subtypes): {@code TRUE} when every value of {@code actualType}
     * fits in {@code targetType}, otherwise {@code NOT_FOUND}.
     */
    private TypeTestResult checkBuiltInIntSubtypeWidenPossible(BType actualType, BType targetType) {
        int actualTag = actualType.tag;
        switch (targetType.tag) {
            case TypeTags.INT:
                // int is the widest integer type; any integer subtype or byte widens to it.
                if (actualTag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(actualTag)) {
                    return TypeTestResult.TRUE;
                }
                break;
            case TypeTags.SIGNED32_INT:
                if (actualTag == TypeTags.SIGNED16_INT || actualTag == TypeTags.SIGNED8_INT
                        || actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT
                        || actualTag == TypeTags.BYTE) {
                    return TypeTestResult.TRUE;
                }
                break;
            case TypeTags.SIGNED16_INT:
                if (actualTag == TypeTags.SIGNED8_INT || actualTag == TypeTags.UNSIGNED8_INT
                        || actualTag == TypeTags.BYTE) {
                    return TypeTestResult.TRUE;
                }
                break;
            case TypeTags.UNSIGNED32_INT:
                if (actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT
                        || actualTag == TypeTags.BYTE) {
                    return TypeTestResult.TRUE;
                }
                break;
            case TypeTags.UNSIGNED16_INT:
                if (actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) {
                    return TypeTestResult.TRUE;
                }
                break;
            case TypeTags.BYTE:
                // byte and Unsigned8 widen to each other (symmetric cases below/above).
                if (actualTag == TypeTags.UNSIGNED8_INT) {
                    return TypeTestResult.TRUE;
                }
                break;
            case TypeTags.UNSIGNED8_INT:
                if (actualTag == TypeTags.BYTE) {
                    return TypeTestResult.TRUE;
                }
                break;
        }
        return TypeTestResult.NOT_FOUND;
    }

    public boolean isImplicityCastable(BType actualType, BType targetType) {
        /* The word Builtin refers for Compiler known types.
*/ BType newTargetType = targetType; if ((targetType.tag == TypeTags.UNION || targetType.tag == TypeTags.FINITE) && isValueType(actualType)) { newTargetType = symTable.anyType; } TypeTestResult result = isBuiltInTypeWidenPossible(actualType, newTargetType); if (result != TypeTestResult.NOT_FOUND) { return result == TypeTestResult.TRUE; } if (isValueType(targetType) && (actualType.tag == TypeTags.FINITE || (actualType.tag == TypeTags.UNION && ((BUnionType) actualType).getMemberTypes().stream() .anyMatch(type -> type.tag == TypeTags.FINITE && isAssignable(type, targetType))))) { return targetType.tag == TypeTags.INT || targetType.tag == TypeTags.BYTE || targetType.tag == TypeTags.FLOAT || targetType.tag == TypeTags.STRING || targetType.tag == TypeTags.BOOLEAN; } else if (targetType.tag == TypeTags.ERROR && (actualType.tag == TypeTags.UNION && isAllErrorMembers((BUnionType) actualType))) { return true; } return false; } public boolean isTypeCastable(BLangExpression expr, BType sourceType, BType targetType) { if (sourceType.tag == TypeTags.SEMANTIC_ERROR || targetType.tag == TypeTags.SEMANTIC_ERROR || sourceType == targetType) { return true; } if (isAssignable(sourceType, targetType) || isAssignable(targetType, sourceType)) { return true; } if (isNumericConversionPossible(expr, sourceType, targetType)) { return true; } boolean validTypeCast = false; if (sourceType.tag == TypeTags.UNION) { if (getTypeForUnionTypeMembersAssignableToType((BUnionType) sourceType, targetType) != symTable.semanticError) { validTypeCast = true; } } if (targetType.tag == TypeTags.UNION) { if (getTypeForUnionTypeMembersAssignableToType((BUnionType) targetType, sourceType) != symTable.semanticError) { validTypeCast = true; } } if (sourceType.tag == TypeTags.FINITE) { if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) sourceType, targetType) != symTable.semanticError) { validTypeCast = true; } } if (targetType.tag == TypeTags.FINITE) { if 
(getTypeForFiniteTypeValuesAssignableToType((BFiniteType) targetType, sourceType) != symTable.semanticError) { validTypeCast = true; } } if (validTypeCast) { if (isValueType(sourceType)) { setImplicitCastExpr(expr, sourceType, symTable.anyType); } return true; } return false; } boolean isNumericConversionPossible(BLangExpression expr, BType sourceType, BType targetType) { final boolean isSourceNumericType = isBasicNumericType(sourceType); final boolean isTargetNumericType = isBasicNumericType(targetType); if (isSourceNumericType && isTargetNumericType) { return true; } if (targetType.tag == TypeTags.UNION) { HashSet<Integer> typeTags = new HashSet<>(); for (BType bType : ((BUnionType) targetType).getMemberTypes()) { if (isBasicNumericType(bType)) { typeTags.add(bType.tag); if (typeTags.size() > 1) { return false; } } } } if (!isTargetNumericType && targetType.tag != TypeTags.UNION) { return false; } if (isSourceNumericType) { setImplicitCastExpr(expr, sourceType, symTable.anyType); return true; } switch (sourceType.tag) { case TypeTags.ANY: case TypeTags.ANYDATA: case TypeTags.JSON: return true; case TypeTags.UNION: for (BType memType : ((BUnionType) sourceType).getMemberTypes()) { if (isBasicNumericType(memType) || (memType.tag == TypeTags.FINITE && finiteTypeContainsNumericTypeValues((BFiniteType) memType))) { return true; } } break; case TypeTags.FINITE: if (finiteTypeContainsNumericTypeValues((BFiniteType) sourceType)) { return true; } break; } return false; } private boolean isAllErrorMembers(BUnionType actualType) { return actualType.getMemberTypes().stream().allMatch(t -> isAssignable(t, symTable.errorType)); } public void setImplicitCastExpr(BLangExpression expr, BType actualType, BType expType) { if (!isImplicityCastable(actualType, expType)) { return; } BLangTypeConversionExpr implicitConversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); implicitConversionExpr.pos = expr.pos; implicitConversionExpr.expr = 
expr.impConversionExpr == null ? expr : expr.impConversionExpr; implicitConversionExpr.type = expType; implicitConversionExpr.targetType = expType; expr.impConversionExpr = implicitConversionExpr; } public BType getElementType(BType type) { if (type.tag != TypeTags.ARRAY) { return type; } return getElementType(((BArrayType) type).getElementType()); } public boolean checkListenerCompatibility(BType type) { if (type.tag != TypeTags.OBJECT) { return false; } final BSymbol bSymbol = symTable.langObjectModuleSymbol.scope.lookup(Names.LISTENER).symbol; if (bSymbol == symTable.notFoundSymbol || bSymbol.type.tag != TypeTags.OBJECT) { throw new AssertionError("Listener object not defined."); } BObjectType rhsType = (BObjectType) type; BObjectType lhsType = (BObjectType) bSymbol.type; BStructureTypeSymbol lhsStructSymbol = (BStructureTypeSymbol) lhsType.tsymbol; List<BAttachedFunction> lhsFuncs = lhsStructSymbol.attachedFuncs; List<BAttachedFunction> rhsFuncs = ((BStructureTypeSymbol) rhsType.tsymbol).attachedFuncs; int lhsAttachedFuncCount = lhsStructSymbol.initializerFunc != null ? 
lhsFuncs.size() - 1 : lhsFuncs.size(); if (lhsAttachedFuncCount > rhsFuncs.size()) { return false; } for (BAttachedFunction lhsFunc : lhsFuncs) { if (lhsFunc == lhsStructSymbol.initializerFunc) { continue; } if (!Symbols.isPublic(lhsFunc.symbol)) { return false; } BAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, new HashSet<>()); if (rhsFunc == null || !Symbols.isPublic(rhsFunc.symbol)) { return false; } } return true; } public boolean isValidErrorDetailType(BType detailType) { switch (detailType.tag) { case TypeTags.MAP: case TypeTags.RECORD: return isAssignable(detailType, symTable.detailType); } return false; } private boolean isNullable(BType fieldType) { return fieldType.isNullable(); } private class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> { Set<TypePair> unresolvedTypes; BSameTypeVisitor(Set<TypePair> unresolvedTypes) { this.unresolvedTypes = unresolvedTypes; } @Override public Boolean visit(BType t, BType s) { if (t == s) { return true; } switch (t.tag) { case TypeTags.INT: case TypeTags.BYTE: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.STRING: case TypeTags.BOOLEAN: case TypeTags.ANY: case TypeTags.ANYDATA: return t.tag == s.tag && (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s)); default: break; } return false; } @Override public Boolean visit(BBuiltInRefType t, BType s) { return t == s; } @Override public Boolean visit(BAnyType t, BType s) { return t == s; } @Override public Boolean visit(BAnydataType t, BType s) { return t == s; } @Override public Boolean visit(BMapType t, BType s) { if (s.tag != TypeTags.MAP) { return false; } BMapType sType = ((BMapType) s); return isSameType(sType.constraint, t.constraint, this.unresolvedTypes); } @Override public Boolean visit(BFutureType t, BType s) { return s.tag == TypeTags.FUTURE && t.constraint.tag == ((BFutureType) s).constraint.tag; } @Override public Boolean visit(BXMLType t, BType s) { return visit((BBuiltInRefType) t, s); } 
@Override public Boolean visit(BJSONType t, BType s) { return s.tag == TypeTags.JSON; } @Override public Boolean visit(BArrayType t, BType s) { return s.tag == TypeTags.ARRAY && checkArrayEquality(s, t, new HashSet<>()); } @Override public Boolean visit(BObjectType t, BType s) { if (t == s) { return true; } if (s.tag != TypeTags.OBJECT) { return false; } return t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name); } @Override public Boolean visit(BRecordType t, BType s) { if (t == s) { return true; } if (s.tag != TypeTags.RECORD) { return false; } BRecordType source = (BRecordType) s; if (source.fields.size() != t.fields.size()) { return false; } boolean notSameType = source.fields .stream() .map(fs -> t.fields.stream() .anyMatch(ft -> fs.name.equals(ft.name) && isSameType(fs.type, ft.type, this.unresolvedTypes) && hasSameOptionalFlag(fs.symbol, ft.symbol))) .anyMatch(foundSameType -> !foundSameType); if (notSameType) { return false; } return isSameType(source.restFieldType, t.restFieldType, unresolvedTypes); } private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) { return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL; } public Boolean visit(BTupleType t, BType s) { if (s.tag != TypeTags.TUPLE) { return false; } BTupleType source = (BTupleType) s; if (source.tupleTypes.size() != t.tupleTypes.size()) { return false; } for (int i = 0; i < source.tupleTypes.size(); i++) { if (t.getTupleTypes().get(i) == symTable.noType) { continue; } if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) { return false; } } return true; } @Override public Boolean visit(BStreamType t, BType s) { return t == s; } @Override public Boolean visit(BTableType t, BType s) { return t == s; } @Override public Boolean visit(BInvokableType t, BType s) { return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, new HashSet<>()); } @Override public Boolean visit(BUnionType 
tUnionType, BType s) { if (s.tag != TypeTags.UNION) { return false; } BUnionType sUnionType = (BUnionType) s; if (sUnionType.getMemberTypes().size() != tUnionType.getMemberTypes().size()) { return false; } Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes()); Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes()); boolean notSameType = sourceTypes .stream() .map(sT -> targetTypes .stream() .anyMatch(it -> isSameType(it, sT, this.unresolvedTypes))) .anyMatch(foundSameType -> !foundSameType); return !notSameType; } @Override public Boolean visit(BErrorType t, BType s) { if (s.tag != TypeTags.ERROR) { return false; } BErrorType source = (BErrorType) s; if (!isSameType(source.reasonType, t.reasonType, this.unresolvedTypes)) { return false; } if (source.detailType == t.detailType) { return true; } return isSameType(source.detailType, t.detailType, this.unresolvedTypes); } @Override public Boolean visit(BServiceType t, BType s) { return t == s || t.tag == s.tag; } @Override public Boolean visit(BTypedescType t, BType s) { if (s.tag != TypeTags.TYPEDESC) { return false; } BTypedescType sType = ((BTypedescType) s); return isSameType(sType.constraint, t.constraint, this.unresolvedTypes); } @Override public Boolean visit(BFiniteType t, BType s) { return s == t; } }; private boolean checkFieldEquivalency(BRecordType lhsType, BRecordType rhsType, Set<TypePair> unresolvedTypes) { Map<Name, BField> rhsFields = rhsType.fields.stream().collect(Collectors.toMap(BField::getName, f -> f)); for (BField lhsField : lhsType.fields) { BField rhsField = rhsFields.get(lhsField.name); if (rhsField == null) { return false; } if (!Symbols.isOptional(lhsField.symbol) && Symbols.isOptional(rhsField.symbol)) { return false; } if (!isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) { return false; } rhsFields.remove(lhsField.name); } return rhsFields.entrySet().stream().allMatch( fieldEntry -> isAssignable(fieldEntry.getValue().type, 
lhsType.restFieldType, unresolvedTypes)); } private BAttachedFunction getMatchingInvokableType(List<BAttachedFunction> rhsFuncList, BAttachedFunction lhsFunc, Set<TypePair> unresolvedTypes) { return rhsFuncList.stream() .filter(rhsFunc -> lhsFunc.funcName.equals(rhsFunc.funcName)) .filter(rhsFunc -> isFunctionTypeAssignable(rhsFunc.type, lhsFunc.type, unresolvedTypes)) .findFirst() .orElse(null); } private boolean isInSameVisibilityRegion(BSymbol lhsSym, BSymbol rhsSym) { if (Symbols.isPrivate(lhsSym)) { return Symbols.isPrivate(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID) && lhsSym.owner.name.equals(rhsSym.owner.name); } else if (Symbols.isPublic(lhsSym)) { return Symbols.isPublic(rhsSym); } return !Symbols.isPrivate(rhsSym) && !Symbols.isPublic(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID); } private boolean isAssignableToUnionType(BType source, BType target, Set<TypePair> unresolvedTypes) { Set<BType> sourceTypes = new LinkedHashSet<>(); Set<BType> targetTypes = new LinkedHashSet<>(); if (source.tag == TypeTags.UNION) { BUnionType sourceUnionType = (BUnionType) source; sourceTypes.addAll(sourceUnionType.getMemberTypes()); } else { sourceTypes.add(source); } if (target.tag == TypeTags.UNION) { BUnionType targetUnionType = (BUnionType) target; targetTypes.addAll(targetUnionType.getMemberTypes()); } else { targetTypes.add(target); } return sourceTypes.stream() .allMatch(s -> (targetTypes.stream().anyMatch(t -> isAssignable(s, t, unresolvedTypes))) || (s.tag == TypeTags.FINITE && isAssignable(s, target, unresolvedTypes)) || (s.tag == TypeTags.XML && isAssignableToUnionType(expandedXMLBuiltinSubtypes, target, unresolvedTypes))); } private boolean isFiniteTypeAssignable(BFiniteType finiteType, BType targetType, Set<TypePair> unresolvedTypes) { if (targetType.tag == TypeTags.FINITE) { return finiteType.getValueSpace().stream() .allMatch(expression -> isAssignableToFiniteType(targetType, (BLangLiteral) expression)); } if (targetType.tag == TypeTags.UNION) { List<BType> 
unionMemberTypes = getAllTypes(targetType); return finiteType.getValueSpace().stream() .allMatch(valueExpr -> unionMemberTypes.stream() .anyMatch(targetMemType -> targetMemType.tag == TypeTags.FINITE ? isAssignableToFiniteType(targetMemType, (BLangLiteral) valueExpr) : isAssignable(valueExpr.type, targetType, unresolvedTypes))); } return finiteType.getValueSpace().stream() .allMatch(expression -> isAssignable(expression.type, targetType, unresolvedTypes)); } boolean isAssignableToFiniteType(BType type, BLangLiteral literalExpr) { if (type.tag != TypeTags.FINITE) { return false; } BFiniteType expType = (BFiniteType) type; return expType.getValueSpace().stream().anyMatch(memberLiteral -> { if (((BLangLiteral) memberLiteral).value == null) { return literalExpr.value == null; } return checkLiteralAssignabilityBasedOnType((BLangLiteral) memberLiteral, literalExpr); }); } /** * Method to check the literal assignability based on the types of the literals. For numeric literals the * assignability depends on the equivalency of the literals. If the candidate literal could either be a simple * literal or a constant. In case of a constant, it is assignable to the base literal if and only if both * literals have same type and equivalent values. * * @param baseLiteral Literal based on which we check the assignability. * @param candidateLiteral Literal to be tested whether it is assignable to the base literal or not. * @return true if assignable; false otherwise. 
*/ boolean checkLiteralAssignabilityBasedOnType(BLangLiteral baseLiteral, BLangLiteral candidateLiteral) { if (baseLiteral.getKind() != candidateLiteral.getKind()) { return false; } Object baseValue = baseLiteral.value; Object candidateValue = candidateLiteral.value; int candidateTypeTag = candidateLiteral.type.tag; switch (baseLiteral.type.tag) { case TypeTags.BYTE: if (candidateTypeTag == TypeTags.BYTE || (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant && isByteLiteralValue((Long) candidateValue))) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.INT: if (candidateTypeTag == TypeTags.INT) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.SIGNED32_INT: if (candidateTypeTag == TypeTags.INT && isSigned32LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.SIGNED16_INT: if (candidateTypeTag == TypeTags.INT && isSigned16LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.SIGNED8_INT: if (candidateTypeTag == TypeTags.INT && isSigned8LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.UNSIGNED32_INT: if (candidateTypeTag == TypeTags.INT && isUnsigned32LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.UNSIGNED16_INT: if (candidateTypeTag == TypeTags.INT && isUnsigned16LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue(); } break; case TypeTags.UNSIGNED8_INT: if (candidateTypeTag == TypeTags.INT && isUnsigned8LiteralValue((Long) candidateValue)) { return ((Number) baseValue).longValue() == ((Number) 
candidateValue).longValue();
                }
                break;
            case TypeTags.FLOAT:
                String baseValueStr = String.valueOf(baseValue);
                String originalValue = baseLiteral.originalValue != null ? baseLiteral.originalValue : baseValueStr;
                if (NumericLiteralSupport.isDecimalDiscriminated(originalValue)) {
                    // A decimal-discriminated literal is never a float value.
                    return false;
                }
                double baseDoubleVal = Double.parseDouble(baseValueStr);
                double candidateDoubleVal;
                if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
                    candidateDoubleVal = ((Long) candidateValue).doubleValue();
                    return baseDoubleVal == candidateDoubleVal;
                } else if (candidateTypeTag == TypeTags.FLOAT) {
                    candidateDoubleVal = Double.parseDouble(String.valueOf(candidateValue));
                    return baseDoubleVal == candidateDoubleVal;
                }
                break;
            case TypeTags.DECIMAL:
                BigDecimal baseDecimalVal = NumericLiteralSupport.parseBigDecimal(baseValue);
                BigDecimal candidateDecimalVal;
                if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
                    candidateDecimalVal = new BigDecimal((long) candidateValue, MathContext.DECIMAL128);
                    return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
                } else if (candidateTypeTag == TypeTags.FLOAT && !candidateLiteral.isConstant
                        || candidateTypeTag == TypeTags.DECIMAL) {
                    if (NumericLiteralSupport.isFloatDiscriminated(String.valueOf(candidateValue))) {
                        // A float-discriminated literal is never a decimal value.
                        return false;
                    }
                    candidateDecimalVal = NumericLiteralSupport.parseBigDecimal(candidateValue);
                    return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
                }
                break;
            default:
                return baseValue.equals(candidateValue);
        }
        return false;
    }

    // True when the long value fits in the byte range [BBYTE_MIN_VALUE, BBYTE_MAX_VALUE].
    // FIX: compare the full long value. The previous intValue() based check silently truncated
    // values outside the 32-bit range (e.g. 4294967296L has intValue() == 0) and wrongly
    // accepted them as byte literal values.
    boolean isByteLiteralValue(Long longObject) {
        return (longObject.longValue() >= BBYTE_MIN_VALUE && longObject.longValue() <= BBYTE_MAX_VALUE);
    }

    // True when the long value fits in the Signed32 range (already compares the full long value).
    boolean isSigned32LiteralValue(Long longObject) {
        return (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE);
    }

    // True when the long value fits in the Signed16 range.
    // FIX: compare the full long value instead of intValue(), which truncates values outside
    // the 32-bit range and wrongly accepted them.
    boolean isSigned16LiteralValue(Long longObject) {
        return (longObject.longValue() >= SIGNED16_MIN_VALUE && longObject.longValue() <= SIGNED16_MAX_VALUE);
    }

    boolean isSigned8LiteralValue(Long longObject) { return
(longObject.intValue() >= SIGNED8_MIN_VALUE && longObject.intValue() <= SIGNED8_MAX_VALUE); } boolean isUnsigned32LiteralValue(Long longObject) { return (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE); } boolean isUnsigned16LiteralValue(Long longObject) { return (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED16_MAX_VALUE); } boolean isUnsigned8LiteralValue(Long longObject) { return (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED8_MAX_VALUE); } boolean isCharLiteralValue(String literal) { return (literal.codePoints().count() == 1); } /** * Method to retrieve a type representing all the values in the value space of a finite type that are assignable to * the target type. * * @param finiteType the finite type * @param targetType the target type * @return a new finite type if at least one value in the value space of the specified finiteType is * assignable to targetType (the same if all are assignable), else semanticError */ BType getTypeForFiniteTypeValuesAssignableToType(BFiniteType finiteType, BType targetType) { if (isAssignable(finiteType, targetType)) { return finiteType; } Set<BLangExpression> matchingValues = finiteType.getValueSpace().stream() .filter( expr -> isAssignable(expr.type, targetType) || isAssignableToFiniteType(targetType, (BLangLiteral) expr) || (targetType.tag == TypeTags.UNION && ((BUnionType) targetType).getMemberTypes().stream() .filter(memType -> memType.tag == TypeTags.FINITE) .anyMatch(filteredType -> isAssignableToFiniteType(filteredType, (BLangLiteral) expr)))) .collect(Collectors.toSet()); if (matchingValues.isEmpty()) { return symTable.semanticError; } BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteType.tsymbol.flags, names.fromString("$anonType$" + finiteTypeCount++), finiteType.tsymbol.pkgID, null, finiteType.tsymbol.owner); BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, matchingValues); finiteTypeSymbol.type = intersectingFiniteType; 
return intersectingFiniteType; } /** * Method to retrieve a type representing all the member types of a union type that are assignable to * the target type. * * @param unionType the union type * @param targetType the target type * @return a single type or a new union type if at least one member type of the union type is * assignable to targetType, else semanticError */ BType getTypeForUnionTypeMembersAssignableToType(BUnionType unionType, BType targetType) { List<BType> intersection = new LinkedList<>(); unionType.getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { BType finiteTypeWithMatches = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) memType, targetType); if (finiteTypeWithMatches != symTable.semanticError) { intersection.add(finiteTypeWithMatches); } } else { if (isAssignable(memType, targetType)) { intersection.add(memType); } } }); if (intersection.isEmpty()) { return symTable.semanticError; } if (intersection.size() == 1) { return intersection.get(0); } else { return BUnionType.create(null, new LinkedHashSet<>(intersection)); } } boolean validEqualityIntersectionExists(BType lhsType, BType rhsType) { if (!lhsType.isPureType() || !rhsType.isPureType()) { return false; } if (isAssignable(lhsType, rhsType) || isAssignable(rhsType, lhsType)) { return true; } Set<BType> lhsTypes = expandAndGetMemberTypesRecursive(lhsType); Set<BType> rhsTypes = expandAndGetMemberTypesRecursive(rhsType); return equalityIntersectionExists(lhsTypes, rhsTypes); } private boolean equalityIntersectionExists(Set<BType> lhsTypes, Set<BType> rhsTypes) { if ((lhsTypes.contains(symTable.anydataType) && rhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR)) || (rhsTypes.contains(symTable.anydataType) && lhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR))) { return true; } boolean matchFound = lhsTypes .stream() .anyMatch(s -> rhsTypes .stream() .anyMatch(t -> isSameType(s, t))); if (!matchFound) { matchFound = 
equalityIntersectionExistsForComplexTypes(lhsTypes, rhsTypes); } return matchFound; } /** * Retrieves member types of the specified type, expanding maps/arrays of/constrained by unions types to individual * maps/arrays. * * e.g., (string|int)[] would cause three entries as string[], int[], (string|int)[] * * @param bType the type for which member types needs to be identified * @return a set containing all the retrieved member types */ public Set<BType> expandAndGetMemberTypesRecursive(BType bType) { Set<BType> memberTypes = new LinkedHashSet<>(); switch (bType.tag) { case TypeTags.BYTE: case TypeTags.INT: memberTypes.add(symTable.intType); memberTypes.add(symTable.byteType); break; case TypeTags.FINITE: BFiniteType expType = (BFiniteType) bType; expType.getValueSpace().forEach(value -> { memberTypes.add(value.type); }); break; case TypeTags.UNION: BUnionType unionType = (BUnionType) bType; unionType.getMemberTypes().forEach(member -> { memberTypes.addAll(expandAndGetMemberTypesRecursive(member)); }); break; case TypeTags.ARRAY: BType arrayElementType = ((BArrayType) bType).getElementType(); if (((BArrayType) bType).getSize() != -1) { memberTypes.add(new BArrayType(arrayElementType)); } if (arrayElementType.tag == TypeTags.UNION) { Set<BType> elementUnionTypes = expandAndGetMemberTypesRecursive(arrayElementType); elementUnionTypes.forEach(elementUnionType -> { memberTypes.add(new BArrayType(elementUnionType)); }); } memberTypes.add(bType); break; case TypeTags.MAP: BType mapConstraintType = ((BMapType) bType).getConstraint(); if (mapConstraintType.tag == TypeTags.UNION) { Set<BType> constraintUnionTypes = expandAndGetMemberTypesRecursive(mapConstraintType); constraintUnionTypes.forEach(constraintUnionType -> { memberTypes.add(new BMapType(TypeTags.MAP, constraintUnionType, symTable.mapType.tsymbol)); }); } memberTypes.add(bType); break; default: memberTypes.add(bType); } return memberTypes; } private boolean tupleIntersectionExists(BTupleType lhsType, BTupleType 
rhsType) { if (lhsType.getTupleTypes().size() != rhsType.getTupleTypes().size()) { return false; } List<BType> lhsMemberTypes = lhsType.getTupleTypes(); List<BType> rhsMemberTypes = rhsType.getTupleTypes(); for (int i = 0; i < lhsType.getTupleTypes().size(); i++) { if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberTypes.get(i)), expandAndGetMemberTypesRecursive(rhsMemberTypes.get(i)))) { return false; } } return true; } private boolean equalityIntersectionExistsForComplexTypes(Set<BType> lhsTypes, Set<BType> rhsTypes) { for (BType lhsMemberType : lhsTypes) { switch (lhsMemberType.tag) { case TypeTags.INT: case TypeTags.STRING: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.BOOLEAN: case TypeTags.NIL: if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) { return true; } break; case TypeTags.JSON: if (jsonEqualityIntersectionExists(rhsTypes)) { return true; } break; case TypeTags.TUPLE: if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE && tupleIntersectionExists((BTupleType) lhsMemberType, (BTupleType) rhsMemberType))) { return true; } if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY && arrayTupleEqualityIntersectionExists((BArrayType) rhsMemberType, (BTupleType) lhsMemberType))) { return true; } break; case TypeTags.ARRAY: if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY && equalityIntersectionExists( expandAndGetMemberTypesRecursive(((BArrayType) lhsMemberType).eType), expandAndGetMemberTypesRecursive(((BArrayType) rhsMemberType).eType)))) { return true; } if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE && arrayTupleEqualityIntersectionExists((BArrayType) lhsMemberType, (BTupleType) rhsMemberType))) { return true; } break; case TypeTags.MAP: if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.MAP && equalityIntersectionExists( 
expandAndGetMemberTypesRecursive(((BMapType) lhsMemberType).constraint), expandAndGetMemberTypesRecursive(((BMapType) rhsMemberType).constraint)))) { return true; } if (!isAssignable(((BMapType) lhsMemberType).constraint, symTable.errorType) && rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) { return true; } if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD && mapRecordEqualityIntersectionExists((BMapType) lhsMemberType, (BRecordType) rhsMemberType))) { return true; } break; case TypeTags.OBJECT: case TypeTags.RECORD: if (rhsTypes.stream().anyMatch( rhsMemberType -> checkStructEquivalency(rhsMemberType, lhsMemberType) || checkStructEquivalency(lhsMemberType, rhsMemberType))) { return true; } if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD && recordEqualityIntersectionExists((BRecordType) lhsMemberType, (BRecordType) rhsMemberType))) { return true; } if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON) && jsonEqualityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberType))) { return true; } if (rhsTypes.stream().anyMatch( rhsMemberType -> rhsMemberType.tag == TypeTags.MAP && mapRecordEqualityIntersectionExists((BMapType) rhsMemberType, (BRecordType) lhsMemberType))) { return true; } break; } } return false; } private boolean arrayTupleEqualityIntersectionExists(BArrayType arrayType, BTupleType tupleType) { Set<BType> elementTypes = expandAndGetMemberTypesRecursive(arrayType.eType); return tupleType.tupleTypes.stream() .allMatch(tupleMemType -> equalityIntersectionExists(elementTypes, expandAndGetMemberTypesRecursive(tupleMemType))); } private boolean recordEqualityIntersectionExists(BRecordType lhsType, BRecordType rhsType) { List<BField> lhsFields = lhsType.fields; List<BField> rhsFields = rhsType.fields; List<Name> matchedFieldNames = new ArrayList<>(); for (BField lhsField : lhsFields) { Optional<BField> match = 
rhsFields.stream().filter(rhsField -> lhsField.name.equals(rhsField.name)).findFirst(); if (match.isPresent()) { if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type), expandAndGetMemberTypesRecursive(match.get().type))) { return false; } matchedFieldNames.add(lhsField.getName()); } else { if (Symbols.isFlagOn(lhsField.symbol.flags, Flags.OPTIONAL)) { break; } if (rhsType.sealed) { return false; } if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type), expandAndGetMemberTypesRecursive(rhsType.restFieldType))) { return false; } } } for (BField rhsField : rhsFields) { if (matchedFieldNames.contains(rhsField.getName())) { continue; } if (!Symbols.isFlagOn(rhsField.symbol.flags, Flags.OPTIONAL)) { if (lhsType.sealed) { return false; } if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(rhsField.type), expandAndGetMemberTypesRecursive(lhsType.restFieldType))) { return false; } } } return true; } private boolean mapRecordEqualityIntersectionExists(BMapType mapType, BRecordType recordType) { Set<BType> mapConstrTypes = expandAndGetMemberTypesRecursive(mapType.getConstraint()); return recordType.fields.stream() .allMatch(field -> Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) || equalityIntersectionExists(mapConstrTypes, expandAndGetMemberTypesRecursive(field.type))); } private boolean jsonEqualityIntersectionExists(Set<BType> typeSet) { for (BType type : typeSet) { switch (type.tag) { case TypeTags.MAP: if (!isAssignable(((BMapType) type).constraint, symTable.errorType)) { return true; } break; case TypeTags.RECORD: BRecordType recordType = (BRecordType) type; if (recordType.fields.stream() .allMatch(field -> Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) || !isAssignable(field.type, symTable.errorType))) { return true; } break; default: if (isAssignable(type, symTable.jsonType)) { return true; } } } return false; } public BType getRemainingType(BType originalType, BType typeToRemove) { switch 
(originalType.tag) { case TypeTags.UNION: return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove)); case TypeTags.FINITE: return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove)); default: return originalType; } } private BType getRemainingType(BUnionType originalType, List<BType> removeTypes) { List<BType> remainingTypes = getAllTypes(originalType); removeTypes.forEach(removeType -> remainingTypes.removeIf(type -> isAssignable(type, removeType))); List<BType> finiteTypesToRemove = new ArrayList<>(); List<BType> finiteTypesToAdd = new ArrayList<>(); for (BType remainingType : remainingTypes) { if (remainingType.tag == TypeTags.FINITE) { BFiniteType finiteType = (BFiniteType) remainingType; finiteTypesToRemove.add(finiteType); BType remainingTypeWithMatchesRemoved = getRemainingType(finiteType, removeTypes); if (remainingTypeWithMatchesRemoved != symTable.semanticError) { finiteTypesToAdd.add(remainingTypeWithMatchesRemoved); } } } remainingTypes.removeAll(finiteTypesToRemove); remainingTypes.addAll(finiteTypesToAdd); if (remainingTypes.size() == 1) { return remainingTypes.get(0); } if (remainingTypes.isEmpty()) { return symTable.nullSet; } return BUnionType.create(null, new LinkedHashSet<>(remainingTypes)); } private BType getRemainingType(BFiniteType originalType, List<BType> removeTypes) { Set<BLangExpression> remainingValueSpace = new LinkedHashSet<>(); for (BLangExpression valueExpr : originalType.getValueSpace()) { boolean matchExists = false; for (BType remType : removeTypes) { if (isAssignable(valueExpr.type, remType) || isAssignableToFiniteType(remType, (BLangLiteral) valueExpr)) { matchExists = true; break; } } if (!matchExists) { remainingValueSpace.add(valueExpr); } } if (remainingValueSpace.isEmpty()) { return symTable.semanticError; } BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, originalType.tsymbol.flags, names.fromString("$anonType$" + finiteTypeCount++), 
originalType.tsymbol.pkgID, null, originalType.tsymbol.owner); BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, remainingValueSpace); finiteTypeSymbol.type = intersectingFiniteType; return intersectingFiniteType; } public BType getSafeType(BType type, boolean liftNil, boolean liftError) { switch (type.tag) { case TypeTags.JSON: BJSONType jsonType = (BJSONType) type; return new BJSONType(jsonType.tag, jsonType.tsymbol, false); case TypeTags.ANY: return new BAnyType(type.tag, type.tsymbol, false); case TypeTags.ANYDATA: return new BAnydataType(type.tag, type.tsymbol, false); case TypeTags.READONLY: return new BReadonlyType(type.tag, type.tsymbol, false); } if (type.tag != TypeTags.UNION) { return type; } BUnionType unionType = (BUnionType) type; LinkedHashSet<BType> memTypes = new LinkedHashSet<>(unionType.getMemberTypes()); BUnionType errorLiftedType = BUnionType.create(null, memTypes); if (liftNil) { errorLiftedType.remove(symTable.nilType); } if (liftError) { errorLiftedType.remove(symTable.errorType); } if (errorLiftedType.getMemberTypes().size() == 1) { return errorLiftedType.getMemberTypes().toArray(new BType[0])[0]; } return errorLiftedType; } public List<BType> getAllTypes(BType type) { if (type.tag != TypeTags.UNION) { return Lists.of(type); } List<BType> memberTypes = new ArrayList<>(); ((BUnionType) type).getMemberTypes().forEach(memberType -> memberTypes.addAll(getAllTypes(memberType))); return memberTypes; } public boolean isAllowedConstantType(BType type) { switch (type.tag) { case TypeTags.BOOLEAN: case TypeTags.INT: case TypeTags.BYTE: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.STRING: case TypeTags.NIL: return true; case TypeTags.MAP: return isAllowedConstantType(((BMapType) type).constraint); case TypeTags.FINITE: BLangExpression finiteValue = ((BFiniteType) type).getValueSpace().toArray(new BLangExpression[0])[0]; return isAllowedConstantType(finiteValue.type); default: return false; } } public boolean 
isValidLiteral(BLangLiteral literal, BType targetType) { BType literalType = literal.type; if (literalType.tag == targetType.tag) { return true; } switch (targetType.tag) { case TypeTags.BYTE: return literalType.tag == TypeTags.INT && isByteLiteralValue((Long) literal.value); case TypeTags.DECIMAL: return literalType.tag == TypeTags.FLOAT || literalType.tag == TypeTags.INT; case TypeTags.FLOAT: return literalType.tag == TypeTags.INT; case TypeTags.SIGNED32_INT: return literalType.tag == TypeTags.INT && isSigned32LiteralValue((Long) literal.value); case TypeTags.SIGNED16_INT: return literalType.tag == TypeTags.INT && isSigned16LiteralValue((Long) literal.value); case TypeTags.SIGNED8_INT: return literalType.tag == TypeTags.INT && isSigned8LiteralValue((Long) literal.value); case TypeTags.UNSIGNED32_INT: return literalType.tag == TypeTags.INT && isUnsigned32LiteralValue((Long) literal.value); case TypeTags.UNSIGNED16_INT: return literalType.tag == TypeTags.INT && isUnsigned16LiteralValue((Long) literal.value); case TypeTags.UNSIGNED8_INT: return literalType.tag == TypeTags.INT && isUnsigned8LiteralValue((Long) literal.value); case TypeTags.CHAR_STRING: return literalType.tag == TypeTags.STRING && isCharLiteralValue((String) literal.value); default: return false; } } /** * Validate if the return type of the given function is a subtype of `error?`, containing `()`. 
* * @param function The function of which the return type should be validated * @param diagnosticCode The code to log if the return type is invalid */ public void validateErrorOrNilReturn(BLangFunction function, DiagnosticCode diagnosticCode) { BType returnType = function.returnTypeNode.type; if (returnType.tag == TypeTags.NIL) { return; } if (returnType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) returnType).getMemberTypes(); if (returnType.isNullable() && memberTypes.stream().allMatch(type -> type.tag == TypeTags.NIL || type.tag == TypeTags.ERROR)) { return; } } dlogHelper.error(function.returnTypeNode.pos, diagnosticCode, function.returnTypeNode.type.toString()); } /** * Type vector of size two, to hold the source and the target types. * * @since 0.982.0 */ private static class TypePair { BType sourceType; BType targetType; public TypePair(BType sourceType, BType targetType) { this.sourceType = sourceType; this.targetType = targetType; } @Override public boolean equals(Object obj) { if (!(obj instanceof TypePair)) { return false; } TypePair other = (TypePair) obj; return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType); } @Override public int hashCode() { return Objects.hash(sourceType, targetType); } } /** * A functional interface for parameterizing the type of type checking that needs to be done on the source and * target types. 
* * @since 0.995.0 */ private interface TypeEqualityPredicate { boolean test(BType source, BType target, Set<TypePair> unresolvedTypes); } public boolean hasFillerValue(BType type) { switch (type.tag) { case TypeTags.INT: case TypeTags.BYTE: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.STRING: case TypeTags.BOOLEAN: case TypeTags.JSON: case TypeTags.XML: case TypeTags.NIL: case TypeTags.ANYDATA: case TypeTags.MAP: case TypeTags.ANY: return true; case TypeTags.ARRAY: return checkFillerValue((BArrayType) type); case TypeTags.FINITE: return checkFillerValue((BFiniteType) type); case TypeTags.UNION: return checkFillerValue((BUnionType) type); case TypeTags.OBJECT: return checkFillerValue((BObjectType) type); case TypeTags.RECORD: return checkFillerValue((BRecordType) type); case TypeTags.TUPLE: BTupleType tupleType = (BTupleType) type; return tupleType.getTupleTypes().stream().allMatch(eleType -> hasFillerValue(eleType)); default: if (TypeTags.isIntegerTypeTag(type.tag)) { return true; } return false; } } private boolean checkFillerValue(BObjectType type) { if ((type.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) { return false; } BAttachedFunction initFunction = ((BObjectTypeSymbol) type.tsymbol).initializerFunc; if (initFunction == null) { return true; } if (initFunction.symbol.getReturnType().getKind() != TypeKind.NIL) { return false; } for (BVarSymbol bVarSymbol : initFunction.symbol.getParameters()) { if (!bVarSymbol.defaultableParam) { return false; } } return true; } /** * This will handle two types. Singleton : As singleton can have one value that value should it self be a valid fill * value Union : 1. if nil is a member it is the fill values 2. 
else all the values should belong to same type and * the default value for that type should be a member of the union precondition : value space should have at least * one element * * @param type BFiniteType union or finite * @return boolean whether type has a valid filler value or not */ private boolean checkFillerValue(BFiniteType type) { if (type.isNullable()) { return true; } if (type.getValueSpace().size() == 1) { return true; } Iterator iterator = type.getValueSpace().iterator(); BLangExpression firstElement = (BLangExpression) iterator.next(); boolean defaultFillValuePresent = isImplicitDefaultValue(firstElement); while (iterator.hasNext()) { BLangExpression value = (BLangExpression) iterator.next(); if (!isSameBasicType(value.type, firstElement.type)) { return false; } if (!defaultFillValuePresent && isImplicitDefaultValue(value)) { defaultFillValuePresent = true; } } return defaultFillValuePresent; } private boolean hasImplicitDefaultValue(Set<BLangExpression> valueSpace) { for (BLangExpression expression : valueSpace) { if (isImplicitDefaultValue(expression)) { return true; } } return false; } private boolean checkFillerValue(BUnionType type) { if (type.isNullable()) { return true; } Set<BType> memberTypes = new HashSet<>(); boolean hasFillerValue = false; boolean defaultValuePresent = false; boolean finiteTypePresent = false; for (BType member : type.getMemberTypes()) { if (member.tag == TypeTags.FINITE) { Set<BType> uniqueValues = getValueTypes(((BFiniteType) member).getValueSpace()); memberTypes.addAll(uniqueValues); if (!defaultValuePresent && hasImplicitDefaultValue(((BFiniteType) member).getValueSpace())) { defaultValuePresent = true; } finiteTypePresent = true; } else { memberTypes.add(member); } if (!hasFillerValue && hasFillerValue(member)) { hasFillerValue = true; } } if (!hasFillerValue) { return false; } Iterator<BType> iterator = memberTypes.iterator(); BType firstMember = iterator.next(); while (iterator.hasNext()) { if 
(!isSameBasicType(firstMember, iterator.next())) { return false; } } if (finiteTypePresent) { return defaultValuePresent; } return true; } private boolean isSameBasicType(BType source, BType target) { if (isSameType(source, target)) { return true; } if (TypeTags.isIntegerTypeTag(source.tag) && TypeTags.isIntegerTypeTag(target.tag)) { return true; } return false; } private Set<BType> getValueTypes(Set<BLangExpression> valueSpace) { Set<BType> uniqueType = new HashSet<>(); for (BLangExpression expression : valueSpace) { uniqueType.add(expression.type); } return uniqueType; } private boolean isImplicitDefaultValue(BLangExpression expression) { if ((expression.getKind() == NodeKind.LITERAL) || (expression.getKind() == NodeKind.NUMERIC_LITERAL)) { BLangLiteral literalExpression = (BLangLiteral) expression; BType literalExprType = literalExpression.type; Object value = literalExpression.getValue(); switch (literalExprType.getKind()) { case INT: case BYTE: return value.equals(Long.valueOf(0)); case STRING: return value == null || value.equals(""); case DECIMAL: case FLOAT: return value.equals(String.valueOf(0.0)); case BOOLEAN: return value.equals(Boolean.valueOf(false)); case NIL: return true; default: return false; } } return false; } private boolean checkFillerValue(BRecordType type) { for (BField field : type.fields) { if (Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)) { continue; } if (Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { return false; } } return true; } private boolean checkFillerValue(BArrayType type) { if (type.size == -1) { return true; } return hasFillerValue(type.eType); } }
I think this deserves a "final" here, and the line below too.
public static void main(String[] args) throws Exception { ServerConfiguration configuration = new ServerConfiguration(); CmdLineParser parser = new CmdLineParser(configuration); try { parser.parseArgument(args); fromConfig(configuration).run(); } catch (CmdLineException e) { LOG.error("Unable to parse command line arguments {}", Arrays.asList(args), e); throw new IllegalArgumentException("Unable to parse command line arguments.", e); } catch (Exception e) { LOG.error("Hit exception with SamzaJobServer. Exiting...", e); throw e; } }
ServerConfiguration configuration = new ServerConfiguration();
public static void main(String[] args) throws Exception { final ServerConfiguration configuration = new ServerConfiguration(); final CmdLineParser parser = new CmdLineParser(configuration); try { parser.parseArgument(args); fromConfig(configuration).run(); } catch (CmdLineException e) { LOG.error("Unable to parse command line arguments {}", Arrays.asList(args), e); throw new IllegalArgumentException("Unable to parse command line arguments.", e); } catch (Exception e) { LOG.error("Hit exception with SamzaJobServer. Exiting...", e); throw e; } }
class ServerConfiguration { @Option(name = "--job-port", usage = "The job service port. (Default: 11440)") private int jobPort = 11440; @Option(name = "--control-port", usage = "The FnControl port. (Default: 11441)") private int controlPort = 11441; }
class ServerConfiguration { @Option(name = "--job-port", usage = "The job service port. (Default: 11440)") private int jobPort = 11440; @Option(name = "--control-port", usage = "The FnControl port. (Default: 11441)") private int controlPort = 11441; }
Ok, I'll need to update the rhoas operator and do a new release. Do you mind if we remove this property in a future pr, or are you planning on removing support for it in the very near (this calendar year) future.
public Optional<ServiceBindingConfigSource> convert(List<ServiceBinding> serviceBindings) { var matchingByType = ServiceBinding.singleMatchingByType("serviceregistry", serviceBindings); Config config = ConfigProvider.getConfig(); if (matchingByType.isEmpty()) { return Optional.empty(); } var binding = matchingByType.get(); List<String> channels = extractChannels(config); Map<String, String> properties = new HashMap<>(); String registryUrl = binding.getProperties().get("registryUrl"); if (registryUrl == null) { registryUrl = binding.getProperties().get("registryurl"); } if (registryUrl != null) { properties.put("mp.messaging.connector.smallrye-kafka.apicurio.registry.url", registryUrl); } for (String channel : channels) { String prefix = channel; String oAuthHost = binding.getProperties().get("oauthServerUrl"); if (oAuthHost == null) { oAuthHost = binding.getProperties().get("oauthserverurl"); } if (oAuthHost != null) { properties.put(prefix + "apicurio.auth.service.url", oAuthHost); } String clientId = binding.getProperties().get("clientId"); if (clientId == null) { clientId = binding.getProperties().get("clientid"); } if (clientId != null) { properties.put(prefix + "apicurio.auth.client.id", clientId); } String clientSecret = binding.getProperties().get("clientSecret"); if (clientSecret == null) { clientSecret = binding.getProperties().get("clientsecret"); } if (clientSecret != null) { properties.put(prefix + "apicurio.auth.client.secret", clientSecret); } String realm = binding.getProperties().get("oauthRealm"); if (realm == null) { realm = binding.getProperties().get("oauthRealm"); } if (clientSecret != null) { properties.put(prefix + "apicurio.auth.realm", realm); } if (registryUrl != null) { properties.put(prefix + "apicurio.registry.url", registryUrl); } } return Optional.of(new ServiceBindingConfigSource("serviceregistry-k8s-service-binding-source", properties)); }
String realm = binding.getProperties().get("oauthRealm");
public Optional<ServiceBindingConfigSource> convert(List<ServiceBinding> serviceBindings) { var matchingByType = ServiceBinding.singleMatchingByType("serviceregistry", serviceBindings); Config config = ConfigProvider.getConfig(); if (matchingByType.isEmpty()) { return Optional.empty(); } var binding = matchingByType.get(); List<String> channels = extractChannels(config); Map<String, String> properties = new HashMap<>(); String registryUrl = binding.getProperties().get("registryUrl"); if (registryUrl == null) { registryUrl = binding.getProperties().get("registryurl"); } if (registryUrl != null) { properties.put("kafka.apicurio.registry.url", registryUrl); } for (String channel : channels) { String prefix = channel; String oauthTokenUrl = binding.getProperties().get("oauthTokenUrl"); if (oauthTokenUrl == null) { oauthTokenUrl = binding.getProperties().get("oauthtokenurl"); } if (oauthTokenUrl != null) { properties.put(prefix + "apicurio.auth.service.token.endpoint", oauthTokenUrl); } String clientId = binding.getProperties().get("clientId"); if (clientId == null) { clientId = binding.getProperties().get("clientid"); } if (clientId != null) { properties.put(prefix + "apicurio.auth.client.id", clientId); } String clientSecret = binding.getProperties().get("clientSecret"); if (clientSecret == null) { clientSecret = binding.getProperties().get("clientsecret"); } if (clientSecret != null) { properties.put(prefix + "apicurio.auth.client.secret", clientSecret); } if (registryUrl != null) { properties.put(prefix + "apicurio.registry.url", registryUrl); } } return Optional.of(new ServiceBindingConfigSource("serviceregistry-k8s-service-binding-source", properties)); }
class ServiceRegistryBindingConverter implements ServiceBindingConverter { private static Logger LOG = Logger.getLogger(ServiceRegistryBindingConverter.class.getName()); private static final String INCOMING_PREFIX = "mp.messaging.incoming."; private static final String OUTGOING_PREFIX = "mp.messaging.outgoing."; @Override private List<String> extractChannels(Config configIn) { var list = new ArrayList<String>(); for (String propertyName : configIn.getPropertyNames()) { if (propertyName.startsWith(INCOMING_PREFIX)) { var channelName = propertyName.replace(INCOMING_PREFIX, "").split("\\.")[0]; list.add(INCOMING_PREFIX + channelName + "."); } else if (propertyName.startsWith(OUTGOING_PREFIX)) { var channelName = propertyName.replace(OUTGOING_PREFIX, "").split("\\.")[0]; list.add(OUTGOING_PREFIX + channelName + "."); } } return list; } }
class ServiceRegistryBindingConverter implements ServiceBindingConverter { private static Logger LOG = Logger.getLogger(ServiceRegistryBindingConverter.class.getName()); private static final String INCOMING_PREFIX = "mp.messaging.incoming."; private static final String OUTGOING_PREFIX = "mp.messaging.outgoing."; @Override private List<String> extractChannels(Config configIn) { var list = new ArrayList<String>(); for (String propertyName : configIn.getPropertyNames()) { if (propertyName.startsWith(INCOMING_PREFIX)) { var channelAndProp = StringUtils.substringAfter(propertyName, INCOMING_PREFIX); var channelName = StringUtils.substringBefore(channelAndProp, "."); if (!StringUtils.isBlank(channelName)) list.add(INCOMING_PREFIX + channelName + "."); } else if (propertyName.startsWith(OUTGOING_PREFIX)) { var channelAndProp = StringUtils.substringAfter(propertyName, OUTGOING_PREFIX); var channelName = StringUtils.substringBefore(channelAndProp, "."); if (!StringUtils.isBlank(channelName)) list.add(OUTGOING_PREFIX + channelName + "."); } } return list; } }
@aoyvx Please remove final modifier in method domain.
public void assertRemoveSchemaMetadata() { final Map<String, ShardingSphereSchema> map = Maps.of( "foo_db_1", mock(ShardingSphereSchema.class), "foo_db_2", mock(ShardingSphereSchema.class)); final FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", map); assertTrue(federationDatabaseMetaData.getSchemas().containsKey("foo_db_1")); federationDatabaseMetaData.removeSchemaMetadata("foo_db_1"); assertFalse(federationDatabaseMetaData.getSchemas().containsKey("foo_db_1")); assertTrue(federationDatabaseMetaData.getSchemas().containsKey("foo_db_2")); }
final Map<String, ShardingSphereSchema> map = Maps.of(
public void assertRemoveSchemaMetadata() { Map<String, ShardingSphereSchema> map = new HashMap<>(); map.put("foo_db_1", mock(ShardingSphereSchema.class)); map.put("foo_db_2", mock(ShardingSphereSchema.class)); FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", map); assertTrue(federationDatabaseMetaData.getSchemas().containsKey("foo_db_1")); federationDatabaseMetaData.removeSchemaMetadata("foo_db_1"); assertFalse(federationDatabaseMetaData.getSchemas().containsKey("foo_db_1")); assertTrue(federationDatabaseMetaData.getSchemas().containsKey("foo_db_2")); }
class FederationDatabaseMetaDataTest { @Test public void assertPutSchemaMetadata() { final FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", Collections.emptyMap()); final FederationSchemaMetaData schemaMetaData = mock(FederationSchemaMetaData.class); federationDatabaseMetaData.putSchemaMetadata("foo_db", schemaMetaData); assertThat(federationDatabaseMetaData.getSchemas().get("foo_db"), is(schemaMetaData)); } @Test @Test public void assertGetSchemaMetadata() { final ShardingSphereSchema sphereSchema = mock(ShardingSphereSchema.class); final Map<String, ShardingSphereSchema> map = Maps.of("foo_db", sphereSchema); final FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", map); assertNotNull(federationDatabaseMetaData.getSchemaMetadata("foo_db").orElse(null)); } @Test public void assertGetSchemaMetadataByNonexistentKey() { final ShardingSphereSchema sphereSchema = mock(ShardingSphereSchema.class); final Map<String, ShardingSphereSchema> map = Maps.of("foo_db", sphereSchema); final FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", map); federationDatabaseMetaData.getSchemaMetadata("foo_db_2"); } @Test public void assertPutTable() { final FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", Collections.emptyMap()); final ShardingSphereTable fooTable = new ShardingSphereTable("foo_table", Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); federationDatabaseMetaData.putTable("foo_db", fooTable); assertNotNull(federationDatabaseMetaData.getSchemaMetadata("foo_db").orElse(null)); assertNotNull(federationDatabaseMetaData.getSchemaMetadata("foo_db").map(FederationSchemaMetaData::getTables).map(e -> e.get("foo_table")).orElse(null)); } @Test public void assertRemoveTableMetadata() { final FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", 
Collections.emptyMap()); final ShardingSphereTable fooTable = new ShardingSphereTable("foo_table", Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); federationDatabaseMetaData.putTable("foo_db", fooTable); federationDatabaseMetaData.removeTableMetadata("foo_db", "foo_table"); assertNull(federationDatabaseMetaData.getSchemaMetadata("foo_db").map(s -> s.getTables().get("foo_table")).orElse(null)); } }
class FederationDatabaseMetaDataTest { @Test public void assertPutSchemaMetadata() { FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", Collections.emptyMap()); FederationSchemaMetaData schemaMetaData = mock(FederationSchemaMetaData.class); federationDatabaseMetaData.putSchemaMetadata("foo_db", schemaMetaData); assertThat(federationDatabaseMetaData.getSchemas().get("foo_db"), is(schemaMetaData)); } @Test @Test public void assertGetSchemaMetadata() { ShardingSphereSchema schema = mock(ShardingSphereSchema.class); Map<String, ShardingSphereSchema> map = new HashMap<>(); map.put("foo_db", schema); FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", map); assertTrue(federationDatabaseMetaData.getSchemaMetadata("foo_db").isPresent()); } @Test public void assertGetSchemaMetadataByNonexistentKey() { ShardingSphereSchema schema = mock(ShardingSphereSchema.class); Map<String, ShardingSphereSchema> map = new HashMap<>(); map.put("foo_db", schema); FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", map); assertFalse(federationDatabaseMetaData.getSchemaMetadata("foo_db_2").isPresent()); } @Test public void assertPutTable() { FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", Collections.emptyMap()); ShardingSphereTable fooTable = new ShardingSphereTable("foo_table", Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); federationDatabaseMetaData.putTable("foo_db", fooTable); assertTrue(federationDatabaseMetaData.getSchemaMetadata("foo_db").isPresent()); assertTrue(federationDatabaseMetaData.getSchemaMetadata("foo_db").map(FederationSchemaMetaData::getTables).map(e -> e.get("foo_table")).isPresent()); } @Test public void assertRemoveTableMetadata() { FederationDatabaseMetaData federationDatabaseMetaData = new FederationDatabaseMetaData("foo", Collections.emptyMap()); ShardingSphereTable 
fooTable = new ShardingSphereTable("foo_table", Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); federationDatabaseMetaData.putTable("foo_db", fooTable); federationDatabaseMetaData.removeTableMetadata("foo_db", "foo_table"); assertFalse(federationDatabaseMetaData.getSchemaMetadata("foo_db").map(s -> s.getTables().get("foo_table")).isPresent()); } }
should use global var for WS and NL
public OnFailClauseNode transform(OnFailClauseNode onFailClauseNode) { Token onKeyword = formatToken(onFailClauseNode.onKeyword(), 1, 0); Token failKeyword = formatToken(onFailClauseNode.failKeyword(), 1, 0); TypeDescriptorNode typeDescriptor = formatNode(onFailClauseNode.typeDescriptor(), 1, 0); IdentifierToken failErrorName = formatToken(onFailClauseNode.failErrorName(), 1, 0); BlockStatementNode blockStatement = formatNode(onFailClauseNode.blockStatement(), 0, 1); return onFailClauseNode.modify() .withOnKeyword(onKeyword) .withFailKeyword(failKeyword) .withTypeDescriptor(typeDescriptor) .withFailErrorName(failErrorName) .withBlockStatement(blockStatement) .apply(); }
BlockStatementNode blockStatement = formatNode(onFailClauseNode.blockStatement(), 0, 1);
public OnFailClauseNode transform(OnFailClauseNode onFailClauseNode) { Token onKeyword = formatToken(onFailClauseNode.onKeyword(), 1, 0); Token failKeyword = formatToken(onFailClauseNode.failKeyword(), 1, 0); TypeDescriptorNode typeDescriptor = formatNode(onFailClauseNode.typeDescriptor(), 1, 0); IdentifierToken failErrorName = formatToken(onFailClauseNode.failErrorName(), 1, 0); BlockStatementNode blockStatement = formatNode(onFailClauseNode.blockStatement(), this.trailingWS, this.trailingNL); return onFailClauseNode.modify() .withOnKeyword(onKeyword) .withFailKeyword(failKeyword) .withTypeDescriptor(typeDescriptor) .withFailErrorName(failErrorName) .withBlockStatement(blockStatement) .apply(); }
class NewFormattingTreeModifier extends FormattingTreeModifier { /** * Number of of whitespace characters to be used as the indentation for the current line. */ private int indentation = 0; /** * Number of leading newlines to be added to the currently processing node. */ private int leadingNL = 0; /** * Number of trailing newlines to be added to the currently processing node. */ private int trailingNL = 0; /** * Number of trailing whitespace characters to be added to the currently processing node. */ private int trailingWS = 0; /** * Flag indicating whether the currently formatting token is the first token of the current line. */ private boolean hasNewline = true; /** * Number of of whitespace characters to be used for a single indentation. */ private static final int DEFAULT_INDENTATION = 4; /** * Maximum length of a line. Any line that goes pass this limit will be wrapped. */ private static final int COLUMN_LIMIT = 80; /** * Length of the currently formatting line. */ private int lineLength = 0; public NewFormattingTreeModifier(FormattingOptions options, LineRange lineRange) { super(options, lineRange); } @Override public ModulePartNode transform(ModulePartNode modulePartNode) { NodeList<ImportDeclarationNode> imports = formatNodeList(modulePartNode.imports(), 0, 1, 0, 2); NodeList<ModuleMemberDeclarationNode> members = formatNodeList(modulePartNode.members(), 0, 2, 0, 1); Token eofToken = formatToken(modulePartNode.eofToken(), 0, 0); return modulePartNode.modify(imports, members, eofToken); } @Override public FunctionDefinitionNode transform(FunctionDefinitionNode functionDefinitionNode) { if (functionDefinitionNode.metadata().isPresent()) { MetadataNode metadata = formatNode(functionDefinitionNode.metadata().get(), 1, 0); functionDefinitionNode = functionDefinitionNode.modify().withMetadata(metadata).apply(); } NodeList<Token> qualifierList = formatNodeList(functionDefinitionNode.qualifierList(), 1, 0, 1, 0); Token functionKeyword = 
formatToken(functionDefinitionNode.functionKeyword(), 1, 0); IdentifierToken functionName = formatToken(functionDefinitionNode.functionName(), 0, 0); FunctionSignatureNode functionSignatureNode = formatNode(functionDefinitionNode.functionSignature(), 1, 0); FunctionBodyNode functionBodyNode = formatNode(functionDefinitionNode.functionBody(), this.trailingWS, this.trailingNL); return functionDefinitionNode.modify() .withFunctionKeyword(functionKeyword) .withFunctionName(functionName) .withFunctionSignature(functionSignatureNode).withQualifierList(qualifierList) .withFunctionBody(functionBodyNode) .apply(); } @Override public FunctionSignatureNode transform(FunctionSignatureNode functionSignatureNode) { Token openPara = formatToken(functionSignatureNode.openParenToken(), 0, 0); int currentIndentation = this.indentation; setIndentation(this.lineLength); SeparatedNodeList<ParameterNode> parameters = formatSeparatedNodeList(functionSignatureNode.parameters(), 0, 0, 0, 0); setIndentation(currentIndentation); Token closePara = formatToken(functionSignatureNode.closeParenToken(), 1, 0); if (functionSignatureNode.returnTypeDesc().isPresent()) { ReturnTypeDescriptorNode returnTypeDesc = formatNode(functionSignatureNode.returnTypeDesc().get(), this.trailingWS, this.trailingNL); functionSignatureNode = functionSignatureNode.modify().withReturnTypeDesc(returnTypeDesc).apply(); } return functionSignatureNode.modify() .withOpenParenToken(openPara) .withCloseParenToken(closePara) .withParameters(parameters) .apply(); } @Override public RequiredParameterNode transform(RequiredParameterNode requiredParameterNode) { NodeList<AnnotationNode> annotations = formatNodeList(requiredParameterNode.annotations(), 0, 1, 0, 0); Node typeName = formatNode(requiredParameterNode.typeName(), 1, 0); if (requiredParameterNode.paramName().isPresent()) { Token paramName = formatToken(requiredParameterNode.paramName().get(), 0, 0); return requiredParameterNode.modify() .withAnnotations(annotations) 
.withTypeName(typeName) .withParamName(paramName) .apply(); } else { return requiredParameterNode.modify() .withAnnotations(annotations) .withTypeName(typeName) .apply(); } } @Override public FunctionBodyBlockNode transform(FunctionBodyBlockNode functionBodyBlockNode) { Token openBrace = formatToken(functionBodyBlockNode.openBraceToken(), 0, 1); indent(); NodeList<StatementNode> statements = formatNodeList(functionBodyBlockNode.statements(), 0, 1, 0, 1); unindent(); Token closeBrace = formatToken(functionBodyBlockNode.closeBraceToken(), this.trailingWS, this.trailingNL); return functionBodyBlockNode.modify() .withOpenBraceToken(openBrace) .withCloseBraceToken(closeBrace) .withStatements(statements) .apply(); } @Override public VariableDeclarationNode transform(VariableDeclarationNode variableDeclarationNode) { NodeList<AnnotationNode> annotationNodes = formatNodeList(variableDeclarationNode.annotations(), 0, 1, 0, 1); if (variableDeclarationNode.finalKeyword().isPresent()) { Token finalToken = formatToken(variableDeclarationNode.finalKeyword().get(), 1, 0); variableDeclarationNode = variableDeclarationNode.modify().withFinalKeyword(finalToken).apply(); } TypedBindingPatternNode typedBindingPatternNode; if (variableDeclarationNode.equalsToken().isPresent()) { typedBindingPatternNode = formatNode(variableDeclarationNode.typedBindingPattern(), 1, 0); Token equalToken = formatToken(variableDeclarationNode.equalsToken().get(), 1, 0); ExpressionNode initializer = formatNode(variableDeclarationNode.initializer().get(), 0, 0); Token semicolonToken = formatToken(variableDeclarationNode.semicolonToken(), this.trailingWS, this.trailingNL); return variableDeclarationNode.modify() .withAnnotations(annotationNodes) .withTypedBindingPattern(typedBindingPatternNode) .withEqualsToken(equalToken) .withInitializer(initializer) .withSemicolonToken(semicolonToken) .apply(); } else { typedBindingPatternNode = formatNode(variableDeclarationNode.typedBindingPattern(), 0, 0); Token 
semicolonToken = formatToken(variableDeclarationNode.semicolonToken(), this.trailingWS, this.trailingNL); return variableDeclarationNode.modify() .withAnnotations(annotationNodes) .withTypedBindingPattern(typedBindingPatternNode) .withSemicolonToken(semicolonToken) .apply(); } } @Override public TypedBindingPatternNode transform(TypedBindingPatternNode typedBindingPatternNode) { TypeDescriptorNode typeDescriptorNode = formatNode(typedBindingPatternNode.typeDescriptor(), 1, 0); BindingPatternNode bindingPatternNode = formatNode(typedBindingPatternNode.bindingPattern(), this.trailingWS, this.trailingNL); return typedBindingPatternNode.modify() .withTypeDescriptor(typeDescriptorNode) .withBindingPattern(bindingPatternNode) .apply(); } @Override public BuiltinSimpleNameReferenceNode transform(BuiltinSimpleNameReferenceNode builtinSimpleNameReferenceNode) { Token name = formatToken(builtinSimpleNameReferenceNode.name(), this.trailingWS, this.trailingNL); return builtinSimpleNameReferenceNode.modify().withName(name).apply(); } @Override public BasicLiteralNode transform(BasicLiteralNode basicLiteralNode) { Token literalToken = formatToken(basicLiteralNode.literalToken(), this.trailingWS, this.trailingNL); return basicLiteralNode.modify().withLiteralToken(literalToken).apply(); } @Override public CaptureBindingPatternNode transform(CaptureBindingPatternNode captureBindingPatternNode) { Token variableName = formatToken(captureBindingPatternNode.variableName(), this.trailingWS, this.trailingNL); return captureBindingPatternNode.modify().withVariableName(variableName).apply(); } @Override public IfElseStatementNode transform(IfElseStatementNode ifElseStatementNode) { Token ifKeyword = formatToken(ifElseStatementNode.ifKeyword(), 1, 0); ExpressionNode condition = formatNode(ifElseStatementNode.condition(), 1, 0); BlockStatementNode ifBody; if (ifElseStatementNode.elseBody().isPresent()) { ifBody = formatNode(ifElseStatementNode.ifBody(), 1, 0); Node elseBody = 
formatNode(ifElseStatementNode.elseBody().get(), this.trailingWS, this.trailingNL); ifElseStatementNode = ifElseStatementNode.modify().withElseBody(elseBody).apply(); } else { ifBody = formatNode(ifElseStatementNode.ifBody(), this.trailingWS, this.trailingNL); } return ifElseStatementNode.modify() .withIfKeyword(ifKeyword) .withIfBody(ifBody) .withCondition(condition) .apply(); } @Override public ElseBlockNode transform(ElseBlockNode elseBlockNode) { Token elseKeyword = formatToken(elseBlockNode.elseKeyword(), 1, 0); StatementNode elseBody = formatNode(elseBlockNode.elseBody(), this.trailingWS, this.trailingNL); return elseBlockNode.modify() .withElseKeyword(elseKeyword) .withElseBody(elseBody) .apply(); } @Override public BlockStatementNode transform(BlockStatementNode blockStatementNode) { Token openBrace = formatToken(blockStatementNode.openBraceToken(), 0, 1); indent(); NodeList<StatementNode> statements = formatNodeList(blockStatementNode.statements(), 0, 1, 0, 1); unindent(); Token closeBrace = formatToken(blockStatementNode.closeBraceToken(), this.trailingWS, this.trailingNL); return blockStatementNode.modify() .withOpenBraceToken(openBrace) .withStatements(statements) .withCloseBraceToken(closeBrace) .apply(); } @Override public RecordTypeDescriptorNode transform(RecordTypeDescriptorNode recordTypeDesc) { Token recordKeyword = formatNode(recordTypeDesc.recordKeyword(), 1, 0); int fieldTrailingWS = 0; int fieldTrailingNL = 0; if (shouldExpand(recordTypeDesc)) { fieldTrailingNL++; } else { fieldTrailingWS++; } Token bodyStartDelimiter = formatToken(recordTypeDesc.bodyStartDelimiter(), fieldTrailingWS, fieldTrailingNL); int prevIndentation = this.indentation; setIndentation(recordKeyword.location().lineRange().startLine().offset() + DEFAULT_INDENTATION); NodeList<Node> fields = formatNodeList(recordTypeDesc.fields(), fieldTrailingWS, fieldTrailingNL, fieldTrailingWS, fieldTrailingNL); if (recordTypeDesc.recordRestDescriptor().isPresent()) { 
RecordRestDescriptorNode recordRestDescriptor = formatNode(recordTypeDesc.recordRestDescriptor().get(), fieldTrailingWS, fieldTrailingNL); recordTypeDesc = recordTypeDesc.modify().withRecordRestDescriptor(recordRestDescriptor).apply(); } setIndentation(prevIndentation); Token bodyEndDelimiter = formatToken(recordTypeDesc.bodyEndDelimiter(), this.trailingWS, this.trailingNL); return recordTypeDesc.modify() .withRecordKeyword(recordKeyword) .withBodyStartDelimiter(bodyStartDelimiter) .withFields(fields) .withBodyEndDelimiter(bodyEndDelimiter) .apply(); } @Override public RecordFieldNode transform(RecordFieldNode recordField) { if (recordField.metadata().isPresent()) { MetadataNode metadata = formatNode(recordField.metadata().get(), 0, 1); recordField = recordField.modify().withMetadata(metadata).apply(); } if (recordField.readonlyKeyword().isPresent()) { Token readonlyKeyword = formatNode(recordField.readonlyKeyword().get(), 1, 0); recordField = recordField.modify().withReadonlyKeyword(readonlyKeyword).apply(); } Node typeName = formatNode(recordField.typeName(), 1, 0); Token fieldName = formatToken(recordField.fieldName(), 0, 0); if (recordField.questionMarkToken().isPresent()) { Token questionMarkToken = formatToken(recordField.questionMarkToken().get(), 0, 1); recordField = recordField.modify().withQuestionMarkToken(questionMarkToken).apply(); } Token semicolonToken = formatToken(recordField.semicolonToken(), this.trailingWS, this.trailingNL); return recordField.modify() .withTypeName(typeName) .withFieldName(fieldName) .withSemicolonToken(semicolonToken) .apply(); } @Override public RecordFieldWithDefaultValueNode transform(RecordFieldWithDefaultValueNode recordField) { if (recordField.metadata().isPresent()) { MetadataNode metadata = formatNode(recordField.metadata().get(), 0, 1); recordField = recordField.modify().withMetadata(metadata).apply(); } if (recordField.readonlyKeyword().isPresent()) { Token readonlyKeyword = 
formatNode(recordField.readonlyKeyword().get(), 1, 0); recordField = recordField.modify().withReadonlyKeyword(readonlyKeyword).apply(); } Node typeName = formatNode(recordField.typeName(), 1, 0); Token fieldName = formatToken(recordField.fieldName(), 1, 0); Token equalsToken = formatToken(recordField.equalsToken(), 1, 0); ExpressionNode expression = formatNode(recordField.expression(), 0, 0); Token semicolonToken = formatToken(recordField.semicolonToken(), this.trailingWS, this.trailingNL); return recordField.modify() .withTypeName(typeName) .withFieldName(fieldName) .withEqualsToken(equalsToken) .withExpression(expression) .withSemicolonToken(semicolonToken) .apply(); } @Override public ImportDeclarationNode transform(ImportDeclarationNode importDeclarationNode) { Token importKeyword = formatToken(importDeclarationNode.importKeyword(), 1, 0); boolean hasOrgName = importDeclarationNode.orgName().isPresent(); boolean hasVersion = importDeclarationNode.version().isPresent(); boolean hasPrefix = importDeclarationNode.prefix().isPresent(); if (hasOrgName) { ImportOrgNameNode orgName = formatNode(importDeclarationNode.orgName().get(), 0, 0); importDeclarationNode = importDeclarationNode.modify().withOrgName(orgName).apply(); } SeparatedNodeList<IdentifierToken> moduleNames = formatSeparatedNodeList(importDeclarationNode.moduleName(), 0, 0, 0, 0, (hasVersion || hasPrefix) ? 1 : 0, 0); if (hasVersion) { ImportVersionNode version = formatNode(importDeclarationNode.version().get(), hasPrefix ? 
1 : 0, 0); importDeclarationNode = importDeclarationNode.modify().withVersion(version).apply(); } if (hasPrefix) { ImportPrefixNode prefix = formatNode(importDeclarationNode.prefix().get(), 0, 0); importDeclarationNode = importDeclarationNode.modify().withPrefix(prefix).apply(); } Token semicolon = formatToken(importDeclarationNode.semicolon(), this.trailingWS, this.trailingNL); return importDeclarationNode.modify() .withImportKeyword(importKeyword) .withModuleName(moduleNames) .withSemicolon(semicolon) .apply(); } @Override public ImportOrgNameNode transform(ImportOrgNameNode importOrgNameNode) { Token orgName = formatToken(importOrgNameNode.orgName(), 0, 0); Token slashToken = formatToken(importOrgNameNode.slashToken(), 0, 0); return importOrgNameNode.modify() .withOrgName(orgName) .withSlashToken(slashToken) .apply(); } @Override public ImportPrefixNode transform(ImportPrefixNode importPrefixNode) { Token asKeyword = formatToken(importPrefixNode.asKeyword(), 1, 0); Token prefix = formatToken(importPrefixNode.prefix(), 0, 0); return importPrefixNode.modify() .withAsKeyword(asKeyword) .withPrefix(prefix) .apply(); } @Override public ImportVersionNode transform(ImportVersionNode importVersionNode) { Token versionKeyword = formatToken(importVersionNode.versionKeyword(), 1, 0); SeparatedNodeList<Token> versionNumber = formatSeparatedNodeList(importVersionNode.versionNumber(), 0, 0, 0, 0, this.trailingWS, 0); return importVersionNode.modify() .withVersionKeyword(versionKeyword) .withVersionNumber(versionNumber) .apply(); } @Override public ServiceDeclarationNode transform(ServiceDeclarationNode serviceDeclarationNode) { if (serviceDeclarationNode.metadata().isPresent()) { MetadataNode metadata = formatNode(serviceDeclarationNode.metadata().get(), 1, 0); serviceDeclarationNode = serviceDeclarationNode.modify().withMetadata(metadata).apply(); } Token serviceKeyword = formatToken(serviceDeclarationNode.serviceKeyword(), 1, 0); IdentifierToken serviceName = 
formatToken(serviceDeclarationNode.serviceName(), 1, 0); Token onKeyword = formatToken(serviceDeclarationNode.onKeyword(), 1, 0); SeparatedNodeList<ExpressionNode> expressions = formatSeparatedNodeList(serviceDeclarationNode.expressions(), 0, 0, 1, 0); Node serviceBody = formatNode(serviceDeclarationNode.serviceBody(), this.trailingWS, this.trailingNL); return serviceDeclarationNode.modify() .withServiceKeyword(serviceKeyword) .withServiceName(serviceName) .withOnKeyword(onKeyword) .withExpressions(expressions) .withServiceBody(serviceBody) .apply(); } @Override public ExplicitNewExpressionNode transform(ExplicitNewExpressionNode explicitNewExpressionNode) { Token newKeywordToken = formatToken(explicitNewExpressionNode.newKeyword(), 1, 0); TypeDescriptorNode typeDescriptorNode = formatNode(explicitNewExpressionNode.typeDescriptor(), 0, 0); ParenthesizedArgList parenthesizedArgList = formatNode(explicitNewExpressionNode.parenthesizedArgList(), 0, 0); return explicitNewExpressionNode.modify() .withNewKeyword(newKeywordToken) .withTypeDescriptor(typeDescriptorNode) .withParenthesizedArgList(parenthesizedArgList) .apply(); } @Override public ParenthesizedArgList transform(ParenthesizedArgList parenthesizedArgList) { Token openParenToken = formatToken(parenthesizedArgList.openParenToken(), 0, 0); SeparatedNodeList<FunctionArgumentNode> arguments = formatSeparatedNodeList(parenthesizedArgList .arguments(), 0, 0, 0, 0); Token closeParenToken = formatToken(parenthesizedArgList.closeParenToken(), this.trailingWS, this.trailingNL); return parenthesizedArgList.modify() .withOpenParenToken(openParenToken) .withArguments(arguments) .withCloseParenToken(closeParenToken) .apply(); } @Override public ServiceBodyNode transform(ServiceBodyNode serviceBodyNode) { Token openBraceToken = formatToken(serviceBodyNode.openBraceToken(), 0, 1); indent(); NodeList<Node> resources = formatNodeList(serviceBodyNode.resources(), 0, 1, 0, 1); unindent(); Token closeBraceToken = 
formatToken(serviceBodyNode.closeBraceToken(), this.trailingWS, this.trailingNL); return serviceBodyNode.modify() .withOpenBraceToken(openBraceToken) .withResources(resources) .withCloseBraceToken(closeBraceToken) .apply(); } @Override public QualifiedNameReferenceNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) { Token modulePrefix = formatToken(qualifiedNameReferenceNode.modulePrefix(), 0, 0); Token colon = formatToken((Token) qualifiedNameReferenceNode.colon(), 0, 0); IdentifierToken identifier = formatToken(qualifiedNameReferenceNode.identifier(), this.trailingWS, this.trailingNL); return qualifiedNameReferenceNode.modify() .withModulePrefix(modulePrefix) .withColon(colon) .withIdentifier(identifier) .apply(); } @Override public ReturnTypeDescriptorNode transform(ReturnTypeDescriptorNode returnTypeDescriptorNode) { Token returnsKeyword = formatToken(returnTypeDescriptorNode.returnsKeyword(), 1, 0); NodeList<AnnotationNode> annotations = formatNodeList(returnTypeDescriptorNode.annotations(), 0, 0, 1, 0); Node type = formatNode(returnTypeDescriptorNode.type(), 1, 0); return returnTypeDescriptorNode.modify() .withReturnsKeyword(returnsKeyword) .withAnnotations(annotations) .withType(type) .apply(); } @Override public OptionalTypeDescriptorNode transform(OptionalTypeDescriptorNode optionalTypeDescriptorNode) { Node typeDescriptor = formatNode(optionalTypeDescriptorNode.typeDescriptor(), 0, 0); Token questionMarkToken = formatToken(optionalTypeDescriptorNode.questionMarkToken(), this.trailingWS, this.trailingNL); return optionalTypeDescriptorNode.modify() .withTypeDescriptor(typeDescriptor) .withQuestionMarkToken(questionMarkToken) .apply(); } @Override public ExpressionStatementNode transform(ExpressionStatementNode expressionStatementNode) { ExpressionNode expression = formatNode(expressionStatementNode.expression(), 0, 0); Token semicolonToken = formatToken(expressionStatementNode.semicolonToken(), this.trailingWS, this.trailingNL); return 
expressionStatementNode.modify() .withExpression(expression) .withSemicolonToken(semicolonToken) .apply(); } @Override public CheckExpressionNode transform(CheckExpressionNode checkExpressionNode) { Token checkKeyword = formatToken(checkExpressionNode.checkKeyword(), 1, 0); ExpressionNode expressionNode = formatNode(checkExpressionNode.expression(), this.trailingWS, this.trailingNL); return checkExpressionNode.modify() .withCheckKeyword(checkKeyword) .withExpression(expressionNode) .apply(); } @Override public RemoteMethodCallActionNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) { ExpressionNode expression = formatNode(remoteMethodCallActionNode.expression(), 0, 0); Token rightArrowToken = formatToken(remoteMethodCallActionNode.rightArrowToken(), 0, 0); SimpleNameReferenceNode methodName = formatNode(remoteMethodCallActionNode.methodName(), 0, 0); Token openParenToken = formatToken(remoteMethodCallActionNode.openParenToken(), 0, 0); SeparatedNodeList<FunctionArgumentNode> arguments = formatSeparatedNodeList(remoteMethodCallActionNode .arguments(), 1, 0, 0, 0); Token closeParenToken = formatToken(remoteMethodCallActionNode.closeParenToken(), this.trailingWS, this.trailingNL); return remoteMethodCallActionNode.modify() .withExpression(expression) .withRightArrowToken(rightArrowToken) .withMethodName(methodName) .withOpenParenToken(openParenToken) .withArguments(arguments) .withCloseParenToken(closeParenToken) .apply(); } @Override public SimpleNameReferenceNode transform(SimpleNameReferenceNode simpleNameReferenceNode) { Token name = formatToken(simpleNameReferenceNode.name(), this.trailingWS, this.trailingNL); return simpleNameReferenceNode.modify() .withName(name) .apply(); } @Override public TypeDefinitionNode transform(TypeDefinitionNode typeDefinitionNode) { if (typeDefinitionNode.metadata().isPresent()) { MetadataNode metadata = formatNode(typeDefinitionNode.metadata().get(), 1, 0); typeDefinitionNode = 
typeDefinitionNode.modify().withMetadata(metadata).apply(); } if (typeDefinitionNode.visibilityQualifier().isPresent()) { Token visibilityQualifier = formatToken(typeDefinitionNode.visibilityQualifier().get(), 1, 0); typeDefinitionNode = typeDefinitionNode.modify().withVisibilityQualifier(visibilityQualifier).apply(); } Token typeKeyword = formatToken(typeDefinitionNode.typeKeyword(), 1, 0); Token typeName = formatToken(typeDefinitionNode.typeName(), 1, 0); Node typeDescriptor = formatNode(typeDefinitionNode.typeDescriptor(), 1, 0); Token semicolonToken = formatToken(typeDefinitionNode.semicolonToken(), this.trailingWS, this.trailingNL); return typeDefinitionNode.modify() .withTypeKeyword(typeKeyword) .withTypeName(typeName) .withTypeDescriptor(typeDescriptor) .withSemicolonToken(semicolonToken) .apply(); } @Override public SingletonTypeDescriptorNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) { ExpressionNode simpleContExprNode = formatNode(singletonTypeDescriptorNode.simpleContExprNode(), 1, 0); return singletonTypeDescriptorNode.modify() .withSimpleContExprNode(simpleContExprNode) .apply(); } @Override public WhileStatementNode transform(WhileStatementNode whileStatementNode) { boolean hasOnFailClause = whileStatementNode.onFailClause().isPresent(); Token whileKeyword = formatToken(whileStatementNode.whileKeyword(), 1, 0); ExpressionNode condition = formatNode(whileStatementNode.condition(), 1, 0); BlockStatementNode whileBody = formatNode(whileStatementNode.whileBody(), hasOnFailClause ? 1 : this.trailingWS, hasOnFailClause ? 
0 : this.trailingNL); if (hasOnFailClause) { OnFailClauseNode onFailClause = formatNode(whileStatementNode.onFailClause().get(), this.trailingWS, this.trailingNL); whileStatementNode = whileStatementNode.modify().withOnFailClause(onFailClause).apply(); } return whileStatementNode.modify() .withWhileKeyword(whileKeyword) .withCondition(condition) .withWhileBody(whileBody) .apply(); } @Override public BracedExpressionNode transform(BracedExpressionNode bracedExpressionNode) { Token openParen = formatToken(bracedExpressionNode.openParen(), 0, 0); ExpressionNode expression = formatNode(bracedExpressionNode.expression(), 0, 0); Token closeParen = formatToken(bracedExpressionNode.closeParen(), this.trailingWS, this.trailingNL); return bracedExpressionNode.modify() .withOpenParen(openParen) .withExpression(expression) .withCloseParen(closeParen) .apply(); } @Override public AssignmentStatementNode transform(AssignmentStatementNode assignmentStatementNode) { Node varRef = formatNode(assignmentStatementNode.varRef(), 1, 0); Token equalsToken = formatToken(assignmentStatementNode.equalsToken(), 1, 0); ExpressionNode expression = formatNode(assignmentStatementNode.expression(), 0, 0); Token semicolonToken = formatToken(assignmentStatementNode.semicolonToken(), this.trailingWS, this.trailingNL); return assignmentStatementNode.modify() .withVarRef(varRef) .withEqualsToken(equalsToken) .withExpression(expression) .withSemicolonToken(semicolonToken) .apply(); } @Override public CompoundAssignmentStatementNode transform(CompoundAssignmentStatementNode compoundAssignmentStatementNode) { ExpressionNode lhsExpression = formatNode(compoundAssignmentStatementNode.lhsExpression(), 1, 0); Token binaryOperator = formatToken(compoundAssignmentStatementNode.binaryOperator(), 0, 0); Token equalsToken = formatToken(compoundAssignmentStatementNode.equalsToken(), 1, 0); ExpressionNode rhsExpression = formatNode(compoundAssignmentStatementNode.rhsExpression(), 0, 0); Token semicolonToken = 
formatToken(compoundAssignmentStatementNode.semicolonToken(), this.trailingWS, this.trailingNL); return compoundAssignmentStatementNode.modify() .withLhsExpression(lhsExpression) .withBinaryOperator(binaryOperator) .withEqualsToken(equalsToken) .withRhsExpression(rhsExpression) .withSemicolonToken(semicolonToken) .apply(); } @Override public DoStatementNode transform(DoStatementNode doStatementNode) { boolean hasOnFailClause = doStatementNode.onFailClause().isPresent(); Token doKeyword = formatToken(doStatementNode.doKeyword(), 1, 0); BlockStatementNode blockStatement = formatNode(doStatementNode.blockStatement(), hasOnFailClause ? 1 : this.trailingWS, hasOnFailClause ? 0 : this.trailingNL); if (hasOnFailClause) { OnFailClauseNode onFailClause = formatNode(doStatementNode.onFailClause().get(), this.trailingWS, this.trailingNL); doStatementNode = doStatementNode.modify().withOnFailClause(onFailClause).apply(); } return doStatementNode.modify() .withDoKeyword(doKeyword) .withBlockStatement(blockStatement) .apply(); } @Override public ForEachStatementNode transform(ForEachStatementNode forEachStatementNode) { boolean hasOnFailClause = forEachStatementNode.onFailClause().isPresent(); Token forEachKeyword = formatToken(forEachStatementNode.forEachKeyword(), 1, 0); TypedBindingPatternNode typedBindingPattern = formatNode(forEachStatementNode.typedBindingPattern(), 1, 0); Token inKeyword = formatToken(forEachStatementNode.inKeyword(), 1, 0); Node actionOrExpressionNode = formatNode(forEachStatementNode.actionOrExpressionNode(), 1, 0); StatementNode blockStatement = formatNode(forEachStatementNode.blockStatement(), hasOnFailClause ? 1 : this.trailingWS, hasOnFailClause ? 
0 : this.trailingNL); if (hasOnFailClause) { OnFailClauseNode onFailClause = formatNode(forEachStatementNode.onFailClause().get(), this.trailingWS, this.trailingNL); forEachStatementNode = forEachStatementNode.modify().withOnFailClause(onFailClause).apply(); } return forEachStatementNode.modify() .withForEachKeyword(forEachKeyword) .withTypedBindingPattern(typedBindingPattern) .withInKeyword(inKeyword) .withActionOrExpressionNode(actionOrExpressionNode) .withBlockStatement(blockStatement) .apply(); } @Override public BinaryExpressionNode transform(BinaryExpressionNode binaryExpressionNode) { Node lhsExpr = formatNode(binaryExpressionNode.lhsExpr(), 1, 0); Token operator = formatToken(binaryExpressionNode.operator(), 1, 0); Node rhsExpr = formatNode(binaryExpressionNode.rhsExpr(), this.trailingWS, this.trailingNL); return binaryExpressionNode.modify() .withLhsExpr(lhsExpr) .withOperator(operator) .withRhsExpr(rhsExpr) .apply(); } @Override @Override public ReturnStatementNode transform(ReturnStatementNode returnStatementNode) { Token returnKeyword = formatToken(returnStatementNode.returnKeyword(), returnStatementNode.expression().isPresent() ? 
1 : 0, 0); if (returnStatementNode.expression().isPresent()) { ExpressionNode expressionNode = formatNode(returnStatementNode.expression().get(), 0, 0); returnStatementNode = returnStatementNode.modify().withExpression(expressionNode).apply(); } Token semicolonToken = formatToken(returnStatementNode.semicolonToken(), this.trailingWS, this.trailingNL); return returnStatementNode.modify() .withReturnKeyword(returnKeyword) .withSemicolonToken(semicolonToken) .apply(); } @Override public FunctionCallExpressionNode transform(FunctionCallExpressionNode functionCallExpressionNode) { NameReferenceNode functionName = formatNode(functionCallExpressionNode.functionName(), 0, 0); Token functionCallOpenPara = formatToken(functionCallExpressionNode.openParenToken(), 0, 0); SeparatedNodeList<FunctionArgumentNode> arguments = formatSeparatedNodeList(functionCallExpressionNode .arguments(), 0, 0, 0, 0); Token functionCallClosePara = formatToken(functionCallExpressionNode.closeParenToken(), this.trailingWS, this.trailingNL); return functionCallExpressionNode.modify() .withFunctionName(functionName) .withOpenParenToken(functionCallOpenPara) .withCloseParenToken(functionCallClosePara) .withArguments(arguments) .apply(); } @Override public IdentifierToken transform(IdentifierToken identifier) { return formatToken(identifier, this.trailingWS, this.trailingNL); } @Override public Token transform(Token token) { return formatToken(token, this.trailingWS, this.trailingNL); } /** * Format a node. 
* * @param <T> Type of the node * @param node Node to be formatted * @param trailingWS Number of single-length spaces to be added after the node * @param trailingNL Number of newlines to be added after the node * @return Formatted node */ @SuppressWarnings("unchecked") private <T extends Node> T formatNode(T node, int trailingWS, int trailingNL) { if (node == null) { return node; } if (!isInLineRange(node, lineRange)) { checkForNewline(node); return node; } int prevTrailingNL = this.trailingNL; int prevTrailingWS = this.trailingWS; this.trailingNL = trailingNL; this.trailingWS = trailingWS; node = (T) node.apply(this); if (this.lineLength > COLUMN_LIMIT) { node = wrap(node); } this.trailingNL = prevTrailingNL; this.trailingWS = prevTrailingWS; return node; } /** * Wrap the node. This is equivalent to adding a newline before the node and * re-formatting the node. Wrapped content will start from the current level * of indentation. * * @param <T> Node type * @param node Node to be wrapped * @return Wrapped node */ @SuppressWarnings("unchecked") private <T extends Node> T wrap(T node) { this.leadingNL += 1; this.lineLength = 0; this.hasNewline = true; return (T) node.apply(this); } /** * Format a token. * * @param <T> Type of the token * @param token Token to be formatted * @param trailingWS Number of single-length spaces to be added after the token * @param trailingNL Number of newlines to be added after the token * @return Formatted token */ private <T extends Token> T formatToken(T token, int trailingWS, int trailingNL) { if (token == null) { return token; } if (!isInLineRange(token, lineRange)) { checkForNewline(token); return token; } int prevTrailingNL = this.trailingNL; int prevTrailingWS = this.trailingWS; this.trailingNL = trailingNL > 0 ? 1 : 0; this.trailingWS = trailingWS; token = formatTokenInternal(token); this.leadingNL = trailingNL > 0 ? 
trailingNL - 1 : 0; this.hasNewline = trailingNL > 0; this.trailingNL = prevTrailingNL; this.trailingWS = prevTrailingWS; return token; } private <T extends Node> void checkForNewline(T node) { for (Minutiae mintiae : node.trailingMinutiae()) { if (mintiae.kind() == SyntaxKind.END_OF_LINE_MINUTIAE) { this.hasNewline = true; return; } } } /** * Format a list of nodes. * * @param <T> Type of the list item * @param nodeList Node list to be formatted * @param itemTrailingWS Number of single-length spaces to be added after each item of the list * @param itemTrailingNL Number of newlines to be added after each item of the list * @param listTrailingWS Number of single-length spaces to be added after the last item of the list * @param listTrailingNL Number of newlines to be added after the last item of the list * @return Formatted node list */ @SuppressWarnings("unchecked") protected <T extends Node> NodeList<T> formatNodeList(NodeList<T> nodeList, int itemTrailingWS, int itemTrailingNL, int listTrailingWS, int listTrailingNL) { if (nodeList.isEmpty()) { return nodeList; } boolean nodeModified = false; int size = nodeList.size(); Node[] newNodes = new Node[size]; for (int index = 0; index < size; index++) { T oldNode = nodeList.get(index); T newNode; if (index == size - 1) { newNode = formatNode(oldNode, listTrailingWS, listTrailingNL); } else { newNode = formatNode(oldNode, itemTrailingWS, itemTrailingNL); } if (oldNode != newNode) { nodeModified = true; } newNodes[index] = newNode; } if (!nodeModified) { return nodeList; } return (NodeList<T>) NodeFactory.createNodeList(newNodes); } /** * Format a delimited list of nodes. This method assumes the delimiters are followed by a * single whitespace character only. 
 *
 * @param <T>            Type of the list item
 * @param nodeList       Node list to be formatted
 * @param itemTrailingWS Number of single-length spaces to be added after each item in the list
 * @param itemTrailingNL Number of newlines to be added after each item in the list
 * @param listTrailingWS Number of single-length spaces to be added after the last item in the list
 * @param listTrailingNL Number of newlines to be added after the last item in the list
 * @return Formatted node list
 */
protected <T extends Node> SeparatedNodeList<T> formatSeparatedNodeList(SeparatedNodeList<T> nodeList,
                                                                        int itemTrailingWS,
                                                                        int itemTrailingNL,
                                                                        int listTrailingWS,
                                                                        int listTrailingNL) {
    // Delegate with a single space (and no newline) after each separator.
    return formatSeparatedNodeList(nodeList, itemTrailingWS, itemTrailingNL, 1, 0, listTrailingWS, listTrailingNL);
}

/**
 * Format a delimited list of nodes.
 *
 * @param <T>                 Type of the list item
 * @param nodeList            Node list to be formatted
 * @param itemTrailingWS      Number of single-length spaces to be added after each item in the list
 * @param itemTrailingNL      Number of newlines to be added after each item in the list
 * @param separatorTrailingWS Number of single-length spaces to be added after each separator in the list
 * @param separatorTrailingNL Number of newlines to be added after each separator in the list
 * @param listTrailingWS      Number of single-length spaces to be added after the last item in the list
 * @param listTrailingNL      Number of newlines to be added after the last item in the list
 * @return Formatted node list
 */
@SuppressWarnings("unchecked")
protected <T extends Node> SeparatedNodeList<T> formatSeparatedNodeList(SeparatedNodeList<T> nodeList,
                                                                        int itemTrailingWS,
                                                                        int itemTrailingNL,
                                                                        int separatorTrailingWS,
                                                                        int separatorTrailingNL,
                                                                        int listTrailingWS,
                                                                        int listTrailingNL) {
    if (nodeList.isEmpty()) {
        return nodeList;
    }

    boolean nodeModified = false;
    int size = nodeList.size();
    // Items and separators interleave: n items + (n - 1) separators.
    Node[] newNodes = new Node[size * 2 - 1];
    for (int index = 0; index < size; index++) {
        T oldNode = nodeList.get(index);
        T newNode;
        if (index == size - 1) {
            // Last item of the list: the list-level trailing minutiae apply.
            newNode = formatNode(oldNode, listTrailingWS, listTrailingNL);
        } else {
            newNode = formatNode(oldNode, itemTrailingWS, itemTrailingNL);
        }
        newNodes[2 * index] = newNode;
        if (oldNode != newNode) {
            nodeModified = true;
        }

        // No separator after the last item.
        if (index == nodeList.size() - 1) {
            break;
        }

        Token oldSeperator = nodeList.getSeparator(index);
        Token newSeperator = formatToken(oldSeperator, separatorTrailingWS, separatorTrailingNL);
        newNodes[(2 * index) + 1] = newSeperator;
        if (oldSeperator != newSeperator) {
            nodeModified = true;
        }
    }

    // Avoid rebuilding the list when no item or separator changed.
    if (!nodeModified) {
        return nodeList;
    }

    return (SeparatedNodeList<T>) NodeFactory.createSeparatedNodeList(newNodes);
}

/**
 * Format a token.
 *
 * @param <T>   Type of the token
 * @param token Token to be formatted
 * @return Formatted token
 */
@SuppressWarnings("unchecked")
private <T extends Token> T formatTokenInternal(T token) {
    MinutiaeList newLeadingMinutiaeList = getLeadingMinutiae();
    // Track the line length for the column-limit check in formatNode().
    this.lineLength += token.text().length();
    MinutiaeList newTrailingMinutiaeList = getTrailingMinutiae();
    return (T) token.modify(newLeadingMinutiaeList, newTrailingMinutiaeList);
}

/**
 * Get leading minutiae.
 *
 * @return Leading minutiae list
 */
private MinutiaeList getLeadingMinutiae() {
    List<Minutiae> leadingMinutiae = new ArrayList<>();
    if (this.hasNewline) {
        // First token of a line: emit the pending newlines, then the indentation.
        for (int i = 0; i < this.leadingNL; i++) {
            leadingMinutiae.add(getNewline());
        }

        if (this.indentation > 0) {
            String wsContent = getWSContent(this.indentation);
            leadingMinutiae.add(NodeFactory.createWhitespaceMinutiae(wsContent));
        }
    }

    MinutiaeList newLeadingMinutiaeList = NodeFactory.createMinutiaeList(leadingMinutiae);
    return newLeadingMinutiaeList;
}

/**
 * Get trailing minutiae.
 *
 * @return Trailing minutiae list
 */
private MinutiaeList getTrailingMinutiae() {
    List<Minutiae> trailingMinutiae = new ArrayList<>();
    if (this.trailingWS > 0) {
        String wsContent = getWSContent(this.trailingWS);
        trailingMinutiae.add(NodeFactory.createWhitespaceMinutiae(wsContent));
    }

    if (this.trailingNL > 0) {
        trailingMinutiae.add(getNewline());
    }
    MinutiaeList newTrailingMinutiaeList = NodeFactory.createMinutiaeList(trailingMinutiae);
    return newTrailingMinutiaeList;
}

// Emitting a newline resets the running line-length counter.
private Minutiae getNewline() {
    this.lineLength = 0;
    return NodeFactory.createEndOfLineMinutiae(FormatterUtils.NEWLINE_SYMBOL);
}

/**
 * Indent the code by the 4-whitespace characters.
 */
private void indent() {
    this.indentation += DEFAULT_INDENTATION;
}

/**
 * Undo the indentation of the code by the 4-whitespace characters.
 */
private void unindent() {
    if (this.indentation < DEFAULT_INDENTATION) {
        this.indentation = 0;
        return;
    }

    this.indentation -= DEFAULT_INDENTATION;
}

/**
 * Set the indentation for the code to follow.
 *
 * @param value Number of characters to set the indentation from the start of the line.
 */
private void setIndentation(int value) {
    this.indentation = value;
}

// Builds a run of `count` spaces and charges it to the current line length.
private String getWSContent(int count) {
    this.lineLength += count;
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < count; i++) {
        sb.append(" ");
    }
    return sb.toString();
}

// Heuristic: expand a record type over multiple lines when it has more than
// three entries (fields + rest descriptor), or at least two entries where any
// single field's source span exceeds 15 characters.
private boolean shouldExpand(RecordTypeDescriptorNode recordTypeDesc) {
    int fieldCount = recordTypeDesc.fields().size();
    fieldCount += recordTypeDesc.recordRestDescriptor().isPresent() ? 1 : 0;

    if (fieldCount <= 1) {
        return false;
    }

    if (fieldCount > 3) {
        return true;
    }

    for (Node field : recordTypeDesc.fields()) {
        TextRange textRange = field.textRange();
        if ((textRange.endOffset() - textRange.startOffset()) > 15) {
            return true;
        }
    }

    return false;
}
}
class NewFormattingTreeModifier extends FormattingTreeModifier {

    /**
     * Number of whitespace characters to be used as the indentation for the current line.
     */
    private int indentation = 0;

    /**
     * Number of leading newlines to be added to the currently processing node.
     */
    private int leadingNL = 0;

    /**
     * Number of trailing newlines to be added to the currently processing node.
     */
    private int trailingNL = 0;

    /**
     * Number of trailing whitespace characters to be added to the currently processing node.
     */
    private int trailingWS = 0;

    /**
     * Flag indicating whether the currently formatting token is the first token of the current line.
     */
    private boolean hasNewline = true;

    /**
     * Number of whitespace characters to be used for a single indentation.
     */
    private static final int DEFAULT_INDENTATION = 4;

    /**
     * Maximum length of a line. Any line that goes past this limit will be wrapped.
     */
    private static final int COLUMN_LIMIT = 80;

    /**
     * Length of the currently formatting line.
     */
    private int lineLength = 0;

    public NewFormattingTreeModifier(FormattingOptions options, LineRange lineRange) {
        super(options, lineRange);
    }

    @Override
    public ModulePartNode transform(ModulePartNode modulePartNode) {
        // Imports are separated by one newline; module members by a blank line.
        NodeList<ImportDeclarationNode> imports = formatNodeList(modulePartNode.imports(), 0, 1, 0, 2);
        NodeList<ModuleMemberDeclarationNode> members = formatNodeList(modulePartNode.members(), 0, 2, 0, 1);
        Token eofToken = formatToken(modulePartNode.eofToken(), 0, 0);
        return modulePartNode.modify(imports, members, eofToken);
    }

    @Override
    public FunctionDefinitionNode transform(FunctionDefinitionNode functionDefinitionNode) {
        if (functionDefinitionNode.metadata().isPresent()) {
            MetadataNode metadata = formatNode(functionDefinitionNode.metadata().get(), 1, 0);
            functionDefinitionNode = functionDefinitionNode.modify().withMetadata(metadata).apply();
        }

        NodeList<Token> qualifierList = formatNodeList(functionDefinitionNode.qualifierList(), 1, 0, 1, 0);
        Token functionKeyword = formatToken(functionDefinitionNode.functionKeyword(), 1, 0);
        IdentifierToken functionName = formatToken(functionDefinitionNode.functionName(), 0, 0);
        FunctionSignatureNode functionSignatureNode = formatNode(functionDefinitionNode.functionSignature(), 1, 0);
        // The body inherits this definition's trailing minutiae.
        FunctionBodyNode functionBodyNode =
                formatNode(functionDefinitionNode.functionBody(), this.trailingWS, this.trailingNL);

        return functionDefinitionNode.modify()
                .withFunctionKeyword(functionKeyword)
                .withFunctionName(functionName)
                .withFunctionSignature(functionSignatureNode).withQualifierList(qualifierList)
                .withFunctionBody(functionBodyNode)
                .apply();
    }

    @Override
    public FunctionSignatureNode transform(FunctionSignatureNode functionSignatureNode) {
        Token openPara = formatToken(functionSignatureNode.openParenToken(), 0, 0);

        // Align wrapped parameters with the column of the opening parenthesis.
        int currentIndentation = this.indentation;
        setIndentation(this.lineLength);
        SeparatedNodeList<ParameterNode> parameters =
                formatSeparatedNodeList(functionSignatureNode.parameters(), 0, 0, 0, 0);
        setIndentation(currentIndentation);

        Token closePara = formatToken(functionSignatureNode.closeParenToken(), 1, 0);
        if (functionSignatureNode.returnTypeDesc().isPresent()) {
            ReturnTypeDescriptorNode returnTypeDesc =
                    formatNode(functionSignatureNode.returnTypeDesc().get(), this.trailingWS, this.trailingNL);
            functionSignatureNode = functionSignatureNode.modify().withReturnTypeDesc(returnTypeDesc).apply();
        }

        return functionSignatureNode.modify()
                .withOpenParenToken(openPara)
                .withCloseParenToken(closePara)
                .withParameters(parameters)
                .apply();
    }

    @Override
    public RequiredParameterNode transform(RequiredParameterNode requiredParameterNode) {
        NodeList<AnnotationNode> annotations = formatNodeList(requiredParameterNode.annotations(), 0, 1, 0, 0);
        Node typeName = formatNode(requiredParameterNode.typeName(), 1, 0);

        if (requiredParameterNode.paramName().isPresent()) {
            Token paramName = formatToken(requiredParameterNode.paramName().get(), 0, 0);
            return requiredParameterNode.modify()
                    .withAnnotations(annotations)
                    .withTypeName(typeName)
                    .withParamName(paramName)
                    .apply();
        } else {
            return requiredParameterNode.modify()
                    .withAnnotations(annotations)
                    .withTypeName(typeName)
                    .apply();
        }
    }

    @Override
    public FunctionBodyBlockNode transform(FunctionBodyBlockNode functionBodyBlockNode) {
        Token openBrace = formatToken(functionBodyBlockNode.openBraceToken(), 0, 1);
        indent(); // increase the indentation for the statements inside the body.
        NodeList<StatementNode> statements = formatNodeList(functionBodyBlockNode.statements(), 0, 1, 0, 1);
        unindent(); // reset the indentation for the closing brace.
        Token closeBrace = formatToken(functionBodyBlockNode.closeBraceToken(), this.trailingWS, this.trailingNL);

        return functionBodyBlockNode.modify()
                .withOpenBraceToken(openBrace)
                .withCloseBraceToken(closeBrace)
                .withStatements(statements)
                .apply();
    }

    @Override
    public VariableDeclarationNode transform(VariableDeclarationNode variableDeclarationNode) {
        NodeList<AnnotationNode> annotationNodes = formatNodeList(variableDeclarationNode.annotations(), 0, 1, 0, 1);
        if (variableDeclarationNode.finalKeyword().isPresent()) {
            Token finalToken = formatToken(variableDeclarationNode.finalKeyword().get(), 1, 0);
            variableDeclarationNode = variableDeclarationNode.modify().withFinalKeyword(finalToken).apply();
        }

        TypedBindingPatternNode typedBindingPatternNode;
        if (variableDeclarationNode.equalsToken().isPresent()) {
            // Declaration with an initializer: `T b = expr;`
            typedBindingPatternNode = formatNode(variableDeclarationNode.typedBindingPattern(), 1, 0);
            Token equalToken = formatToken(variableDeclarationNode.equalsToken().get(), 1, 0);
            ExpressionNode initializer = formatNode(variableDeclarationNode.initializer().get(), 0, 0);
            Token semicolonToken =
                    formatToken(variableDeclarationNode.semicolonToken(), this.trailingWS, this.trailingNL);
            return variableDeclarationNode.modify()
                    .withAnnotations(annotationNodes)
                    .withTypedBindingPattern(typedBindingPatternNode)
                    .withEqualsToken(equalToken)
                    .withInitializer(initializer)
                    .withSemicolonToken(semicolonToken)
                    .apply();
        } else {
            // Declaration only: `T b;`
            typedBindingPatternNode = formatNode(variableDeclarationNode.typedBindingPattern(), 0, 0);
            Token semicolonToken =
                    formatToken(variableDeclarationNode.semicolonToken(), this.trailingWS, this.trailingNL);
            return variableDeclarationNode.modify()
                    .withAnnotations(annotationNodes)
                    .withTypedBindingPattern(typedBindingPatternNode)
                    .withSemicolonToken(semicolonToken)
                    .apply();
        }
    }

    @Override
    public TypedBindingPatternNode transform(TypedBindingPatternNode typedBindingPatternNode) {
        TypeDescriptorNode typeDescriptorNode = formatNode(typedBindingPatternNode.typeDescriptor(), 1, 0);
        BindingPatternNode bindingPatternNode =
                formatNode(typedBindingPatternNode.bindingPattern(), this.trailingWS, this.trailingNL);
        return typedBindingPatternNode.modify()
                .withTypeDescriptor(typeDescriptorNode)
                .withBindingPattern(bindingPatternNode)
                .apply();
    }

    @Override
    public BuiltinSimpleNameReferenceNode transform(BuiltinSimpleNameReferenceNode builtinSimpleNameReferenceNode) {
        Token name = formatToken(builtinSimpleNameReferenceNode.name(), this.trailingWS, this.trailingNL);
        return builtinSimpleNameReferenceNode.modify().withName(name).apply();
    }

    @Override
    public BasicLiteralNode transform(BasicLiteralNode basicLiteralNode) {
        Token literalToken = formatToken(basicLiteralNode.literalToken(), this.trailingWS, this.trailingNL);
        return basicLiteralNode.modify().withLiteralToken(literalToken).apply();
    }

    @Override
    public CaptureBindingPatternNode transform(CaptureBindingPatternNode captureBindingPatternNode) {
        Token variableName = formatToken(captureBindingPatternNode.variableName(), this.trailingWS, this.trailingNL);
        return captureBindingPatternNode.modify().withVariableName(variableName).apply();
    }

    @Override
    public IfElseStatementNode transform(IfElseStatementNode ifElseStatementNode) {
        Token ifKeyword = formatToken(ifElseStatementNode.ifKeyword(), 1, 0);
        ExpressionNode condition = formatNode(ifElseStatementNode.condition(), 1, 0);
        BlockStatementNode ifBody;
        if (ifElseStatementNode.elseBody().isPresent()) {
            ifBody = formatNode(ifElseStatementNode.ifBody(), 1, 0);
            Node elseBody =
formatNode(ifElseStatementNode.elseBody().get(), this.trailingWS, this.trailingNL);
            ifElseStatementNode = ifElseStatementNode.modify().withElseBody(elseBody).apply();
        } else {
            ifBody = formatNode(ifElseStatementNode.ifBody(), this.trailingWS, this.trailingNL);
        }

        return ifElseStatementNode.modify()
                .withIfKeyword(ifKeyword)
                .withIfBody(ifBody)
                .withCondition(condition)
                .apply();
    }

    @Override
    public ElseBlockNode transform(ElseBlockNode elseBlockNode) {
        Token elseKeyword = formatToken(elseBlockNode.elseKeyword(), 1, 0);
        StatementNode elseBody = formatNode(elseBlockNode.elseBody(), this.trailingWS, this.trailingNL);
        return elseBlockNode.modify()
                .withElseKeyword(elseKeyword)
                .withElseBody(elseBody)
                .apply();
    }

    @Override
    public BlockStatementNode transform(BlockStatementNode blockStatementNode) {
        Token openBrace = formatToken(blockStatementNode.openBraceToken(), 0, 1);
        indent(); // increase the indentation for the statements inside the block.
        NodeList<StatementNode> statements = formatNodeList(blockStatementNode.statements(), 0, 1, 0, 1);
        unindent(); // reset the indentation for the closing brace.
        Token closeBrace = formatToken(blockStatementNode.closeBraceToken(), this.trailingWS, this.trailingNL);

        return blockStatementNode.modify()
                .withOpenBraceToken(openBrace)
                .withStatements(statements)
                .withCloseBraceToken(closeBrace)
                .apply();
    }

    @Override
    public RecordTypeDescriptorNode transform(RecordTypeDescriptorNode recordTypeDesc) {
        Token recordKeyword = formatNode(recordTypeDesc.recordKeyword(), 1, 0);

        // Decide whether the record body stays inline (space-separated fields)
        // or expands to one field per line.
        int fieldTrailingWS = 0;
        int fieldTrailingNL = 0;
        if (shouldExpand(recordTypeDesc)) {
            fieldTrailingNL++;
        } else {
            fieldTrailingWS++;
        }

        Token bodyStartDelimiter = formatToken(recordTypeDesc.bodyStartDelimiter(), fieldTrailingWS, fieldTrailingNL);

        // Fields are indented relative to the column of the `record` keyword.
        int prevIndentation = this.indentation;
        setIndentation(recordKeyword.location().lineRange().startLine().offset() + DEFAULT_INDENTATION);

        NodeList<Node> fields = formatNodeList(recordTypeDesc.fields(), fieldTrailingWS, fieldTrailingNL,
                fieldTrailingWS, fieldTrailingNL);

        if (recordTypeDesc.recordRestDescriptor().isPresent()) {
            RecordRestDescriptorNode recordRestDescriptor =
                    formatNode(recordTypeDesc.recordRestDescriptor().get(), fieldTrailingWS, fieldTrailingNL);
            recordTypeDesc = recordTypeDesc.modify().withRecordRestDescriptor(recordRestDescriptor).apply();
        }

        setIndentation(prevIndentation); // reset the indentation for the body-end delimiter.
        Token bodyEndDelimiter = formatToken(recordTypeDesc.bodyEndDelimiter(), this.trailingWS, this.trailingNL);

        return recordTypeDesc.modify()
                .withRecordKeyword(recordKeyword)
                .withBodyStartDelimiter(bodyStartDelimiter)
                .withFields(fields)
                .withBodyEndDelimiter(bodyEndDelimiter)
                .apply();
    }

    @Override
    public RecordFieldNode transform(RecordFieldNode recordField) {
        if (recordField.metadata().isPresent()) {
            MetadataNode metadata = formatNode(recordField.metadata().get(), 0, 1);
            recordField = recordField.modify().withMetadata(metadata).apply();
        }

        if (recordField.readonlyKeyword().isPresent()) {
            Token readonlyKeyword = formatNode(recordField.readonlyKeyword().get(), 1, 0);
            recordField = recordField.modify().withReadonlyKeyword(readonlyKeyword).apply();
        }

        Node typeName = formatNode(recordField.typeName(), 1, 0);
        Token fieldName = formatToken(recordField.fieldName(), 0, 0);

        if (recordField.questionMarkToken().isPresent()) {
            Token questionMarkToken = formatToken(recordField.questionMarkToken().get(), 0, 1);
            recordField = recordField.modify().withQuestionMarkToken(questionMarkToken).apply();
        }

        Token semicolonToken = formatToken(recordField.semicolonToken(), this.trailingWS, this.trailingNL);
        return recordField.modify()
                .withTypeName(typeName)
                .withFieldName(fieldName)
                .withSemicolonToken(semicolonToken)
                .apply();
    }

    @Override
    public RecordFieldWithDefaultValueNode transform(RecordFieldWithDefaultValueNode recordField) {
        if (recordField.metadata().isPresent()) {
            MetadataNode metadata = formatNode(recordField.metadata().get(), 0, 1);
            recordField = recordField.modify().withMetadata(metadata).apply();
        }

        if (recordField.readonlyKeyword().isPresent()) {
            Token readonlyKeyword = formatNode(recordField.readonlyKeyword().get(), 1, 0);
            recordField = recordField.modify().withReadonlyKeyword(readonlyKeyword).apply();
        }

        Node typeName = formatNode(recordField.typeName(), 1, 0);
        Token fieldName = formatToken(recordField.fieldName(), 1, 0);
        Token equalsToken = formatToken(recordField.equalsToken(), 1, 0);
        ExpressionNode expression = formatNode(recordField.expression(), 0, 0);
        Token semicolonToken = formatToken(recordField.semicolonToken(), this.trailingWS, this.trailingNL);

        return recordField.modify()
                .withTypeName(typeName)
                .withFieldName(fieldName)
                .withEqualsToken(equalsToken)
                .withExpression(expression)
                .withSemicolonToken(semicolonToken)
                .apply();
    }

    @Override
    public ImportDeclarationNode transform(ImportDeclarationNode importDeclarationNode) {
        Token importKeyword = formatToken(importDeclarationNode.importKeyword(), 1, 0);
        boolean hasOrgName = importDeclarationNode.orgName().isPresent();
        boolean hasVersion = importDeclarationNode.version().isPresent();
        boolean hasPrefix = importDeclarationNode.prefix().isPresent();

        if (hasOrgName) {
            ImportOrgNameNode orgName = formatNode(importDeclarationNode.orgName().get(), 0, 0);
            importDeclarationNode = importDeclarationNode.modify().withOrgName(orgName).apply();
        }

        // Trailing space after the module name only when a version/prefix follows.
        SeparatedNodeList<IdentifierToken> moduleNames = formatSeparatedNodeList(importDeclarationNode.moduleName(),
                0, 0, 0, 0, (hasVersion || hasPrefix) ? 1 : 0, 0);

        if (hasVersion) {
            ImportVersionNode version = formatNode(importDeclarationNode.version().get(), hasPrefix ? 1 : 0, 0);
            importDeclarationNode = importDeclarationNode.modify().withVersion(version).apply();
        }

        if (hasPrefix) {
            ImportPrefixNode prefix = formatNode(importDeclarationNode.prefix().get(), 0, 0);
            importDeclarationNode = importDeclarationNode.modify().withPrefix(prefix).apply();
        }

        Token semicolon = formatToken(importDeclarationNode.semicolon(), this.trailingWS, this.trailingNL);

        return importDeclarationNode.modify()
                .withImportKeyword(importKeyword)
                .withModuleName(moduleNames)
                .withSemicolon(semicolon)
                .apply();
    }

    @Override
    public ImportOrgNameNode transform(ImportOrgNameNode importOrgNameNode) {
        Token orgName = formatToken(importOrgNameNode.orgName(), 0, 0);
        Token slashToken = formatToken(importOrgNameNode.slashToken(), this.trailingWS, this.trailingNL);
        return importOrgNameNode.modify()
                .withOrgName(orgName)
                .withSlashToken(slashToken)
                .apply();
    }

    @Override
    public ImportPrefixNode transform(ImportPrefixNode importPrefixNode) {
        Token asKeyword = formatToken(importPrefixNode.asKeyword(), 1, 0);
        Token prefix = formatToken(importPrefixNode.prefix(), this.trailingWS, this.trailingNL);
        return importPrefixNode.modify()
                .withAsKeyword(asKeyword)
                .withPrefix(prefix)
                .apply();
    }

    @Override
    public ImportVersionNode transform(ImportVersionNode importVersionNode) {
        Token versionKeyword = formatToken(importVersionNode.versionKeyword(), 1, 0);
        SeparatedNodeList<Token> versionNumber = formatSeparatedNodeList(importVersionNode.versionNumber(),
                0, 0, 0, 0, this.trailingWS, this.trailingNL);
        return importVersionNode.modify()
                .withVersionKeyword(versionKeyword)
                .withVersionNumber(versionNumber)
                .apply();
    }

    @Override
    public ServiceDeclarationNode transform(ServiceDeclarationNode serviceDeclarationNode) {
        if (serviceDeclarationNode.metadata().isPresent()) {
            MetadataNode metadata = formatNode(serviceDeclarationNode.metadata().get(), 1, 0);
            serviceDeclarationNode = serviceDeclarationNode.modify().withMetadata(metadata).apply();
        }

        Token serviceKeyword =
formatToken(serviceDeclarationNode.serviceKeyword(), 1, 0);
        IdentifierToken serviceName = formatToken(serviceDeclarationNode.serviceName(), 1, 0);
        Token onKeyword = formatToken(serviceDeclarationNode.onKeyword(), 1, 0);
        SeparatedNodeList<ExpressionNode> expressions =
                formatSeparatedNodeList(serviceDeclarationNode.expressions(), 0, 0, 1, 0);
        Node serviceBody = formatNode(serviceDeclarationNode.serviceBody(), this.trailingWS, this.trailingNL);

        return serviceDeclarationNode.modify()
                .withServiceKeyword(serviceKeyword)
                .withServiceName(serviceName)
                .withOnKeyword(onKeyword)
                .withExpressions(expressions)
                .withServiceBody(serviceBody)
                .apply();
    }

    @Override
    public ExplicitNewExpressionNode transform(ExplicitNewExpressionNode explicitNewExpressionNode) {
        Token newKeywordToken = formatToken(explicitNewExpressionNode.newKeyword(), 1, 0);
        TypeDescriptorNode typeDescriptorNode = formatNode(explicitNewExpressionNode.typeDescriptor(), 0, 0);
        ParenthesizedArgList parenthesizedArgList =
                formatNode(explicitNewExpressionNode.parenthesizedArgList(), this.trailingWS, this.trailingNL);
        return explicitNewExpressionNode.modify()
                .withNewKeyword(newKeywordToken)
                .withTypeDescriptor(typeDescriptorNode)
                .withParenthesizedArgList(parenthesizedArgList)
                .apply();
    }

    @Override
    public ParenthesizedArgList transform(ParenthesizedArgList parenthesizedArgList) {
        Token openParenToken = formatToken(parenthesizedArgList.openParenToken(), 0, 0);
        SeparatedNodeList<FunctionArgumentNode> arguments = formatSeparatedNodeList(parenthesizedArgList
                .arguments(), 0, 0, 0, 0);
        Token closeParenToken = formatToken(parenthesizedArgList.closeParenToken(), this.trailingWS, this.trailingNL);
        return parenthesizedArgList.modify()
                .withOpenParenToken(openParenToken)
                .withArguments(arguments)
                .withCloseParenToken(closeParenToken)
                .apply();
    }

    @Override
    public ServiceBodyNode transform(ServiceBodyNode serviceBodyNode) {
        Token openBraceToken = formatToken(serviceBodyNode.openBraceToken(), 0, 1);
        indent(); // increase the indentation for the resources inside the body.
        NodeList<Node> resources = formatNodeList(serviceBodyNode.resources(), 0, 1, 0, 1);
        unindent(); // reset the indentation for the closing brace.
        Token closeBraceToken = formatToken(serviceBodyNode.closeBraceToken(), this.trailingWS, this.trailingNL);
        return serviceBodyNode.modify()
                .withOpenBraceToken(openBraceToken)
                .withResources(resources)
                .withCloseBraceToken(closeBraceToken)
                .apply();
    }

    @Override
    public QualifiedNameReferenceNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
        Token modulePrefix = formatToken(qualifiedNameReferenceNode.modulePrefix(), 0, 0);
        Token colon = formatToken((Token) qualifiedNameReferenceNode.colon(), 0, 0);
        IdentifierToken identifier =
                formatToken(qualifiedNameReferenceNode.identifier(), this.trailingWS, this.trailingNL);
        return qualifiedNameReferenceNode.modify()
                .withModulePrefix(modulePrefix)
                .withColon(colon)
                .withIdentifier(identifier)
                .apply();
    }

    @Override
    public ReturnTypeDescriptorNode transform(ReturnTypeDescriptorNode returnTypeDescriptorNode) {
        Token returnsKeyword = formatToken(returnTypeDescriptorNode.returnsKeyword(), 1, 0);
        NodeList<AnnotationNode> annotations = formatNodeList(returnTypeDescriptorNode.annotations(), 0, 0, 1, 0);
        Node type = formatNode(returnTypeDescriptorNode.type(), this.trailingWS, this.trailingNL);
        return returnTypeDescriptorNode.modify()
                .withReturnsKeyword(returnsKeyword)
                .withAnnotations(annotations)
                .withType(type)
                .apply();
    }

    @Override
    public OptionalTypeDescriptorNode transform(OptionalTypeDescriptorNode optionalTypeDescriptorNode) {
        Node typeDescriptor = formatNode(optionalTypeDescriptorNode.typeDescriptor(), 0, 0);
        Token questionMarkToken =
                formatToken(optionalTypeDescriptorNode.questionMarkToken(), this.trailingWS, this.trailingNL);
        return optionalTypeDescriptorNode.modify()
                .withTypeDescriptor(typeDescriptor)
                .withQuestionMarkToken(questionMarkToken)
                .apply();
    }

    @Override
    public ExpressionStatementNode transform(ExpressionStatementNode expressionStatementNode) {
        ExpressionNode expression = formatNode(expressionStatementNode.expression(), 0, 0);
        Token semicolonToken = formatToken(expressionStatementNode.semicolonToken(), this.trailingWS, this.trailingNL);
        return expressionStatementNode.modify()
                .withExpression(expression)
                .withSemicolonToken(semicolonToken)
                .apply();
    }

    @Override
    public CheckExpressionNode transform(CheckExpressionNode checkExpressionNode) {
        Token checkKeyword = formatToken(checkExpressionNode.checkKeyword(), 1, 0);
        ExpressionNode expressionNode = formatNode(checkExpressionNode.expression(), this.trailingWS, this.trailingNL);
        return checkExpressionNode.modify()
                .withCheckKeyword(checkKeyword)
                .withExpression(expressionNode)
                .apply();
    }

    @Override
    public RemoteMethodCallActionNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) {
        ExpressionNode expression = formatNode(remoteMethodCallActionNode.expression(), 0, 0);
        Token rightArrowToken = formatToken(remoteMethodCallActionNode.rightArrowToken(), 0, 0);
        SimpleNameReferenceNode methodName = formatNode(remoteMethodCallActionNode.methodName(), 0, 0);
        Token openParenToken = formatToken(remoteMethodCallActionNode.openParenToken(), 0, 0);
        SeparatedNodeList<FunctionArgumentNode> arguments = formatSeparatedNodeList(remoteMethodCallActionNode
                .arguments(), 1, 0, 0, 0);
        Token closeParenToken =
                formatToken(remoteMethodCallActionNode.closeParenToken(), this.trailingWS, this.trailingNL);
        return remoteMethodCallActionNode.modify()
                .withExpression(expression)
                .withRightArrowToken(rightArrowToken)
                .withMethodName(methodName)
                .withOpenParenToken(openParenToken)
                .withArguments(arguments)
                .withCloseParenToken(closeParenToken)
                .apply();
    }

    @Override
    public SimpleNameReferenceNode transform(SimpleNameReferenceNode simpleNameReferenceNode) {
        Token name = formatToken(simpleNameReferenceNode.name(), this.trailingWS, this.trailingNL);
        return simpleNameReferenceNode.modify()
                .withName(name)
                .apply();
    }

    @Override
    public TypeDefinitionNode transform(TypeDefinitionNode typeDefinitionNode) {
        if
(typeDefinitionNode.metadata().isPresent()) {
            MetadataNode metadata = formatNode(typeDefinitionNode.metadata().get(), 1, 0);
            typeDefinitionNode = typeDefinitionNode.modify().withMetadata(metadata).apply();
        }

        if (typeDefinitionNode.visibilityQualifier().isPresent()) {
            Token visibilityQualifier = formatToken(typeDefinitionNode.visibilityQualifier().get(), 1, 0);
            typeDefinitionNode = typeDefinitionNode.modify().withVisibilityQualifier(visibilityQualifier).apply();
        }

        Token typeKeyword = formatToken(typeDefinitionNode.typeKeyword(), 1, 0);
        Token typeName = formatToken(typeDefinitionNode.typeName(), 1, 0);
        Node typeDescriptor = formatNode(typeDefinitionNode.typeDescriptor(), 1, 0);
        Token semicolonToken = formatToken(typeDefinitionNode.semicolonToken(), this.trailingWS, this.trailingNL);

        return typeDefinitionNode.modify()
                .withTypeKeyword(typeKeyword)
                .withTypeName(typeName)
                .withTypeDescriptor(typeDescriptor)
                .withSemicolonToken(semicolonToken)
                .apply();
    }

    @Override
    public SingletonTypeDescriptorNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {
        ExpressionNode simpleContExprNode = formatNode(singletonTypeDescriptorNode.simpleContExprNode(), 1, 0);
        return singletonTypeDescriptorNode.modify()
                .withSimpleContExprNode(simpleContExprNode)
                .apply();
    }

    @Override
    public WhileStatementNode transform(WhileStatementNode whileStatementNode) {
        boolean hasOnFailClause = whileStatementNode.onFailClause().isPresent();
        Token whileKeyword = formatToken(whileStatementNode.whileKeyword(), 1, 0);
        ExpressionNode condition = formatNode(whileStatementNode.condition(), 1, 0);
        BlockStatementNode whileBody;

        // When an on-fail clause follows, the body cannot consume the trailing minutiae.
        if (hasOnFailClause) {
            whileBody = formatNode(whileStatementNode.whileBody(), 1, 0);
            OnFailClauseNode onFailClause =
                    formatNode(whileStatementNode.onFailClause().get(), this.trailingWS, this.trailingNL);
            whileStatementNode = whileStatementNode.modify().withOnFailClause(onFailClause).apply();
        } else {
            whileBody = formatNode(whileStatementNode.whileBody(), this.trailingWS, this.trailingNL);
        }

        return whileStatementNode.modify()
                .withWhileKeyword(whileKeyword)
                .withCondition(condition)
                .withWhileBody(whileBody)
                .apply();
    }

    @Override
    public BracedExpressionNode transform(BracedExpressionNode bracedExpressionNode) {
        Token openParen = formatToken(bracedExpressionNode.openParen(), 0, 0);
        ExpressionNode expression = formatNode(bracedExpressionNode.expression(), 0, 0);
        Token closeParen = formatToken(bracedExpressionNode.closeParen(), this.trailingWS, this.trailingNL);
        return bracedExpressionNode.modify()
                .withOpenParen(openParen)
                .withExpression(expression)
                .withCloseParen(closeParen)
                .apply();
    }

    @Override
    public AssignmentStatementNode transform(AssignmentStatementNode assignmentStatementNode) {
        Node varRef = formatNode(assignmentStatementNode.varRef(), 1, 0);
        Token equalsToken = formatToken(assignmentStatementNode.equalsToken(), 1, 0);
        ExpressionNode expression = formatNode(assignmentStatementNode.expression(), 0, 0);
        Token semicolonToken = formatToken(assignmentStatementNode.semicolonToken(), this.trailingWS, this.trailingNL);
        return assignmentStatementNode.modify()
                .withVarRef(varRef)
                .withEqualsToken(equalsToken)
                .withExpression(expression)
                .withSemicolonToken(semicolonToken)
                .apply();
    }

    @Override
    public CompoundAssignmentStatementNode transform(CompoundAssignmentStatementNode compoundAssignmentStatementNode) {
        ExpressionNode lhsExpression = formatNode(compoundAssignmentStatementNode.lhsExpression(), 1, 0);
        Token binaryOperator = formatToken(compoundAssignmentStatementNode.binaryOperator(), 0, 0);
        Token equalsToken = formatToken(compoundAssignmentStatementNode.equalsToken(), 1, 0);
        ExpressionNode rhsExpression = formatNode(compoundAssignmentStatementNode.rhsExpression(), 0, 0);
        Token semicolonToken =
                formatToken(compoundAssignmentStatementNode.semicolonToken(), this.trailingWS, this.trailingNL);
        return compoundAssignmentStatementNode.modify()
                .withLhsExpression(lhsExpression)
                .withBinaryOperator(binaryOperator)
                .withEqualsToken(equalsToken)
                .withRhsExpression(rhsExpression)
                .withSemicolonToken(semicolonToken)
                .apply();
    }

    @Override
    public DoStatementNode transform(DoStatementNode doStatementNode) {
        boolean hasOnFailClause = doStatementNode.onFailClause().isPresent();
        Token doKeyword = formatToken(doStatementNode.doKeyword(), 1, 0);
        BlockStatementNode blockStatement;

        // When an on-fail clause follows, the block cannot consume the trailing minutiae.
        if (hasOnFailClause) {
            blockStatement = formatNode(doStatementNode.blockStatement(), 1, 0);
            OnFailClauseNode onFailClause =
                    formatNode(doStatementNode.onFailClause().get(), this.trailingWS, this.trailingNL);
            doStatementNode = doStatementNode.modify().withOnFailClause(onFailClause).apply();
        } else {
            blockStatement = formatNode(doStatementNode.blockStatement(), this.trailingWS, this.trailingNL);
        }

        return doStatementNode.modify()
                .withDoKeyword(doKeyword)
                .withBlockStatement(blockStatement)
                .apply();
    }

    @Override
    public ForEachStatementNode transform(ForEachStatementNode forEachStatementNode) {
        boolean hasOnFailClause = forEachStatementNode.onFailClause().isPresent();
        Token forEachKeyword = formatToken(forEachStatementNode.forEachKeyword(), 1, 0);
        TypedBindingPatternNode typedBindingPattern = formatNode(forEachStatementNode.typedBindingPattern(), 1, 0);
        Token inKeyword = formatToken(forEachStatementNode.inKeyword(), 1, 0);
        Node actionOrExpressionNode = formatNode(forEachStatementNode.actionOrExpressionNode(), 1, 0);
        StatementNode blockStatement;

        // When an on-fail clause follows, the block cannot consume the trailing minutiae.
        if (hasOnFailClause) {
            blockStatement = formatNode(forEachStatementNode.blockStatement(), 1, 0);
            OnFailClauseNode onFailClause =
                    formatNode(forEachStatementNode.onFailClause().get(), this.trailingWS, this.trailingNL);
            forEachStatementNode = forEachStatementNode.modify().withOnFailClause(onFailClause).apply();
        } else {
            blockStatement = formatNode(forEachStatementNode.blockStatement(), this.trailingWS, this.trailingNL);
        }

        return forEachStatementNode.modify()
                .withForEachKeyword(forEachKeyword)
                .withTypedBindingPattern(typedBindingPattern)
                .withInKeyword(inKeyword)
.withActionOrExpressionNode(actionOrExpressionNode) .withBlockStatement(blockStatement) .apply(); } @Override public BinaryExpressionNode transform(BinaryExpressionNode binaryExpressionNode) { Node lhsExpr = formatNode(binaryExpressionNode.lhsExpr(), 1, 0); Token operator = formatToken(binaryExpressionNode.operator(), 1, 0); Node rhsExpr = formatNode(binaryExpressionNode.rhsExpr(), this.trailingWS, this.trailingNL); return binaryExpressionNode.modify() .withLhsExpr(lhsExpr) .withOperator(operator) .withRhsExpr(rhsExpr) .apply(); } @Override @Override public ReturnStatementNode transform(ReturnStatementNode returnStatementNode) { Token returnKeyword = formatToken(returnStatementNode.returnKeyword(), returnStatementNode.expression().isPresent() ? 1 : 0, 0); if (returnStatementNode.expression().isPresent()) { ExpressionNode expressionNode = formatNode(returnStatementNode.expression().get(), 0, 0); returnStatementNode = returnStatementNode.modify().withExpression(expressionNode).apply(); } Token semicolonToken = formatToken(returnStatementNode.semicolonToken(), this.trailingWS, this.trailingNL); return returnStatementNode.modify() .withReturnKeyword(returnKeyword) .withSemicolonToken(semicolonToken) .apply(); } @Override public FunctionCallExpressionNode transform(FunctionCallExpressionNode functionCallExpressionNode) { NameReferenceNode functionName = formatNode(functionCallExpressionNode.functionName(), 0, 0); Token functionCallOpenPara = formatToken(functionCallExpressionNode.openParenToken(), 0, 0); SeparatedNodeList<FunctionArgumentNode> arguments = formatSeparatedNodeList(functionCallExpressionNode .arguments(), 0, 0, 0, 0); Token functionCallClosePara = formatToken(functionCallExpressionNode.closeParenToken(), this.trailingWS, this.trailingNL); return functionCallExpressionNode.modify() .withFunctionName(functionName) .withOpenParenToken(functionCallOpenPara) .withCloseParenToken(functionCallClosePara) .withArguments(arguments) .apply(); } @Override public 
UnionTypeDescriptorNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) { TypeDescriptorNode leftTypeDesc = formatNode(unionTypeDescriptorNode.leftTypeDesc(), 0, 0); Token pipeToken = formatToken(unionTypeDescriptorNode.pipeToken(), 0, 0); TypeDescriptorNode rightTypeDesc = formatNode(unionTypeDescriptorNode.rightTypeDesc(), this.trailingWS, this.trailingNL); return unionTypeDescriptorNode.modify() .withLeftTypeDesc(leftTypeDesc) .withPipeToken(pipeToken) .withRightTypeDesc(rightTypeDesc) .apply(); } @Override public NilTypeDescriptorNode transform(NilTypeDescriptorNode nilTypeDescriptorNode) { Token openParenToken = formatToken(nilTypeDescriptorNode.openParenToken(), 0, 0); Token closeParenToken = formatToken(nilTypeDescriptorNode.closeParenToken(), this.trailingWS, this.trailingNL); return nilTypeDescriptorNode.modify() .withOpenParenToken(openParenToken) .withCloseParenToken(closeParenToken) .apply(); } @Override public IdentifierToken transform(IdentifierToken identifier) { return formatToken(identifier, this.trailingWS, this.trailingNL); } @Override public Token transform(Token token) { return formatToken(token, this.trailingWS, this.trailingNL); } /** * Format a node. * * @param <T> Type of the node * @param node Node to be formatted * @param trailingWS Number of single-length spaces to be added after the node * @param trailingNL Number of newlines to be added after the node * @return Formatted node */ @SuppressWarnings("unchecked") private <T extends Node> T formatNode(T node, int trailingWS, int trailingNL) { if (node == null) { return node; } if (!isInLineRange(node, lineRange)) { checkForNewline(node); return node; } int prevTrailingNL = this.trailingNL; int prevTrailingWS = this.trailingWS; this.trailingNL = trailingNL; this.trailingWS = trailingWS; node = (T) node.apply(this); if (this.lineLength > COLUMN_LIMIT) { node = wrap(node); } this.trailingNL = prevTrailingNL; this.trailingWS = prevTrailingWS; return node; } /** * Wrap the node. 
This is equivalent to adding a newline before the node and * re-formatting the node. Wrapped content will start from the current level * of indentation. * * @param <T> Node type * @param node Node to be wrapped * @return Wrapped node */ @SuppressWarnings("unchecked") private <T extends Node> T wrap(T node) { this.leadingNL += 1; this.lineLength = 0; this.hasNewline = true; return (T) node.apply(this); } /** * Format a token. * * @param <T> Type of the token * @param token Token to be formatted * @param trailingWS Number of single-length spaces to be added after the token * @param trailingNL Number of newlines to be added after the token * @return Formatted token */ private <T extends Token> T formatToken(T token, int trailingWS, int trailingNL) { if (token == null) { return token; } if (!isInLineRange(token, lineRange)) { checkForNewline(token); return token; } int prevTrailingNL = this.trailingNL; int prevTrailingWS = this.trailingWS; this.trailingNL = trailingNL > 0 ? 1 : 0; this.trailingWS = trailingWS; token = formatTokenInternal(token); this.leadingNL = trailingNL > 0 ? trailingNL - 1 : 0; this.hasNewline = trailingNL > 0; this.trailingNL = prevTrailingNL; this.trailingWS = prevTrailingWS; return token; } private <T extends Node> void checkForNewline(T node) { for (Minutiae mintiae : node.trailingMinutiae()) { if (mintiae.kind() == SyntaxKind.END_OF_LINE_MINUTIAE) { this.hasNewline = true; return; } } } /** * Format a list of nodes. 
* * @param <T> Type of the list item * @param nodeList Node list to be formatted * @param itemTrailingWS Number of single-length spaces to be added after each item of the list * @param itemTrailingNL Number of newlines to be added after each item of the list * @param listTrailingWS Number of single-length spaces to be added after the last item of the list * @param listTrailingNL Number of newlines to be added after the last item of the list * @return Formatted node list */ @SuppressWarnings("unchecked") protected <T extends Node> NodeList<T> formatNodeList(NodeList<T> nodeList, int itemTrailingWS, int itemTrailingNL, int listTrailingWS, int listTrailingNL) { if (nodeList.isEmpty()) { return nodeList; } boolean nodeModified = false; int size = nodeList.size(); Node[] newNodes = new Node[size]; for (int index = 0; index < size; index++) { T oldNode = nodeList.get(index); T newNode; if (index == size - 1) { newNode = formatNode(oldNode, listTrailingWS, listTrailingNL); } else { newNode = formatNode(oldNode, itemTrailingWS, itemTrailingNL); } if (oldNode != newNode) { nodeModified = true; } newNodes[index] = newNode; } if (!nodeModified) { return nodeList; } return (NodeList<T>) NodeFactory.createNodeList(newNodes); } /** * Format a delimited list of nodes. This method assumes the delimiters are followed by a * single whitespace character only. 
* * @param <T> Type of the list item * @param nodeList Node list to be formatted * @param itemTrailingWS Number of single-length spaces to be added after each item in the list * @param itemTrailingNL Number of newlines to be added after each item in the list * @param listTrailingWS Number of single-length spaces to be added after the last item in the list * @param listTrailingNL Number of newlines to be added after the last item in the list * @return Formatted node list */ protected <T extends Node> SeparatedNodeList<T> formatSeparatedNodeList(SeparatedNodeList<T> nodeList, int itemTrailingWS, int itemTrailingNL, int listTrailingWS, int listTrailingNL) { return formatSeparatedNodeList(nodeList, itemTrailingWS, itemTrailingNL, 1, 0, listTrailingWS, listTrailingNL); } /** * Format a delimited list of nodes. * * @param <T> Type of the list item * @param nodeList Node list to be formatted * @param itemTrailingWS Number of single-length spaces to be added after each item in the list * @param itemTrailingNL Number of newlines to be added after each item in the list * @param separatorTrailingWS Number of single-length spaces to be added after each separator in the list * @param separatorTrailingNL Number of newlines to be added after each each separator in the list * @param listTrailingWS Number of single-length spaces to be added after the last item in the list * @param listTrailingNL Number of newlines to be added after the last item in the list * @return Formatted node list */ @SuppressWarnings("unchecked") protected <T extends Node> SeparatedNodeList<T> formatSeparatedNodeList(SeparatedNodeList<T> nodeList, int itemTrailingWS, int itemTrailingNL, int separatorTrailingWS, int separatorTrailingNL, int listTrailingWS, int listTrailingNL) { if (nodeList.isEmpty()) { return nodeList; } boolean nodeModified = false; int size = nodeList.size(); Node[] newNodes = new Node[size * 2 - 1]; for (int index = 0; index < size; index++) { T oldNode = nodeList.get(index); T newNode; 
if (index == size - 1) { newNode = formatNode(oldNode, listTrailingWS, listTrailingNL); } else { newNode = formatNode(oldNode, itemTrailingWS, itemTrailingNL); } newNodes[2 * index] = newNode; if (oldNode != newNode) { nodeModified = true; } if (index == nodeList.size() - 1) { break; } Token oldSeperator = nodeList.getSeparator(index); Token newSeperator = formatToken(oldSeperator, separatorTrailingWS, separatorTrailingNL); newNodes[(2 * index) + 1] = newSeperator; if (oldSeperator != newSeperator) { nodeModified = true; } } if (!nodeModified) { return nodeList; } return (SeparatedNodeList<T>) NodeFactory.createSeparatedNodeList(newNodes); } /** * Format a token. * * @param <T> Type of the token * @param token Token to be formatted * @return Formatted token */ @SuppressWarnings("unchecked") private <T extends Token> T formatTokenInternal(T token) { MinutiaeList newLeadingMinutiaeList = getLeadingMinutiae(); this.lineLength += token.text().length(); MinutiaeList newTrailingMinutiaeList = getTrailingMinutiae(); return (T) token.modify(newLeadingMinutiaeList, newTrailingMinutiaeList); } /** * Get leading minutiae. * * @return Leading minutiae list */ private MinutiaeList getLeadingMinutiae() { List<Minutiae> leadingMinutiae = new ArrayList<>(); if (this.hasNewline) { for (int i = 0; i < this.leadingNL; i++) { leadingMinutiae.add(getNewline()); } if (this.indentation > 0) { String wsContent = getWSContent(this.indentation); leadingMinutiae.add(NodeFactory.createWhitespaceMinutiae(wsContent)); } } MinutiaeList newLeadingMinutiaeList = NodeFactory.createMinutiaeList(leadingMinutiae); return newLeadingMinutiaeList; } /** * Get trailing minutiae. 
* * @return Trailing minutiae list */ private MinutiaeList getTrailingMinutiae() { List<Minutiae> trailingMinutiae = new ArrayList<>(); if (this.trailingWS > 0) { String wsContent = getWSContent(this.trailingWS); trailingMinutiae.add(NodeFactory.createWhitespaceMinutiae(wsContent)); } if (this.trailingNL > 0) { trailingMinutiae.add(getNewline()); } MinutiaeList newTrailingMinutiaeList = NodeFactory.createMinutiaeList(trailingMinutiae); return newTrailingMinutiaeList; } private Minutiae getNewline() { this.lineLength = 0; return NodeFactory.createEndOfLineMinutiae(FormatterUtils.NEWLINE_SYMBOL); } /** * Indent the code by the 4-whitespace characters. */ private void indent() { this.indentation += DEFAULT_INDENTATION; } /** * Undo the indentation of the code by the 4-whitespace characters. */ private void unindent() { if (this.indentation < DEFAULT_INDENTATION) { this.indentation = 0; return; } this.indentation -= DEFAULT_INDENTATION; } /** * Set the indentation for the code to follow. * * @param value Number of characters to set the indentation from the start of the line. */ private void setIndentation(int value) { this.indentation = value; } private String getWSContent(int count) { this.lineLength += count; StringBuilder sb = new StringBuilder(); for (int i = 0; i < count; i++) { sb.append(" "); } return sb.toString(); } private boolean shouldExpand(RecordTypeDescriptorNode recordTypeDesc) { int fieldCount = recordTypeDesc.fields().size(); fieldCount += recordTypeDesc.recordRestDescriptor().isPresent() ? 1 : 0; if (fieldCount <= 1) { return false; } if (fieldCount > 3) { return true; } for (Node field : recordTypeDesc.fields()) { TextRange textRange = field.textRange(); if ((textRange.endOffset() - textRange.startOffset()) > 15) { return true; } } return false; } }
This should set to 'TLS'. It's effectively setting max TLS version, disabling TLSv1.3 even when it's listed in `enableProtocols`. See table in https://bugs.openjdk.java.net/browse/JDK-8202625.
private String createTlsQuorumConfig(ZookeeperServerConfig config) { StringBuilder sb = new StringBuilder(); sb.append("ssl.quorum.hostnameVerification=false\n"); sb.append("ssl.quorum.clientAuth=NEED\n"); sb.append("ssl.quorum.ciphersuites=").append(String.join(",", new TreeSet<>(TlsContext.ALLOWED_CIPHER_SUITES))).append("\n"); sb.append("ssl.quorum.enabledProtocols=").append(String.join(",", new TreeSet<>(TlsContext.ALLOWED_PROTOCOLS))).append("\n"); sb.append("ssl.quorum.protocol=TLSv1.2\n"); String tlsSetting = config.tlsForQuorumCommunication().name(); switch (tlsSetting) { case "OFF": sb.append("sslQuorum=false\n"); sb.append("portUnification=false\n"); break; case "PORT_UNIFICATION": sb.append("sslQuorum=false\n"); sb.append("portUnification=true\n"); break; case "TLS_WITH_PORT_UNIFICATION": sb.append("sslQuorum=true\n"); sb.append("portUnification=true\n"); break; case "TLS_ONLY": sb.append("sslQuorum=true\n"); sb.append("portUnification=false\n"); break; default: throw new IllegalArgumentException("Unknown value of config setting tlsForQuorumCommunication: " + tlsSetting); } return sb.toString(); }
sb.append("ssl.quorum.protocol=TLSv1.2\n");
private String createTlsQuorumConfig(ZookeeperServerConfig config) { StringBuilder sb = new StringBuilder(); sb.append("ssl.quorum.hostnameVerification=false\n"); sb.append("ssl.quorum.clientAuth=NEED\n"); sb.append("ssl.quorum.ciphersuites=").append(String.join(",", new TreeSet<>(TlsContext.ALLOWED_CIPHER_SUITES))).append("\n"); sb.append("ssl.quorum.enabledProtocols=").append(String.join(",", new TreeSet<>(TlsContext.ALLOWED_PROTOCOLS))).append("\n"); sb.append("ssl.quorum.protocol=TLS\n"); String tlsSetting = config.tlsForQuorumCommunication().name(); switch (tlsSetting) { case "OFF": sb.append("sslQuorum=false\n"); sb.append("portUnification=false\n"); break; case "PORT_UNIFICATION": sb.append("sslQuorum=false\n"); sb.append("portUnification=true\n"); break; case "TLS_WITH_PORT_UNIFICATION": sb.append("sslQuorum=true\n"); sb.append("portUnification=true\n"); break; case "TLS_ONLY": sb.append("sslQuorum=true\n"); sb.append("portUnification=false\n"); break; default: throw new IllegalArgumentException("Unknown value of config setting tlsForQuorumCommunication: " + tlsSetting); } return sb.toString(); }
class VespaZooKeeperServerImpl extends AbstractComponent implements Runnable, VespaZooKeeperServer { private static final java.util.logging.Logger log = java.util.logging.Logger.getLogger(VespaZooKeeperServerImpl.class.getName()); private static final String ZOOKEEPER_JMX_LOG4J_DISABLE = "zookeeper.jmx.log4j.disable"; static final String ZOOKEEPER_JUTE_MAX_BUFFER = "jute.maxbuffer"; private final Thread zkServerThread; private final ZookeeperServerConfig zookeeperServerConfig; VespaZooKeeperServerImpl(ZookeeperServerConfig zookeeperServerConfig, boolean startServer) { this.zookeeperServerConfig = zookeeperServerConfig; System.setProperty("zookeeper.jmx.log4j.disable", "true"); System.setProperty("zookeeper.snapshot.trust.empty", Boolean.valueOf(zookeeperServerConfig.trustEmptySnapshot()).toString()); System.setProperty(ZOOKEEPER_JUTE_MAX_BUFFER, Integer.valueOf(zookeeperServerConfig.juteMaxBuffer()).toString()); writeConfigToDisk(zookeeperServerConfig); zkServerThread = new Thread(this, "zookeeper server"); if (startServer) { zkServerThread.start(); } } @Inject public VespaZooKeeperServerImpl(ZookeeperServerConfig zookeeperServerConfig) { this(zookeeperServerConfig, true); } private void writeConfigToDisk(ZookeeperServerConfig config) { String configFilePath = getDefaults().underVespaHome(config.zooKeeperConfigFile()); new File(configFilePath).getParentFile().mkdirs(); try (FileWriter writer = new FileWriter(configFilePath)) { writer.write(transformConfigToString(config)); writeMyIdFile(config); } catch (IOException e) { throw new RuntimeException("Error writing zookeeper config", e); } } private String transformConfigToString(ZookeeperServerConfig config) { StringBuilder sb = new StringBuilder(); sb.append("tickTime=").append(config.tickTime()).append("\n"); sb.append("initLimit=").append(config.initLimit()).append("\n"); sb.append("syncLimit=").append(config.syncLimit()).append("\n"); 
sb.append("maxClientCnxns=").append(config.maxClientConnections()).append("\n"); sb.append("snapCount=").append(config.snapshotCount()).append("\n"); sb.append("dataDir=").append(getDefaults().underVespaHome(config.dataDir())).append("\n"); sb.append("clientPort=").append(config.clientPort()).append("\n"); sb.append("autopurge.purgeInterval=").append(config.autopurge().purgeInterval()).append("\n"); sb.append("autopurge.snapRetainCount=").append(config.autopurge().snapRetainCount()).append("\n"); sb.append("4lw.commands.whitelist=conf,cons,crst,dirs,dump,envi,mntr,ruok,srst,srvr,stat,wchs").append("\n"); sb.append("admin.enableServer=false").append("\n"); sb.append("serverCnxnFactory=org.apache.zookeeper.server.NettyServerCnxnFactory").append("\n"); ensureThisServerIsRepresented(config.myid(), config.server()); config.server().forEach(server -> addServerToCfg(sb, server)); sb.append(createTlsQuorumConfig(config)); return sb.toString(); } private void writeMyIdFile(ZookeeperServerConfig config) throws IOException { if (config.server().size() > 1) { try (FileWriter writer = new FileWriter(getDefaults().underVespaHome(config.myidFile()))) { writer.write(config.myid() + "\n"); } } } private void ensureThisServerIsRepresented(int myid, List<ZookeeperServerConfig.Server> servers) { boolean found = false; for (ZookeeperServerConfig.Server server : servers) { if (myid == server.id()) { found = true; break; } } if (!found) { throw new RuntimeException("No id in zookeeper server list that corresponds to my id(" + myid + ")"); } } private void addServerToCfg(StringBuilder sb, ZookeeperServerConfig.Server server) { sb.append("server.").append(server.id()).append("=").append(server.hostname()).append(":").append(server.quorumPort()).append(":").append(server.electionPort()).append("\n"); } private void shutdown() { zkServerThread.interrupt(); try { zkServerThread.join(); } catch (InterruptedException e) { log.log(LogLevel.WARNING, "Error joining server thread on shutdown", e); 
} } @Override public void run() { System.setProperty(ZOOKEEPER_JMX_LOG4J_DISABLE, "true"); String[] args = new String[]{getDefaults().underVespaHome(zookeeperServerConfig.zooKeeperConfigFile())}; log.log(LogLevel.INFO, "Starting ZooKeeper server with config file " + args[0] + ". Trying to establish ZooKeeper quorum (members: " + zookeeperServerHostnames(zookeeperServerConfig) + ")"); org.apache.zookeeper.server.quorum.QuorumPeerMain.main(args); } @Override public void deconstruct() { shutdown(); super.deconstruct(); } private static Set<String> zookeeperServerHostnames(ZookeeperServerConfig zookeeperServerConfig) { return zookeeperServerConfig.server().stream().map(ZookeeperServerConfig.Server::hostname).collect(Collectors.toSet()); } }
class VespaZooKeeperServerImpl extends AbstractComponent implements Runnable, VespaZooKeeperServer { private static final java.util.logging.Logger log = java.util.logging.Logger.getLogger(VespaZooKeeperServerImpl.class.getName()); private static final String ZOOKEEPER_JMX_LOG4J_DISABLE = "zookeeper.jmx.log4j.disable"; static final String ZOOKEEPER_JUTE_MAX_BUFFER = "jute.maxbuffer"; private final Thread zkServerThread; private final ZookeeperServerConfig zookeeperServerConfig; VespaZooKeeperServerImpl(ZookeeperServerConfig zookeeperServerConfig, boolean startServer) { this.zookeeperServerConfig = zookeeperServerConfig; System.setProperty("zookeeper.jmx.log4j.disable", "true"); System.setProperty("zookeeper.snapshot.trust.empty", Boolean.valueOf(zookeeperServerConfig.trustEmptySnapshot()).toString()); System.setProperty(ZOOKEEPER_JUTE_MAX_BUFFER, Integer.valueOf(zookeeperServerConfig.juteMaxBuffer()).toString()); writeConfigToDisk(zookeeperServerConfig); zkServerThread = new Thread(this, "zookeeper server"); if (startServer) { zkServerThread.start(); } } @Inject public VespaZooKeeperServerImpl(ZookeeperServerConfig zookeeperServerConfig) { this(zookeeperServerConfig, true); } private void writeConfigToDisk(ZookeeperServerConfig config) { String configFilePath = getDefaults().underVespaHome(config.zooKeeperConfigFile()); new File(configFilePath).getParentFile().mkdirs(); try (FileWriter writer = new FileWriter(configFilePath)) { writer.write(transformConfigToString(config)); writeMyIdFile(config); } catch (IOException e) { throw new RuntimeException("Error writing zookeeper config", e); } } private String transformConfigToString(ZookeeperServerConfig config) { StringBuilder sb = new StringBuilder(); sb.append("tickTime=").append(config.tickTime()).append("\n"); sb.append("initLimit=").append(config.initLimit()).append("\n"); sb.append("syncLimit=").append(config.syncLimit()).append("\n"); 
sb.append("maxClientCnxns=").append(config.maxClientConnections()).append("\n"); sb.append("snapCount=").append(config.snapshotCount()).append("\n"); sb.append("dataDir=").append(getDefaults().underVespaHome(config.dataDir())).append("\n"); sb.append("clientPort=").append(config.clientPort()).append("\n"); sb.append("autopurge.purgeInterval=").append(config.autopurge().purgeInterval()).append("\n"); sb.append("autopurge.snapRetainCount=").append(config.autopurge().snapRetainCount()).append("\n"); sb.append("4lw.commands.whitelist=conf,cons,crst,dirs,dump,envi,mntr,ruok,srst,srvr,stat,wchs").append("\n"); sb.append("admin.enableServer=false").append("\n"); sb.append("serverCnxnFactory=org.apache.zookeeper.server.NettyServerCnxnFactory").append("\n"); ensureThisServerIsRepresented(config.myid(), config.server()); config.server().forEach(server -> addServerToCfg(sb, server)); sb.append(createTlsQuorumConfig(config)); return sb.toString(); } private void writeMyIdFile(ZookeeperServerConfig config) throws IOException { if (config.server().size() > 1) { try (FileWriter writer = new FileWriter(getDefaults().underVespaHome(config.myidFile()))) { writer.write(config.myid() + "\n"); } } } private void ensureThisServerIsRepresented(int myid, List<ZookeeperServerConfig.Server> servers) { boolean found = false; for (ZookeeperServerConfig.Server server : servers) { if (myid == server.id()) { found = true; break; } } if (!found) { throw new RuntimeException("No id in zookeeper server list that corresponds to my id(" + myid + ")"); } } private void addServerToCfg(StringBuilder sb, ZookeeperServerConfig.Server server) { sb.append("server.").append(server.id()).append("=").append(server.hostname()).append(":").append(server.quorumPort()).append(":").append(server.electionPort()).append("\n"); } private void shutdown() { zkServerThread.interrupt(); try { zkServerThread.join(); } catch (InterruptedException e) { log.log(LogLevel.WARNING, "Error joining server thread on shutdown", e); 
} } @Override public void run() { System.setProperty(ZOOKEEPER_JMX_LOG4J_DISABLE, "true"); String[] args = new String[]{getDefaults().underVespaHome(zookeeperServerConfig.zooKeeperConfigFile())}; log.log(LogLevel.INFO, "Starting ZooKeeper server with config file " + args[0] + ". Trying to establish ZooKeeper quorum (members: " + zookeeperServerHostnames(zookeeperServerConfig) + ")"); org.apache.zookeeper.server.quorum.QuorumPeerMain.main(args); } @Override public void deconstruct() { shutdown(); super.deconstruct(); } private static Set<String> zookeeperServerHostnames(ZookeeperServerConfig zookeeperServerConfig) { return zookeeperServerConfig.server().stream().map(ZookeeperServerConfig.Server::hostname).collect(Collectors.toSet()); } }
```suggestion tomlKeyEntryNode.location()), tableArrayChild.location(), tableArrayChild.children()); ```
private void addChildParentArrayToParent(TomlTableNode rootTable, TomlTableArrayNode tableArrayChild) { TomlTableNode parentTable = getParentTable(rootTable, tableArrayChild); List<TomlKeyEntryNode> keys = tableArrayChild.key().keys(); TomlKeyEntryNode tomlKeyEntryNode = keys.get(keys.size() - 1); List<TomlKeyEntryNode> list = new ArrayList<>(); list.add(tomlKeyEntryNode); TomlTableArrayNode newTomlTableArray = new TomlTableArrayNode(new TomlKeyNode(list, getLocationOfKeyEntryList(list)), tableArrayChild.location(), tableArrayChild.children()); TopLevelNode topLevelNode = parentTable.entries().get(newTomlTableArray.key().name()); if (topLevelNode == null) { addChildToTableAST(parentTable, newTomlTableArray); } else { if (topLevelNode instanceof TomlTableArrayNode) { ((TomlTableArrayNode) topLevelNode).addChild(newTomlTableArray.children().get(0)); } else if (topLevelNode instanceof TomlKeyValueNode) { TomlDiagnostic nodeExists = dlog.error(newTomlTableArray.location(), DiagnosticErrorCode.ERROR_EXISTING_NODE, newTomlTableArray.key().name()); parentTable.addDiagnostic(nodeExists); } else { throw new UnsupportedOperationException(); } } }
getLocationOfKeyEntryList(list)), tableArrayChild.location(), tableArrayChild.children());
private void addChildParentArrayToParent(TomlTableNode rootTable, TomlTableArrayNode tableArrayChild) { TomlTableNode parentTable = getParentTable(rootTable, tableArrayChild); List<TomlKeyEntryNode> keys = tableArrayChild.key().keys(); TomlKeyEntryNode tomlKeyEntryNode = keys.get(keys.size() - 1); List<TomlKeyEntryNode> list = new ArrayList<>(); list.add(tomlKeyEntryNode); TomlTableArrayNode newTomlTableArray = new TomlTableArrayNode(new TomlKeyNode(list, tomlKeyEntryNode.location()), tableArrayChild.location(), tableArrayChild.children()); TopLevelNode topLevelNode = parentTable.entries().get(newTomlTableArray.key().name()); if (topLevelNode == null) { addChildToTableAST(parentTable, newTomlTableArray); } else { if (topLevelNode instanceof TomlTableArrayNode) { ((TomlTableArrayNode) topLevelNode).addChild(newTomlTableArray.children().get(0)); } else if (topLevelNode instanceof TomlKeyValueNode) { TomlDiagnostic nodeExists = dlog.error(newTomlTableArray.location(), DiagnosticErrorCode.ERROR_EXISTING_NODE, newTomlTableArray.key().name()); parentTable.addDiagnostic(nodeExists); } else { throw new UnsupportedOperationException(); } } }
class TomlTransformer extends NodeTransformer<TomlNode> { private DiagnosticLog dlog; public TomlTransformer() { this.dlog = DiagnosticLog.getInstance(); } @Override public TomlNode transform(DocumentNode documentNode) { TomlTableNode rootTable = createRootTable(documentNode); NodeList<DocumentMemberDeclarationNode> members = documentNode.members(); for (DocumentMemberDeclarationNode rootNode : members) { TomlNode transformedChild = rootNode.apply(this); addChildNodeToParent(rootTable, transformedChild); } return rootTable; } private TomlTableNode createRootTable(DocumentNode modulePartNode) { TomlNodeLocation location = getPosition(modulePartNode); TomlKeyEntryNode root = new TomlKeyEntryNode(new TomlUnquotedKeyNode("__root", location)); List<TomlKeyEntryNode> tomlKeyEntryNodes = Collections.singletonList(root); TomlKeyNode tomlKeyNode = new TomlKeyNode(tomlKeyEntryNodes, location); return new TomlTableNode(tomlKeyNode, location); } private void addChildNodeToParent(TomlTableNode rootTable, TomlNode transformedChild) { if (transformedChild.kind() == TomlType.TABLE) { TomlTableNode tableChild = (TomlTableNode) transformedChild; addChildTableToParent(rootTable, tableChild); } else if (transformedChild.kind() == TomlType.TABLE_ARRAY) { TomlTableArrayNode transformedArray = (TomlTableArrayNode) transformedChild; addChildParentArrayToParent(rootTable, transformedArray); } else if (transformedChild.kind() == TomlType.KEY_VALUE) { TomlKeyValueNode transformedKeyValuePair = (TomlKeyValueNode) transformedChild; addChildKeyValueToParent(rootTable, transformedKeyValuePair); } else { throw new UnsupportedOperationException(); } } private void addChildKeyValueToParent(TomlTableNode rootTable, TomlKeyValueNode transformedKeyValuePair) { List<TomlKeyEntryNode> keys = transformedKeyValuePair.key().keys(); List<String> parentTables = new ArrayList<>(); for (int i = 0; i < keys.size() - 1; i++) { parentTables.add(keys.get(i).name().toString()); } TomlTableNode parentTable = 
rootTable; for (int i = 0; i < parentTables.size(); i++) { String newTable = parentTables.get(i); TopLevelNode dottedParentNode = parentTable.entries().get(newTable); if (dottedParentNode != null) { if (dottedParentNode.kind() == TomlType.TABLE) { parentTable = (TomlTableNode) dottedParentNode; } else { TomlDiagnostic nodeExists = dlog.error(dottedParentNode.location(), DiagnosticErrorCode.ERROR_EXISTING_NODE, newTable); parentTable.addDiagnostic(nodeExists); } } else { TomlKeyEntryNode tomlKeyEntryNode = keys.get(i); parentTable = createDottedKeyParentTable(parentTable, tomlKeyEntryNode, transformedKeyValuePair.location()); } } if (isDottedKey(keys)) { List<TomlKeyEntryNode> list = new ArrayList<>(); TomlKeyEntryNode childKeyEntry = keys.get(keys.size() - 1); list.add(childKeyEntry); TomlKeyNode newKey = new TomlKeyNode(list, childKeyEntry.location()); transformedKeyValuePair = new TomlKeyValueNode(newKey, transformedKeyValuePair.value(), transformedKeyValuePair.location()); } addChildToTableAST(parentTable, transformedKeyValuePair); } private void addChildToTableAST(TomlTableNode parentTable, TopLevelNode value) { Map<String, TopLevelNode> entries = parentTable.entries(); String key = value.key().name(); if (entries.containsKey(key)) { TomlDiagnostic nodeExists = dlog.error(value.location(), DiagnosticErrorCode.ERROR_EXISTING_NODE, key); parentTable.addDiagnostic(nodeExists); } else { entries.put(key, value); } } private TomlTableNode createDottedKeyParentTable(TomlTableNode parentTable, TomlKeyEntryNode dottedKey, TomlNodeLocation location) { List<TomlKeyEntryNode> list = new ArrayList<>(); list.add(dottedKey); TomlKeyNode newTableKey = new TomlKeyNode(list, getLocationOfKeyEntryList(list)); TomlTableNode newTomlTableNode = new TomlTableNode(newTableKey, location); addChildToTableAST(parentTable, newTomlTableNode); return newTomlTableNode; } private TomlKeyEntryNode getLastKeyEntry(TopLevelNode childNode) { return 
childNode.key().keys().get(childNode.key().keys().size() - 1); } private TomlTableNode getParentTable(TomlTableNode rootTable, TopLevelNode childNode) { List<String> parentTables = new ArrayList<>(); for (int i = 0; i < (childNode.key().keys().size() - 1); i++) { parentTables.add(childNode.key().keys().get(i).name().toString()); } TomlTableNode parentTable = rootTable; for (int i = 0; i < parentTables.size(); i++) { String parentString = parentTables.get(i); TopLevelNode rootTableNode = parentTable.entries().get(parentString); if (rootTableNode != null) { parentTable = (TomlTableNode) rootTableNode; } else { TomlKeyEntryNode tomlKeyEntryNode = childNode.key().keys().get(i); if (childNode instanceof TomlTableArrayNode) { parentTable = generateTable(parentTable, tomlKeyEntryNode, false, childNode.location()); } else { parentTable = generateTable(parentTable, tomlKeyEntryNode, true, childNode.location()); } } } return parentTable; } private void addChildTableToParent(TomlTableNode rootTable, TomlTableNode tableChild) { TomlTableNode parentTable = getParentTable(rootTable, tableChild); TopLevelNode topLevelNode = parentTable.entries().get(tableChild.key().name()); TomlKeyEntryNode lastKeyEntry = getLastKeyEntry(tableChild); List<TomlKeyEntryNode> entries = new ArrayList<>(); entries.add(lastKeyEntry); TomlTableNode newTableNode = new TomlTableNode(new TomlKeyNode(entries, getLocationOfKeyEntryList(entries)), tableChild.generated(), tableChild.location(), tableChild.entries()); if (topLevelNode == null) { addChildToTableAST(parentTable, newTableNode); } else { if (topLevelNode instanceof TomlTableNode) { TomlTableNode targetTable = (TomlTableNode) topLevelNode; if ((targetTable).generated()) { parentTable.replaceGeneratedTable(newTableNode); } else { TomlDiagnostic nodeExists = dlog.error(tableChild.location(), DiagnosticErrorCode.ERROR_EXISTING_NODE, newTableNode.key().name()); parentTable.addDiagnostic(nodeExists); } } else if (topLevelNode instanceof 
TomlKeyValueNode) { TomlDiagnostic nodeExist = dlog.error(newTableNode.location(), DiagnosticErrorCode.ERROR_EXISTING_NODE, tableChild.key().name()); parentTable.addDiagnostic(nodeExist); } else { throw new UnsupportedOperationException(); } } } private TomlTableNode generateTable(TomlTableNode parentTable, TomlKeyEntryNode parentString, boolean isGenerated , TomlNodeLocation location) { List<TomlKeyEntryNode> list = new ArrayList<>(); list.add(parentString); TomlKeyNode newTableKey = new TomlKeyNode(list, getLocationOfKeyEntryList(list)); TomlTableNode newTomlTableNode = new TomlTableNode(newTableKey, isGenerated, location); addChildToTableAST(parentTable, newTomlTableNode); return newTomlTableNode; } @Override public TomlNode transform(TableNode tableNode) { SeparatedNodeList<ValueNode> identifierList = tableNode.identifier(); TomlKeyNode tomlKeyNode = getTomlKeyNode(identifierList); TomlTableNode tomlTableNode = new TomlTableNode(tomlKeyNode, getPosition(tableNode)); addChildToTable(tableNode, tomlTableNode); return tomlTableNode; } private void addChildToTable(TableNode stTableNode, TomlTableNode astTomlTableNode) { NodeList<KeyValueNode> children = stTableNode.fields(); for (KeyValueNode child : children) { TomlNode transformedChild = child.apply(this); if (transformedChild instanceof TomlKeyValueNode) { TopLevelNode topLevelChild = (TopLevelNode) transformedChild; checkExistingNodes(astTomlTableNode, topLevelChild); addChildKeyValueToParent(astTomlTableNode, (TomlKeyValueNode) transformedChild); } else { throw new UnsupportedOperationException(); } } } private boolean isDottedKey(List<TomlKeyEntryNode> keys) { return keys.size() > 1; } private void checkExistingNodes(TomlTableNode tomlTableNode, TopLevelNode topLevelChild) { Map<String, TopLevelNode> childs = tomlTableNode.entries(); String childName = topLevelChild.key().name(); if (childs.get(childName) != null) { TomlDiagnostic nodeExists = dlog.error(topLevelChild.location(), 
DiagnosticErrorCode.ERROR_EXISTING_NODE, childName); tomlTableNode.addDiagnostic(nodeExists); } } @Override public TomlNode transform(TableArrayNode tableArrayNode) { SeparatedNodeList<ValueNode> identifierList = tableArrayNode.identifier(); TomlKeyNode tomlKeyNode = getTomlKeyNode(identifierList); TomlTableArrayNode tomlTableArrayNode = new TomlTableArrayNode(tomlKeyNode, getPosition(tableArrayNode)); TomlTableNode anonTable = addChildsToTableArray(tableArrayNode); tomlTableArrayNode.addChild(anonTable); return tomlTableArrayNode; } private TomlTableNode addChildsToTableArray(TableArrayNode tableArrayNode) { NodeList<KeyValueNode> children = tableArrayNode.fields(); TomlNodeLocation position = getPosition(tableArrayNode); TomlKeyNode anonKey = getTomlKeyNode(tableArrayNode.identifier()); TomlTableNode anonTable = new TomlTableNode(anonKey, position); for (KeyValueNode child : children) { TomlNode transformedChild = child.apply(this); if (transformedChild instanceof TomlKeyValueNode) { addChildKeyValueToParent(anonTable, (TomlKeyValueNode) transformedChild); } else { throw new UnsupportedOperationException(); } } return anonTable; } @Override public TomlNode transform(KeyValueNode keyValue) { SeparatedNodeList<ValueNode> identifierList = keyValue.identifier(); TomlKeyNode tomlKeyNode = getTomlKeyNode(identifierList); ValueNode value = keyValue.value(); TomlValueNode tomlValue = transformValue(value); return new TomlKeyValueNode(tomlKeyNode, tomlValue, getPosition(keyValue)); } private TomlKeyNode getTomlKeyNode(SeparatedNodeList<ValueNode> identifierList) { List<TomlKeyEntryNode> nodeList = new ArrayList<>(); for (Node node : identifierList) { TomlBasicValueNode transformedNode = (TomlBasicValueNode) node.apply(this); nodeList.add(new TomlKeyEntryNode(transformedNode)); } return new TomlKeyNode(nodeList, getLocationOfKeyEntryList(nodeList)); } private TomlValueNode transformValue(ValueNode valueToken) { return (TomlValueNode) valueToken.apply(this); } @Override 
public TomlNode transform(ArrayNode array) { SeparatedNodeList<ValueNode> values = array.values(); List<TomlValueNode> elements = new ArrayList<>(); for (ValueNode value : values) { TomlValueNode transformedValue = (TomlValueNode) value.apply(this); elements.add(transformedValue); } return new TomlArrayValueNode(elements, getPosition(array)); } @Override protected TomlNode transformSyntaxNode(Node node) { return null; } private TomlNodeLocation getPosition(Node node) { return new TomlNodeLocation(node.lineRange(), node.textRange()); } /** * Transforms ST StringLiteralNode into AST string node. * * @param stringLiteralNode Syntax Tree representative for string nodes * @return AST string value representative node */ @Override public TomlNode transform(StringLiteralNode stringLiteralNode) { boolean multilineString = isMultilineString(stringLiteralNode.startDoubleQuote()); Optional<Token> content = stringLiteralNode.content(); String valueString; if (content.isEmpty()) { valueString = ""; } else { valueString = content.get().text(); if (multilineString) { valueString = removeFirstNewline(valueString); valueString = trimBackslashWhitespaces(valueString); } } String unescapedJava = StringEscapeUtils.unescapeJava(valueString); TomlNodeLocation position = getPosition(stringLiteralNode); return new TomlStringValueNode(unescapedJava, position); } private String trimBackslashWhitespaces(String value) { StringBuilder output = new StringBuilder(); String[] split = value.split("\\\\\\r?\\n"); for (String str : split) { output.append(str.stripLeading()); } return output.toString(); } /** * Transforms ST LiteralStringLiteralNode into AST literal string node. 
* * @param literalStringLiteralNode Syntax Tree representative for literal string nodes * @return AST Literal string value representative node */ @Override public TomlNode transform(LiteralStringLiteralNode literalStringLiteralNode) { boolean multilineString = isMultilineString(literalStringLiteralNode.startSingleQuote()); Optional<Token> content = literalStringLiteralNode.content(); String valueString; if (content.isEmpty()) { valueString = ""; } else { valueString = content.get().text(); if (multilineString) { valueString = removeFirstNewline(valueString); } } TomlNodeLocation position = getPosition(literalStringLiteralNode); return new TomlStringValueNode(valueString, position); } private boolean isMultilineString(Token token) { return token.kind() == SyntaxKind.TRIPLE_SINGLE_QUOTE_TOKEN || token.kind() == SyntaxKind.TRIPLE_DOUBLE_QUOTE_TOKEN; } private String removeFirstNewline(String value) { if (value.startsWith("\n")) { return value.substring(1); } if (value.startsWith("\r\n")) { return value.substring(2); } return value; } /** * Transforms ST NumericLiteralNode into AST numerical node. 
* * @param numericLiteralNode Syntax Tree representative for numerical nodes * @return AST Numerical Value representative node */ @Override public TomlNode transform(NumericLiteralNode numericLiteralNode) { String sign = ""; if (numericLiteralNode.sign().isPresent()) { sign = numericLiteralNode.sign().get().text(); } Token valueToken = numericLiteralNode.value(); return getTomlNode(numericLiteralNode, sign + valueToken.text()); } private TomlNode getTomlNode(NumericLiteralNode numericLiteralNode, String value) { value = value.replace("_", ""); if (numericLiteralNode.kind() == SyntaxKind.DEC_INT) { return new TomlLongValueNode(Long.parseLong(value), getPosition(numericLiteralNode)); } else if (numericLiteralNode.kind() == SyntaxKind.HEX_INT) { value = value.replace("0x", "").replace("0X", ""); return new TomlLongValueNode(Long.parseLong(value, 16), getPosition(numericLiteralNode)); } else if (numericLiteralNode.kind() == SyntaxKind.OCT_INT) { value = value.replace("0o", "").replace("0O", ""); return new TomlLongValueNode(Long.parseLong(value, 8), getPosition(numericLiteralNode)); } else if (numericLiteralNode.kind() == SyntaxKind.BINARY_INT) { value = value.replace("0b", "").replace("0B", ""); return new TomlLongValueNode(Long.parseLong(value, 2), getPosition(numericLiteralNode)); } else { return new TomlDoubleValueNodeNode(Double.parseDouble(value), getPosition(numericLiteralNode)); } } /** * Transforms ST BoolLiteralNode into AST TomlBooleanValue Node. 
* * @param boolLiteralNode Syntax Tree representative for boolean * @return AST Boolean Value representative node */ @Override public TomlNode transform(BoolLiteralNode boolLiteralNode) { if (boolLiteralNode.value().kind() == SyntaxKind.TRUE_KEYWORD) { return new TomlBooleanValueNode(true, getPosition(boolLiteralNode)); } else { return new TomlBooleanValueNode(false, getPosition(boolLiteralNode)); } } @Override public TomlNode transform(IdentifierLiteralNode identifierLiteralNode) { return new TomlUnquotedKeyNode(identifierLiteralNode.value().text(), getPosition(identifierLiteralNode)); } private TomlNodeLocation getLocationOfKeyEntryList(List<TomlKeyEntryNode> keys) { if (keys.size() == 0) { return null; } TomlNodeLocation locationOfFirstKey = keys.get(0).location(); int startOffset = locationOfFirstKey.textRange().startOffset(); int length = 0; for (TomlKeyEntryNode entryNode : keys) { length += entryNode.location().textRange().length() + 1; } TextRange textRange = TextRange.from(startOffset, length - 1); LineRange lineRange = LineRange.from( locationOfFirstKey.lineRange().filePath(), locationOfFirstKey.lineRange().startLine(), keys.get(keys.size() - 1).location().lineRange().endLine()); return new TomlNodeLocation(lineRange, textRange); } }
class TomlTransformer extends NodeTransformer<TomlNode> { private DiagnosticLog dlog; public TomlTransformer() { this.dlog = DiagnosticLog.getInstance(); } @Override public TomlNode transform(DocumentNode documentNode) { TomlTableNode rootTable = createRootTable(documentNode); NodeList<DocumentMemberDeclarationNode> members = documentNode.members(); for (DocumentMemberDeclarationNode rootNode : members) { TomlNode transformedChild = rootNode.apply(this); addChildNodeToParent(rootTable, transformedChild); } return rootTable; } private TomlTableNode createRootTable(DocumentNode modulePartNode) { TomlNodeLocation location = getPosition(modulePartNode); TomlKeyEntryNode root = new TomlKeyEntryNode(new TomlUnquotedKeyNode("__root", location)); List<TomlKeyEntryNode> tomlKeyEntryNodes = Collections.singletonList(root); TomlKeyNode tomlKeyNode = new TomlKeyNode(tomlKeyEntryNodes, location); return new TomlTableNode(tomlKeyNode, location); } private void addChildNodeToParent(TomlTableNode rootTable, TomlNode transformedChild) { if (transformedChild.kind() == TomlType.TABLE) { TomlTableNode tableChild = (TomlTableNode) transformedChild; addChildTableToParent(rootTable, tableChild); } else if (transformedChild.kind() == TomlType.TABLE_ARRAY) { TomlTableArrayNode transformedArray = (TomlTableArrayNode) transformedChild; addChildParentArrayToParent(rootTable, transformedArray); } else if (transformedChild.kind() == TomlType.KEY_VALUE) { TomlKeyValueNode transformedKeyValuePair = (TomlKeyValueNode) transformedChild; addChildKeyValueToParent(rootTable, transformedKeyValuePair); } else { throw new UnsupportedOperationException(); } } private void addChildKeyValueToParent(TomlTableNode rootTable, TomlKeyValueNode transformedKeyValuePair) { List<TomlKeyEntryNode> keys = transformedKeyValuePair.key().keys(); List<String> parentTables = new ArrayList<>(); for (int i = 0; i < keys.size() - 1; i++) { parentTables.add(keys.get(i).name().toString()); } TomlTableNode parentTable = 
rootTable; for (int i = 0; i < parentTables.size(); i++) { String newTable = parentTables.get(i); TopLevelNode dottedParentNode = parentTable.entries().get(newTable); if (dottedParentNode != null) { if (dottedParentNode.kind() == TomlType.TABLE) { parentTable = (TomlTableNode) dottedParentNode; } else { TomlDiagnostic nodeExists = dlog.error(dottedParentNode.location(), DiagnosticErrorCode.ERROR_EXISTING_NODE, newTable); parentTable.addDiagnostic(nodeExists); } } else { TomlKeyEntryNode tomlKeyEntryNode = keys.get(i); parentTable = createDottedKeyParentTable(parentTable, tomlKeyEntryNode, transformedKeyValuePair.location()); } } if (isDottedKey(keys)) { List<TomlKeyEntryNode> list = new ArrayList<>(); TomlKeyEntryNode childKeyEntry = keys.get(keys.size() - 1); list.add(childKeyEntry); TomlKeyNode newKey = new TomlKeyNode(list, childKeyEntry.location()); transformedKeyValuePair = new TomlKeyValueNode(newKey, transformedKeyValuePair.value(), transformedKeyValuePair.location()); } addChildToTableAST(parentTable, transformedKeyValuePair); } private void addChildToTableAST(TomlTableNode parentTable, TopLevelNode value) { Map<String, TopLevelNode> entries = parentTable.entries(); String key = value.key().name(); if (entries.containsKey(key)) { TomlDiagnostic nodeExists = dlog.error(value.location(), DiagnosticErrorCode.ERROR_EXISTING_NODE, key); parentTable.addDiagnostic(nodeExists); } else { entries.put(key, value); } } private TomlTableNode createDottedKeyParentTable(TomlTableNode parentTable, TomlKeyEntryNode dottedKey, TomlNodeLocation location) { List<TomlKeyEntryNode> list = new ArrayList<>(); list.add(dottedKey); TomlKeyNode newTableKey = new TomlKeyNode(list, dottedKey.location()); TomlTableNode newTomlTableNode = new TomlTableNode(newTableKey, location); addChildToTableAST(parentTable, newTomlTableNode); return newTomlTableNode; } private TomlKeyEntryNode getLastKeyEntry(TopLevelNode childNode) { return childNode.key().keys().get(childNode.key().keys().size() - 
1); } private TomlTableNode getParentTable(TomlTableNode rootTable, TopLevelNode childNode) { List<String> parentTables = new ArrayList<>(); for (int i = 0; i < (childNode.key().keys().size() - 1); i++) { parentTables.add(childNode.key().keys().get(i).name().toString()); } TomlTableNode parentTable = rootTable; for (int i = 0; i < parentTables.size(); i++) { String parentString = parentTables.get(i); TopLevelNode rootTableNode = parentTable.entries().get(parentString); if (rootTableNode != null) { parentTable = (TomlTableNode) rootTableNode; } else { TomlKeyEntryNode tomlKeyEntryNode = childNode.key().keys().get(i); if (childNode instanceof TomlTableArrayNode) { parentTable = generateTable(parentTable, tomlKeyEntryNode, false, childNode.location()); } else { parentTable = generateTable(parentTable, tomlKeyEntryNode, true, childNode.location()); } } } return parentTable; } private void addChildTableToParent(TomlTableNode rootTable, TomlTableNode tableChild) { TomlTableNode parentTable = getParentTable(rootTable, tableChild); TopLevelNode topLevelNode = parentTable.entries().get(tableChild.key().name()); TomlKeyEntryNode lastKeyEntry = getLastKeyEntry(tableChild); List<TomlKeyEntryNode> entries = new ArrayList<>(); entries.add(lastKeyEntry); TomlTableNode newTableNode = new TomlTableNode(new TomlKeyNode(entries, lastKeyEntry.location()), tableChild.generated(), tableChild.location(), tableChild.entries()); if (topLevelNode == null) { addChildToTableAST(parentTable, newTableNode); } else { if (topLevelNode instanceof TomlTableNode) { TomlTableNode targetTable = (TomlTableNode) topLevelNode; if ((targetTable).generated()) { parentTable.replaceGeneratedTable(newTableNode); } else { TomlDiagnostic nodeExists = dlog.error(tableChild.location(), DiagnosticErrorCode.ERROR_EXISTING_NODE, newTableNode.key().name()); parentTable.addDiagnostic(nodeExists); } } else if (topLevelNode instanceof TomlKeyValueNode) { TomlDiagnostic nodeExist = dlog.error(newTableNode.location(), 
DiagnosticErrorCode.ERROR_EXISTING_NODE, tableChild.key().name()); parentTable.addDiagnostic(nodeExist); } else { throw new UnsupportedOperationException(); } } } private TomlTableNode generateTable(TomlTableNode parentTable, TomlKeyEntryNode parentString, boolean isGenerated, TomlNodeLocation location) { List<TomlKeyEntryNode> list = new ArrayList<>(); list.add(parentString); TomlKeyNode newTableKey = new TomlKeyNode(list, parentString.location()); TomlTableNode newTomlTableNode = new TomlTableNode(newTableKey, isGenerated, location); addChildToTableAST(parentTable, newTomlTableNode); return newTomlTableNode; } @Override public TomlNode transform(TableNode tableNode) { SeparatedNodeList<ValueNode> identifierList = tableNode.identifier(); TomlKeyNode tomlKeyNode = getTomlKeyNode(identifierList); TomlTableNode tomlTableNode = new TomlTableNode(tomlKeyNode, getPosition(tableNode)); addChildToTable(tableNode, tomlTableNode); return tomlTableNode; } private void addChildToTable(TableNode stTableNode, TomlTableNode astTomlTableNode) { NodeList<KeyValueNode> children = stTableNode.fields(); for (KeyValueNode child : children) { TomlNode transformedChild = child.apply(this); if (transformedChild instanceof TomlKeyValueNode) { TopLevelNode topLevelChild = (TopLevelNode) transformedChild; checkExistingNodes(astTomlTableNode, topLevelChild); addChildKeyValueToParent(astTomlTableNode, (TomlKeyValueNode) transformedChild); } else { throw new UnsupportedOperationException(); } } } private boolean isDottedKey(List<TomlKeyEntryNode> keys) { return keys.size() > 1; } private void checkExistingNodes(TomlTableNode tomlTableNode, TopLevelNode topLevelChild) { Map<String, TopLevelNode> childs = tomlTableNode.entries(); String childName = topLevelChild.key().name(); if (childs.get(childName) != null) { TomlDiagnostic nodeExists = dlog.error(topLevelChild.location(), DiagnosticErrorCode.ERROR_EXISTING_NODE, childName); tomlTableNode.addDiagnostic(nodeExists); } } @Override public 
TomlNode transform(TableArrayNode tableArrayNode) { SeparatedNodeList<ValueNode> identifierList = tableArrayNode.identifier(); TomlKeyNode tomlKeyNode = getTomlKeyNode(identifierList); TomlTableArrayNode tomlTableArrayNode = new TomlTableArrayNode(tomlKeyNode, getPosition(tableArrayNode)); TomlTableNode anonTable = addChildsToTableArray(tableArrayNode); tomlTableArrayNode.addChild(anonTable); return tomlTableArrayNode; } private TomlTableNode addChildsToTableArray(TableArrayNode tableArrayNode) { NodeList<KeyValueNode> children = tableArrayNode.fields(); TomlNodeLocation position = getPosition(tableArrayNode); TomlKeyNode anonKey = getTomlKeyNode(tableArrayNode.identifier()); TomlTableNode anonTable = new TomlTableNode(anonKey, position); for (KeyValueNode child : children) { TomlNode transformedChild = child.apply(this); if (transformedChild instanceof TomlKeyValueNode) { addChildKeyValueToParent(anonTable, (TomlKeyValueNode) transformedChild); } else { throw new UnsupportedOperationException(); } } return anonTable; } @Override public TomlNode transform(KeyValueNode keyValue) { SeparatedNodeList<ValueNode> identifierList = keyValue.identifier(); TomlKeyNode tomlKeyNode = getTomlKeyNode(identifierList); ValueNode value = keyValue.value(); TomlValueNode tomlValue = transformValue(value); return new TomlKeyValueNode(tomlKeyNode, tomlValue, getPosition(keyValue)); } private TomlKeyNode getTomlKeyNode(SeparatedNodeList<ValueNode> identifierList) { List<TomlKeyEntryNode> nodeList = new ArrayList<>(); for (Node node : identifierList) { TomlBasicValueNode transformedNode = (TomlBasicValueNode) node.apply(this); nodeList.add(new TomlKeyEntryNode(transformedNode)); } return new TomlKeyNode(nodeList, getTomlNodeListLocation(nodeList)); } private TomlValueNode transformValue(ValueNode valueToken) { return (TomlValueNode) valueToken.apply(this); } @Override public TomlNode transform(ArrayNode array) { SeparatedNodeList<ValueNode> values = array.values(); List<TomlValueNode> 
elements = new ArrayList<>(); for (ValueNode value : values) { TomlValueNode transformedValue = (TomlValueNode) value.apply(this); elements.add(transformedValue); } return new TomlArrayValueNode(elements, getPosition(array)); } @Override protected TomlNode transformSyntaxNode(Node node) { return null; } private TomlNodeLocation getPosition(Node node) { return new TomlNodeLocation(node.lineRange(), node.textRange()); } /** * Transforms ST StringLiteralNode into AST string node. * * @param stringLiteralNode Syntax Tree representative for string nodes * @return AST string value representative node */ @Override public TomlNode transform(StringLiteralNode stringLiteralNode) { boolean multilineString = isMultilineString(stringLiteralNode.startDoubleQuote()); Optional<Token> content = stringLiteralNode.content(); String valueString; if (content.isEmpty()) { valueString = ""; } else { valueString = content.get().text(); if (multilineString) { valueString = removeFirstNewline(valueString); valueString = trimBackslashWhitespaces(valueString); } } String unescapedJava = StringEscapeUtils.unescapeJava(valueString); TomlNodeLocation position = getPosition(stringLiteralNode); return new TomlStringValueNode(unescapedJava, position); } private String trimBackslashWhitespaces(String value) { StringBuilder output = new StringBuilder(); String[] split = value.split("\\\\\\r?\\n"); for (String str : split) { output.append(str.stripLeading()); } return output.toString(); } /** * Transforms ST LiteralStringLiteralNode into AST literal string node. 
* * @param literalStringLiteralNode Syntax Tree representative for literal string nodes * @return AST Literal string value representative node */ @Override public TomlNode transform(LiteralStringLiteralNode literalStringLiteralNode) { boolean multilineString = isMultilineString(literalStringLiteralNode.startSingleQuote()); Optional<Token> content = literalStringLiteralNode.content(); String valueString; if (content.isEmpty()) { valueString = ""; } else { valueString = content.get().text(); if (multilineString) { valueString = removeFirstNewline(valueString); } } TomlNodeLocation position = getPosition(literalStringLiteralNode); return new TomlStringValueNode(valueString, position); } private boolean isMultilineString(Token token) { return token.kind() == SyntaxKind.TRIPLE_SINGLE_QUOTE_TOKEN || token.kind() == SyntaxKind.TRIPLE_DOUBLE_QUOTE_TOKEN; } private String removeFirstNewline(String value) { if (value.startsWith("\n")) { return value.substring(1); } if (value.startsWith("\r\n")) { return value.substring(2); } return value; } /** * Transforms ST NumericLiteralNode into AST numerical node. 
* * @param numericLiteralNode Syntax Tree representative for numerical nodes * @return AST Numerical Value representative node */ @Override public TomlNode transform(NumericLiteralNode numericLiteralNode) { String sign = ""; if (numericLiteralNode.sign().isPresent()) { sign = numericLiteralNode.sign().get().text(); } Token valueToken = numericLiteralNode.value(); return getTomlNode(numericLiteralNode, sign + valueToken.text()); } private TomlNode getTomlNode(NumericLiteralNode numericLiteralNode, String value) { value = value.replace("_", ""); if (numericLiteralNode.kind() == SyntaxKind.DEC_INT) { return new TomlLongValueNode(Long.parseLong(value), getPosition(numericLiteralNode)); } else if (numericLiteralNode.kind() == SyntaxKind.HEX_INT) { value = value.replace("0x", "").replace("0X", ""); return new TomlLongValueNode(Long.parseLong(value, 16), getPosition(numericLiteralNode)); } else if (numericLiteralNode.kind() == SyntaxKind.OCT_INT) { value = value.replace("0o", "").replace("0O", ""); return new TomlLongValueNode(Long.parseLong(value, 8), getPosition(numericLiteralNode)); } else if (numericLiteralNode.kind() == SyntaxKind.BINARY_INT) { value = value.replace("0b", "").replace("0B", ""); return new TomlLongValueNode(Long.parseLong(value, 2), getPosition(numericLiteralNode)); } else { return new TomlDoubleValueNodeNode(Double.parseDouble(value), getPosition(numericLiteralNode)); } } /** * Transforms ST BoolLiteralNode into AST TomlBooleanValue Node. 
* * @param boolLiteralNode Syntax Tree representative for boolean * @return AST Boolean Value representative node */ @Override public TomlNode transform(BoolLiteralNode boolLiteralNode) { if (boolLiteralNode.value().kind() == SyntaxKind.TRUE_KEYWORD) { return new TomlBooleanValueNode(true, getPosition(boolLiteralNode)); } else { return new TomlBooleanValueNode(false, getPosition(boolLiteralNode)); } } @Override public TomlNode transform(IdentifierLiteralNode identifierLiteralNode) { return new TomlUnquotedKeyNode(identifierLiteralNode.value().text(), getPosition(identifierLiteralNode)); } /** * Gives Location of list of TomlNodes * Should only call this method when the execution order is ensured. * * @param tomlNodes AST TomlNode key list * @return TomlNodeLocation */ private TomlNodeLocation getTomlNodeListLocation(List<? extends TomlNode> tomlNodes) { if (tomlNodes.size() == 0) { return null; } TomlNode firstNode = tomlNodes.get(0); if (tomlNodes.size() == 1) { return firstNode.location(); } TomlNodeLocation firstKeyLocation = firstNode.location(); int length = 0; for (TomlNode entryNode : tomlNodes) { length += entryNode.location().textRange().length() + 1; } TextRange textRange = TextRange.from(firstKeyLocation.textRange().startOffset(), length - 1); LineRange lineRange = LineRange.from( firstKeyLocation.lineRange().filePath(), firstKeyLocation.lineRange().startLine(), tomlNodes.get(tomlNodes.size() - 1).location().lineRange().endLine()); return new TomlNodeLocation(lineRange, textRange); } }
```suggestion "Synthetic bean does not provide a creation method, use ExtendedBeanConfigurator#creator(), ExtendedBeanConfigurator#supplier(), ExtendedBeanConfigurator#createWith() or ExtendedBeanConfigurator#runtimeValue()"); ```
public SyntheticBeanBuildItem done() { if (supplier == null && runtimeValue == null && fun == null && creatorConsumer == null) { throw new IllegalStateException( "Synthetic bean does not provide a creation method, use ExtendedBeanConfigurator#supplier(), ExtendedBeanConfigurator#createWith() or ExtendedBeanConfigurator#runtimeValue()"); } return new SyntheticBeanBuildItem(this); }
"Synthetic bean does not provide a creation method, use ExtendedBeanConfigurator#supplier(), ExtendedBeanConfigurator#createWith() or ExtendedBeanConfigurator#runtimeValue()");
public SyntheticBeanBuildItem done() { if (supplier == null && runtimeValue == null && fun == null && creatorConsumer == null) { throw new IllegalStateException( "Synthetic bean does not provide a creation method, use ExtendedBeanConfigurator#creator(), ExtendedBeanConfigurator#supplier(), ExtendedBeanConfigurator#createWith() or ExtendedBeanConfigurator#runtimeValue()"); } return new SyntheticBeanBuildItem(this); }
class ExtendedBeanConfigurator extends BeanConfiguratorBase<ExtendedBeanConfigurator, Object> { private Supplier<?> supplier; private RuntimeValue<?> runtimeValue; private Function<SyntheticCreationalContext<?>, ?> fun; private boolean staticInit; ExtendedBeanConfigurator(DotName implClazz) { super(implClazz); this.staticInit = true; } /** * Finish the configurator. * * @return a new build item */ /** * Use {@link * * @param supplier A supplier returned from a recorder * @return self */ public ExtendedBeanConfigurator supplier(Supplier<?> supplier) { if (runtimeValue != null || fun != null) { throw multipleCreationMethods(); } this.supplier = Objects.requireNonNull(supplier); return this; } /** * Use {@link * * @param runtimeValue A runtime value returned from a recorder * @return self */ public ExtendedBeanConfigurator runtimeValue(RuntimeValue<?> runtimeValue) { if (supplier != null || fun != null) { throw multipleCreationMethods(); } this.runtimeValue = Objects.requireNonNull(runtimeValue); return this; } /** * This method is useful if you need to use build-time parameters or synthetic injection points during creation of a * bean instance. * * @param fun A function returned from a recorder * @return self */ public <B> ExtendedBeanConfigurator createWith(Function<SyntheticCreationalContext<B>, B> fun) { if (supplier != null || runtimeValue != null) { throw multipleCreationMethods(); } this.fun = cast(Objects.requireNonNull(fun)); return this; } /** * A synthetic bean whose instance is produced through a recorder is initialized during * {@link ExecutionTime * <p> * It is possible to change this behavior and initialize the bean during the {@link ExecutionTime * However, in such case a client that attempts to obtain such bean during {@link ExecutionTime * runtime-init synthetic beans are initialized will receive an exception. * <p> * {@link ExecutionTime * {@link SyntheticBeansRuntimeInitBuildItem}. 
* * @return self * @see SyntheticBeansRuntimeInitBuildItem */ public ExtendedBeanConfigurator setRuntimeInit() { this.staticInit = false; return this; } DotName getImplClazz() { return implClazz; } Set<AnnotationInstance> getQualifiers() { return qualifiers; } Supplier<?> getSupplier() { return supplier; } RuntimeValue<?> getRuntimeValue() { return runtimeValue; } Function<SyntheticCreationalContext<?>, ?> getFunction() { return fun; } private IllegalStateException multipleCreationMethods() { return new IllegalStateException("It is not possible to specify multiple creation methods"); } }
class ExtendedBeanConfigurator extends BeanConfiguratorBase<ExtendedBeanConfigurator, Object> { private Supplier<?> supplier; private RuntimeValue<?> runtimeValue; private Function<SyntheticCreationalContext<?>, ?> fun; private boolean staticInit; ExtendedBeanConfigurator(DotName implClazz) { super(implClazz); this.staticInit = true; } /** * Finish the configurator. * * @return a new build item */ /** * Use {@link * * @param supplier A supplier returned from a recorder * @return self */ public ExtendedBeanConfigurator supplier(Supplier<?> supplier) { if (runtimeValue != null || fun != null) { throw multipleCreationMethods(); } this.supplier = Objects.requireNonNull(supplier); return this; } /** * Use {@link * * @param runtimeValue A runtime value returned from a recorder * @return self */ public ExtendedBeanConfigurator runtimeValue(RuntimeValue<?> runtimeValue) { if (supplier != null || fun != null) { throw multipleCreationMethods(); } this.runtimeValue = Objects.requireNonNull(runtimeValue); return this; } /** * This method is useful if you need to use build-time parameters or synthetic injection points during creation of a * bean instance. * * @param fun A function returned from a recorder * @return self */ public <B> ExtendedBeanConfigurator createWith(Function<SyntheticCreationalContext<B>, B> fun) { if (supplier != null || runtimeValue != null) { throw multipleCreationMethods(); } this.fun = cast(Objects.requireNonNull(fun)); return this; } /** * A synthetic bean whose instance is produced through a recorder is initialized during * {@link ExecutionTime * <p> * It is possible to change this behavior and initialize the bean during the {@link ExecutionTime * However, in such case a client that attempts to obtain such bean during {@link ExecutionTime * runtime-init synthetic beans are initialized will receive an exception. * <p> * {@link ExecutionTime * {@link SyntheticBeansRuntimeInitBuildItem}. 
* * @return self * @see SyntheticBeansRuntimeInitBuildItem */ public ExtendedBeanConfigurator setRuntimeInit() { this.staticInit = false; return this; } DotName getImplClazz() { return implClazz; } Set<AnnotationInstance> getQualifiers() { return qualifiers; } Supplier<?> getSupplier() { return supplier; } RuntimeValue<?> getRuntimeValue() { return runtimeValue; } Function<SyntheticCreationalContext<?>, ?> getFunction() { return fun; } private IllegalStateException multipleCreationMethods() { return new IllegalStateException("It is not possible to specify multiple creation methods"); } }
Hello, I am curious if some work is done on https://issues.apache.org/jira/browse/FLINK-29267 ? I require this solution, I tried to check the files but not sure how can i access connector configurations in PostgresRowConverter file.
protected JdbcSerializationConverter createExternalConverter(LogicalType type) { switch (type.getTypeRoot()) { case CHAR: case VARCHAR: return (val, index, statement) -> { String valString = val.getString(index).toString(); if (UUID_REGEX_PATTERN.matcher(valString).matches()) { statement.setObject(index, UUID.fromString(valString)); } else { statement.setString(index, valString); } }; } return super.createExternalConverter(type); }
if (UUID_REGEX_PATTERN.matcher(valString).matches()) {
protected JdbcSerializationConverter createExternalConverter(LogicalType type) { switch (type.getTypeRoot()) { case CHAR: case VARCHAR: return (val, index, statement) -> { String valString = val.getString(index).toString(); if (UUID_REGEX_PATTERN.matcher(valString).matches()) { statement.setObject(index, UUID.fromString(valString)); } else { statement.setString(index, valString); } }; } return super.createExternalConverter(type); }
class PostgresRowConverter extends AbstractJdbcRowConverter { private static final long serialVersionUID = 1L; private static final Pattern UUID_REGEX_PATTERN = Pattern.compile("^[{]?[0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}[}]?$"); @Override public String converterName() { return "PostgreSQL"; } public PostgresRowConverter(RowType rowType) { super(rowType); } @Override public JdbcDeserializationConverter createInternalConverter(LogicalType type) { LogicalTypeRoot root = type.getTypeRoot(); if (root == LogicalTypeRoot.ARRAY) { ArrayType arrayType = (ArrayType) type; return createPostgresArrayConverter(arrayType); } else { return createPrimitiveConverter(type); } } @Override @Override protected JdbcSerializationConverter createNullableExternalConverter(LogicalType type) { LogicalTypeRoot root = type.getTypeRoot(); if (root == LogicalTypeRoot.ARRAY) { return (val, index, statement) -> { throw new IllegalStateException( String.format( "Writing ARRAY type is not yet supported in JDBC:%s.", converterName())); }; } else { return super.createNullableExternalConverter(type); } } private JdbcDeserializationConverter createPostgresArrayConverter(ArrayType arrayType) { final Class<?> elementClass = LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType()); final JdbcDeserializationConverter elementConverter = createNullableInternalConverter(arrayType.getElementType()); return val -> { PgArray pgArray = (PgArray) val; Object[] in = (Object[]) pgArray.getArray(); final Object[] array = (Object[]) Array.newInstance(elementClass, in.length); for (int i = 0; i < in.length; i++) { array[i] = elementConverter.deserialize(in[i]); } return new GenericArrayData(array); }; } private JdbcDeserializationConverter createPrimitiveConverter(LogicalType type) { return super.createInternalConverter(type); } }
class PostgresRowConverter extends AbstractJdbcRowConverter { private static final long serialVersionUID = 1L; private static final Pattern UUID_REGEX_PATTERN = Pattern.compile("^[{]?[0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}[}]?$"); @Override public String converterName() { return "PostgreSQL"; } public PostgresRowConverter(RowType rowType) { super(rowType); } @Override public JdbcDeserializationConverter createInternalConverter(LogicalType type) { LogicalTypeRoot root = type.getTypeRoot(); if (root == LogicalTypeRoot.ARRAY) { ArrayType arrayType = (ArrayType) type; return createPostgresArrayConverter(arrayType); } else { return createPrimitiveConverter(type); } } @Override @Override protected JdbcSerializationConverter createNullableExternalConverter(LogicalType type) { LogicalTypeRoot root = type.getTypeRoot(); if (root == LogicalTypeRoot.ARRAY) { return (val, index, statement) -> { throw new IllegalStateException( String.format( "Writing ARRAY type is not yet supported in JDBC:%s.", converterName())); }; } else { return super.createNullableExternalConverter(type); } } private JdbcDeserializationConverter createPostgresArrayConverter(ArrayType arrayType) { final Class<?> elementClass = LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType()); final JdbcDeserializationConverter elementConverter = createNullableInternalConverter(arrayType.getElementType()); return val -> { PgArray pgArray = (PgArray) val; Object[] in = (Object[]) pgArray.getArray(); final Object[] array = (Object[]) Array.newInstance(elementClass, in.length); for (int i = 0; i < in.length; i++) { array[i] = elementConverter.deserialize(in[i]); } return new GenericArrayData(array); }; } private JdbcDeserializationConverter createPrimitiveConverter(LogicalType type) { return super.createInternalConverter(type); } }
This method is only protected by `readLock`, but you do write operation here. Why not just save the splitted rules in `commonProperties`?
public String[] getSqlBlockRules() { if (this.sqlBlockRulesSplit.length != 0) { return this.sqlBlockRulesSplit; } String sqlBlockRules = commonProperties.getSqlBlockRules(); if (StringUtils.isNotEmpty(sqlBlockRules)) { this.sqlBlockRulesSplit = sqlBlockRules.replace(" ", "").split(","); } return this.sqlBlockRulesSplit; }
String sqlBlockRules = commonProperties.getSqlBlockRules();
public String[] getSqlBlockRules() { return commonProperties.getSqlBlockRulesSplit(); }
class UserProperty implements Writable { private static final String PROP_MAX_USER_CONNECTIONS = "max_user_connections"; private static final String PROP_MAX_QUERY_INSTANCES = "max_query_instances"; private static final String PROP_RESOURCE = "resource"; private static final String PROP_QUOTA = "quota"; private static final String PROP_DEFAULT_LOAD_CLUSTER = "default_load_cluster"; private static final String PROP_LOAD_CLUSTER = "load_cluster"; private static final String PROP_SQL_BLOCK_RULES = "sql_block_rules"; public static final Set<Pattern> ADVANCED_PROPERTIES = Sets.newHashSet(); public static final Set<Pattern> COMMON_PROPERTIES = Sets.newHashSet(); private String qualifiedUser; private CommonUserProperties commonProperties = new CommonUserProperties(); private UserResource resource = new UserResource(1000); private String defaultLoadCluster = null; private Map<String, DppConfig> clusterToDppConfig = Maps.newHashMap(); /* * We keep white list here to save Baidu domain name (BNS) or DNS as white list. * Each frontend will periodically resolve the domain name to ip, and update the privilege table. * We never persist the resolved IPs. */ private WhiteList whiteList = new WhiteList(); private String[] sqlBlockRulesSplit = {}; static { ADVANCED_PROPERTIES.add(Pattern.compile("^" + PROP_MAX_USER_CONNECTIONS + "$", Pattern.CASE_INSENSITIVE)); ADVANCED_PROPERTIES.add(Pattern.compile("^" + PROP_RESOURCE + ".", Pattern.CASE_INSENSITIVE)); ADVANCED_PROPERTIES.add(Pattern.compile("^" + PROP_LOAD_CLUSTER + "." + DppConfig.CLUSTER_NAME_REGEX + "." 
+ DppConfig.PRIORITY + "$", Pattern.CASE_INSENSITIVE)); ADVANCED_PROPERTIES.add(Pattern.compile("^" + PROP_MAX_QUERY_INSTANCES + "$", Pattern.CASE_INSENSITIVE)); ADVANCED_PROPERTIES.add(Pattern.compile("^" + PROP_SQL_BLOCK_RULES + "$", Pattern.CASE_INSENSITIVE)); COMMON_PROPERTIES.add(Pattern.compile("^" + PROP_QUOTA + ".", Pattern.CASE_INSENSITIVE)); COMMON_PROPERTIES.add(Pattern.compile("^" + PROP_DEFAULT_LOAD_CLUSTER + "$", Pattern.CASE_INSENSITIVE)); COMMON_PROPERTIES.add(Pattern.compile("^" + PROP_LOAD_CLUSTER + "." + DppConfig.CLUSTER_NAME_REGEX + ".", Pattern.CASE_INSENSITIVE)); } public UserProperty() { } public UserProperty(String qualifiedUser) { this.qualifiedUser = qualifiedUser; } public String getQualifiedUser() { return qualifiedUser; } public long getMaxConn() { return this.commonProperties.getMaxConn(); } public long getMaxQueryInstances() { return commonProperties.getMaxQueryInstances(); } public WhiteList getWhiteList() { return whiteList; } public void setPasswordForDomain(String domain, byte[] password, boolean errOnExist) throws DdlException { if (errOnExist && whiteList.containsDomain(domain)) { throw new DdlException("Domain " + domain + " of user " + qualifiedUser + " already exists"); } if (password != null) { whiteList.setPassword(domain, password); } } public void removeDomain(String domain) { whiteList.removeDomain(domain); } public void update(List<Pair<String, String>> properties) throws DdlException { long newMaxConn = this.commonProperties.getMaxConn(); long newMaxQueryInstances = this.commonProperties.getMaxQueryInstances(); String sqlBlockRules = this.commonProperties.getSqlBlockRules(); UserResource newResource = resource.getCopiedUserResource(); String newDefaultLoadCluster = defaultLoadCluster; Map<String, DppConfig> newDppConfigs = Maps.newHashMap(clusterToDppConfig); for (Pair<String, String> entry : properties) { String key = entry.first; String value = entry.second; String[] keyArr = key.split("\\" + 
SetUserPropertyVar.DOT_SEPARATOR); if (keyArr[0].equalsIgnoreCase(PROP_MAX_USER_CONNECTIONS)) { if (keyArr.length != 1) { throw new DdlException(PROP_MAX_USER_CONNECTIONS + " format error"); } try { newMaxConn = Long.parseLong(value); } catch (NumberFormatException e) { throw new DdlException(PROP_MAX_USER_CONNECTIONS + " is not number"); } if (newMaxConn <= 0 || newMaxConn > 10000) { throw new DdlException(PROP_MAX_USER_CONNECTIONS + " is not valid, must between 1 and 10000"); } } else if (keyArr[0].equalsIgnoreCase(PROP_RESOURCE)) { if (keyArr.length != 2) { throw new DdlException(PROP_RESOURCE + " format error"); } int resource = 0; try { resource = Integer.parseInt(value); } catch (NumberFormatException e) { throw new DdlException(key + " is not number"); } if (resource <= 0) { throw new DdlException(key + " is not valid"); } newResource.updateResource(keyArr[1], resource); } else if (keyArr[0].equalsIgnoreCase(PROP_QUOTA)) { if (keyArr.length != 2) { throw new DdlException(PROP_QUOTA + " format error"); } int quota = 0; try { quota = Integer.parseInt(value); } catch (NumberFormatException e) { throw new DdlException(key + " is not number"); } if (quota <= 0) { throw new DdlException(key + " is not valid"); } newResource.updateGroupShare(keyArr[1], quota); } else if (keyArr[0].equalsIgnoreCase(PROP_LOAD_CLUSTER)) { updateLoadCluster(keyArr, value, newDppConfigs); } else if (keyArr[0].equalsIgnoreCase(PROP_DEFAULT_LOAD_CLUSTER)) { if (keyArr.length != 1) { throw new DdlException(PROP_DEFAULT_LOAD_CLUSTER + " format error"); } if (value != null && !newDppConfigs.containsKey(value)) { throw new DdlException("Load cluster[" + value + "] does not exist"); } newDefaultLoadCluster = value; } else if (keyArr[0].equalsIgnoreCase(PROP_MAX_QUERY_INSTANCES)) { if (keyArr.length != 1) { throw new DdlException(PROP_MAX_QUERY_INSTANCES + " format error"); } try { newMaxQueryInstances = Long.parseLong(value); } catch (NumberFormatException e) { throw new 
DdlException(PROP_MAX_QUERY_INSTANCES + " is not number"); } } else if (keyArr[0].equalsIgnoreCase(PROP_SQL_BLOCK_RULES)) { if (keyArr.length != 1) { throw new DdlException(PROP_SQL_BLOCK_RULES + " format error"); } sqlBlockRules = value; this.sqlBlockRulesSplit = sqlBlockRules.replace(" ", "").split(","); } else { throw new DdlException("Unknown user property(" + key + ")"); } } this.commonProperties.setMaxConn(newMaxConn); this.commonProperties.setMaxQueryInstances(newMaxQueryInstances); this.commonProperties.setSqlBlockRules(sqlBlockRules); resource = newResource; if (newDppConfigs.containsKey(newDefaultLoadCluster)) { defaultLoadCluster = newDefaultLoadCluster; } else { defaultLoadCluster = null; } clusterToDppConfig = newDppConfigs; } private void updateLoadCluster(String[] keyArr, String value, Map<String, DppConfig> newDppConfigs) throws DdlException { if (keyArr.length == 1 && Strings.isNullOrEmpty(value)) { newDppConfigs.clear(); } else if (keyArr.length == 2 && Strings.isNullOrEmpty(value)) { String cluster = keyArr[1]; newDppConfigs.remove(cluster); } else if (keyArr.length == 3 && Strings.isNullOrEmpty(value)) { String cluster = keyArr[1]; if (!newDppConfigs.containsKey(cluster)) { throw new DdlException("Load cluster[" + value + "] does not exist"); } try { newDppConfigs.get(cluster).resetConfigByKey(keyArr[2]); } catch (LoadException e) { throw new DdlException(e.getMessage()); } } else if (keyArr.length == 3 && value != null) { String cluster = keyArr[1]; Map<String, String> configMap = Maps.newHashMap(); configMap.put(keyArr[2], value); try { DppConfig newDppConfig = DppConfig.create(configMap); if (newDppConfigs.containsKey(cluster)) { newDppConfigs.get(cluster).update(newDppConfig, true); } else { newDppConfigs.put(cluster, newDppConfig); } } catch (LoadException e) { throw new DdlException(e.getMessage()); } } else { throw new DdlException(PROP_LOAD_CLUSTER + " format error"); } } public UserResource getResource() { return resource; } public 
String getDefaultLoadCluster() { return defaultLoadCluster; } public Pair<String, DppConfig> getLoadClusterInfo(String cluster) { String tmpCluster = cluster; if (tmpCluster == null) { tmpCluster = defaultLoadCluster; } DppConfig dppConfig = null; if (tmpCluster != null) { dppConfig = clusterToDppConfig.get(tmpCluster); if (dppConfig != null) { dppConfig = dppConfig.getCopiedDppConfig(); } } return Pair.create(tmpCluster, dppConfig); } public List<List<String>> fetchProperty() { List<List<String>> result = Lists.newArrayList(); String dot = SetUserPropertyVar.DOT_SEPARATOR; result.add(Lists.newArrayList(PROP_MAX_USER_CONNECTIONS, String.valueOf(commonProperties.getMaxConn()))); result.add(Lists.newArrayList(PROP_MAX_QUERY_INSTANCES, String.valueOf(commonProperties.getMaxQueryInstances()))); result.add(Lists.newArrayList(PROP_SQL_BLOCK_RULES, commonProperties.getSqlBlockRules())); ResourceGroup group = resource.getResource(); for (Map.Entry<ResourceType, Integer> entry : group.getQuotaMap().entrySet()) { result.add(Lists.newArrayList(PROP_RESOURCE + dot + entry.getKey().getDesc().toLowerCase(), entry.getValue().toString())); } Map<String, AtomicInteger> groups = resource.getShareByGroup(); for (Map.Entry<String, AtomicInteger> entry : groups.entrySet()) { result.add(Lists.newArrayList(PROP_QUOTA + dot + entry.getKey(), entry.getValue().toString())); } if (defaultLoadCluster != null) { result.add(Lists.newArrayList(PROP_DEFAULT_LOAD_CLUSTER, defaultLoadCluster)); } else { result.add(Lists.newArrayList(PROP_DEFAULT_LOAD_CLUSTER, "")); } for (Map.Entry<String, DppConfig> entry : clusterToDppConfig.entrySet()) { String cluster = entry.getKey(); DppConfig dppConfig = entry.getValue(); String clusterPrefix = PROP_LOAD_CLUSTER + dot + cluster + dot; if (dppConfig.getPaloPath() != null) { result.add(Lists.newArrayList(clusterPrefix + DppConfig.getPaloPathKey(), dppConfig.getPaloPath())); } result.add(Lists.newArrayList(clusterPrefix + DppConfig.getHttpPortKey(), 
String.valueOf(dppConfig.getHttpPort()))); if (dppConfig.getHadoopConfigs() != null) { List<String> configs = Lists.newArrayList(); for (Map.Entry<String, String> configEntry : dppConfig.getHadoopConfigs().entrySet()) { configs.add(String.format("%s=%s", configEntry.getKey(), configEntry.getValue())); } result.add(Lists.newArrayList(clusterPrefix + DppConfig.getHadoopConfigsKey(), StringUtils.join(configs, ";"))); } result.add(Lists.newArrayList(clusterPrefix + DppConfig.getPriorityKey(), String.valueOf(dppConfig.getPriority()))); } Map<String, Set<String>> resolvedIPs = whiteList.getResolvedIPs(); List<String> ips = Lists.newArrayList(); for (Map.Entry<String, Set<String>> entry : resolvedIPs.entrySet()) { ips.add(entry.getKey() + ":" + Joiner.on(",").join(entry.getValue())); } if (!ips.isEmpty()) { result.add(Lists.newArrayList("resolved IPs", Joiner.on(";").join(ips))); } Collections.sort(result, new Comparator<List<String>>() { @Override public int compare(List<String> o1, List<String> o2) { return o1.get(0).compareTo(o2.get(0)); } }); return result; } public static UserProperty read(DataInput in) throws IOException { UserProperty userProperty = new UserProperty(); userProperty.readFields(in); return userProperty; } @Override public void write(DataOutput out) throws IOException { Text.writeString(out, qualifiedUser); resource.write(out); if (defaultLoadCluster == null) { out.writeBoolean(false); } else { out.writeBoolean(true); Text.writeString(out, defaultLoadCluster); } out.writeInt(clusterToDppConfig.size()); for (Map.Entry<String, DppConfig> entry : clusterToDppConfig.entrySet()) { Text.writeString(out, entry.getKey()); entry.getValue().write(out); } whiteList.write(out); commonProperties.write(out); } public void readFields(DataInput in) throws IOException { if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_43) { in.readBoolean(); } if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_30) { qualifiedUser = 
ClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER, Text.readString(in)); } else { qualifiedUser = Text.readString(in); } if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_43) { int passwordLen = in.readInt(); byte[] password = new byte[passwordLen]; in.readFully(password); in.readBoolean(); if (Catalog.getCurrentCatalogJournalVersion() >= 1) { in.readBoolean(); } } if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_100) { long maxConn = in.readLong(); this.commonProperties.setMaxConn(maxConn); } if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_43) { Map<String, AccessPrivilege> dbPrivMap = Maps.newHashMap(); int numPriv = in.readInt(); for (int i = 0; i < numPriv; ++i) { String dbName = null; if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_30) { dbName = ClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER, Text.readString(in)); } else { dbName = Text.readString(in); } AccessPrivilege ap = AccessPrivilege.valueOf(Text.readString(in)); dbPrivMap.put(dbName, ap); } } resource = UserResource.readIn(in); if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_12) { if (in.readBoolean()) { defaultLoadCluster = Text.readString(in); } int clusterNum = in.readInt(); for (int i = 0; i < clusterNum; ++i) { String cluster = Text.readString(in); DppConfig dppConfig = new DppConfig(); dppConfig.readFields(in); clusterToDppConfig.put(cluster, dppConfig); } } if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_21) { whiteList.readFields(in); if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_69) { whiteList.convertOldDomainPrivMap(qualifiedUser); } } if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_43) { if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_30) { if (in.readBoolean()) { Text.readString(in); } } } if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_100) 
{ this.commonProperties = CommonUserProperties.read(in); } } }
class UserProperty implements Writable { private static final String PROP_MAX_USER_CONNECTIONS = "max_user_connections"; private static final String PROP_MAX_QUERY_INSTANCES = "max_query_instances"; private static final String PROP_RESOURCE = "resource"; private static final String PROP_QUOTA = "quota"; private static final String PROP_DEFAULT_LOAD_CLUSTER = "default_load_cluster"; private static final String PROP_LOAD_CLUSTER = "load_cluster"; private static final String PROP_SQL_BLOCK_RULES = "sql_block_rules"; public static final Set<Pattern> ADVANCED_PROPERTIES = Sets.newHashSet(); public static final Set<Pattern> COMMON_PROPERTIES = Sets.newHashSet(); private String qualifiedUser; private CommonUserProperties commonProperties = new CommonUserProperties(); private UserResource resource = new UserResource(1000); private String defaultLoadCluster = null; private Map<String, DppConfig> clusterToDppConfig = Maps.newHashMap(); /* * We keep white list here to save Baidu domain name (BNS) or DNS as white list. * Each frontend will periodically resolve the domain name to ip, and update the privilege table. * We never persist the resolved IPs. */ private WhiteList whiteList = new WhiteList(); static { ADVANCED_PROPERTIES.add(Pattern.compile("^" + PROP_MAX_USER_CONNECTIONS + "$", Pattern.CASE_INSENSITIVE)); ADVANCED_PROPERTIES.add(Pattern.compile("^" + PROP_RESOURCE + ".", Pattern.CASE_INSENSITIVE)); ADVANCED_PROPERTIES.add(Pattern.compile("^" + PROP_LOAD_CLUSTER + "." + DppConfig.CLUSTER_NAME_REGEX + "." 
+ DppConfig.PRIORITY + "$", Pattern.CASE_INSENSITIVE)); ADVANCED_PROPERTIES.add(Pattern.compile("^" + PROP_MAX_QUERY_INSTANCES + "$", Pattern.CASE_INSENSITIVE)); ADVANCED_PROPERTIES.add(Pattern.compile("^" + PROP_SQL_BLOCK_RULES + "$", Pattern.CASE_INSENSITIVE)); COMMON_PROPERTIES.add(Pattern.compile("^" + PROP_QUOTA + ".", Pattern.CASE_INSENSITIVE)); COMMON_PROPERTIES.add(Pattern.compile("^" + PROP_DEFAULT_LOAD_CLUSTER + "$", Pattern.CASE_INSENSITIVE)); COMMON_PROPERTIES.add(Pattern.compile("^" + PROP_LOAD_CLUSTER + "." + DppConfig.CLUSTER_NAME_REGEX + ".", Pattern.CASE_INSENSITIVE)); } public UserProperty() { } public UserProperty(String qualifiedUser) { this.qualifiedUser = qualifiedUser; } public String getQualifiedUser() { return qualifiedUser; } public long getMaxConn() { return this.commonProperties.getMaxConn(); } public long getMaxQueryInstances() { return commonProperties.getMaxQueryInstances(); } public WhiteList getWhiteList() { return whiteList; } public void setPasswordForDomain(String domain, byte[] password, boolean errOnExist) throws DdlException { if (errOnExist && whiteList.containsDomain(domain)) { throw new DdlException("Domain " + domain + " of user " + qualifiedUser + " already exists"); } if (password != null) { whiteList.setPassword(domain, password); } } public void removeDomain(String domain) { whiteList.removeDomain(domain); } public void update(List<Pair<String, String>> properties) throws DdlException { long newMaxConn = this.commonProperties.getMaxConn(); long newMaxQueryInstances = this.commonProperties.getMaxQueryInstances(); String sqlBlockRules = this.commonProperties.getSqlBlockRules(); UserResource newResource = resource.getCopiedUserResource(); String newDefaultLoadCluster = defaultLoadCluster; Map<String, DppConfig> newDppConfigs = Maps.newHashMap(clusterToDppConfig); for (Pair<String, String> entry : properties) { String key = entry.first; String value = entry.second; String[] keyArr = key.split("\\" + 
SetUserPropertyVar.DOT_SEPARATOR); if (keyArr[0].equalsIgnoreCase(PROP_MAX_USER_CONNECTIONS)) { if (keyArr.length != 1) { throw new DdlException(PROP_MAX_USER_CONNECTIONS + " format error"); } try { newMaxConn = Long.parseLong(value); } catch (NumberFormatException e) { throw new DdlException(PROP_MAX_USER_CONNECTIONS + " is not number"); } if (newMaxConn <= 0 || newMaxConn > 10000) { throw new DdlException(PROP_MAX_USER_CONNECTIONS + " is not valid, must between 1 and 10000"); } } else if (keyArr[0].equalsIgnoreCase(PROP_RESOURCE)) { if (keyArr.length != 2) { throw new DdlException(PROP_RESOURCE + " format error"); } int resource = 0; try { resource = Integer.parseInt(value); } catch (NumberFormatException e) { throw new DdlException(key + " is not number"); } if (resource <= 0) { throw new DdlException(key + " is not valid"); } newResource.updateResource(keyArr[1], resource); } else if (keyArr[0].equalsIgnoreCase(PROP_QUOTA)) { if (keyArr.length != 2) { throw new DdlException(PROP_QUOTA + " format error"); } int quota = 0; try { quota = Integer.parseInt(value); } catch (NumberFormatException e) { throw new DdlException(key + " is not number"); } if (quota <= 0) { throw new DdlException(key + " is not valid"); } newResource.updateGroupShare(keyArr[1], quota); } else if (keyArr[0].equalsIgnoreCase(PROP_LOAD_CLUSTER)) { updateLoadCluster(keyArr, value, newDppConfigs); } else if (keyArr[0].equalsIgnoreCase(PROP_DEFAULT_LOAD_CLUSTER)) { if (keyArr.length != 1) { throw new DdlException(PROP_DEFAULT_LOAD_CLUSTER + " format error"); } if (value != null && !newDppConfigs.containsKey(value)) { throw new DdlException("Load cluster[" + value + "] does not exist"); } newDefaultLoadCluster = value; } else if (keyArr[0].equalsIgnoreCase(PROP_MAX_QUERY_INSTANCES)) { if (keyArr.length != 1) { throw new DdlException(PROP_MAX_QUERY_INSTANCES + " format error"); } try { newMaxQueryInstances = Long.parseLong(value); } catch (NumberFormatException e) { throw new 
DdlException(PROP_MAX_QUERY_INSTANCES + " is not number"); } } else if (keyArr[0].equalsIgnoreCase(PROP_SQL_BLOCK_RULES)) { if (keyArr.length != 1) { throw new DdlException(PROP_SQL_BLOCK_RULES + " format error"); } sqlBlockRules = value; } else { throw new DdlException("Unknown user property(" + key + ")"); } } this.commonProperties.setMaxConn(newMaxConn); this.commonProperties.setMaxQueryInstances(newMaxQueryInstances); this.commonProperties.setSqlBlockRules(sqlBlockRules); resource = newResource; if (newDppConfigs.containsKey(newDefaultLoadCluster)) { defaultLoadCluster = newDefaultLoadCluster; } else { defaultLoadCluster = null; } clusterToDppConfig = newDppConfigs; } private void updateLoadCluster(String[] keyArr, String value, Map<String, DppConfig> newDppConfigs) throws DdlException { if (keyArr.length == 1 && Strings.isNullOrEmpty(value)) { newDppConfigs.clear(); } else if (keyArr.length == 2 && Strings.isNullOrEmpty(value)) { String cluster = keyArr[1]; newDppConfigs.remove(cluster); } else if (keyArr.length == 3 && Strings.isNullOrEmpty(value)) { String cluster = keyArr[1]; if (!newDppConfigs.containsKey(cluster)) { throw new DdlException("Load cluster[" + value + "] does not exist"); } try { newDppConfigs.get(cluster).resetConfigByKey(keyArr[2]); } catch (LoadException e) { throw new DdlException(e.getMessage()); } } else if (keyArr.length == 3 && value != null) { String cluster = keyArr[1]; Map<String, String> configMap = Maps.newHashMap(); configMap.put(keyArr[2], value); try { DppConfig newDppConfig = DppConfig.create(configMap); if (newDppConfigs.containsKey(cluster)) { newDppConfigs.get(cluster).update(newDppConfig, true); } else { newDppConfigs.put(cluster, newDppConfig); } } catch (LoadException e) { throw new DdlException(e.getMessage()); } } else { throw new DdlException(PROP_LOAD_CLUSTER + " format error"); } } public UserResource getResource() { return resource; } public String getDefaultLoadCluster() { return defaultLoadCluster; } public 
Pair<String, DppConfig> getLoadClusterInfo(String cluster) { String tmpCluster = cluster; if (tmpCluster == null) { tmpCluster = defaultLoadCluster; } DppConfig dppConfig = null; if (tmpCluster != null) { dppConfig = clusterToDppConfig.get(tmpCluster); if (dppConfig != null) { dppConfig = dppConfig.getCopiedDppConfig(); } } return Pair.create(tmpCluster, dppConfig); } public List<List<String>> fetchProperty() { List<List<String>> result = Lists.newArrayList(); String dot = SetUserPropertyVar.DOT_SEPARATOR; result.add(Lists.newArrayList(PROP_MAX_USER_CONNECTIONS, String.valueOf(commonProperties.getMaxConn()))); result.add(Lists.newArrayList(PROP_MAX_QUERY_INSTANCES, String.valueOf(commonProperties.getMaxQueryInstances()))); result.add(Lists.newArrayList(PROP_SQL_BLOCK_RULES, commonProperties.getSqlBlockRules())); ResourceGroup group = resource.getResource(); for (Map.Entry<ResourceType, Integer> entry : group.getQuotaMap().entrySet()) { result.add(Lists.newArrayList(PROP_RESOURCE + dot + entry.getKey().getDesc().toLowerCase(), entry.getValue().toString())); } Map<String, AtomicInteger> groups = resource.getShareByGroup(); for (Map.Entry<String, AtomicInteger> entry : groups.entrySet()) { result.add(Lists.newArrayList(PROP_QUOTA + dot + entry.getKey(), entry.getValue().toString())); } if (defaultLoadCluster != null) { result.add(Lists.newArrayList(PROP_DEFAULT_LOAD_CLUSTER, defaultLoadCluster)); } else { result.add(Lists.newArrayList(PROP_DEFAULT_LOAD_CLUSTER, "")); } for (Map.Entry<String, DppConfig> entry : clusterToDppConfig.entrySet()) { String cluster = entry.getKey(); DppConfig dppConfig = entry.getValue(); String clusterPrefix = PROP_LOAD_CLUSTER + dot + cluster + dot; if (dppConfig.getPaloPath() != null) { result.add(Lists.newArrayList(clusterPrefix + DppConfig.getPaloPathKey(), dppConfig.getPaloPath())); } result.add(Lists.newArrayList(clusterPrefix + DppConfig.getHttpPortKey(), String.valueOf(dppConfig.getHttpPort()))); if (dppConfig.getHadoopConfigs() != 
null) { List<String> configs = Lists.newArrayList(); for (Map.Entry<String, String> configEntry : dppConfig.getHadoopConfigs().entrySet()) { configs.add(String.format("%s=%s", configEntry.getKey(), configEntry.getValue())); } result.add(Lists.newArrayList(clusterPrefix + DppConfig.getHadoopConfigsKey(), StringUtils.join(configs, ";"))); } result.add(Lists.newArrayList(clusterPrefix + DppConfig.getPriorityKey(), String.valueOf(dppConfig.getPriority()))); } Map<String, Set<String>> resolvedIPs = whiteList.getResolvedIPs(); List<String> ips = Lists.newArrayList(); for (Map.Entry<String, Set<String>> entry : resolvedIPs.entrySet()) { ips.add(entry.getKey() + ":" + Joiner.on(",").join(entry.getValue())); } if (!ips.isEmpty()) { result.add(Lists.newArrayList("resolved IPs", Joiner.on(";").join(ips))); } Collections.sort(result, new Comparator<List<String>>() { @Override public int compare(List<String> o1, List<String> o2) { return o1.get(0).compareTo(o2.get(0)); } }); return result; } public static UserProperty read(DataInput in) throws IOException { UserProperty userProperty = new UserProperty(); userProperty.readFields(in); return userProperty; } @Override public void write(DataOutput out) throws IOException { Text.writeString(out, qualifiedUser); resource.write(out); if (defaultLoadCluster == null) { out.writeBoolean(false); } else { out.writeBoolean(true); Text.writeString(out, defaultLoadCluster); } out.writeInt(clusterToDppConfig.size()); for (Map.Entry<String, DppConfig> entry : clusterToDppConfig.entrySet()) { Text.writeString(out, entry.getKey()); entry.getValue().write(out); } whiteList.write(out); commonProperties.write(out); } public void readFields(DataInput in) throws IOException { if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_43) { in.readBoolean(); } if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_30) { qualifiedUser = ClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER, Text.readString(in)); } else 
{ qualifiedUser = Text.readString(in); } if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_43) { int passwordLen = in.readInt(); byte[] password = new byte[passwordLen]; in.readFully(password); in.readBoolean(); if (Catalog.getCurrentCatalogJournalVersion() >= 1) { in.readBoolean(); } } if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_100) { long maxConn = in.readLong(); this.commonProperties.setMaxConn(maxConn); } if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_43) { Map<String, AccessPrivilege> dbPrivMap = Maps.newHashMap(); int numPriv = in.readInt(); for (int i = 0; i < numPriv; ++i) { String dbName = null; if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_30) { dbName = ClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER, Text.readString(in)); } else { dbName = Text.readString(in); } AccessPrivilege ap = AccessPrivilege.valueOf(Text.readString(in)); dbPrivMap.put(dbName, ap); } } resource = UserResource.readIn(in); if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_12) { if (in.readBoolean()) { defaultLoadCluster = Text.readString(in); } int clusterNum = in.readInt(); for (int i = 0; i < clusterNum; ++i) { String cluster = Text.readString(in); DppConfig dppConfig = new DppConfig(); dppConfig.readFields(in); clusterToDppConfig.put(cluster, dppConfig); } } if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_21) { whiteList.readFields(in); if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_69) { whiteList.convertOldDomainPrivMap(qualifiedUser); } } if (Catalog.getCurrentCatalogJournalVersion() < FeMetaVersion.VERSION_43) { if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_30) { if (in.readBoolean()) { Text.readString(in); } } } if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_100) { this.commonProperties = CommonUserProperties.read(in); } } }
I think this visitor is useful for us for use cases like this. Rather than checking each expression kind (via syntax kind), this is clean. Is it ok if we use it everywhere?
public static Hover getHover(HoverContext context) { Optional<Document> srcFile = context.currentDocument(); Optional<SemanticModel> semanticModel = context.currentSemanticModel(); if (semanticModel.isEmpty() || srcFile.isEmpty()) { return HoverUtil.getDefaultHoverObject(); } Position cursorPosition = context.getCursorPosition(); LinePosition linePosition = LinePosition.from(cursorPosition.getLine(), cursorPosition.getCharacter()); context.checkCancelled(); Optional<Symbol> symbolAtCursor = semanticModel.get().symbol(srcFile.get(), linePosition); context.checkCancelled(); if (symbolAtCursor.isEmpty()) { Range nodeRange = new Range(context.getCursorPosition(), context.getCursorPosition()); NonTerminalNode nodeAtCursor = CommonUtil.findNode(nodeRange, srcFile.get().syntaxTree()); if (nodeAtCursor != null) { MatchedExpressionNodeResolver expressionResolver = new MatchedExpressionNodeResolver(nodeAtCursor); Optional<ExpressionNode> expr = nodeAtCursor.apply(expressionResolver); if (expr.isPresent()) { return getHoverForExpression(context, expr.get()); } } return HoverUtil.getDefaultHoverObject(); } return getHoverForSymbol(symbolAtCursor.get(), context); }
MatchedExpressionNodeResolver expressionResolver = new MatchedExpressionNodeResolver(nodeAtCursor);
public static Hover getHover(HoverContext context) { Optional<Document> srcFile = context.currentDocument(); Optional<SemanticModel> semanticModel = context.currentSemanticModel(); if (semanticModel.isEmpty() || srcFile.isEmpty()) { return HoverUtil.getDefaultHoverObject(); } Position cursorPosition = context.getCursorPosition(); LinePosition linePosition = LinePosition.from(cursorPosition.getLine(), cursorPosition.getCharacter()); context.checkCancelled(); Optional<Symbol> symbolAtCursor = semanticModel.get().symbol(srcFile.get(), linePosition); context.checkCancelled(); if (symbolAtCursor.isEmpty()) { Range nodeRange = new Range(context.getCursorPosition(), context.getCursorPosition()); NonTerminalNode nodeAtCursor = CommonUtil.findNode(nodeRange, srcFile.get().syntaxTree()); if (nodeAtCursor != null) { MatchedExpressionNodeResolver expressionResolver = new MatchedExpressionNodeResolver(nodeAtCursor); Optional<ExpressionNode> expr = nodeAtCursor.apply(expressionResolver); if (expr.isPresent()) { return getHoverForExpression(context, expr.get()); } } return HoverUtil.getDefaultHoverObject(); } return getHoverForSymbol(symbolAtCursor.get(), context); }
class HoverUtil { /** * Get the hover content. * * @param context Hover operation context * @return {@link Hover} Hover content */ private static Hover getHoverForSymbol(Symbol symbol, HoverContext context) { switch (symbol.kind()) { case FUNCTION: return getFunctionHoverMarkupContent((FunctionSymbol) symbol, context); case METHOD: return getFunctionHoverMarkupContent((MethodSymbol) symbol, context); case RESOURCE_METHOD: return getFunctionHoverMarkupContent((ResourceMethodSymbol) symbol, context); case TYPE_DEFINITION: return getTypeDefHoverMarkupContent((TypeDefinitionSymbol) symbol, context); case CLASS: return getClassHoverMarkupContent((ClassSymbol) symbol, context); case OBJECT_FIELD: case RECORD_FIELD: case CONSTANT: case ANNOTATION: case ENUM: case ENUM_MEMBER: case CLASS_FIELD: return getDescriptionOnlyHoverObject(symbol); case VARIABLE: return getVariableHoverMarkupContent((VariableSymbol) symbol); case TYPE: if (symbol instanceof TypeReferenceTypeSymbol) { return getHoverForSymbol(((TypeReferenceTypeSymbol) symbol).definition(), context); } else { return HoverUtil.getDefaultHoverObject(); } default: return HoverUtil.getDefaultHoverObject(); } } /** * Get hover for expression nodes. Note that we are supplying hover for a selected set of expressions. 
* * @param context Context * @param exprNode Expression node * @return Hover */ private static Hover getHoverForExpression(HoverContext context, Node exprNode) { switch (exprNode.kind()) { case IMPLICIT_NEW_EXPRESSION: case EXPLICIT_NEW_EXPRESSION: Optional<TypeSymbol> optionalTypeSymbol = context.currentSemanticModel() .flatMap(semanticModel -> semanticModel.typeOf(exprNode)) .map(CommonUtil::getRawType); if (optionalTypeSymbol.isEmpty()) { break; } TypeSymbol typeSymbol = optionalTypeSymbol.get(); if (typeSymbol.typeKind() == TypeDescKind.UNION) { UnionTypeSymbol unionTypeSymbol = (UnionTypeSymbol) typeSymbol; Optional<TypeSymbol> classTypeSymbol = unionTypeSymbol.memberTypeDescriptors().stream() .map(CommonUtil::getRawType) .filter(member -> member.typeKind() != TypeDescKind.ERROR) .findFirst(); if (classTypeSymbol.isEmpty()) { break; } typeSymbol = classTypeSymbol.get(); } if (typeSymbol instanceof ClassSymbol) { ClassSymbol classSymbol = (ClassSymbol) typeSymbol; if (classSymbol.initMethod().isEmpty()) { break; } MethodSymbol initMethodSymbol = classSymbol.initMethod().get(); return getFunctionHoverMarkupContent(initMethodSymbol, context); } } return getDefaultHoverObject(); } private static Hover getObjectHoverMarkupContent(Documentation documentation, ObjectTypeSymbol classSymbol, HoverContext context) { List<String> hoverContent = new ArrayList<>(); if (documentation.description().isPresent()) { hoverContent.add(documentation.description().get()); } Optional<Package> currentPackage = context.workspace() .project(context.filePath()).map(Project::currentPackage); Optional<Module> currentModule = context.currentModule(); if (currentModule.isPresent() && currentPackage.isPresent()) { Map<String, String> paramsMap = documentation.parameterMap(); if (!paramsMap.isEmpty()) { List<String> params = new ArrayList<>(); params.add(header(3, ContextConstants.FIELD_TITLE) + CommonUtil.MD_LINE_SEPARATOR); params.addAll(classSymbol.fieldDescriptors().entrySet().stream() 
.filter(fieldEntry -> withValidAccessModifiers( fieldEntry.getValue(), currentPackage.get(), currentModule.get().moduleId(), context)) .map(fieldEntry -> { String desc = paramsMap.get(fieldEntry.getKey()); String modifiedTypeName = CommonUtil.getModifiedTypeName(context, fieldEntry.getValue().typeDescriptor()); return quotedString(modifiedTypeName) + " " + italicString(boldString(fieldEntry.getKey())) + " : " + desc; }).collect(Collectors.toList())); if (params.size() > 1) { hoverContent.add(String.join(CommonUtil.MD_LINE_SEPARATOR, params)); } } List<String> methods = new ArrayList<>(); classSymbol.methods().entrySet().stream() .filter(methodSymbol -> withValidAccessModifiers(methodSymbol.getValue(), currentPackage.get(), currentModule.get().moduleId(), context)) .forEach(methodEntry -> { MethodSymbol methodSymbol = methodEntry.getValue(); StringBuilder methodInfo = new StringBuilder(); Optional<Documentation> methodDoc = methodSymbol.documentation(); String signature = CommonUtil.getModifiedSignature(context, methodSymbol.signature()); methodInfo.append(quotedString(signature)); if (methodDoc.isPresent() && methodDoc.get().description().isPresent()) { methodInfo.append(CommonUtil.MD_LINE_SEPARATOR).append(methodDoc.get().description().get()); } methods.add(bulletItem(methodInfo.toString())); }); if (!methods.isEmpty()) { methods.add(0, header(3, ContextConstants.METHOD_TITLE) + CommonUtil.MD_LINE_SEPARATOR); hoverContent.add(String.join(CommonUtil.MD_LINE_SEPARATOR, methods)); } } Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(hoverContent.stream().collect(Collectors.joining(getHorizontalSeparator()))); hover.setContents(hoverMarkupContent); return hover; } private static Hover getTypeDefHoverMarkupContent(TypeDefinitionSymbol symbol, HoverContext context) { TypeSymbol rawType = CommonUtil.getRawType(symbol.typeDescriptor()); 
Optional<Documentation> documentation = symbol.documentation(); if (documentation.isEmpty()) { return getDefaultHoverObject(); } if (rawType.typeKind() == TypeDescKind.RECORD) { return getRecordTypeHoverContent(documentation.get(), (RecordTypeSymbol) rawType, context); } if (rawType.typeKind() == TypeDescKind.OBJECT) { return getObjectHoverMarkupContent(documentation.get(), (ObjectTypeSymbol) rawType, context); } return getDescriptionOnlyHoverObject(documentation.get()); } private static Hover getClassHoverMarkupContent(ClassSymbol symbol, HoverContext context) { Optional<Documentation> documentation = symbol.documentation(); if (documentation.isEmpty()) { return getDefaultHoverObject(); } return getObjectHoverMarkupContent(documentation.get(), symbol, context); } private static Hover getRecordTypeHoverContent(Documentation documentation, RecordTypeSymbol recordType, HoverContext ctx) { List<String> hoverContent = new ArrayList<>(); if (documentation.description().isPresent()) { hoverContent.add(documentation.description().get()); } Map<String, String> paramsMap = documentation.parameterMap(); if (!paramsMap.isEmpty()) { List<String> params = new ArrayList<>(); params.add(header(3, ContextConstants.FIELD_TITLE) + CommonUtil.MD_LINE_SEPARATOR); params.addAll(recordType.fieldDescriptors().entrySet().stream() .map(fieldEntry -> { String desc = paramsMap.get(fieldEntry.getKey()); String typeName = CommonUtil.getModifiedTypeName(ctx, fieldEntry.getValue().typeDescriptor()); return quotedString(typeName) + " " + italicString(boldString(fieldEntry.getKey())) + " : " + desc; }).collect(Collectors.toList())); Optional<TypeSymbol> restTypeDesc = recordType.restTypeDescriptor(); restTypeDesc.ifPresent(typeSymbol -> params.add(quotedString(CommonUtil.getModifiedTypeName(ctx, typeSymbol) + "..."))); hoverContent.add(String.join(CommonUtil.MD_LINE_SEPARATOR, params)); } Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); 
hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(hoverContent.stream().collect(Collectors.joining(getHorizontalSeparator()))); hover.setContents(hoverMarkupContent); return hover; } /** * Get the default hover object. * * @return {@link Hover} hover default hover object. */ public static Hover getDefaultHoverObject() { Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(""); hover.setContents(hoverMarkupContent); return hover; } /** * Get the description only hover object. * * @return {@link Hover} */ private static Hover getDescriptionOnlyHoverObject(Documentation documentation) { String description = ""; if (documentation.description().isPresent()) { description = documentation.description().get(); } Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(description); hover.setContents(hoverMarkupContent); return hover; } /** * Get the description only hover object. 
* * @return {@link Hover} */ public static Hover getDescriptionOnlyHoverObject(Symbol symbol) { if (!(symbol instanceof Documentable) || ((Documentable) symbol).documentation().isEmpty()) { return getDefaultHoverObject(); } return getDescriptionOnlyHoverObject(((Documentable) symbol).documentation().get()); } private static Hover getFunctionHoverMarkupContent(FunctionSymbol symbol, HoverContext ctx) { Optional<Documentation> documentation = symbol.documentation(); if (documentation.isEmpty()) { return getDefaultHoverObject(); } List<String> hoverContent = new ArrayList<>(); if (documentation.get().description().isPresent()) { hoverContent.add(documentation.get().description().get()); } Map<String, String> paramsMap = documentation.get().parameterMap(); if (!paramsMap.isEmpty()) { List<String> params = new ArrayList<>(); params.add(header(3, ContextConstants.PARAM_TITLE) + CommonUtil.MD_LINE_SEPARATOR); params.addAll(symbol.typeDescriptor().params().get().stream() .map(param -> { if (param.getName().isEmpty()) { return quotedString(CommonUtil.getModifiedTypeName(ctx, param.typeDescriptor())); } String paramName = param.getName().get(); String desc = paramsMap.get(paramName); return quotedString(CommonUtil.getModifiedTypeName(ctx, param.typeDescriptor())) + " " + italicString(boldString(paramName)) + " : " + desc; }).collect(Collectors.toList())); Optional<ParameterSymbol> restParam = symbol.typeDescriptor().restParam(); if (restParam.isPresent()) { String modifiedTypeName = CommonUtil.getModifiedTypeName(ctx, restParam.get().typeDescriptor()); StringBuilder restParamBuilder = new StringBuilder(quotedString(modifiedTypeName + "...")); if (restParam.get().getName().isPresent()) { restParamBuilder.append(" ").append(italicString(boldString(restParam.get().getName().get()))) .append(" : ").append(paramsMap.get(restParam.get().getName().get())); } params.add(restParamBuilder.toString()); } hoverContent.add(String.join(CommonUtil.MD_LINE_SEPARATOR, params)); } if 
(documentation.get().returnDescription().isPresent()) { TypeSymbol returnTypeDesc = symbol.typeDescriptor().returnTypeDescriptor().orElseThrow(); String returnTypeName = quotedString(CommonUtil.getModifiedTypeName(ctx, returnTypeDesc)); String returnDoc = header(3, ContextConstants.RETURN_TITLE) + CommonUtil.MD_LINE_SEPARATOR + returnTypeName + " : " + documentation.get().returnDescription().get(); hoverContent.add(returnDoc); } Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(hoverContent.stream().collect(Collectors.joining(getHorizontalSeparator()))); hover.setContents(hoverMarkupContent); return hover; } private static Hover getVariableHoverMarkupContent(VariableSymbol symbol) { Optional<Documentation> documentation = symbol.documentation(); List<String> hoverContent = new ArrayList<>(); if (documentation.isPresent() && documentation.get().description().isPresent()) { hoverContent.add(documentation.get().description().get()); } TypeSymbol varTypeSymbol = symbol.typeDescriptor(); String type = varTypeSymbol.signature(); String varName = symbol.getName().isPresent() ? 
" " + symbol.getName().get() : ""; String modifiedVariable = quotedString(type) + CommonUtil.escapeEscapeCharsInIdentifier(varName); hoverContent.add(modifiedVariable); Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(hoverContent.stream().collect(Collectors.joining(getHorizontalSeparator()))); hover.setContents(hoverMarkupContent); return hover; } private static String getHorizontalSeparator() { return CommonUtil.MD_LINE_SEPARATOR + CommonUtil.MD_LINE_SEPARATOR + "---" + CommonUtil.MD_LINE_SEPARATOR + CommonUtil.MD_LINE_SEPARATOR; } private static String quotedString(String value) { return "`" + value.trim() + "`"; } private static String boldString(String value) { return "**" + value.trim() + "**"; } private static String italicString(String value) { return "*" + value.trim() + "*"; } private static String bulletItem(String value) { return "+ " + value.trim() + CommonUtil.MD_LINE_SEPARATOR; } private static String header(int level, String header) { return String.join("", Collections.nCopies(level, " } /** * Check if a given symbol has valid access modifiers to be visible with in the give context. * * @param symbol Symbol. * @param currentPackage Current Package. * @param currentModule Current Module. * @return {@link Boolean} Whether the symbol is visible in the current context. 
*/ private static Boolean withValidAccessModifiers(Symbol symbol, Package currentPackage, ModuleId currentModule, HoverContext context) { Optional<Project> project = context.workspace().project(context.filePath()); Optional<ModuleSymbol> typeSymbolModule = symbol.getModule(); if (project.isEmpty() || typeSymbolModule.isEmpty()) { return false; } boolean isResource = false; boolean isPrivate = false; boolean isPublic = false; boolean isRemote = false; if (symbol instanceof Qualifiable) { Qualifiable qSymbol = (Qualifiable) symbol; isPrivate = qSymbol.qualifiers().contains(Qualifier.PRIVATE); isPublic = qSymbol.qualifiers().contains(Qualifier.PUBLIC); isResource = qSymbol.qualifiers().contains(Qualifier.RESOURCE); isRemote = qSymbol.qualifiers().contains(Qualifier.REMOTE); } if (isResource || isRemote || isPublic) { return true; } ModuleID objModuleId = typeSymbolModule.get().id(); return (!isPrivate && objModuleId.moduleName().equals(currentModule.moduleName()) && objModuleId.orgName().equals(currentPackage.packageOrg().value())); } }
class HoverUtil { /** * Get the hover content. * * @param context Hover operation context * @return {@link Hover} Hover content */ private static Hover getHoverForSymbol(Symbol symbol, HoverContext context) { switch (symbol.kind()) { case FUNCTION: return getFunctionHoverMarkupContent((FunctionSymbol) symbol, context); case METHOD: return getFunctionHoverMarkupContent((MethodSymbol) symbol, context); case RESOURCE_METHOD: return getFunctionHoverMarkupContent((ResourceMethodSymbol) symbol, context); case TYPE_DEFINITION: return getTypeDefHoverMarkupContent((TypeDefinitionSymbol) symbol, context); case CLASS: return getClassHoverMarkupContent((ClassSymbol) symbol, context); case OBJECT_FIELD: case RECORD_FIELD: case CONSTANT: case ANNOTATION: case ENUM: case ENUM_MEMBER: case CLASS_FIELD: return getDescriptionOnlyHoverObject(symbol); case VARIABLE: return getVariableHoverMarkupContent((VariableSymbol) symbol); case TYPE: if (symbol instanceof TypeReferenceTypeSymbol) { return getHoverForSymbol(((TypeReferenceTypeSymbol) symbol).definition(), context); } else { return HoverUtil.getDefaultHoverObject(); } default: return HoverUtil.getDefaultHoverObject(); } } /** * Get hover for expression nodes. Note that we are supplying hover for a selected set of expressions. 
* * @param context Context * @param exprNode Expression node * @return Hover */ private static Hover getHoverForExpression(HoverContext context, Node exprNode) { switch (exprNode.kind()) { case IMPLICIT_NEW_EXPRESSION: case EXPLICIT_NEW_EXPRESSION: Optional<TypeSymbol> optionalTypeSymbol = context.currentSemanticModel() .flatMap(semanticModel -> semanticModel.typeOf(exprNode)) .map(CommonUtil::getRawType); if (optionalTypeSymbol.isEmpty()) { break; } TypeSymbol typeSymbol = optionalTypeSymbol.get(); if (typeSymbol.typeKind() == TypeDescKind.UNION) { UnionTypeSymbol unionTypeSymbol = (UnionTypeSymbol) typeSymbol; Optional<TypeSymbol> classTypeSymbol = unionTypeSymbol.memberTypeDescriptors().stream() .map(CommonUtil::getRawType) .filter(member -> member.typeKind() != TypeDescKind.ERROR) .findFirst(); if (classTypeSymbol.isEmpty()) { break; } typeSymbol = classTypeSymbol.get(); } if (typeSymbol instanceof ClassSymbol) { ClassSymbol classSymbol = (ClassSymbol) typeSymbol; if (classSymbol.initMethod().isEmpty()) { break; } MethodSymbol initMethodSymbol = classSymbol.initMethod().get(); return getFunctionHoverMarkupContent(initMethodSymbol, context); } } return getDefaultHoverObject(); } private static Hover getObjectHoverMarkupContent(Documentation documentation, ObjectTypeSymbol classSymbol, HoverContext context) { List<String> hoverContent = new ArrayList<>(); if (documentation.description().isPresent()) { hoverContent.add(documentation.description().get()); } Optional<Package> currentPackage = context.workspace() .project(context.filePath()).map(Project::currentPackage); Optional<Module> currentModule = context.currentModule(); if (currentModule.isPresent() && currentPackage.isPresent()) { Map<String, String> paramsMap = documentation.parameterMap(); if (!paramsMap.isEmpty()) { List<String> params = new ArrayList<>(); params.add(header(3, ContextConstants.FIELD_TITLE) + CommonUtil.MD_LINE_SEPARATOR); params.addAll(classSymbol.fieldDescriptors().entrySet().stream() 
.filter(fieldEntry -> withValidAccessModifiers( fieldEntry.getValue(), currentPackage.get(), currentModule.get().moduleId(), context)) .map(fieldEntry -> { String desc = paramsMap.get(fieldEntry.getKey()); String modifiedTypeName = CommonUtil.getModifiedTypeName(context, fieldEntry.getValue().typeDescriptor()); return quotedString(modifiedTypeName) + " " + italicString(boldString(fieldEntry.getKey())) + " : " + desc; }).collect(Collectors.toList())); if (params.size() > 1) { hoverContent.add(String.join(CommonUtil.MD_LINE_SEPARATOR, params)); } } List<String> methods = new ArrayList<>(); classSymbol.methods().entrySet().stream() .filter(methodSymbol -> withValidAccessModifiers(methodSymbol.getValue(), currentPackage.get(), currentModule.get().moduleId(), context)) .forEach(methodEntry -> { MethodSymbol methodSymbol = methodEntry.getValue(); StringBuilder methodInfo = new StringBuilder(); Optional<Documentation> methodDoc = methodSymbol.documentation(); String signature = CommonUtil.getModifiedSignature(context, methodSymbol.signature()); methodInfo.append(quotedString(signature)); if (methodDoc.isPresent() && methodDoc.get().description().isPresent()) { methodInfo.append(CommonUtil.MD_LINE_SEPARATOR).append(methodDoc.get().description().get()); } methods.add(bulletItem(methodInfo.toString())); }); if (!methods.isEmpty()) { methods.add(0, header(3, ContextConstants.METHOD_TITLE) + CommonUtil.MD_LINE_SEPARATOR); hoverContent.add(String.join(CommonUtil.MD_LINE_SEPARATOR, methods)); } } Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(hoverContent.stream().collect(Collectors.joining(getHorizontalSeparator()))); hover.setContents(hoverMarkupContent); return hover; } private static Hover getTypeDefHoverMarkupContent(TypeDefinitionSymbol symbol, HoverContext context) { TypeSymbol rawType = CommonUtil.getRawType(symbol.typeDescriptor()); 
Optional<Documentation> documentation = symbol.documentation(); if (documentation.isEmpty()) { return getDefaultHoverObject(); } if (rawType.typeKind() == TypeDescKind.RECORD) { return getRecordTypeHoverContent(documentation.get(), (RecordTypeSymbol) rawType, context); } if (rawType.typeKind() == TypeDescKind.OBJECT) { return getObjectHoverMarkupContent(documentation.get(), (ObjectTypeSymbol) rawType, context); } return getDescriptionOnlyHoverObject(documentation.get()); } private static Hover getClassHoverMarkupContent(ClassSymbol symbol, HoverContext context) { Optional<Documentation> documentation = symbol.documentation(); if (documentation.isEmpty()) { return getDefaultHoverObject(); } return getObjectHoverMarkupContent(documentation.get(), symbol, context); } private static Hover getRecordTypeHoverContent(Documentation documentation, RecordTypeSymbol recordType, HoverContext ctx) { List<String> hoverContent = new ArrayList<>(); if (documentation.description().isPresent()) { hoverContent.add(documentation.description().get()); } Map<String, String> paramsMap = documentation.parameterMap(); if (!paramsMap.isEmpty()) { List<String> params = new ArrayList<>(); params.add(header(3, ContextConstants.FIELD_TITLE) + CommonUtil.MD_LINE_SEPARATOR); params.addAll(recordType.fieldDescriptors().entrySet().stream() .map(fieldEntry -> { String desc = paramsMap.get(fieldEntry.getKey()); String typeName = CommonUtil.getModifiedTypeName(ctx, fieldEntry.getValue().typeDescriptor()); return quotedString(typeName) + " " + italicString(boldString(fieldEntry.getKey())) + " : " + desc; }).collect(Collectors.toList())); Optional<TypeSymbol> restTypeDesc = recordType.restTypeDescriptor(); restTypeDesc.ifPresent(typeSymbol -> params.add(quotedString(CommonUtil.getModifiedTypeName(ctx, typeSymbol) + "..."))); hoverContent.add(String.join(CommonUtil.MD_LINE_SEPARATOR, params)); } Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); 
hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(hoverContent.stream().collect(Collectors.joining(getHorizontalSeparator()))); hover.setContents(hoverMarkupContent); return hover; } /** * Get the default hover object. * * @return {@link Hover} hover default hover object. */ public static Hover getDefaultHoverObject() { Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(""); hover.setContents(hoverMarkupContent); return hover; } /** * Get the description only hover object. * * @return {@link Hover} */ private static Hover getDescriptionOnlyHoverObject(Documentation documentation) { String description = ""; if (documentation.description().isPresent()) { description = documentation.description().get(); } Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(description); hover.setContents(hoverMarkupContent); return hover; } /** * Get the description only hover object. 
* * @return {@link Hover} */ public static Hover getDescriptionOnlyHoverObject(Symbol symbol) { if (!(symbol instanceof Documentable) || ((Documentable) symbol).documentation().isEmpty()) { return getDefaultHoverObject(); } return getDescriptionOnlyHoverObject(((Documentable) symbol).documentation().get()); } private static Hover getFunctionHoverMarkupContent(FunctionSymbol symbol, HoverContext ctx) { Optional<Documentation> documentation = symbol.documentation(); if (documentation.isEmpty()) { return getDefaultHoverObject(); } List<String> hoverContent = new ArrayList<>(); if (documentation.get().description().isPresent()) { hoverContent.add(documentation.get().description().get()); } Map<String, String> paramsMap = documentation.get().parameterMap(); if (!paramsMap.isEmpty()) { List<String> params = new ArrayList<>(); params.add(header(3, ContextConstants.PARAM_TITLE) + CommonUtil.MD_LINE_SEPARATOR); params.addAll(symbol.typeDescriptor().params().get().stream() .map(param -> { if (param.getName().isEmpty()) { return quotedString(CommonUtil.getModifiedTypeName(ctx, param.typeDescriptor())); } String paramName = param.getName().get(); String desc = paramsMap.get(paramName); return quotedString(CommonUtil.getModifiedTypeName(ctx, param.typeDescriptor())) + " " + italicString(boldString(paramName)) + " : " + desc; }).collect(Collectors.toList())); Optional<ParameterSymbol> restParam = symbol.typeDescriptor().restParam(); if (restParam.isPresent()) { String modifiedTypeName = CommonUtil.getModifiedTypeName(ctx, restParam.get().typeDescriptor()); StringBuilder restParamBuilder = new StringBuilder(quotedString(modifiedTypeName + "...")); if (restParam.get().getName().isPresent()) { restParamBuilder.append(" ").append(italicString(boldString(restParam.get().getName().get()))) .append(" : ").append(paramsMap.get(restParam.get().getName().get())); } params.add(restParamBuilder.toString()); } hoverContent.add(String.join(CommonUtil.MD_LINE_SEPARATOR, params)); } if 
(documentation.get().returnDescription().isPresent()) { TypeSymbol returnTypeDesc = symbol.typeDescriptor().returnTypeDescriptor().orElseThrow(); String returnTypeName = quotedString(CommonUtil.getModifiedTypeName(ctx, returnTypeDesc)); String returnDoc = header(3, ContextConstants.RETURN_TITLE) + CommonUtil.MD_LINE_SEPARATOR + returnTypeName + " : " + documentation.get().returnDescription().get(); hoverContent.add(returnDoc); } Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(hoverContent.stream().collect(Collectors.joining(getHorizontalSeparator()))); hover.setContents(hoverMarkupContent); return hover; } private static Hover getVariableHoverMarkupContent(VariableSymbol symbol) { Optional<Documentation> documentation = symbol.documentation(); List<String> hoverContent = new ArrayList<>(); if (documentation.isPresent() && documentation.get().description().isPresent()) { hoverContent.add(documentation.get().description().get()); } TypeSymbol varTypeSymbol = symbol.typeDescriptor(); String type = varTypeSymbol.signature(); String varName = symbol.getName().isPresent() ? 
" " + symbol.getName().get() : ""; String modifiedVariable = quotedString(type) + CommonUtil.escapeEscapeCharsInIdentifier(varName); hoverContent.add(modifiedVariable); Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(hoverContent.stream().collect(Collectors.joining(getHorizontalSeparator()))); hover.setContents(hoverMarkupContent); return hover; } private static String getHorizontalSeparator() { return CommonUtil.MD_LINE_SEPARATOR + CommonUtil.MD_LINE_SEPARATOR + "---" + CommonUtil.MD_LINE_SEPARATOR + CommonUtil.MD_LINE_SEPARATOR; } private static String quotedString(String value) { return "`" + value.trim() + "`"; } private static String boldString(String value) { return "**" + value.trim() + "**"; } private static String italicString(String value) { return "*" + value.trim() + "*"; } private static String bulletItem(String value) { return "+ " + value.trim() + CommonUtil.MD_LINE_SEPARATOR; } private static String header(int level, String header) { return String.join("", Collections.nCopies(level, " } /** * Check if a given symbol has valid access modifiers to be visible with in the give context. * * @param symbol Symbol. * @param currentPackage Current Package. * @param currentModule Current Module. * @return {@link Boolean} Whether the symbol is visible in the current context. 
*/ private static Boolean withValidAccessModifiers(Symbol symbol, Package currentPackage, ModuleId currentModule, HoverContext context) { Optional<Project> project = context.workspace().project(context.filePath()); Optional<ModuleSymbol> typeSymbolModule = symbol.getModule(); if (project.isEmpty() || typeSymbolModule.isEmpty()) { return false; } boolean isResource = false; boolean isPrivate = false; boolean isPublic = false; boolean isRemote = false; if (symbol instanceof Qualifiable) { Qualifiable qSymbol = (Qualifiable) symbol; isPrivate = qSymbol.qualifiers().contains(Qualifier.PRIVATE); isPublic = qSymbol.qualifiers().contains(Qualifier.PUBLIC); isResource = qSymbol.qualifiers().contains(Qualifier.RESOURCE); isRemote = qSymbol.qualifiers().contains(Qualifier.REMOTE); } if (isResource || isRemote || isPublic) { return true; } ModuleID objModuleId = typeSymbolModule.get().id(); return (!isPrivate && objModuleId.moduleName().equals(currentModule.moduleName()) && objModuleId.orgName().equals(currentPackage.packageOrg().value())); } }
Following is from the test case we have added. `BAssertUtil.validateError(resultNeg, i++, "invalid escape sequence '\\\u0000'", 4, 9);` That string is displayed as below in IntelliJ ![Screenshot from 2021-09-13 14-43-53](https://user-images.githubusercontent.com/39232462/133057806-a9dcd175-e46a-4cb7-8d77-616bbc690f1d.png)
private void processIdentifierEnd() { while (!reader.isEOF()) { int nextChar = reader.peek(); if (isIdentifierFollowingChar(nextChar)) { reader.advance(); continue; } if (nextChar != LexerTerminals.BACKSLASH) { break; } nextChar = reader.peek(1); switch (nextChar) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.TAB: reader.advance(); reportInvalidEscapeSequence((char) 0); break; case 'u': if (reader.peek(2) == LexerTerminals.OPEN_BRACE) { processNumericEscape(); } else { reader.advance(2); } continue; default: if (!isValidQuotedIdentifierEscapeChar(nextChar)) { reportInvalidEscapeSequence((char) nextChar); } reader.advance(2); continue; } break; } }
reportInvalidEscapeSequence((char) 0);
private void processIdentifierEnd() { while (!reader.isEOF()) { int nextChar = reader.peek(); if (isIdentifierFollowingChar(nextChar)) { reader.advance(); continue; } if (nextChar != LexerTerminals.BACKSLASH) { break; } nextChar = reader.peek(1); switch (nextChar) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.TAB: reader.advance(); reportLexerError(DiagnosticErrorCode.ERROR_INVALID_ESCAPE_SEQUENCE, ""); break; case 'u': if (reader.peek(2) == LexerTerminals.OPEN_BRACE) { processNumericEscape(); } else { reader.advance(2); } continue; default: if (!isValidQuotedIdentifierEscapeChar(nextChar)) { reportInvalidEscapeSequence((char) nextChar); } reader.advance(2); continue; } break; } }
class BallerinaLexer extends AbstractLexer { public BallerinaLexer(CharReader charReader) { super(charReader, ParserMode.DEFAULT); } /** * Get the next lexical token. * * @return Next lexical token. */ public STToken nextToken() { STToken token; switch (this.mode) { case TEMPLATE: token = readTemplateToken(); break; case INTERPOLATION: processLeadingTrivia(); token = readTokenInInterpolation(); break; case INTERPOLATION_BRACED_CONTENT: processLeadingTrivia(); token = readTokenInBracedContentInInterpolation(); break; case DEFAULT: case IMPORT: default: processLeadingTrivia(); token = readToken(); } return cloneWithDiagnostics(token); } /* * Private Methods */ private STToken readToken() { reader.mark(); if (reader.isEOF()) { return getSyntaxToken(SyntaxKind.EOF_TOKEN); } int c = reader.peek(); if (c == LexerTerminals.BACKSLASH) { processUnquotedIdentifier(); return getIdentifierToken(); } reader.advance(); STToken token; switch (c) { case LexerTerminals.COLON: token = getSyntaxToken(SyntaxKind.COLON_TOKEN); break; case LexerTerminals.SEMICOLON: token = getSyntaxToken(SyntaxKind.SEMICOLON_TOKEN); break; case LexerTerminals.DOT: token = processDot(); break; case LexerTerminals.COMMA: token = getSyntaxToken(SyntaxKind.COMMA_TOKEN); break; case LexerTerminals.OPEN_PARANTHESIS: token = getSyntaxToken(SyntaxKind.OPEN_PAREN_TOKEN); break; case LexerTerminals.CLOSE_PARANTHESIS: token = getSyntaxToken(SyntaxKind.CLOSE_PAREN_TOKEN); break; case LexerTerminals.OPEN_BRACE: if (peek() == LexerTerminals.PIPE) { reader.advance(); token = getSyntaxToken(SyntaxKind.OPEN_BRACE_PIPE_TOKEN); } else { token = getSyntaxToken(SyntaxKind.OPEN_BRACE_TOKEN); } break; case LexerTerminals.CLOSE_BRACE: token = getSyntaxToken(SyntaxKind.CLOSE_BRACE_TOKEN); break; case LexerTerminals.OPEN_BRACKET: token = getSyntaxToken(SyntaxKind.OPEN_BRACKET_TOKEN); break; case LexerTerminals.CLOSE_BRACKET: token = getSyntaxToken(SyntaxKind.CLOSE_BRACKET_TOKEN); break; case LexerTerminals.PIPE: token = 
processPipeOperator(); break; case LexerTerminals.QUESTION_MARK: if (peek() == LexerTerminals.DOT && reader.peek(1) != LexerTerminals.DOT) { reader.advance(); token = getSyntaxToken(SyntaxKind.OPTIONAL_CHAINING_TOKEN); } else if (peek() == LexerTerminals.COLON) { reader.advance(); token = getSyntaxToken(SyntaxKind.ELVIS_TOKEN); } else { token = getSyntaxToken(SyntaxKind.QUESTION_MARK_TOKEN); } break; case LexerTerminals.DOUBLE_QUOTE: token = processStringLiteral(); break; case LexerTerminals.HASH: token = processDocumentationString(); break; case LexerTerminals.AT: token = getSyntaxToken(SyntaxKind.AT_TOKEN); break; case LexerTerminals.EQUAL: token = processEqualOperator(); break; case LexerTerminals.PLUS: token = getSyntaxToken(SyntaxKind.PLUS_TOKEN); break; case LexerTerminals.MINUS: if (reader.peek() == LexerTerminals.GT) { reader.advance(); if (peek() == LexerTerminals.GT) { reader.advance(); token = getSyntaxToken(SyntaxKind.SYNC_SEND_TOKEN); } else { token = getSyntaxToken(SyntaxKind.RIGHT_ARROW_TOKEN); } } else { token = getSyntaxToken(SyntaxKind.MINUS_TOKEN); } break; case LexerTerminals.ASTERISK: token = getSyntaxToken(SyntaxKind.ASTERISK_TOKEN); break; case LexerTerminals.SLASH: token = processSlashToken(); break; case LexerTerminals.PERCENT: token = getSyntaxToken(SyntaxKind.PERCENT_TOKEN); break; case LexerTerminals.LT: token = processTokenStartWithLt(); break; case LexerTerminals.GT: token = processTokenStartWithGt(); break; case LexerTerminals.EXCLAMATION_MARK: token = processExclamationMarkOperator(); break; case LexerTerminals.BITWISE_AND: if (peek() == LexerTerminals.BITWISE_AND) { reader.advance(); token = getSyntaxToken(SyntaxKind.LOGICAL_AND_TOKEN); } else { token = getSyntaxToken(SyntaxKind.BITWISE_AND_TOKEN); } break; case LexerTerminals.BITWISE_XOR: token = getSyntaxToken(SyntaxKind.BITWISE_XOR_TOKEN); break; case LexerTerminals.NEGATION: token = getSyntaxToken(SyntaxKind.NEGATION_TOKEN); break; case LexerTerminals.BACKTICK: 
startMode(ParserMode.TEMPLATE); token = getBacktickToken(); break; case LexerTerminals.SINGLE_QUOTE: token = processQuotedIdentifier(); break; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': token = processNumericLiteral(c); break; default: if (isIdentifierInitialChar(c)) { token = processIdentifierOrKeyword(); break; } STToken invalidToken = processInvalidToken(); token = nextToken(); token = SyntaxErrors.addDiagnostic(token, DiagnosticErrorCode.ERROR_INVALID_TOKEN, invalidToken); break; } return token; } private STToken getSyntaxToken(SyntaxKind kind) { STNode leadingTrivia = getLeadingTrivia(); STNode trailingTrivia = processTrailingTrivia(); return STNodeFactory.createToken(kind, leadingTrivia, trailingTrivia); } private STToken getIdentifierToken() { STNode leadingTrivia = getLeadingTrivia(); String lexeme = getLexeme(); STNode trailingTrivia = processTrailingTrivia(); return STNodeFactory.createIdentifierToken(lexeme, leadingTrivia, trailingTrivia); } private STToken getLiteral(SyntaxKind kind) { STNode leadingTrivia = getLeadingTrivia(); String lexeme = getLexeme(); STNode trailingTrivia = processTrailingTrivia(); return STNodeFactory.createLiteralValueToken(kind, lexeme, leadingTrivia, trailingTrivia); } /** * Process leading trivia. */ private void processLeadingTrivia() { processSyntaxTrivia(this.leadingTriviaList, true); } /** * Process and return trailing trivia. * * @return Trailing trivia */ private STNode processTrailingTrivia() { List<STNode> triviaList = new ArrayList<>(INITIAL_TRIVIA_CAPACITY); processSyntaxTrivia(triviaList, false); return STNodeFactory.createNodeList(triviaList); } /** * Process syntax trivia and add it to the provided list. 
* <p> * <code>syntax-trivia := whitespace | end-of-line | comments</code> * * @param triviaList List of trivia * @param isLeading Flag indicating whether the currently processing leading trivia or not */ private void processSyntaxTrivia(List<STNode> triviaList, boolean isLeading) { while (!reader.isEOF()) { reader.mark(); char c = reader.peek(); switch (c) { case LexerTerminals.SPACE: case LexerTerminals.TAB: case LexerTerminals.FORM_FEED: triviaList.add(processWhitespaces()); break; case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.NEWLINE: triviaList.add(processEndOfLine()); if (isLeading) { break; } return; case LexerTerminals.SLASH: if (reader.peek(1) == LexerTerminals.SLASH) { triviaList.add(processComment()); break; } return; default: return; } } } /** * Process whitespace up to an end of line. * <p> * <code>whitespace := 0x9 | 0xC | 0x20</code> * * @return Whitespace trivia */ private STNode processWhitespaces() { while (!reader.isEOF()) { char c = reader.peek(); switch (c) { case LexerTerminals.SPACE: case LexerTerminals.TAB: case LexerTerminals.FORM_FEED: reader.advance(); continue; case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.NEWLINE: break; default: break; } break; } return STNodeFactory.createMinutiae(SyntaxKind.WHITESPACE_MINUTIAE, getLexeme()); } /** * Process end of line. * <p> * <code>end-of-line := 0xA | 0xD</code> * * @return End of line trivia */ private STNode processEndOfLine() { char c = reader.peek(); switch (c) { case LexerTerminals.NEWLINE: reader.advance(); return STNodeFactory.createMinutiae(SyntaxKind.END_OF_LINE_MINUTIAE, getLexeme()); case LexerTerminals.CARRIAGE_RETURN: reader.advance(); if (reader.peek() == LexerTerminals.NEWLINE) { reader.advance(); } return STNodeFactory.createMinutiae(SyntaxKind.END_OF_LINE_MINUTIAE, getLexeme()); default: throw new IllegalStateException(); } } /** * Process dot, ellipsis or decimal floating point token. 
* * @return Dot, ellipsis or decimal floating point token */ private STToken processDot() { int nextChar = reader.peek(); if (nextChar == LexerTerminals.DOT) { int nextNextChar = reader.peek(1); if (nextNextChar == LexerTerminals.DOT) { reader.advance(2); return getSyntaxToken(SyntaxKind.ELLIPSIS_TOKEN); } else if (nextNextChar == LexerTerminals.LT) { reader.advance(2); return getSyntaxToken(SyntaxKind.DOUBLE_DOT_LT_TOKEN); } } else if (nextChar == LexerTerminals.AT) { reader.advance(); return getSyntaxToken(SyntaxKind.ANNOT_CHAINING_TOKEN); } else if (nextChar == LexerTerminals.LT) { reader.advance(); return getSyntaxToken(SyntaxKind.DOT_LT_TOKEN); } if (this.mode != ParserMode.IMPORT && isDigit(nextChar)) { return processDecimalFloatLiteral(); } return getSyntaxToken(SyntaxKind.DOT_TOKEN); } /** * <p> * Process a comment, and add it to trivia list. * </p> * <code> * Comment := * <br/><br/> * AnyCharButNewline := ^ 0xA * </code> */ private STNode processComment() { reader.advance(2); int nextToken = peek(); while (!reader.isEOF()) { switch (nextToken) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: break; default: reader.advance(); nextToken = peek(); continue; } break; } return STNodeFactory.createMinutiae(SyntaxKind.COMMENT_MINUTIAE, getLexeme()); } /** * Process any token that starts with '='. * * @return One of the tokens: <code>'=', '==', '=>', '==='</code> */ private STToken processEqualOperator() { switch (peek()) { case LexerTerminals.EQUAL: reader.advance(); if (peek() == LexerTerminals.EQUAL) { reader.advance(); return getSyntaxToken(SyntaxKind.TRIPPLE_EQUAL_TOKEN); } else { return getSyntaxToken(SyntaxKind.DOUBLE_EQUAL_TOKEN); } case LexerTerminals.GT: reader.advance(); return getSyntaxToken(SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN); default: return getSyntaxToken(SyntaxKind.EQUAL_TOKEN); } } /** * <p> * Process and returns a numeric literal. 
* </p> * <code> * numeric-literal := int-literal | floating-point-literal * <br/> * floating-point-literal := DecimalFloatingPointNumber | HexFloatingPointLiteral * <br/> * int-literal := DecimalNumber | HexIntLiteral * <br/> * DecimalNumber := 0 | NonZeroDigit Digit* * <br/> * Digit := 0 .. 9 * <br/> * NonZeroDigit := 1 .. 9 * </code> * * @return The numeric literal. */ private STToken processNumericLiteral(int startChar) { int nextChar = peek(); if (isHexIndicator(startChar, nextChar)) { return processHexLiteral(); } int len = 1; while (!reader.isEOF()) { switch (nextChar) { case LexerTerminals.DOT: case 'e': case 'E': case 'f': case 'F': case 'd': case 'D': char nextNextChar = reader.peek(1); if (nextNextChar == LexerTerminals.DOT) { break; } if (nextChar == LexerTerminals.DOT && isNumericFollowedByIdentifier(nextNextChar)) { break; } if (this.mode == ParserMode.IMPORT) { break; } if (startChar == '0' && len > 1) { reportLexerError(DiagnosticErrorCode.ERROR_LEADING_ZEROS_IN_NUMERIC_LITERALS); } return processDecimalFloatLiteral(); default: if (isDigit(nextChar)) { reader.advance(); len++; nextChar = peek(); continue; } break; } break; } if (startChar == '0' && len > 1) { reportLexerError(DiagnosticErrorCode.ERROR_LEADING_ZEROS_IN_NUMERIC_LITERALS); } return getLiteral(SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN); } private boolean isNumericFollowedByIdentifier(char nextNextChar) { switch (nextNextChar) { case 'e': case 'E': case 'f': case 'F': case 'd': case 'D': char thirdChar = this.reader.peek(2); if (thirdChar == LexerTerminals.PLUS || thirdChar == LexerTerminals.MINUS) { return false; } return isIdentifierInitialChar(thirdChar); default: return isIdentifierInitialChar(nextNextChar); } } /** * <p> * Process and returns a decimal floating point literal. 
* </p> * <code> * DecimalFloatingPointNumber := * DecimalNumber Exponent [FloatingPointTypeSuffix] * | DottedDecimalNumber [Exponent] [FloatingPointTypeSuffix] * | DecimalNumber FloatingPointTypeSuffix * <br/> * DottedDecimalNumber := DecimalNumber . Digit* | . Digit+ * <br/> * FloatingPointTypeSuffix := DecimalTypeSuffix | FloatTypeSuffix * <br/> * DecimalTypeSuffix := d | D * <br/> * FloatTypeSuffix := f | F * </code> * * @return The decimal floating point literal. */ private STToken processDecimalFloatLiteral() { int nextChar = peek(); if (nextChar == LexerTerminals.DOT) { reader.advance(); nextChar = peek(); } while (isDigit(nextChar)) { reader.advance(); nextChar = peek(); } switch (nextChar) { case 'e': case 'E': return processExponent(false); case 'f': case 'F': case 'd': case 'D': return parseFloatingPointTypeSuffix(); } return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN); } /** * <p> * Process an exponent or hex-exponent. * </p> * <code> * exponent := Exponent | HexExponent * <br/> * Exponent := ExponentIndicator [Sign] Digit+ * <br/> * HexExponent := HexExponentIndicator [Sign] Digit+ * <br/> * ExponentIndicator := e | E * <br/> * HexExponentIndicator := p | P * <br/> * Sign := + | - * <br/> * Digit := 0 .. 9 * </code> * * @param isHex HexExponent or not * @return The decimal floating point literal. 
*/ private STToken processExponent(boolean isHex) { reader.advance(); int nextChar = peek(); if (nextChar == LexerTerminals.PLUS || nextChar == LexerTerminals.MINUS) { reader.advance(); nextChar = peek(); } if (!isDigit(nextChar)) { reportLexerError(DiagnosticErrorCode.ERROR_MISSING_DIGIT_AFTER_EXPONENT_INDICATOR); } while (isDigit(nextChar)) { reader.advance(); nextChar = peek(); } if (isHex) { return getLiteral(SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN); } switch (nextChar) { case 'f': case 'F': case 'd': case 'D': return parseFloatingPointTypeSuffix(); } return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN); } /** * <p> * Parse floating point type suffix. * </p> * <code> * FloatingPointTypeSuffix := DecimalTypeSuffix | FloatTypeSuffix * <br/> * DecimalTypeSuffix := d | D * <br/> * FloatTypeSuffix := f | F * </code> * * @return The decimal floating point literal. */ private STToken parseFloatingPointTypeSuffix() { reader.advance(); return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN); } /** * <p> * Process and returns a hex literal. * </p> * <code> * hex-literal := HexIntLiteral | HexFloatingPointLiteral * <br/> * HexIntLiteral := HexIndicator HexNumber * <br/> * HexNumber := HexDigit+ * <br/> * HexIndicator := 0x | 0X * <br/> * HexDigit := Digit | a .. f | A .. F * <br/> * HexFloatingPointLiteral := HexIndicator HexFloatingPointNumber * <br/> * HexFloatingPointNumber := HexNumber HexExponent | DottedHexNumber [HexExponent] * <br/> * DottedHexNumber := HexDigit+ . HexDigit* | . HexDigit+ * </code> * * @return The hex literal. 
*/ private STToken processHexLiteral() { reader.advance(); if (peek() == LexerTerminals.DOT && !isHexDigit(reader.peek(1))) { reportLexerError(DiagnosticErrorCode.ERROR_MISSING_HEX_DIGIT_AFTER_DOT); } int nextChar; while (isHexDigit(peek())) { reader.advance(); } nextChar = peek(); switch (nextChar) { case LexerTerminals.DOT: reader.advance(); nextChar = peek(); while (isHexDigit(nextChar)) { reader.advance(); nextChar = peek(); } switch (nextChar) { case 'p': case 'P': return processExponent(true); } break; case 'p': case 'P': return processExponent(true); default: return getLiteral(SyntaxKind.HEX_INTEGER_LITERAL_TOKEN); } return getLiteral(SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN); } /** * Process and returns an identifier or a keyword. * * @return An identifier or a keyword. */ private STToken processIdentifierOrKeyword() { processUnquotedIdentifier(); String tokenText = getLexeme(); switch (tokenText) { case LexerTerminals.INT: return getSyntaxToken(SyntaxKind.INT_KEYWORD); case LexerTerminals.FLOAT: return getSyntaxToken(SyntaxKind.FLOAT_KEYWORD); case LexerTerminals.STRING: return getSyntaxToken(SyntaxKind.STRING_KEYWORD); case LexerTerminals.BOOLEAN: return getSyntaxToken(SyntaxKind.BOOLEAN_KEYWORD); case LexerTerminals.DECIMAL: return getSyntaxToken(SyntaxKind.DECIMAL_KEYWORD); case LexerTerminals.XML: return getSyntaxToken(SyntaxKind.XML_KEYWORD); case LexerTerminals.JSON: return getSyntaxToken(SyntaxKind.JSON_KEYWORD); case LexerTerminals.HANDLE: return getSyntaxToken(SyntaxKind.HANDLE_KEYWORD); case LexerTerminals.ANY: return getSyntaxToken(SyntaxKind.ANY_KEYWORD); case LexerTerminals.ANYDATA: return getSyntaxToken(SyntaxKind.ANYDATA_KEYWORD); case LexerTerminals.NEVER: return getSyntaxToken(SyntaxKind.NEVER_KEYWORD); case LexerTerminals.BYTE: return getSyntaxToken(SyntaxKind.BYTE_KEYWORD); case LexerTerminals.PUBLIC: return getSyntaxToken(SyntaxKind.PUBLIC_KEYWORD); case LexerTerminals.PRIVATE: return getSyntaxToken(SyntaxKind.PRIVATE_KEYWORD); case 
LexerTerminals.FUNCTION: return getSyntaxToken(SyntaxKind.FUNCTION_KEYWORD); case LexerTerminals.RETURN: return getSyntaxToken(SyntaxKind.RETURN_KEYWORD); case LexerTerminals.RETURNS: return getSyntaxToken(SyntaxKind.RETURNS_KEYWORD); case LexerTerminals.EXTERNAL: return getSyntaxToken(SyntaxKind.EXTERNAL_KEYWORD); case LexerTerminals.TYPE: return getSyntaxToken(SyntaxKind.TYPE_KEYWORD); case LexerTerminals.RECORD: return getSyntaxToken(SyntaxKind.RECORD_KEYWORD); case LexerTerminals.OBJECT: return getSyntaxToken(SyntaxKind.OBJECT_KEYWORD); case LexerTerminals.REMOTE: return getSyntaxToken(SyntaxKind.REMOTE_KEYWORD); case LexerTerminals.ABSTRACT: return getSyntaxToken(SyntaxKind.ABSTRACT_KEYWORD); case LexerTerminals.CLIENT: return getSyntaxToken(SyntaxKind.CLIENT_KEYWORD); case LexerTerminals.IF: return getSyntaxToken(SyntaxKind.IF_KEYWORD); case LexerTerminals.ELSE: return getSyntaxToken(SyntaxKind.ELSE_KEYWORD); case LexerTerminals.WHILE: return getSyntaxToken(SyntaxKind.WHILE_KEYWORD); case LexerTerminals.TRUE: return getSyntaxToken(SyntaxKind.TRUE_KEYWORD); case LexerTerminals.FALSE: return getSyntaxToken(SyntaxKind.FALSE_KEYWORD); case LexerTerminals.CHECK: return getSyntaxToken(SyntaxKind.CHECK_KEYWORD); case LexerTerminals.FAIL: return getSyntaxToken(SyntaxKind.FAIL_KEYWORD); case LexerTerminals.CHECKPANIC: return getSyntaxToken(SyntaxKind.CHECKPANIC_KEYWORD); case LexerTerminals.CONTINUE: return getSyntaxToken(SyntaxKind.CONTINUE_KEYWORD); case LexerTerminals.BREAK: return getSyntaxToken(SyntaxKind.BREAK_KEYWORD); case LexerTerminals.PANIC: return getSyntaxToken(SyntaxKind.PANIC_KEYWORD); case LexerTerminals.IMPORT: return getSyntaxToken(SyntaxKind.IMPORT_KEYWORD); case LexerTerminals.VERSION: return getSyntaxToken(SyntaxKind.VERSION_KEYWORD); case LexerTerminals.AS: return getSyntaxToken(SyntaxKind.AS_KEYWORD); case LexerTerminals.SERVICE: return getSyntaxToken(SyntaxKind.SERVICE_KEYWORD); case LexerTerminals.ON: return 
getSyntaxToken(SyntaxKind.ON_KEYWORD); case LexerTerminals.RESOURCE: return getSyntaxToken(SyntaxKind.RESOURCE_KEYWORD); case LexerTerminals.LISTENER: return getSyntaxToken(SyntaxKind.LISTENER_KEYWORD); case LexerTerminals.CONST: return getSyntaxToken(SyntaxKind.CONST_KEYWORD); case LexerTerminals.FINAL: return getSyntaxToken(SyntaxKind.FINAL_KEYWORD); case LexerTerminals.TYPEOF: return getSyntaxToken(SyntaxKind.TYPEOF_KEYWORD); case LexerTerminals.IS: return getSyntaxToken(SyntaxKind.IS_KEYWORD); case LexerTerminals.NULL: return getSyntaxToken(SyntaxKind.NULL_KEYWORD); case LexerTerminals.LOCK: return getSyntaxToken(SyntaxKind.LOCK_KEYWORD); case LexerTerminals.ANNOTATION: return getSyntaxToken(SyntaxKind.ANNOTATION_KEYWORD); case LexerTerminals.SOURCE: return getSyntaxToken(SyntaxKind.SOURCE_KEYWORD); case LexerTerminals.VAR: return getSyntaxToken(SyntaxKind.VAR_KEYWORD); case LexerTerminals.WORKER: return getSyntaxToken(SyntaxKind.WORKER_KEYWORD); case LexerTerminals.PARAMETER: return getSyntaxToken(SyntaxKind.PARAMETER_KEYWORD); case LexerTerminals.FIELD: return getSyntaxToken(SyntaxKind.FIELD_KEYWORD); case LexerTerminals.ISOLATED: return getSyntaxToken(SyntaxKind.ISOLATED_KEYWORD); case LexerTerminals.XMLNS: return getSyntaxToken(SyntaxKind.XMLNS_KEYWORD); case LexerTerminals.FORK: return getSyntaxToken(SyntaxKind.FORK_KEYWORD); case LexerTerminals.MAP: return getSyntaxToken(SyntaxKind.MAP_KEYWORD); case LexerTerminals.FUTURE: return getSyntaxToken(SyntaxKind.FUTURE_KEYWORD); case LexerTerminals.TYPEDESC: return getSyntaxToken(SyntaxKind.TYPEDESC_KEYWORD); case LexerTerminals.TRAP: return getSyntaxToken(SyntaxKind.TRAP_KEYWORD); case LexerTerminals.IN: return getSyntaxToken(SyntaxKind.IN_KEYWORD); case LexerTerminals.FOREACH: return getSyntaxToken(SyntaxKind.FOREACH_KEYWORD); case LexerTerminals.TABLE: return getSyntaxToken(SyntaxKind.TABLE_KEYWORD); case LexerTerminals.ERROR: return getSyntaxToken(SyntaxKind.ERROR_KEYWORD); case LexerTerminals.LET: return 
getSyntaxToken(SyntaxKind.LET_KEYWORD); case LexerTerminals.STREAM: return getSyntaxToken(SyntaxKind.STREAM_KEYWORD); case LexerTerminals.NEW: return getSyntaxToken(SyntaxKind.NEW_KEYWORD); case LexerTerminals.READONLY: return getSyntaxToken(SyntaxKind.READONLY_KEYWORD); case LexerTerminals.DISTINCT: return getSyntaxToken(SyntaxKind.DISTINCT_KEYWORD); case LexerTerminals.FROM: return getSyntaxToken(SyntaxKind.FROM_KEYWORD); case LexerTerminals.START: return getSyntaxToken(SyntaxKind.START_KEYWORD); case LexerTerminals.FLUSH: return getSyntaxToken(SyntaxKind.FLUSH_KEYWORD); case LexerTerminals.WAIT: return getSyntaxToken(SyntaxKind.WAIT_KEYWORD); case LexerTerminals.DO: return getSyntaxToken(SyntaxKind.DO_KEYWORD); case LexerTerminals.TRANSACTION: return getSyntaxToken(SyntaxKind.TRANSACTION_KEYWORD); case LexerTerminals.COMMIT: return getSyntaxToken(SyntaxKind.COMMIT_KEYWORD); case LexerTerminals.RETRY: return getSyntaxToken(SyntaxKind.RETRY_KEYWORD); case LexerTerminals.ROLLBACK: return getSyntaxToken(SyntaxKind.ROLLBACK_KEYWORD); case LexerTerminals.TRANSACTIONAL: return getSyntaxToken(SyntaxKind.TRANSACTIONAL_KEYWORD); case LexerTerminals.ENUM: return getSyntaxToken(SyntaxKind.ENUM_KEYWORD); case LexerTerminals.BASE16: return getSyntaxToken(SyntaxKind.BASE16_KEYWORD); case LexerTerminals.BASE64: return getSyntaxToken(SyntaxKind.BASE64_KEYWORD); case LexerTerminals.MATCH: return getSyntaxToken(SyntaxKind.MATCH_KEYWORD); case LexerTerminals.CONFLICT: return getSyntaxToken(SyntaxKind.CONFLICT_KEYWORD); case LexerTerminals.CLASS: return getSyntaxToken(SyntaxKind.CLASS_KEYWORD); case LexerTerminals.CONFIGURABLE: return getSyntaxToken(SyntaxKind.CONFIGURABLE_KEYWORD); default: if (this.keywordModes.contains(KeywordMode.QUERY)) { return getQueryCtxKeywordOrIdentifier(tokenText); } return getIdentifierToken(); } } private STToken getQueryCtxKeywordOrIdentifier(String tokenText) { switch (tokenText) { case LexerTerminals.WHERE: return 
getSyntaxToken(SyntaxKind.WHERE_KEYWORD); case LexerTerminals.SELECT: return getSyntaxToken(SyntaxKind.SELECT_KEYWORD); case LexerTerminals.LIMIT: return getSyntaxToken(SyntaxKind.LIMIT_KEYWORD); case LexerTerminals.JOIN: return getSyntaxToken(SyntaxKind.JOIN_KEYWORD); case LexerTerminals.OUTER: return getSyntaxToken(SyntaxKind.OUTER_KEYWORD); case LexerTerminals.EQUALS: return getSyntaxToken(SyntaxKind.EQUALS_KEYWORD); case LexerTerminals.ORDER: return getSyntaxToken(SyntaxKind.ORDER_KEYWORD); case LexerTerminals.BY: return getSyntaxToken(SyntaxKind.BY_KEYWORD); case LexerTerminals.ASCENDING: return getSyntaxToken(SyntaxKind.ASCENDING_KEYWORD); case LexerTerminals.DESCENDING: return getSyntaxToken(SyntaxKind.DESCENDING_KEYWORD); default: return getIdentifierToken(); } } /** * Process and returns an invalid token. Consumes the input until {@link * is reached. */ private STToken processInvalidToken() { while (!isEndOfInvalidToken()) { reader.advance(); } String tokenText = getLexeme(); STToken invalidToken = STNodeFactory.createInvalidToken(tokenText); STNode invalidNodeMinutiae = STNodeFactory.createInvalidNodeMinutiae(invalidToken); this.leadingTriviaList.add(invalidNodeMinutiae); return invalidToken; } /** * Check whether the current index is pointing to an end of an invalid lexer-token. 
* An invalid token is considered to end if one of the below is reached: * <ul> * <li>a whitespace</li> * <li>semicolon</li> * <li>newline</li> * </ul> * * @return <code>true</code>, if the end of an invalid token is reached, <code>false</code> otherwise */ private boolean isEndOfInvalidToken() { if (reader.isEOF()) { return true; } int currentChar = peek(); switch (currentChar) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.SPACE: case LexerTerminals.TAB: case LexerTerminals.SEMICOLON: case LexerTerminals.COLON: case LexerTerminals.DOT: case LexerTerminals.COMMA: case LexerTerminals.OPEN_PARANTHESIS: case LexerTerminals.CLOSE_PARANTHESIS: case LexerTerminals.OPEN_BRACE: case LexerTerminals.CLOSE_BRACE: case LexerTerminals.OPEN_BRACKET: case LexerTerminals.CLOSE_BRACKET: case LexerTerminals.PIPE: case LexerTerminals.QUESTION_MARK: case LexerTerminals.DOUBLE_QUOTE: case LexerTerminals.SINGLE_QUOTE: case LexerTerminals.HASH: case LexerTerminals.AT: case LexerTerminals.BACKTICK: case LexerTerminals.DOLLAR: case LexerTerminals.EQUAL: case LexerTerminals.PLUS: case LexerTerminals.MINUS: case LexerTerminals.ASTERISK: case LexerTerminals.SLASH: case LexerTerminals.PERCENT: case LexerTerminals.GT: case LexerTerminals.LT: case LexerTerminals.BACKSLASH: case LexerTerminals.EXCLAMATION_MARK: case LexerTerminals.BITWISE_AND: case LexerTerminals.BITWISE_XOR: case LexerTerminals.NEGATION: return true; default: return isIdentifierFollowingChar(currentChar); } } /** * <p> * Check whether current input index points to a start of a hex-numeric literal. * </p> * <code>HexIndicator := 0x | 0X</code> * * @param startChar Starting character of the literal * @param nextChar Second character of the literal * @return <code>true</code>, if the current input points to a start of a hex-numeric literal. * <code>false</code> otherwise. 
*/ private boolean isHexIndicator(int startChar, int nextChar) { return startChar == '0' && (nextChar == 'x' || nextChar == 'X'); } /** * Returns the next character from the reader, without consuming the stream. * * @return Next character */ private int peek() { return this.reader.peek(); } /** * Get the text associated with the current token. * * @return Text associated with the current token. */ private String getLexeme() { return reader.getMarkedChars(); } /** * Process and return double-quoted string literal. * <p> * <code>string-literal := DoubleQuotedStringLiteral * <br/> * DoubleQuotedStringLiteral := " (StringChar | StringEscape)* " * <br/> * StringChar := ^ ( 0xA | 0xD | \ | " ) * <br/> * StringEscape := StringSingleEscape | NumericEscape * <br/> * StringSingleEscape := \t | \n | \r | \\ | \" * <br/> * NumericEscape := \ u{ CodePoint } * <br/> * CodePoint := HexDigit+ * </code> * * @return String literal token */ private STToken processStringLiteral() { int nextChar; while (!reader.isEOF()) { nextChar = peek(); switch (nextChar) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: reportLexerError(DiagnosticErrorCode.ERROR_MISSING_DOUBLE_QUOTE); break; case LexerTerminals.DOUBLE_QUOTE: this.reader.advance(); break; case LexerTerminals.BACKSLASH: switch (this.reader.peek(1)) { case 'n': case 't': case 'r': case LexerTerminals.BACKSLASH: case LexerTerminals.DOUBLE_QUOTE: this.reader.advance(2); continue; case 'u': if (this.reader.peek(2) == LexerTerminals.OPEN_BRACE) { processNumericEscape(); } else { reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE); this.reader.advance(2); } continue; default: reportInvalidEscapeSequence(this.reader.peek(2)); this.reader.advance(); continue; } default: this.reader.advance(); continue; } break; } return getLiteral(SyntaxKind.STRING_LITERAL_TOKEN); } /** * Process numeric escape. 
* <p> * <code>NumericEscape := \ u { CodePoint }</code> */ private void processNumericEscape() { this.reader.advance(3); if (!isHexDigit(peek())) { reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE); return; } reader.advance(); while (isHexDigit(peek())) { reader.advance(); } if (peek() != LexerTerminals.CLOSE_BRACE) { reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE); return; } this.reader.advance(); } /** * Process any token that starts with '!'. * * @return One of the tokens: <code>'!', '!=', '!=='</code> */ private STToken processExclamationMarkOperator() { switch (peek()) { case LexerTerminals.EQUAL: reader.advance(); if (peek() == LexerTerminals.EQUAL) { reader.advance(); return getSyntaxToken(SyntaxKind.NOT_DOUBLE_EQUAL_TOKEN); } else { return getSyntaxToken(SyntaxKind.NOT_EQUAL_TOKEN); } default: if (isNotIsToken()) { reader.advance(2); return getSyntaxToken(SyntaxKind.NOT_IS_KEYWORD); } return getSyntaxToken(SyntaxKind.EXCLAMATION_MARK_TOKEN); } } private boolean isNotIsToken() { if ((reader.peek() == 'i' && reader.peek(1) == 's') && !(isIdentifierFollowingChar(reader.peek(2)) || reader.peek(2) == LexerTerminals.BACKSLASH)) { return true; } else { return false; } } /** * Process any token that starts with '|'. * * @return One of the tokens: <code>'|', '|}', '||'</code> */ private STToken processPipeOperator() { switch (peek()) { case LexerTerminals.CLOSE_BRACE: reader.advance(); return getSyntaxToken(SyntaxKind.CLOSE_BRACE_PIPE_TOKEN); case LexerTerminals.PIPE: reader.advance(); return getSyntaxToken(SyntaxKind.LOGICAL_OR_TOKEN); default: return getSyntaxToken(SyntaxKind.PIPE_TOKEN); } } /** * Process any token that starts with '/'. 
* @return One of the tokens: <code>'/', '/*', '/**\/<' </code>
     */
    private STToken processSlashToken() {
        // The '/' has already been consumed by the caller.
        if (peek() != LexerTerminals.ASTERISK) {
            return getSyntaxToken(SyntaxKind.SLASH_TOKEN);
        }
        reader.advance();
        if (peek() != LexerTerminals.ASTERISK) {
            return getSyntaxToken(SyntaxKind.SLASH_ASTERISK_TOKEN);
        } else if (reader.peek(1) == LexerTerminals.SLASH && reader.peek(2) == LexerTerminals.LT) {
            reader.advance(3);
            return getSyntaxToken(SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
        } else {
            // '/**' not followed by '/<': only '/*' is consumed; the second '*' is left in the reader.
            return getSyntaxToken(SyntaxKind.SLASH_ASTERISK_TOKEN);
        }
    }

    /**
     * Process and return documentation string.
     * <p>
     * <code>
     * DocumentationContentString := ( BlankSpace*
     * <br/>
     * DocumentationContent := (^ 0xA)* 0xA
     * <br/>
     * BlankSpace := Tab | Space
     * <br/>
     * Space := 0x20
     * <br/>
     * Tab := 0x9
     * </code>
     *
     * @return Documentation string token
     */
    private STToken processDocumentationString() {
        int nextChar = peek();
        while (!reader.isEOF()) {
            switch (nextChar) {
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.NEWLINE:
                    // Consume the line terminator ('\r\n' counts as a single terminator).
                    if (peek() == LexerTerminals.CARRIAGE_RETURN && reader.peek(1) == LexerTerminals.NEWLINE) {
                        reader.advance();
                    }
                    reader.advance();
                    // Look past leading blanks of the next line; the documentation string
                    // continues only if that line also starts with '#'.
                    int lookAheadCount = 0;
                    int lookAheadChar = reader.peek(lookAheadCount);
                    while (lookAheadChar == LexerTerminals.SPACE || lookAheadChar == LexerTerminals.TAB) {
                        lookAheadCount++;
                        lookAheadChar = reader.peek(lookAheadCount);
                    }
                    if (lookAheadChar != LexerTerminals.HASH) {
                        break;
                    }
                    reader.advance(lookAheadCount);
                    nextChar = peek();
                    continue;
                default:
                    reader.advance();
                    nextChar = peek();
                    continue;
            }
            break;
        }
        STNode leadingTrivia = getLeadingTrivia();
        String lexeme = getLexeme();
        // Documentation strings carry no trailing trivia.
        STNode trailingTrivia = STNodeFactory.createNodeList(new ArrayList<>(0));
        return STNodeFactory.createLiteralValueToken(SyntaxKind.DOCUMENTATION_STRING, lexeme, leadingTrivia,
                trailingTrivia);
    }

    /**
     * Create a backtick token with no trailing trivia.
     *
     * @return Backtick token
     */
    private STToken getBacktickToken() {
        STNode leadingTrivia = getLeadingTrivia();
        STNode trailingTrivia = STNodeFactory.createEmptyNodeList();
        return STNodeFactory.createToken(SyntaxKind.BACKTICK_TOKEN, leadingTrivia, trailingTrivia);
    }

    /**
     * Read the next token while in TEMPLATE mode: closing backtick, interpolation start,
     * or a run of template string content.
     *
     * @return Next token within a template
     */
    private STToken readTemplateToken() {
        reader.mark();
        if (reader.isEOF()) {
            return getSyntaxToken(SyntaxKind.EOF_TOKEN);
        }
        char nextChar = this.reader.peek();
        switch (nextChar) {
            case LexerTerminals.BACKTICK:
                reader.advance();
                endMode();
                return getSyntaxToken(SyntaxKind.BACKTICK_TOKEN);
            case LexerTerminals.DOLLAR:
                if (reader.peek(1) == LexerTerminals.OPEN_BRACE) {
                    startMode(ParserMode.INTERPOLATION);
                    reader.advance(2);
                    return getSyntaxToken(SyntaxKind.INTERPOLATION_START_TOKEN);
                }
                // fall through: a lone '$' is ordinary template content
            default:
                // Consume content until '${', a backtick, or EOF.
                while (!reader.isEOF()) {
                    nextChar = this.reader.peek();
                    switch (nextChar) {
                        case LexerTerminals.DOLLAR:
                            if (this.reader.peek(1) == LexerTerminals.OPEN_BRACE) {
                                break;
                            }
                            reader.advance();
                            continue;
                        case LexerTerminals.BACKTICK:
                            break;
                        default:
                            reader.advance();
                            continue;
                    }
                    break;
                }
        }
        return getLiteral(SyntaxKind.TEMPLATE_STRING);
    }

    /**
     * Process quoted identifier.
     * <p>
     * <code>
     * QuotedIdentifier := ' (IdentifierFollowingChar | IdentifierEscape) IdentifierEnd
     * </code>
     *
     * @return An identifier token.
     */
    private STToken processQuotedIdentifier() {
        processIdentifierEnd();
        // A bare quote with no identifier characters after it is incomplete.
        if (String.valueOf(LexerTerminals.SINGLE_QUOTE).equals(getLexeme())) {
            reportLexerError(DiagnosticErrorCode.ERROR_INCOMPLETE_QUOTED_IDENTIFIER);
        }
        return getIdentifierToken();
    }

    /**
     * Process unquoted identifier.
     * <p>
     * <code>
     * UnquotedIdentifier := (IdentifierInitialChar | IdentifierEscape) IdentifierEnd
     * </code>
     */
    private void processUnquotedIdentifier() {
        processIdentifierEnd();
    }

    /**
     * Process identifier end.
     * <p>
     * <i>Note: Need to update the {@link DocumentationLexer} whenever changing the identifier processing.</i>
     * <p>
     * <code>
     * IdentifierEnd := IdentifierChar*
     * <br/>
     * IdentifierChar := IdentifierFollowingChar | IdentifierEscape
     * <br/>
     * IdentifierEscape := IdentifierSingleEscape | NumericEscape
     * </code>
     */
    // NOTE(review): the javadoc above describes identifier-end processing, but the
    // corresponding method body does not appear adjacent to it in this section — verify.
    private void reportInvalidEscapeSequence(char nextChar) {
        // Report the offending character as an invalid escape sequence.
        String escapeSequence = String.valueOf(nextChar);
        reportLexerError(DiagnosticErrorCode.ERROR_INVALID_ESCAPE_SEQUENCE, escapeSequence);
    }

    /**
     * Check whether a character is valid as a quoted-identifier escape: ASCII letters
     * and Unicode pattern whitespace are rejected.
     */
    private boolean isValidQuotedIdentifierEscapeChar(int nextChar) {
        // ASCII letters cannot be escaped.
        if ('A' <= nextChar && nextChar <= 'Z') {
            return false;
        }
        if ('a' <= nextChar && nextChar <= 'z') {
            return false;
        }
        return !isUnicodePatternWhiteSpaceChar(nextChar);
    }

    /**
     * Process any token that starts with '<'.
     *
     * @return One of the tokens: <code>'<', '<=', '<-', '<<'</code>
     */
    private STToken processTokenStartWithLt() {
        int nextChar = peek();
        switch (nextChar) {
            case LexerTerminals.EQUAL:
                reader.advance();
                return getSyntaxToken(SyntaxKind.LT_EQUAL_TOKEN);
            case LexerTerminals.MINUS:
                // '<-5' must lex as '<' followed by a negative-looking literal, not a left arrow.
                int nextNextChar = reader.peek(1);
                if (isDigit(nextNextChar)) {
                    return getSyntaxToken(SyntaxKind.LT_TOKEN);
                }
                reader.advance();
                return getSyntaxToken(SyntaxKind.LEFT_ARROW_TOKEN);
            case LexerTerminals.LT:
                reader.advance();
                return getSyntaxToken(SyntaxKind.DOUBLE_LT_TOKEN);
        }
        return getSyntaxToken(SyntaxKind.LT_TOKEN);
    }

    /**
     * Process any token that starts with '>'.
     *
     * @return One of the tokens: <code>'>', '>=', '>>=', '>>'</code>
     */
    private STToken processTokenStartWithGt() {
        if (peek() == LexerTerminals.EQUAL) {
            reader.advance();
            return getSyntaxToken(SyntaxKind.GT_EQUAL_TOKEN);
        }
        if (reader.peek() != LexerTerminals.GT) {
            return getSyntaxToken(SyntaxKind.GT_TOKEN);
        }
        char nextChar = reader.peek(1);
        switch (nextChar) {
            case LexerTerminals.GT:
                if (reader.peek(2) == LexerTerminals.EQUAL) {
                    reader.advance(2);
                    return getSyntaxToken(SyntaxKind.TRIPPLE_GT_TOKEN);
                }
                // Consecutive '>' without '=' stay separate tokens (e.g. closing type params).
                return getSyntaxToken(SyntaxKind.GT_TOKEN);
            case LexerTerminals.EQUAL:
                reader.advance(1);
                return getSyntaxToken(SyntaxKind.DOUBLE_GT_TOKEN);
            default:
                return getSyntaxToken(SyntaxKind.GT_TOKEN);
        }
    }

    /*
     * ------------------------------------------------------------------------------------------------------------
     * INTERPOLATION Mode
     * ------------------------------------------------------------------------------------------------------------
     */

    /**
     * Read the next token while in INTERPOLATION mode ('${ ... }' inside a template).
     */
    private STToken readTokenInInterpolation() {
        reader.mark();
        int nextChar = peek();
        switch (nextChar) {
            case LexerTerminals.OPEN_BRACE:
                // A nested open brace starts tracking of braced content within the interpolation.
                startMode(ParserMode.INTERPOLATION_BRACED_CONTENT);
                return readToken();
            case LexerTerminals.CLOSE_BRACE:
                // End of the interpolation.
                endMode();
                reader.advance();
                return getSyntaxTokenWithoutTrailingTrivia(SyntaxKind.CLOSE_BRACE_TOKEN);
            case LexerTerminals.BACKTICK:
            default:
                // Interpolation content is tokenized in the default manner.
                return readToken();
        }
    }

    /**
     * Create a token of the given kind with no trailing trivia.
     */
    private STToken getSyntaxTokenWithoutTrailingTrivia(SyntaxKind kind) {
        STNode leadingTrivia = getLeadingTrivia();
        STNode trailingTrivia = STNodeFactory.createNodeList(new ArrayList<>(0));
        return STNodeFactory.createToken(kind, leadingTrivia, trailingTrivia);
    }

    /*
     * ------------------------------------------------------------------------------------------------------------
     * INTERPOLATION_BRACED_CONTENT Mode
     * ------------------------------------------------------------------------------------------------------------
     */

    /**
     * Read the next token while inside a brace pair within an interpolation, tracking
     * nested braces and bailing out of all modes on a backtick.
     */
    private STToken readTokenInBracedContentInInterpolation() {
        reader.mark();
        int nextChar = peek();
        switch (nextChar) {
            case LexerTerminals.OPEN_BRACE:
                startMode(ParserMode.INTERPOLATION_BRACED_CONTENT);
                break;
            case LexerTerminals.CLOSE_BRACE:
                endMode();
                break;
            case LexerTerminals.BACKTICK:
                // A backtick terminates the template entirely: unwind every pending mode.
                while (this.mode != ParserMode.DEFAULT) {
                    endMode();
                }
                reader.advance();
                return getBacktickToken();
            default:
                break;
        }
        return readToken();
    }
}
class BallerinaLexer extends AbstractLexer {

    public BallerinaLexer(CharReader charReader) {
        super(charReader, ParserMode.DEFAULT);
    }

    /**
     * Get the next lexical token.
     *
     * @return Next lexical token.
     */
    public STToken nextToken() {
        STToken token;
        // Dispatch on the current lexer mode; template/interpolation content is tokenized
        // differently from regular code.
        switch (this.mode) {
            case TEMPLATE:
                token = readTemplateToken();
                break;
            case INTERPOLATION:
                processLeadingTrivia();
                token = readTokenInInterpolation();
                break;
            case INTERPOLATION_BRACED_CONTENT:
                processLeadingTrivia();
                token = readTokenInBracedContentInInterpolation();
                break;
            case DEFAULT:
            case IMPORT:
            default:
                processLeadingTrivia();
                token = readToken();
        }
        return cloneWithDiagnostics(token);
    }

    /*
     * Private Methods
     */

    /**
     * Read the next token in the default mode, dispatching on its first character.
     */
    private STToken readToken() {
        reader.mark();
        if (reader.isEOF()) {
            return getSyntaxToken(SyntaxKind.EOF_TOKEN);
        }
        int c = reader.peek();
        // A backslash can only start an (escaped) identifier.
        if (c == LexerTerminals.BACKSLASH) {
            processUnquotedIdentifier();
            return getIdentifierToken();
        }
        reader.advance();
        STToken token;
        switch (c) {
            // Separators
            case LexerTerminals.COLON:
                token = getSyntaxToken(SyntaxKind.COLON_TOKEN);
                break;
            case LexerTerminals.SEMICOLON:
                token = getSyntaxToken(SyntaxKind.SEMICOLON_TOKEN);
                break;
            case LexerTerminals.DOT:
                token = processDot();
                break;
            case LexerTerminals.COMMA:
                token = getSyntaxToken(SyntaxKind.COMMA_TOKEN);
                break;
            case LexerTerminals.OPEN_PARANTHESIS:
                token = getSyntaxToken(SyntaxKind.OPEN_PAREN_TOKEN);
                break;
            case LexerTerminals.CLOSE_PARANTHESIS:
                token = getSyntaxToken(SyntaxKind.CLOSE_PAREN_TOKEN);
                break;
            case LexerTerminals.OPEN_BRACE:
                // '{|' starts a closed-record body.
                if (peek() == LexerTerminals.PIPE) {
                    reader.advance();
                    token = getSyntaxToken(SyntaxKind.OPEN_BRACE_PIPE_TOKEN);
                } else {
                    token = getSyntaxToken(SyntaxKind.OPEN_BRACE_TOKEN);
                }
                break;
            case LexerTerminals.CLOSE_BRACE:
                token = getSyntaxToken(SyntaxKind.CLOSE_BRACE_TOKEN);
                break;
            case LexerTerminals.OPEN_BRACKET:
                token = getSyntaxToken(SyntaxKind.OPEN_BRACKET_TOKEN);
                break;
            case LexerTerminals.CLOSE_BRACKET:
                token = getSyntaxToken(SyntaxKind.CLOSE_BRACKET_TOKEN);
                break;
            case LexerTerminals.PIPE:
                token =
processPipeOperator();
                break;
            case LexerTerminals.QUESTION_MARK:
                // '?.' is optional chaining unless followed by another '.' (spread-like form).
                if (peek() == LexerTerminals.DOT && reader.peek(1) != LexerTerminals.DOT) {
                    reader.advance();
                    token = getSyntaxToken(SyntaxKind.OPTIONAL_CHAINING_TOKEN);
                } else if (peek() == LexerTerminals.COLON) {
                    reader.advance();
                    token = getSyntaxToken(SyntaxKind.ELVIS_TOKEN);
                } else {
                    token = getSyntaxToken(SyntaxKind.QUESTION_MARK_TOKEN);
                }
                break;
            case LexerTerminals.DOUBLE_QUOTE:
                token = processStringLiteral();
                break;
            case LexerTerminals.HASH:
                token = processDocumentationString();
                break;
            case LexerTerminals.AT:
                token = getSyntaxToken(SyntaxKind.AT_TOKEN);
                break;
            // Arithmetic operators
            case LexerTerminals.EQUAL:
                token = processEqualOperator();
                break;
            case LexerTerminals.PLUS:
                token = getSyntaxToken(SyntaxKind.PLUS_TOKEN);
                break;
            case LexerTerminals.MINUS:
                // '->>' sync send, '->' right arrow, else plain minus.
                if (reader.peek() == LexerTerminals.GT) {
                    reader.advance();
                    if (peek() == LexerTerminals.GT) {
                        reader.advance();
                        token = getSyntaxToken(SyntaxKind.SYNC_SEND_TOKEN);
                    } else {
                        token = getSyntaxToken(SyntaxKind.RIGHT_ARROW_TOKEN);
                    }
                } else {
                    token = getSyntaxToken(SyntaxKind.MINUS_TOKEN);
                }
                break;
            case LexerTerminals.ASTERISK:
                token = getSyntaxToken(SyntaxKind.ASTERISK_TOKEN);
                break;
            case LexerTerminals.SLASH:
                token = processSlashToken();
                break;
            case LexerTerminals.PERCENT:
                token = getSyntaxToken(SyntaxKind.PERCENT_TOKEN);
                break;
            case LexerTerminals.LT:
                token = processTokenStartWithLt();
                break;
            case LexerTerminals.GT:
                token = processTokenStartWithGt();
                break;
            case LexerTerminals.EXCLAMATION_MARK:
                token = processExclamationMarkOperator();
                break;
            case LexerTerminals.BITWISE_AND:
                if (peek() == LexerTerminals.BITWISE_AND) {
                    reader.advance();
                    token = getSyntaxToken(SyntaxKind.LOGICAL_AND_TOKEN);
                } else {
                    token = getSyntaxToken(SyntaxKind.BITWISE_AND_TOKEN);
                }
                break;
            case LexerTerminals.BITWISE_XOR:
                token = getSyntaxToken(SyntaxKind.BITWISE_XOR_TOKEN);
                break;
            case LexerTerminals.NEGATION:
                token = getSyntaxToken(SyntaxKind.NEGATION_TOKEN);
                break;
            case LexerTerminals.BACKTICK:
                startMode(ParserMode.TEMPLATE);
                token = getBacktickToken();
                break;
            case LexerTerminals.SINGLE_QUOTE:
                token = processQuotedIdentifier();
                break;
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
                token = processNumericLiteral(c);
                break;
            default:
                if (isIdentifierInitialChar(c)) {
                    token = processIdentifierOrKeyword();
                    break;
                }
                // Unrecognized character: capture it as an invalid token, attach it as a
                // diagnostic to the next valid token.
                STToken invalidToken = processInvalidToken();
                token = nextToken();
                token = SyntaxErrors.addDiagnostic(token, DiagnosticErrorCode.ERROR_INVALID_TOKEN, invalidToken);
                break;
        }
        return token;
    }

    /**
     * Create a fixed-text token of the given kind, with leading and trailing trivia.
     */
    private STToken getSyntaxToken(SyntaxKind kind) {
        STNode leadingTrivia = getLeadingTrivia();
        STNode trailingTrivia = processTrailingTrivia();
        return STNodeFactory.createToken(kind, leadingTrivia, trailingTrivia);
    }

    /**
     * Create an identifier token from the currently marked lexeme.
     */
    private STToken getIdentifierToken() {
        STNode leadingTrivia = getLeadingTrivia();
        String lexeme = getLexeme();
        STNode trailingTrivia = processTrailingTrivia();
        return STNodeFactory.createIdentifierToken(lexeme, leadingTrivia, trailingTrivia);
    }

    /**
     * Create a literal token of the given kind from the currently marked lexeme.
     */
    private STToken getLiteral(SyntaxKind kind) {
        STNode leadingTrivia = getLeadingTrivia();
        String lexeme = getLexeme();
        STNode trailingTrivia = processTrailingTrivia();
        return STNodeFactory.createLiteralValueToken(kind, lexeme, leadingTrivia, trailingTrivia);
    }

    /**
     * Process leading trivia.
     */
    private void processLeadingTrivia() {
        processSyntaxTrivia(this.leadingTriviaList, true);
    }

    /**
     * Process and return trailing trivia.
     *
     * @return Trailing trivia
     */
    private STNode processTrailingTrivia() {
        List<STNode> triviaList = new ArrayList<>(INITIAL_TRIVIA_CAPACITY);
        processSyntaxTrivia(triviaList, false);
        return STNodeFactory.createNodeList(triviaList);
    }

    /**
     * Process syntax trivia and add it to the provided list.
* <p>
     * <code>syntax-trivia := whitespace | end-of-line | comments</code>
     *
     * @param triviaList List of trivia
     * @param isLeading Flag indicating whether the currently processing leading trivia or not
     */
    private void processSyntaxTrivia(List<STNode> triviaList, boolean isLeading) {
        while (!reader.isEOF()) {
            reader.mark();
            char c = reader.peek();
            switch (c) {
                case LexerTerminals.SPACE:
                case LexerTerminals.TAB:
                case LexerTerminals.FORM_FEED:
                    triviaList.add(processWhitespaces());
                    break;
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.NEWLINE:
                    triviaList.add(processEndOfLine());
                    // A newline terminates trailing trivia but not leading trivia.
                    if (isLeading) {
                        break;
                    }
                    return;
                case LexerTerminals.SLASH:
                    // Only '//' starts a comment; a single slash ends the trivia run.
                    if (reader.peek(1) == LexerTerminals.SLASH) {
                        triviaList.add(processComment());
                        break;
                    }
                    return;
                default:
                    return;
            }
        }
    }

    /**
     * Process whitespace up to an end of line.
     * <p>
     * <code>whitespace := 0x9 | 0xC | 0x20</code>
     *
     * @return Whitespace trivia
     */
    private STNode processWhitespaces() {
        while (!reader.isEOF()) {
            char c = reader.peek();
            switch (c) {
                case LexerTerminals.SPACE:
                case LexerTerminals.TAB:
                case LexerTerminals.FORM_FEED:
                    reader.advance();
                    continue;
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.NEWLINE:
                    break;
                default:
                    break;
            }
            break;
        }
        return STNodeFactory.createMinutiae(SyntaxKind.WHITESPACE_MINUTIAE, getLexeme());
    }

    /**
     * Process end of line.
     * <p>
     * <code>end-of-line := 0xA | 0xD</code>
     *
     * @return End of line trivia
     */
    private STNode processEndOfLine() {
        char c = reader.peek();
        switch (c) {
            case LexerTerminals.NEWLINE:
                reader.advance();
                return STNodeFactory.createMinutiae(SyntaxKind.END_OF_LINE_MINUTIAE, getLexeme());
            case LexerTerminals.CARRIAGE_RETURN:
                reader.advance();
                // '\r\n' is treated as a single end-of-line.
                if (reader.peek() == LexerTerminals.NEWLINE) {
                    reader.advance();
                }
                return STNodeFactory.createMinutiae(SyntaxKind.END_OF_LINE_MINUTIAE, getLexeme());
            default:
                // Callers only invoke this when positioned at a line terminator.
                throw new IllegalStateException();
        }
    }

    /**
     * Process dot, ellipsis or decimal floating point token.
     *
     * @return Dot, ellipsis or decimal floating point token
     */
    private STToken processDot() {
        int nextChar = reader.peek();
        if (nextChar == LexerTerminals.DOT) {
            int nextNextChar = reader.peek(1);
            if (nextNextChar == LexerTerminals.DOT) {
                reader.advance(2);
                return getSyntaxToken(SyntaxKind.ELLIPSIS_TOKEN);
            } else if (nextNextChar == LexerTerminals.LT) {
                reader.advance(2);
                return getSyntaxToken(SyntaxKind.DOUBLE_DOT_LT_TOKEN);
            }
        } else if (nextChar == LexerTerminals.AT) {
            reader.advance();
            return getSyntaxToken(SyntaxKind.ANNOT_CHAINING_TOKEN);
        } else if (nextChar == LexerTerminals.LT) {
            reader.advance();
            return getSyntaxToken(SyntaxKind.DOT_LT_TOKEN);
        }
        // '.5'-style floats are not recognized while lexing import declarations.
        if (this.mode != ParserMode.IMPORT && isDigit(nextChar)) {
            return processDecimalFloatLiteral();
        }
        return getSyntaxToken(SyntaxKind.DOT_TOKEN);
    }

    /**
     * <p>
     * Process a comment, and add it to trivia list.
     * </p>
     * <code>
     * Comment := // AnyCharButNewline*
     * <br/><br/>
     * AnyCharButNewline := ^ 0xA
     * </code>
     */
    private STNode processComment() {
        // Skip the leading '//' and consume until the end of the line.
        reader.advance(2);
        int nextToken = peek();
        while (!reader.isEOF()) {
            switch (nextToken) {
                case LexerTerminals.NEWLINE:
                case LexerTerminals.CARRIAGE_RETURN:
                    break;
                default:
                    reader.advance();
                    nextToken = peek();
                    continue;
            }
            break;
        }
        return STNodeFactory.createMinutiae(SyntaxKind.COMMENT_MINUTIAE, getLexeme());
    }

    /**
     * Process any token that starts with '='.
     *
     * @return One of the tokens: <code>'=', '==', '=>', '==='</code>
     */
    private STToken processEqualOperator() {
        switch (peek()) {
            case LexerTerminals.EQUAL:
                reader.advance();
                if (peek() == LexerTerminals.EQUAL) {
                    reader.advance();
                    return getSyntaxToken(SyntaxKind.TRIPPLE_EQUAL_TOKEN);
                } else {
                    return getSyntaxToken(SyntaxKind.DOUBLE_EQUAL_TOKEN);
                }
            case LexerTerminals.GT:
                reader.advance();
                return getSyntaxToken(SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN);
            default:
                return getSyntaxToken(SyntaxKind.EQUAL_TOKEN);
        }
    }

    /**
     * <p>
     * Process and returns a numeric literal.
* </p>
     * <code>
     * numeric-literal := int-literal | floating-point-literal
     * <br/>
     * floating-point-literal := DecimalFloatingPointNumber | HexFloatingPointLiteral
     * <br/>
     * int-literal := DecimalNumber | HexIntLiteral
     * <br/>
     * DecimalNumber := 0 | NonZeroDigit Digit*
     * <br/>
     * Digit := 0 .. 9
     * <br/>
     * NonZeroDigit := 1 .. 9
     * </code>
     *
     * @return The numeric literal.
     */
    private STToken processNumericLiteral(int startChar) {
        int nextChar = peek();
        if (isHexIndicator(startChar, nextChar)) {
            return processHexLiteral();
        }
        // Track the digit count to detect leading zeros.
        int len = 1;
        while (!reader.isEOF()) {
            switch (nextChar) {
                case LexerTerminals.DOT:
                case 'e':
                case 'E':
                case 'f':
                case 'F':
                case 'd':
                case 'D':
                    char nextNextChar = reader.peek(1);
                    // '1..' is a range, not a float.
                    if (nextNextChar == LexerTerminals.DOT) {
                        break;
                    }
                    // '1.x' where 'x' starts an identifier is a member access, not a float.
                    if (nextChar == LexerTerminals.DOT && isNumericFollowedByIdentifier(nextNextChar)) {
                        break;
                    }
                    // Float forms are not recognized while lexing import declarations.
                    if (this.mode == ParserMode.IMPORT) {
                        break;
                    }
                    if (startChar == '0' && len > 1) {
                        reportLexerError(DiagnosticErrorCode.ERROR_LEADING_ZEROS_IN_NUMERIC_LITERALS);
                    }
                    return processDecimalFloatLiteral();
                default:
                    if (isDigit(nextChar)) {
                        reader.advance();
                        len++;
                        nextChar = peek();
                        continue;
                    }
                    break;
            }
            break;
        }
        if (startChar == '0' && len > 1) {
            reportLexerError(DiagnosticErrorCode.ERROR_LEADING_ZEROS_IN_NUMERIC_LITERALS);
        }
        return getLiteral(SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN);
    }

    /**
     * Check whether the character after a trailing dot begins an identifier rather than
     * a float suffix/exponent (so the dot is a member access).
     */
    private boolean isNumericFollowedByIdentifier(char nextNextChar) {
        switch (nextNextChar) {
            case 'e':
            case 'E':
            case 'f':
            case 'F':
            case 'd':
            case 'D':
                // 'e'/'f'/'d' may still be an exponent/suffix; look one more character ahead.
                char thirdChar = this.reader.peek(2);
                if (thirdChar == LexerTerminals.PLUS || thirdChar == LexerTerminals.MINUS) {
                    return false;
                }
                return isIdentifierInitialChar(thirdChar);
            default:
                return isIdentifierInitialChar(nextNextChar);
        }
    }

    /**
     * <p>
     * Process and returns a decimal floating point literal.
     * </p>
     * <code>
     * DecimalFloatingPointNumber :=
     *    DecimalNumber Exponent [FloatingPointTypeSuffix]
     *    | DottedDecimalNumber [Exponent] [FloatingPointTypeSuffix]
     *    | DecimalNumber FloatingPointTypeSuffix
     * <br/>
     * DottedDecimalNumber := DecimalNumber . Digit* | . Digit+
     * <br/>
     * FloatingPointTypeSuffix := DecimalTypeSuffix | FloatTypeSuffix
     * <br/>
     * DecimalTypeSuffix := d | D
     * <br/>
     * FloatTypeSuffix := f | F
     * </code>
     *
     * @return The decimal floating point literal.
     */
    private STToken processDecimalFloatLiteral() {
        int nextChar = peek();
        if (nextChar == LexerTerminals.DOT) {
            reader.advance();
            nextChar = peek();
        }
        while (isDigit(nextChar)) {
            reader.advance();
            nextChar = peek();
        }
        switch (nextChar) {
            case 'e':
            case 'E':
                return processExponent(false);
            case 'f':
            case 'F':
            case 'd':
            case 'D':
                return parseFloatingPointTypeSuffix();
        }
        return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN);
    }

    /**
     * <p>
     * Process an exponent or hex-exponent.
     * </p>
     * <code>
     * exponent := Exponent | HexExponent
     * <br/>
     * Exponent := ExponentIndicator [Sign] Digit+
     * <br/>
     * HexExponent := HexExponentIndicator [Sign] Digit+
     * <br/>
     * ExponentIndicator := e | E
     * <br/>
     * HexExponentIndicator := p | P
     * <br/>
     * Sign := + | -
     * <br/>
     * Digit := 0 .. 9
     * </code>
     *
     * @param isHex HexExponent or not
     * @return The decimal floating point literal.
     */
    private STToken processExponent(boolean isHex) {
        // Consume the exponent indicator ('e'/'E' or 'p'/'P').
        reader.advance();
        int nextChar = peek();
        if (nextChar == LexerTerminals.PLUS || nextChar == LexerTerminals.MINUS) {
            reader.advance();
            nextChar = peek();
        }
        if (!isDigit(nextChar)) {
            reportLexerError(DiagnosticErrorCode.ERROR_MISSING_DIGIT_AFTER_EXPONENT_INDICATOR);
        }
        while (isDigit(nextChar)) {
            reader.advance();
            nextChar = peek();
        }
        if (isHex) {
            return getLiteral(SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN);
        }
        switch (nextChar) {
            case 'f':
            case 'F':
            case 'd':
            case 'D':
                return parseFloatingPointTypeSuffix();
        }
        return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN);
    }

    /**
     * <p>
     * Parse floating point type suffix.
     * </p>
     * <code>
     * FloatingPointTypeSuffix := DecimalTypeSuffix | FloatTypeSuffix
     * <br/>
     * DecimalTypeSuffix := d | D
     * <br/>
     * FloatTypeSuffix := f | F
     * </code>
     *
     * @return The decimal floating point literal.
     */
    private STToken parseFloatingPointTypeSuffix() {
        reader.advance();
        return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN);
    }

    /**
     * <p>
     * Process and returns a hex literal.
     * </p>
     * <code>
     * hex-literal := HexIntLiteral | HexFloatingPointLiteral
     * <br/>
     * HexIntLiteral := HexIndicator HexNumber
     * <br/>
     * HexNumber := HexDigit+
     * <br/>
     * HexIndicator := 0x | 0X
     * <br/>
     * HexDigit := Digit | a .. f | A .. F
     * <br/>
     * HexFloatingPointLiteral := HexIndicator HexFloatingPointNumber
     * <br/>
     * HexFloatingPointNumber := HexNumber HexExponent | DottedHexNumber [HexExponent]
     * <br/>
     * DottedHexNumber := HexDigit+ . HexDigit* | . HexDigit+
     * <br/>
     * </code>
     *
     * @return The hex literal.
*/
    private STToken processHexLiteral() {
        // Consume the 'x'/'X' of the hex indicator.
        reader.advance();
        if (peek() == LexerTerminals.DOT && !isHexDigit(reader.peek(1))) {
            reportLexerError(DiagnosticErrorCode.ERROR_MISSING_HEX_DIGIT_AFTER_DOT);
        }
        int nextChar;
        while (isHexDigit(peek())) {
            reader.advance();
        }
        nextChar = peek();
        switch (nextChar) {
            case LexerTerminals.DOT:
                // Dotted hex number, optionally followed by a hex exponent.
                reader.advance();
                nextChar = peek();
                while (isHexDigit(nextChar)) {
                    reader.advance();
                    nextChar = peek();
                }
                switch (nextChar) {
                    case 'p':
                    case 'P':
                        return processExponent(true);
                }
                break;
            case 'p':
            case 'P':
                return processExponent(true);
            default:
                return getLiteral(SyntaxKind.HEX_INTEGER_LITERAL_TOKEN);
        }
        return getLiteral(SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN);
    }

    /**
     * Process and returns an identifier or a keyword.
     *
     * @return An identifier or a keyword.
     */
    private STToken processIdentifierOrKeyword() {
        processUnquotedIdentifier();
        // Map reserved words to their keyword tokens; everything else is an identifier.
        String tokenText = getLexeme();
        switch (tokenText) {
            // built-in named-types
            case LexerTerminals.INT:
                return getSyntaxToken(SyntaxKind.INT_KEYWORD);
            case LexerTerminals.FLOAT:
                return getSyntaxToken(SyntaxKind.FLOAT_KEYWORD);
            case LexerTerminals.STRING:
                return getSyntaxToken(SyntaxKind.STRING_KEYWORD);
            case LexerTerminals.BOOLEAN:
                return getSyntaxToken(SyntaxKind.BOOLEAN_KEYWORD);
            case LexerTerminals.DECIMAL:
                return getSyntaxToken(SyntaxKind.DECIMAL_KEYWORD);
            case LexerTerminals.XML:
                return getSyntaxToken(SyntaxKind.XML_KEYWORD);
            case LexerTerminals.JSON:
                return getSyntaxToken(SyntaxKind.JSON_KEYWORD);
            case LexerTerminals.HANDLE:
                return getSyntaxToken(SyntaxKind.HANDLE_KEYWORD);
            case LexerTerminals.ANY:
                return getSyntaxToken(SyntaxKind.ANY_KEYWORD);
            case LexerTerminals.ANYDATA:
                return getSyntaxToken(SyntaxKind.ANYDATA_KEYWORD);
            case LexerTerminals.NEVER:
                return getSyntaxToken(SyntaxKind.NEVER_KEYWORD);
            case LexerTerminals.BYTE:
                return getSyntaxToken(SyntaxKind.BYTE_KEYWORD);
            // other keywords
            case LexerTerminals.PUBLIC:
                return getSyntaxToken(SyntaxKind.PUBLIC_KEYWORD);
            case LexerTerminals.PRIVATE:
                return getSyntaxToken(SyntaxKind.PRIVATE_KEYWORD);
            case LexerTerminals.FUNCTION:
                return getSyntaxToken(SyntaxKind.FUNCTION_KEYWORD);
            case LexerTerminals.RETURN:
                return getSyntaxToken(SyntaxKind.RETURN_KEYWORD);
            case LexerTerminals.RETURNS:
                return getSyntaxToken(SyntaxKind.RETURNS_KEYWORD);
            case LexerTerminals.EXTERNAL:
                return getSyntaxToken(SyntaxKind.EXTERNAL_KEYWORD);
            case LexerTerminals.TYPE:
                return getSyntaxToken(SyntaxKind.TYPE_KEYWORD);
            case LexerTerminals.RECORD:
                return getSyntaxToken(SyntaxKind.RECORD_KEYWORD);
            case LexerTerminals.OBJECT:
                return getSyntaxToken(SyntaxKind.OBJECT_KEYWORD);
            case LexerTerminals.REMOTE:
                return getSyntaxToken(SyntaxKind.REMOTE_KEYWORD);
            case LexerTerminals.ABSTRACT:
                return getSyntaxToken(SyntaxKind.ABSTRACT_KEYWORD);
            case LexerTerminals.CLIENT:
                return getSyntaxToken(SyntaxKind.CLIENT_KEYWORD);
            case LexerTerminals.IF:
                return getSyntaxToken(SyntaxKind.IF_KEYWORD);
            case LexerTerminals.ELSE:
                return getSyntaxToken(SyntaxKind.ELSE_KEYWORD);
            case LexerTerminals.WHILE:
                return getSyntaxToken(SyntaxKind.WHILE_KEYWORD);
            case LexerTerminals.TRUE:
                return getSyntaxToken(SyntaxKind.TRUE_KEYWORD);
            case LexerTerminals.FALSE:
                return getSyntaxToken(SyntaxKind.FALSE_KEYWORD);
            case LexerTerminals.CHECK:
                return getSyntaxToken(SyntaxKind.CHECK_KEYWORD);
            case LexerTerminals.FAIL:
                return getSyntaxToken(SyntaxKind.FAIL_KEYWORD);
            case LexerTerminals.CHECKPANIC:
                return getSyntaxToken(SyntaxKind.CHECKPANIC_KEYWORD);
            case LexerTerminals.CONTINUE:
                return getSyntaxToken(SyntaxKind.CONTINUE_KEYWORD);
            case LexerTerminals.BREAK:
                return getSyntaxToken(SyntaxKind.BREAK_KEYWORD);
            case LexerTerminals.PANIC:
                return getSyntaxToken(SyntaxKind.PANIC_KEYWORD);
            case LexerTerminals.IMPORT:
                return getSyntaxToken(SyntaxKind.IMPORT_KEYWORD);
            case LexerTerminals.VERSION:
                return getSyntaxToken(SyntaxKind.VERSION_KEYWORD);
            case LexerTerminals.AS:
                return getSyntaxToken(SyntaxKind.AS_KEYWORD);
            case LexerTerminals.SERVICE:
                return getSyntaxToken(SyntaxKind.SERVICE_KEYWORD);
            case LexerTerminals.ON:
                return getSyntaxToken(SyntaxKind.ON_KEYWORD);
            case LexerTerminals.RESOURCE:
                return getSyntaxToken(SyntaxKind.RESOURCE_KEYWORD);
            case LexerTerminals.LISTENER:
                return getSyntaxToken(SyntaxKind.LISTENER_KEYWORD);
            case LexerTerminals.CONST:
                return getSyntaxToken(SyntaxKind.CONST_KEYWORD);
            case LexerTerminals.FINAL:
                return getSyntaxToken(SyntaxKind.FINAL_KEYWORD);
            case LexerTerminals.TYPEOF:
                return getSyntaxToken(SyntaxKind.TYPEOF_KEYWORD);
            case LexerTerminals.IS:
                return getSyntaxToken(SyntaxKind.IS_KEYWORD);
            case LexerTerminals.NULL:
                return getSyntaxToken(SyntaxKind.NULL_KEYWORD);
            case LexerTerminals.LOCK:
                return getSyntaxToken(SyntaxKind.LOCK_KEYWORD);
            case LexerTerminals.ANNOTATION:
                return getSyntaxToken(SyntaxKind.ANNOTATION_KEYWORD);
            case LexerTerminals.SOURCE:
                return getSyntaxToken(SyntaxKind.SOURCE_KEYWORD);
            case LexerTerminals.VAR:
                return getSyntaxToken(SyntaxKind.VAR_KEYWORD);
            case LexerTerminals.WORKER:
                return getSyntaxToken(SyntaxKind.WORKER_KEYWORD);
            case LexerTerminals.PARAMETER:
                return getSyntaxToken(SyntaxKind.PARAMETER_KEYWORD);
            case LexerTerminals.FIELD:
                return getSyntaxToken(SyntaxKind.FIELD_KEYWORD);
            case LexerTerminals.ISOLATED:
                return getSyntaxToken(SyntaxKind.ISOLATED_KEYWORD);
            case LexerTerminals.XMLNS:
                return getSyntaxToken(SyntaxKind.XMLNS_KEYWORD);
            case LexerTerminals.FORK:
                return getSyntaxToken(SyntaxKind.FORK_KEYWORD);
            case LexerTerminals.MAP:
                return getSyntaxToken(SyntaxKind.MAP_KEYWORD);
            case LexerTerminals.FUTURE:
                return getSyntaxToken(SyntaxKind.FUTURE_KEYWORD);
            case LexerTerminals.TYPEDESC:
                return getSyntaxToken(SyntaxKind.TYPEDESC_KEYWORD);
            case LexerTerminals.TRAP:
                return getSyntaxToken(SyntaxKind.TRAP_KEYWORD);
            case LexerTerminals.IN:
                return getSyntaxToken(SyntaxKind.IN_KEYWORD);
            case LexerTerminals.FOREACH:
                return getSyntaxToken(SyntaxKind.FOREACH_KEYWORD);
            case LexerTerminals.TABLE:
                return getSyntaxToken(SyntaxKind.TABLE_KEYWORD);
            case LexerTerminals.ERROR:
                return getSyntaxToken(SyntaxKind.ERROR_KEYWORD);
            case LexerTerminals.LET:
                return getSyntaxToken(SyntaxKind.LET_KEYWORD);
            case LexerTerminals.STREAM:
                return getSyntaxToken(SyntaxKind.STREAM_KEYWORD);
            case LexerTerminals.NEW:
                return getSyntaxToken(SyntaxKind.NEW_KEYWORD);
            case LexerTerminals.READONLY:
                return getSyntaxToken(SyntaxKind.READONLY_KEYWORD);
            case LexerTerminals.DISTINCT:
                return getSyntaxToken(SyntaxKind.DISTINCT_KEYWORD);
            case LexerTerminals.FROM:
                return getSyntaxToken(SyntaxKind.FROM_KEYWORD);
            case LexerTerminals.START:
                return getSyntaxToken(SyntaxKind.START_KEYWORD);
            case LexerTerminals.FLUSH:
                return getSyntaxToken(SyntaxKind.FLUSH_KEYWORD);
            case LexerTerminals.WAIT:
                return getSyntaxToken(SyntaxKind.WAIT_KEYWORD);
            case LexerTerminals.DO:
                return getSyntaxToken(SyntaxKind.DO_KEYWORD);
            case LexerTerminals.TRANSACTION:
                return getSyntaxToken(SyntaxKind.TRANSACTION_KEYWORD);
            case LexerTerminals.COMMIT:
                return getSyntaxToken(SyntaxKind.COMMIT_KEYWORD);
            case LexerTerminals.RETRY:
                return getSyntaxToken(SyntaxKind.RETRY_KEYWORD);
            case LexerTerminals.ROLLBACK:
                return getSyntaxToken(SyntaxKind.ROLLBACK_KEYWORD);
            case LexerTerminals.TRANSACTIONAL:
                return getSyntaxToken(SyntaxKind.TRANSACTIONAL_KEYWORD);
            case LexerTerminals.ENUM:
                return getSyntaxToken(SyntaxKind.ENUM_KEYWORD);
            case LexerTerminals.BASE16:
                return getSyntaxToken(SyntaxKind.BASE16_KEYWORD);
            case LexerTerminals.BASE64:
                return getSyntaxToken(SyntaxKind.BASE64_KEYWORD);
            case LexerTerminals.MATCH:
                return getSyntaxToken(SyntaxKind.MATCH_KEYWORD);
            case LexerTerminals.CONFLICT:
                return getSyntaxToken(SyntaxKind.CONFLICT_KEYWORD);
            case LexerTerminals.CLASS:
                return getSyntaxToken(SyntaxKind.CLASS_KEYWORD);
            case LexerTerminals.CONFIGURABLE:
                return getSyntaxToken(SyntaxKind.CONFIGURABLE_KEYWORD);
            default:
                // Query-clause words are keywords only inside a query context.
                if (this.keywordModes.contains(KeywordMode.QUERY)) {
                    return getQueryCtxKeywordOrIdentifier(tokenText);
                }
                return getIdentifierToken();
        }
    }

    /**
     * Map a lexeme to a query-context keyword token, or fall back to an identifier.
     */
    private STToken getQueryCtxKeywordOrIdentifier(String tokenText) {
        switch (tokenText) {
            case LexerTerminals.WHERE:
                return getSyntaxToken(SyntaxKind.WHERE_KEYWORD);
            case LexerTerminals.SELECT:
                return getSyntaxToken(SyntaxKind.SELECT_KEYWORD);
            case LexerTerminals.LIMIT:
                return getSyntaxToken(SyntaxKind.LIMIT_KEYWORD);
            case LexerTerminals.JOIN:
                return getSyntaxToken(SyntaxKind.JOIN_KEYWORD);
            case LexerTerminals.OUTER:
                return getSyntaxToken(SyntaxKind.OUTER_KEYWORD);
            case LexerTerminals.EQUALS:
                return getSyntaxToken(SyntaxKind.EQUALS_KEYWORD);
            case LexerTerminals.ORDER:
                return getSyntaxToken(SyntaxKind.ORDER_KEYWORD);
            case LexerTerminals.BY:
                return getSyntaxToken(SyntaxKind.BY_KEYWORD);
            case LexerTerminals.ASCENDING:
                return getSyntaxToken(SyntaxKind.ASCENDING_KEYWORD);
            case LexerTerminals.DESCENDING:
                return getSyntaxToken(SyntaxKind.DESCENDING_KEYWORD);
            default:
                return getIdentifierToken();
        }
    }

    /**
     * Process and returns an invalid token. Consumes the input until an end of the
     * invalid token is reached; see {@code isEndOfInvalidToken()}.
     */
    private STToken processInvalidToken() {
        while (!isEndOfInvalidToken()) {
            reader.advance();
        }
        String tokenText = getLexeme();
        STToken invalidToken = STNodeFactory.createInvalidToken(tokenText);
        // The invalid token is preserved as minutiae attached to the next token.
        STNode invalidNodeMinutiae = STNodeFactory.createInvalidNodeMinutiae(invalidToken);
        this.leadingTriviaList.add(invalidNodeMinutiae);
        return invalidToken;
    }

    /**
     * Check whether the current index is pointing to an end of an invalid lexer-token.
* An invalid token is considered to end if one of the below is reached:
     * <ul>
     * <li>a whitespace</li>
     * <li>semicolon</li>
     * <li>newline</li>
     * </ul>
     *
     * @return <code>true</code>, if the end of an invalid token is reached, <code>false</code> otherwise
     */
    private boolean isEndOfInvalidToken() {
        if (reader.isEOF()) {
            return true;
        }
        int currentChar = peek();
        // Any known separator/operator character terminates the invalid run.
        switch (currentChar) {
            case LexerTerminals.NEWLINE:
            case LexerTerminals.CARRIAGE_RETURN:
            case LexerTerminals.SPACE:
            case LexerTerminals.TAB:
            case LexerTerminals.SEMICOLON:
            case LexerTerminals.COLON:
            case LexerTerminals.DOT:
            case LexerTerminals.COMMA:
            case LexerTerminals.OPEN_PARANTHESIS:
            case LexerTerminals.CLOSE_PARANTHESIS:
            case LexerTerminals.OPEN_BRACE:
            case LexerTerminals.CLOSE_BRACE:
            case LexerTerminals.OPEN_BRACKET:
            case LexerTerminals.CLOSE_BRACKET:
            case LexerTerminals.PIPE:
            case LexerTerminals.QUESTION_MARK:
            case LexerTerminals.DOUBLE_QUOTE:
            case LexerTerminals.SINGLE_QUOTE:
            case LexerTerminals.HASH:
            case LexerTerminals.AT:
            case LexerTerminals.BACKTICK:
            case LexerTerminals.DOLLAR:
            case LexerTerminals.EQUAL:
            case LexerTerminals.PLUS:
            case LexerTerminals.MINUS:
            case LexerTerminals.ASTERISK:
            case LexerTerminals.SLASH:
            case LexerTerminals.PERCENT:
            case LexerTerminals.GT:
            case LexerTerminals.LT:
            case LexerTerminals.BACKSLASH:
            case LexerTerminals.EXCLAMATION_MARK:
            case LexerTerminals.BITWISE_AND:
            case LexerTerminals.BITWISE_XOR:
            case LexerTerminals.NEGATION:
                return true;
            default:
                return isIdentifierFollowingChar(currentChar);
        }
    }

    /**
     * <p>
     * Check whether current input index points to a start of a hex-numeric literal.
     * </p>
     * <code>HexIndicator := 0x | 0X</code>
     *
     * @param startChar Starting character of the literal
     * @param nextChar Second character of the literal
     * @return <code>true</code>, if the current input points to a start of a hex-numeric literal.
     *         <code>false</code> otherwise.
     */
    private boolean isHexIndicator(int startChar, int nextChar) {
        return startChar == '0' && (nextChar == 'x' || nextChar == 'X');
    }

    /**
     * Returns the next character from the reader, without consuming the stream.
     *
     * @return Next character
     */
    private int peek() {
        return this.reader.peek();
    }

    /**
     * Get the text associated with the current token.
     *
     * @return Text associated with the current token.
     */
    private String getLexeme() {
        return reader.getMarkedChars();
    }

    /**
     * Process and return double-quoted string literal.
     * <p>
     * <code>string-literal := DoubleQuotedStringLiteral
     * <br/>
     * DoubleQuotedStringLiteral := " (StringChar | StringEscape)* "
     * <br/>
     * StringChar := ^ ( 0xA | 0xD | \ | " )
     * <br/>
     * StringEscape := StringSingleEscape | NumericEscape
     * <br/>
     * StringSingleEscape := \t | \n | \r | \\ | \"
     * <br/>
     * NumericEscape := \ u{ CodePoint }
     * <br/>
     * CodePoint := HexDigit+
     * </code>
     *
     * @return String literal token
     */
    private STToken processStringLiteral() {
        int nextChar;
        while (!reader.isEOF()) {
            nextChar = peek();
            switch (nextChar) {
                case LexerTerminals.NEWLINE:
                case LexerTerminals.CARRIAGE_RETURN:
                    // String literals may not span lines; the closing quote is missing.
                    reportLexerError(DiagnosticErrorCode.ERROR_MISSING_DOUBLE_QUOTE);
                    break;
                case LexerTerminals.DOUBLE_QUOTE:
                    this.reader.advance();
                    break;
                case LexerTerminals.BACKSLASH:
                    switch (this.reader.peek(1)) {
                        case 'n':
                        case 't':
                        case 'r':
                        case LexerTerminals.BACKSLASH:
                        case LexerTerminals.DOUBLE_QUOTE:
                            this.reader.advance(2);
                            continue;
                        case 'u':
                            if (this.reader.peek(2) == LexerTerminals.OPEN_BRACE) {
                                processNumericEscape();
                            } else {
                                reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE);
                                this.reader.advance(2);
                            }
                            continue;
                        default:
                            reportInvalidEscapeSequence(this.reader.peek(1));
                            this.reader.advance();
                            continue;
                    }
                default:
                    this.reader.advance();
                    continue;
            }
            break;
        }
        return getLiteral(SyntaxKind.STRING_LITERAL_TOKEN);
    }

    /**
     * Process numeric escape.
* <p>
     * <code>NumericEscape := \ u { CodePoint }</code>
     */
    private void processNumericEscape() {
        // Process '\ u {' — the three characters of the escape opener.
        this.reader.advance(3);
        if (!isHexDigit(peek())) {
            reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE);
            return;
        }
        reader.advance();
        while (isHexDigit(peek())) {
            reader.advance();
        }
        if (peek() != LexerTerminals.CLOSE_BRACE) {
            reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE);
            return;
        }
        // Consume the closing brace.
        this.reader.advance();
    }

    /**
     * Process any token that starts with '!'.
     *
     * @return One of the tokens: <code>'!', '!=', '!=='</code>
     */
    private STToken processExclamationMarkOperator() {
        switch (peek()) {
            case LexerTerminals.EQUAL:
                reader.advance();
                if (peek() == LexerTerminals.EQUAL) {
                    reader.advance();
                    return getSyntaxToken(SyntaxKind.NOT_DOUBLE_EQUAL_TOKEN);
                } else {
                    return getSyntaxToken(SyntaxKind.NOT_EQUAL_TOKEN);
                }
            default:
                if (isNotIsToken()) {
                    reader.advance(2);
                    return getSyntaxToken(SyntaxKind.NOT_IS_KEYWORD);
                }
                return getSyntaxToken(SyntaxKind.EXCLAMATION_MARK_TOKEN);
        }
    }

    /**
     * Check whether '!' is followed by the 'is' keyword (and not by a longer
     * identifier such as 'isolated' or an escaped identifier).
     */
    private boolean isNotIsToken() {
        if ((reader.peek() == 'i' && reader.peek(1) == 's') &&
                !(isIdentifierFollowingChar(reader.peek(2)) || reader.peek(2) == LexerTerminals.BACKSLASH)) {
            return true;
        } else {
            return false;
        }
    }

    /**
     * Process any token that starts with '|'.
     *
     * @return One of the tokens: <code>'|', '|}', '||'</code>
     */
    private STToken processPipeOperator() {
        switch (peek()) {
            case LexerTerminals.CLOSE_BRACE:
                reader.advance();
                return getSyntaxToken(SyntaxKind.CLOSE_BRACE_PIPE_TOKEN);
            case LexerTerminals.PIPE:
                reader.advance();
                return getSyntaxToken(SyntaxKind.LOGICAL_OR_TOKEN);
            default:
                return getSyntaxToken(SyntaxKind.PIPE_TOKEN);
        }
    }

    /**
     * Process any token that starts with '/'.
     *
     * @return One of the tokens: <code>'/', '/*', '/**\/<' </code>
     */
    private STToken processSlashToken() {
        // The '/' has already been consumed by the caller.
        if (peek() != LexerTerminals.ASTERISK) {
            return getSyntaxToken(SyntaxKind.SLASH_TOKEN);
        }
        reader.advance();
        if (peek() != LexerTerminals.ASTERISK) {
            return getSyntaxToken(SyntaxKind.SLASH_ASTERISK_TOKEN);
        } else if (reader.peek(1) == LexerTerminals.SLASH && reader.peek(2) == LexerTerminals.LT) {
            reader.advance(3);
            return getSyntaxToken(SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
        } else {
            // '/**' not followed by '/<': only '/*' is consumed; the second '*' is left in the reader.
            return getSyntaxToken(SyntaxKind.SLASH_ASTERISK_TOKEN);
        }
    }

    /**
     * Process and return documentation string.
     * <p>
     * <code>
     * DocumentationContentString := ( BlankSpace*
     * <br/>
     * DocumentationContent := (^ 0xA)* 0xA
     * <br/>
     * BlankSpace := Tab | Space
     * <br/>
     * Space := 0x20
     * <br/>
     * Tab := 0x9
     * </code>
     *
     * @return Documentation string token
     */
    private STToken processDocumentationString() {
        int nextChar = peek();
        while (!reader.isEOF()) {
            switch (nextChar) {
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.NEWLINE:
                    // Consume the line terminator ('\r\n' counts as a single terminator).
                    if (peek() == LexerTerminals.CARRIAGE_RETURN && reader.peek(1) == LexerTerminals.NEWLINE) {
                        reader.advance();
                    }
                    reader.advance();
                    // Look past leading blanks of the next line; the documentation string
                    // continues only if that line also starts with '#'.
                    int lookAheadCount = 0;
                    int lookAheadChar = reader.peek(lookAheadCount);
                    while (lookAheadChar == LexerTerminals.SPACE || lookAheadChar == LexerTerminals.TAB) {
                        lookAheadCount++;
                        lookAheadChar = reader.peek(lookAheadCount);
                    }
                    if (lookAheadChar != LexerTerminals.HASH) {
                        break;
                    }
                    reader.advance(lookAheadCount);
                    nextChar = peek();
                    continue;
                default:
                    reader.advance();
                    nextChar = peek();
                    continue;
            }
            break;
        }
        STNode leadingTrivia = getLeadingTrivia();
        String lexeme = getLexeme();
        // Documentation strings carry no trailing trivia.
        STNode trailingTrivia = STNodeFactory.createNodeList(new ArrayList<>(0));
        return STNodeFactory.createLiteralValueToken(SyntaxKind.DOCUMENTATION_STRING, lexeme, leadingTrivia,
                trailingTrivia);
    }

    /**
     * Create a backtick token with no trailing trivia.
     *
     * @return Backtick token
     */
    private STToken getBacktickToken() {
        STNode leadingTrivia = getLeadingTrivia();
        STNode trailingTrivia = STNodeFactory.createEmptyNodeList();
        return STNodeFactory.createToken(SyntaxKind.BACKTICK_TOKEN, leadingTrivia, trailingTrivia);
    }

    /**
     * Read the next token while in TEMPLATE mode: closing backtick, interpolation start,
     * or a run of template string content.
     *
     * @return Next token within a template
     */
    private STToken readTemplateToken() {
        reader.mark();
        if (reader.isEOF()) {
            return getSyntaxToken(SyntaxKind.EOF_TOKEN);
        }
        char nextChar = this.reader.peek();
        switch (nextChar) {
            case LexerTerminals.BACKTICK:
                reader.advance();
                endMode();
                return getSyntaxToken(SyntaxKind.BACKTICK_TOKEN);
            case LexerTerminals.DOLLAR:
                if (reader.peek(1) == LexerTerminals.OPEN_BRACE) {
                    startMode(ParserMode.INTERPOLATION);
                    reader.advance(2);
                    return getSyntaxToken(SyntaxKind.INTERPOLATION_START_TOKEN);
                }
                // fall through: a lone '$' is ordinary template content
            default:
                // Consume content until '${', a backtick, or EOF.
                while (!reader.isEOF()) {
                    nextChar = this.reader.peek();
                    switch (nextChar) {
                        case LexerTerminals.DOLLAR:
                            if (this.reader.peek(1) == LexerTerminals.OPEN_BRACE) {
                                break;
                            }
                            reader.advance();
                            continue;
                        case LexerTerminals.BACKTICK:
                            break;
                        default:
                            reader.advance();
                            continue;
                    }
                    break;
                }
        }
        return getLiteral(SyntaxKind.TEMPLATE_STRING);
    }

    /**
     * Process quoted identifier.
     * <p>
     * <code>
     * QuotedIdentifier := ' (IdentifierFollowingChar | IdentifierEscape) IdentifierEnd
     * </code>
     *
     * @return An identifier token.
     */
    private STToken processQuotedIdentifier() {
        processIdentifierEnd();
        // A bare quote with no identifier characters after it is incomplete.
        if (String.valueOf(LexerTerminals.SINGLE_QUOTE).equals(getLexeme())) {
            reportLexerError(DiagnosticErrorCode.ERROR_INCOMPLETE_QUOTED_IDENTIFIER);
        }
        return getIdentifierToken();
    }

    /**
     * Process unquoted identifier.
     * <p>
     * <code>
     * UnquotedIdentifier := (IdentifierInitialChar | IdentifierEscape) IdentifierEnd
     * </code>
     */
    private void processUnquotedIdentifier() {
        processIdentifierEnd();
    }

    /**
     * Process identifier end.
* <p> * <i>Note: Need to update the {@link DocumentationLexer} whenever changing the identifier processing.</i> * <p> * <code> * IdentifierEnd := IdentifierChar* * <br/> * IdentifierChar := IdentifierFollowingChar | IdentifierEscape * <br/> * IdentifierEscape := IdentifierSingleEscape | NumericEscape * </code> * */ private void reportInvalidEscapeSequence(char nextChar) { String escapeSequence = String.valueOf(nextChar); reportLexerError(DiagnosticErrorCode.ERROR_INVALID_ESCAPE_SEQUENCE, escapeSequence); } private boolean isValidQuotedIdentifierEscapeChar(int nextChar) { if ('A' <= nextChar && nextChar <= 'Z') { return false; } if ('a' <= nextChar && nextChar <= 'z') { return false; } return !isUnicodePatternWhiteSpaceChar(nextChar); } private STToken processTokenStartWithLt() { int nextChar = peek(); switch (nextChar) { case LexerTerminals.EQUAL: reader.advance(); return getSyntaxToken(SyntaxKind.LT_EQUAL_TOKEN); case LexerTerminals.MINUS: int nextNextChar = reader.peek(1); if (isDigit(nextNextChar)) { return getSyntaxToken(SyntaxKind.LT_TOKEN); } reader.advance(); return getSyntaxToken(SyntaxKind.LEFT_ARROW_TOKEN); case LexerTerminals.LT: reader.advance(); return getSyntaxToken(SyntaxKind.DOUBLE_LT_TOKEN); } return getSyntaxToken(SyntaxKind.LT_TOKEN); } private STToken processTokenStartWithGt() { if (peek() == LexerTerminals.EQUAL) { reader.advance(); return getSyntaxToken(SyntaxKind.GT_EQUAL_TOKEN); } if (reader.peek() != LexerTerminals.GT) { return getSyntaxToken(SyntaxKind.GT_TOKEN); } char nextChar = reader.peek(1); switch (nextChar) { case LexerTerminals.GT: if (reader.peek(2) == LexerTerminals.EQUAL) { reader.advance(2); return getSyntaxToken(SyntaxKind.TRIPPLE_GT_TOKEN); } return getSyntaxToken(SyntaxKind.GT_TOKEN); case LexerTerminals.EQUAL: reader.advance(1); return getSyntaxToken(SyntaxKind.DOUBLE_GT_TOKEN); default: return getSyntaxToken(SyntaxKind.GT_TOKEN); } } /* * 
------------------------------------------------------------------------------------------------------------ * INTERPOLATION Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readTokenInInterpolation() { reader.mark(); int nextChar = peek(); switch (nextChar) { case LexerTerminals.OPEN_BRACE: startMode(ParserMode.INTERPOLATION_BRACED_CONTENT); return readToken(); case LexerTerminals.CLOSE_BRACE: endMode(); reader.advance(); return getSyntaxTokenWithoutTrailingTrivia(SyntaxKind.CLOSE_BRACE_TOKEN); case LexerTerminals.BACKTICK: default: return readToken(); } } private STToken getSyntaxTokenWithoutTrailingTrivia(SyntaxKind kind) { STNode leadingTrivia = getLeadingTrivia(); STNode trailingTrivia = STNodeFactory.createNodeList(new ArrayList<>(0)); return STNodeFactory.createToken(kind, leadingTrivia, trailingTrivia); } /* * ------------------------------------------------------------------------------------------------------------ * INTERPOLATION_BRACED_CONTENT Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readTokenInBracedContentInInterpolation() { reader.mark(); int nextChar = peek(); switch (nextChar) { case LexerTerminals.OPEN_BRACE: startMode(ParserMode.INTERPOLATION_BRACED_CONTENT); break; case LexerTerminals.CLOSE_BRACE: endMode(); break; case LexerTerminals.BACKTICK: while (this.mode != ParserMode.DEFAULT) { endMode(); } reader.advance(); return getBacktickToken(); default: break; } return readToken(); } }
I think leaving it here is the cleanest way now that I've looked at the usage again. It needs to eventually be copied to a byte[] in this case so it can be passed over to the native reader.
public static byte[] getPosition(@Nullable ShufflePosition shufflePosition) { if (shufflePosition == null) { return null; } Preconditions.checkArgument(shufflePosition instanceof ByteArrayShufflePosition); ByteArrayShufflePosition adapter = (ByteArrayShufflePosition) shufflePosition; return adapter.getPosition().toByteArray(); }
return adapter.getPosition().toByteArray();
public static byte[] getPosition(@Nullable ShufflePosition shufflePosition) { if (shufflePosition == null) { return null; } Preconditions.checkArgument(shufflePosition instanceof ByteArrayShufflePosition); ByteArrayShufflePosition adapter = (ByteArrayShufflePosition) shufflePosition; return adapter.getPosition().toByteArray(); }
class ByteArrayShufflePosition implements Comparable<ShufflePosition>, ShufflePosition { private static final ByteString ZERO = ByteString.copyFrom(new byte[] {0}); private final ByteString position; public ByteArrayShufflePosition(ByteString position) { this.position = position; } public static ByteArrayShufflePosition fromBase64(String position) { return ByteArrayShufflePosition.of(decodeBase64(position)); } public static ByteArrayShufflePosition of(byte[] position) { if (position == null) { return null; } return new ByteArrayShufflePosition(UnsafeByteOperations.unsafeWrap(position)); } public static ByteArrayShufflePosition of(ByteString position) { if (position == null) { return null; } return new ByteArrayShufflePosition(position); } public ByteString getPosition() { return position; } public String encodeBase64() { return encodeBase64URLSafeString(position.toByteArray()); } /** * Returns the {@link ByteArrayShufflePosition} that immediately follows this one, i.e. there are * no possible {@link ByteArrayShufflePosition ByteArrayShufflePositions} between this and its * successor. */ public ByteArrayShufflePosition immediateSuccessor() { return new ByteArrayShufflePosition(position.concat(ZERO)); } @Override public boolean equals(@Nullable Object o) { if (this == o) { return true; } if (o instanceof ByteArrayShufflePosition) { ByteArrayShufflePosition that = (ByteArrayShufflePosition) o; return this.position.equals(that.position); } return false; } @Override public int hashCode() { return position.hashCode(); } @Override public String toString() { return "ShufflePosition(base64:" + encodeBase64() + ")"; } /** May only compare homogenous ByteArrayShufflePosition types. */ @Override public int compareTo(ShufflePosition o) { if (this == o) { return 0; } ByteArrayShufflePosition other = (ByteArrayShufflePosition) o; return ByteString.unsignedLexicographicalComparator().compare(position, other.position); } }
class ByteArrayShufflePosition implements Comparable<ShufflePosition>, ShufflePosition { private static final ByteString ZERO = ByteString.copyFrom(new byte[] {0}); private final ByteString position; public ByteArrayShufflePosition(ByteString position) { this.position = position; } public static ByteArrayShufflePosition fromBase64(String position) { return ByteArrayShufflePosition.of(decodeBase64(position)); } public static ByteArrayShufflePosition of(byte[] position) { if (position == null) { return null; } return new ByteArrayShufflePosition(UnsafeByteOperations.unsafeWrap(position)); } public static ByteArrayShufflePosition of(ByteString position) { if (position == null) { return null; } return new ByteArrayShufflePosition(position); } public ByteString getPosition() { return position; } public String encodeBase64() { return encodeBase64URLSafeString(position.toByteArray()); } /** * Returns the {@link ByteArrayShufflePosition} that immediately follows this one, i.e. there are * no possible {@link ByteArrayShufflePosition ByteArrayShufflePositions} between this and its * successor. */ public ByteArrayShufflePosition immediateSuccessor() { return new ByteArrayShufflePosition(position.concat(ZERO)); } @Override public boolean equals(@Nullable Object o) { if (this == o) { return true; } if (o instanceof ByteArrayShufflePosition) { ByteArrayShufflePosition that = (ByteArrayShufflePosition) o; return this.position.equals(that.position); } return false; } @Override public int hashCode() { return position.hashCode(); } @Override public String toString() { return "ShufflePosition(base64:" + encodeBase64() + ")"; } /** May only compare homogenous ByteArrayShufflePosition types. */ @Override public int compareTo(ShufflePosition o) { if (this == o) { return 0; } ByteArrayShufflePosition other = (ByteArrayShufflePosition) o; return ByteString.unsignedLexicographicalComparator().compare(position, other.position); } }
Ah ah I thought about it too but decided to not bother you with this but if we are two, let's be picky :). Yeah, it would sure be a nice improvement. Maybe, use a `Set` instead of a `List`. I suppose a `HashSet` would be good enough.
public static RolesAllowedCheck of(String[] allowedRoles) { return CACHE.computeIfAbsent(Arrays.asList(allowedRoles), new Function<List<String>, RolesAllowedCheck>() { @Override public RolesAllowedCheck apply(List<String> allowedRolesList) { return new RolesAllowedCheck(allowedRolesList.toArray(new String[0])); } }); }
return CACHE.computeIfAbsent(Arrays.asList(allowedRoles), new Function<List<String>, RolesAllowedCheck>() {
public static RolesAllowedCheck of(String[] allowedRoles) { return CACHE.computeIfAbsent(Arrays.asList(allowedRoles), new Function<List<String>, RolesAllowedCheck>() { @Override public RolesAllowedCheck apply(List<String> allowedRolesList) { return new RolesAllowedCheck(allowedRolesList.toArray(new String[0])); } }); }
class hanging around * for the entire lifecycle of the application */ private static final Map<List<String>, RolesAllowedCheck> CACHE = new ConcurrentHashMap<>(); private final String[] allowedRoles; private RolesAllowedCheck(String[] allowedRoles) { this.allowedRoles = allowedRoles; }
class hanging around * for the entire lifecycle of the application */ private static final Map<List<String>, RolesAllowedCheck> CACHE = new ConcurrentHashMap<>(); private final String[] allowedRoles; private RolesAllowedCheck(String[] allowedRoles) { this.allowedRoles = allowedRoles; }
If this step is done only if the `id` and `url` are not null, then the initialization of `username` and `password` variables can be done within the `if` condition.
public void execute(BuildContext buildContext) { CompilerContext context = buildContext.get(BuildContextField.COMPILER_CONTEXT); Manifest manifest = ManifestProcessor.getInstance(context).getManifest(); List<Library> mavenDependencies = new ArrayList<>(); if (manifest.getPlatform().getLibraries() == null) { return; } String targetRepo = buildContext.get(BuildContextField.TARGET_DIR).toString() + File.separator + "platform-libs"; MavenResolver resolver = new MavenResolver(targetRepo); if (manifest.getPlatform().getRepositories() != null && manifest.getPlatform().getRepositories().size() > 0) { for (Repository repository : manifest.getPlatform().getRepositories()) { String id = repository.getId(); String url = repository.getUrl(); String username = repository.getUser(); String password = repository.getToken(); if (id != null && url != null) { if (username != null && password != null) { resolver.addRepository(id, url, username, password); continue; } resolver.addRepository(id, url); } } } for (Library library : manifest.getPlatform().getLibraries()) { if (library.getPath() == null) { mavenDependencies.add(library); } } if (mavenDependencies.size() > 0) { buildContext.out().println("Resolving Maven dependencies\n\tDownloading dependencies into " + targetRepo); for (Library library : mavenDependencies) { try { Dependency dependency = resolver.resolve(library.getGroupId(), library.getArtifactId(), library.getVersion(), false); library.setPath(Utils.getJarPath(targetRepo, dependency)); } catch (MavenResolverException e) { buildContext.err().print("cannot resolve " + library.getArtifactId()); } } buildContext.out().println(); } }
if (username != null && password != null) {
public void execute(BuildContext buildContext) { CompilerContext context = buildContext.get(BuildContextField.COMPILER_CONTEXT); Manifest manifest = ManifestProcessor.getInstance(context).getManifest(); List<Library> platformLibs = manifest.getPlatform().getLibraries(); List<Repository> mavenCustomRepos = manifest.getPlatform().getRepositories(); List<Library> mavenDependencies = new ArrayList<>(); if (platformLibs == null) { return; } String targetRepo = buildContext.get(BuildContextField.TARGET_DIR).toString() + File.separator + "platform-libs"; MavenResolver resolver = new MavenResolver(targetRepo); if (mavenCustomRepos != null && mavenCustomRepos.size() > 0) { for (Repository repository : mavenCustomRepos) { String id = repository.getId(); String url = repository.getUrl(); if (id == null && url == null) { throw createLauncherException("custom maven repository properties are not specified for " + "given platform repository."); } String username = repository.getUsername(); String password = repository.getPassword(); if (username != null && password != null) { resolver.addRepository(id, url, username, password); continue; } resolver.addRepository(id, url); } } for (Library library : platformLibs) { if (library.getPath() == null) { if (library.getArtifactId() == null && library.getGroupId() == null && library.getVersion() == null) { throw createLauncherException("path or maven dependency properties are not specified for " + "given platform library dependency."); } if (library.getArtifactId() == null || library.getGroupId() == null || library.getVersion() == null) { throw createLauncherException("artifact-id, group-id, and version should be specified to " + "resolve the maven dependency."); } mavenDependencies.add(library); } } if (mavenDependencies.size() > 0) { buildContext.out().println("Resolving Maven dependencies\n\tDownloading dependencies into " + targetRepo); for (Library library : mavenDependencies) { try { Dependency dependency = 
resolver.resolve(library.getGroupId(), library.getArtifactId(), library.getVersion(), false); library.setPath(Utils.getJarPath(targetRepo, dependency)); } catch (MavenResolverException e) { throw createLauncherException("cannot resolve " + library.getArtifactId() + ": " + e.getMessage()); } } buildContext.out().println(); } }
class ResolveMavenDependenciesTask implements Task { @Override }
class ResolveMavenDependenciesTask implements Task { @Override }
```suggestion "JDBC Store configured but '%s' datasource is missing. You can configure your datasource by following the guide available at: https://quarkus.io/guides/datasource", ```
public QuarkusQuartzConnectionPoolProvider() { InstanceHandle<AgroalDataSource> instanceHandle; ArcContainer container = Arc.container(); boolean useDefaultDataSource = "QUARKUS_QUARTZ_DEFAULT_DATASOURCE".equals(dataSourceName); if (useDefaultDataSource) { instanceHandle = container.instance(AgroalDataSource.class); } else { instanceHandle = container.instance(AgroalDataSource.class, new DataSourceLiteral(dataSourceName)); } if (instanceHandle.isAvailable()) { this.dataSource = instanceHandle.get(); } else { String message = String.format( "JDBC Store configured but '%s' datasource is missing. You can configure your datasource by following the guide available at: https: useDefaultDataSource ? "default" : dataSourceName); throw new IllegalStateException(message); } }
"JDBC Store configured but '%s' datasource is missing. You can configure your datasource by following the guide available at: https:
public QuarkusQuartzConnectionPoolProvider() { final ArcContainer container = Arc.container(); final InstanceHandle<AgroalDataSource> instanceHandle; final boolean useDefaultDataSource = "QUARKUS_QUARTZ_DEFAULT_DATASOURCE".equals(dataSourceName); if (useDefaultDataSource) { instanceHandle = container.instance(AgroalDataSource.class); } else { instanceHandle = container.instance(AgroalDataSource.class, new DataSourceLiteral(dataSourceName)); } if (instanceHandle.isAvailable()) { this.dataSource = instanceHandle.get(); } else { String message = String.format( "JDBC Store configured but '%s' datasource is missing. You can configure your datasource by following the guide available at: https: useDefaultDataSource ? "default" : dataSourceName); throw new IllegalStateException(message); } }
class QuarkusQuartzConnectionPoolProvider implements PoolingConnectionProvider { private AgroalDataSource dataSource; private static String dataSourceName; @SuppressWarnings("unused") public QuarkusQuartzConnectionPoolProvider(Properties properties) { this(); } @Override public DataSource getDataSource() { return dataSource; } @Override public Connection getConnection() throws SQLException { return dataSource.getConnection(); } @Override public void shutdown() { } @Override public void initialize() { } static void setDataSourceName(String dataSourceName) { QuarkusQuartzConnectionPoolProvider.dataSourceName = dataSourceName; } private class DataSourceLiteral extends AnnotationLiteral<io.quarkus.agroal.DataSource> implements io.quarkus.agroal.DataSource { private String name; public DataSourceLiteral(String name) { this.name = name; } @Override public String value() { return name; } } }
class QuarkusQuartzConnectionPoolProvider implements PoolingConnectionProvider { private AgroalDataSource dataSource; private static String dataSourceName; @Override public DataSource getDataSource() { return dataSource; } @Override public Connection getConnection() throws SQLException { return dataSource.getConnection(); } @Override public void shutdown() { } @Override public void initialize() { } static void setDataSourceName(String dataSourceName) { QuarkusQuartzConnectionPoolProvider.dataSourceName = dataSourceName; } private static class DataSourceLiteral extends AnnotationLiteral<io.quarkus.agroal.DataSource> implements io.quarkus.agroal.DataSource { private String name; public DataSourceLiteral(String name) { this.name = name; } @Override public String value() { return name; } } }
I don't think this it's reasonable.
public Operator(OperatorType opType) { this.opType = opType; }
this.opType = opType;
public Operator(OperatorType opType) { this.opType = opType; }
class Operator { public static final long DEFAULT_LIMIT = -1; public static final long DEFAULT_OFFSET = 0; protected final OperatorType opType; protected long limit = DEFAULT_LIMIT; protected ScalarOperator predicate = null; private static long saltGenerator = 0; /** * Before entering the Cascades search framework, * we need to merge LogicalProject and child children into one node * to reduce the impact of LogicalProject on RULE matching * such as Join reorder */ protected Projection projection; protected RowOutputInfo rowOutputInfo; protected long salt = 0; public Operator(OperatorType opType, long limit, ScalarOperator predicate, Projection projection) { this.opType = opType; this.limit = limit; this.predicate = predicate; this.projection = projection; } @SuppressWarnings("unchecked") public <T extends Operator> T cast() { return (T) this; } public boolean isLogical() { return false; } public boolean isPhysical() { return false; } public OperatorType getOpType() { return opType; } public long getLimit() { return limit; } @Deprecated public void setLimit(long limit) { this.limit = limit; } public boolean hasLimit() { return limit != DEFAULT_LIMIT; } public ScalarOperator getPredicate() { return predicate; } @Deprecated public void setPredicate(ScalarOperator predicate) { this.predicate = predicate; } public Projection getProjection() { return projection; } public void setProjection(Projection projection) { this.projection = projection; } public void addSalt() { if ((this instanceof LogicalJoinOperator) || (this instanceof LogicalScanOperator)) { this.salt = ++saltGenerator; } } public void setSalt(long salt) { if ((this instanceof LogicalJoinOperator) || (this instanceof LogicalScanOperator) || (this instanceof PhysicalScanOperator) || (this instanceof PhysicalJoinOperator)) { this.salt = salt; } } public boolean hasSalt() { return salt > 0; } public long getSalt() { return salt; } public RowOutputInfo getRowOutputInfo(List<OptExpression> inputs) { if 
(rowOutputInfo != null) { return rowOutputInfo; } rowOutputInfo = deriveRowOutputInfo(inputs); if (projection != null) { rowOutputInfo = new RowOutputInfo(projection.getColumnRefMap(), projection.getCommonSubOperatorMap(), rowOutputInfo.getOriginalColOutputInfo(), rowOutputInfo.getEndogenousCols()); } return rowOutputInfo; } protected RowOutputInfo deriveRowOutputInfo(List<OptExpression> inputs) { throw new UnsupportedOperationException(); } protected RowOutputInfo projectInputRow(RowOutputInfo inputRow) { List<ColumnOutputInfo> entryList = Lists.newArrayList(); for (ColumnOutputInfo columnOutputInfo : inputRow.getColumnOutputInfo()) { entryList.add(new ColumnOutputInfo(columnOutputInfo.getColumnRef(), columnOutputInfo.getColumnRef())); } return new RowOutputInfo(entryList); } public <R, C> R accept(OperatorVisitor<R, C> visitor, C context) { return visitor.visitOperator(this, context); } public <R, C> R accept(OptExpressionVisitor<R, C> visitor, OptExpression optExpression, C context) { return visitor.visit(optExpression, context); } @Override public String toString() { return opType.name(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Operator operator = (Operator) o; return limit == operator.limit && opType == operator.opType && Objects.equals(predicate, operator.predicate) && Objects.equals(projection, operator.projection) && Objects.equals(salt, operator.salt); } @Override public int hashCode() { return Objects.hash(opType.ordinal(), limit, predicate, projection, salt); } public abstract static class Builder<O extends Operator, B extends Builder> { protected O builder = newInstance(); protected abstract O newInstance(); public B withOperator(O operator) { builder.limit = operator.limit; builder.predicate = operator.predicate; builder.projection = operator.projection; builder.salt = operator.salt; return (B) this; } public O build() { O newOne = builder; builder = 
null; return newOne; } public OperatorType getOpType() { return builder.opType; } public long getLimit() { return builder.limit; } public B setLimit(long limit) { builder.limit = limit; return (B) this; } public ScalarOperator getPredicate() { return builder.predicate; } public B setPredicate(ScalarOperator predicate) { builder.predicate = predicate; return (B) this; } public Projection getProjection() { return builder.projection; } public B setProjection(Projection projection) { builder.projection = projection; return (B) this; } public B addSalt() { builder.salt = ++saltGenerator; return (B) this; } } }
class Operator { public static final long DEFAULT_LIMIT = -1; public static final long DEFAULT_OFFSET = 0; protected final OperatorType opType; protected long limit = DEFAULT_LIMIT; protected ScalarOperator predicate = null; private static long saltGenerator = 0; /** * Before entering the Cascades search framework, * we need to merge LogicalProject and child children into one node * to reduce the impact of LogicalProject on RULE matching * such as Join reorder */ protected Projection projection; protected RowOutputInfo rowOutputInfo; protected long salt = 0; public Operator(OperatorType opType, long limit, ScalarOperator predicate, Projection projection) { this.opType = opType; this.limit = limit; this.predicate = predicate; this.projection = projection; } @SuppressWarnings("unchecked") public <T extends Operator> T cast() { return (T) this; } public boolean isLogical() { return false; } public boolean isPhysical() { return false; } public OperatorType getOpType() { return opType; } public long getLimit() { return limit; } @Deprecated public void setLimit(long limit) { this.limit = limit; } public boolean hasLimit() { return limit != DEFAULT_LIMIT; } public ScalarOperator getPredicate() { return predicate; } @Deprecated public void setPredicate(ScalarOperator predicate) { this.predicate = predicate; } public Projection getProjection() { return projection; } public void setProjection(Projection projection) { this.projection = projection; } public void addSalt() { if ((this instanceof LogicalJoinOperator) || (this instanceof LogicalScanOperator)) { this.salt = ++saltGenerator; } } public void setSalt(long salt) { if ((this instanceof LogicalJoinOperator) || (this instanceof LogicalScanOperator) || (this instanceof PhysicalScanOperator) || (this instanceof PhysicalJoinOperator)) { this.salt = salt; } } public boolean hasSalt() { return salt > 0; } public long getSalt() { return salt; } public RowOutputInfo getRowOutputInfo(List<OptExpression> inputs) { if 
(rowOutputInfo != null) { return rowOutputInfo; } rowOutputInfo = deriveRowOutputInfo(inputs); if (projection != null) { rowOutputInfo = new RowOutputInfo(projection.getColumnRefMap(), projection.getCommonSubOperatorMap(), rowOutputInfo.getOriginalColOutputInfo(), rowOutputInfo.getEndogenousCols()); } return rowOutputInfo; } protected RowOutputInfo deriveRowOutputInfo(List<OptExpression> inputs) { throw new UnsupportedOperationException(); } protected RowOutputInfo projectInputRow(RowOutputInfo inputRow) { List<ColumnOutputInfo> entryList = Lists.newArrayList(); for (ColumnOutputInfo columnOutputInfo : inputRow.getColumnOutputInfo()) { entryList.add(new ColumnOutputInfo(columnOutputInfo.getColumnRef(), columnOutputInfo.getColumnRef())); } return new RowOutputInfo(entryList); } public <R, C> R accept(OperatorVisitor<R, C> visitor, C context) { return visitor.visitOperator(this, context); } public <R, C> R accept(OptExpressionVisitor<R, C> visitor, OptExpression optExpression, C context) { return visitor.visit(optExpression, context); } @Override public String toString() { return opType.name(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Operator operator = (Operator) o; return limit == operator.limit && opType == operator.opType && Objects.equals(predicate, operator.predicate) && Objects.equals(projection, operator.projection) && Objects.equals(salt, operator.salt); } @Override public int hashCode() { return Objects.hash(opType.ordinal(), limit, predicate, projection, salt); } public abstract static class Builder<O extends Operator, B extends Builder> { protected O builder = newInstance(); protected abstract O newInstance(); public B withOperator(O operator) { builder.limit = operator.limit; builder.predicate = operator.predicate; builder.projection = operator.projection; builder.salt = operator.salt; return (B) this; } public O build() { O newOne = builder; builder = 
null; return newOne; } public OperatorType getOpType() { return builder.opType; } public long getLimit() { return builder.limit; } public B setLimit(long limit) { builder.limit = limit; return (B) this; } public ScalarOperator getPredicate() { return builder.predicate; } public B setPredicate(ScalarOperator predicate) { builder.predicate = predicate; return (B) this; } public Projection getProjection() { return builder.projection; } public B setProjection(Projection projection) { builder.projection = projection; return (B) this; } public B addSalt() { builder.salt = ++saltGenerator; return (B) this; } } }
Yeah. For Java 11 there is no "JRE" - the only distribution for Java is the JDK. The containers that we provide have the Java 11 JDK. For Java 8, there is a development distribution, and a runtime distribution (JRE). For the containers we provide the JRE.
public static DataflowRunner fromOptions(PipelineOptions options) { DataflowPipelineOptions dataflowOptions = PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options); ArrayList<String> missing = new ArrayList<>(); if (dataflowOptions.getAppName() == null) { missing.add("appName"); } if (missing.size() > 0) { throw new IllegalArgumentException( "Missing required values: " + Joiner.on(',').join(missing)); } PathValidator validator = dataflowOptions.getPathValidator(); String gcpTempLocation; try { gcpTempLocation = dataflowOptions.getGcpTempLocation(); } catch (Exception e) { throw new IllegalArgumentException( "DataflowRunner requires gcpTempLocation, " + "but failed to retrieve a value from PipelineOptions", e); } validator.validateOutputFilePrefixSupported(gcpTempLocation); String stagingLocation; try { stagingLocation = dataflowOptions.getStagingLocation(); } catch (Exception e) { throw new IllegalArgumentException( "DataflowRunner requires stagingLocation, " + "but failed to retrieve a value from PipelineOptions", e); } validator.validateOutputFilePrefixSupported(stagingLocation); if (!isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) { validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs()); } if (dataflowOptions.getFilesToStage() == null) { dataflowOptions.setFilesToStage( detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader())); if (dataflowOptions.getFilesToStage().isEmpty()) { throw new IllegalArgumentException("No files to stage has been found."); } else { LOG.info( "PipelineOptions.filesToStage was not specified. " + "Defaulting to files from the classpath: will stage {} files. 
" + "Enable logging at DEBUG level to see which files will be staged.", dataflowOptions.getFilesToStage().size()); LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage()); } } String jobName = dataflowOptions.getJobName().toLowerCase(); checkArgument( jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"), "JobName invalid; the name must consist of only the characters " + "[-a-z0-9], starting with a letter and ending with a letter " + "or number"); if (!jobName.equals(dataflowOptions.getJobName())) { LOG.info( "PipelineOptions.jobName did not match the service requirements. " + "Using {} instead of {}.", jobName, dataflowOptions.getJobName()); } dataflowOptions.setJobName(jobName); String project = dataflowOptions.getProject(); if (project.matches("[0-9]*")) { throw new IllegalArgumentException( "Project ID '" + project + "' invalid. Please make sure you specified the Project ID, not project number."); } else if (!project.matches(PROJECT_ID_REGEXP)) { throw new IllegalArgumentException( "Project ID '" + project + "' invalid. Please make sure you specified the Project ID, not project description."); } DataflowPipelineDebugOptions debugOptions = dataflowOptions.as(DataflowPipelineDebugOptions.class); if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) { throw new IllegalArgumentException( "Number of worker harness threads '" + debugOptions.getNumberOfWorkerHarnessThreads() + "' invalid. Please make sure the value is non-negative."); } if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) { dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT); } DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo(); String javaVersion = Float.parseFloat(System.getProperty("java.specification.version")) >= 9 ? 
"(JDK 11 environment)" : "(JRE 8 environment)"; String userAgent = String.format( "%s %s/%s", dataflowRunnerInfo.getName(), javaVersion, dataflowRunnerInfo.getVersion()) .replace(" ", "_"); dataflowOptions.setUserAgent(userAgent); return new DataflowRunner(dataflowOptions); }
String userAgent =
public static DataflowRunner fromOptions(PipelineOptions options) { DataflowPipelineOptions dataflowOptions = PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options); ArrayList<String> missing = new ArrayList<>(); if (dataflowOptions.getAppName() == null) { missing.add("appName"); } if (missing.size() > 0) { throw new IllegalArgumentException( "Missing required values: " + Joiner.on(',').join(missing)); } PathValidator validator = dataflowOptions.getPathValidator(); String gcpTempLocation; try { gcpTempLocation = dataflowOptions.getGcpTempLocation(); } catch (Exception e) { throw new IllegalArgumentException( "DataflowRunner requires gcpTempLocation, " + "but failed to retrieve a value from PipelineOptions", e); } validator.validateOutputFilePrefixSupported(gcpTempLocation); String stagingLocation; try { stagingLocation = dataflowOptions.getStagingLocation(); } catch (Exception e) { throw new IllegalArgumentException( "DataflowRunner requires stagingLocation, " + "but failed to retrieve a value from PipelineOptions", e); } validator.validateOutputFilePrefixSupported(stagingLocation); if (!isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) { validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs()); } if (dataflowOptions.getFilesToStage() == null) { dataflowOptions.setFilesToStage( detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader())); if (dataflowOptions.getFilesToStage().isEmpty()) { throw new IllegalArgumentException("No files to stage has been found."); } else { LOG.info( "PipelineOptions.filesToStage was not specified. " + "Defaulting to files from the classpath: will stage {} files. 
" + "Enable logging at DEBUG level to see which files will be staged.", dataflowOptions.getFilesToStage().size()); LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage()); } } String jobName = dataflowOptions.getJobName().toLowerCase(); checkArgument( jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"), "JobName invalid; the name must consist of only the characters " + "[-a-z0-9], starting with a letter and ending with a letter " + "or number"); if (!jobName.equals(dataflowOptions.getJobName())) { LOG.info( "PipelineOptions.jobName did not match the service requirements. " + "Using {} instead of {}.", jobName, dataflowOptions.getJobName()); } dataflowOptions.setJobName(jobName); String project = dataflowOptions.getProject(); if (project.matches("[0-9]*")) { throw new IllegalArgumentException( "Project ID '" + project + "' invalid. Please make sure you specified the Project ID, not project number."); } else if (!project.matches(PROJECT_ID_REGEXP)) { throw new IllegalArgumentException( "Project ID '" + project + "' invalid. Please make sure you specified the Project ID, not project description."); } DataflowPipelineDebugOptions debugOptions = dataflowOptions.as(DataflowPipelineDebugOptions.class); if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) { throw new IllegalArgumentException( "Number of worker harness threads '" + debugOptions.getNumberOfWorkerHarnessThreads() + "' invalid. Please make sure the value is non-negative."); } if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) { dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT); } String javaVersion = Float.parseFloat(System.getProperty("java.specification.version")) >= 9 ? 
"(JDK 11 environment)" : "(JRE 8 environment)"; DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo(); String userAgent = String.format( "%s/%s%s", dataflowRunnerInfo.getName(), dataflowRunnerInfo.getVersion(), javaVersion) .replace(" ", "_"); dataflowOptions.setUserAgent(userAgent); return new DataflowRunner(dataflowOptions); }
class path allowing for * user specified configuration injection into the ObjectMapper. This supports user custom types * on {@link PipelineOptions}
class path allowing for * user specified configuration injection into the ObjectMapper. This supports user custom types * on {@link PipelineOptions}
Is it correct that this error is getting logged? Only `Context` is an unknown type here, right? Even though there are errors when attempting to resolve `FunctionEntry`'s members, it is not unknown?
public void testFunctionPointerAsVariable() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative/fp-type-mismatch1-negative.bal"); Assert.assertEquals(result.getErrorCount(), 3); BAssertUtil.validateError(result, 0, "incompatible types: expected 'function (string,int) returns " + "(boolean)', found 'function (string,float) returns (boolean)'", 2, 53); BAssertUtil.validateError(result, 1, "unknown type 'Context'", 10, 29); BAssertUtil.validateError(result, 2, "unknown type 'FunctionEntry'", 12, 5); }
BAssertUtil.validateError(result, 2, "unknown type 'FunctionEntry'", 12, 5);
public void testFunctionPointerAsVariable() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative/fp-type-mismatch1-negative.bal"); Assert.assertEquals(result.getErrorCount(), 3); BAssertUtil.validateError(result, 0, "incompatible types: expected 'function (string,int) returns " + "(boolean)', found 'function (string,float) returns (boolean)'", 2, 53); BAssertUtil.validateError(result, 1, "unknown type 'Context'", 10, 29); BAssertUtil.validateError(result, 2, "unknown type 'FunctionEntry'", 12, 5); }
class FunctionPointersNegativeTest { @Test() @Test() public void testLambdaAsVariable() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative/fp-type-mismatch2-negative.bal"); Assert.assertEquals(result.getErrorCount(), 1); BAssertUtil.validateError(result, 0, "incompatible types: expected 'function (string,int) returns " + "(boolean)', found 'function (string,boolean) returns (boolean)'", 2, 53); } @Test() public void testFPInStruct() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative/fp-struct-negative.bal"); Assert.assertEquals(result.getErrorCount(), 1); BAssertUtil.validateError(result, 0, "undefined field 'getFullName' in record 'Person'", 17, 20); } @Test() public void testFPInStructIncorrectArg() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative/fp-struct-incorrect-arg-negative.bal"); Assert.assertEquals(result.getErrorCount(), 1); BAssertUtil.validateError(result, 0, "incompatible types: expected 'string', found 'Person'", 32, 39); } @Test(groups = { "disableOnOldParser" }) public void testFPWithNoImport() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative/fp-with-import-negative.bal"); Assert.assertEquals(result.getErrorCount(), 6); int i = -1; BAssertUtil.validateError(result, ++i, "undefined module 'streams'", 19, 5); BAssertUtil.validateError(result, ++i, "unknown type 'Select'", 19, 5); BAssertUtil.validateError(result, ++i, "undefined function 'createSelect'", 19, 32); BAssertUtil.validateError(result, ++i, "undefined module 'streams'", 19, 32); BAssertUtil.validateError(result, ++i, "undefined symbol 'outputProcess'", 19, 53); } @Test() public void testFPInvalidInvocation() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative" + "/fp_invalid_invocation_negative.bal"); Assert.assertEquals(result.getErrorCount(), 6); int i = 0; BAssertUtil.validateError(result, i++, "undefined field 
'getFullName' in record 'Person'", 35, 20); BAssertUtil.validateError(result, i++, "undefined field 'getFname' in object 'Employee'", 45, 15); BAssertUtil.validateError(result, i++, "undefined function 'f3'", 46, 9); BAssertUtil.validateError(result, i++, "undefined field 'getFname' in object 'Employee'", 77, 15); BAssertUtil.validateError(result, i++, "undefined function 'f3'", 78, 9); BAssertUtil.validateError(result, i, "undefined method 'getLname' in object 'Employee'", 83, 11); } @Test public void testFPWithMissingArgs() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative" + "/fp_invocation_with_missing_args.bal"); Assert.assertEquals(result.getErrorCount(), 4); int i = 0; BAssertUtil.validateError(result, i++, "missing required parameter 'i' in call to 'fn'()", 9, 16); BAssertUtil.validateError(result, i++, "missing required parameter 'i' in call to 'fn'()", 20, 16); BAssertUtil.validateError(result, i++, "too many arguments in call to 'fn()'", 31, 16); BAssertUtil.validateError(result, i, "too many arguments in call to 'fn()'", 42, 16); } }
class FunctionPointersNegativeTest { @Test() @Test() public void testLambdaAsVariable() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative/fp-type-mismatch2-negative.bal"); Assert.assertEquals(result.getErrorCount(), 1); BAssertUtil.validateError(result, 0, "incompatible types: expected 'function (string,int) returns " + "(boolean)', found 'function (string,boolean) returns (boolean)'", 2, 53); } @Test() public void testFPInStruct() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative/fp-struct-negative.bal"); Assert.assertEquals(result.getErrorCount(), 1); BAssertUtil.validateError(result, 0, "undefined field 'getFullName' in record 'Person'", 17, 20); } @Test() public void testFPInStructIncorrectArg() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative/fp-struct-incorrect-arg-negative.bal"); Assert.assertEquals(result.getErrorCount(), 1); BAssertUtil.validateError(result, 0, "incompatible types: expected 'string', found 'Person'", 32, 39); } @Test(groups = { "disableOnOldParser" }) public void testFPWithNoImport() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative/fp-with-import-negative.bal"); Assert.assertEquals(result.getErrorCount(), 6); int i = -1; BAssertUtil.validateError(result, ++i, "undefined module 'streams'", 19, 5); BAssertUtil.validateError(result, ++i, "unknown type 'Select'", 19, 5); BAssertUtil.validateError(result, ++i, "undefined function 'createSelect'", 19, 32); BAssertUtil.validateError(result, ++i, "undefined module 'streams'", 19, 32); BAssertUtil.validateError(result, ++i, "undefined symbol 'outputProcess'", 19, 53); } @Test() public void testFPInvalidInvocation() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative" + "/fp_invalid_invocation_negative.bal"); Assert.assertEquals(result.getErrorCount(), 6); int i = 0; BAssertUtil.validateError(result, i++, "undefined field 
'getFullName' in record 'Person'", 35, 20); BAssertUtil.validateError(result, i++, "undefined field 'getFname' in object 'Employee'", 45, 15); BAssertUtil.validateError(result, i++, "undefined function 'f3'", 46, 9); BAssertUtil.validateError(result, i++, "undefined field 'getFname' in object 'Employee'", 77, 15); BAssertUtil.validateError(result, i++, "undefined function 'f3'", 78, 9); BAssertUtil.validateError(result, i, "undefined method 'getLname' in object 'Employee'", 83, 11); } @Test public void testFPWithMissingArgs() { CompileResult result = BCompileUtil.compile("test-src/expressions/lambda/negative" + "/fp_invocation_with_missing_args.bal"); Assert.assertEquals(result.getErrorCount(), 4); int i = 0; BAssertUtil.validateError(result, i++, "missing required parameter 'i' in call to 'fn'()", 9, 16); BAssertUtil.validateError(result, i++, "missing required parameter 'i' in call to 'fn'()", 20, 16); BAssertUtil.validateError(result, i++, "too many arguments in call to 'fn()'", 31, 16); BAssertUtil.validateError(result, i, "too many arguments in call to 'fn()'", 42, 16); } }
nit: I find the happen before/after confusing since they may happen whenever we are just blocking on them completing here Maybe Block on completing the past closes before returning. We do so after starting the current closes in the background so that they can happen in parallel.
public void processElement(ProcessContext c, BoundedWindow window) throws Exception { getDynamicDestinations().setSideInputAccessorFromProcessContext(c); Map<DestinationT, Writer<DestinationT, OutputT>> writers = Maps.newHashMap(); for (UserT input : c.element().getValue()) { DestinationT destination = getDynamicDestinations().getDestination(input); Writer<DestinationT, OutputT> writer = writers.get(destination); if (writer == null) { String uuid = UUID.randomUUID().toString(); LOG.info( "Opening writer {} for window {} pane {} destination {}", uuid, window, c.pane(), destination); writer = writeOperation.createWriter(); writer.setDestination(destination); writer.open(uuid); writers.put(destination, writer); } writeOrClose(writer, getDynamicDestinations().formatRecord(input)); } CompletionStage<List<Void>> pastCloseFutures = MoreFutures.allAsList(closeFutures); closeFutures.clear(); for (Map.Entry<DestinationT, Writer<DestinationT, OutputT>> entry : writers.entrySet()) { int shard = c.element().getKey().getShardNumber(); checkArgument( shard != UNKNOWN_SHARDNUM, "Shard should have been set, but is unset for element %s", c.element()); Writer<DestinationT, OutputT> writer = entry.getValue(); deferredOutput.add( KV.of( c.timestamp(), new FileResult<>(writer.getOutputFile(), shard, window, c.pane(), entry.getKey()))); closeWriterInBackground(writer); } MoreFutures.get(pastCloseFutures); }
public void processElement(ProcessContext c, BoundedWindow window) throws Exception { getDynamicDestinations().setSideInputAccessorFromProcessContext(c); PaneInfo paneInfo = c.pane(); DestinationT destination = getDynamicDestinations().getDestination(c.element()); WriterKey<DestinationT> key = new WriterKey<>(window, c.pane(), destination); Writer<DestinationT, OutputT> writer = writers.get(key); if (writer == null) { if (getMaxNumWritersPerBundle() < 0 || writers.size() <= getMaxNumWritersPerBundle()) { String uuid = UUID.randomUUID().toString(); LOG.info( "Opening writer {} for window {} pane {} destination {}", uuid, window, paneInfo, destination); writer = writeOperation.createWriter(); writer.setDestination(destination); writer.open(uuid); writers.put(key, writer); LOG.debug("Done opening writer"); } else { if (spilledShardNum == UNKNOWN_SHARDNUM) { spilledShardNum = ThreadLocalRandom.current().nextInt(SPILLED_RECORD_SHARDING_FACTOR); } else { spilledShardNum = (spilledShardNum + 1) % SPILLED_RECORD_SHARDING_FACTOR; } c.output( unwrittenRecordsTag, KV.of( ShardedKey.of(hashDestination(destination, destinationCoder), spilledShardNum), c.element())); return; } } writeOrClose(writer, getDynamicDestinations().formatRecord(c.element())); }
class WriteUnshardedTempFilesFn extends DoFn<UserT, FileResult<DestinationT>> { private final @Nullable TupleTag<KV<ShardedKey<Integer>, UserT>> unwrittenRecordsTag; private final Coder<DestinationT> destinationCoder; private @Nullable Map<WriterKey<DestinationT>, Writer<DestinationT, OutputT>> writers; private int spilledShardNum = UNKNOWN_SHARDNUM; WriteUnshardedTempFilesFn( @Nullable TupleTag<KV<ShardedKey<Integer>, UserT>> unwrittenRecordsTag, Coder<DestinationT> destinationCoder) { this.unwrittenRecordsTag = unwrittenRecordsTag; this.destinationCoder = destinationCoder; } @StartBundle public void startBundle(StartBundleContext c) { writers = Maps.newHashMap(); } @ProcessElement @FinishBundle public void finishBundle(FinishBundleContext c) throws Exception { for (Map.Entry<WriterKey<DestinationT>, Writer<DestinationT, OutputT>> entry : writers.entrySet()) { WriterKey<DestinationT> key = entry.getKey(); Writer<DestinationT, OutputT> writer = entry.getValue(); try { writer.close(); } catch (Exception e) { writer.cleanup(); throw e; } BoundedWindow window = key.window; c.output( new FileResult<>( writer.getOutputFile(), UNKNOWN_SHARDNUM, window, key.paneInfo, key.destination), window.maxTimestamp(), window); } } }
class WriteUnshardedTempFilesFn extends DoFn<UserT, FileResult<DestinationT>> { private final @Nullable TupleTag<KV<ShardedKey<Integer>, UserT>> unwrittenRecordsTag; private final Coder<DestinationT> destinationCoder; private @Nullable Map<WriterKey<DestinationT>, Writer<DestinationT, OutputT>> writers; private int spilledShardNum = UNKNOWN_SHARDNUM; WriteUnshardedTempFilesFn( @Nullable TupleTag<KV<ShardedKey<Integer>, UserT>> unwrittenRecordsTag, Coder<DestinationT> destinationCoder) { this.unwrittenRecordsTag = unwrittenRecordsTag; this.destinationCoder = destinationCoder; } @StartBundle public void startBundle(StartBundleContext c) { writers = Maps.newHashMap(); } @ProcessElement @FinishBundle public void finishBundle(FinishBundleContext c) throws Exception { for (Map.Entry<WriterKey<DestinationT>, Writer<DestinationT, OutputT>> entry : writers.entrySet()) { WriterKey<DestinationT> key = entry.getKey(); Writer<DestinationT, OutputT> writer = entry.getValue(); try { writer.close(); } catch (Exception e) { writer.cleanup(); throw e; } BoundedWindow window = key.window; c.output( new FileResult<>( writer.getOutputFile(), UNKNOWN_SHARDNUM, window, key.paneInfo, key.destination), window.maxTimestamp(), window); } } }
Nit: please add blank line between line 94 and 95. Nit2: add space before "cast" Maybe let's drop the ": Beam incubation date :)" part (imo, it may be distractive and it's redundant)
public void testSavePerfsToBigQuery() throws IOException, InterruptedException { NexmarkConfiguration nexmarkConfiguration1 = new NexmarkConfiguration(); nexmarkConfiguration1.query = QUERY; nexmarkConfiguration1.cpuDelayMs = 100L; NexmarkPerf nexmarkPerf1 = new NexmarkPerf(); nexmarkPerf1.numResults = 1000L; nexmarkPerf1.eventsPerSec = 0.5F; nexmarkPerf1.runtimeSec = 0.325F; NexmarkConfiguration nexmarkConfiguration2 = new NexmarkConfiguration(); nexmarkConfiguration2.query = QUERY; nexmarkConfiguration1.cpuDelayMs = 200L; NexmarkPerf nexmarkPerf2 = new NexmarkPerf(); nexmarkPerf2.numResults = 1001L; nexmarkPerf2.eventsPerSec = 1.5F; nexmarkPerf2.runtimeSec = 1.325F; HashMap<NexmarkConfiguration, NexmarkPerf> perfs = new HashMap<>(2); perfs.put(nexmarkConfiguration1, nexmarkPerf1); perfs.put(nexmarkConfiguration2, nexmarkPerf2); int startTimestampSeconds = 1454284800; Main.savePerfsToBigQuery( options, perfs, fakeBqServices, new Instant(startTimestampSeconds * 1000L)); String tableSpec = NexmarkUtils.tableSpec(options, String.valueOf(QUERY), 0L, null); List<TableRow> actualRows = fakeDatasetService.getAllRows( options.getProject(), options.getBigQueryDataset(), BigQueryHelpers.parseTableSpec(tableSpec).getTableId()); assertEquals("Wrong number of rows inserted", 2, actualRows.size()); List<TableRow> expectedRows = new ArrayList<>(); TableRow row1 = new TableRow() .set("timestamp", startTimestampSeconds) .set("runtimeSec", nexmarkPerf1.runtimeSec) .set("eventsPerSec", nexmarkPerf1.eventsPerSec) .set("numResults", (int) nexmarkPerf1.numResults); expectedRows.add(row1); TableRow row2 = new TableRow() .set("timestamp", startTimestampSeconds) .set("runtimeSec", nexmarkPerf2.runtimeSec) .set("eventsPerSec", nexmarkPerf2.eventsPerSec) .set("numResults", (int) nexmarkPerf2.numResults); expectedRows.add(row2); assertThat(actualRows, containsInAnyOrder(Iterables.toArray(expectedRows, TableRow.class))); }
public void testSavePerfsToBigQuery() throws IOException, InterruptedException { NexmarkConfiguration nexmarkConfiguration1 = new NexmarkConfiguration(); nexmarkConfiguration1.query = QUERY; nexmarkConfiguration1.cpuDelayMs = 100L; NexmarkPerf nexmarkPerf1 = new NexmarkPerf(); nexmarkPerf1.numResults = 1000L; nexmarkPerf1.eventsPerSec = 0.5F; nexmarkPerf1.runtimeSec = 0.325F; NexmarkConfiguration nexmarkConfiguration2 = new NexmarkConfiguration(); nexmarkConfiguration2.query = QUERY; nexmarkConfiguration1.cpuDelayMs = 200L; NexmarkPerf nexmarkPerf2 = new NexmarkPerf(); nexmarkPerf2.numResults = 1001L; nexmarkPerf2.eventsPerSec = 1.5F; nexmarkPerf2.runtimeSec = 1.325F; HashMap<NexmarkConfiguration, NexmarkPerf> perfs = new HashMap<>(2); perfs.put(nexmarkConfiguration1, nexmarkPerf1); perfs.put(nexmarkConfiguration2, nexmarkPerf2); int startTimestampSeconds = 1454284800; Main.savePerfsToBigQuery( options, perfs, fakeBqServices, new Instant(startTimestampSeconds * 1000L)); String tableSpec = NexmarkUtils.tableSpec(options, String.valueOf(QUERY), 0L, null); List<TableRow> actualRows = fakeDatasetService.getAllRows( options.getProject(), options.getBigQueryDataset(), BigQueryHelpers.parseTableSpec(tableSpec).getTableId()); assertEquals("Wrong number of rows inserted", 2, actualRows.size()); List<TableRow> expectedRows = new ArrayList<>(); TableRow row1 = new TableRow() .set("timestamp", startTimestampSeconds) .set("runtimeSec", nexmarkPerf1.runtimeSec) .set("eventsPerSec", nexmarkPerf1.eventsPerSec) .set("numResults", (int) nexmarkPerf1.numResults); expectedRows.add(row1); TableRow row2 = new TableRow() .set("timestamp", startTimestampSeconds) .set("runtimeSec", nexmarkPerf2.runtimeSec) .set("eventsPerSec", nexmarkPerf2.eventsPerSec) .set("numResults", (int) nexmarkPerf2.numResults); expectedRows.add(row2); assertThat(actualRows, containsInAnyOrder(Iterables.toArray(expectedRows, TableRow.class))); }
class PerfsToBigQueryTest { private static final int QUERY = 1; private NexmarkOptions options; private FakeDatasetService fakeDatasetService = new FakeDatasetService(); private FakeJobService fakeJobService = new FakeJobService(); private FakeBigQueryServices fakeBqServices = new FakeBigQueryServices() .withDatasetService(fakeDatasetService) .withJobService(fakeJobService); @Rule public transient TemporaryFolder testFolder = new TemporaryFolder(); @Before public void before() throws IOException, InterruptedException { options = PipelineOptionsFactory.create().as(NexmarkOptions.class); options.setBigQueryTable("nexmark"); options.setBigQueryDataset("nexmark"); options.setRunner(DirectRunner.class); options.setStreaming(true); options.setProject("nexmark-test"); options.setTempLocation(testFolder.getRoot().getAbsolutePath()); options.setResourceNameMode(NexmarkUtils.ResourceNameMode.QUERY_RUNNER_AND_MODE); FakeDatasetService.setUp(); fakeDatasetService.createDataset( options.getProject(), options.getBigQueryDataset(), "", "", null); } @Test }
class PerfsToBigQueryTest { private static final int QUERY = 1; private NexmarkOptions options; private FakeDatasetService fakeDatasetService = new FakeDatasetService(); private FakeJobService fakeJobService = new FakeJobService(); private FakeBigQueryServices fakeBqServices = new FakeBigQueryServices() .withDatasetService(fakeDatasetService) .withJobService(fakeJobService); @Rule public transient TemporaryFolder testFolder = new TemporaryFolder(); @Before public void before() throws IOException, InterruptedException { options = PipelineOptionsFactory.create().as(NexmarkOptions.class); options.setBigQueryTable("nexmark"); options.setBigQueryDataset("nexmark"); options.setRunner(DirectRunner.class); options.setStreaming(true); options.setProject("nexmark-test"); options.setTempLocation(testFolder.getRoot().getAbsolutePath()); options.setResourceNameMode(NexmarkUtils.ResourceNameMode.QUERY_RUNNER_AND_MODE); FakeDatasetService.setUp(); fakeDatasetService.createDataset( options.getProject(), options.getBigQueryDataset(), "", "", null); } @Test }
Could we preserve this existing line case. just add one more line to test `*||**|`, like ``` // complete delimiter "Whether 'tis nobler in the mind to suffer |*", // edge case: partial delimiter then complete delimiter "The slings and arrows of outrageous fortune,*||**|" // truncated delimiter "Or to take arms against a sea of troubles,|" ```
public void testReadStringsWithCustomDelimiter() throws Exception { final String[] inputStrings = new String[] { "To be, or not to be: that |is the question: ", "To be, or not to be: that *is the question: ", "Whether 'tis nobler in the mind to suffer *||**|", "The slings and arrows of outrageous fortune,|" }; File tmpFile = tempFolder.newFile("tmpfile.txt"); String filename = tmpFile.getPath(); try (Writer writer = Files.newBufferedWriter(tmpFile.toPath(), UTF_8)) { writer.write(Joiner.on("").join(inputStrings)); } PAssert.that(p.apply(TextIO.read().from(filename).withDelimiter(new byte[] {'|', '*'}))) .containsInAnyOrder( "To be, or not to be: that |is the question: To be, or not to be: " + "that *is the question: Whether 'tis nobler in the mind to suffer *|", "*|The slings and arrows of outrageous fortune,|"); p.run(); }
"Whether 'tis nobler in the mind to suffer *||**|",
public void testReadStringsWithCustomDelimiter() throws Exception { final String[] inputStrings = new String[] { "To be, or not to be: that |is the question: ", "To be, or not to be: that *is the question: ", "Whether 'tis nobler in the mind to suffer |*", "The slings and arrows of outrageous fortune,*||**|", "Or to take arms against a sea of troubles,|" }; File tmpFile = tempFolder.newFile("tmpfile.txt"); String filename = tmpFile.getPath(); try (Writer writer = Files.newBufferedWriter(tmpFile.toPath(), UTF_8)) { writer.write(Joiner.on("").join(inputStrings)); } PAssert.that(p.apply(TextIO.read().from(filename).withDelimiter(new byte[] {'|', '*'}))) .containsInAnyOrder( "To be, or not to be: that |is the question: To be, or not to be: " + "that *is the question: Whether 'tis nobler in the mind to suffer ", "The slings and arrows of outrageous fortune,*|", "*|Or to take arms against a sea of troubles,|"); p.run(); }
class BasicIOTest { @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @Rule public TestPipeline p = TestPipeline.create(); private void runTestRead(String[] expected) throws Exception { File tmpFile = tempFolder.newFile(); String filename = tmpFile.getPath(); try (PrintStream writer = new PrintStream(new FileOutputStream(tmpFile))) { for (String elem : expected) { byte[] encodedElem = CoderUtils.encodeToByteArray(StringUtf8Coder.of(), elem); String line = new String(encodedElem, StandardCharsets.UTF_8); writer.println(line); } } TextIO.Read read = TextIO.read().from(filename); PCollection<String> output = p.apply(read); PAssert.that(output).containsInAnyOrder(expected); p.run(); } @Test public void testDelimiterSelfOverlaps() { assertFalse(TextIO.Read.isSelfOverlapping(new byte[] {'a', 'b', 'c'})); assertFalse(TextIO.Read.isSelfOverlapping(new byte[] {'c', 'a', 'b', 'd', 'a', 'b'})); assertFalse(TextIO.Read.isSelfOverlapping(new byte[] {'a', 'b', 'c', 'a', 'b', 'd'})); assertTrue(TextIO.Read.isSelfOverlapping(new byte[] {'a', 'b', 'a'})); assertTrue(TextIO.Read.isSelfOverlapping(new byte[] {'a', 'b', 'c', 'a', 'b'})); } @Test @Category(NeedsRunner.class) @Test @Category(NeedsRunner.class) public void testReadStrings() throws Exception { runTestRead(LINES_ARRAY); } @Test @Category(NeedsRunner.class) public void testReadEmptyStrings() throws Exception { runTestRead(NO_LINES_ARRAY); } @Test public void testReadNamed() throws Exception { File emptyFile = tempFolder.newFile(); p.enableAbandonedNodeEnforcement(false); assertThat(p.apply(TextIO.read().from("somefile")).getName(), startsWith("TextIO.Read/Read")); assertThat( p.apply("MyRead", TextIO.read().from(emptyFile.getPath())).getName(), startsWith("MyRead/Read")); } @Test public void testReadDisplayData() { TextIO.Read read = TextIO.read().from("foo.*").withCompression(BZIP2); DisplayData displayData = DisplayData.from(read); assertThat(displayData, hasDisplayItem("filePattern", "foo.*")); 
assertThat(displayData, hasDisplayItem("compressionType", BZIP2.toString())); } /** Options for testing. */ public interface RuntimeTestOptions extends PipelineOptions { ValueProvider<String> getInput(); void setInput(ValueProvider<String> value); } @Test public void testRuntimeOptionsNotCalledInApply() throws Exception { p.enableAbandonedNodeEnforcement(false); RuntimeTestOptions options = PipelineOptionsFactory.as(RuntimeTestOptions.class); p.apply(TextIO.read().from(options.getInput())); } @Test public void testCompressionIsSet() throws Exception { TextIO.Read read = TextIO.read().from("/tmp/test"); assertEquals(AUTO, read.getCompression()); read = TextIO.read().from("/tmp/test").withCompression(GZIP); assertEquals(GZIP, read.getCompression()); } /** * Tests reading from a small, uncompressed file with .gz extension. This must work in GZIP * modes. This is needed because some network file systems / HTTP clients will transparently * decompress gzipped content. */ @Test @Category(NeedsRunner.class) public void testSmallCompressedGzipReadActuallyUncompressed() throws Exception { File smallGzNotCompressed = writeToFile(TINY, tempFolder, "tiny_uncompressed.gz", UNCOMPRESSED); assertReadingCompressedFileMatchesExpected(smallGzNotCompressed, GZIP, TINY, p); p.run(); } /** * Tests reading from a small, uncompressed file with .gz extension. This must work in AUTO * modes. This is needed because some network file systems / HTTP clients will transparently * decompress gzipped content. */ @Test @Category(NeedsRunner.class) public void testSmallCompressedAutoReadActuallyUncompressed() throws Exception { File smallGzNotCompressed = writeToFile(TINY, tempFolder, "tiny_uncompressed.gz", UNCOMPRESSED); assertReadingCompressedFileMatchesExpected(smallGzNotCompressed, AUTO, TINY, p); p.run(); } /** * Tests a zip file with no entries. This is a corner case not tested elsewhere as the default * test zip files have a single entry. 
*/ @Test @Category(NeedsRunner.class) public void testZipCompressedReadWithNoEntries() throws Exception { File file = createZipFile(new ArrayList<>(), tempFolder, "empty zip file"); assertReadingCompressedFileMatchesExpected(file, ZIP, EMPTY, p); p.run(); } /** * Tests a zip file with multiple entries. This is a corner case not tested elsewhere as the * default test zip files have a single entry. */ @Test @Category(NeedsRunner.class) public void testZipCompressedReadWithMultiEntriesFile() throws Exception { String[] entry0 = new String[] {"first", "second", "three"}; String[] entry1 = new String[] {"four", "five", "six"}; String[] entry2 = new String[] {"seven", "eight", "nine"}; List<String> expected = new ArrayList<>(); File file = createZipFile(expected, tempFolder, "multiple entries", entry0, entry1, entry2); assertReadingCompressedFileMatchesExpected(file, ZIP, expected, p); p.run(); } /** * Read a ZIP compressed file containing data, multiple empty entries, and then more data. We * expect just the data back. 
*/ @Test @Category(NeedsRunner.class) public void testZipCompressedReadWithComplexEmptyAndPresentEntries() throws Exception { File file = createZipFile( new ArrayList<>(), tempFolder, "complex empty and present entries", new String[] {"cat"}, new String[] {}, new String[] {}, new String[] {"dog"}); assertReadingCompressedFileMatchesExpected(file, ZIP, Arrays.asList("cat", "dog"), p); p.run(); } @Test public void testTextIOGetName() { assertEquals("TextIO.Read", TextIO.read().from("somefile").getName()); assertEquals("TextIO.Read", TextIO.read().from("somefile").toString()); } private TextSource prepareSource(byte[] data) throws IOException { return TextIOReadTest.prepareSource(tempFolder, data, null, 0); } @Test public void testProgressEmptyFile() throws IOException { try (BoundedSource.BoundedReader<String> reader = prepareSource(new byte[0]).createReader(PipelineOptionsFactory.create())) { assertEquals(0.0, reader.getFractionConsumed(), 1e-6); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertFalse(reader.start()); assertEquals(1.0, reader.getFractionConsumed(), 1e-6); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals(0, reader.getSplitPointsRemaining()); } } @Test public void testProgressTextFile() throws IOException { String file = "line1\nline2\nline3"; try (BoundedSource.BoundedReader<String> reader = prepareSource(file.getBytes(StandardCharsets.UTF_8)) .createReader(PipelineOptionsFactory.create())) { assertEquals(0.0, reader.getFractionConsumed(), 1e-6); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertTrue(reader.start()); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertTrue(reader.advance()); assertEquals(1, 
reader.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertTrue(reader.advance()); assertEquals(2, reader.getSplitPointsConsumed()); assertEquals(1, reader.getSplitPointsRemaining()); assertFalse(reader.advance()); assertEquals(1.0, reader.getFractionConsumed(), 1e-6); assertEquals(3, reader.getSplitPointsConsumed()); assertEquals(0, reader.getSplitPointsRemaining()); } } @Test public void testProgressAfterSplitting() throws IOException { String file = "line1\nline2\nline3"; BoundedSource<String> source = prepareSource(file.getBytes(StandardCharsets.UTF_8)); BoundedSource<String> remainder; try (BoundedSource.BoundedReader<String> readerOrig = source.createReader(PipelineOptionsFactory.create())) { assertEquals(0.0, readerOrig.getFractionConsumed(), 1e-6); assertEquals(0, readerOrig.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, readerOrig.getSplitPointsRemaining()); assertTrue(readerOrig.start()); assertEquals(0, readerOrig.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, readerOrig.getSplitPointsRemaining()); remainder = readerOrig.splitAtFraction(0.1); assertNotNull(remainder); assertEquals(0, readerOrig.getSplitPointsConsumed()); assertEquals(1, readerOrig.getSplitPointsRemaining()); assertFalse(readerOrig.advance()); assertEquals(1.0, readerOrig.getFractionConsumed(), 1e-6); assertEquals(1, readerOrig.getSplitPointsConsumed()); assertEquals(0, readerOrig.getSplitPointsRemaining()); } try (BoundedSource.BoundedReader<String> reader = remainder.createReader(PipelineOptionsFactory.create())) { assertEquals(0.0, reader.getFractionConsumed(), 1e-6); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertTrue(reader.start()); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals( 
BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertTrue(reader.advance()); assertEquals(1, reader.getSplitPointsConsumed()); assertEquals(1, reader.getSplitPointsRemaining()); assertFalse(reader.advance()); assertEquals(1.0, reader.getFractionConsumed(), 1e-6); assertEquals(2, reader.getSplitPointsConsumed()); assertEquals(0, reader.getSplitPointsRemaining()); } } @Test public void testInitialSplitAutoModeTxt() throws Exception { PipelineOptions options = TestPipeline.testingPipelineOptions(); long desiredBundleSize = 1000; File largeTxt = writeToFile(LARGE, tempFolder, "large.txt", UNCOMPRESSED); assertThat(largeTxt.length(), greaterThan(2 * desiredBundleSize)); FileBasedSource<String> source = TextIO.read().from(largeTxt.getPath()).getSource(); List<? extends FileBasedSource<String>> splits = source.split(desiredBundleSize, options); assertThat(splits, hasSize(greaterThan(1))); SourceTestUtils.assertSourcesEqualReferenceSource(source, splits, options); } @Test public void testInitialSplitAutoModeGz() throws Exception { assumeFalse(SystemUtils.IS_OS_WINDOWS); PipelineOptions options = TestPipeline.testingPipelineOptions(); long desiredBundleSize = 1000; File largeGz = writeToFile(LARGE, tempFolder, "large.gz", GZIP); assertThat(largeGz.length(), greaterThan(2 * desiredBundleSize)); FileBasedSource<String> source = TextIO.read().from(largeGz.getPath()).getSource(); List<? 
extends FileBasedSource<String>> splits = source.split(desiredBundleSize, options); assertThat(splits, hasSize(equalTo(1))); SourceTestUtils.assertSourcesEqualReferenceSource(source, splits, options); } @Test public void testInitialSplitGzipModeTxt() throws Exception { PipelineOptions options = TestPipeline.testingPipelineOptions(); long desiredBundleSize = 1000; File largeTxt = writeToFile(LARGE, tempFolder, "large.txt", UNCOMPRESSED); assertThat(largeTxt.length(), greaterThan(2 * desiredBundleSize)); FileBasedSource<String> source = TextIO.read().from(largeTxt.getPath()).withCompression(GZIP).getSource(); List<? extends FileBasedSource<String>> splits = source.split(desiredBundleSize, options); assertThat(splits, hasSize(equalTo(1))); SourceTestUtils.assertSourcesEqualReferenceSource(source, splits, options); } @Test @Category(NeedsRunner.class) public void testReadAll() throws IOException { Path tempFolderPath = tempFolder.getRoot().toPath(); writeToFile(TINY, tempFolder, "readAllTiny1.zip", ZIP); writeToFile(TINY, tempFolder, "readAllTiny2.txt", UNCOMPRESSED); writeToFile(LARGE, tempFolder, "readAllLarge1.zip", ZIP); writeToFile(LARGE, tempFolder, "readAllLarge2.txt", UNCOMPRESSED); PCollection<String> lines = p.apply( Create.of( tempFolderPath.resolve("readAllTiny*").toString(), tempFolderPath.resolve("readAllLarge*").toString())) .apply(TextIO.readAll().withCompression(AUTO)); PAssert.that(lines).containsInAnyOrder(Iterables.concat(TINY, TINY, LARGE, LARGE)); p.run(); } @Test @Category(NeedsRunner.class) public void testReadFiles() throws IOException { Path tempFolderPath = tempFolder.getRoot().toPath(); writeToFile(TINY, tempFolder, "readAllTiny1.zip", ZIP); writeToFile(TINY, tempFolder, "readAllTiny2.txt", UNCOMPRESSED); writeToFile(LARGE, tempFolder, "readAllLarge1.zip", ZIP); writeToFile(LARGE, tempFolder, "readAllLarge2.txt", UNCOMPRESSED); PCollection<String> lines = p.apply( Create.of( tempFolderPath.resolve("readAllTiny*").toString(), 
tempFolderPath.resolve("readAllLarge*").toString())) .apply(FileIO.matchAll()) .apply(FileIO.readMatches().withCompression(AUTO)) .apply(TextIO.readFiles().withDesiredBundleSizeBytes(10)); PAssert.that(lines).containsInAnyOrder(Iterables.concat(TINY, TINY, LARGE, LARGE)); p.run(); } private List<KV<String, String>> filenameKV(Path path, String fn, List<String> input) { return input.stream() .map(l -> KV.of(path.resolve(fn).toString(), l)) .collect(Collectors.toList()); } @Test @Category(NeedsRunner.class) public void testReadFilesWithFilename() throws IOException { Path tempFolderPath = tempFolder.getRoot().toPath(); writeToFile(TINY, tempFolder, "readAllTiny1.zip", ZIP); writeToFile(TINY, tempFolder, "readAllTiny2.txt", UNCOMPRESSED); writeToFile(LARGE, tempFolder, "readAllLarge1.zip", ZIP); writeToFile(LARGE, tempFolder, "readAllLarge2.txt", UNCOMPRESSED); SerializableFunction<String, ? extends FileBasedSource<String>> createSource = input -> new TextSource( ValueProvider.StaticValueProvider.of(input), EmptyMatchTreatment.DISALLOW, new byte[] {'\n'}, 0); PCollection<KV<String, String>> lines = p.apply( Create.of( tempFolderPath.resolve("readAllTiny*").toString(), tempFolderPath.resolve("readAllLarge*").toString())) .apply(FileIO.matchAll()) .apply(FileIO.readMatches().withCompression(AUTO)) .apply( new ReadAllViaFileBasedSourceWithFilename<>( 10, createSource, KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()))); PAssert.that(lines) .containsInAnyOrder( Iterables.concat( filenameKV(tempFolderPath, "readAllTiny1.zip", TINY), filenameKV(tempFolderPath, "readAllTiny2.txt", TINY), filenameKV(tempFolderPath, "readAllLarge1.zip", LARGE), filenameKV(tempFolderPath, "readAllLarge2.txt", LARGE))); p.run(); } @Test @Category({NeedsRunner.class, UsesUnboundedSplittableParDo.class}) public void testReadWatchForNewFiles() throws IOException, InterruptedException { final Path basePath = tempFolder.getRoot().toPath().resolve("readWatch"); basePath.toFile().mkdir(); 
p.apply(GenerateSequence.from(0).to(10).withRate(1, Duration.millis(100))) .apply( Window.<Long>into(FixedWindows.of(Duration.millis(150))) .withAllowedLateness(Duration.ZERO) .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1))) .discardingFiredPanes()) .apply(ToString.elements()) .apply( TextIO.write() .to(basePath.resolve("data").toString()) .withNumShards(1) .withWindowedWrites()); PCollection<String> lines = p.apply( TextIO.read() .from(basePath.resolve("*").toString()) .watchForNewFiles( Duration.millis(100), Watch.Growth.afterTimeSinceNewOutput(Duration.standardSeconds(3)))); PAssert.that(lines).containsInAnyOrder("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"); p.run(); } }
class BasicIOTest { @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @Rule public TestPipeline p = TestPipeline.create(); private void runTestRead(String[] expected) throws Exception { File tmpFile = tempFolder.newFile(); String filename = tmpFile.getPath(); try (PrintStream writer = new PrintStream(new FileOutputStream(tmpFile))) { for (String elem : expected) { byte[] encodedElem = CoderUtils.encodeToByteArray(StringUtf8Coder.of(), elem); String line = new String(encodedElem, StandardCharsets.UTF_8); writer.println(line); } } TextIO.Read read = TextIO.read().from(filename); PCollection<String> output = p.apply(read); PAssert.that(output).containsInAnyOrder(expected); p.run(); } @Test public void testDelimiterSelfOverlaps() { assertFalse(TextIO.Read.isSelfOverlapping(new byte[] {'a', 'b', 'c'})); assertFalse(TextIO.Read.isSelfOverlapping(new byte[] {'c', 'a', 'b', 'd', 'a', 'b'})); assertFalse(TextIO.Read.isSelfOverlapping(new byte[] {'a', 'b', 'c', 'a', 'b', 'd'})); assertTrue(TextIO.Read.isSelfOverlapping(new byte[] {'a', 'b', 'a'})); assertTrue(TextIO.Read.isSelfOverlapping(new byte[] {'a', 'b', 'c', 'a', 'b'})); } @Test @Category(NeedsRunner.class) @Test @Category(NeedsRunner.class) public void testReadStrings() throws Exception { runTestRead(LINES_ARRAY); } @Test @Category(NeedsRunner.class) public void testReadEmptyStrings() throws Exception { runTestRead(NO_LINES_ARRAY); } @Test public void testReadNamed() throws Exception { File emptyFile = tempFolder.newFile(); p.enableAbandonedNodeEnforcement(false); assertThat(p.apply(TextIO.read().from("somefile")).getName(), startsWith("TextIO.Read/Read")); assertThat( p.apply("MyRead", TextIO.read().from(emptyFile.getPath())).getName(), startsWith("MyRead/Read")); } @Test public void testReadDisplayData() { TextIO.Read read = TextIO.read().from("foo.*").withCompression(BZIP2); DisplayData displayData = DisplayData.from(read); assertThat(displayData, hasDisplayItem("filePattern", "foo.*")); 
assertThat(displayData, hasDisplayItem("compressionType", BZIP2.toString())); } /** Options for testing. */ public interface RuntimeTestOptions extends PipelineOptions { ValueProvider<String> getInput(); void setInput(ValueProvider<String> value); } @Test public void testRuntimeOptionsNotCalledInApply() throws Exception { p.enableAbandonedNodeEnforcement(false); RuntimeTestOptions options = PipelineOptionsFactory.as(RuntimeTestOptions.class); p.apply(TextIO.read().from(options.getInput())); } @Test public void testCompressionIsSet() throws Exception { TextIO.Read read = TextIO.read().from("/tmp/test"); assertEquals(AUTO, read.getCompression()); read = TextIO.read().from("/tmp/test").withCompression(GZIP); assertEquals(GZIP, read.getCompression()); } /** * Tests reading from a small, uncompressed file with .gz extension. This must work in GZIP * modes. This is needed because some network file systems / HTTP clients will transparently * decompress gzipped content. */ @Test @Category(NeedsRunner.class) public void testSmallCompressedGzipReadActuallyUncompressed() throws Exception { File smallGzNotCompressed = writeToFile(TINY, tempFolder, "tiny_uncompressed.gz", UNCOMPRESSED); assertReadingCompressedFileMatchesExpected(smallGzNotCompressed, GZIP, TINY, p); p.run(); } /** * Tests reading from a small, uncompressed file with .gz extension. This must work in AUTO * modes. This is needed because some network file systems / HTTP clients will transparently * decompress gzipped content. */ @Test @Category(NeedsRunner.class) public void testSmallCompressedAutoReadActuallyUncompressed() throws Exception { File smallGzNotCompressed = writeToFile(TINY, tempFolder, "tiny_uncompressed.gz", UNCOMPRESSED); assertReadingCompressedFileMatchesExpected(smallGzNotCompressed, AUTO, TINY, p); p.run(); } /** * Tests a zip file with no entries. This is a corner case not tested elsewhere as the default * test zip files have a single entry. 
*/ @Test @Category(NeedsRunner.class) public void testZipCompressedReadWithNoEntries() throws Exception { File file = createZipFile(new ArrayList<>(), tempFolder, "empty zip file"); assertReadingCompressedFileMatchesExpected(file, ZIP, EMPTY, p); p.run(); } /** * Tests a zip file with multiple entries. This is a corner case not tested elsewhere as the * default test zip files have a single entry. */ @Test @Category(NeedsRunner.class) public void testZipCompressedReadWithMultiEntriesFile() throws Exception { String[] entry0 = new String[] {"first", "second", "three"}; String[] entry1 = new String[] {"four", "five", "six"}; String[] entry2 = new String[] {"seven", "eight", "nine"}; List<String> expected = new ArrayList<>(); File file = createZipFile(expected, tempFolder, "multiple entries", entry0, entry1, entry2); assertReadingCompressedFileMatchesExpected(file, ZIP, expected, p); p.run(); } /** * Read a ZIP compressed file containing data, multiple empty entries, and then more data. We * expect just the data back. 
*/ @Test @Category(NeedsRunner.class) public void testZipCompressedReadWithComplexEmptyAndPresentEntries() throws Exception { File file = createZipFile( new ArrayList<>(), tempFolder, "complex empty and present entries", new String[] {"cat"}, new String[] {}, new String[] {}, new String[] {"dog"}); assertReadingCompressedFileMatchesExpected(file, ZIP, Arrays.asList("cat", "dog"), p); p.run(); } @Test public void testTextIOGetName() { assertEquals("TextIO.Read", TextIO.read().from("somefile").getName()); assertEquals("TextIO.Read", TextIO.read().from("somefile").toString()); } private TextSource prepareSource(byte[] data) throws IOException { return TextIOReadTest.prepareSource(tempFolder, data, null, 0); } @Test public void testProgressEmptyFile() throws IOException { try (BoundedSource.BoundedReader<String> reader = prepareSource(new byte[0]).createReader(PipelineOptionsFactory.create())) { assertEquals(0.0, reader.getFractionConsumed(), 1e-6); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertFalse(reader.start()); assertEquals(1.0, reader.getFractionConsumed(), 1e-6); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals(0, reader.getSplitPointsRemaining()); } } @Test public void testProgressTextFile() throws IOException { String file = "line1\nline2\nline3"; try (BoundedSource.BoundedReader<String> reader = prepareSource(file.getBytes(StandardCharsets.UTF_8)) .createReader(PipelineOptionsFactory.create())) { assertEquals(0.0, reader.getFractionConsumed(), 1e-6); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertTrue(reader.start()); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertTrue(reader.advance()); assertEquals(1, 
reader.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertTrue(reader.advance()); assertEquals(2, reader.getSplitPointsConsumed()); assertEquals(1, reader.getSplitPointsRemaining()); assertFalse(reader.advance()); assertEquals(1.0, reader.getFractionConsumed(), 1e-6); assertEquals(3, reader.getSplitPointsConsumed()); assertEquals(0, reader.getSplitPointsRemaining()); } } @Test public void testProgressAfterSplitting() throws IOException { String file = "line1\nline2\nline3"; BoundedSource<String> source = prepareSource(file.getBytes(StandardCharsets.UTF_8)); BoundedSource<String> remainder; try (BoundedSource.BoundedReader<String> readerOrig = source.createReader(PipelineOptionsFactory.create())) { assertEquals(0.0, readerOrig.getFractionConsumed(), 1e-6); assertEquals(0, readerOrig.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, readerOrig.getSplitPointsRemaining()); assertTrue(readerOrig.start()); assertEquals(0, readerOrig.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, readerOrig.getSplitPointsRemaining()); remainder = readerOrig.splitAtFraction(0.1); assertNotNull(remainder); assertEquals(0, readerOrig.getSplitPointsConsumed()); assertEquals(1, readerOrig.getSplitPointsRemaining()); assertFalse(readerOrig.advance()); assertEquals(1.0, readerOrig.getFractionConsumed(), 1e-6); assertEquals(1, readerOrig.getSplitPointsConsumed()); assertEquals(0, readerOrig.getSplitPointsRemaining()); } try (BoundedSource.BoundedReader<String> reader = remainder.createReader(PipelineOptionsFactory.create())) { assertEquals(0.0, reader.getFractionConsumed(), 1e-6); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals( BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertTrue(reader.start()); assertEquals(0, reader.getSplitPointsConsumed()); assertEquals( 
BoundedSource.BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining()); assertTrue(reader.advance()); assertEquals(1, reader.getSplitPointsConsumed()); assertEquals(1, reader.getSplitPointsRemaining()); assertFalse(reader.advance()); assertEquals(1.0, reader.getFractionConsumed(), 1e-6); assertEquals(2, reader.getSplitPointsConsumed()); assertEquals(0, reader.getSplitPointsRemaining()); } } @Test public void testInitialSplitAutoModeTxt() throws Exception { PipelineOptions options = TestPipeline.testingPipelineOptions(); long desiredBundleSize = 1000; File largeTxt = writeToFile(LARGE, tempFolder, "large.txt", UNCOMPRESSED); assertThat(largeTxt.length(), greaterThan(2 * desiredBundleSize)); FileBasedSource<String> source = TextIO.read().from(largeTxt.getPath()).getSource(); List<? extends FileBasedSource<String>> splits = source.split(desiredBundleSize, options); assertThat(splits, hasSize(greaterThan(1))); SourceTestUtils.assertSourcesEqualReferenceSource(source, splits, options); } @Test public void testInitialSplitAutoModeGz() throws Exception { assumeFalse(SystemUtils.IS_OS_WINDOWS); PipelineOptions options = TestPipeline.testingPipelineOptions(); long desiredBundleSize = 1000; File largeGz = writeToFile(LARGE, tempFolder, "large.gz", GZIP); assertThat(largeGz.length(), greaterThan(2 * desiredBundleSize)); FileBasedSource<String> source = TextIO.read().from(largeGz.getPath()).getSource(); List<? 
extends FileBasedSource<String>> splits = source.split(desiredBundleSize, options); assertThat(splits, hasSize(equalTo(1))); SourceTestUtils.assertSourcesEqualReferenceSource(source, splits, options); } @Test public void testInitialSplitGzipModeTxt() throws Exception { PipelineOptions options = TestPipeline.testingPipelineOptions(); long desiredBundleSize = 1000; File largeTxt = writeToFile(LARGE, tempFolder, "large.txt", UNCOMPRESSED); assertThat(largeTxt.length(), greaterThan(2 * desiredBundleSize)); FileBasedSource<String> source = TextIO.read().from(largeTxt.getPath()).withCompression(GZIP).getSource(); List<? extends FileBasedSource<String>> splits = source.split(desiredBundleSize, options); assertThat(splits, hasSize(equalTo(1))); SourceTestUtils.assertSourcesEqualReferenceSource(source, splits, options); } @Test @Category(NeedsRunner.class) public void testReadAll() throws IOException { Path tempFolderPath = tempFolder.getRoot().toPath(); writeToFile(TINY, tempFolder, "readAllTiny1.zip", ZIP); writeToFile(TINY, tempFolder, "readAllTiny2.txt", UNCOMPRESSED); writeToFile(LARGE, tempFolder, "readAllLarge1.zip", ZIP); writeToFile(LARGE, tempFolder, "readAllLarge2.txt", UNCOMPRESSED); PCollection<String> lines = p.apply( Create.of( tempFolderPath.resolve("readAllTiny*").toString(), tempFolderPath.resolve("readAllLarge*").toString())) .apply(TextIO.readAll().withCompression(AUTO)); PAssert.that(lines).containsInAnyOrder(Iterables.concat(TINY, TINY, LARGE, LARGE)); p.run(); } @Test @Category(NeedsRunner.class) public void testReadFiles() throws IOException { Path tempFolderPath = tempFolder.getRoot().toPath(); writeToFile(TINY, tempFolder, "readAllTiny1.zip", ZIP); writeToFile(TINY, tempFolder, "readAllTiny2.txt", UNCOMPRESSED); writeToFile(LARGE, tempFolder, "readAllLarge1.zip", ZIP); writeToFile(LARGE, tempFolder, "readAllLarge2.txt", UNCOMPRESSED); PCollection<String> lines = p.apply( Create.of( tempFolderPath.resolve("readAllTiny*").toString(), 
tempFolderPath.resolve("readAllLarge*").toString())) .apply(FileIO.matchAll()) .apply(FileIO.readMatches().withCompression(AUTO)) .apply(TextIO.readFiles().withDesiredBundleSizeBytes(10)); PAssert.that(lines).containsInAnyOrder(Iterables.concat(TINY, TINY, LARGE, LARGE)); p.run(); } private List<KV<String, String>> filenameKV(Path path, String fn, List<String> input) { return input.stream() .map(l -> KV.of(path.resolve(fn).toString(), l)) .collect(Collectors.toList()); } @Test @Category(NeedsRunner.class) public void testReadFilesWithFilename() throws IOException { Path tempFolderPath = tempFolder.getRoot().toPath(); writeToFile(TINY, tempFolder, "readAllTiny1.zip", ZIP); writeToFile(TINY, tempFolder, "readAllTiny2.txt", UNCOMPRESSED); writeToFile(LARGE, tempFolder, "readAllLarge1.zip", ZIP); writeToFile(LARGE, tempFolder, "readAllLarge2.txt", UNCOMPRESSED); SerializableFunction<String, ? extends FileBasedSource<String>> createSource = input -> new TextSource( ValueProvider.StaticValueProvider.of(input), EmptyMatchTreatment.DISALLOW, new byte[] {'\n'}, 0); PCollection<KV<String, String>> lines = p.apply( Create.of( tempFolderPath.resolve("readAllTiny*").toString(), tempFolderPath.resolve("readAllLarge*").toString())) .apply(FileIO.matchAll()) .apply(FileIO.readMatches().withCompression(AUTO)) .apply( new ReadAllViaFileBasedSourceWithFilename<>( 10, createSource, KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()))); PAssert.that(lines) .containsInAnyOrder( Iterables.concat( filenameKV(tempFolderPath, "readAllTiny1.zip", TINY), filenameKV(tempFolderPath, "readAllTiny2.txt", TINY), filenameKV(tempFolderPath, "readAllLarge1.zip", LARGE), filenameKV(tempFolderPath, "readAllLarge2.txt", LARGE))); p.run(); } @Test @Category({NeedsRunner.class, UsesUnboundedSplittableParDo.class}) public void testReadWatchForNewFiles() throws IOException, InterruptedException { final Path basePath = tempFolder.getRoot().toPath().resolve("readWatch"); basePath.toFile().mkdir(); 
p.apply(GenerateSequence.from(0).to(10).withRate(1, Duration.millis(100))) .apply( Window.<Long>into(FixedWindows.of(Duration.millis(150))) .withAllowedLateness(Duration.ZERO) .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1))) .discardingFiredPanes()) .apply(ToString.elements()) .apply( TextIO.write() .to(basePath.resolve("data").toString()) .withNumShards(1) .withWindowedWrites()); PCollection<String> lines = p.apply( TextIO.read() .from(basePath.resolve("*").toString()) .watchForNewFiles( Duration.millis(100), Watch.Growth.afterTimeSinceNewOutput(Duration.standardSeconds(3)))); PAssert.that(lines).containsInAnyOrder("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"); p.run(); } }
Shall we create an issue for this?
private boolean checkFillerValue(BUnionType type) { if (type.isNullable()) { return true; } Iterator<BType> iterator = type.getMemberTypes().iterator(); BType firstMember = iterator.next(); while (iterator.hasNext()) { if (!isSameType(firstMember, iterator.next())) { return false; } } return isValueType(firstMember) && hasFillerValue(firstMember); }
return isValueType(firstMember) && hasFillerValue(firstMember);
private boolean checkFillerValue(BUnionType type) { if (type.isNullable()) { return true; } Iterator<BType> iterator = type.getMemberTypes().iterator(); BType firstMember = iterator.next(); while (iterator.hasNext()) { if (!isSameType(firstMember, iterator.next())) { return false; } } return isValueType(firstMember) && hasFillerValue(firstMember); }
class TypePair { BType sourceType; BType targetType; public TypePair(BType sourceType, BType targetType) { this.sourceType = sourceType; this.targetType = targetType; } @Override public boolean equals(Object obj) { if (!(obj instanceof TypePair)) { return false; } TypePair other = (TypePair) obj; return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType); } @Override public int hashCode() { return Objects.hash(sourceType, targetType); } }
class TypePair { BType sourceType; BType targetType; public TypePair(BType sourceType, BType targetType) { this.sourceType = sourceType; this.targetType = targetType; } @Override public boolean equals(Object obj) { if (!(obj instanceof TypePair)) { return false; } TypePair other = (TypePair) obj; return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType); } @Override public int hashCode() { return Objects.hash(sourceType, targetType); } }
It feels inconsistent that the OperatorContext and the internal of taskInfo are given to the `DefaultRuntimeContext` at the same time. I think we either pass the operatorContext that contains all the info or we just pass the required info from the operator context. Same for the other operators.
public void open() throws Exception { super.open(); StreamingRuntimeContext operatorContext = getRuntimeContext(); TaskInfo taskInfo = operatorContext.getTaskInfo(); context = new DefaultRuntimeContext( operatorContext, taskInfo.getNumberOfParallelSubtasks(), taskInfo.getMaxNumberOfParallelSubtasks(), taskInfo.getTaskName()); partitionedContext = new DefaultPartitionedContext(context); nonPartitionedContext = new DefaultNonPartitionedContext<>(context); outputCollector = getOutputCollector(); }
operatorContext,
public void open() throws Exception { super.open(); StreamingRuntimeContext operatorContext = getRuntimeContext(); TaskInfo taskInfo = operatorContext.getTaskInfo(); context = new DefaultRuntimeContext( operatorContext.getJobInfo().getJobName(), operatorContext.getJobType(), taskInfo.getNumberOfParallelSubtasks(), taskInfo.getMaxNumberOfParallelSubtasks(), taskInfo.getTaskName(), operatorContext.getMetricGroup()); partitionedContext = new DefaultPartitionedContext( context, this::currentKey, this::setCurrentKey, getProcessingTimeManager()); outputCollector = getOutputCollector(); nonPartitionedContext = getNonPartitionedContext(); }
class ProcessOperator<IN, OUT> extends AbstractUdfStreamOperator<OUT, OneInputStreamProcessFunction<IN, OUT>> implements OneInputStreamOperator<IN, OUT>, BoundedOneInput { protected transient DefaultRuntimeContext context; protected transient DefaultPartitionedContext partitionedContext; protected transient DefaultNonPartitionedContext<OUT> nonPartitionedContext; protected transient TimestampCollector<OUT> outputCollector; public ProcessOperator(OneInputStreamProcessFunction<IN, OUT> userFunction) { super(userFunction); chainingStrategy = ChainingStrategy.ALWAYS; } @Override @Override public void processElement(StreamRecord<IN> element) throws Exception { outputCollector.setTimestampFromStreamRecord(element); userFunction.processRecord(element.getValue(), outputCollector, partitionedContext); } protected TimestampCollector<OUT> getOutputCollector() { return new OutputCollector<>(output); } @Override public void endInput() throws Exception { userFunction.endInput(nonPartitionedContext); } }
class ProcessOperator<IN, OUT> extends AbstractUdfStreamOperator<OUT, OneInputStreamProcessFunction<IN, OUT>> implements OneInputStreamOperator<IN, OUT>, BoundedOneInput { protected transient DefaultRuntimeContext context; protected transient DefaultPartitionedContext partitionedContext; protected transient NonPartitionedContext<OUT> nonPartitionedContext; protected transient TimestampCollector<OUT> outputCollector; public ProcessOperator(OneInputStreamProcessFunction<IN, OUT> userFunction) { super(userFunction); chainingStrategy = ChainingStrategy.ALWAYS; } @Override @Override public void processElement(StreamRecord<IN> element) throws Exception { outputCollector.setTimestampFromStreamRecord(element); userFunction.processRecord(element.getValue(), outputCollector, partitionedContext); } protected TimestampCollector<OUT> getOutputCollector() { return new OutputCollector<>(output); } @Override public void endInput() throws Exception { userFunction.endInput(nonPartitionedContext); } protected Object currentKey() { throw new UnsupportedOperationException("The key is only defined for keyed operator"); } protected ProcessingTimeManager getProcessingTimeManager() { return UnsupportedProcessingTimeManager.INSTANCE; } protected NonPartitionedContext<OUT> getNonPartitionedContext() { return new DefaultNonPartitionedContext<>( context, partitionedContext, outputCollector, false, null); } }
Where will the key ID come from when this has rolled out?
private String corePublicKeyFlagValue(NodeAgentContext context) { return coreEncryptionPublicKeyIdFlag.with(FetchVector.Dimension.NODE_TYPE, context.nodeType().name()).value(); }
return coreEncryptionPublicKeyIdFlag.with(FetchVector.Dimension.NODE_TYPE, context.nodeType().name()).value();
private String corePublicKeyFlagValue(NodeAgentContext context) { return coreEncryptionPublicKeyIdFlag.with(FetchVector.Dimension.NODE_TYPE, context.nodeType().name()).value(); }
class CoredumpHandler { private static final Pattern HS_ERR_PATTERN = Pattern.compile("hs_err_pid[0-9]+\\.log"); private static final String PROCESSING_DIRECTORY_NAME = "processing"; private static final String METADATA_FILE_NAME = "metadata.json"; private static final String METADATA2_FILE_NAME = "metadata2.json"; private static final String COMPRESSED_EXTENSION = ".zst"; private static final String ENCRYPTED_EXTENSION = ".enc"; public static final String COREDUMP_FILENAME_PREFIX = "dump_"; private final Logger logger = Logger.getLogger(CoredumpHandler.class.getName()); private final ObjectMapper objectMapper = new ObjectMapper(); private final CoreCollector coreCollector; private final Cores cores; private final CoredumpReporter coredumpReporter; private final String crashPatchInContainer; private final Path doneCoredumpsPath; private final Metrics metrics; private final Clock clock; private final Supplier<String> coredumpIdSupplier; private final SecretSharedKeySupplier secretSharedKeySupplier; private final BooleanFlag reportCoresViaCfgFlag; private final StringFlag coreEncryptionPublicKeyIdFlag; /** * @param crashPathInContainer path inside the container where core dump are dumped * @param doneCoredumpsPath path on host where processed core dumps are stored */ public CoredumpHandler(CoreCollector coreCollector, Cores cores, CoredumpReporter coredumpReporter, String crashPathInContainer, Path doneCoredumpsPath, Metrics metrics, FlagSource flagSource) { this(coreCollector, cores, coredumpReporter, crashPathInContainer, doneCoredumpsPath, metrics, Clock.systemUTC(), () -> UUID.randomUUID().toString(), (ctx) -> Optional.empty() /*TODO*/, flagSource); } public CoredumpHandler(CoreCollector coreCollector, Cores cores, CoredumpReporter coredumpReporter, String crashPathInContainer, Path doneCoredumpsPath, Metrics metrics, SecretSharedKeySupplier secretSharedKeySupplier, FlagSource flagSource) { this(coreCollector, cores, coredumpReporter, crashPathInContainer, 
doneCoredumpsPath, metrics, Clock.systemUTC(), () -> UUID.randomUUID().toString(), secretSharedKeySupplier, flagSource); } CoredumpHandler(CoreCollector coreCollector, Cores cores, CoredumpReporter coredumpReporter, String crashPathInContainer, Path doneCoredumpsPath, Metrics metrics, Clock clock, Supplier<String> coredumpIdSupplier, SecretSharedKeySupplier secretSharedKeySupplier, FlagSource flagSource) { this.coreCollector = coreCollector; this.cores = cores; this.coredumpReporter = coredumpReporter; this.crashPatchInContainer = crashPathInContainer; this.doneCoredumpsPath = doneCoredumpsPath; this.metrics = metrics; this.clock = clock; this.coredumpIdSupplier = coredumpIdSupplier; this.secretSharedKeySupplier = secretSharedKeySupplier; this.reportCoresViaCfgFlag = Flags.REPORT_CORES_VIA_CFG.bindTo(flagSource); this.coreEncryptionPublicKeyIdFlag = Flags.CORE_ENCRYPTION_PUBLIC_KEY_ID.bindTo(flagSource); } public void converge(NodeAgentContext context, Supplier<Map<String, Object>> nodeAttributesSupplier, Optional<DockerImage> dockerImage, boolean throwIfCoreBeingWritten) { ContainerPath containerCrashPath = context.paths().of(crashPatchInContainer, context.users().vespa()); ContainerPath containerProcessingPath = containerCrashPath.resolve(PROCESSING_DIRECTORY_NAME); updateMetrics(context, containerCrashPath); if (throwIfCoreBeingWritten) { List<String> pendingCores = FileFinder.files(containerCrashPath) .match(fileAttributes -> !isReadyForProcessing(fileAttributes)) .maxDepth(1).stream() .map(FileFinder.FileAttributes::filename) .toList(); if (!pendingCores.isEmpty()) throw ConvergenceException.ofError(String.format("Cannot process %s coredumps: Still being written", pendingCores.size() < 5 ? 
pendingCores : pendingCores.size())); } getCoredumpToProcess(context, containerCrashPath, containerProcessingPath) .ifPresent(path -> { if (reportCoresViaCfgFlag.with(FetchVector.Dimension.NODE_TYPE, context.nodeType().name()).value()) { processAndReportSingleCoreDump2(context, path, dockerImage); } else { processAndReportSingleCoredump(context, path, nodeAttributesSupplier); } }); } /** @return path to directory inside processing directory that contains a core dump file to process */ Optional<ContainerPath> getCoredumpToProcess(NodeAgentContext context, ContainerPath containerCrashPath, ContainerPath containerProcessingPath) { return FileFinder.directories(containerProcessingPath).stream() .map(FileFinder.FileAttributes::path) .findAny() .map(ContainerPath.class::cast) .or(() -> enqueueCoredump(context, containerCrashPath, containerProcessingPath)); } /** * Moves a coredump and related hs_err file(s) to a new directory under the processing/ directory. * Limit to only processing one coredump at the time, starting with the oldest. * * Assumption: hs_err files are much smaller than core files and are written (last modified time) * before the core file. 
* * @return path to directory inside processing directory which contains the enqueued core dump file */ Optional<ContainerPath> enqueueCoredump(NodeAgentContext context, ContainerPath containerCrashPath, ContainerPath containerProcessingPath) { Predicate<String> isCoreDump = filename -> !HS_ERR_PATTERN.matcher(filename).matches(); List<Path> toProcess = FileFinder.files(containerCrashPath) .match(attributes -> { if (isReadyForProcessing(attributes)) { return true; } else { if (isCoreDump.test(attributes.filename())) context.log(logger, attributes.path() + " is still being written"); return false; } }) .maxDepth(1) .stream() .sorted(Comparator.comparing(FileFinder.FileAttributes::lastModifiedTime)) .map(FileFinder.FileAttributes::path) .toList(); int coredumpIndex = IntStream.range(0, toProcess.size()) .filter(i -> isCoreDump.test(toProcess.get(i).getFileName().toString())) .findFirst() .orElse(-1); if (coredumpIndex == -1) return Optional.empty(); ContainerPath enqueuedDir = containerProcessingPath.resolve(coredumpIdSupplier.get()); new MakeDirectory(enqueuedDir).createParents().converge(context); IntStream.range(0, coredumpIndex + 1) .forEach(i -> { Path path = toProcess.get(i); String prefix = i == coredumpIndex ? 
COREDUMP_FILENAME_PREFIX : ""; new FileMover(path, enqueuedDir.resolve(prefix + path.getFileName())).converge(context); }); return Optional.of(enqueuedDir); } void processAndReportSingleCoredump(NodeAgentContext context, ContainerPath coredumpDirectory, Supplier<Map<String, Object>> nodeAttributesSupplier) { try { Optional<SecretSharedKey> sharedCoreKey = Optional.of(corePublicKeyFlagValue(context)) .filter(k -> !k.isEmpty()) .map(KeyId::ofString) .flatMap(secretSharedKeySupplier::create); Optional<String> decryptionToken = sharedCoreKey.map(k -> k.sealedSharedKey().toTokenString()); String metadata = getMetadata(context, coredumpDirectory, nodeAttributesSupplier, decryptionToken); String coredumpId = coredumpDirectory.getFileName().toString(); coredumpReporter.reportCoredump(coredumpId, metadata); finishProcessing(context, coredumpDirectory, sharedCoreKey); context.log(logger, "Successfully reported coredump " + coredumpId); } catch (Exception e) { throw new RuntimeException("Failed to process coredump " + coredumpDirectory, e); } } /** * @return coredump metadata from metadata.json if present, otherwise attempts to get metadata using * {@link CoreCollector} and stores it to metadata.json */ String getMetadata(NodeAgentContext context, ContainerPath coredumpDirectory, Supplier<Map<String, Object>> nodeAttributesSupplier, Optional<String> decryptionToken) throws IOException { UnixPath metadataPath = new UnixPath(coredumpDirectory.resolve(METADATA_FILE_NAME)); if (!metadataPath.exists()) { ContainerPath coredumpFile = findCoredumpFileInProcessingDirectory(coredumpDirectory); Map<String, Object> metadata = new HashMap<>(coreCollector.collect(context, coredumpFile)); metadata.putAll(nodeAttributesSupplier.get()); metadata.put("coredump_path", doneCoredumpsPath .resolve(context.containerName().asString()) .resolve(coredumpDirectory.getFileName().toString()) .resolve(coredumpFile.getFileName().toString()).toString()); decryptionToken.ifPresent(token -> 
metadata.put("decryption_token", token)); String metadataFields = objectMapper.writeValueAsString(Map.of("fields", metadata)); metadataPath.writeUtf8File(metadataFields); return metadataFields; } else { if (decryptionToken.isPresent()) { String metadataFields = metadataWithPatchedTokenValue(metadataPath, decryptionToken.get()); metadataPath.deleteIfExists(); metadataPath.writeUtf8File(metadataFields); return metadataFields; } else { return metadataPath.readUtf8File(); } } } private String metadataWithPatchedTokenValue(UnixPath metadataPath, String decryptionToken) throws JsonProcessingException { var jsonRoot = objectMapper.readTree(metadataPath.readUtf8File()); if (jsonRoot.path("fields").isObject()) { ((ObjectNode)jsonRoot.get("fields")).put("decryption_token", decryptionToken); } return objectMapper.writeValueAsString(jsonRoot); } static OutputStream maybeWrapWithEncryption(OutputStream wrappedStream, Optional<SecretSharedKey> sharedCoreKey) { return sharedCoreKey .map(key -> (OutputStream)new CipherOutputStream(wrappedStream, SharedKeyGenerator.makeAesGcmEncryptionCipher(key))) .orElse(wrappedStream); } /** * Compresses and, if a key is provided, encrypts core file (and deletes the uncompressed core), then moves * the entire core dump processing directory to {@link */ private void finishProcessing(NodeAgentContext context, ContainerPath coredumpDirectory, Optional<SecretSharedKey> sharedCoreKey) { ContainerPath coreFile = findCoredumpFileInProcessingDirectory(coredumpDirectory); String extension = COMPRESSED_EXTENSION + (sharedCoreKey.isPresent() ? 
ENCRYPTED_EXTENSION : ""); ContainerPath compressedCoreFile = coreFile.resolveSibling(coreFile.getFileName() + extension); try (ZstdCompressingInputStream zcis = new ZstdCompressingInputStream(Files.newInputStream(coreFile)); OutputStream fos = maybeWrapWithEncryption(Files.newOutputStream(compressedCoreFile), sharedCoreKey)) { zcis.transferTo(fos); } catch (IOException e) { throw new UncheckedIOException(e); } new FileDeleter(coreFile).converge(context); Path newCoredumpDirectory = doneCoredumpsPath.resolve(context.containerName().asString()); new MakeDirectory(newCoredumpDirectory).createParents().converge(context); new FileMover(coredumpDirectory.pathOnHost(), newCoredumpDirectory.resolve(coredumpDirectory.getFileName().toString())) .converge(context); } ContainerPath findCoredumpFileInProcessingDirectory(ContainerPath coredumpProccessingDirectory) { return (ContainerPath) FileFinder.files(coredumpProccessingDirectory) .match(nameStartsWith(COREDUMP_FILENAME_PREFIX).and(nameEndsWith(COMPRESSED_EXTENSION).negate()) .and(nameEndsWith(ENCRYPTED_EXTENSION).negate())) .maxDepth(1) .stream() .map(FileFinder.FileAttributes::path) .findFirst() .orElseThrow(() -> new IllegalStateException( "No coredump file found in processing directory " + coredumpProccessingDirectory)); } void updateMetrics(NodeAgentContext context, ContainerPath containerCrashPath) { Dimensions dimensions = generateDimensions(context); int numberOfUnprocessedCoredumps = FileFinder.files(containerCrashPath) .match(nameStartsWith(".").negate()) .match(nameMatches(HS_ERR_PATTERN).negate()) .match(nameEndsWith(COMPRESSED_EXTENSION).negate()) .match(nameEndsWith(ENCRYPTED_EXTENSION).negate()) .match(nameStartsWith("metadata").negate()) .list().size(); metrics.declareGauge(Metrics.APPLICATION_NODE, "coredumps.enqueued", dimensions, Metrics.DimensionType.PRETAGGED).sample(numberOfUnprocessedCoredumps); Path processedCoredumpsPath = doneCoredumpsPath.resolve(context.containerName().asString()); int 
numberOfProcessedCoredumps = FileFinder.directories(processedCoredumpsPath) .maxDepth(1) .list().size(); metrics.declareGauge(Metrics.APPLICATION_NODE, "coredumps.processed", dimensions, Metrics.DimensionType.PRETAGGED).sample(numberOfProcessedCoredumps); } private Dimensions generateDimensions(NodeAgentContext context) { NodeSpec node = context.node(); Dimensions.Builder dimensionsBuilder = new Dimensions.Builder() .add("host", node.hostname()) .add("flavor", node.flavor()) .add("state", node.state().toString()) .add("zone", context.zone().getId().value()); node.owner().ifPresent(owner -> dimensionsBuilder .add("tenantName", owner.tenant().value()) .add("applicationName", owner.application().value()) .add("instanceName", owner.instance().value()) .add("app", String.join(".", owner.application().value(), owner.instance().value())) .add("applicationId", owner.toFullString()) ); node.membership().ifPresent(membership -> dimensionsBuilder .add("clustertype", membership.type().value()) .add("clusterid", membership.clusterId()) ); node.parentHostname().ifPresent(parent -> dimensionsBuilder.add("parentHostname", parent)); dimensionsBuilder.add("orchestratorState", node.orchestratorStatus().asString()); dimensionsBuilder.add("system", context.zone().getSystemName().value()); return dimensionsBuilder.build(); } private boolean isReadyForProcessing(FileFinder.FileAttributes fileAttributes) { return clock.instant().minusSeconds(60).isAfter(fileAttributes.lastModifiedTime()); } void processAndReportSingleCoreDump2(NodeAgentContext context, ContainerPath coreDumpDirectory, Optional<DockerImage> dockerImage) { CoreDumpMetadata metadata = gatherMetadata(context, coreDumpDirectory); dockerImage.ifPresent(metadata::setDockerImage); dockerImage.flatMap(DockerImage::tag).ifPresent(metadata::setVespaVersion); dockerImage.ifPresent(metadata::setDockerImage); Optional<SecretSharedKey> sharedCoreKey = Optional.of(corePublicKeyFlagValue(context)) .filter(k -> !k.isEmpty()) 
.map(KeyId::ofString) .flatMap(secretSharedKeySupplier::create); sharedCoreKey.map(key -> key.sealedSharedKey().toTokenString()).ifPresent(metadata::setDecryptionToken); String coreDumpId = coreDumpDirectory.getFileName().toString(); cores.report(context.hostname(), coreDumpId, metadata); context.log(logger, "Core dump reported: " + coreDumpId); finishProcessing(context, coreDumpDirectory, sharedCoreKey); } private CoreDumpMetadata gatherMetadata(NodeAgentContext context, ContainerPath coreDumpDirectory) { ContainerPath metadataPath = coreDumpDirectory.resolve(METADATA2_FILE_NAME); Optional<ReportCoreDumpRequest> request = ReportCoreDumpRequest.load(metadataPath); if (request.isPresent()) { return request.map(requestInstance -> { var metadata = new CoreDumpMetadata(); requestInstance.populateMetadata(metadata, FileSystems.getDefault()); return metadata; }) .get(); } ContainerPath coreDumpFile = findCoredumpFileInProcessingDirectory(coreDumpDirectory); CoreDumpMetadata metadata = coreCollector.collect2(context, coreDumpFile); metadata.setCpuMicrocodeVersion(getMicrocodeVersion()) .setKernelVersion(System.getProperty("os.version")) .setCoreDumpPath(doneCoredumpsPath.resolve(context.containerName().asString()) .resolve(coreDumpDirectory.getFileName().toString()) .resolve(coreDumpFile.getFileName().toString())); ReportCoreDumpRequest requestInstance = new ReportCoreDumpRequest(); requestInstance.fillFrom(metadata); requestInstance.save(metadataPath); context.log(logger, "Wrote " + metadataPath.pathOnHost()); return metadata; } private String getMicrocodeVersion() { String output = uncheck(() -> Files.readAllLines(Paths.get("/proc/cpuinfo")).stream() .filter(line -> line.startsWith("microcode")) .findFirst() .orElse("microcode : UNKNOWN")); String[] results = output.split(":"); if (results.length != 2) { throw ConvergenceException.ofError("Result from detect microcode command not as expected: " + output); } return results[1].trim(); } }
class CoredumpHandler { private static final Pattern HS_ERR_PATTERN = Pattern.compile("hs_err_pid[0-9]+\\.log"); private static final String PROCESSING_DIRECTORY_NAME = "processing"; private static final String METADATA_FILE_NAME = "metadata.json"; private static final String METADATA2_FILE_NAME = "metadata2.json"; private static final String COMPRESSED_EXTENSION = ".zst"; private static final String ENCRYPTED_EXTENSION = ".enc"; public static final String COREDUMP_FILENAME_PREFIX = "dump_"; private final Logger logger = Logger.getLogger(CoredumpHandler.class.getName()); private final ObjectMapper objectMapper = new ObjectMapper(); private final CoreCollector coreCollector; private final Cores cores; private final CoredumpReporter coredumpReporter; private final String crashPatchInContainer; private final Path doneCoredumpsPath; private final Metrics metrics; private final Clock clock; private final Supplier<String> coredumpIdSupplier; private final SecretSharedKeySupplier secretSharedKeySupplier; private final BooleanFlag reportCoresViaCfgFlag; private final StringFlag coreEncryptionPublicKeyIdFlag; /** * @param crashPathInContainer path inside the container where core dump are dumped * @param doneCoredumpsPath path on host where processed core dumps are stored */ public CoredumpHandler(CoreCollector coreCollector, Cores cores, CoredumpReporter coredumpReporter, String crashPathInContainer, Path doneCoredumpsPath, Metrics metrics, FlagSource flagSource) { this(coreCollector, cores, coredumpReporter, crashPathInContainer, doneCoredumpsPath, metrics, Clock.systemUTC(), () -> UUID.randomUUID().toString(), (ctx) -> Optional.empty() /*TODO*/, flagSource); } public CoredumpHandler(CoreCollector coreCollector, Cores cores, CoredumpReporter coredumpReporter, String crashPathInContainer, Path doneCoredumpsPath, Metrics metrics, SecretSharedKeySupplier secretSharedKeySupplier, FlagSource flagSource) { this(coreCollector, cores, coredumpReporter, crashPathInContainer, 
doneCoredumpsPath, metrics, Clock.systemUTC(), () -> UUID.randomUUID().toString(), secretSharedKeySupplier, flagSource); } CoredumpHandler(CoreCollector coreCollector, Cores cores, CoredumpReporter coredumpReporter, String crashPathInContainer, Path doneCoredumpsPath, Metrics metrics, Clock clock, Supplier<String> coredumpIdSupplier, SecretSharedKeySupplier secretSharedKeySupplier, FlagSource flagSource) { this.coreCollector = coreCollector; this.cores = cores; this.coredumpReporter = coredumpReporter; this.crashPatchInContainer = crashPathInContainer; this.doneCoredumpsPath = doneCoredumpsPath; this.metrics = metrics; this.clock = clock; this.coredumpIdSupplier = coredumpIdSupplier; this.secretSharedKeySupplier = secretSharedKeySupplier; this.reportCoresViaCfgFlag = Flags.REPORT_CORES_VIA_CFG.bindTo(flagSource); this.coreEncryptionPublicKeyIdFlag = Flags.CORE_ENCRYPTION_PUBLIC_KEY_ID.bindTo(flagSource); } public void converge(NodeAgentContext context, Supplier<Map<String, Object>> nodeAttributesSupplier, Optional<DockerImage> dockerImage, boolean throwIfCoreBeingWritten) { ContainerPath containerCrashPath = context.paths().of(crashPatchInContainer, context.users().vespa()); ContainerPath containerProcessingPath = containerCrashPath.resolve(PROCESSING_DIRECTORY_NAME); updateMetrics(context, containerCrashPath); if (throwIfCoreBeingWritten) { List<String> pendingCores = FileFinder.files(containerCrashPath) .match(fileAttributes -> !isReadyForProcessing(fileAttributes)) .maxDepth(1).stream() .map(FileFinder.FileAttributes::filename) .toList(); if (!pendingCores.isEmpty()) throw ConvergenceException.ofError(String.format("Cannot process %s coredumps: Still being written", pendingCores.size() < 5 ? 
pendingCores : pendingCores.size())); } getCoredumpToProcess(context, containerCrashPath, containerProcessingPath) .ifPresent(path -> { if (reportCoresViaCfgFlag.with(FetchVector.Dimension.NODE_TYPE, context.nodeType().name()).value()) { processAndReportSingleCoreDump2(context, path, dockerImage); } else { processAndReportSingleCoredump(context, path, nodeAttributesSupplier); } }); } /** @return path to directory inside processing directory that contains a core dump file to process */ Optional<ContainerPath> getCoredumpToProcess(NodeAgentContext context, ContainerPath containerCrashPath, ContainerPath containerProcessingPath) { return FileFinder.directories(containerProcessingPath).stream() .map(FileFinder.FileAttributes::path) .findAny() .map(ContainerPath.class::cast) .or(() -> enqueueCoredump(context, containerCrashPath, containerProcessingPath)); } /** * Moves a coredump and related hs_err file(s) to a new directory under the processing/ directory. * Limit to only processing one coredump at the time, starting with the oldest. * * Assumption: hs_err files are much smaller than core files and are written (last modified time) * before the core file. 
* * @return path to directory inside processing directory which contains the enqueued core dump file */ Optional<ContainerPath> enqueueCoredump(NodeAgentContext context, ContainerPath containerCrashPath, ContainerPath containerProcessingPath) { Predicate<String> isCoreDump = filename -> !HS_ERR_PATTERN.matcher(filename).matches(); List<Path> toProcess = FileFinder.files(containerCrashPath) .match(attributes -> { if (isReadyForProcessing(attributes)) { return true; } else { if (isCoreDump.test(attributes.filename())) context.log(logger, attributes.path() + " is still being written"); return false; } }) .maxDepth(1) .stream() .sorted(Comparator.comparing(FileFinder.FileAttributes::lastModifiedTime)) .map(FileFinder.FileAttributes::path) .toList(); int coredumpIndex = IntStream.range(0, toProcess.size()) .filter(i -> isCoreDump.test(toProcess.get(i).getFileName().toString())) .findFirst() .orElse(-1); if (coredumpIndex == -1) return Optional.empty(); ContainerPath enqueuedDir = containerProcessingPath.resolve(coredumpIdSupplier.get()); new MakeDirectory(enqueuedDir).createParents().converge(context); IntStream.range(0, coredumpIndex + 1) .forEach(i -> { Path path = toProcess.get(i); String prefix = i == coredumpIndex ? 
COREDUMP_FILENAME_PREFIX : ""; new FileMover(path, enqueuedDir.resolve(prefix + path.getFileName())).converge(context); }); return Optional.of(enqueuedDir); } void processAndReportSingleCoredump(NodeAgentContext context, ContainerPath coredumpDirectory, Supplier<Map<String, Object>> nodeAttributesSupplier) { try { Optional<SecretSharedKey> sharedCoreKey = Optional.of(corePublicKeyFlagValue(context)) .filter(k -> !k.isEmpty()) .map(KeyId::ofString) .flatMap(secretSharedKeySupplier::create); Optional<String> decryptionToken = sharedCoreKey.map(k -> k.sealedSharedKey().toTokenString()); String metadata = getMetadata(context, coredumpDirectory, nodeAttributesSupplier, decryptionToken); String coredumpId = coredumpDirectory.getFileName().toString(); coredumpReporter.reportCoredump(coredumpId, metadata); finishProcessing(context, coredumpDirectory, sharedCoreKey); context.log(logger, "Successfully reported coredump " + coredumpId); } catch (Exception e) { throw new RuntimeException("Failed to process coredump " + coredumpDirectory, e); } } /** * @return coredump metadata from metadata.json if present, otherwise attempts to get metadata using * {@link CoreCollector} and stores it to metadata.json */ String getMetadata(NodeAgentContext context, ContainerPath coredumpDirectory, Supplier<Map<String, Object>> nodeAttributesSupplier, Optional<String> decryptionToken) throws IOException { UnixPath metadataPath = new UnixPath(coredumpDirectory.resolve(METADATA_FILE_NAME)); if (!metadataPath.exists()) { ContainerPath coredumpFile = findCoredumpFileInProcessingDirectory(coredumpDirectory); Map<String, Object> metadata = new HashMap<>(coreCollector.collect(context, coredumpFile)); metadata.putAll(nodeAttributesSupplier.get()); metadata.put("coredump_path", doneCoredumpsPath .resolve(context.containerName().asString()) .resolve(coredumpDirectory.getFileName().toString()) .resolve(coredumpFile.getFileName().toString()).toString()); decryptionToken.ifPresent(token -> 
metadata.put("decryption_token", token)); String metadataFields = objectMapper.writeValueAsString(Map.of("fields", metadata)); metadataPath.writeUtf8File(metadataFields); return metadataFields; } else { if (decryptionToken.isPresent()) { String metadataFields = metadataWithPatchedTokenValue(metadataPath, decryptionToken.get()); metadataPath.deleteIfExists(); metadataPath.writeUtf8File(metadataFields); return metadataFields; } else { return metadataPath.readUtf8File(); } } } private String metadataWithPatchedTokenValue(UnixPath metadataPath, String decryptionToken) throws JsonProcessingException { var jsonRoot = objectMapper.readTree(metadataPath.readUtf8File()); if (jsonRoot.path("fields").isObject()) { ((ObjectNode)jsonRoot.get("fields")).put("decryption_token", decryptionToken); } return objectMapper.writeValueAsString(jsonRoot); } static OutputStream maybeWrapWithEncryption(OutputStream wrappedStream, Optional<SecretSharedKey> sharedCoreKey) { return sharedCoreKey .map(key -> (OutputStream)new CipherOutputStream(wrappedStream, SharedKeyGenerator.makeAesGcmEncryptionCipher(key))) .orElse(wrappedStream); } /** * Compresses and, if a key is provided, encrypts core file (and deletes the uncompressed core), then moves * the entire core dump processing directory to {@link */ private void finishProcessing(NodeAgentContext context, ContainerPath coredumpDirectory, Optional<SecretSharedKey> sharedCoreKey) { ContainerPath coreFile = findCoredumpFileInProcessingDirectory(coredumpDirectory); String extension = COMPRESSED_EXTENSION + (sharedCoreKey.isPresent() ? 
ENCRYPTED_EXTENSION : ""); ContainerPath compressedCoreFile = coreFile.resolveSibling(coreFile.getFileName() + extension); try (ZstdCompressingInputStream zcis = new ZstdCompressingInputStream(Files.newInputStream(coreFile)); OutputStream fos = maybeWrapWithEncryption(Files.newOutputStream(compressedCoreFile), sharedCoreKey)) { zcis.transferTo(fos); } catch (IOException e) { throw new UncheckedIOException(e); } new FileDeleter(coreFile).converge(context); Path newCoredumpDirectory = doneCoredumpsPath.resolve(context.containerName().asString()); new MakeDirectory(newCoredumpDirectory).createParents().converge(context); new FileMover(coredumpDirectory.pathOnHost(), newCoredumpDirectory.resolve(coredumpDirectory.getFileName().toString())) .converge(context); } ContainerPath findCoredumpFileInProcessingDirectory(ContainerPath coredumpProccessingDirectory) { return (ContainerPath) FileFinder.files(coredumpProccessingDirectory) .match(nameStartsWith(COREDUMP_FILENAME_PREFIX).and(nameEndsWith(COMPRESSED_EXTENSION).negate()) .and(nameEndsWith(ENCRYPTED_EXTENSION).negate())) .maxDepth(1) .stream() .map(FileFinder.FileAttributes::path) .findFirst() .orElseThrow(() -> new IllegalStateException( "No coredump file found in processing directory " + coredumpProccessingDirectory)); } void updateMetrics(NodeAgentContext context, ContainerPath containerCrashPath) { Dimensions dimensions = generateDimensions(context); int numberOfUnprocessedCoredumps = FileFinder.files(containerCrashPath) .match(nameStartsWith(".").negate()) .match(nameMatches(HS_ERR_PATTERN).negate()) .match(nameEndsWith(COMPRESSED_EXTENSION).negate()) .match(nameEndsWith(ENCRYPTED_EXTENSION).negate()) .match(nameStartsWith("metadata").negate()) .list().size(); metrics.declareGauge(Metrics.APPLICATION_NODE, "coredumps.enqueued", dimensions, Metrics.DimensionType.PRETAGGED).sample(numberOfUnprocessedCoredumps); Path processedCoredumpsPath = doneCoredumpsPath.resolve(context.containerName().asString()); int 
numberOfProcessedCoredumps = FileFinder.directories(processedCoredumpsPath) .maxDepth(1) .list().size(); metrics.declareGauge(Metrics.APPLICATION_NODE, "coredumps.processed", dimensions, Metrics.DimensionType.PRETAGGED).sample(numberOfProcessedCoredumps); } private Dimensions generateDimensions(NodeAgentContext context) { NodeSpec node = context.node(); Dimensions.Builder dimensionsBuilder = new Dimensions.Builder() .add("host", node.hostname()) .add("flavor", node.flavor()) .add("state", node.state().toString()) .add("zone", context.zone().getId().value()); node.owner().ifPresent(owner -> dimensionsBuilder .add("tenantName", owner.tenant().value()) .add("applicationName", owner.application().value()) .add("instanceName", owner.instance().value()) .add("app", String.join(".", owner.application().value(), owner.instance().value())) .add("applicationId", owner.toFullString()) ); node.membership().ifPresent(membership -> dimensionsBuilder .add("clustertype", membership.type().value()) .add("clusterid", membership.clusterId()) ); node.parentHostname().ifPresent(parent -> dimensionsBuilder.add("parentHostname", parent)); dimensionsBuilder.add("orchestratorState", node.orchestratorStatus().asString()); dimensionsBuilder.add("system", context.zone().getSystemName().value()); return dimensionsBuilder.build(); } private boolean isReadyForProcessing(FileFinder.FileAttributes fileAttributes) { return clock.instant().minusSeconds(60).isAfter(fileAttributes.lastModifiedTime()); } void processAndReportSingleCoreDump2(NodeAgentContext context, ContainerPath coreDumpDirectory, Optional<DockerImage> dockerImage) { CoreDumpMetadata metadata = gatherMetadata(context, coreDumpDirectory); dockerImage.ifPresent(metadata::setDockerImage); dockerImage.flatMap(DockerImage::tag).ifPresent(metadata::setVespaVersion); dockerImage.ifPresent(metadata::setDockerImage); Optional<SecretSharedKey> sharedCoreKey = Optional.of(corePublicKeyFlagValue(context)) .filter(k -> !k.isEmpty()) 
.map(KeyId::ofString) .flatMap(secretSharedKeySupplier::create); sharedCoreKey.map(key -> key.sealedSharedKey().toTokenString()).ifPresent(metadata::setDecryptionToken); String coreDumpId = coreDumpDirectory.getFileName().toString(); cores.report(context.hostname(), coreDumpId, metadata); context.log(logger, "Core dump reported: " + coreDumpId); finishProcessing(context, coreDumpDirectory, sharedCoreKey); } private CoreDumpMetadata gatherMetadata(NodeAgentContext context, ContainerPath coreDumpDirectory) { ContainerPath metadataPath = coreDumpDirectory.resolve(METADATA2_FILE_NAME); Optional<ReportCoreDumpRequest> request = ReportCoreDumpRequest.load(metadataPath); if (request.isPresent()) { return request.map(requestInstance -> { var metadata = new CoreDumpMetadata(); requestInstance.populateMetadata(metadata, FileSystems.getDefault()); return metadata; }) .get(); } ContainerPath coreDumpFile = findCoredumpFileInProcessingDirectory(coreDumpDirectory); CoreDumpMetadata metadata = coreCollector.collect2(context, coreDumpFile); metadata.setCpuMicrocodeVersion(getMicrocodeVersion()) .setKernelVersion(System.getProperty("os.version")) .setCoreDumpPath(doneCoredumpsPath.resolve(context.containerName().asString()) .resolve(coreDumpDirectory.getFileName().toString()) .resolve(coreDumpFile.getFileName().toString())); ReportCoreDumpRequest requestInstance = new ReportCoreDumpRequest(); requestInstance.fillFrom(metadata); requestInstance.save(metadataPath); context.log(logger, "Wrote " + metadataPath.pathOnHost()); return metadata; } private String getMicrocodeVersion() { String output = uncheck(() -> Files.readAllLines(Paths.get("/proc/cpuinfo")).stream() .filter(line -> line.startsWith("microcode")) .findFirst() .orElse("microcode : UNKNOWN")); String[] results = output.split(":"); if (results.length != 2) { throw ConvergenceException.ofError("Result from detect microcode command not as expected: " + output); } return results[1].trim(); } }
Sorry, I was not clear enough. I meant we don't need to test `sequence(SpecificInputTypeStrategies.ARRAY_COMPARABLE)` takes a single argument. We don't need to test that, because that's a property of the `sequence`, whatever we use here instead of `SpecificInputTypeStrategies.ARRAY_COMPARABLE` does not really matter. The other two cases make total sense and you do it the right way! 👍
protected Stream<TestSpec> testData() { return Stream.of( TestSpec.forStrategy(WILDCARD) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.INT()) .expectSignature("f(*)") .expectArgumentTypes(DataTypes.INT(), DataTypes.INT()), TestSpec.forStrategy(WILDCARD) .calledWithArgumentTypes() .expectSignature("f(*)") .expectArgumentTypes(), TestSpec.forStrategy( explicitSequence( DataTypes.INT().bridgedTo(int.class), DataTypes.BOOLEAN())) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.BOOLEAN()) .expectSignature("f(INT, BOOLEAN)") .expectArgumentTypes( DataTypes.INT().bridgedTo(int.class), DataTypes.BOOLEAN()), TestSpec.forStrategy( explicitSequence( DataTypes.ROW( DataTypes.FIELD("expected", DataTypes.INT())))) .calledWithArgumentTypes( DataTypes.ROW(DataTypes.FIELD("actual", DataTypes.INT()))) .expectSignature("f(ROW<`expected` INT>)") .expectArgumentTypes( DataTypes.ROW(DataTypes.FIELD("expected", DataTypes.INT()))), TestSpec.forStrategy( explicitSequence( new String[] {"i", "s"}, new DataType[] {DataTypes.INT(), DataTypes.STRING()})) .calledWithArgumentTypes(DataTypes.INT()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf(i INT, s STRING)"), TestSpec.forStrategy(explicitSequence(DataTypes.BIGINT().notNull())) .calledWithArgumentTypes(DataTypes.BIGINT()) .expectErrorMessage( "Unsupported argument type. Expected type 'BIGINT NOT NULL' but actual type was 'BIGINT'."), TestSpec.forStrategy(explicitSequence(DataTypes.BIGINT())) .calledWithArgumentTypes(DataTypes.INT()) .expectArgumentTypes(DataTypes.BIGINT()), TestSpec.forStrategy(explicitSequence(DataTypes.BIGINT())) .calledWithArgumentTypes(DataTypes.STRING()) .expectErrorMessage( "Unsupported argument type. Expected type 'BIGINT' but actual type was 'STRING'."), TestSpec.forStrategy(explicitSequence(DataTypes.BIGINT(), DataTypes.BIGINT())) .calledWithArgumentTypes(DataTypes.BIGINT()) .expectErrorMessage( "Invalid number of arguments. 
At least 2 arguments expected but 1 passed."), TestSpec.forStrategy(sequence(ANY)) .calledWithArgumentTypes(DataTypes.BIGINT()) .expectSignature("f(<ANY>)") .expectArgumentTypes(DataTypes.BIGINT()), TestSpec.forStrategy(sequence(ANY)) .calledWithArgumentTypes(DataTypes.BIGINT(), DataTypes.BIGINT()) .expectErrorMessage( "Invalid number of arguments. At most 1 arguments expected but 2 passed."), TestSpec.forStrategy( "OR with bridging class", or( explicitSequence(DataTypes.STRING()), explicitSequence(DataTypes.INT().bridgedTo(int.class)), explicitSequence(DataTypes.BOOLEAN()))) .calledWithArgumentTypes(DataTypes.INT()) .calledWithArgumentTypes(DataTypes.TINYINT()) .expectSignature("f(STRING)\nf(INT)\nf(BOOLEAN)") .expectArgumentTypes(DataTypes.INT().bridgedTo(int.class)), TestSpec.forStrategy( "OR with implicit casting", or( explicitSequence(DataTypes.TINYINT()), explicitSequence(DataTypes.INT()), explicitSequence(DataTypes.BIGINT()))) .calledWithArgumentTypes(DataTypes.SMALLINT()) .expectArgumentTypes(DataTypes.INT()), TestSpec.forStrategy( "OR with implicit casting of null", or( explicitSequence(DataTypes.STRING().notNull()), explicitSequence(DataTypes.INT().notNull()), explicitSequence(DataTypes.BIGINT()))) .calledWithArgumentTypes(DataTypes.NULL()) .expectArgumentTypes(DataTypes.BIGINT()), TestSpec.forStrategy( "OR with implicit casting using first match", or( explicitSequence(DataTypes.VARCHAR(20)), explicitSequence(DataTypes.VARCHAR(10)))) .calledWithArgumentTypes(DataTypes.VARCHAR(1)) .expectArgumentTypes(DataTypes.VARCHAR(20)), TestSpec.forStrategy( "OR with invalid implicit casting of null", or( explicitSequence(DataTypes.STRING().notNull()), explicitSequence(DataTypes.INT().notNull()), explicitSequence(DataTypes.BIGINT().notNull()))) .calledWithArgumentTypes(DataTypes.NULL()) .expectErrorMessage( "Invalid input arguments. 
Expected signatures are:\n" + "f(STRING NOT NULL)\nf(INT NOT NULL)\nf(BIGINT NOT NULL)"), TestSpec.forStrategy( "OR with invalid type", or( explicitSequence(DataTypes.INT()), explicitSequence(DataTypes.STRING()))) .calledWithArgumentTypes(DataTypes.BOOLEAN()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf(INT)\nf(STRING)"), TestSpec.forStrategy(explicitSequence(DataTypes.INT(), DataTypes.BOOLEAN())) .calledWithArgumentTypes(DataTypes.BOOLEAN(), DataTypes.INT()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf(INT, BOOLEAN)"), TestSpec.forStrategy(sequence(ANY, explicit(DataTypes.INT()))) .calledWithArgumentTypes(DataTypes.BOOLEAN(), DataTypes.INT()) .calledWithArgumentTypes(DataTypes.BOOLEAN(), DataTypes.TINYINT()) .expectArgumentTypes(DataTypes.BOOLEAN(), DataTypes.INT()), TestSpec.forStrategy( sequence( new String[] {"any", "int"}, new ArgumentTypeStrategy[] { ANY, explicit(DataTypes.INT()) })) .calledWithArgumentTypes(DataTypes.STRING(), DataTypes.BOOLEAN()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf(any <ANY>, int INT)"), TestSpec.forStrategy( sequence( explicit(DataTypes.INT()), or( explicit(DataTypes.BOOLEAN()), explicit(DataTypes.INT())))) .expectSignature("f(INT, [BOOLEAN | INT])") .calledWithArgumentTypes(DataTypes.INT(), DataTypes.INT()) .calledWithArgumentTypes(DataTypes.TINYINT(), DataTypes.TINYINT()) .expectArgumentTypes(DataTypes.INT(), DataTypes.INT()), TestSpec.forStrategy( sequence( explicit(DataTypes.INT()), or( explicit(DataTypes.BOOLEAN()), explicit(DataTypes.STRING())))) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.BIGINT()) .expectErrorMessage( "Invalid input arguments. 
Expected signatures are:\nf(INT, [BOOLEAN | STRING])"), TestSpec.forStrategy(sequence(LITERAL)) .calledWithLiteralAt(0) .calledWithArgumentTypes(DataTypes.INT()) .expectArgumentTypes(DataTypes.INT()), TestSpec.forStrategy( sequence( and(LITERAL, explicit(DataTypes.STRING())), explicit(DataTypes.INT()))) .calledWithLiteralAt(0) .calledWithArgumentTypes(DataTypes.STRING(), DataTypes.INT()) .expectSignature("f([<LITERAL NOT NULL> & STRING], INT)") .expectArgumentTypes(DataTypes.STRING(), DataTypes.INT()), TestSpec.forStrategy( sequence( and(explicit(DataTypes.STRING()), LITERAL_OR_NULL), explicit(DataTypes.INT()))) .calledWithArgumentTypes(DataTypes.STRING(), DataTypes.INT()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf([STRING & <LITERAL>], INT)"), TestSpec.forStrategy( varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), explicit(DataTypes.BOOLEAN()) })) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.BOOLEAN(), DataTypes.BOOLEAN(), DataTypes.BOOLEAN()) .expectArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.BOOLEAN(), DataTypes.BOOLEAN(), DataTypes.BOOLEAN()), TestSpec.forStrategy( varyingSequence( new String[] {"var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.BOOLEAN().bridgedTo(boolean.class)) })) .calledWithArgumentTypes( DataTypes.BOOLEAN(), DataTypes.BOOLEAN(), DataTypes.BOOLEAN()) .expectSignature("f(var BOOLEAN...)") .expectArgumentTypes( DataTypes.BOOLEAN().bridgedTo(boolean.class), DataTypes.BOOLEAN().bridgedTo(boolean.class), DataTypes.BOOLEAN().bridgedTo(boolean.class)), TestSpec.forStrategy( varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), explicit(DataTypes.BOOLEAN()) })) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.STRING()) .expectArgumentTypes(DataTypes.INT(), DataTypes.STRING()), TestSpec.forStrategy( 
varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), explicit(DataTypes.BOOLEAN()) })) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.STRING()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf(i INT, s STRING, var BOOLEAN...)"), TestSpec.forStrategy( varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), explicit(DataTypes.BOOLEAN()) })) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.INT(), DataTypes.BOOLEAN()) .expectErrorMessage( "Unsupported argument type. Expected type 'STRING' but actual type was 'INT'."), TestSpec.forStrategy( varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), or( explicit(DataTypes.BOOLEAN()), explicit(DataTypes.INT())) })) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.INT(), DataTypes.BOOLEAN()) .expectArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.INT(), DataTypes.BOOLEAN()), TestSpec.forStrategy( varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), or( explicit(DataTypes.BOOLEAN()), explicit(DataTypes.INT())) })) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.STRING(), DataTypes.STRING()) .expectErrorMessage( "Invalid input arguments. 
Expected signatures are:\nf(i INT, s STRING, var [BOOLEAN | INT]...)"), TestSpec.forStrategy(WILDCARD) .calledWithArgumentTypes( DataTypes.NULL(), DataTypes.STRING(), DataTypes.NULL()) .expectSignature("f(*)") .expectArgumentTypes( DataTypes.NULL(), DataTypes.STRING(), DataTypes.NULL()), TestSpec.forStrategy(WILDCARD) .typedArguments( DataTypes.INT().bridgedTo(int.class), DataTypes.STRING(), DataTypes.BOOLEAN()) .calledWithArgumentTypes( DataTypes.NULL(), DataTypes.STRING(), DataTypes.NULL()) .expectArgumentTypes( DataTypes.INT().bridgedTo(int.class), DataTypes.STRING(), DataTypes.BOOLEAN()), TestSpec.forStrategy(sequence(OUTPUT_IF_NULL, OUTPUT_IF_NULL, OUTPUT_IF_NULL)) .surroundingStrategy(explicitSequence(DataTypes.BOOLEAN())) .calledWithArgumentTypes( DataTypes.NULL(), DataTypes.STRING(), DataTypes.NULL()) .expectSignature("f(<OUTPUT>, <OUTPUT>, <OUTPUT>)") .expectArgumentTypes( DataTypes.BOOLEAN(), DataTypes.STRING(), DataTypes.BOOLEAN()), TestSpec.forStrategy(sequence(or(OUTPUT_IF_NULL, explicit(DataTypes.INT())))) .surroundingStrategy(explicitSequence(DataTypes.BOOLEAN())) .calledWithArgumentTypes(DataTypes.NULL()) .expectSignature("f([<OUTPUT> | INT])") .expectArgumentTypes(DataTypes.BOOLEAN()), TestSpec.forStrategy(explicitSequence(DataTypes.BOOLEAN())) .surroundingStrategy(WILDCARD) .calledWithArgumentTypes(DataTypes.NULL()) .expectSignature("f(BOOLEAN)") .expectArgumentTypes(DataTypes.BOOLEAN()), TestSpec.forStrategy(sequence(or(OUTPUT_IF_NULL, explicit(DataTypes.INT())))) .calledWithArgumentTypes(DataTypes.NULL()) .expectSignature("f([<OUTPUT> | INT])") .expectArgumentTypes(DataTypes.INT()), TestSpec.forStrategy(WILDCARD) .typedArguments(DataTypes.INT(), DataTypes.STRING()) .calledWithArgumentTypes(DataTypes.TINYINT(), DataTypes.STRING()) .expectSignature("f(INT, STRING)") .expectArgumentTypes(DataTypes.INT(), DataTypes.STRING()), TestSpec.forStrategy(WILDCARD) .typedArguments(DataTypes.INT(), DataTypes.STRING()) 
.calledWithArgumentTypes(DataTypes.STRING(), DataTypes.STRING()) .expectErrorMessage( "Invalid argument type at position 0. Data type INT expected but STRING passed."), TestSpec.forStrategy(WILDCARD) .namedArguments("i", "s") .typedArguments(DataTypes.INT(), DataTypes.STRING()) .expectSignature("f(i => INT, s => STRING)"), TestSpec.forStrategy( "Wildcard with count verifies arguments number", InputTypeStrategies.wildcardWithCount( ConstantArgumentCount.from(2))) .calledWithArgumentTypes(DataTypes.STRING()) .expectErrorMessage( "Invalid number of arguments. At least 2 arguments expected but 1 passed."), TestSpec.forStrategy( "Array strategy infers a common type", SpecificInputTypeStrategies.ARRAY) .expectSignature("f(<COMMON>, <COMMON>...)") .calledWithArgumentTypes( DataTypes.INT().notNull(), DataTypes.BIGINT().notNull(), DataTypes.DOUBLE(), DataTypes.DOUBLE().notNull()) .expectArgumentTypes( DataTypes.DOUBLE(), DataTypes.DOUBLE(), DataTypes.DOUBLE(), DataTypes.DOUBLE()), TestSpec.forStrategy( "Array strategy fails for no arguments", SpecificInputTypeStrategies.ARRAY) .calledWithArgumentTypes() .expectErrorMessage( "Invalid number of arguments. At least 1 arguments expected but 0 passed."), TestSpec.forStrategy( "Array strategy fails for null arguments", SpecificInputTypeStrategies.ARRAY) .calledWithArgumentTypes(DataTypes.NULL()) .expectErrorMessage("Could not find a common type for arguments: [NULL]"), TestSpec.forStrategy( "Map strategy infers common types", SpecificInputTypeStrategies.MAP) .calledWithArgumentTypes( DataTypes.INT().notNull(), DataTypes.DOUBLE(), DataTypes.BIGINT().notNull(), DataTypes.FLOAT().notNull()) .expectArgumentTypes( DataTypes.BIGINT().notNull(), DataTypes.DOUBLE(), DataTypes.BIGINT().notNull(), DataTypes.DOUBLE()), TestSpec.forStrategy( "Map strategy fails for no arguments", SpecificInputTypeStrategies.MAP) .calledWithArgumentTypes() .expectErrorMessage( "Invalid number of arguments. 
At least 2 arguments expected but 0 passed."), TestSpec.forStrategy( "Map strategy fails for an odd number of arguments", SpecificInputTypeStrategies.MAP) .calledWithArgumentTypes( DataTypes.BIGINT(), DataTypes.BIGINT(), DataTypes.BIGINT()) .expectErrorMessage("Invalid number of arguments. 3 arguments passed."), TestSpec.forStrategy("Cast strategy", SpecificInputTypeStrategies.CAST) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.BIGINT()) .calledWithLiteralAt(1, DataTypes.BIGINT()) .expectSignature("f(<ANY>, <TYPE LITERAL>)") .expectArgumentTypes(DataTypes.INT(), DataTypes.BIGINT()), TestSpec.forStrategy( "Cast strategy for invalid target type", SpecificInputTypeStrategies.CAST) .calledWithArgumentTypes(DataTypes.BOOLEAN(), DataTypes.DATE()) .calledWithLiteralAt(1, DataTypes.DATE()) .expectErrorMessage("Unsupported cast from 'BOOLEAN' to 'DATE'."), TestSpec.forStrategy( "Logical type roots instead of concrete data types", sequence( logical(LogicalTypeRoot.VARCHAR), logical(LogicalTypeRoot.DECIMAL, true), logical(LogicalTypeRoot.DECIMAL), logical(LogicalTypeRoot.BOOLEAN), logical(LogicalTypeRoot.INTEGER, false), logical(LogicalTypeRoot.INTEGER))) .calledWithArgumentTypes( DataTypes.NULL(), DataTypes.INT(), DataTypes.DOUBLE(), DataTypes.BOOLEAN().notNull(), DataTypes.INT().notNull(), DataTypes.INT().notNull()) .expectSignature( "f(<VARCHAR>, <DECIMAL NULL>, <DECIMAL>, <BOOLEAN>, <INTEGER NOT NULL>, <INTEGER>)") .expectArgumentTypes( DataTypes.VARCHAR(1), DataTypes.DECIMAL(10, 0), DataTypes.DECIMAL(30, 15), DataTypes.BOOLEAN().notNull(), DataTypes.INT().notNull(), DataTypes.INT().notNull()), TestSpec.forStrategy( "Logical type roots with wrong implicit cast", sequence(logical(LogicalTypeRoot.VARCHAR))) .calledWithArgumentTypes(DataTypes.INT()) .expectSignature("f(<VARCHAR>)") .expectErrorMessage( "Unsupported argument type. 
Expected type root 'VARCHAR' but actual type was 'INT'."), TestSpec.forStrategy( "Logical type roots with wrong nullability", sequence(logical(LogicalTypeRoot.VARCHAR, false))) .calledWithArgumentTypes(DataTypes.VARCHAR(5)) .expectSignature("f(<VARCHAR NOT NULL>)") .expectErrorMessage( "Unsupported argument type. Expected nullable type of root 'VARCHAR' but actual type was 'VARCHAR(5)'."), TestSpec.forStrategy( "Logical type family instead of concrete data types", sequence( logical(LogicalTypeFamily.CHARACTER_STRING, true), logical(LogicalTypeFamily.EXACT_NUMERIC), logical(LogicalTypeFamily.APPROXIMATE_NUMERIC), logical(LogicalTypeFamily.APPROXIMATE_NUMERIC), logical(LogicalTypeFamily.APPROXIMATE_NUMERIC, false))) .calledWithArgumentTypes( DataTypes.NULL(), DataTypes.TINYINT(), DataTypes.INT(), DataTypes.BIGINT().notNull(), DataTypes.DECIMAL(10, 2).notNull()) .expectSignature( "f(<CHARACTER_STRING NULL>, <EXACT_NUMERIC>, <APPROXIMATE_NUMERIC>, <APPROXIMATE_NUMERIC>, <APPROXIMATE_NUMERIC NOT NULL>)") .expectArgumentTypes( DataTypes.VARCHAR(1), DataTypes.TINYINT(), DataTypes.DOUBLE(), DataTypes.DOUBLE().notNull(), DataTypes.DOUBLE().notNull()), TestSpec.forStrategy( "Logical type family with invalid type", sequence(logical(LogicalTypeFamily.EXACT_NUMERIC))) .calledWithArgumentTypes(DataTypes.FLOAT()) .expectSignature("f(<EXACT_NUMERIC>)") .expectErrorMessage( "Unsupported argument type. 
Expected type of family 'EXACT_NUMERIC' but actual type was 'FLOAT'."), TestSpec.forStrategy( "Constraint argument type strategy", sequence( and( explicit(DataTypes.BOOLEAN()), constraint( "%s must be nullable.", args -> args.get(0) .getLogicalType() .isNullable())))) .calledWithArgumentTypes(DataTypes.BOOLEAN()) .expectSignature("f([BOOLEAN & <CONSTRAINT>])") .expectArgumentTypes(DataTypes.BOOLEAN()), TestSpec.forStrategy( "Constraint argument type strategy invalid", sequence( and( explicit(DataTypes.BOOLEAN().notNull()), constraint( "My constraint says %s must be nullable.", args -> args.get(0) .getLogicalType() .isNullable())))) .calledWithArgumentTypes(DataTypes.BOOLEAN().notNull()) .expectErrorMessage( "My constraint says BOOLEAN NOT NULL must be nullable."), TestSpec.forStrategy( "Composite type strategy with ROW", sequence(InputTypeStrategies.COMPOSITE)) .calledWithArgumentTypes( DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.BIGINT()))) .expectSignature("f(<COMPOSITE>)") .expectArgumentTypes( DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.BIGINT()))), TestSpec.forStrategy( "Composite type strategy with STRUCTURED type", sequence(InputTypeStrategies.COMPOSITE)) .calledWithArgumentTypes(DataTypes.of(SimpleStructuredType.class).notNull()) .expectSignature("f(<COMPOSITE>)") .expectArgumentTypes(DataTypes.of(SimpleStructuredType.class).notNull()), TestSpec.forStrategy( "Same named arguments for overloaded method.", or( sequence(explicit(DataTypes.STRING())), sequence(explicit(DataTypes.INT())))) .namedArguments("sameName") .calledWithArgumentTypes(DataTypes.BOOLEAN()) .expectErrorMessage( "Invalid input arguments. 
Expected signatures are:\nf(STRING)\nf(INT)"), TestSpec.forStrategy( "Common argument type strategy", sequence( InputTypeStrategies.COMMON_ARG, InputTypeStrategies.COMMON_ARG)) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.BIGINT()) .expectSignature("f(<COMMON>, <COMMON>)") .expectArgumentTypes(DataTypes.BIGINT(), DataTypes.BIGINT()), TestSpec.forStrategy( "ArrayElement argument type strategy", sequence( logical(LogicalTypeRoot.ARRAY), SpecificInputTypeStrategies.ARRAY_ELEMENT_ARG)) .calledWithArgumentTypes( DataTypes.ARRAY(DataTypes.INT().notNull()).notNull(), DataTypes.INT()) .expectSignature("f(<ARRAY>, <ARRAY ELEMENT>)") .expectArgumentTypes( DataTypes.ARRAY(DataTypes.INT().notNull()).notNull(), DataTypes.INT()), TestSpec.forStrategy(sequence(SpecificInputTypeStrategies.ARRAY_COMPARABLE)) .expectSignature("f(<ARRAY<COMPARABLE>>)") .calledWithArgumentTypes(DataTypes.ARRAY(DataTypes.ROW())) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\n" + "f(<ARRAY<COMPARABLE>>)"), TestSpec.forStrategy( "Strategy fails if input argument type is not ARRAY", sequence(SpecificInputTypeStrategies.ARRAY_COMPARABLE)) .calledWithArgumentTypes(DataTypes.INT()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\n" + "f(<ARRAY<COMPARABLE>>)"), TestSpec.forStrategy( "Strategy fails if the number of input arguments are not one", sequence(SpecificInputTypeStrategies.ARRAY_COMPARABLE)) .calledWithArgumentTypes( DataTypes.ARRAY(DataTypes.INT()), DataTypes.ARRAY(DataTypes.STRING())) .expectErrorMessage( "Invalid input arguments. 
Expected signatures are:\n" + "f(<ARRAY<COMPARABLE>>)"), TestSpec.forStrategy( "PROCTIME type strategy", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.PROCTIME)) .calledWithArgumentTypes(timeIndicatorType(TimestampKind.PROCTIME)) .expectSignature("f(<WINDOW REFERENCE>)") .expectArgumentTypes(timeIndicatorType(TimestampKind.PROCTIME)), TestSpec.forStrategy( "PROCTIME type strategy on non time indicator", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.PROCTIME)) .calledWithArgumentTypes(DataTypes.BIGINT()) .expectErrorMessage("Reference to a rowtime or proctime window required."), TestSpec.forStrategy( "ROWTIME type strategy", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.ROWTIME)) .calledWithArgumentTypes(timeIndicatorType(TimestampKind.ROWTIME)) .expectSignature("f(<WINDOW REFERENCE>)") .expectArgumentTypes(timeIndicatorType(TimestampKind.ROWTIME)), TestSpec.forStrategy( "ROWTIME type strategy on proctime indicator", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.ROWTIME)) .calledWithArgumentTypes(timeIndicatorType(TimestampKind.PROCTIME)) .expectErrorMessage( "A proctime window cannot provide a rowtime attribute."), TestSpec.forStrategy( "PROCTIME type strategy on rowtime indicator", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.PROCTIME)) .calledWithArgumentTypes(timeIndicatorType(TimestampKind.ROWTIME)) .expectArgumentTypes(timeIndicatorType(TimestampKind.PROCTIME)), TestSpec.forStrategy( "ROWTIME type strategy on long in batch mode", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.ROWTIME)) .calledWithArgumentTypes(DataTypes.BIGINT()) .expectArgumentTypes(DataTypes.BIGINT()), TestSpec.forStrategy( "ROWTIME type strategy on non time attribute", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.ROWTIME)) .calledWithArgumentTypes(DataTypes.SMALLINT()) .expectErrorMessage("Reference to a rowtime or proctime window required."), TestSpec.forStrategy( 
"PROCTIME type strategy on non time attribute", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.PROCTIME)) .calledWithArgumentTypes(DataTypes.SMALLINT()) .expectErrorMessage("Reference to a rowtime or proctime window required."), TestSpec.forStrategy( "Reinterpret_cast strategy", SpecificInputTypeStrategies.REINTERPRET_CAST) .calledWithArgumentTypes( DataTypes.DATE(), DataTypes.BIGINT(), DataTypes.BOOLEAN().notNull()) .calledWithLiteralAt(1, DataTypes.BIGINT()) .calledWithLiteralAt(2, true) .expectSignature("f(<ANY>, <TYPE LITERAL>, <TRUE | FALSE>)") .expectArgumentTypes( DataTypes.DATE(), DataTypes.BIGINT(), DataTypes.BOOLEAN().notNull()), TestSpec.forStrategy( "Reinterpret_cast strategy non literal overflow", SpecificInputTypeStrategies.REINTERPRET_CAST) .calledWithArgumentTypes( DataTypes.DATE(), DataTypes.BIGINT(), DataTypes.BOOLEAN().notNull()) .calledWithLiteralAt(1, DataTypes.BIGINT()) .expectErrorMessage("Not null boolean literal expected for overflow."), TestSpec.forStrategy( "Reinterpret_cast strategy not supported cast", SpecificInputTypeStrategies.REINTERPRET_CAST) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.BIGINT(), DataTypes.BOOLEAN().notNull()) .calledWithLiteralAt(1, DataTypes.BIGINT()) .calledWithLiteralAt(2, true) .expectErrorMessage("Unsupported reinterpret cast from 'INT' to 'BIGINT'")); }
+ "f(<ARRAY<COMPARABLE>>)"),
protected Stream<TestSpec> testData() { return Stream.of( TestSpec.forStrategy(WILDCARD) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.INT()) .expectSignature("f(*)") .expectArgumentTypes(DataTypes.INT(), DataTypes.INT()), TestSpec.forStrategy(WILDCARD) .calledWithArgumentTypes() .expectSignature("f(*)") .expectArgumentTypes(), TestSpec.forStrategy( explicitSequence( DataTypes.INT().bridgedTo(int.class), DataTypes.BOOLEAN())) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.BOOLEAN()) .expectSignature("f(INT, BOOLEAN)") .expectArgumentTypes( DataTypes.INT().bridgedTo(int.class), DataTypes.BOOLEAN()), TestSpec.forStrategy( explicitSequence( DataTypes.ROW( DataTypes.FIELD("expected", DataTypes.INT())))) .calledWithArgumentTypes( DataTypes.ROW(DataTypes.FIELD("actual", DataTypes.INT()))) .expectSignature("f(ROW<`expected` INT>)") .expectArgumentTypes( DataTypes.ROW(DataTypes.FIELD("expected", DataTypes.INT()))), TestSpec.forStrategy( explicitSequence( new String[] {"i", "s"}, new DataType[] {DataTypes.INT(), DataTypes.STRING()})) .calledWithArgumentTypes(DataTypes.INT()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf(i INT, s STRING)"), TestSpec.forStrategy(explicitSequence(DataTypes.BIGINT().notNull())) .calledWithArgumentTypes(DataTypes.BIGINT()) .expectErrorMessage( "Unsupported argument type. Expected type 'BIGINT NOT NULL' but actual type was 'BIGINT'."), TestSpec.forStrategy(explicitSequence(DataTypes.BIGINT())) .calledWithArgumentTypes(DataTypes.INT()) .expectArgumentTypes(DataTypes.BIGINT()), TestSpec.forStrategy(explicitSequence(DataTypes.BIGINT())) .calledWithArgumentTypes(DataTypes.STRING()) .expectErrorMessage( "Unsupported argument type. Expected type 'BIGINT' but actual type was 'STRING'."), TestSpec.forStrategy(explicitSequence(DataTypes.BIGINT(), DataTypes.BIGINT())) .calledWithArgumentTypes(DataTypes.BIGINT()) .expectErrorMessage( "Invalid number of arguments. 
At least 2 arguments expected but 1 passed."), TestSpec.forStrategy(sequence(ANY)) .calledWithArgumentTypes(DataTypes.BIGINT()) .expectSignature("f(<ANY>)") .expectArgumentTypes(DataTypes.BIGINT()), TestSpec.forStrategy(sequence(ANY)) .calledWithArgumentTypes(DataTypes.BIGINT(), DataTypes.BIGINT()) .expectErrorMessage( "Invalid number of arguments. At most 1 arguments expected but 2 passed."), TestSpec.forStrategy( "OR with bridging class", or( explicitSequence(DataTypes.STRING()), explicitSequence(DataTypes.INT().bridgedTo(int.class)), explicitSequence(DataTypes.BOOLEAN()))) .calledWithArgumentTypes(DataTypes.INT()) .calledWithArgumentTypes(DataTypes.TINYINT()) .expectSignature("f(STRING)\nf(INT)\nf(BOOLEAN)") .expectArgumentTypes(DataTypes.INT().bridgedTo(int.class)), TestSpec.forStrategy( "OR with implicit casting", or( explicitSequence(DataTypes.TINYINT()), explicitSequence(DataTypes.INT()), explicitSequence(DataTypes.BIGINT()))) .calledWithArgumentTypes(DataTypes.SMALLINT()) .expectArgumentTypes(DataTypes.INT()), TestSpec.forStrategy( "OR with implicit casting of null", or( explicitSequence(DataTypes.STRING().notNull()), explicitSequence(DataTypes.INT().notNull()), explicitSequence(DataTypes.BIGINT()))) .calledWithArgumentTypes(DataTypes.NULL()) .expectArgumentTypes(DataTypes.BIGINT()), TestSpec.forStrategy( "OR with implicit casting using first match", or( explicitSequence(DataTypes.VARCHAR(20)), explicitSequence(DataTypes.VARCHAR(10)))) .calledWithArgumentTypes(DataTypes.VARCHAR(1)) .expectArgumentTypes(DataTypes.VARCHAR(20)), TestSpec.forStrategy( "OR with invalid implicit casting of null", or( explicitSequence(DataTypes.STRING().notNull()), explicitSequence(DataTypes.INT().notNull()), explicitSequence(DataTypes.BIGINT().notNull()))) .calledWithArgumentTypes(DataTypes.NULL()) .expectErrorMessage( "Invalid input arguments. 
Expected signatures are:\n" + "f(STRING NOT NULL)\nf(INT NOT NULL)\nf(BIGINT NOT NULL)"), TestSpec.forStrategy( "OR with invalid type", or( explicitSequence(DataTypes.INT()), explicitSequence(DataTypes.STRING()))) .calledWithArgumentTypes(DataTypes.BOOLEAN()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf(INT)\nf(STRING)"), TestSpec.forStrategy(explicitSequence(DataTypes.INT(), DataTypes.BOOLEAN())) .calledWithArgumentTypes(DataTypes.BOOLEAN(), DataTypes.INT()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf(INT, BOOLEAN)"), TestSpec.forStrategy(sequence(ANY, explicit(DataTypes.INT()))) .calledWithArgumentTypes(DataTypes.BOOLEAN(), DataTypes.INT()) .calledWithArgumentTypes(DataTypes.BOOLEAN(), DataTypes.TINYINT()) .expectArgumentTypes(DataTypes.BOOLEAN(), DataTypes.INT()), TestSpec.forStrategy( sequence( new String[] {"any", "int"}, new ArgumentTypeStrategy[] { ANY, explicit(DataTypes.INT()) })) .calledWithArgumentTypes(DataTypes.STRING(), DataTypes.BOOLEAN()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf(any <ANY>, int INT)"), TestSpec.forStrategy( sequence( explicit(DataTypes.INT()), or( explicit(DataTypes.BOOLEAN()), explicit(DataTypes.INT())))) .expectSignature("f(INT, [BOOLEAN | INT])") .calledWithArgumentTypes(DataTypes.INT(), DataTypes.INT()) .calledWithArgumentTypes(DataTypes.TINYINT(), DataTypes.TINYINT()) .expectArgumentTypes(DataTypes.INT(), DataTypes.INT()), TestSpec.forStrategy( sequence( explicit(DataTypes.INT()), or( explicit(DataTypes.BOOLEAN()), explicit(DataTypes.STRING())))) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.BIGINT()) .expectErrorMessage( "Invalid input arguments. 
Expected signatures are:\nf(INT, [BOOLEAN | STRING])"), TestSpec.forStrategy(sequence(LITERAL)) .calledWithLiteralAt(0) .calledWithArgumentTypes(DataTypes.INT()) .expectArgumentTypes(DataTypes.INT()), TestSpec.forStrategy( sequence( and(LITERAL, explicit(DataTypes.STRING())), explicit(DataTypes.INT()))) .calledWithLiteralAt(0) .calledWithArgumentTypes(DataTypes.STRING(), DataTypes.INT()) .expectSignature("f([<LITERAL NOT NULL> & STRING], INT)") .expectArgumentTypes(DataTypes.STRING(), DataTypes.INT()), TestSpec.forStrategy( sequence( and(explicit(DataTypes.STRING()), LITERAL_OR_NULL), explicit(DataTypes.INT()))) .calledWithArgumentTypes(DataTypes.STRING(), DataTypes.INT()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf([STRING & <LITERAL>], INT)"), TestSpec.forStrategy( varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), explicit(DataTypes.BOOLEAN()) })) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.BOOLEAN(), DataTypes.BOOLEAN(), DataTypes.BOOLEAN()) .expectArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.BOOLEAN(), DataTypes.BOOLEAN(), DataTypes.BOOLEAN()), TestSpec.forStrategy( varyingSequence( new String[] {"var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.BOOLEAN().bridgedTo(boolean.class)) })) .calledWithArgumentTypes( DataTypes.BOOLEAN(), DataTypes.BOOLEAN(), DataTypes.BOOLEAN()) .expectSignature("f(var BOOLEAN...)") .expectArgumentTypes( DataTypes.BOOLEAN().bridgedTo(boolean.class), DataTypes.BOOLEAN().bridgedTo(boolean.class), DataTypes.BOOLEAN().bridgedTo(boolean.class)), TestSpec.forStrategy( varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), explicit(DataTypes.BOOLEAN()) })) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.STRING()) .expectArgumentTypes(DataTypes.INT(), DataTypes.STRING()), TestSpec.forStrategy( 
varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), explicit(DataTypes.BOOLEAN()) })) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.STRING()) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\nf(i INT, s STRING, var BOOLEAN...)"), TestSpec.forStrategy( varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), explicit(DataTypes.BOOLEAN()) })) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.INT(), DataTypes.BOOLEAN()) .expectErrorMessage( "Unsupported argument type. Expected type 'STRING' but actual type was 'INT'."), TestSpec.forStrategy( varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), or( explicit(DataTypes.BOOLEAN()), explicit(DataTypes.INT())) })) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.INT(), DataTypes.BOOLEAN()) .expectArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.INT(), DataTypes.BOOLEAN()), TestSpec.forStrategy( varyingSequence( new String[] {"i", "s", "var"}, new ArgumentTypeStrategy[] { explicit(DataTypes.INT()), explicit(DataTypes.STRING()), or( explicit(DataTypes.BOOLEAN()), explicit(DataTypes.INT())) })) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.STRING(), DataTypes.STRING(), DataTypes.STRING()) .expectErrorMessage( "Invalid input arguments. 
Expected signatures are:\nf(i INT, s STRING, var [BOOLEAN | INT]...)"), TestSpec.forStrategy(WILDCARD) .calledWithArgumentTypes( DataTypes.NULL(), DataTypes.STRING(), DataTypes.NULL()) .expectSignature("f(*)") .expectArgumentTypes( DataTypes.NULL(), DataTypes.STRING(), DataTypes.NULL()), TestSpec.forStrategy(WILDCARD) .typedArguments( DataTypes.INT().bridgedTo(int.class), DataTypes.STRING(), DataTypes.BOOLEAN()) .calledWithArgumentTypes( DataTypes.NULL(), DataTypes.STRING(), DataTypes.NULL()) .expectArgumentTypes( DataTypes.INT().bridgedTo(int.class), DataTypes.STRING(), DataTypes.BOOLEAN()), TestSpec.forStrategy(sequence(OUTPUT_IF_NULL, OUTPUT_IF_NULL, OUTPUT_IF_NULL)) .surroundingStrategy(explicitSequence(DataTypes.BOOLEAN())) .calledWithArgumentTypes( DataTypes.NULL(), DataTypes.STRING(), DataTypes.NULL()) .expectSignature("f(<OUTPUT>, <OUTPUT>, <OUTPUT>)") .expectArgumentTypes( DataTypes.BOOLEAN(), DataTypes.STRING(), DataTypes.BOOLEAN()), TestSpec.forStrategy(sequence(or(OUTPUT_IF_NULL, explicit(DataTypes.INT())))) .surroundingStrategy(explicitSequence(DataTypes.BOOLEAN())) .calledWithArgumentTypes(DataTypes.NULL()) .expectSignature("f([<OUTPUT> | INT])") .expectArgumentTypes(DataTypes.BOOLEAN()), TestSpec.forStrategy(explicitSequence(DataTypes.BOOLEAN())) .surroundingStrategy(WILDCARD) .calledWithArgumentTypes(DataTypes.NULL()) .expectSignature("f(BOOLEAN)") .expectArgumentTypes(DataTypes.BOOLEAN()), TestSpec.forStrategy(sequence(or(OUTPUT_IF_NULL, explicit(DataTypes.INT())))) .calledWithArgumentTypes(DataTypes.NULL()) .expectSignature("f([<OUTPUT> | INT])") .expectArgumentTypes(DataTypes.INT()), TestSpec.forStrategy(WILDCARD) .typedArguments(DataTypes.INT(), DataTypes.STRING()) .calledWithArgumentTypes(DataTypes.TINYINT(), DataTypes.STRING()) .expectSignature("f(INT, STRING)") .expectArgumentTypes(DataTypes.INT(), DataTypes.STRING()), TestSpec.forStrategy(WILDCARD) .typedArguments(DataTypes.INT(), DataTypes.STRING()) 
.calledWithArgumentTypes(DataTypes.STRING(), DataTypes.STRING()) .expectErrorMessage( "Invalid argument type at position 0. Data type INT expected but STRING passed."), TestSpec.forStrategy(WILDCARD) .namedArguments("i", "s") .typedArguments(DataTypes.INT(), DataTypes.STRING()) .expectSignature("f(i => INT, s => STRING)"), TestSpec.forStrategy( "Wildcard with count verifies arguments number", InputTypeStrategies.wildcardWithCount( ConstantArgumentCount.from(2))) .calledWithArgumentTypes(DataTypes.STRING()) .expectErrorMessage( "Invalid number of arguments. At least 2 arguments expected but 1 passed."), TestSpec.forStrategy( "Array strategy infers a common type", SpecificInputTypeStrategies.ARRAY) .expectSignature("f(<COMMON>, <COMMON>...)") .calledWithArgumentTypes( DataTypes.INT().notNull(), DataTypes.BIGINT().notNull(), DataTypes.DOUBLE(), DataTypes.DOUBLE().notNull()) .expectArgumentTypes( DataTypes.DOUBLE(), DataTypes.DOUBLE(), DataTypes.DOUBLE(), DataTypes.DOUBLE()), TestSpec.forStrategy( "Array strategy fails for no arguments", SpecificInputTypeStrategies.ARRAY) .calledWithArgumentTypes() .expectErrorMessage( "Invalid number of arguments. At least 1 arguments expected but 0 passed."), TestSpec.forStrategy( "Array strategy fails for null arguments", SpecificInputTypeStrategies.ARRAY) .calledWithArgumentTypes(DataTypes.NULL()) .expectErrorMessage("Could not find a common type for arguments: [NULL]"), TestSpec.forStrategy( "Map strategy infers common types", SpecificInputTypeStrategies.MAP) .calledWithArgumentTypes( DataTypes.INT().notNull(), DataTypes.DOUBLE(), DataTypes.BIGINT().notNull(), DataTypes.FLOAT().notNull()) .expectArgumentTypes( DataTypes.BIGINT().notNull(), DataTypes.DOUBLE(), DataTypes.BIGINT().notNull(), DataTypes.DOUBLE()), TestSpec.forStrategy( "Map strategy fails for no arguments", SpecificInputTypeStrategies.MAP) .calledWithArgumentTypes() .expectErrorMessage( "Invalid number of arguments. 
At least 2 arguments expected but 0 passed."), TestSpec.forStrategy( "Map strategy fails for an odd number of arguments", SpecificInputTypeStrategies.MAP) .calledWithArgumentTypes( DataTypes.BIGINT(), DataTypes.BIGINT(), DataTypes.BIGINT()) .expectErrorMessage("Invalid number of arguments. 3 arguments passed."), TestSpec.forStrategy("Cast strategy", SpecificInputTypeStrategies.CAST) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.BIGINT()) .calledWithLiteralAt(1, DataTypes.BIGINT()) .expectSignature("f(<ANY>, <TYPE LITERAL>)") .expectArgumentTypes(DataTypes.INT(), DataTypes.BIGINT()), TestSpec.forStrategy( "Cast strategy for invalid target type", SpecificInputTypeStrategies.CAST) .calledWithArgumentTypes(DataTypes.BOOLEAN(), DataTypes.DATE()) .calledWithLiteralAt(1, DataTypes.DATE()) .expectErrorMessage("Unsupported cast from 'BOOLEAN' to 'DATE'."), TestSpec.forStrategy( "Logical type roots instead of concrete data types", sequence( logical(LogicalTypeRoot.VARCHAR), logical(LogicalTypeRoot.DECIMAL, true), logical(LogicalTypeRoot.DECIMAL), logical(LogicalTypeRoot.BOOLEAN), logical(LogicalTypeRoot.INTEGER, false), logical(LogicalTypeRoot.INTEGER))) .calledWithArgumentTypes( DataTypes.NULL(), DataTypes.INT(), DataTypes.DOUBLE(), DataTypes.BOOLEAN().notNull(), DataTypes.INT().notNull(), DataTypes.INT().notNull()) .expectSignature( "f(<VARCHAR>, <DECIMAL NULL>, <DECIMAL>, <BOOLEAN>, <INTEGER NOT NULL>, <INTEGER>)") .expectArgumentTypes( DataTypes.VARCHAR(1), DataTypes.DECIMAL(10, 0), DataTypes.DECIMAL(30, 15), DataTypes.BOOLEAN().notNull(), DataTypes.INT().notNull(), DataTypes.INT().notNull()), TestSpec.forStrategy( "Logical type roots with wrong implicit cast", sequence(logical(LogicalTypeRoot.VARCHAR))) .calledWithArgumentTypes(DataTypes.INT()) .expectSignature("f(<VARCHAR>)") .expectErrorMessage( "Unsupported argument type. 
Expected type root 'VARCHAR' but actual type was 'INT'."), TestSpec.forStrategy( "Logical type roots with wrong nullability", sequence(logical(LogicalTypeRoot.VARCHAR, false))) .calledWithArgumentTypes(DataTypes.VARCHAR(5)) .expectSignature("f(<VARCHAR NOT NULL>)") .expectErrorMessage( "Unsupported argument type. Expected nullable type of root 'VARCHAR' but actual type was 'VARCHAR(5)'."), TestSpec.forStrategy( "Logical type family instead of concrete data types", sequence( logical(LogicalTypeFamily.CHARACTER_STRING, true), logical(LogicalTypeFamily.EXACT_NUMERIC), logical(LogicalTypeFamily.APPROXIMATE_NUMERIC), logical(LogicalTypeFamily.APPROXIMATE_NUMERIC), logical(LogicalTypeFamily.APPROXIMATE_NUMERIC, false))) .calledWithArgumentTypes( DataTypes.NULL(), DataTypes.TINYINT(), DataTypes.INT(), DataTypes.BIGINT().notNull(), DataTypes.DECIMAL(10, 2).notNull()) .expectSignature( "f(<CHARACTER_STRING NULL>, <EXACT_NUMERIC>, <APPROXIMATE_NUMERIC>, <APPROXIMATE_NUMERIC>, <APPROXIMATE_NUMERIC NOT NULL>)") .expectArgumentTypes( DataTypes.VARCHAR(1), DataTypes.TINYINT(), DataTypes.DOUBLE(), DataTypes.DOUBLE().notNull(), DataTypes.DOUBLE().notNull()), TestSpec.forStrategy( "Logical type family with invalid type", sequence(logical(LogicalTypeFamily.EXACT_NUMERIC))) .calledWithArgumentTypes(DataTypes.FLOAT()) .expectSignature("f(<EXACT_NUMERIC>)") .expectErrorMessage( "Unsupported argument type. 
Expected type of family 'EXACT_NUMERIC' but actual type was 'FLOAT'."), TestSpec.forStrategy( "Constraint argument type strategy", sequence( and( explicit(DataTypes.BOOLEAN()), constraint( "%s must be nullable.", args -> args.get(0) .getLogicalType() .isNullable())))) .calledWithArgumentTypes(DataTypes.BOOLEAN()) .expectSignature("f([BOOLEAN & <CONSTRAINT>])") .expectArgumentTypes(DataTypes.BOOLEAN()), TestSpec.forStrategy( "Constraint argument type strategy invalid", sequence( and( explicit(DataTypes.BOOLEAN().notNull()), constraint( "My constraint says %s must be nullable.", args -> args.get(0) .getLogicalType() .isNullable())))) .calledWithArgumentTypes(DataTypes.BOOLEAN().notNull()) .expectErrorMessage( "My constraint says BOOLEAN NOT NULL must be nullable."), TestSpec.forStrategy( "Composite type strategy with ROW", sequence(InputTypeStrategies.COMPOSITE)) .calledWithArgumentTypes( DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.BIGINT()))) .expectSignature("f(<COMPOSITE>)") .expectArgumentTypes( DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.BIGINT()))), TestSpec.forStrategy( "Composite type strategy with STRUCTURED type", sequence(InputTypeStrategies.COMPOSITE)) .calledWithArgumentTypes(DataTypes.of(SimpleStructuredType.class).notNull()) .expectSignature("f(<COMPOSITE>)") .expectArgumentTypes(DataTypes.of(SimpleStructuredType.class).notNull()), TestSpec.forStrategy( "Same named arguments for overloaded method.", or( sequence(explicit(DataTypes.STRING())), sequence(explicit(DataTypes.INT())))) .namedArguments("sameName") .calledWithArgumentTypes(DataTypes.BOOLEAN()) .expectErrorMessage( "Invalid input arguments. 
Expected signatures are:\nf(STRING)\nf(INT)"), TestSpec.forStrategy( "Common argument type strategy", sequence( InputTypeStrategies.COMMON_ARG, InputTypeStrategies.COMMON_ARG)) .calledWithArgumentTypes(DataTypes.INT(), DataTypes.BIGINT()) .expectSignature("f(<COMMON>, <COMMON>)") .expectArgumentTypes(DataTypes.BIGINT(), DataTypes.BIGINT()), TestSpec.forStrategy( "ArrayElement argument type strategy", sequence( logical(LogicalTypeRoot.ARRAY), SpecificInputTypeStrategies.ARRAY_ELEMENT_ARG)) .calledWithArgumentTypes( DataTypes.ARRAY(DataTypes.INT().notNull()).notNull(), DataTypes.INT()) .expectSignature("f(<ARRAY>, <ARRAY ELEMENT>)") .expectArgumentTypes( DataTypes.ARRAY(DataTypes.INT().notNull()).notNull(), DataTypes.INT()), TestSpec.forStrategy(sequence(SpecificInputTypeStrategies.ARRAY_FULLY_COMPARABLE)) .expectSignature("f(<ARRAY<COMPARABLE>>)") .calledWithArgumentTypes(DataTypes.ARRAY(DataTypes.ROW())) .expectErrorMessage( "Invalid input arguments. Expected signatures are:\n" + "f(<ARRAY<COMPARABLE>>)"), TestSpec.forStrategy( "Strategy fails if input argument type is not ARRAY", sequence(SpecificInputTypeStrategies.ARRAY_FULLY_COMPARABLE)) .calledWithArgumentTypes(DataTypes.INT()) .expectErrorMessage( "Invalid input arguments. 
Expected signatures are:\n" + "f(<ARRAY<COMPARABLE>>)"), TestSpec.forStrategy( "PROCTIME type strategy", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.PROCTIME)) .calledWithArgumentTypes(timeIndicatorType(TimestampKind.PROCTIME)) .expectSignature("f(<WINDOW REFERENCE>)") .expectArgumentTypes(timeIndicatorType(TimestampKind.PROCTIME)), TestSpec.forStrategy( "PROCTIME type strategy on non time indicator", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.PROCTIME)) .calledWithArgumentTypes(DataTypes.BIGINT()) .expectErrorMessage("Reference to a rowtime or proctime window required."), TestSpec.forStrategy( "ROWTIME type strategy", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.ROWTIME)) .calledWithArgumentTypes(timeIndicatorType(TimestampKind.ROWTIME)) .expectSignature("f(<WINDOW REFERENCE>)") .expectArgumentTypes(timeIndicatorType(TimestampKind.ROWTIME)), TestSpec.forStrategy( "ROWTIME type strategy on proctime indicator", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.ROWTIME)) .calledWithArgumentTypes(timeIndicatorType(TimestampKind.PROCTIME)) .expectErrorMessage( "A proctime window cannot provide a rowtime attribute."), TestSpec.forStrategy( "PROCTIME type strategy on rowtime indicator", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.PROCTIME)) .calledWithArgumentTypes(timeIndicatorType(TimestampKind.ROWTIME)) .expectArgumentTypes(timeIndicatorType(TimestampKind.PROCTIME)), TestSpec.forStrategy( "ROWTIME type strategy on long in batch mode", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.ROWTIME)) .calledWithArgumentTypes(DataTypes.BIGINT()) .expectArgumentTypes(DataTypes.BIGINT()), TestSpec.forStrategy( "ROWTIME type strategy on non time attribute", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.ROWTIME)) .calledWithArgumentTypes(DataTypes.SMALLINT()) .expectErrorMessage("Reference to a rowtime or proctime window required."), TestSpec.forStrategy( 
"PROCTIME type strategy on non time attribute", SpecificInputTypeStrategies.windowTimeIndicator( TimestampKind.PROCTIME)) .calledWithArgumentTypes(DataTypes.SMALLINT()) .expectErrorMessage("Reference to a rowtime or proctime window required."), TestSpec.forStrategy( "Reinterpret_cast strategy", SpecificInputTypeStrategies.REINTERPRET_CAST) .calledWithArgumentTypes( DataTypes.DATE(), DataTypes.BIGINT(), DataTypes.BOOLEAN().notNull()) .calledWithLiteralAt(1, DataTypes.BIGINT()) .calledWithLiteralAt(2, true) .expectSignature("f(<ANY>, <TYPE LITERAL>, <TRUE | FALSE>)") .expectArgumentTypes( DataTypes.DATE(), DataTypes.BIGINT(), DataTypes.BOOLEAN().notNull()), TestSpec.forStrategy( "Reinterpret_cast strategy non literal overflow", SpecificInputTypeStrategies.REINTERPRET_CAST) .calledWithArgumentTypes( DataTypes.DATE(), DataTypes.BIGINT(), DataTypes.BOOLEAN().notNull()) .calledWithLiteralAt(1, DataTypes.BIGINT()) .expectErrorMessage("Not null boolean literal expected for overflow."), TestSpec.forStrategy( "Reinterpret_cast strategy not supported cast", SpecificInputTypeStrategies.REINTERPRET_CAST) .calledWithArgumentTypes( DataTypes.INT(), DataTypes.BIGINT(), DataTypes.BOOLEAN().notNull()) .calledWithLiteralAt(1, DataTypes.BIGINT()) .calledWithLiteralAt(2, true) .expectErrorMessage("Unsupported reinterpret cast from 'INT' to 'BIGINT'")); }
class InputTypeStrategiesTest extends InputTypeStrategiesTestBase { @Override private static DataType timeIndicatorType(TimestampKind timestampKind) { return TypeConversions.fromLogicalToDataType( new LocalZonedTimestampType(false, timestampKind, 3)); } /** Simple pojo that should be converted to a Structured type. */ public static class SimpleStructuredType { public long f0; } }
class InputTypeStrategiesTest extends InputTypeStrategiesTestBase { @Override private static DataType timeIndicatorType(TimestampKind timestampKind) { return TypeConversions.fromLogicalToDataType( new LocalZonedTimestampType(false, timestampKind, 3)); } /** Simple pojo that should be converted to a Structured type. */ public static class SimpleStructuredType { public long f0; } }
No, this method is simply checking whether the list of media items provided by the user via the ACCEPT header is supported. The method that was using reflection was removed from this util.
private Response buildViolationReportResponse(ConstraintViolationException cve) { Status status = Status.BAD_REQUEST; Response.ResponseBuilder builder = Response.status(status); builder.header(Validation.VALIDATION_HEADER, "true"); MediaType mediaType = ValidatorMediaTypeUtil.getAcceptMediaTypeFromSupported(headers.getAcceptableMediaTypes()); if (mediaType == null) { mediaType = MediaType.APPLICATION_JSON_TYPE; } List<ViolationReport.Violation> violationsInReport = new ArrayList<>(cve.getConstraintViolations().size()); for (ConstraintViolation<?> cv : cve.getConstraintViolations()) { violationsInReport.add(new ViolationReport.Violation(cv.getPropertyPath().toString(), cv.getMessage())); } builder.entity(new ViolationReport("Constraint Violation", status, violationsInReport)); builder.type(mediaType); return builder.build(); }
MediaType mediaType = ValidatorMediaTypeUtil.getAcceptMediaTypeFromSupported(headers.getAcceptableMediaTypes());
private Response buildViolationReportResponse(ConstraintViolationException cve) { Status status = Status.BAD_REQUEST; Response.ResponseBuilder builder = Response.status(status); builder.header(Validation.VALIDATION_HEADER, "true"); MediaType mediaType = ValidatorMediaTypeUtil.getAcceptMediaTypeFromSupported(headers.getAcceptableMediaTypes()); if (mediaType == null) { mediaType = MediaType.APPLICATION_JSON_TYPE; } List<ViolationReport.Violation> violationsInReport = new ArrayList<>(cve.getConstraintViolations().size()); for (ConstraintViolation<?> cv : cve.getConstraintViolations()) { violationsInReport.add(new ViolationReport.Violation(cv.getPropertyPath().toString(), cv.getMessage())); } builder.entity(new ViolationReport("Constraint Violation", status, violationsInReport)); builder.type(mediaType); return builder.build(); }
class ResteasyReactiveViolationExceptionMapper implements ExceptionMapper<ValidationException> { @Context HttpHeaders headers; @Override public Response toResponse(ValidationException exception) { if (!(exception instanceof ResteasyReactiveViolationException)) { throw exception; } ResteasyReactiveViolationException resteasyViolationException = (ResteasyReactiveViolationException) exception; if (hasReturnValueViolation(resteasyViolationException.getConstraintViolations())) { throw resteasyViolationException; } return buildViolationReportResponse(resteasyViolationException); } private boolean hasReturnValueViolation(Set<ConstraintViolation<?>> violations) { for (ConstraintViolation<?> violation : violations) { if (isReturnValueViolation(violation)) { return true; } } return false; } private boolean isReturnValueViolation(ConstraintViolation<?> violation) { Iterator<Path.Node> nodes = violation.getPropertyPath().iterator(); Path.Node firstNode = nodes.next(); if (firstNode.getKind() != ElementKind.METHOD) { return false; } Path.Node secondNode = nodes.next(); return secondNode.getKind() == ElementKind.RETURN_VALUE; } /** * As spec doesn't say anything about the report format, * we just use https: * This also what Reactive Routes uses */ public static class ViolationReport { private String title; private int status; private List<Violation> violations; /** * Requires no-args constructor for some serializers. 
*/ public ViolationReport() { } public ViolationReport(String title, Status status, List<Violation> violations) { this.title = title; this.status = status.getStatusCode(); this.violations = violations; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public int getStatus() { return status; } public void setStatus(int status) { this.status = status; } public List<Violation> getViolations() { return violations; } public void setViolations(List<Violation> violations) { this.violations = violations; } @Override public String toString() { return "ViolationReport{" + "title='" + title + '\'' + ", status=" + status + ", violations=" + violations + '}'; } public static class Violation { private String field; private String message; /** * Requires no-args constructor for some serializers. */ public Violation() { } public Violation(String field, String message) { this.field = field; this.message = message; } public String getField() { return field; } public void setField(String field) { this.field = field; } public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } @Override public String toString() { return "Violation{" + "field='" + field + '\'' + ", message='" + message + '\'' + '}'; } } } }
class ResteasyReactiveViolationExceptionMapper implements ExceptionMapper<ValidationException> { @Context HttpHeaders headers; @Override public Response toResponse(ValidationException exception) { if (!(exception instanceof ResteasyReactiveViolationException)) { throw exception; } ResteasyReactiveViolationException resteasyViolationException = (ResteasyReactiveViolationException) exception; if (hasReturnValueViolation(resteasyViolationException.getConstraintViolations())) { throw resteasyViolationException; } return buildViolationReportResponse(resteasyViolationException); } private boolean hasReturnValueViolation(Set<ConstraintViolation<?>> violations) { for (ConstraintViolation<?> violation : violations) { if (isReturnValueViolation(violation)) { return true; } } return false; } private boolean isReturnValueViolation(ConstraintViolation<?> violation) { Iterator<Path.Node> nodes = violation.getPropertyPath().iterator(); Path.Node firstNode = nodes.next(); if (firstNode.getKind() != ElementKind.METHOD) { return false; } Path.Node secondNode = nodes.next(); return secondNode.getKind() == ElementKind.RETURN_VALUE; } /** * As spec doesn't say anything about the report format, * we just use https: * This also what Reactive Routes uses */ public static class ViolationReport { private String title; private int status; private List<Violation> violations; /** * Requires no-args constructor for some serializers. 
*/ public ViolationReport() { } public ViolationReport(String title, Status status, List<Violation> violations) { this.title = title; this.status = status.getStatusCode(); this.violations = violations; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public int getStatus() { return status; } public void setStatus(int status) { this.status = status; } public List<Violation> getViolations() { return violations; } public void setViolations(List<Violation> violations) { this.violations = violations; } @Override public String toString() { return "ViolationReport{" + "title='" + title + '\'' + ", status=" + status + ", violations=" + violations + '}'; } public static class Violation { private String field; private String message; /** * Requires no-args constructor for some serializers. */ public Violation() { } public Violation(String field, String message) { this.field = field; this.message = message; } public String getField() { return field; } public void setField(String field) { this.field = field; } public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } @Override public String toString() { return "Violation{" + "field='" + field + '\'' + ", message='" + message + '\'' + '}'; } } } }
Added a synchronized lock in SparkEtlHandler for cluster id. Now initRepository operations are protected by lock if they are in same cluster
private void initRepository() throws LoadException { LOG.info("start to init remote repository"); boolean needUpload = false; boolean needReplace = false; CHECK: { if (Strings.isNullOrEmpty(remoteRepositoryPath) || brokerDesc == null) { break CHECK; } if (!checkCurrentArchiveExists()) { needUpload = true; break CHECK; } String remoteArchivePath = getRemoteArchivePath(currentDppVersion); List<SparkLibrary> libraries = Lists.newArrayList(); getLibraries(remoteArchivePath, libraries); if (libraries.size() != 2) { needUpload = true; needReplace = true; break CHECK; } currentArchive.libraries.addAll(libraries); for (SparkLibrary library : currentArchive.libraries) { String localMd5sum = null; switch (library.libType) { case DPP: localMd5sum = getMd5String(localDppPath); break; case SPARK2X: localMd5sum = getMd5String(localSpark2xPath); break; default: Preconditions.checkState(false, "wrong library type: " + library.libType); break; } if (!localMd5sum.equals(library.md5sum)) { needUpload = true; needReplace = true; break; } } } if (needUpload) { uploadArchive(needReplace); } isInit = true; LOG.info("init spark repository success, current dppVersion={}, archive path={}, libraries size={}", currentDppVersion, currentArchive.remotePath, currentArchive.libraries.size()); }
LOG.info("start to init remote repository");
private void initRepository() throws LoadException { LOG.info("start to init remote repository"); boolean needUpload = false; boolean needReplace = false; CHECK: { if (Strings.isNullOrEmpty(remoteRepositoryPath) || brokerDesc == null) { break CHECK; } if (!checkCurrentArchiveExists()) { needUpload = true; break CHECK; } String remoteArchivePath = getRemoteArchivePath(currentDppVersion); List<SparkLibrary> libraries = Lists.newArrayList(); getLibraries(remoteArchivePath, libraries); if (libraries.size() != 2) { needUpload = true; needReplace = true; break CHECK; } currentArchive.libraries.addAll(libraries); for (SparkLibrary library : currentArchive.libraries) { String localMd5sum = null; switch (library.libType) { case DPP: localMd5sum = getMd5String(localDppPath); break; case SPARK2X: localMd5sum = getMd5String(localSpark2xPath); break; default: Preconditions.checkState(false, "wrong library type: " + library.libType); break; } if (!localMd5sum.equals(library.md5sum)) { needUpload = true; needReplace = true; break; } } } if (needUpload) { uploadArchive(needReplace); } LOG.info("init spark repository success, current dppVersion={}, archive path={}, libraries size={}", currentDppVersion, currentArchive.remotePath, currentArchive.libraries.size()); }
class SparkRepository { private static final Logger LOG = LogManager.getLogger(SparkRepository.class); public static final String REPOSITORY_DIR = "__spark_repository__"; public static final String PREFIX_ARCHIVE = "__archive_"; public static final String PREFIX_LIB = "__lib_"; public static final String SPARK_DPP = "spark-dpp"; public static final String SPARK_2X = "spark-2x"; public static final String SUFFIX = ".zip"; private static final String PATH_DELIMITER = "/"; private static final String FILE_NAME_SEPARATOR = "_"; private String remoteRepositoryPath; private BrokerDesc brokerDesc; private String localDppPath; private String localSpark2xPath; private String currentDppVersion; private SparkArchive currentArchive; private ReentrantReadWriteLock rwLock; private boolean isInit; public SparkRepository(String remoteRepositoryPath, BrokerDesc brokerDesc) { this.remoteRepositoryPath = remoteRepositoryPath; this.brokerDesc = brokerDesc; this.currentDppVersion = FeConstants.spark_dpp_version; currentArchive = new SparkArchive(getRemoteArchivePath(currentDppVersion), currentDppVersion); this.rwLock = new ReentrantReadWriteLock(); this.isInit = false; this.localDppPath = PaloFe.DORIS_HOME_DIR + "/spark-dpp/spark-dpp.jar"; if (!Strings.isNullOrEmpty(Config.spark_resource_path)) { this.localSpark2xPath = Config.spark_resource_path; } else { this.localSpark2xPath = Config.spark_home_default_dir + "/jars/spark-2x.zip"; } } public boolean prepare() throws LoadException { if (!isInit) { initRepository(); } return isInit; } public boolean checkCurrentArchiveExists() { boolean result = false; Preconditions.checkNotNull(remoteRepositoryPath); String remotePath = getRemoteArchivePath(currentDppVersion); readLock(); try { result = BrokerUtil.checkPathExist(remotePath, brokerDesc); LOG.info("check archive exists in repository, {}", result); } catch (UserException e) { LOG.warn("Failed to check remote archive exist, path={}, version={}", remotePath, currentDppVersion); } finally { 
readUnlock(); } return result; } private void uploadArchive(boolean isReplace) throws LoadException { writeLock(); try { String remoteArchivePath = getRemoteArchivePath(currentDppVersion); if (isReplace) { BrokerUtil.deletePath(remoteArchivePath, brokerDesc); currentArchive.libraries.clear(); } String srcFilePath = null; { srcFilePath = localDppPath; String md5sum = getMd5String(srcFilePath); long size = getFileSize(srcFilePath); String fileName = getFileName(PATH_DELIMITER, srcFilePath); String destFilePath = remoteArchivePath + PATH_DELIMITER + assemblyFileName(PREFIX_LIB, md5sum, fileName, ""); upload(srcFilePath, destFilePath); currentArchive.libraries.add(new SparkLibrary(destFilePath, md5sum, SparkLibrary.LibType.DPP, size)); } { srcFilePath = localSpark2xPath; String md5sum = getMd5String(srcFilePath); long size = getFileSize(srcFilePath); String fileName = getFileName(PATH_DELIMITER, srcFilePath); String destFilePath = remoteArchivePath + PATH_DELIMITER + assemblyFileName(PREFIX_LIB, md5sum, fileName, ""); upload(srcFilePath, destFilePath); currentArchive.libraries.add(new SparkLibrary(destFilePath, md5sum, SparkLibrary.LibType.SPARK2X, size)); } LOG.info("finished to upload archive to repository, currentDppVersion={}, path={}", currentDppVersion, remoteArchivePath); } catch (UserException e) { throw new LoadException(e.getMessage()); } finally { writeUnlock(); } } private void getLibraries(String remoteArchivePath, List<SparkLibrary> libraries) throws LoadException { List<TBrokerFileStatus> fileStatuses = Lists.newArrayList(); readLock(); try { LOG.info("input remote archive path, path={}", remoteArchivePath); BrokerUtil.parseFile(remoteArchivePath + "/*", brokerDesc, fileStatuses); } catch (UserException e) { throw new LoadException(e.getMessage()); } finally { readUnlock(); } LOG.info("get file statuses, size={} ", fileStatuses.size()); for (TBrokerFileStatus fileStatus : fileStatuses) { LOG.info("get file status " + fileStatus.path); String fileName = 
getFileName(PATH_DELIMITER, fileStatus.path); LOG.info("get file name " + fileName); if (!fileName.startsWith(PREFIX_LIB)) { continue; } String[] lib_arg = unWrap(PREFIX_LIB, SUFFIX, fileName).split(FILE_NAME_SEPARATOR); LOG.info("get lib arg, length={}, arg[0]={}, arg[1]={}", lib_arg.length, lib_arg[0], lib_arg[1]); if (lib_arg.length != 2) { continue; } String md5sum = lib_arg[0]; String type = lib_arg[1]; SparkLibrary.LibType libType = null; switch (type) { case SPARK_DPP: libType = SparkLibrary.LibType.DPP; break; case SPARK_2X: libType = SparkLibrary.LibType.SPARK2X; break; default: Preconditions.checkState(false, "wrong library type: " + type); break; } SparkLibrary remoteFile = new SparkLibrary(fileStatus.path, md5sum, libType, fileStatus.size); libraries.add(remoteFile); LOG.info("get Libraries from remote archive, archive path={}, library={}, md5sum={}, size={}", remoteArchivePath, remoteFile.remotePath, remoteFile.md5sum, remoteFile.size); } } private String getMd5String(String filePath) throws LoadException { File file = new File(filePath); String md5sum = null; try { md5sum = DigestUtils.md5Hex(new FileInputStream(file)); Preconditions.checkNotNull(md5sum); LOG.info("get md5sum from file {}, md5sum={}", filePath, md5sum); return md5sum; } catch (FileNotFoundException e) { throw new LoadException("file " + filePath + "dose not exist"); } catch (IOException e) { throw new LoadException("failed to get md5sum from file " + filePath); } } private long getFileSize(String filePath) throws LoadException { File file = new File(filePath); long size = file.length(); if (size <= 0) { throw new LoadException("failed to get size from file " + filePath); } return size; } private void upload(String srcFilePath, String destFilePath) throws LoadException { try { BrokerUtil.writeFile(srcFilePath, destFilePath , brokerDesc); LOG.info("finished to upload file, localPath={}, remotePath={}", srcFilePath, destFilePath); } catch (UserException e) { throw new 
LoadException("failed to upload lib to repository, srcPath=" +srcFilePath + " destPath=" + destFilePath + " message=" + e.getMessage()); } } public SparkArchive getCurrentArchive() { return currentArchive; } private static String getFileName(String delimiter, String path) { return path.substring(path.lastIndexOf(delimiter) + 1); } private static String unWrap(String prefix, String suffix, String fileName) { return fileName.substring(prefix.length(), fileName.length() - suffix.length()); } private static String joinPrefix(String prefix, String fileName) { return prefix + fileName; } public static String assemblyFileName(String prefix, String md5sum, String fileName, String suffix) { return prefix + md5sum + FILE_NAME_SEPARATOR + fileName + suffix; } private String getRemoteArchivePath(String version) { return Joiner.on(PATH_DELIMITER).join(remoteRepositoryPath, joinPrefix(PREFIX_ARCHIVE, version)); } public void readLock() { this.rwLock.readLock().lock(); } public void readUnlock() { this.rwLock.readLock().unlock(); } public void writeLock() { this.rwLock.writeLock().lock(); } public void writeUnlock() { this.rwLock.writeLock().unlock(); } public static class SparkArchive { public String remotePath; public String version; public List<SparkLibrary> libraries; public SparkArchive(String remotePath, String version) { this.remotePath = remotePath; this.version = version; this.libraries = Lists.newArrayList(); } } public static class SparkLibrary { public String remotePath; public String md5sum; public long size; public LibType libType; public enum LibType { DPP, SPARK2X } public SparkLibrary(String remotePath, String md5sum, LibType libType, long size) { this.remotePath = remotePath; this.md5sum = md5sum; this.libType = libType; this.size = size; } } }
/**
 * Manages a broker-backed remote repository of Spark library archives: the DPP jar and
 * the spark-2x bundle. Each DPP version gets one archive directory
 * ({@code __archive_<version>}) holding library files whose names embed an md5 checksum
 * and a type tag, e.g. {@code __lib_<md5>_spark-dpp.zip}.
 *
 * NOTE(review): {@link #prepare()} delegates to {@code initRepository()}, which is
 * defined elsewhere in this file and not visible in this excerpt.
 */
class SparkRepository {
    private static final Logger LOG = LogManager.getLogger(SparkRepository.class);

    public static final String REPOSITORY_DIR = "__spark_repository__";
    public static final String PREFIX_ARCHIVE = "__archive_";
    public static final String PREFIX_LIB = "__lib_";
    public static final String SPARK_DPP = "spark-dpp";
    public static final String SPARK_2X = "spark-2x";
    public static final String SUFFIX = ".zip";

    private static final String PATH_DELIMITER = "/";
    private static final String FILE_NAME_SEPARATOR = "_";
    private static final String DPP_RESOURCE = "/spark-dpp/spark-dpp.jar";
    private static final String SPARK_RESOURCE = "/jars/spark-2x.zip";

    private String remoteRepositoryPath;
    private BrokerDesc brokerDesc;
    private String localDppPath;
    private String localSpark2xPath;
    private String currentDppVersion;
    private SparkArchive currentArchive;

    public SparkRepository(String remoteRepositoryPath, BrokerDesc brokerDesc) {
        this.remoteRepositoryPath = remoteRepositoryPath;
        this.brokerDesc = brokerDesc;
        this.currentDppVersion = Config.spark_dpp_version;
        this.currentArchive = new SparkArchive(getRemoteArchivePath(currentDppVersion), currentDppVersion);
        this.localDppPath = PaloFe.DORIS_HOME_DIR + DPP_RESOURCE;
        // An explicitly configured spark resource path wins over the default under SPARK_HOME.
        if (!Strings.isNullOrEmpty(Config.spark_resource_path)) {
            this.localSpark2xPath = Config.spark_resource_path;
        } else {
            this.localSpark2xPath = Config.spark_home_default_dir + SPARK_RESOURCE;
        }
    }

    /** Prepares the repository. Delegates to initRepository() (defined elsewhere in this file). */
    public void prepare() throws LoadException {
        initRepository();
    }

    /**
     * Returns true iff the archive directory for the current DPP version exists remotely.
     * Broker failures are logged and reported as "does not exist" (best-effort check).
     */
    private boolean checkCurrentArchiveExists() {
        boolean result = false;
        Preconditions.checkNotNull(remoteRepositoryPath);
        String remotePath = getRemoteArchivePath(currentDppVersion);
        try {
            result = BrokerUtil.checkPathExist(remotePath, brokerDesc);
            LOG.info("check archive exists in repository, {}", result);
        } catch (UserException e) {
            LOG.warn("Failed to check remote archive exist, path={}, version={}", remotePath, currentDppVersion);
        }
        return result;
    }

    /**
     * Uploads both local libraries (DPP jar and spark-2x bundle) into the remote archive
     * for the current DPP version, registering them in {@link #currentArchive}.
     *
     * @param isReplace if true, the remote archive directory is deleted first and the
     *                  in-memory library list is cleared
     * @throws LoadException if any broker operation or local file access fails
     */
    private void uploadArchive(boolean isReplace) throws LoadException {
        try {
            String remoteArchivePath = getRemoteArchivePath(currentDppVersion);
            if (isReplace) {
                BrokerUtil.deletePath(remoteArchivePath, brokerDesc);
                currentArchive.libraries.clear();
            }
            // NOTE(review): the two previously copy-pasted upload blocks are factored
            // into uploadLibrary(); behavior is unchanged.
            uploadLibrary(localDppPath, remoteArchivePath, SparkLibrary.LibType.DPP);
            uploadLibrary(localSpark2xPath, remoteArchivePath, SparkLibrary.LibType.SPARK2X);
            LOG.info("finished to upload archive to repository, currentDppVersion={}, path={}",
                    currentDppVersion, remoteArchivePath);
        } catch (UserException e) {
            throw new LoadException(e.getMessage());
        }
    }

    // Uploads one local library file into the remote archive and records it in currentArchive.
    private void uploadLibrary(String srcFilePath, String remoteArchivePath, SparkLibrary.LibType libType)
            throws LoadException {
        String md5sum = getMd5String(srcFilePath);
        long size = getFileSize(srcFilePath);
        String fileName = getFileName(PATH_DELIMITER, srcFilePath);
        String destFilePath = remoteArchivePath + PATH_DELIMITER
                + assemblyFileName(PREFIX_LIB, md5sum, fileName, "");
        upload(srcFilePath, destFilePath);
        currentArchive.libraries.add(new SparkLibrary(destFilePath, md5sum, libType, size));
    }

    /**
     * Lists the library files under {@code remoteArchivePath} and appends a
     * {@link SparkLibrary} entry for each recognized one to {@code libraries}.
     * File names that do not match the {@code __lib_<md5>_<type>} pattern are skipped.
     *
     * @throws LoadException if the remote listing fails
     */
    private void getLibraries(String remoteArchivePath, List<SparkLibrary> libraries) throws LoadException {
        List<TBrokerFileStatus> fileStatuses = Lists.newArrayList();
        try {
            BrokerUtil.parseFile(remoteArchivePath + "/*", brokerDesc, fileStatuses);
        } catch (UserException e) {
            throw new LoadException(e.getMessage());
        }
        for (TBrokerFileStatus fileStatus : fileStatuses) {
            String fileName = getFileName(PATH_DELIMITER, fileStatus.path);
            if (!fileName.startsWith(PREFIX_LIB)) {
                continue;
            }
            // Expected layout after stripping prefix/suffix: "<md5>_<type>".
            String[] lib_arg = unWrap(PREFIX_LIB, SUFFIX, fileName).split(FILE_NAME_SEPARATOR);
            if (lib_arg.length != 2) {
                continue;
            }
            String md5sum = lib_arg[0];
            String type = lib_arg[1];
            SparkLibrary.LibType libType = null;
            switch (type) {
                case SPARK_DPP:
                    libType = SparkLibrary.LibType.DPP;
                    break;
                case SPARK_2X:
                    libType = SparkLibrary.LibType.SPARK2X;
                    break;
                default:
                    Preconditions.checkState(false, "wrong library type: " + type);
                    break;
            }
            SparkLibrary remoteFile = new SparkLibrary(fileStatus.path, md5sum, libType, fileStatus.size);
            libraries.add(remoteFile);
            LOG.info("get Libraries from remote archive, archive path={}, library={}, md5sum={}, size={}",
                    remoteArchivePath, remoteFile.remotePath, remoteFile.md5sum, remoteFile.size);
        }
    }

    /**
     * Computes the md5 hex digest of the file at {@code filePath}.
     *
     * @throws LoadException if the file does not exist or cannot be read
     */
    public String getMd5String(String filePath) throws LoadException {
        File file = new File(filePath);
        // try-with-resources: the previous version never closed the FileInputStream (leak).
        try (FileInputStream input = new FileInputStream(file)) {
            String md5sum = DigestUtils.md5Hex(input);
            Preconditions.checkNotNull(md5sum);
            LOG.debug("get md5sum from file {}, md5sum={}", filePath, md5sum);
            return md5sum;
        } catch (FileNotFoundException e) {
            // Fixed message: was "...dose not exist" with a missing space.
            throw new LoadException("file " + filePath + " does not exist");
        } catch (IOException e) {
            throw new LoadException("failed to get md5sum from file " + filePath);
        }
    }

    /**
     * Returns the size in bytes of the file at {@code filePath}.
     *
     * @throws LoadException if the file is missing or empty (File.length() returns 0 for both)
     */
    public long getFileSize(String filePath) throws LoadException {
        File file = new File(filePath);
        long size = file.length();
        if (size <= 0) {
            throw new LoadException("failed to get size from file " + filePath);
        }
        return size;
    }

    // Copies one local file to the remote path through the broker.
    private void upload(String srcFilePath, String destFilePath) throws LoadException {
        try {
            BrokerUtil.writeFile(srcFilePath, destFilePath, brokerDesc);
            LOG.info("finished to upload file, localPath={}, remotePath={}", srcFilePath, destFilePath);
        } catch (UserException e) {
            throw new LoadException("failed to upload lib to repository, srcPath=" + srcFilePath
                    + " destPath=" + destFilePath + " message=" + e.getMessage());
        }
    }

    public SparkArchive getCurrentArchive() {
        return currentArchive;
    }

    // Returns the substring after the last occurrence of delimiter (the file name part).
    private static String getFileName(String delimiter, String path) {
        return path.substring(path.lastIndexOf(delimiter) + 1);
    }

    // Strips a known prefix and suffix from a file name.
    private static String unWrap(String prefix, String suffix, String fileName) {
        return fileName.substring(prefix.length(), fileName.length() - suffix.length());
    }

    private static String joinPrefix(String prefix, String fileName) {
        return prefix + fileName;
    }

    /** Builds "<prefix><md5sum>_<fileName><suffix>" — the on-repository library file name. */
    public static String assemblyFileName(String prefix, String md5sum, String fileName, String suffix) {
        return prefix + md5sum + FILE_NAME_SEPARATOR + fileName + suffix;
    }

    // "<remoteRepositoryPath>/__archive_<version>"
    private String getRemoteArchivePath(String version) {
        return Joiner.on(PATH_DELIMITER).join(remoteRepositoryPath, joinPrefix(PREFIX_ARCHIVE, version));
    }

    /** One versioned archive directory in the repository and the libraries found in it. */
    public static class SparkArchive {
        public String remotePath;
        public String version;
        public List<SparkLibrary> libraries;

        public SparkArchive(String remotePath, String version) {
            this.remotePath = remotePath;
            this.version = version;
            this.libraries = Lists.newArrayList();
        }

        /** Returns the DPP library, or null if none is registered. */
        public SparkLibrary getDppLibrary() {
            return libraries.stream()
                    .filter(lib -> lib.libType == SparkLibrary.LibType.DPP)
                    .findFirst()
                    .orElse(null);
        }

        /** Returns the spark-2x library, or null if none is registered. */
        public SparkLibrary getSpark2xLibrary() {
            return libraries.stream()
                    .filter(lib -> lib.libType == SparkLibrary.LibType.SPARK2X)
                    .findFirst()
                    .orElse(null);
        }
    }

    /** One library file stored in the repository: remote location, checksum, size, and kind. */
    public static class SparkLibrary {
        public String remotePath;
        public String md5sum;
        public long size;
        public LibType libType;

        public enum LibType {
            DPP, SPARK2X
        }

        public SparkLibrary(String remotePath, String md5sum, LibType libType, long size) {
            this.remotePath = remotePath;
            this.md5sum = md5sum;
            this.libType = libType;
            this.size = size;
        }
    }
}
`testFile` starts the parsing context with `TOP_LEVEL_NODE`. Let's move this code inside a function.
public void testIntermediateClauseStartRecovery() { testFile("query-expr/query_expr_source_68.bal", "query-expr/query_expr_assert_68.json"); }
testFile("query-expr/query_expr_source_68.bal", "query-expr/query_expr_assert_68.json");
public void testIntermediateClauseStartRecovery() { testFile("query-expr/query_expr_source_68.bal", "query-expr/query_expr_assert_68.json"); }
class QueryExpressionTest extends AbstractExpressionsTest { @Test public void testSimplestQuery() { test("from int a in b select c", "query-expr/query_expr_assert_01.json"); } @Test public void testQueryWithFromIntermediateClause() { test("from int a in b from int c in d select e", "query-expr/query_expr_assert_02.json"); } @Test public void testQueryWithWhereIntermediateClause() { test("from int a in b where c select d", "query-expr/query_expr_assert_03.json"); } @Test public void testQueryWithLetIntermediateClause() { test("from int a in b let int c = d select e", "query-expr/query_expr_assert_04.json"); test("from int a in b let @bar{A:B} int c = d, int e = f select e", "query-expr/query_expr_assert_05.json"); } @Test public void testQueryWithMultipleIntermediateClauses() { test("from int a in b where c where d select e", "query-expr/query_expr_assert_06.json"); test("from int a in b where c from int d in e select f", "query-expr/query_expr_assert_07.json"); test("from int a in b from int d in e let int f = g, int h = i where j select k", "query-expr/query_expr_assert_08.json"); } @Test public void testQueryWithStreamKeyword() { test("stream from int a in b select e", "query-expr/query_expr_assert_09.json"); } @Test public void testQueryWithTableKeyword() { test("table key() from int a in b select e", "query-expr/query_expr_assert_10.json"); test("table key(name, age) from int a in b select e", "query-expr/query_expr_assert_11.json"); } @Test public void testQueryWithOrderByClause() { test("from int a in b where c let int d = e order by f select g", "query-expr/query_expr_assert_39.json"); test("from int a in b where c let int d = e order by f ascending, g select h", "query-expr" + "/query_expr_assert_40.json"); test("from int a in b order by c ascending, d descending select e", "query-expr" + "/query_expr_assert_41.json"); } @Test public void testQueryWithOnConflictAndLimitClauses() { test("from int a in b select c on conflict d", 
"query-expr/query_expr_assert_51.json"); test("from int a in b limit c select d", "query-expr/query_expr_assert_52.json"); test("from int a in b limit c select d on conflict e", "query-expr/query_expr_assert_53.json"); } @Test public void testQueryWithInnerJoinClause() { test("from int a in b join int c in d on e equals f select g", "query-expr/query_expr_assert_54.json"); } @Test public void testQueryWithOuterJoinClause() { test("from int a in b outer join int c in d on e equals f select g", "query-expr/query_expr_assert_55.json"); } @Test public void testNestedQueryExpr() { testFile("query-expr/query_expr_source_67.bal", "query-expr/query_expr_assert_67.json"); } @Test public void testQueryWithMissingKeySpecifier() { test("table from int a in b select c", "query-expr/query_expr_assert_12.json"); } @Test(enabled = false) public void testQueryWithMissingTableKeyword() { test("key() from int a in b select c", "query-expr/query_expr_assert_13.json"); } @Test public void testQueryFromClauseWithMissingFromKeyword() { test("int a in b select c", "query-expr/query_expr_assert_14.json"); test("from int a in b int c in d select e", "query-expr/query_expr_assert_15.json"); test("from int a in b where c int d in e select f", "query-expr/query_expr_assert_16.json"); test("from int a in b let int c = d int d in e select f", "query-expr/query_expr_assert_17.json"); } @Test public void testQueryFromClauseWithMissingVarName() { test("from int in b select c", "query-expr/query_expr_assert_18.json"); } @Test public void testQueryFromClauseWithMissingInKeyword() { test("from int a b select c", "query-expr/query_expr_assert_19.json"); } @Test public void testQueryFromClauseWithMissingExpression() { test("from int a in select c", "query-expr/query_expr_assert_20.json"); test("from int a in b from int c in where d select e", "query-expr/query_expr_assert_21.json"); } @Test public void testQuerySelectClauseWithMissingSelectKeyword() { test("from int a in b c", 
"query-expr/query_expr_assert_22.json"); } @Test public void testQuerySelectClauseWithMissingExpression() { test("from int a in b select", "query-expr/query_expr_assert_23.json"); } @Test public void testQueryWhereClauseWithMissingExpression() { test("from int a in b where select d", "query-expr/query_expr_assert_24.json"); } @Test public void testQueryLetClauseWithMissingLetKeyword() { test("from int a in b int c = d select e", "query-expr/query_expr_assert_25.json"); test("from int a in b @A{} int c = d select e", "query-expr/query_expr_assert_26.json"); } @Test public void testQueryLetClauseWithMissingVarName() { test("from int a in b let int = d select e", "query-expr/query_expr_assert_27.json"); } @Test public void testQueryLetClauseWithMissingEqualToken() { test("from int a in b let int c d select e", "query-expr/query_expr_assert_28.json"); } @Test public void testQueryLetClauseWithMissingComma() { test("from int a in b let int c = d int e = f select g", "query-expr/query_expr_assert_29.json"); } @Test public void testQueryWithExtraToken() { test("table foo key() from int a in b select g", "query-expr/query_expr_assert_30.json"); test("table key() foo from int a in b select g", "query-expr/query_expr_assert_31.json"); } @Test(enabled = false) public void testQueryWithTwoKeySpecifiersWithExtraTokenInBetween() { test("table key(a) foo key(b) []", "query-expr/query_expr_assert_32.json"); } @Test public void testQueryWithExtraClausesAfterSelectClause() { test("from int a in b select c where d", "query-expr/query_expr_assert_33.json"); test("from int a in b select c select d", "query-expr/query_expr_assert_34.json"); } @Test public void testQueryWithOperatorPrecedence() { test("from int a in b select c + from int d in e select f", "query-expr/query_expr_assert_35.json"); } @Test public void testQueryWithOperatorPrecedenceWithExtraClauses() { test("from int a in b select c where e + from int d in e select f", "query-expr/query_expr_assert_36.json"); test("from int 
a in b select c from int d in e + from int f in g select h", "query-expr/query_expr_assert_37.json"); test("from int a in b select c let int a = b + from int f in g select h", "query-expr/query_expr_assert_38.json"); } @Test public void testQueryWithOrderByClauseRecovery() { test("from int a in b order by select c", "query-expr/query_expr_assert_42.json"); test("from int a in b order c select d", "query-expr/query_expr_assert_43.json"); test("from int a in b order by c ascending d e select f", "query-expr/query_expr_assert_44.json"); test("from int a in b by c select d", "query-expr/query_expr_assert_45.json"); test("from int a in b order by ascending select e", "query-expr/query_expr_assert_46.json"); } @Test public void testQueryWithOnConflictAndLimitClauseRecovery() { test("from int a in b select c on d", "query-expr/query_expr_assert_47.json"); test("from int a in b select c conflict d", "query-expr/query_expr_assert_48.json"); test("from int a in b limit select", "query-expr/query_expr_assert_49.json"); test("from int a in b limit select conflict", "query-expr/query_expr_assert_50.json"); } @Test public void testQueryWithJoinClauseRecovery() { test("from int a in b join in d on e equals f select g", "query-expr/query_expr_assert_56.json"); test("from int a in b join int c d on e equals f select g", "query-expr/query_expr_assert_57.json"); test("from int a in b join int c in on e equals f select g", "query-expr/query_expr_assert_59.json"); test("from int a in b outer int c in d on e equals f select g", "query-expr/query_expr_assert_63.json"); } @Test public void testQueryWithJoinOnConditionRecovery() { test("from int a in b join int c in d on select g", "query-expr/query_expr_assert_58.json"); test("from int a in b join int c in d e equals f select g", "query-expr/query_expr_assert_60.json"); test("from int a in b join int c in d select g", "query-expr/query_expr_assert_61.json"); test("from int a in b outer join int c in d select g", 
"query-expr/query_expr_assert_62.json"); test("from int a in b equals select g", "query-expr/query_expr_assert_64.json"); } @Test public void testIncompleteClauses() { testFile("query-expr/query_expr_source_65.bal", "query-expr/query_expr_assert_65.json"); testFile("query-expr/query_expr_source_66.bal", "query-expr/query_expr_assert_66.json"); } @Test }
class QueryExpressionTest extends AbstractExpressionsTest { @Test public void testSimplestQuery() { test("from int a in b select c", "query-expr/query_expr_assert_01.json"); } @Test public void testQueryWithFromIntermediateClause() { test("from int a in b from int c in d select e", "query-expr/query_expr_assert_02.json"); } @Test public void testQueryWithWhereIntermediateClause() { test("from int a in b where c select d", "query-expr/query_expr_assert_03.json"); } @Test public void testQueryWithLetIntermediateClause() { test("from int a in b let int c = d select e", "query-expr/query_expr_assert_04.json"); test("from int a in b let @bar{A:B} int c = d, int e = f select e", "query-expr/query_expr_assert_05.json"); } @Test public void testQueryWithMultipleIntermediateClauses() { test("from int a in b where c where d select e", "query-expr/query_expr_assert_06.json"); test("from int a in b where c from int d in e select f", "query-expr/query_expr_assert_07.json"); test("from int a in b from int d in e let int f = g, int h = i where j select k", "query-expr/query_expr_assert_08.json"); } @Test public void testQueryWithStreamKeyword() { test("stream from int a in b select e", "query-expr/query_expr_assert_09.json"); } @Test public void testQueryWithTableKeyword() { test("table key() from int a in b select e", "query-expr/query_expr_assert_10.json"); test("table key(name, age) from int a in b select e", "query-expr/query_expr_assert_11.json"); } @Test public void testQueryWithOrderByClause() { test("from int a in b where c let int d = e order by f select g", "query-expr/query_expr_assert_39.json"); test("from int a in b where c let int d = e order by f ascending, g select h", "query-expr" + "/query_expr_assert_40.json"); test("from int a in b order by c ascending, d descending select e", "query-expr" + "/query_expr_assert_41.json"); } @Test public void testQueryWithOnConflictAndLimitClauses() { test("from int a in b select c on conflict d", 
"query-expr/query_expr_assert_51.json"); test("from int a in b limit c select d", "query-expr/query_expr_assert_52.json"); test("from int a in b limit c select d on conflict e", "query-expr/query_expr_assert_53.json"); } @Test public void testQueryWithInnerJoinClause() { test("from int a in b join int c in d on e equals f select g", "query-expr/query_expr_assert_54.json"); } @Test public void testQueryWithOuterJoinClause() { test("from int a in b outer join int c in d on e equals f select g", "query-expr/query_expr_assert_55.json"); } @Test public void testNestedQueryExpr() { testFile("query-expr/query_expr_source_67.bal", "query-expr/query_expr_assert_67.json"); } @Test public void testQueryWithMissingKeySpecifier() { test("table from int a in b select c", "query-expr/query_expr_assert_12.json"); } @Test(enabled = false) public void testQueryWithMissingTableKeyword() { test("key() from int a in b select c", "query-expr/query_expr_assert_13.json"); } @Test public void testQueryFromClauseWithMissingFromKeyword() { test("int a in b select c", "query-expr/query_expr_assert_14.json"); test("from int a in b int c in d select e", "query-expr/query_expr_assert_15.json"); test("from int a in b where c int d in e select f", "query-expr/query_expr_assert_16.json"); test("from int a in b let int c = d int d in e select f", "query-expr/query_expr_assert_17.json"); } @Test public void testQueryFromClauseWithMissingVarName() { test("from int in b select c", "query-expr/query_expr_assert_18.json"); } @Test public void testQueryFromClauseWithMissingInKeyword() { test("from int a b select c", "query-expr/query_expr_assert_19.json"); } @Test public void testQueryFromClauseWithMissingExpression() { test("from int a in select c", "query-expr/query_expr_assert_20.json"); test("from int a in b from int c in where d select e", "query-expr/query_expr_assert_21.json"); } @Test public void testQuerySelectClauseWithMissingSelectKeyword() { test("from int a in b c", 
"query-expr/query_expr_assert_22.json"); } @Test public void testQuerySelectClauseWithMissingExpression() { test("from int a in b select", "query-expr/query_expr_assert_23.json"); } @Test public void testQueryWhereClauseWithMissingExpression() { test("from int a in b where select d", "query-expr/query_expr_assert_24.json"); } @Test public void testQueryLetClauseWithMissingLetKeyword() { test("from int a in b int c = d select e", "query-expr/query_expr_assert_25.json"); test("from int a in b @A{} int c = d select e", "query-expr/query_expr_assert_26.json"); } @Test public void testQueryLetClauseWithMissingVarName() { test("from int a in b let int = d select e", "query-expr/query_expr_assert_27.json"); } @Test public void testQueryLetClauseWithMissingEqualToken() { test("from int a in b let int c d select e", "query-expr/query_expr_assert_28.json"); } @Test public void testQueryLetClauseWithMissingComma() { test("from int a in b let int c = d int e = f select g", "query-expr/query_expr_assert_29.json"); } @Test public void testQueryWithExtraToken() { test("table foo key() from int a in b select g", "query-expr/query_expr_assert_30.json"); test("table key() foo from int a in b select g", "query-expr/query_expr_assert_31.json"); } @Test(enabled = false) public void testQueryWithTwoKeySpecifiersWithExtraTokenInBetween() { test("table key(a) foo key(b) []", "query-expr/query_expr_assert_32.json"); } @Test public void testQueryWithExtraClausesAfterSelectClause() { test("from int a in b select c where d", "query-expr/query_expr_assert_33.json"); test("from int a in b select c select d", "query-expr/query_expr_assert_34.json"); } @Test public void testQueryWithOperatorPrecedence() { test("from int a in b select c + from int d in e select f", "query-expr/query_expr_assert_35.json"); } @Test public void testQueryWithOperatorPrecedenceWithExtraClauses() { test("from int a in b select c where e + from int d in e select f", "query-expr/query_expr_assert_36.json"); test("from int 
a in b select c from int d in e + from int f in g select h", "query-expr/query_expr_assert_37.json"); test("from int a in b select c let int a = b + from int f in g select h", "query-expr/query_expr_assert_38.json"); } @Test public void testQueryWithOrderByClauseRecovery() { test("from int a in b order by select c", "query-expr/query_expr_assert_42.json"); test("from int a in b order c select d", "query-expr/query_expr_assert_43.json"); test("from int a in b order by c ascending d e select f", "query-expr/query_expr_assert_44.json"); test("from int a in b by c select d", "query-expr/query_expr_assert_45.json"); test("from int a in b order by ascending select e", "query-expr/query_expr_assert_46.json"); } @Test public void testQueryWithOnConflictAndLimitClauseRecovery() { test("from int a in b select c on d", "query-expr/query_expr_assert_47.json"); test("from int a in b select c conflict d", "query-expr/query_expr_assert_48.json"); test("from int a in b limit select", "query-expr/query_expr_assert_49.json"); test("from int a in b limit select conflict", "query-expr/query_expr_assert_50.json"); } @Test public void testQueryWithJoinClauseRecovery() { test("from int a in b join in d on e equals f select g", "query-expr/query_expr_assert_56.json"); test("from int a in b join int c d on e equals f select g", "query-expr/query_expr_assert_57.json"); test("from int a in b join int c in on e equals f select g", "query-expr/query_expr_assert_59.json"); test("from int a in b outer int c in d on e equals f select g", "query-expr/query_expr_assert_63.json"); } @Test public void testQueryWithJoinOnConditionRecovery() { test("from int a in b join int c in d on select g", "query-expr/query_expr_assert_58.json"); test("from int a in b join int c in d e equals f select g", "query-expr/query_expr_assert_60.json"); test("from int a in b join int c in d select g", "query-expr/query_expr_assert_61.json"); test("from int a in b outer join int c in d select g", 
"query-expr/query_expr_assert_62.json"); test("from int a in b equals select g", "query-expr/query_expr_assert_64.json"); } @Test public void testIncompleteClauses() { testFile("query-expr/query_expr_source_65.bal", "query-expr/query_expr_assert_65.json"); testFile("query-expr/query_expr_source_66.bal", "query-expr/query_expr_assert_66.json"); } @Test @Test public void testSelectClauseStartRecovery() { testFile("query-expr/query_expr_source_69.bal", "query-expr/query_expr_assert_69.json"); } }
I think I see why this was done: the metric is needed when the KV states are registered. But can't we just create the metric in this class, if required, after the DB has been opened? Then the builder could fetch the metric with a getter before passing it into the `RocksDBKeyedStateBackend`.
public void openDB() throws IOException { db = RocksDBOperationUtils.openDB(dbPath, columnFamilyDescriptors, columnFamilyHandles, columnOptions, dbOptions); defaultColumnFamilyHandle = columnFamilyHandles.remove(0); if (nativeMetricMonitor != null) { nativeMetricMonitor.setRocksDB(db); } }
nativeMetricMonitor.setRocksDB(db);
public void openDB() throws IOException { db = RocksDBOperationUtils.openDB( dbPath, columnFamilyDescriptors, columnFamilyHandles, RocksDBOperationUtils.createColumnFamilyOptions(columnFamilyOptionsFactory, "default"), dbOptions); defaultColumnFamilyHandle = columnFamilyHandles.remove(0); nativeMetricMonitor = nativeMetricOptions.isEnabled() ? new RocksDBNativeMetricMonitor(nativeMetricOptions, metricGroup, db) : null; }
/**
 * Base class for RocksDB restore operations. Holds the configuration and handles shared
 * by the concrete restore strategies; subclasses implement {@link #restore()}.
 * Closing this object releases the default column family handle and the DB instance.
 */
class AbstractRocksDBRestoreOperation<K> implements RocksDBRestoreOperation, AutoCloseable {
    protected final KeyGroupRange keyGroupRange;
    protected final int keyGroupPrefixBytes;
    protected final int numberOfTransferringThreads;
    protected final CloseableRegistry cancelStreamRegistry;
    protected final ClassLoader userCodeClassLoader;
    protected final ColumnFamilyOptions columnOptions;
    protected final DBOptions dbOptions;
    // Registered KV state, keyed by state name.
    protected final Map<String, StateColumnFamilyHandle> kvStateInformation;
    // May be null — see close-path/metrics usage elsewhere in the file; confirm.
    protected final RocksDBNativeMetricMonitor nativeMetricMonitor;
    protected final File instanceBasePath;
    protected final File instanceRocksDBPath;
    // Absolute path of instanceRocksDBPath, cached for the DB open call.
    protected final String dbPath;
    protected List<ColumnFamilyHandle> columnFamilyHandles;
    protected List<ColumnFamilyDescriptor> columnFamilyDescriptors;
    protected final StateSerializerProvider<K> keySerializerProvider;
    // Sst files already present per checkpoint id; starts empty (filled during restore).
    protected final SortedMap<Long, Set<StateHandleID>> restoredSstFiles;
    protected final Collection<KeyedStateHandle> restoreStateHandles;
    protected RocksDB db;
    // -1 means "no checkpoint completed yet".
    protected long lastCompletedCheckpointId = -1L;
    // Fresh random UID by default; presumably overwritten when restoring an existing backend — confirm.
    protected UUID backendUID = UUID.randomUUID();
    protected ColumnFamilyHandle defaultColumnFamilyHandle;

    protected AbstractRocksDBRestoreOperation(
            KeyGroupRange keyGroupRange,
            int keyGroupPrefixBytes,
            int numberOfTransferringThreads,
            CloseableRegistry cancelStreamRegistry,
            ClassLoader userCodeClassLoader,
            Map<String, StateColumnFamilyHandle> kvStateInformation,
            StateSerializerProvider<K> keySerializerProvider,
            File instanceBasePath,
            File instanceRocksDBPath,
            DBOptions dbOptions,
            ColumnFamilyOptions columnOptions,
            RocksDBNativeMetricMonitor nativeMetricMonitor,
            @Nonnull Collection<KeyedStateHandle> stateHandles) {
        this.keyGroupRange = keyGroupRange;
        this.keyGroupPrefixBytes = keyGroupPrefixBytes;
        this.numberOfTransferringThreads = numberOfTransferringThreads;
        this.cancelStreamRegistry = cancelStreamRegistry;
        this.userCodeClassLoader = userCodeClassLoader;
        this.kvStateInformation = kvStateInformation;
        this.keySerializerProvider = keySerializerProvider;
        this.instanceBasePath = instanceBasePath;
        this.instanceRocksDBPath = instanceRocksDBPath;
        this.dbPath = instanceRocksDBPath.getAbsolutePath();
        this.dbOptions = dbOptions;
        this.columnOptions = columnOptions;
        this.nativeMetricMonitor = nativeMetricMonitor;
        this.restoreStateHandles = stateHandles;
        this.restoredSstFiles = new TreeMap<>();
        // Capacity 1: at minimum the default column family handle will be stored here.
        this.columnFamilyHandles = new ArrayList<>(1);
        this.columnFamilyDescriptors = Collections.emptyList();
    }

    public RocksDB getDb() {
        return this.db;
    }

    @Override
    public void close() {
        // Close the handle before the DB; failures are swallowed (best-effort cleanup).
        IOUtils.closeQuietly(defaultColumnFamilyHandle);
        IOUtils.closeQuietly(db);
    }

    @Override
    public abstract RocksDBRestoreResult restore() throws Exception;
}
class AbstractRocksDBRestoreOperation<K> implements RocksDBRestoreOperation, AutoCloseable { protected final KeyGroupRange keyGroupRange; protected final int keyGroupPrefixBytes; protected final int numberOfTransferringThreads; protected final CloseableRegistry cancelStreamRegistry; protected final ClassLoader userCodeClassLoader; protected final Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory; protected final DBOptions dbOptions; protected final Map<String, RocksDbKvStateInfo> kvStateInformation; protected final File instanceBasePath; protected final File instanceRocksDBPath; protected final String dbPath; protected List<ColumnFamilyHandle> columnFamilyHandles; protected List<ColumnFamilyDescriptor> columnFamilyDescriptors; protected final StateSerializerProvider<K> keySerializerProvider; protected final RocksDBNativeMetricOptions nativeMetricOptions; protected final MetricGroup metricGroup; protected final Collection<KeyedStateHandle> restoreStateHandles; protected final RocksDbTtlCompactFiltersManager ttlCompactFiltersManager; protected final TtlTimeProvider ttlTimeProvider; protected RocksDB db; protected ColumnFamilyHandle defaultColumnFamilyHandle; protected RocksDBNativeMetricMonitor nativeMetricMonitor; protected boolean isKeySerializerCompatibilityChecked; protected AbstractRocksDBRestoreOperation( KeyGroupRange keyGroupRange, int keyGroupPrefixBytes, int numberOfTransferringThreads, CloseableRegistry cancelStreamRegistry, ClassLoader userCodeClassLoader, Map<String, RocksDbKvStateInfo> kvStateInformation, StateSerializerProvider<K> keySerializerProvider, File instanceBasePath, File instanceRocksDBPath, DBOptions dbOptions, Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory, RocksDBNativeMetricOptions nativeMetricOptions, MetricGroup metricGroup, @Nonnull Collection<KeyedStateHandle> stateHandles, @Nonnull RocksDbTtlCompactFiltersManager ttlCompactFiltersManager, TtlTimeProvider ttlTimeProvider) { this.keyGroupRange = 
keyGroupRange; this.keyGroupPrefixBytes = keyGroupPrefixBytes; this.numberOfTransferringThreads = numberOfTransferringThreads; this.cancelStreamRegistry = cancelStreamRegistry; this.userCodeClassLoader = userCodeClassLoader; this.kvStateInformation = kvStateInformation; this.keySerializerProvider = keySerializerProvider; this.instanceBasePath = instanceBasePath; this.instanceRocksDBPath = instanceRocksDBPath; this.dbPath = instanceRocksDBPath.getAbsolutePath(); this.dbOptions = dbOptions; this.columnFamilyOptionsFactory = columnFamilyOptionsFactory; this.nativeMetricOptions = nativeMetricOptions; this.metricGroup = metricGroup; this.restoreStateHandles = stateHandles; this.ttlCompactFiltersManager = ttlCompactFiltersManager; this.ttlTimeProvider = ttlTimeProvider; this.columnFamilyHandles = new ArrayList<>(1); this.columnFamilyDescriptors = Collections.emptyList(); } public RocksDB getDb() { return this.db; } protected RocksDbKvStateInfo getOrRegisterStateColumnFamilyHandle( ColumnFamilyHandle columnFamilyHandle, StateMetaInfoSnapshot stateMetaInfoSnapshot) throws RocksDBException { RocksDbKvStateInfo registeredStateMetaInfoEntry = kvStateInformation.get(stateMetaInfoSnapshot.getName()); if (null == registeredStateMetaInfoEntry) { RegisteredStateMetaInfoBase stateMetaInfo = RegisteredStateMetaInfoBase.fromMetaInfoSnapshot(stateMetaInfoSnapshot); if (columnFamilyHandle == null) { registeredStateMetaInfoEntry = RocksDBOperationUtils.createStateInfo(stateMetaInfo, ttlCompactFiltersManager, ttlTimeProvider, db, columnFamilyOptionsFactory); } else { registeredStateMetaInfoEntry = new RocksDbKvStateInfo(columnFamilyHandle, stateMetaInfo); } RocksDBOperationUtils.registerKvStateInformation( kvStateInformation, nativeMetricMonitor, stateMetaInfoSnapshot.getName(), registeredStateMetaInfoEntry); } else { } return registeredStateMetaInfoEntry; } protected KeyedBackendSerializationProxy<K> readMetaData(DataInputView dataInputView) throws IOException, StateMigrationException { 
KeyedBackendSerializationProxy<K> serializationProxy = new KeyedBackendSerializationProxy<>(userCodeClassLoader); serializationProxy.read(dataInputView); if (!isKeySerializerCompatibilityChecked) { TypeSerializerSchemaCompatibility<K> keySerializerSchemaCompat = keySerializerProvider.setPreviousSerializerSnapshotForRestoredState(serializationProxy.getKeySerializerSnapshot()); if (keySerializerSchemaCompat.isCompatibleAfterMigration() || keySerializerSchemaCompat.isIncompatible()) { throw new StateMigrationException("The new key serializer must be compatible."); } isKeySerializerCompatibilityChecked = true; } return serializationProxy; } /** * Necessary clean up iff restore operation failed. */ @Override public void close() { IOUtils.closeQuietly(defaultColumnFamilyHandle); IOUtils.closeQuietly(nativeMetricMonitor); IOUtils.closeQuietly(db); columnFamilyDescriptors.forEach((cfd) -> IOUtils.closeQuietly(cfd.getOptions())); } @Override public abstract RocksDBRestoreResult restore() throws Exception; }
Shall we move/merge this line with 103?
public void execute() { if (helpFlag) { String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(parentCmdParser, "build"); outStream.println(commandUsageInfo); return; } if (argList != null && argList.size() > 1) { throw LauncherUtils.createUsageException("too many arguments"); } Path currentDir = Paths.get(System.getProperty(USER_DIR)); if (argList == null || argList.size() == 0) { BuilderUtils.compileWithTestsAndWrite(currentDir, offline, lockEnabled, skiptests); } else { String targetFileName; String pkgName = argList.get(0); if (pkgName.endsWith("/")) { pkgName = pkgName.substring(0, pkgName.length() - 1); } if (outputFileName != null && !outputFileName.isEmpty()) { targetFileName = outputFileName; } else { targetFileName = pkgName; } Path sourceFilePath = Paths.get(pkgName); Path resolvedFullPath = currentDir.resolve(sourceFilePath); if (Files.isRegularFile(resolvedFullPath) && sourceFilePath.toString().endsWith(BLangConstants.BLANG_SRC_FILE_SUFFIX) && !RepoUtils.hasProjectRepo(currentDir)) { targetFileName = getTargetFileName(Paths.get(targetFileName)); Path parent = resolvedFullPath.getParent(); currentDir = parent != null ? parent : currentDir; Path resolvedFileName = resolvedFullPath.getFileName(); pkgName = resolvedFileName != null ? resolvedFileName.toString() : pkgName; } else if (Files.isDirectory(currentDir)) { if (Files.isDirectory(resolvedFullPath) && !RepoUtils.hasProjectRepo(currentDir)) { outStream.println("error: do you mean to build the ballerina package as a project? If so run" + " ballerina init to make it a project with a .ballerina directory"); return; } } else { outStream.println("error: invalid Ballerina source path, it should either be a directory or a" + "file with a \'" + BLangConstants.BLANG_SRC_FILE_SUFFIX + "\' extension"); return; } BuilderUtils.compileWithTestsAndWrite(currentDir, pkgName, targetFileName, buildCompiledPkg, offline, lockEnabled, skiptests); } Runtime.getRuntime().exit(0); }
!RepoUtils.hasProjectRepo(currentDir)) {
public void execute() { if (helpFlag) { String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(parentCmdParser, "build"); outStream.println(commandUsageInfo); return; } if (argList != null && argList.size() > 1) { throw LauncherUtils.createUsageException("too many arguments"); } Path sourceRootPath = Paths.get(System.getProperty(USER_DIR)); if (argList == null || argList.size() == 0) { BuilderUtils.compileWithTestsAndWrite(sourceRootPath, offline, lockEnabled, skiptests); } else { String targetFileName; String pkgName = argList.get(0); if (pkgName.endsWith("/")) { pkgName = pkgName.substring(0, pkgName.length() - 1); } if (outputFileName != null && !outputFileName.isEmpty()) { targetFileName = outputFileName; } else { targetFileName = pkgName; } Path sourcePath = Paths.get(pkgName); Path resolvedFullPath = sourceRootPath.resolve(sourcePath); if (Files.isRegularFile(resolvedFullPath) && sourcePath.toString().endsWith(BLangConstants.BLANG_SRC_FILE_SUFFIX) && !RepoUtils.hasProjectRepo(sourceRootPath)) { targetFileName = getTargetFileName(Paths.get(targetFileName)); Path parent = resolvedFullPath.getParent(); sourceRootPath = parent != null ? parent : sourceRootPath; Path resolvedFileName = resolvedFullPath.getFileName(); pkgName = resolvedFileName != null ? resolvedFileName.toString() : pkgName; } else if (Files.isDirectory(sourceRootPath)) { if (Files.isDirectory(resolvedFullPath) && !RepoUtils.hasProjectRepo(sourceRootPath)) { outStream.println("error: do you mean to build the ballerina package as a project? If so run" + " ballerina init to make it a project with a .ballerina directory"); return; } } else { outStream.println("error: invalid Ballerina source path, it should either be a directory or a" + "file with a \'" + BLangConstants.BLANG_SRC_FILE_SUFFIX + "\' extension"); return; } BuilderUtils.compileWithTestsAndWrite(sourceRootPath, pkgName, targetFileName, buildCompiledPkg, offline, lockEnabled, skiptests); } Runtime.getRuntime().exit(0); }
class BuildCommand implements BLauncherCmd { private static final String USER_DIR = "user.dir"; private static PrintStream outStream = System.err; private JCommander parentCmdParser; @Parameter(names = {"-c"}, description = "build a compiled package") private boolean buildCompiledPkg; @Parameter(names = {"-o"}, description = "write output to the given file") private String outputFileName; @Parameter(names = {"--offline"}) private boolean offline; @Parameter(names = {"--lockEnabled"}) private boolean lockEnabled; @Parameter(names = {"--skiptests"}) private boolean skiptests; @Parameter(arity = 1) private List<String> argList; @Parameter(names = {"--help", "-h"}, hidden = true) private boolean helpFlag; @Parameter(names = "--java.debug", hidden = true) private String debugPort; /** * Get the target file path for a single bal file. * * @param targetPath target path given * @return actual target path */ private String getTargetFileName(Path targetPath) { if (outputFileName == null && targetPath.getParent() != null) { Path targetFileName = targetPath.getFileName(); if (targetFileName != null) { return targetFileName.toString(); } } return targetPath.toString(); } @Override public String getName() { return "build"; } @Override public void printLongDesc(StringBuilder out) { out.append("Compiles Ballerina sources and writes the output to a file. \n"); out.append("\n"); out.append("By default, output filename is the last part of packagename \n"); out.append("or the filename (minus the extension) with the extension \".balx\". \n"); out.append("\n"); out.append("If the output file is specified with the -o flag, the output \n"); out.append("will be written to that file. 
\n"); } @Override public void printUsage(StringBuilder out) { out.append(" ballerina build <balfile | packagename> [-o output] \n"); } @Override public void setParentCmdParser(JCommander parentCmdParser) { this.parentCmdParser = parentCmdParser; } @Override public void setSelfCmdParser(JCommander selfCmdParser) { } }
class BuildCommand implements BLauncherCmd { private static final String USER_DIR = "user.dir"; private static PrintStream outStream = System.err; private JCommander parentCmdParser; @Parameter(names = {"-c"}, description = "build a compiled package") private boolean buildCompiledPkg; @Parameter(names = {"-o"}, description = "write output to the given file") private String outputFileName; @Parameter(names = {"--offline"}) private boolean offline; @Parameter(names = {"--lockEnabled"}) private boolean lockEnabled; @Parameter(names = {"--skiptests"}) private boolean skiptests; @Parameter(arity = 1) private List<String> argList; @Parameter(names = {"--help", "-h"}, hidden = true) private boolean helpFlag; @Parameter(names = "--java.debug", hidden = true) private String debugPort; /** * Get the target file path for a single bal file. * * @param targetPath target path given * @return actual target path */ private String getTargetFileName(Path targetPath) { if (outputFileName == null && targetPath.getParent() != null) { Path targetFileName = targetPath.getFileName(); if (targetFileName != null) { return targetFileName.toString(); } } return targetPath.toString(); } @Override public String getName() { return "build"; } @Override public void printLongDesc(StringBuilder out) { out.append("Compiles Ballerina sources and writes the output to a file. \n"); out.append("\n"); out.append("By default, output filename is the last part of packagename \n"); out.append("or the filename (minus the extension) with the extension \".balx\". \n"); out.append("\n"); out.append("If the output file is specified with the -o flag, the output \n"); out.append("will be written to that file. 
\n"); } @Override public void printUsage(StringBuilder out) { out.append(" ballerina build <balfile | packagename> [-o output] \n"); } @Override public void setParentCmdParser(JCommander parentCmdParser) { this.parentCmdParser = parentCmdParser; } @Override public void setSelfCmdParser(JCommander selfCmdParser) { } }
Fixed and a test case added.
private STNode parseTableConstructorExpr(SyntaxKind nextTokenKind, STNode tableKeyword, STNode keySpecifier) { STNode openBracket; STNode rowList; STNode closeBracket; if (nextTokenKind == SyntaxKind.KEY_KEYWORD) { keySpecifier = parseKeySpecifier(); nextTokenKind = peek().kind; } switch (nextTokenKind) { case OPEN_BRACKET_TOKEN: openBracket = parseOpenBracket(); rowList = parseRowList(); closeBracket = parseCloseBracket(); endContext(); break; default: Solution solution = recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, keySpecifier); if (solution.action == Action.REMOVE) { endContext(); return solution.recoveredNode; } return parseTableConstructorExpr(solution.tokenKind, tableKeyword, keySpecifier); } return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList, closeBracket); }
switch (nextTokenKind) {
private STNode parseTableConstructorExpr(SyntaxKind nextTokenKind, STNode tableKeyword, STNode keySpecifier) { STNode openBracket; STNode rowList; STNode closeBracket; switch (nextTokenKind) { case KEY_KEYWORD: keySpecifier = parseKeySpecifier(); break; case OPEN_BRACKET_TOKEN: break; default: Solution solution = recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, keySpecifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTableConstructorExpr(solution.tokenKind, tableKeyword, keySpecifier); } openBracket = parseOpenBracket(); rowList = parseRowList(); closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList, closeBracket); }
class BallerinaParser { private static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.ACTION; private final BallerinaParserErrorHandler errorHandler; private final AbstractTokenReader tokenReader; private ParserRuleContext currentParamKind = ParserRuleContext.REQUIRED_PARAM; protected BallerinaParser(AbstractTokenReader tokenReader) { this.tokenReader = tokenReader; this.errorHandler = new BallerinaParserErrorHandler(tokenReader, this); } /** * Start parsing the given input. * * @return Parsed node */ public STNode parse() { return parseCompUnit(); } /** * Start parsing the input from a given context. Supported starting points are: * <ul> * <li>Module part (a file)</li> * <li>Top level node</li> * <li>Statement</li> * <li>Expression</li> * </ul> * * @param context Context to start parsing * @return Parsed node */ public STNode parse(ParserRuleContext context) { switch (context) { case COMP_UNIT: return parseCompUnit(); case TOP_LEVEL_NODE: startContext(ParserRuleContext.COMP_UNIT); return parseTopLevelNode(); case STATEMENT: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_DEFINITION); startContext(ParserRuleContext.FUNC_BODY_BLOCK); return parseStatement(); case EXPRESSION: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_DEFINITION); startContext(ParserRuleContext.FUNC_BODY_BLOCK); startContext(ParserRuleContext.STATEMENT); return parseExpression(); default: throw new UnsupportedOperationException("Cannot start parsing from: " + context); } } /** * Resume the parsing from the given context. * * @param context Context to resume parsing * @param args Arguments that requires to continue parsing from the given parser context * @return Parsed node */ public STNode resumeParsing(ParserRuleContext context, Object... 
args) { switch (context) { case COMP_UNIT: return parseCompUnit(); case EXTERNAL_FUNC_BODY: return parseExternalFunctionBody(); case FUNC_BODY: return parseFunctionBody(); case OPEN_BRACE: return parseOpenBrace(); case CLOSE_BRACE: return parseCloseBrace(); case FUNC_NAME: return parseFunctionName(); case OPEN_PARENTHESIS: return parseOpenParenthesis(); case PARAM_LIST: return parseParamList(); case RETURN_TYPE_DESCRIPTOR: return parseReturnTypeDescriptor(); case SIMPLE_TYPE_DESCRIPTOR: return parseTypeDescriptor(); case ASSIGN_OP: return parseAssignOp(); case EXTERNAL_KEYWORD: return parseExternalKeyword(); case FUNC_BODY_BLOCK: return parseFunctionBodyBlock(); case SEMICOLON: return parseSemicolon(); case CLOSE_PARENTHESIS: return parseCloseParenthesis(); case VARIABLE_NAME: return parseVariableName(); case TERMINAL_EXPRESSION: return parseTerminalExpression((boolean) args[0], (boolean) args[1]); case STATEMENT: return parseStatement(); case STATEMENT_WITHOUT_ANNOTS: return parseStatement((STNode) args[0]); case EXPRESSION_RHS: return parseExpressionRhs((OperatorPrecedence) args[1], (STNode) args[0], (boolean) args[2], (boolean) args[3]); case PARAMETER: return parseParameter((STNode) args[0], (int) args[1]); case PARAMETER_WITHOUT_ANNOTS: return parseParamGivenAnnots((STNode) args[0], (STNode) args[1], (int) args[2]); case AFTER_PARAMETER_TYPE: return parseAfterParamType((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case PARAMETER_RHS: return parseParameterRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4]); case TOP_LEVEL_NODE: return parseTopLevelNode(); case TOP_LEVEL_NODE_WITHOUT_METADATA: return parseTopLevelNode((STNode) args[0]); case TOP_LEVEL_NODE_WITHOUT_MODIFIER: return parseTopLevelNode((STNode) args[0], (STNode) args[1]); case STATEMENT_START_IDENTIFIER: return parseStatementStartIdentifier(); case VAR_DECL_STMT_RHS: return parseVarDeclRhs((STNode) args[0], (STNode) args[1], 
(STNode) args[2], (STNode) args[3], (boolean) args[4]); case TYPE_REFERENCE: return parseTypeReference(); case FIELD_DESCRIPTOR_RHS: return parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2]); case NAMED_OR_POSITIONAL_ARG_RHS: return parseNamedOrPositionalArg((STNode) args[0]); case RECORD_BODY_START: return parseRecordBodyStartDelimiter(); case TYPE_DESCRIPTOR: return parseTypeDescriptor(); case OBJECT_MEMBER: return parseObjectMember(); case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY: return parseObjectMethodOrField((STNode) args[0], (STNode) args[1]); case OBJECT_FIELD_RHS: return parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case OBJECT_TYPE_FIRST_QUALIFIER: return parseObjectTypeQualifiers(); case OBJECT_TYPE_SECOND_QUALIFIER: return parseObjectTypeSecondQualifier((STNode) args[0]); case OBJECT_KEYWORD: return parseObjectKeyword(); case TYPE_NAME: return parseTypeName(); case IF_KEYWORD: return parseIfKeyword(); case ELSE_KEYWORD: return parseElseKeyword(); case ELSE_BODY: return parseElseBody(); case WHILE_KEYWORD: return parseWhileKeyword(); case PANIC_KEYWORD: return parsePanicKeyword(); case MAJOR_VERSION: return parseMajorVersion(); case IMPORT_DECL_RHS: return parseImportDecl((STNode) args[0], (STNode) args[1]); case IMPORT_PREFIX: return parseImportPrefix(); case IMPORT_MODULE_NAME: case IMPORT_ORG_OR_MODULE_NAME: case VARIABLE_REF: case FIELD_OR_FUNC_NAME: case SERVICE_NAME: return parseIdentifier(context); case IMPORT_KEYWORD: return parseImportKeyword(); case SLASH: return parseSlashToken(); case DOT: return parseDotToken(); case IMPORT_VERSION_DECL: return parseVersion(); case VERSION_KEYWORD: return parseVersionKeywrod(); case VERSION_NUMBER: return parseVersionNumber(); case DECIMAL_INTEGER_LITERAL: return parseDecimalIntLiteral(context); case IMPORT_SUB_VERSION: return parseSubVersion(context); case IMPORT_PREFIX_DECL: return parseImportPrefixDecl(); case AS_KEYWORD: return 
parseAsKeyword(); case CONTINUE_KEYWORD: return parseContinueKeyword(); case BREAK_KEYWORD: return parseBreakKeyword(); case RETURN_KEYWORD: return parseReturnKeyword(); case MAPPING_FIELD: return parseMappingField((STNode) args[0]); case SPECIFIC_FIELD_RHS: return parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]); case STRING_LITERAL: return parseStringLiteral(); case COLON: return parseColon(); case OPEN_BRACKET: return parseOpenBracket(); case RESOURCE_DEF: return parseResource(); case OPTIONAL_SERVICE_NAME: return parseServiceName(); case SERVICE_KEYWORD: return parseServiceKeyword(); case ON_KEYWORD: return parseOnKeyword(); case RESOURCE_KEYWORD: return parseResourceKeyword(); case LISTENER_KEYWORD: return parseListenerKeyword(); case NIL_TYPE_DESCRIPTOR: return parseNilTypeDescriptor(); case COMPOUND_ASSIGNMENT_STMT: return parseCompoundAssignmentStmt(); case TYPEOF_KEYWORD: return parseTypeofKeyword(); case ARRAY_TYPE_DESCRIPTOR: return parseArrayTypeDescriptor((STNode) args[0]); case ARRAY_LENGTH: return parseArrayLength(); case FUNC_DEFINITION: case REQUIRED_PARAM: case ANNOT_REFERENCE: return parseIdentifier(context); case IS_KEYWORD: return parseIsKeyword(); case STMT_START_WITH_EXPR_RHS: return parseStamentStartWithExpr((STNode) args[0]); case COMMA: return parseComma(); case CONST_DECL_TYPE: return parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]); case STMT_START_WITH_IDENTIFIER: return parseStatementStartsWithIdentifier((STNode) args[0], (STNode) args[1]); case PARAMETERIZED_TYPE_DESCRIPTOR: return parseParameterizedTypeDescriptor(); case LT: return parseLTToken(); case GT: return parseGTToken(); case NIL_LITERAL: return parseNilLiteral(); case RECORD_FIELD_OR_RECORD_END: return parseFieldOrRestDescriptor((boolean) args[0]); case ANNOTATION_KEYWORD: return parseAnnotationKeyword(); case ANNOT_DECL_OPTIONAL_TYPE: return parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]); case 
ANNOT_DECL_RHS: return parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4]); case ANNOT_OPTIONAL_ATTACH_POINTS: return parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4], (STNode) args[5]); case SOURCE_KEYWORD: return parseSourceKeyword(); case ATTACH_POINT_IDENT: return parseAttachPointIdent((STNode) args[0]); case IDENT_AFTER_OBJECT_IDENT: return parseIdentAfterObjectIdent(); case FUNCTION_IDENT: return parseFunctionIdent(); case FIELD_IDENT: return parseFieldIdent(); case ATTACH_POINT_END: return parseAttachPointEnd(); case XMLNS_KEYWORD: return parseXMLNSKeyword(); case XML_NAMESPACE_PREFIX_DECL: return parseXMLDeclRhs((STNode) args[0], (STNode) args[1]); case NAMESPACE_PREFIX: return parseNamespacePrefix(); case WORKER_KEYWORD: return parseWorkerKeyword(); case WORKER_NAME: return parseWorkerName(); case FORK_KEYWORD: return parseForkKeyword(); case DECIMAL_FLOATING_POINT_LITERAL: return parseDecimalFloatingPointLiteral(); case HEX_FLOATING_POINT_LITERAL: return parseHexFloatingPointLiteral(); case TRAP_KEYWORD: return parseTrapKeyword(); case IN_KEYWORD: return parseInKeyword(); case FOREACH_KEYWORD: return parseForEachKeyword(); case TABLE_KEYWORD: return parseTableKeyword(); case KEY_KEYWORD: return parseKeyKeyword(); case TABLE_KEYWORD_RHS: return parseTableConstructorExpr((STNode) args[0], (STNode) args[1]); default: throw new IllegalStateException("Cannot re-parse rule: " + context); } } /* * Private methods */ private STToken peek() { return this.tokenReader.peek(); } private STToken peek(int k) { return this.tokenReader.peek(k); } private STToken consume() { return this.tokenReader.read(); } private Solution recover(STToken token, ParserRuleContext currentCtx, Object... 
parsedNodes) { return this.errorHandler.recover(currentCtx, token, parsedNodes); } private void startContext(ParserRuleContext context) { this.errorHandler.startContext(context); } private void endContext() { this.errorHandler.endContext(); } /** * Switch the current context to the provided one. This will replace the * existing context. * * @param context Context to switch to. */ private void switchContext(ParserRuleContext context) { this.errorHandler.switchContext(context); } /** * Parse a given input and returns the AST. Starts parsing from the top of a compilation unit. * * @return Parsed node */ private STNode parseCompUnit() { startContext(ParserRuleContext.COMP_UNIT); STToken token = peek(); List<STNode> otherDecls = new ArrayList<>(); List<STNode> importDecls = new ArrayList<>(); boolean processImports = true; while (token.kind != SyntaxKind.EOF_TOKEN) { STNode decl = parseTopLevelNode(token.kind); if (decl.kind == SyntaxKind.IMPORT_DECLARATION) { if (processImports) { importDecls.add(decl); } else { otherDecls.add(decl); this.errorHandler.reportInvalidNode(token, "imports must be declared before other declarations"); } } else { if (processImports) { processImports = false; } otherDecls.add(decl); } token = peek(); } STToken eof = consume(); endContext(); return STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls), STNodeFactory.createNodeList(otherDecls), eof); } /** * Parse top level node having an optional modifier preceding it. 
* * @return Parsed node */ private STNode parseTopLevelNode() { STToken token = peek(); return parseTopLevelNode(token.kind); } protected STNode parseTopLevelNode(SyntaxKind tokenKind) { STNode metadata; switch (tokenKind) { case EOF_TOKEN: return consume(); case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(tokenKind); return parseTopLevelNode(metadata); case IMPORT_KEYWORD: case FINAL_KEYWORD: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: case SERVICE_KEYWORD: metadata = createEmptyMetadata(); break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(createEmptyMetadata(), null); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { metadata = createEmptyMetadata(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE); if (solution.action == Action.KEEP) { metadata = STNodeFactory.createNodeList(new ArrayList<>()); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTopLevelNode(solution.tokenKind); } return parseTopLevelNode(tokenKind, metadata); } /** * Parse top level node having an optional modifier preceding it, given the next token kind. 
* * @param tokenKind Next token kind * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata) { STToken nextToken = peek(); return parseTopLevelNode(nextToken.kind, metadata); } private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) { STNode qualifier = null; switch (tokenKind) { case EOF_TOKEN: if (metadata != null) { this.errorHandler.reportInvalidNode(null, "invalid metadata"); } return consume(); case PUBLIC_KEYWORD: qualifier = parseQualifier(); tokenKind = peek().kind; break; case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case IMPORT_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, null); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.action == Action.KEEP) { qualifier = STNodeFactory.createEmptyNode(); break; } return parseTopLevelNode(solution.tokenKind, metadata); } return parseTopLevelNode(tokenKind, metadata, qualifier); } /** * Check whether the cursor is at the start of a module level var-decl. * * @param lookahead Offset of the token to to check * @return <code>true</code> if the cursor is at the start of a module level var-decl. * <code>false</code> otherwise. 
*/ private boolean isModuleVarDeclStart(int lookahead) { STToken nextToken = peek(lookahead + 1); switch (nextToken.kind) { case EQUAL_TOKEN: case OPEN_BRACKET_TOKEN: case QUESTION_MARK_TOKEN: case PIPE_TOKEN: return true; case IDENTIFIER_TOKEN: switch (peek(lookahead + 2).kind) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } case COLON_TOKEN: if (lookahead > 1) { return false; } if (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) { return false; } return isModuleVarDeclStart(lookahead + 2); default: return false; } } /** * Parse import declaration. * <p> * <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code> * * @return Parsed node */ private STNode parseImportDecl() { startContext(ParserRuleContext.IMPORT_DECL); this.tokenReader.switchMode(ParserMode.IMPORT); STNode importKeyword = parseImportKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME); STToken token = peek(); STNode importDecl = parseImportDecl(token.kind, importKeyword, identifier); this.tokenReader.resetMode(); endContext(); return importDecl; } /** * Parse import keyword. * * @return Parsed node */ private STNode parseImportKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IMPORT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD); return sol.recoveredNode; } } /** * Parse identifier. * * @return Parsed node */ private STNode parseIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse RHS of the import declaration. This includes the components after the * starting identifier (org-name/module-name) of the import decl. 
* * @param importKeyword Import keyword * @param identifier Org-name or the module name * @return Parsed node */ private STNode parseImportDecl(STNode importKeyword, STNode identifier) { STToken nextToken = peek(); return parseImportDecl(nextToken.kind, importKeyword, identifier); } private STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) { STNode orgName; STNode moduleName; STNode version; STNode alias; switch (tokenKind) { case SLASH_TOKEN: STNode slash = parseSlashToken(); orgName = STNodeFactory.createImportOrgNameNode(identifier, slash); moduleName = parseModuleName(); version = parseVersion(); alias = parseImportPrefixDecl(); break; case DOT_TOKEN: case VERSION_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = parseVersion(); alias = parseImportPrefixDecl(); break; case AS_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = parseImportPrefixDecl(); break; case SEMICOLON_TOKEN: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportDecl(solution.tokenKind, importKeyword, identifier); } STNode semicolon = parseSemicolon(); return STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon); } /** * parse slash token. * * @return Parsed node */ private STNode parseSlashToken() { STToken token = peek(); if (token.kind == SyntaxKind.SLASH_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SLASH); return sol.recoveredNode; } } /** * Parse dot token. 
* * @return Parsed node */ private STNode parseDotToken() { STToken nextToken = peek(); return parseDotToken(nextToken.kind); } private STNode parseDotToken(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.DOT_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.DOT); return sol.recoveredNode; } } /** * Parse module name of a import declaration. * * @return Parsed node */ private STNode parseModuleName() { STNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME); return parseModuleName(peek().kind, moduleNameStart); } /** * Parse import module name of a import declaration, given the module name start identifier. * * @param moduleNameStart Starting identifier of the module name * @return Parsed node */ private STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) { List<STNode> moduleNameParts = new ArrayList<>(); moduleNameParts.add(moduleNameStart); while (!isEndOfImportModuleName(nextTokenKind)) { moduleNameParts.add(parseDotToken()); moduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME)); nextTokenKind = peek().kind; } return STNodeFactory.createNodeList(moduleNameParts); } private boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) { return nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN; } private boolean isEndOfImportDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case ABSTRACT_KEYWORD: case CONST_KEYWORD: case EOF_TOKEN: case SERVICE_KEYWORD: case IMPORT_KEYWORD: case FINAL_KEYWORD: return true; default: return false; } } /** * Parse version component of a import declaration. 
 * <p>
 * <code>version-decl := version sem-ver</code>
 *
 * @return Parsed node
 */
private STNode parseVersion() {
    STToken nextToken = peek();
    return parseVersion(nextToken.kind);
}

private STNode parseVersion(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case VERSION_KEYWORD:
            STNode versionKeyword = parseVersionKeywrod();
            STNode versionNumber = parseVersionNumber();
            return STNodeFactory.createImportVersionNode(versionKeyword, versionNumber);
        case AS_KEYWORD:
        case SEMICOLON_TOKEN:
            // The version component is optional in an import declaration;
            // absence is represented by an empty node.
            return STNodeFactory.createEmptyNode();
        default:
            if (isEndOfImportDecl(nextTokenKind)) {
                return STNodeFactory.createEmptyNode();
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL);

            // If the parser recovered by removing a token, return the recovered node.
            // Otherwise re-attempt parsing with the token kind of the recovery solution.
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseVersion(solution.tokenKind);
    }
}

/**
 * Parse version keyword.
 *
 * @return Parsed node
 */
private STNode parseVersionKeywrod() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.VERSION_KEYWORD) {
        return consume();
    } else {
        // Not at a 'version' keyword; attempt error recovery.
        Solution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse version number.
 * <p>
 * <code>sem-ver := major-num [. minor-num [.
patch-num]]
 * <br/>
 * major-num := DecimalNumber
 * <br/>
 * minor-num := DecimalNumber
 * <br/>
 * patch-num := DecimalNumber
 * </code>
 *
 * @return Parsed node
 */
private STNode parseVersionNumber() {
    STToken nextToken = peek();
    return parseVersionNumber(nextToken.kind);
}

private STNode parseVersionNumber(SyntaxKind nextTokenKind) {
    STNode majorVersion;
    switch (nextTokenKind) {
        case DECIMAL_INTEGER_LITERAL:
            majorVersion = parseMajorVersion();
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.VERSION_NUMBER);

            // If the parser recovered by removing a token, return the recovered node.
            // Otherwise re-attempt parsing with the token kind of the recovery solution.
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseVersionNumber(solution.tokenKind);
    }

    // minor-num and patch-num are optional: a patch version can only be present
    // when a minor version is also present (the sub-version parsers return null
    // when the component is absent).
    List<STNode> versionParts = new ArrayList<>();
    versionParts.add(majorVersion);
    STNode minorVersion = parseMinorVersion();
    if (minorVersion != null) {
        versionParts.add(minorVersion);
        STNode patchVersion = parsePatchVersion();
        if (patchVersion != null) {
            versionParts.add(patchVersion);
        }
    }
    return STNodeFactory.createNodeList(versionParts);
}

// Parse the mandatory major version number of a sem-ver.
private STNode parseMajorVersion() {
    return parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION);
}

// Parse the optional minor version. Returns null when absent.
private STNode parseMinorVersion() {
    return parseSubVersion(ParserRuleContext.MINOR_VERSION);
}

// Parse the optional patch version. Returns null when absent.
private STNode parsePatchVersion() {
    return parseSubVersion(ParserRuleContext.PATCH_VERSION);
}

/**
 * Parse decimal literal.
 *
 * @param context Context in which the decimal literal is used.
 * @return Parsed node
 */
private STNode parseDecimalIntLiteral(ParserRuleContext context) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) {
        return consume();
    } else {
        // Not at a decimal integer literal; attempt error recovery.
        Solution sol = recover(peek(), context);
        return sol.recoveredNode;
    }
}

/**
 * Parse sub version. i.e: minor-version/patch-version.
 *
 * @param context Context indicating what kind of sub-version is being parsed.
 * @return Parsed node
 */
private STNode parseSubVersion(ParserRuleContext context) {
    STToken nextToken = peek();
    return parseSubVersion(nextToken.kind, context);
}

private STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) {
    switch (nextTokenKind) {
        case AS_KEYWORD:
        case SEMICOLON_TOKEN:
            // No further sub-versions. Returning null (not an empty node) signals the
            // caller (parseVersionNumber) that this optional component is absent.
            return null;
        case DOT_TOKEN:
            STNode leadingDot = parseDotToken();
            STNode versionNumber = parseDecimalIntLiteral(context);
            return STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber);
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION);

            // If the parser recovered by removing a token, return the recovered node.
            // Otherwise re-attempt parsing with the token kind of the recovery solution.
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseSubVersion(solution.tokenKind, context);
    }
}

/**
 * Parse import prefix declaration.
 * <p>
 * <code>import-prefix-decl := as import-prefix
 * <br/>
 * import-prefix := an identifier | _
 * </code>
 *
 * @return Parsed node
 */
private STNode parseImportPrefixDecl() {
    STToken token = peek();
    return parseImportPrefixDecl(token.kind);
}

private STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case AS_KEYWORD:
            STNode asKeyword = parseAsKeyword();
            STNode prefix = parseImportPrefix();
            return STNodeFactory.createImportPrefixNode(asKeyword, prefix);
        case SEMICOLON_TOKEN:
            // The import prefix is optional; absence is represented by an empty node.
            return STNodeFactory.createEmptyNode();
        default:
            if (isEndOfImportDecl(nextTokenKind)) {
                return STNodeFactory.createEmptyNode();
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL);

            // If the parser recovered by removing a token, return the recovered node.
            // Otherwise re-attempt parsing with the token kind of the recovery solution.
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseImportPrefixDecl(solution.tokenKind);
    }
}

/**
 * Parse <code>as</code> keyword.
 *
 * @return Parsed node
 */
private STNode parseAsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.AS_KEYWORD) {
        return consume();
    } else {
        // Not at an 'as' keyword; attempt error recovery.
        Solution sol = recover(peek(), ParserRuleContext.AS_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse import prefix.
* * @return Parsed node */ private STNode parseImportPrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX); return sol.recoveredNode; } } /** * Parse top level node, given the modifier that precedes it. * * @param qualifier Qualifier that precedes the top level node * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata, STNode qualifier) { STToken token = peek(); return parseTopLevelNode(token.kind, metadata, qualifier); } /** * Parse top level node given the next token kind and the modifier that precedes it. * * @param tokenKind Next token kind * @param qualifier Qualifier that precedes the top level node * @return Parsed top-level node */ private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) { switch (tokenKind) { case FUNCTION_KEYWORD: return parseFunctionDefinition(metadata, getQualifier(qualifier)); case TYPE_KEYWORD: return parseModuleTypeDefinition(metadata, getQualifier(qualifier)); case LISTENER_KEYWORD: return parseListenerDeclaration(metadata, getQualifier(qualifier)); case CONST_KEYWORD: return parseConstantDeclaration(metadata, getQualifier(qualifier)); case ANNOTATION_KEYWORD: STNode constKeyword = STNodeFactory.createEmptyNode(); return parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword); case IMPORT_KEYWORD: reportInvalidQualifier(qualifier); return parseImportDecl(); case XMLNS_KEYWORD: reportInvalidQualifier(qualifier); return parseXMLNamepsaceDeclaration(); case FINAL_KEYWORD: reportInvalidQualifier(qualifier); STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(metadata, finalKeyword, true); case SERVICE_KEYWORD: if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) { reportInvalidQualifier(qualifier); return parseServiceDecl(metadata); } return parseModuleVarDecl(metadata, qualifier); case IDENTIFIER_TOKEN: if 
(isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, qualifier); } default: if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) { return parseModuleVarDecl(metadata, qualifier); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.action == Action.KEEP) { return parseModuleVarDecl(metadata, qualifier); } return parseTopLevelNode(solution.tokenKind, metadata, qualifier); } } private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) { reportInvalidQualifier(qualifier); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(metadata, finalKeyword, true); } private STNode getQualifier(STNode qualifier) { return qualifier == null ? STNodeFactory.createEmptyNode() : qualifier; } private void reportInvalidQualifier(STNode qualifier) { if (qualifier != null && qualifier.kind != SyntaxKind.NONE) { this.errorHandler.reportInvalidNode((STToken) qualifier, "invalid qualifier '" + qualifier.toString().trim() + "'"); } } /** * Parse access modifiers. * * @return Parsed node */ private STNode parseQualifier() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse function definition. A function definition has the following structure. 
* </p> * <code> * function-defn := FUNCTION identifier function-signature function-body * </code> * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @return Parsed node */ private STNode parseFunctionDefinition(STNode metadata, STNode visibilityQualifier) { startContext(ParserRuleContext.FUNC_DEFINITION); STNode functionKeyword = parseFunctionKeyword(); STNode name = parseFunctionName(); STNode openParenthesis = parseOpenParenthesis(); STNode parameters = parseParamList(); STNode closeParenthesis = parseCloseParenthesis(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode body = parseFunctionBody(); endContext(); return STNodeFactory.createFunctionDefinitionNode(metadata, visibilityQualifier, functionKeyword, name, openParenthesis, parameters, closeParenthesis, returnTypeDesc, body); } /** * Parse function keyword. Need to validate the token before consuming, * since we can reach here while recovering. * * @return Parsed node */ private STNode parseFunctionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD); return sol.recoveredNode; } } /** * Parse function name. * * @return Parsed node */ private STNode parseFunctionName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNC_NAME); return sol.recoveredNode; } } /** * Parse open parenthesis. * * @return Parsed node */ private STNode parseOpenParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_PARENTHESIS); return sol.recoveredNode; } } /** * Parse close parenthesis. 
* * @return Parsed node */ private STNode parseCloseParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS); return sol.recoveredNode; } } /** * <p> * Parse parameter list. * </p> * <code> * param-list := required-params [, defaultable-params] [, rest-param] * <br/>&nbsp;| defaultable-params [, rest-param] * <br/>&nbsp;| [rest-param] * <br/><br/> * required-params := required-param (, required-param)* * <br/><br/> * required-param := [annots] [public] type-descriptor [param-name] * <br/><br/> * defaultable-params := defaultable-param (, defaultable-param)* * <br/><br/> * defaultable-param := [annots] [public] type-descriptor [param-name] default-value * <br/><br/> * rest-param := [annots] type-descriptor ... [param-name] * <br/><br/> * param-name := identifier * </code> * * @return Parsed node */ private STNode parseParamList() { startContext(ParserRuleContext.PARAM_LIST); ArrayList<STNode> paramsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } STNode startingComma = STNodeFactory.createEmptyNode(); this.currentParamKind = ParserRuleContext.REQUIRED_PARAM; paramsList.add(parseParameter(startingComma)); token = peek(); while (!isEndOfParametersList(token.kind)) { STNode leadingComma = parseComma(); STNode param = parseParameter(leadingComma); paramsList.add(param); token = peek(); } STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } /** * Parse a single parameter. Parameter can be a required parameter, a defaultable * parameter, or a rest parameter. 
* * @param leadingComma Comma that occurs before the param * @return Parsed node */ private STNode parseParameter(STNode leadingComma) { STToken token = peek(); if (this.currentParamKind == ParserRuleContext.REST_PARAM) { this.errorHandler.reportInvalidNode(token, "cannot have more parameters after the rest-parameter"); startContext(ParserRuleContext.REQUIRED_PARAM); } else { startContext(this.currentParamKind); } return parseParameter(token.kind, leadingComma, 1); } private STNode parseParameter(STNode leadingComma, int nextTokenOffset) { return parseParameter(peek().kind, leadingComma, nextTokenOffset); } private STNode parseParameter(SyntaxKind nextTokenKind, STNode leadingComma, int nextTokenOffset) { STNode annots; switch (nextTokenKind) { case AT_TOKEN: annots = parseAnnotations(nextTokenKind); nextTokenKind = peek().kind; break; case PUBLIC_KEYWORD: annots = STNodeFactory.createNodeList(new ArrayList<>()); break; case IDENTIFIER_TOKEN: if (isParamWithoutAnnotStart(nextTokenOffset)) { annots = STNodeFactory.createNodeList(new ArrayList<>()); STNode qualifier = STNodeFactory.createEmptyNode(); return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier); } default: if (nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN && isTypeStartingToken(nextTokenKind)) { annots = STNodeFactory.createNodeList(new ArrayList<>()); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER, leadingComma, nextTokenOffset); if (solution.action == Action.KEEP) { annots = STNodeFactory.createNodeList(new ArrayList<>()); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameter(solution.tokenKind, leadingComma, 0); } return parseParamGivenAnnots(nextTokenKind, leadingComma, annots, 1); } private STNode parseParamGivenAnnots(STNode leadingComma, STNode annots, int nextNextTokenOffset) { return parseParamGivenAnnots(peek().kind, leadingComma, annots, nextNextTokenOffset); } private STNode 
parseParamGivenAnnots(SyntaxKind nextTokenKind, STNode leadingComma, STNode annots, int nextTokenOffset) { STNode qualifier; switch (nextTokenKind) { case PUBLIC_KEYWORD: qualifier = parseQualifier(); break; case IDENTIFIER_TOKEN: if (isParamWithoutAnnotStart(nextTokenOffset)) { qualifier = STNodeFactory.createEmptyNode(); break; } case AT_TOKEN: default: if (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) { qualifier = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, leadingComma, annots, nextTokenOffset); if (solution.action == Action.KEEP) { qualifier = STNodeFactory.createEmptyNode(); break; } if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParamGivenAnnots(solution.tokenKind, leadingComma, annots, 0); } return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier); } private STNode parseParamGivenAnnotsAndQualifier(STNode leadingComma, STNode annots, STNode qualifier) { STNode type = parseTypeDescriptor(); STNode param = parseAfterParamType(leadingComma, annots, qualifier, type); endContext(); return param; } /** * Check whether the cursor is at the start of a parameter that doesn't have annotations. * * @param tokenOffset Offset of the token to check * @return <code>true</code> if the cursor is at the start of a parameter. <code>false</code> otherwise. 
*/ private boolean isParamWithoutAnnotStart(int tokenOffset) { STToken nextToken = peek(tokenOffset + 1); switch (nextToken.kind) { case PUBLIC_KEYWORD: return isParamWithoutAnnotStart(tokenOffset + 1); case ELLIPSIS_TOKEN: return true; case IDENTIFIER_TOKEN: return true; default: return false; } } private STNode parseAfterParamType(STNode leadingComma, STNode annots, STNode qualifier, STNode type) { STToken token = peek(); return parseAfterParamType(token.kind, leadingComma, annots, qualifier, type); } private STNode parseAfterParamType(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type) { switch (tokenKind) { case ELLIPSIS_TOKEN: this.currentParamKind = ParserRuleContext.REST_PARAM; switchContext(ParserRuleContext.REST_PARAM); reportInvalidQualifier(qualifier); STNode ellipsis = parseEllipsis(); STNode paramName = parseVariableName(); return STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName); case IDENTIFIER_TOKEN: paramName = parseVariableName(); return parseParameterRhs(leadingComma, annots, qualifier, type, paramName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, leadingComma, annots, qualifier, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAfterParamType(solution.tokenKind, leadingComma, annots, qualifier, type); } } /** * Parse ellipsis. * * @return Parsed node */ private STNode parseEllipsis() { STToken token = peek(); if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELLIPSIS); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a required/defaultable parameter. 
* </p> * <code>parameter-rhs := [= expression]</code> * * @param leadingComma Comma that precedes this parameter * @param annots Annotations attached to the parameter * @param qualifier Visibility qualifier * @param type Type descriptor * @param paramName Name of the parameter * @return Parsed parameter node */ private STNode parseParameterRhs(STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { STToken token = peek(); return parseParameterRhs(token.kind, leadingComma, annots, qualifier, type, paramName); } private STNode parseParameterRhs(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { if (isEndOfParameter(tokenKind)) { if (this.currentParamKind == ParserRuleContext.DEFAULTABLE_PARAM) { this.errorHandler.reportInvalidNode(peek(), "cannot have a required parameter after a defaultable parameter"); } return STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName); } else if (tokenKind == SyntaxKind.EQUAL_TOKEN) { if (this.currentParamKind == ParserRuleContext.REQUIRED_PARAM) { this.currentParamKind = ParserRuleContext.DEFAULTABLE_PARAM; switchContext(ParserRuleContext.DEFAULTABLE_PARAM); } STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal, expr); } else { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_RHS, leadingComma, annots, qualifier, type, paramName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameterRhs(solution.tokenKind, leadingComma, annots, qualifier, type, paramName); } } /** * Parse comma. 
* * @return Parsed node */ private STNode parseComma() { STToken token = peek(); if (token.kind == SyntaxKind.COMMA_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMMA); return sol.recoveredNode; } } /** * Check whether the given token is an end of a parameter. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter. <code>false</code> otherwise */ private boolean isEndOfParameter(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case AT_TOKEN: return true; default: return false; } } /** * Check whether the given token is an end of a parameter-list. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise */ private boolean isEndOfParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case OPEN_BRACE_TOKEN: return true; default: return false; } } /** * Parse return type descriptor of a function. A return type descriptor has the following structure. 
* * <code>return-type-descriptor := [ returns annots type-descriptor ]</code> * * @return Parsed node */ private STNode parseReturnTypeDescriptor() { startContext(ParserRuleContext.RETURN_TYPE_DESCRIPTOR); STToken token = peek(); if (token.kind != SyntaxKind.RETURNS_KEYWORD) { endContext(); return STNodeFactory.createEmptyNode(); } STNode returnsKeyword = consume(); STNode annot = parseAnnotations(); STNode type = parseTypeDescriptor(); endContext(); return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type); } /** * <p> * Parse a type descriptor. A type descriptor has the following structure. * </p> * <code>type-descriptor := * &nbsp;simple-type-descriptor<br/> * &nbsp;| structured-type-descriptor<br/> * &nbsp;| behavioral-type-descriptor<br/> * &nbsp;| singleton-type-descriptor<br/> * &nbsp;| union-type-descriptor<br/> * &nbsp;| optional-type-descriptor<br/> * &nbsp;| any-type-descriptor<br/> * &nbsp;| anydata-type-descriptor<br/> * &nbsp;| byte-type-descriptor<br/> * &nbsp;| json-type-descriptor<br/> * &nbsp;| type-descriptor-reference<br/> * &nbsp;| ( type-descriptor ) * <br/> * type-descriptor-reference := qualified-identifier</code> * * @return Parsed node */ private STNode parseTypeDescriptor() { STToken token = peek(); STNode typeDesc = parseTypeDescriptor(token.kind); return parseComplexTypeDescriptor(typeDesc); } /** * This will handle the parsing of optional,array,union type desc to infinite length. 
 *
 * @param typeDesc Type descriptor parsed so far, to which the complex parts get attached
 *
 * @return Parsed type descriptor node
 */
private STNode parseComplexTypeDescriptor(STNode typeDesc) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case QUESTION_MARK_TOKEN:
            // optional-type-descriptor: T?
            return parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc));
        case OPEN_BRACKET_TOKEN:
            // array-type-descriptor: T[...]
            return parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc));
        case PIPE_TOKEN:
            // union-type-descriptor: T1 | T2
            return parseComplexTypeDescriptor(parseUnionTypeDescriptor(typeDesc));
        default:
            return typeDesc;
    }
}

/**
 * <p>
 * Parse a type descriptor, given the next token kind.
 * </p>
 * If the preceding token is <code>?</code> then it is an optional type descriptor
 *
 * @param tokenKind Next token kind
 * @return Parsed node
 */
private STNode parseTypeDescriptor(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case IDENTIFIER_TOKEN:
            // type-descriptor-reference
            return parseTypeReference();
        case RECORD_KEYWORD:
            return parseRecordTypeDescriptor();
        case OBJECT_KEYWORD:
        case ABSTRACT_KEYWORD:
        case CLIENT_KEYWORD:
            // object type descriptor, possibly preceded by abstract/client qualifiers
            return parseObjectTypeDescriptor();
        case OPEN_PAREN_TOKEN:
            return parseNilTypeDescriptor();
        case MAP_KEYWORD: // map type desc
        case FUTURE_KEYWORD: // future type desc
        case TYPEDESC_KEYWORD: // typedesc type desc
            return parseParameterizedTypeDescriptor();
        default:
            if (isSimpleType(tokenKind)) {
                return parseSimpleTypeDescriptor();
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR);

            // If the parser recovered by removing a token, return the recovered node.
            // Otherwise re-attempt parsing with the token kind of the recovery solution.
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseTypeDescriptor(solution.tokenKind);
    }
}

/**
 * Parse simple type descriptor.
 *
 * @return Parsed node
 */
private STNode parseSimpleTypeDescriptor() {
    STToken node = peek();
    if (isSimpleType(node.kind)) {
        STToken token = consume();
        SyntaxKind typeKind = getTypeSyntaxKind(token.kind);
        return STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token);
    } else {
        // Not at a simple (built-in) type token; attempt error recovery.
        Solution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR);
        return sol.recoveredNode;
    }
}

/**
 * <p>
 * Parse function body. A function body has the following structure.
* </p> * <code> * function-body := function-body-block | external-function-body * external-function-body := = annots external ; * function-body-block := { [default-worker-init, named-worker-decl+] default-worker } * </code> * * @return Parsed node */ private STNode parseFunctionBody() { STToken token = peek(); return parseFunctionBody(token.kind); } /** * Parse function body, given the next token kind. * * @param tokenKind Next token kind * @return Parsed node */ protected STNode parseFunctionBody(SyntaxKind tokenKind) { switch (tokenKind) { case EQUAL_TOKEN: return parseExternalFunctionBody(); case OPEN_BRACE_TOKEN: return parseFunctionBodyBlock(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FUNC_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.tokenKind == SyntaxKind.NONE) { return STNodeFactory.createMissingToken(solution.tokenKind); } return parseFunctionBody(solution.tokenKind); } } /** * <p> * Parse function body block. A function body block has the following structure. 
* </p> * * <code> * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/> * default-worker-init := sequence-stmt<br/> * default-worker := sequence-stmt<br/> * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/> * worker-name := identifier<br/> * </code> * * @return Parsed node */ private STNode parseFunctionBodyBlock() { startContext(ParserRuleContext.FUNC_BODY_BLOCK); STNode openBrace = parseOpenBrace(); STToken token = peek(); ArrayList<STNode> firstStmtList = new ArrayList<>(); ArrayList<STNode> workers = new ArrayList<>(); ArrayList<STNode> secondStmtList = new ArrayList<>(); ParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT; boolean hasNamedWorkers = false; while (!isEndOfStatements(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (currentCtx) { case DEFAULT_WORKER_INIT: if (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) { firstStmtList.add(stmt); break; } currentCtx = ParserRuleContext.NAMED_WORKERS; hasNamedWorkers = true; case NAMED_WORKERS: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { workers.add(stmt); break; } currentCtx = ParserRuleContext.DEFAULT_WORKER; case DEFAULT_WORKER: default: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here"); break; } secondStmtList.add(stmt); break; } token = peek(); } STNode namedWorkersList; STNode statements; if (hasNamedWorkers) { STNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList); STNode namedWorkers = STNodeFactory.createNodeList(workers); namedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers); statements = STNodeFactory.createNodeList(secondStmtList); } else { namedWorkersList = STNodeFactory.createEmptyNode(); statements = STNodeFactory.createNodeList(firstStmtList); } STNode closeBrace = parseCloseBrace(); endContext(); return 
STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace); } private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PUBLIC_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.RECORD_FIELD, 1); default: return false; } } private boolean isEndOfObjectTypeNode(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1); default: return false; } } /** * Parse type reference or variable reference. * * @return Parsed node */ private STNode parseStatementStartIdentifier() { return parseQualifiedIdentifier(ParserRuleContext.STATEMENT_START_IDENTIFIER); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName() { STToken token = peek(); return parseVariableName(token.kind); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME); return sol.recoveredNode; } } /** * Parse open brace. * * @return Parsed node */ private STNode parseOpenBrace() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACE); return sol.recoveredNode; } } /** * Parse close brace. 
 *
 * @return Parsed node
 */
private STNode parseCloseBrace() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return consume();
    } else {
        // Not at a close brace; attempt error recovery.
        Solution sol = recover(token, ParserRuleContext.CLOSE_BRACE);
        return sol.recoveredNode;
    }
}

/**
 * <p>
 * Parse external function body. An external function body has the following structure.
 * </p>
 * <code>
 * external-function-body := = annots external ;
 * </code>
 *
 * @return Parsed node
 */
private STNode parseExternalFunctionBody() {
    startContext(ParserRuleContext.EXTERNAL_FUNC_BODY);
    STNode assign = parseAssignOp();
    STNode annotation = parseAnnotations();
    STNode externalKeyword = parseExternalKeyword();
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon);
}

/**
 * Parse semicolon.
 *
 * @return Parsed node
 */
private STNode parseSemicolon() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SEMICOLON_TOKEN) {
        return consume();
    } else {
        // Not at a semicolon; attempt error recovery.
        Solution sol = recover(token, ParserRuleContext.SEMICOLON);
        return sol.recoveredNode;
    }
}

/**
 * Parse <code>external</code> keyword.
 *
 * @return Parsed node
 */
private STNode parseExternalKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.EXTERNAL_KEYWORD) {
        return consume();
    } else {
        // Not at an 'external' keyword; attempt error recovery.
        Solution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD);
        return sol.recoveredNode;
    }
}

/*
 * Operators
 */

/**
 * Parse assign operator.
 *
 * @return Parsed node
 */
private STNode parseAssignOp() {
    STToken token = peek();
    if (token.kind == SyntaxKind.EQUAL_TOKEN) {
        return consume();
    } else {
        // Not at an '=' token; attempt error recovery.
        Solution sol = recover(token, ParserRuleContext.ASSIGN_OP);
        return sol.recoveredNode;
    }
}

/**
 * Parse binary operator.
* * @return Parsed node */ private STNode parseBinaryOperator() { STToken token = peek(); if (isBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BINARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a binary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isBinaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case GT_TOKEN: case LT_TOKEN: case EQUAL_GT_TOKEN: case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case GT_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: case LOGICAL_AND_TOKEN: case LOGICAL_OR_TOKEN: return true; default: return false; } } /** * Get the precedence of a given operator. * * @param binaryOpKind Operator kind * @return Precedence of the given operator */ private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) { switch (binaryOpKind) { case ASTERISK_TOKEN: case SLASH_TOKEN: return OperatorPrecedence.MULTIPLICATIVE; case PLUS_TOKEN: case MINUS_TOKEN: return OperatorPrecedence.ADDITIVE; case GT_TOKEN: case LT_TOKEN: case GT_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case IS_KEYWORD: return OperatorPrecedence.BINARY_COMPARE; case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case OPEN_PAREN_TOKEN: return OperatorPrecedence.MEMBER_ACCESS; case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: return OperatorPrecedence.EQUALITY; case BITWISE_AND_TOKEN: return OperatorPrecedence.BITWISE_AND; case BITWISE_XOR_TOKEN: return OperatorPrecedence.BITWISE_XOR; case PIPE_TOKEN: return OperatorPrecedence.BITWISE_OR; case LOGICAL_AND_TOKEN: return OperatorPrecedence.LOGICAL_AND; case LOGICAL_OR_TOKEN: return OperatorPrecedence.LOGICAL_OR; case 
RIGHT_ARROW_TOKEN: return OperatorPrecedence.ACTION; default: throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'"); } } /** * <p> * Get the operator kind to insert during recovery, given the precedence level. * </p> * * @param opPrecedenceLevel Precedence of the given operator * @return Kind of the operator to insert */ private SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) { switch (opPrecedenceLevel) { case UNARY: case ACTION: case MULTIPLICATIVE: return SyntaxKind.ASTERISK_TOKEN; case ADDITIVE: return SyntaxKind.PLUS_TOKEN; case BINARY_COMPARE: return SyntaxKind.LT_TOKEN; case EQUALITY: return SyntaxKind.DOUBLE_EQUAL_TOKEN; case BITWISE_AND: return SyntaxKind.BITWISE_AND_TOKEN; case BITWISE_XOR: return SyntaxKind.BITWISE_XOR_TOKEN; case BITWISE_OR: return SyntaxKind.PIPE_TOKEN; case LOGICAL_AND: return SyntaxKind.LOGICAL_AND_TOKEN; case LOGICAL_OR: return SyntaxKind.LOGICAL_OR_TOKEN; default: throw new UnsupportedOperationException( "Unsupported operator precedence level'" + opPrecedenceLevel + "'"); } } /** * <p> * Parse a module type definition. * </p> * <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code> * * @param metadata Metadata * @param qualifier Visibility qualifier * @return Parsed node */ private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_TYPE_DEFINITION); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse type keyword. 
* * @return Parsed node */ private STNode parseTypeKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_KEYWORD); return sol.recoveredNode; } } /** * Parse type name. * * @return Parsed node */ private STNode parseTypeName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_NAME); return sol.recoveredNode; } } /** * <p> * Parse record type descriptor. A record type descriptor body has the following structure. * </p> * * <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor * <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* } * <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |} * </code> * * @return Parsed node */ private STNode parseRecordTypeDescriptor() { startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR); STNode recordKeyword = parseRecordKeyword(); STNode bodyStartDelimiter = parseRecordBodyStartDelimiter(); boolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN; STNode fields = parseFieldDescriptors(isInclusive); STNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind); endContext(); return STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields, bodyEndDelimiter); } /** * Parse record body start delimiter. 
* * @return Parsed node */ private STNode parseRecordBodyStartDelimiter() { STToken token = peek(); return parseRecordBodyStartDelimiter(token.kind); } private STNode parseRecordBodyStartDelimiter(SyntaxKind kind) { switch (kind) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyStart(); case OPEN_BRACE_TOKEN: return parseOpenBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_START); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyStartDelimiter(solution.tokenKind); } } /** * Parse closed-record body start delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyStart() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START); return sol.recoveredNode; } } /** * Parse record body close delimiter. * * @return Parsed node */ private STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) { switch (startingDelimeter) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyEnd(); case OPEN_BRACE_TOKEN: return parseCloseBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyCloseDelimiter(solution.tokenKind); } } /** * Parse closed-record body end delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyEnd() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END); return sol.recoveredNode; } } /** * Parse record keyword. 
* * @return Parsed node */ private STNode parseRecordKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RECORD_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RECORD_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse field descriptors. * </p> * * @return Parsed node */ private STNode parseFieldDescriptors(boolean isInclusive) { ArrayList<STNode> recordFields = new ArrayList<>(); STToken token = peek(); boolean endOfFields = false; while (!isEndOfRecordTypeNode(token.kind)) { STNode field = parseFieldOrRestDescriptor(isInclusive); if (field == null) { endOfFields = true; break; } recordFields.add(field); token = peek(); if (field.kind == SyntaxKind.RECORD_REST_TYPE) { break; } } while (!endOfFields && !isEndOfRecordTypeNode(token.kind)) { parseFieldOrRestDescriptor(isInclusive); this.errorHandler.reportInvalidNode(token, "cannot have more fields after the rest type descriptor"); token = peek(); } return STNodeFactory.createNodeList(recordFields); } /** * <p> * Parse field descriptor or rest descriptor. * </p> * * <code> * <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference * <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ; * <br/><br/>field-name := identifier * <br/><br/>default-value := = expression * <br/><br/>record-type-reference := * type-reference ; * <br/><br/>record-rest-descriptor := type-descriptor ... 
; * </code> * * @return Parsed node */ private STNode parseFieldOrRestDescriptor(boolean isInclusive) { return parseFieldOrRestDescriptor(peek().kind, isInclusive); } private STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) { switch (nextTokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: return null; case ASTERISK_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); case AT_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode metadata = parseMetaData(nextTokenKind); type = parseTypeDescriptor(); STNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata); endContext(); return fieldOrRestDesc; default: if (isTypeStartingToken(nextTokenKind)) { startContext(ParserRuleContext.RECORD_FIELD); metadata = createEmptyMetadata(); type = parseTypeDescriptor(nextTokenKind); fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata); endContext(); return fieldOrRestDesc; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldOrRestDescriptor(solution.tokenKind, isInclusive); } } private STNode parseFieldDescriptor(boolean isInclusive, STNode type, STNode metadata) { if (isInclusive) { STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, type, fieldName); } else { return parseFieldOrRestDescriptorRhs(metadata, type); } } /** * Parse type reference. * <code>type-reference := identifier | qualified-identifier</code> * * @return Type reference node */ private STNode parseTypeReference() { return parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE); } /** * Parse identifier or qualified identifier. 
* * @return Identifier node */ private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode typeRefOrPkgRef = consume(); return parseQualifiedIdentifier(typeRefOrPkgRef); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse identifier or qualified identifier, given the starting identifier. * * @param identifier Starting identifier * @return Parse node */ private STNode parseQualifiedIdentifier(STNode identifier) { STToken nextToken = peek(1); if (nextToken.kind != SyntaxKind.COLON_TOKEN) { return STNodeFactory.createSimpleNameReferenceNode(identifier); } STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { STToken colon = consume(); STToken varOrFuncName = consume(); return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName); } else { this.errorHandler.removeInvalidToken(); return parseQualifiedIdentifier(identifier); } } /** * Parse RHS of a field or rest type descriptor. 
* * @param metadata Metadata * @param type Type descriptor * @return Parsed node */ private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) { STToken token = peek(); return parseFieldOrRestDescriptorRhs(token.kind, metadata, type); } private STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) { switch (kind) { case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken); case IDENTIFIER_TOKEN: STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, type, fieldName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type); } } /** * <p> * Parse field descriptor rhs. * </p> * * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(STNode metadata, STNode type, STNode fieldName) { STToken token = peek(); return parseFieldDescriptorRhs(token.kind, metadata, type, fieldName); } /** * <p> * Parse field descriptor rhs. * </p> * * <code> * field-descriptor := [? 
| default-value] ; * <br/>default-value := = expression * </code> * * @param kind Kind of the next token * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type, STNode fieldName) { switch (kind) { case SEMICOLON_TOKEN: STNode questionMarkToken = STNodeFactory.createEmptyNode(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken, semicolonToken); case QUESTION_MARK_TOKEN: questionMarkToken = parseQuestionMark(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken, semicolonToken); case EQUAL_TOKEN: STNode equalsToken = parseAssignOp(); STNode expression = parseExpression(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, type, fieldName, equalsToken, expression, semicolonToken); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldDescriptorRhs(solution.tokenKind, metadata, type, fieldName); } } /** * Parse question mark. * * @return Parsed node */ private STNode parseQuestionMark() { STToken token = peek(); if (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.QUESTION_MARK); return sol.recoveredNode; } } /* * Statements */ /** * Parse statements, until an end of a block is reached. 
* * @return Parsed node */ private STNode parseStatements() { STToken token = peek(); ArrayList<STNode> stmts = new ArrayList<>(); while (!isEndOfStatements(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here"); break; } stmts.add(stmt); token = peek(); } return STNodeFactory.createNodeList(stmts); } private boolean isEndOfStatements(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.STATEMENT, 1); default: return false; } } /** * Parse a single statement. * * @return Parsed node */ protected STNode parseStatement() { STToken token = peek(); return parseStatement(token.kind); } private STNode parseStatement(SyntaxKind tokenKind) { STNode annots = null; switch (tokenKind) { case CLOSE_BRACE_TOKEN: return null; case SEMICOLON_TOKEN: this.errorHandler.removeInvalidToken(); return parseStatement(); case AT_TOKEN: annots = parseAnnotations(tokenKind); tokenKind = peek().kind; break; case FINAL_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case PANIC_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case CONTINUE_KEYWORD: case BREAK_KEYWORD: case RETURN_KEYWORD: case TYPE_KEYWORD: case LOCK_KEYWORD: case OPEN_BRACE_TOKEN: case FORK_KEYWORD: case FOREACH_KEYWORD: case WORKER_KEYWORD: break; default: if (isTypeStartingToken(tokenKind)) { break; } if (isValidLHSExpression(tokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatement(solution.tokenKind); } return parseStatement(tokenKind, annots); } private STNode getAnnotations(STNode nullbaleAnnot) { if (nullbaleAnnot != null) { return nullbaleAnnot; } return STNodeFactory.createNodeList(new ArrayList<>()); } 
private STNode parseStatement(STNode annots) {
    return parseStatement(peek().kind, annots);
}

/**
 * Parse a single statement, given the next token kind.
 *
 * @param tokenKind Next token kind
 * @param annots Parsed leading annotations, or <code>null</code> if there were none
 * @return Parsed node, or <code>null</code> at the end of a statement block
 */
private STNode parseStatement(SyntaxKind tokenKind, STNode annots) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
            // Annotations at the end of a block have no statement to attach to.
            this.errorHandler.reportInvalidNode(null, "invalid annotations");
            return null;
        case SEMICOLON_TOKEN:
            this.errorHandler.removeInvalidToken();
            // Re-peek after removing the invalid token. Recursing with the stale
            // tokenKind (still SEMICOLON_TOKEN) would re-enter this case and
            // keep removing tokens until the input is exhausted.
            return parseStatement(peek().kind, annots);
        case FINAL_KEYWORD:
            STNode finalKeyword = parseFinalKeyword();
            return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
        case IF_KEYWORD:
            return parseIfElseBlock();
        case WHILE_KEYWORD:
            return parseWhileStatement();
        case PANIC_KEYWORD:
            return parsePanicStatement();
        case CONTINUE_KEYWORD:
            return parseContinueStatement();
        case BREAK_KEYWORD:
            return parseBreakStatement();
        case RETURN_KEYWORD:
            return parseReturnStatement();
        case TYPE_KEYWORD:
            return parseLocalTypeDefinitionStatement(getAnnotations(annots));
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
            return parseStamentStartsWithExpr(tokenKind);
        case IDENTIFIER_TOKEN:
            return parseStatementStartsWithIdentifier(getAnnotations(annots));
        case LOCK_KEYWORD:
            return parseLockStatement();
        case OPEN_BRACE_TOKEN:
            return parseBlockNode();
        case WORKER_KEYWORD:
            return parseNamedWorkerDeclaration(getAnnotations(annots));
        case FORK_KEYWORD:
            return parseForkStatement();
        case FOREACH_KEYWORD:
            return parseForEachStatement();
        default:
            // Any type-starting token begins a variable declaration without 'final'.
            if (isTypeStartingToken(tokenKind)) {
                finalKeyword = STNodeFactory.createEmptyNode();
                return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
            }

            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots);

            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }

            return parseStatement(solution.tokenKind, annots);
    }
}

private STNode getNextNextToken(SyntaxKind tokenKind) {
    // If the next token already matches the given kind, look one past it;
    // otherwise the "next next" token of interest is simply the next one.
    return peek(1).kind == tokenKind ? peek(2) : peek(1);
}

/**
 * <p>
 * Parse variable declaration. Variable declaration can be a local or module level.
 * </p>
 *
 * <code>
 * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt
 * <br/><br/>
 * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ;
 * <br/><br/>
 * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ;
 * </code>
 *
 * @param annots Annotations or metadata
 * @param finalKeyword Final keyword
 * @param isModuleVar Flag indicating whether the var is module level
 * @return Parsed node
 */
private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) {
    startContext(ParserRuleContext.VAR_DECL_STMT);
    STNode type = parseTypeDescriptor();
    STNode varName = parseVariableName();
    STNode varDecl = parseVarDeclRhs(annots, finalKeyword, type, varName, isModuleVar);
    endContext();
    return varDecl;
}

/**
 * Parse final keyword.
 *
 * @return Parsed node
 */
private STNode parseFinalKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.FINAL_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.FINAL_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * <p>
 * Parse the right hand side of a variable declaration statement.
 * </p>
 * <code>
 * var-decl-rhs := ; | = action-or-expr ;
 * </code>
 *
 * @param metadata metadata
 * @param finalKeyword Final keyword
 * @param type Type descriptor
 * @param varName Variable name
 * @param isModuleVar Flag indicating whether the var is module level
 * @return Parsed node
 */
private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode type, STNode varName,
                               boolean isModuleVar) {
    STToken token = peek();
    return parseVarDeclRhs(token.kind, metadata, finalKeyword, type, varName, isModuleVar);
}

/**
 * Parse the right hand side of a variable declaration statement, given the
 * next token kind.
* * @param tokenKind Next token kind * @param metadata Metadata * @param finalKeyword Final keyword * @param type Type descriptor * @param varName Variable name * @param isModuleVar flag indicating whether the var is module level * @return Parsed node */ private STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword, STNode type, STNode varName, boolean isModuleVar) { STNode assign; STNode expr; STNode semicolon; switch (tokenKind) { case EQUAL_TOKEN: assign = parseAssignOp(); if (isModuleVar) { expr = parseExpression(); } else { expr = parseActionOrExpression(); } semicolon = parseSemicolon(); break; case SEMICOLON_TOKEN: if (isModuleVar) { this.errorHandler.reportMissingTokenError("assignment required"); } assign = STNodeFactory.createEmptyNode(); expr = STNodeFactory.createEmptyNode(); semicolon = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword, type, varName, isModuleVar); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, type, varName, isModuleVar); } if (isModuleVar) { return STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr, semicolon); } return STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr, semicolon); } /** * <p> * Parse the RHS portion of the assignment. * </p> * <code>assignment-stmt-rhs := = action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseAssignmentStmtRhs(STNode lvExpr) { validateLVExpr(lvExpr); STNode assign = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon); } /* * Expressions */ /** * Parse expression. 
This will start parsing expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, false); } /** * Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseActionOrExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, true); } private STNode parseActionOrExpression(SyntaxKind tokenKind) { return parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, true, true); } private STNode parseActionOrExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true); } /** * Parse expression. * * @param isRhsExpr Flag indicating whether this is a rhs expression * @return Parsed node */ private STNode parseExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false); } private void validateLVExpr(STNode expression) { if (isValidLVExpr(expression)) { return; } this.errorHandler.reportInvalidNode(null, "invalid expression for assignment lhs"); } private boolean isValidLVExpr(STNode expression) { switch (expression.kind) { case IDENTIFIER_TOKEN: case QUALIFIED_NAME_REFERENCE: case SIMPLE_NAME_REFERENCE: return true; case FIELD_ACCESS: return isValidLVExpr(((STFieldAccessExpressionNode) expression).expression); case MEMBER_ACCESS: return isValidLVExpr(((STMemberAccessExpressionNode) expression).containerExpression); default: return false; } } /** * Parse an expression that has an equal or higher precedence than a given level. 
* * @param precedenceLevel Precedence level of expression to be parsed * @param isRhsExpr Flag indicating whether this is a rhs expression * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STToken token = peek(); return parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions); } private STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions); return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions); } /** * Parse terminal expressions. A terminal expression has the highest precedence level * out of all expressions, and will be at the leaves of an expression tree. * * @param isRhsExpr Is a rhs expression * @param allowActions Allow actions * @return Parsed node */ private STNode parseTerminalExpression(boolean isRhsExpr, boolean allowActions) { return parseTerminalExpression(peek().kind, isRhsExpr, allowActions); } private STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions) { switch (kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: return parseBasicLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); case OPEN_PAREN_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteral(); } return parseBracedExpression(isRhsExpr, allowActions); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseCheckExpression(isRhsExpr, allowActions); case OPEN_BRACE_TOKEN: return parseMappingConstructorExpr(); case TYPEOF_KEYWORD: return 
parseTypeofExpression(isRhsExpr); case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return parseUnaryExpression(isRhsExpr); case TRAP_KEYWORD: return parseTrapExpression(isRhsExpr); case OPEN_BRACKET_TOKEN: return parseListConstructorExpr(); case LT_TOKEN: return parseTypeCastExpr(); case TABLE_KEYWORD: return parseTableConstructorExpr(); default: Solution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, isRhsExpr, allowActions); if (solution.recoveredNode.kind == SyntaxKind.IDENTIFIER_TOKEN) { return parseQualifiedIdentifier(solution.recoveredNode); } if (solution.recoveredNode.kind == SyntaxKind.OPEN_PAREN_TOKEN && peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteral(); } if (solution.recoveredNode.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return parseListConstructorExpr(); } if (solution.recoveredNode.kind == SyntaxKind.LT_TOKEN) { return parseTypeCastExpr(); } if (solution.recoveredNode.kind == SyntaxKind.TABLE_KEYWORD) { return parseTableConstructorExpr(); } return solution.recoveredNode; } } private STNode parseActionOrExpressionInLhs(STNode lhsExpr) { return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, lhsExpr, false, true); } /** * <p> * Parse the right-hand-side of an expression. 
* </p> * <code>expr-rhs := (binary-op expression * | dot identifier * | open-bracket expression close-bracket * )*</code> * * @param precedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression of the expression * @param isLVExpr Flag indicating whether this is on a lhsExpr of a statement * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isLVExpr, boolean allowActions) { STToken token = peek(); return parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isLVExpr, allowActions); } /** * Parse the right hand side of an expression given the next token kind. * * @param tokenKind Next token kind * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression * @param isRhsExpr Flag indicating whether this is a rhs expr or not * @param allowActions Flag indicating whether to allow actions or not * @return Parsed node */ private STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions) { if (isEndOfExpression(tokenKind, isRhsExpr)) { return lhsExpr; } if (!isValidExprRhsStart(tokenKind)) { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) { SyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel); return parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); } else { return parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); } } OperatorPrecedence nextOperatorPrecedence = 
getOpPrecedence(tokenKind); if (currentPrecedenceLevel.isHigherThan(nextOperatorPrecedence)) { return lhsExpr; } STNode newLhsExpr; switch (tokenKind) { case OPEN_PAREN_TOKEN: newLhsExpr = parseFuncCall(lhsExpr); break; case OPEN_BRACKET_TOKEN: newLhsExpr = parseMemberAccessExpr(lhsExpr); break; case DOT_TOKEN: newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr); break; case IS_KEYWORD: newLhsExpr = parseTypeTestExpression(lhsExpr); break; case RIGHT_ARROW_TOKEN: newLhsExpr = parseAction(tokenKind, lhsExpr); if (!allowActions) { this.errorHandler.reportInvalidNode(null, "actions are not allowed here"); } break; default: STNode operator = parseBinaryOperator(); STNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false); newLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator, rhsExpr); break; } return parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions); } private boolean isValidExprRhsStart(SyntaxKind tokenKind) { switch (tokenKind) { case OPEN_PAREN_TOKEN: case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case IS_KEYWORD: case RIGHT_ARROW_TOKEN: return true; default: return isBinaryOperator(tokenKind); } } /** * Parse member access expression. * * @param lhsExpr Container expression * @return Member access expression */ private STNode parseMemberAccessExpr(STNode lhsExpr) { STNode openBracket = consume(); STNode keyExpr; if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { this.errorHandler.reportMissingTokenError("missing expression"); keyExpr = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { keyExpr = parseExpression(); } STNode closeBracket = parseCloseBracket(); return STNodeFactory.createMemberAccessExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket); } /** * Parse close bracket. 
     *
     * @return Parsed node
     */
    private STNode parseCloseBracket() {
        STToken token = peek();
        if (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
            return consume();
        } else {
            // Not a close bracket: let the error handler recover (insert/remove a token).
            Solution sol = recover(token, ParserRuleContext.CLOSE_BRACKET);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse field access expression and method call expression.
     *
     * @param lhsExpr Preceding expression of the field access or method call
     * @return One of <code>field-access-expression</code> or <code>method-call-expression</code>.
     */
    private STNode parseFieldAccessOrMethodCall(STNode lhsExpr) {
        STNode dotToken = parseDotToken();
        STNode fieldOrMethodName = parseIdentifier(ParserRuleContext.FIELD_OR_FUNC_NAME);

        // An open-paren right after the field name means this is a method call.
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
            STNode openParen = parseOpenParenthesis();
            STNode args = parseArgsList();
            STNode closeParen = parseCloseParenthesis();
            return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen,
                    args, closeParen);
        }

        return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);
    }

    /**
     * <p>
     * Parse braced expression.
     * </p>
     * <code>braced-expr := ( expression )</code>
     *
     * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement
     * @param allowActions Allow actions
     * @return Parsed node
     */
    private STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) {
        STNode openParen = parseOpenParenthesis();
        STNode expr;
        if (allowActions) {
            expr = parseActionOrExpression(isRhsExpr);
        } else {
            expr = parseExpression(isRhsExpr);
        }
        STNode closeParen = parseCloseParenthesis();

        // A braced node that wraps an action is itself an action.
        if (isAction(expr)) {
            return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen);
        } else {
            return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr,
                    closeParen);
        }
    }

    /**
     * Check whether a given node is an action node.
     *
     * @param node Node to check
     * @return <code>true</code> if the node is an action node. <code>false</code> otherwise
     */
    private boolean isAction(STNode node) {
        switch (node.kind) {
            case REMOTE_METHOD_CALL_ACTION:
            case BRACED_ACTION:
            case CHECK_ACTION:
                return true;
            default:
                return false;
        }
    }

    /**
     * Check whether the given token is an end of a expression.
     *
     * @param tokenKind Token to check
     * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement
     * @return <code>true</code> if the token represents an end of a block. <code>false</code> otherwise
     */
    private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr) {
        if (!isRhsExpr) {
            // LHS expressions end earlier: a compound operator (e.g. "+=")
            // or anything that cannot start an expression-rhs terminates them.
            if (isCompoundBinaryOperator(tokenKind)) {
                return true;
            }
            return !isValidExprRhsStart(tokenKind);
        }

        switch (tokenKind) {
            case CLOSE_BRACE_TOKEN:
            case OPEN_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case SEMICOLON_TOKEN:
            case COMMA_TOKEN:
            case PUBLIC_KEYWORD:
            case FUNCTION_KEYWORD:
            case EOF_TOKEN:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case EQUAL_TOKEN:
            case AT_TOKEN:
            case DOCUMENTATION_LINE:
            case AS_KEYWORD:
                return true;
            default:
                // A token that starts a (simple) type also terminates the expression.
                return isSimpleType(tokenKind);
        }
    }

    /**
     * Parse basic literals. It is assumed that we come here after validation.
     *
     * @return Parsed node
     */
    private STNode parseBasicLiteral() {
        STToken literalToken = consume();
        return STNodeFactory.createBasicLiteralNode(literalToken.kind, literalToken);
    }

    /**
     * Parse function call expression.
     * <code>function-call-expr := function-reference ( arg-list )
     * function-reference := variable-reference</code>
     *
     * @param identifier Function name
     * @return Function call expression
     */
    private STNode parseFuncCall(STNode identifier) {
        STNode openParen = parseOpenParenthesis();
        STNode args = parseArgsList();
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen);
    }

    /**
     * Parse function call argument list.
* * @return Parsed agrs list */ private STNode parseArgsList() { startContext(ParserRuleContext.ARG_LIST); ArrayList<STNode> argsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } SyntaxKind lastProcessedArgKind = parseFirstArg(argsList); parseFollowUpArg(argsList, lastProcessedArgKind); STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } /** * Parse the first argument of a function call. * * @param argsList Arguments list to which the parsed argument must be added * @return Kind of the argument first argument. */ private SyntaxKind parseFirstArg(ArrayList<STNode> argsList) { startContext(ParserRuleContext.ARG); STNode leadingComma = STNodeFactory.createEmptyNode(); STNode arg = parseArg(leadingComma); endContext(); if (SyntaxKind.POSITIONAL_ARG.ordinal() <= arg.kind.ordinal()) { argsList.add(arg); return arg.kind; } else { reportInvalidOrderOfArgs(peek(), SyntaxKind.POSITIONAL_ARG, arg.kind); return SyntaxKind.POSITIONAL_ARG; } } /** * Parse follow up arguments. 
* * @param argsList Arguments list to which the parsed argument must be added * @param lastProcessedArgKind Kind of the argument processed prior to this */ private void parseFollowUpArg(ArrayList<STNode> argsList, SyntaxKind lastProcessedArgKind) { STToken nextToken = peek(); while (!isEndOfParametersList(nextToken.kind)) { startContext(ParserRuleContext.ARG); STNode leadingComma = parseComma(); nextToken = peek(); if (isEndOfParametersList(nextToken.kind)) { this.errorHandler.reportInvalidNode((STToken) leadingComma, "invalid token " + leadingComma); endContext(); break; } STNode arg = parseArg(nextToken.kind, leadingComma); if (lastProcessedArgKind.ordinal() <= arg.kind.ordinal()) { if (lastProcessedArgKind == SyntaxKind.REST_ARG && arg.kind == SyntaxKind.REST_ARG) { this.errorHandler.reportInvalidNode(nextToken, "cannot more than one rest arg"); } else { argsList.add(arg); lastProcessedArgKind = arg.kind; } } else { reportInvalidOrderOfArgs(nextToken, lastProcessedArgKind, arg.kind); } nextToken = peek(); endContext(); } } /** * Report invalid order of args. * * @param token Staring token of the arg. * @param lastArgKind Kind of the previously processed arg * @param argKind Current arg */ private void reportInvalidOrderOfArgs(STToken token, SyntaxKind lastArgKind, SyntaxKind argKind) { this.errorHandler.reportInvalidNode(token, "cannot have a " + argKind + " after the " + lastArgKind); } /** * Parse function call argument. 
* * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseArg(STNode leadingComma) { STToken token = peek(); return parseArg(token.kind, leadingComma); } private STNode parseArg(SyntaxKind kind, STNode leadingComma) { STNode arg; switch (kind) { case ELLIPSIS_TOKEN: STToken ellipsis = consume(); STNode expr = parseExpression(); arg = STNodeFactory.createRestArgumentNode(leadingComma, ellipsis, expr); break; case IDENTIFIER_TOKEN: arg = parseNamedOrPositionalArg(leadingComma); break; case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: default: expr = parseExpression(); arg = STNodeFactory.createPositionalArgumentNode(leadingComma, expr); break; } return arg; } /** * Parse positional or named arg. This method assumed peek()/peek(1) * is always an identifier. * * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseNamedOrPositionalArg(STNode leadingComma) { STToken secondToken = peek(2); switch (secondToken.kind) { case EQUAL_TOKEN: STNode argNameOrVarRef = consume(); STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createNamedArgumentNode(leadingComma, argNameOrVarRef, equal, expr); case COMMA_TOKEN: case CLOSE_PAREN_TOKEN: argNameOrVarRef = consume(); return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrVarRef); case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case IDENTIFIER_TOKEN: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: default: expr = parseExpression(); return STNodeFactory.createPositionalArgumentNode(leadingComma, expr); } } /** * Parse object type descriptor. 
     *
     * @return Parsed node
     */
    private STNode parseObjectTypeDescriptor() {
        startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR);
        STNode objectTypeQualifiers = parseObjectTypeQualifiers();
        STNode objectKeyword = parseObjectKeyword();
        STNode openBrace = parseOpenBrace();
        STNode objectMembers = parseObjectMembers();
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace,
                objectMembers, closeBrace);
    }

    /**
     * Parse object type qualifiers.
     *
     * @return Parsed node
     */
    private STNode parseObjectTypeQualifiers() {
        STToken nextToken = peek();
        return parseObjectTypeQualifiers(nextToken.kind);
    }

    private STNode parseObjectTypeQualifiers(SyntaxKind kind) {
        List<STNode> qualifiers = new ArrayList<>();
        STNode firstQualifier;
        switch (kind) {
            case CLIENT_KEYWORD:
                STNode clientKeyword = parseClientKeyword();
                firstQualifier = clientKeyword;
                break;
            case ABSTRACT_KEYWORD:
                STNode abstractKeyword = parseAbstractKeyword();
                firstQualifier = abstractKeyword;
                break;
            case OBJECT_KEYWORD:
                // No qualifiers present.
                return STNodeFactory.createNodeList(qualifiers);
            default:
                Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseObjectTypeQualifiers(solution.tokenKind);
        }

        // An optional second qualifier may follow (e.g. "client abstract object ...").
        STNode secondQualifier = parseObjectTypeSecondQualifier(firstQualifier);
        qualifiers.add(firstQualifier);
        if (secondQualifier != null) {
            qualifiers.add(secondQualifier);
        }
        return STNodeFactory.createNodeList(qualifiers);
    }

    private STNode parseObjectTypeSecondQualifier(STNode firstQualifier) {
        STToken nextToken = peek();
        return parseObjectTypeSecondQualifier(nextToken.kind, firstQualifier);
    }

    private STNode parseObjectTypeSecondQualifier(SyntaxKind kind, STNode firstQualifier) {
        // The same qualifier cannot repeat; only the other qualifier (or none) may follow.
        if (firstQualifier.kind != kind) {
            switch (kind) {
                case CLIENT_KEYWORD:
                    return parseClientKeyword();
                case ABSTRACT_KEYWORD:
                    return parseAbstractKeyword();
                case OBJECT_KEYWORD:
                    // No second qualifier.
                    return null;
                default:
                    break;
            }
        }

        Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_SECOND_QUALIFIER, firstQualifier);
        if (solution.action == Action.REMOVE) {
            return solution.recoveredNode;
        }
        return parseObjectTypeSecondQualifier(solution.tokenKind, firstQualifier);
    }

    /**
     * Parse client keyword.
     *
     * @return Parsed node
     */
    private STNode parseClientKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.CLIENT_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse abstract keyword.
     *
     * @return Parsed node
     */
    private STNode parseAbstractKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ABSTRACT_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse object keyword.
     *
     * @return Parsed node
     */
    private STNode parseObjectKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.OBJECT_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse object members.
     *
     * @return Parsed node
     */
    private STNode parseObjectMembers() {
        ArrayList<STNode> objectMembers = new ArrayList<>();
        STToken nextToken = peek();
        while (!isEndOfObjectTypeNode(nextToken.kind)) {
            startContext(ParserRuleContext.OBJECT_MEMBER);
            STNode member = parseObjectMember(nextToken.kind);
            endContext();

            // null marks an unexpected terminator (EOF or close-brace) inside a member.
            if (member == null) {
                break;
            }
            objectMembers.add(member);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(objectMembers);
    }

    private STNode parseObjectMember() {
        STToken nextToken = peek();
        return parseObjectMember(nextToken.kind);
    }

    private STNode parseObjectMember(SyntaxKind nextTokenKind) {
        // First resolve the optional metadata, then dispatch on the (possibly re-peeked) token.
        STNode metadata;
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return null;
            case ASTERISK_TOKEN:
            case PUBLIC_KEYWORD:
            case PRIVATE_KEYWORD:
            case REMOTE_KEYWORD:
            case FUNCTION_KEYWORD:
                metadata = createEmptyMetadata();
                break;
            case DOCUMENTATION_LINE:
            case AT_TOKEN:
                metadata = parseMetaData(nextTokenKind);
                // Metadata consumed tokens; re-peek for the member itself.
                nextTokenKind = peek().kind;
                break;
            default:
                if (isTypeStartingToken(nextTokenKind)) {
                    metadata = createEmptyMetadata();
                    break;
                }

                Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseObjectMember(solution.tokenKind);
        }

        return parseObjectMember(nextTokenKind, metadata);
    }

    private STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) {
        STNode member;
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return null;
            case ASTERISK_TOKEN:
                // Type reference member: "*TypeName;"
                STNode asterisk = consume();
                STNode type = parseTypeReference();
                STNode semicolonToken = parseSemicolon();
                member = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);
                break;
            case PUBLIC_KEYWORD:
            case PRIVATE_KEYWORD:
                STNode visibilityQualifier = parseObjectMemberVisibility();
                member = parseObjectMethodOrField(metadata, visibilityQualifier);
                break;
            case REMOTE_KEYWORD:
                member = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode());
                break;
            case FUNCTION_KEYWORD:
                member = parseObjectMethod(metadata, STNodeFactory.createEmptyNode());
                break;
            default:
                if (isTypeStartingToken(nextTokenKind)) {
                    member = parseObjectField(metadata, STNodeFactory.createEmptyNode());
                    break;
                }

                Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseObjectMember(solution.tokenKind);
        }

        return member;
    }

    private STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) {
        STToken nextToken = peek(1);
        STToken nextNextToken = peek(2);
        return parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers);
    }

    /**
     * Parse an object member, given the visibility modifier. Object member can have
     * only one visibility qualifier. This means the methodQualifiers list can have
     * one qualifier at-most.
     *
     * @param visibilityQualifiers Visibility qualifiers. A modifier can be
     * a syntax node with either 'PUBLIC' or 'PRIVATE'.
* @param nextTokenKind Next token kind * @param nextNextTokenKind Kind of the token after the * @param metadata Metadata * @param visibilityQualifiers Visibility qualifiers * @return Parse object member node */ private STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata, STNode visibilityQualifiers) { switch (nextTokenKind) { case REMOTE_KEYWORD: STNode remoteKeyword = parseRemoteKeyword(); ArrayList<STNode> methodQualifiers = new ArrayList<>(); if (visibilityQualifiers.kind != SyntaxKind.NONE) { methodQualifiers.add(visibilityQualifiers); } methodQualifiers.add(remoteKeyword); return parseObjectMethod(metadata, STNodeFactory.createNodeList(methodQualifiers)); case FUNCTION_KEYWORD: return parseObjectMethod(metadata, visibilityQualifiers); case IDENTIFIER_TOKEN: if (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) { return parseObjectField(metadata, visibilityQualifiers); } break; default: if (isTypeStartingToken(nextTokenKind)) { return parseObjectField(metadata, visibilityQualifiers); } break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata, visibilityQualifiers); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifiers); } /** * Parse object visibility. Visibility can be <code>public</code> or <code>private</code>. 
     *
     * @return Parsed node
     */
    private STNode parseObjectMemberVisibility() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) {
            return consume();
        } else {
            // NOTE(review): recovery uses the PUBLIC_KEYWORD context even though
            // "private" is also accepted here -- confirm this is intended.
            Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);
            return sol.recoveredNode;
        }
    }

    private STNode parseRemoteKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.REMOTE_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD);
            return sol.recoveredNode;
        }
    }

    private STNode parseObjectField(STNode metadata, STNode methodQualifiers) {
        STNode type = parseTypeDescriptor();
        STNode fieldName = parseVariableName();
        return parseObjectFieldRhs(metadata, methodQualifiers, type, fieldName);
    }

    /**
     * Parse object field rhs, and complete the object field parsing. Returns the parsed object field.
     *
     * @param metadata Metadata
     * @param visibilityQualifier Visibility qualifier
     * @param type Type descriptor
     * @param fieldName Field name
     * @return Parsed object field
     */
    private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) {
        STToken nextToken = peek();
        return parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, type, fieldName);
    }

    /**
     * Parse object field rhs, and complete the object field parsing. Returns the parsed object field.
     *
     * @param nextTokenKind Kind of the next token
     * @param metadata Metadata
     * @param visibilityQualifier Visibility qualifier
     * @param type Type descriptor
     * @param fieldName Field name
     * @return Parsed object field
     */
    private STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier,
                                       STNode type, STNode fieldName) {
        STNode equalsToken;
        STNode expression;
        STNode semicolonToken;
        switch (nextTokenKind) {
            case SEMICOLON_TOKEN:
                // Field without a default value: "T f;"
                equalsToken = STNodeFactory.createEmptyNode();
                expression = STNodeFactory.createEmptyNode();
                semicolonToken = parseSemicolon();
                break;
            case EQUAL_TOKEN:
                // Field with a default value: "T f = expr;"
                equalsToken = parseAssignOp();
                expression = parseExpression();
                semicolonToken = parseSemicolon();
                break;
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier,
                        type, fieldName);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, type, fieldName);
        }

        return STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, type, fieldName, equalsToken,
                expression, semicolonToken);
    }

    private STNode parseObjectMethod(STNode metadata, STNode methodQualifiers) {
        return parseFunctionDefinition(metadata, methodQualifiers);
    }

    /**
     * Parse if-else statement.
     * <code>
     * if-else-stmt := if expression block-stmt [else-block]
     * </code>
     *
     * @return If-else block
     */
    private STNode parseIfElseBlock() {
        startContext(ParserRuleContext.IF_BLOCK);
        STNode ifKeyword = parseIfKeyword();
        STNode condition = parseExpression();
        STNode ifBody = parseBlockNode();
        endContext();

        STNode elseBody = parseElseBlock();
        return STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody);
    }

    /**
     * Parse if-keyword.
     *
     * @return Parsed if-keyword node
     */
    private STNode parseIfKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.IF_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.IF_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse else-keyword.
     *
     * @return Parsed else keyword node
     */
    private STNode parseElseKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ELSE_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.ELSE_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse block node.
     * <code>
     * block-stmt := { sequence-stmt }
     * sequence-stmt := statement*
     * </code>
     *
     * @return Parse block node
     */
    private STNode parseBlockNode() {
        startContext(ParserRuleContext.BLOCK_STMT);
        STNode openBrace = parseOpenBrace();
        STNode stmts = parseStatements();
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace);
    }

    /**
     * Parse else block.
     * <code>else-block := else (if-else-stmt | block-stmt)</code>
     *
     * @return Else block
     */
    private STNode parseElseBlock() {
        STToken nextToken = peek();
        // The else block is optional; absence is represented by an empty node.
        if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) {
            return STNodeFactory.createEmptyNode();
        }

        STNode elseKeyword = parseElseKeyword();
        STNode elseBody = parseElseBody();
        return STNodeFactory.createElseBlockNode(elseKeyword, elseBody);
    }

    /**
     * Parse else node body.
     * <code>else-body := if-else-stmt | block-stmt</code>
     *
     * @return Else node body
     */
    private STNode parseElseBody() {
        STToken nextToken = peek();
        return parseElseBody(nextToken.kind);
    }

    private STNode parseElseBody(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case IF_KEYWORD:
                // "else if ..." chains by nesting another if-else statement.
                return parseIfElseBlock();
            case OPEN_BRACE_TOKEN:
                return parseBlockNode();
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.ELSE_BODY);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseElseBody(solution.tokenKind);
        }
    }

    /**
     * Parse while statement.
     * <code>while-stmt := while expression block-stmt</code>
     *
     * @return While statement
     */
    private STNode parseWhileStatement() {
        startContext(ParserRuleContext.WHILE_BLOCK);
        STNode whileKeyword = parseWhileKeyword();
        STNode condition = parseExpression();
        STNode whileBody = parseBlockNode();
        endContext();
        return STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody);
    }

    /**
     * Parse while-keyword.
     *
     * @return While-keyword node
     */
    private STNode parseWhileKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.WHILE_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.WHILE_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse panic statement.
     * <code>panic-stmt := panic expression ;</code>
     *
     * @return Panic statement
     */
    private STNode parsePanicStatement() {
        startContext(ParserRuleContext.PANIC_STMT);
        STNode panicKeyword = parsePanicKeyword();
        STNode expression = parseExpression();
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon);
    }

    /**
     * Parse panic-keyword.
     *
     * @return Panic-keyword node
     */
    private STNode parsePanicKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PANIC_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.PANIC_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse check expression. This method is used to parse both check expression
     * as well as check action.
     *
     * <p>
     * <code>
     * checking-expr := checking-keyword expression
     * checking-action := checking-keyword action
     * </code>
     *
     * @param isRhsExpr Is rhs expression
     * @param allowActions Allow actions
     * @return Check expression node
     */
    private STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions) {
        STNode checkingKeyword = parseCheckingKeyword();
        STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, allowActions);
        // The checked node's kind (expression vs action) propagates to the result.
        if (isAction(expr)) {
            return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr);
        } else {
            return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr);
        }
    }

    /**
     * Parse checking keyword.
     * <p>
     * <code>
     * checking-keyword := check | checkpanic
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseCheckingKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse continue statement.
     * <code>continue-stmt := continue ; </code>
     *
     * @return continue statement
     */
    private STNode parseContinueStatement() {
        startContext(ParserRuleContext.CONTINUE_STATEMENT);
        STNode continueKeyword = parseContinueKeyword();
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createContinueStatementNode(continueKeyword, semicolon);
    }

    /**
     * Parse continue-keyword.
     *
     * @return continue-keyword node
     */
    private STNode parseContinueKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.CONTINUE_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse return statement.
     * <code>return-stmt := return [ action-or-expr ] ;</code>
     *
     * @return Return statement
     */
    private STNode parseReturnStatement() {
        startContext(ParserRuleContext.RETURN_STMT);
        STNode returnKeyword = parseReturnKeyword();
        // The rhs parser builds and returns the complete return-statement node.
        STNode returnRhs = parseReturnStatementRhs(returnKeyword);
        endContext();
        return returnRhs;
    }

    /**
     * Parse return-keyword.
     *
     * @return Return-keyword node
     */
    private STNode parseReturnKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RETURN_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.RETURN_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse break statement.
     * <code>break-stmt := break ; </code>
     *
     * @return break statement
     */
    private STNode parseBreakStatement() {
        startContext(ParserRuleContext.BREAK_STATEMENT);
        STNode breakKeyword = parseBreakKeyword();
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createBreakStatementNode(breakKeyword, semicolon);
    }

    /**
     * Parse break-keyword.
     *
     * @return break-keyword node
     */
    private STNode parseBreakKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.BREAK_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.BREAK_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * <p>
     * Parse the right hand side of a return statement.
     * </p>
     * <code>
     * return-stmt-rhs := ; | action-or-expr ;
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseReturnStatementRhs(STNode returnKeyword) {
        STNode expr;
        STNode semicolon;

        STToken token = peek();
        switch (token.kind) {
            case SEMICOLON_TOKEN:
                // Bare "return;" -- no expression.
                expr = STNodeFactory.createEmptyNode();
                break;
            default:
                expr = parseActionOrExpression();
                break;
        }

        semicolon = parseSemicolon();
        return STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon);
    }

    /**
     * Parse mapping constructor expression.
     * <p>
     * <code>mapping-constructor-expr := { [field (, field)*] }</code>
     *
     * @return Parsed node
     */
    private STNode parseMappingConstructorExpr() {
        startContext(ParserRuleContext.MAPPING_CONSTRUCTOR);
        STNode openBrace = parseOpenBrace();
        STNode fields = parseMappingConstructorFields();
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);
    }

    /**
     * Parse mapping constructor fields.
     *
     * @return Parsed node
     */
    private STNode parseMappingConstructorFields() {
        List<STNode> fields = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfMappingConstructor(nextToken.kind)) {
            // Empty constructor: "{}"
            return STNodeFactory.createNodeList(fields);
        }

        // First field has no leading comma.
        STNode leadingComma = STNodeFactory.createEmptyNode();
        STNode field = parseMappingField(leadingComma);
        fields.add(field);

        nextToken = peek();
        while (!isEndOfMappingConstructor(nextToken.kind)) {
            leadingComma = parseComma();
            field = parseMappingField(leadingComma);
            fields.add(field);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(fields);
    }

    private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case IDENTIFIER_TOKEN:
                return false;
            case EOF_TOKEN:
            case AT_TOKEN:
            case DOCUMENTATION_LINE:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case PUBLIC_KEYWORD:
            case PRIVATE_KEYWORD:
            case FUNCTION_KEYWORD:
            case RETURNS_KEYWORD:
            case SERVICE_KEYWORD:
            case TYPE_KEYWORD:
            case LISTENER_KEYWORD:
            case CONST_KEYWORD:
            case FINAL_KEYWORD:
            case RESOURCE_KEYWORD:
                return true;
            default:
                // A token that starts a (simple) type also terminates the constructor.
                return isSimpleType(tokenKind);
        }
    }

    /**
     * Parse mapping constructor field.
     * <p>
     * <code>field := specific-field | computed-name-field | spread-field</code>
     *
     * @param leadingComma Leading comma
     * @return Parsed node
     */
    private STNode parseMappingField(STNode leadingComma) {
        STToken nextToken = peek();
        return parseMappingField(nextToken.kind, leadingComma);
    }

    private STNode parseMappingField(SyntaxKind tokenKind, STNode leadingComma) {
        switch (tokenKind) {
            case IDENTIFIER_TOKEN:
                return parseSpecificFieldWithOptionValue(leadingComma);
            case STRING_LITERAL:
                // "key" : value
                STNode key = parseStringLiteral();
                STNode colon = parseColon();
                STNode valueExpr = parseExpression();
                return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr);
            case OPEN_BRACKET_TOKEN:
                // [ expr ] : value
                return parseComputedField(leadingComma);
            case ELLIPSIS_TOKEN:
                // ...expr (spread field)
                STNode ellipsis = parseEllipsis();
                STNode expr = parseExpression();
                return STNodeFactory.createSpreadFieldNode(leadingComma, ellipsis, expr);
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.MAPPING_FIELD, leadingComma);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseMappingField(solution.tokenKind, leadingComma);
        }
    }

    /**
     * Parse mapping constructor specific-field with an optional value.
* * @param leadingComma * @return Parsed node */ private STNode parseSpecificFieldWithOptionValue(STNode leadingComma) { STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME); return parseSpecificFieldRhs(leadingComma, key); } private STNode parseSpecificFieldRhs(STNode leadingComma, STNode key) { STToken nextToken = peek(); return parseSpecificFieldRhs(nextToken.kind, leadingComma, key); } private STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode leadingComma, STNode key) { STNode colon; STNode valueExpr; switch (tokenKind) { case COLON_TOKEN: colon = parseColon(); valueExpr = parseExpression(); break; case COMMA_TOKEN: colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; default: if (isEndOfMappingConstructor(tokenKind)) { colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, leadingComma, key); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSpecificFieldRhs(solution.tokenKind, leadingComma, key); } return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr); } /** * Parse string literal. * * @return Parsed node */ private STNode parseStringLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.STRING_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.STRING_LITERAL); return sol.recoveredNode; } } /** * Parse colon token. * * @return Parsed node */ private STNode parseColon() { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COLON); return sol.recoveredNode; } } /** * Parse computed-name-field of a mapping constructor expression. 
* <p> * <code>computed-name-field := [ field-name-expr ] : value-expr</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseComputedField(STNode leadingComma) { startContext(ParserRuleContext.COMPUTED_FIELD_NAME); STNode openBracket = parseOpenBracket(); STNode fieldNameExpr = parseExpression(); STNode closeBracket = parseCloseBracket(); endContext(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createComputedNameFieldNode(leadingComma, openBracket, fieldNameExpr, closeBracket, colon, valueExpr); } /** * Parse open bracket. * * @return Parsed node */ private STNode parseOpenBracket() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACKET); return sol.recoveredNode; } } /** * <p> * Parse compound assignment statement, which takes the following format. * </p> * <code>assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;</code> * * @return Parsed node */ private STNode parseCompoundAssignmentStmt() { startContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT); STNode varName = parseVariableName(); STNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName); endContext(); return compoundAssignmentStmt; } /** * <p> * Parse the RHS portion of the compound assignment. * </p> * <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) { validateLVExpr(lvExpr); STNode binaryOperator = parseCompoundBinaryOperator(); STNode equalsToken = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr, semicolon); } /** * Parse compound binary operator. 
* <code>BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @return Parsed node */ private STNode parseCompoundBinaryOperator() { STToken token = peek(); if (isCompoundBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR); return sol.recoveredNode; } } /** * Parse service declaration. * <p> * <code> * service-decl := metadata service [variable-name] on expression-list service-body-block * <br/> * expression-list := expression (, expression)* * </code> * * @param metadata Metadata * @return Parsed node */ private STNode parseServiceDecl(STNode metadata) { startContext(ParserRuleContext.SERVICE_DECL); STNode serviceKeyword = parseServiceKeyword(); STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword); endContext(); return serviceDecl; } /** * Parse rhs of the service declaration. * <p> * <code> * service-rhs := [variable-name] on expression-list service-body-block * </code> * * @param metadata Metadata * @param serviceKeyword Service keyword * @return Parsed node */ private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) { STNode serviceName = parseServiceName(); STNode onKeyword = parseOnKeyword(); STNode expressionList = parseListeners(); STNode serviceBody = parseServiceBody(); STNode service = STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword, expressionList, serviceBody); return service; } private STNode parseServiceName() { STToken nextToken = peek(); return parseServiceName(nextToken.kind); } private STNode parseServiceName(SyntaxKind kind) { switch (kind) { case IDENTIFIER_TOKEN: return parseIdentifier(ParserRuleContext.SERVICE_NAME); case ON_KEYWORD: return STNodeFactory.createEmptyNode(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return 
parseServiceName(solution.tokenKind); } } /** * Parse service keyword. * * @return Parsed node */ private STNode parseServiceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SERVICE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD); return sol.recoveredNode; } } /** * Check whether the given token kind is a compound binary operator. * <p> * <code>compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @param tokenKind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isCompoundBinaryOperator(SyntaxKind tokenKind) { switch (tokenKind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: return getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN; default: return false; } } /** * Parse on keyword. * * @return Parsed node */ private STNode parseOnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ON_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ON_KEYWORD); return sol.recoveredNode; } } /** * Parse listener references. 
* <p> * <code>expression-list := expression (, expression)*</code> * * @return Parsed node */ private STNode parseListeners() { startContext(ParserRuleContext.LISTENERS_LIST); List<STNode> listeners = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfExpressionsList(nextToken.kind)) { endContext(); this.errorHandler.reportMissingTokenError("missing expression"); return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); while (!isEndOfExpressionsList(nextToken.kind)) { leadingComma = parseComma(); exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(listeners); } private boolean isEndOfExpressionsList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case IDENTIFIER_TOKEN: return false; case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case RESOURCE_KEYWORD: case LISTENER_KEYWORD: case AT_TOKEN: case DOCUMENTATION_LINE: case PRIVATE_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse expression list item. * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseExpressionListItem(STNode leadingComma) { STNode expr = parseExpression(); return STNodeFactory.createExpressionListItemNode(leadingComma, expr); } /** * Parse service body. 
* <p> * <code> * service-body-block := { service-method-defn* } * </code> * * @return Parsed node */ private STNode parseServiceBody() { STNode openBrace = parseOpenBrace(); STNode resources = parseResources(); STNode closeBrace = parseCloseBrace(); return STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace); } /** * Parse service resource definitions. * * @return Parsed node */ private STNode parseResources() { List<STNode> resources = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfServiceDecl(nextToken.kind)) { STNode serviceMethod = parseResource(); if (serviceMethod == null) { break; } resources.add(serviceMethod); nextToken = peek(); } return STNodeFactory.createNodeList(resources); } private boolean isEndOfServiceDecl(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case EOF_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case SERVICE_KEYWORD: return true; default: return false; } } /** * Parse resource definition (i.e. service-method-defn). 
* <p> * <code> * service-body-block := { service-method-defn* } * <br/> * service-method-defn := metadata [resource] function identifier function-signature method-defn-body * </code> * * @return Parsed node */ private STNode parseResource() { STToken nextToken = peek(); return parseResource(nextToken.kind); } private STNode parseResource(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case RESOURCE_KEYWORD: case FUNCTION_KEYWORD: metadata = createEmptyMetadata(); break; case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: if (isEndOfServiceDecl(nextTokenKind)) { return null; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind); } return parseResource(nextTokenKind, metadata); } private STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) { switch (nextTokenKind) { case RESOURCE_KEYWORD: STNode resourceKeyword = parseResourceKeyword(); return parseFunctionDefinition(metadata, resourceKeyword); case FUNCTION_KEYWORD: return parseFunctionDefinition(metadata, STNodeFactory.createEmptyNode()); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseResource(solution.tokenKind, metadata); } } /** * Parse resource keyword. * * @return Parsed node */ private STNode parseResourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RESOURCE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD); return sol.recoveredNode; } } /** * Check whether next construct is a service declaration or not. This method is * used to determine whether an end-of-block is reached, if the next token is * a service-keyword. 
Because service-keyword can be used in statements as well * as in top-level node (service-decl). We have reached a service-decl, then * it could be due to missing close-brace at the end of the current block. * * @return <code>true</code> if the next construct is a service declaration. * <code>false</code> otherwise */ private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) { switch (peek(lookahead + 1).kind) { case IDENTIFIER_TOKEN: SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind; switch (tokenAfterIdentifier) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return false; case ON_KEYWORD: return true; default: ParserRuleContext sol = this.errorHandler.findBestPath(currentContext); return sol == ParserRuleContext.SERVICE_DECL || sol == ParserRuleContext.CLOSE_BRACE; } case ON_KEYWORD: return true; default: this.errorHandler.removeInvalidToken(); return false; } } /** * Parse listener declaration, given the qualifier. * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.LISTENER_DECL); STNode listenerKeyword = parseListenerKeyword(); STNode typeDesc = parseTypeDescriptor(); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName, equalsToken, initializer, semicolonToken); } /** * Parse listener keyword. * * @return Parsed node */ private STNode parseListenerKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LISTENER_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD); return sol.recoveredNode; } } /** * Parse constant declaration, given the qualifier. 
* <p> * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code> * * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @return Parsed node */ private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.CONSTANT_DECL); STNode constKeyword = parseConstantKeyword(); STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword); endContext(); return constDecl; } /** * Parse the components that follows after the const keyword of a constant declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @return Parsed node */ private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) { STToken nextToken = peek(); return parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword); } private STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword) { switch (nextTokenKind) { case ANNOTATION_KEYWORD: switchContext(ParserRuleContext.ANNOTATION_DECL); return parseAnnotationDeclaration(metadata, qualifier, constKeyword); case IDENTIFIER_TOKEN: return parseConstantDeclWithOptionalType(metadata, qualifier, constKeyword); default: if (isTypeStartingToken(nextTokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword); } STNode typeDesc = parseTypeDescriptor(); STNode variableName = parseVariableName(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, 
variableName, equalsToken, initializer, semicolonToken); } private STNode parseConstantDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword) { STNode varNameOrTypeName = parseStatementStartIdentifier(); STNode constDecl = parseConstantDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName); return constDecl; } /** * Parse the component that follows the first identifier in a const decl. The identifier * can be either the type-name (a user defined type) or the var-name there the type-name * is not present. * * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param typeOrVarName Identifier that follows the const-keywoord * @return Parsed node */ private STNode parseConstantDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STToken token = peek(); return parseConstantDeclRhs(token.kind, metadata, qualifier, constKeyword, typeOrVarName); } private STNode parseConstantDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) { STNode type; STNode variableName; switch (nextTokenKind) { case IDENTIFIER_TOKEN: type = typeOrVarName; variableName = parseVariableName(); break; case EQUAL_TOKEN: variableName = typeOrVarName; type = STNodeFactory.createEmptyNode(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, constKeyword, typeOrVarName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseConstantDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, typeOrVarName); } STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, type, variableName, equalsToken, initializer, semicolonToken); } /** * Parse const keyword. 
 *
 * @return Parsed node
 */
private STNode parseConstantKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CONST_KEYWORD) {
        return consume();
    } else {
        // Not a 'const' keyword: let the error handler recover (insert a missing
        // token or remove the invalid one) and return the recovered node.
        Solution sol = recover(token, ParserRuleContext.CONST_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse nil type descriptor.
 * <p>
 * <code>nil-type-descriptor := ( ) </code>
 * </p>
 *
 * @return Parsed node
 */
private STNode parseNilTypeDescriptor() {
    startContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR);
    STNode openParenthesisToken = parseOpenParenthesis();
    STNode closeParenthesisToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken);
}

/**
 * Parse typeof expression.
 * <p>
 * <code>
 * typeof-expr := typeof expression
 * </code>
 *
 * @param isRhsExpr Whether this expression occurs on the right-hand side of a statement
 * @return Typeof expression node
 */
private STNode parseTypeofExpression(boolean isRhsExpr) {
    STNode typeofKeyword = parseTypeofKeyword();
    // The operand binds at unary precedence.
    STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
    return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);
}

/**
 * Parse typeof-keyword.
 *
 * @return Typeof-keyword node
 */
private STNode parseTypeofKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.TYPEOF_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse optional type descriptor.
 * <p>
 * <code>optional-type-descriptor := type-descriptor ? </code>
 * </p>
 *
 * @param typeDescriptorNode Type descriptor that precedes the question mark
 * @return Parsed node
 */
private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
    startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
    STNode questionMarkToken = parseQuestionMark();
    endContext();
    return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken);
}

/**
 * Parse unary expression.
 * <p>
 * <code>
 * unary-expr := + expression | - expression | ~ expression | ! expression
 * </code>
 *
 * @param isRhsExpr Whether this expression occurs on the right-hand side of a statement
 * @return Unary expression node
 */
private STNode parseUnaryExpression(boolean isRhsExpr) {
    STNode unaryOperator = parseUnaryOperator();
    // The operand binds at unary precedence.
    STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
    return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);
}

/**
 * Parse unary operator.
 * <code>UnaryOperator := + | - | ~ | !</code>
 *
 * @return Parsed node
 */
private STNode parseUnaryOperator() {
    STToken token = peek();
    if (isUnaryOperator(token.kind)) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.UNARY_OPERATOR);
        return sol.recoveredNode;
    }
}

/**
 * Check whether the given token kind is a unary operator.
 *
 * @param kind STToken kind
 * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise
 */
private boolean isUnaryOperator(SyntaxKind kind) {
    switch (kind) {
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse array type descriptor.
 * <p>
 * <code>
 * array-type-descriptor := member-type-descriptor [ [ array-length ] ]
 * member-type-descriptor := type-descriptor
 * array-length :=
 *    int-literal
 *    | constant-reference-expr
 *    | inferred-array-length
 * inferred-array-length := *
 * </code>
 * </p>
 *
 * @param typeDescriptorNode Member type descriptor that precedes the brackets
 * @return Parsed Node
 */
private STNode parseArrayTypeDescriptor(STNode typeDescriptorNode) {
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode openBracketToken = parseOpenBracket();
    STNode arrayLengthNode = parseArrayLength();
    STNode closeBracketToken = parseCloseBracket();
    endContext();
    return STNodeFactory.createArrayTypeDescriptorNode(typeDescriptorNode, openBracketToken, arrayLengthNode,
            closeBracketToken);
}

/**
 * Parse array length.
* <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: return consume(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: Solution sol = recover(token, ParserRuleContext.ARRAY_LENGTH); return sol.recoveredNode; } } /** * Parse annotations. * <p> * <i>Note: In the ballerina spec ({@link https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseAnnotations() { STToken nextToken = peek(); return parseAnnotations(nextToken.kind); } private STNode parseAnnotations(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); while (nextTokenKind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextTokenKind = peek().kind; } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. 
* <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference; if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); } STNode annotValue = parseMappingConstructorExpr(); return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.AT); return sol.recoveredNode; } } /** * Parse metadata. Meta data consist of optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parse node */ private STNode parseMetaData(SyntaxKind nextTokenKind) { STNode docString; STNode annotations; switch (nextTokenKind) { case DOCUMENTATION_LINE: docString = parseDocumentationString(); annotations = parseAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseAnnotations(nextTokenKind); break; default: return createEmptyMetadata(); } return STNodeFactory.createMetadataNode(docString, annotations); } /** * Create empty metadata node. * * @return A metadata node with no doc string and no annotations */ private STNode createEmptyMetadata() { return STNodeFactory.createMetadataNode(STNodeFactory.createEmptyNode(), STNodeFactory.createNodeList(new ArrayList<>())); } /** * Parse is expression. 
* <code> * is-expr := expression is type-descriptor * </code> * * @param lhsExpr Preceding expression of the is expression * @return Is expression node */ private STNode parseTypeTestExpression(STNode lhsExpr) { startContext(ParserRuleContext.TYPE_TEST_EXPRESSION); STNode isKeyword = parseIsKeyword(); STNode typeDescriptor = parseTypeDescriptor(); endContext(); return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor); } /** * Parse is-keyword. * * @return Is-keyword node */ private STNode parseIsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IS_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IS_KEYWORD); return sol.recoveredNode; } } /** * Parse local type definition statement statement. * <code>ocal-type-defn-stmt := [annots] type identifier type-descriptor ;</code> * * @return local type definition statement statement */ private STNode parseLocalTypeDefinitionStatement(STNode annots) { startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Pass statements that starts with an identifier. 
* * @param tokenKind Next token kind * @return Parsed node */ private STNode parseStatementStartsWithIdentifier(STNode annots) { startContext(ParserRuleContext.STMT_START_WITH_IDENTIFIER); STNode identifier = parseStatementStartIdentifier(); STToken nextToken = peek(); STNode stmt = parseStatementStartsWithIdentifier(nextToken.kind, annots, identifier); endContext(); return stmt; } private STNode parseStatementStartsWithIdentifier(STNode annots, STNode identifier) { return parseStatementStartsWithIdentifier(peek().kind, annots, identifier); } private STNode parseStatementStartsWithIdentifier(SyntaxKind nextTokenKind, STNode annots, STNode identifier) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: case QUESTION_MARK_TOKEN: return parseTypeDescStartsWithIdentifier(identifier, annots); case EQUAL_TOKEN: case SEMICOLON_TOKEN: return parseStamentStartWithExpr(nextTokenKind, identifier); case PIPE_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind != SyntaxKind.EQUAL_TOKEN) { return parseTypeDescStartsWithIdentifier(identifier, annots); } default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(identifier); } if (isValidExprRhsStart(nextTokenKind)) { STNode expression = parseActionOrExpressionInLhs(identifier); return parseStamentStartWithExpr(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_IDENTIFIER, annots, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatementStartsWithIdentifier(solution.tokenKind, annots, identifier); } } private STNode parseTypeDescStartsWithIdentifier(STNode typeDesc, STNode annots) { switchContext(ParserRuleContext.VAR_DECL_STMT); typeDesc = parseComplexTypeDescriptor(typeDesc); STNode varName = parseVariableName(); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVarDeclRhs(annots, finalKeyword, typeDesc, varName, false); } /** * Parse statement which is only 
consists of an action or expression. * * @param nextTokenKind Next token kind * @return Parsed node */ private STNode parseStamentStartsWithExpr(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpression(nextTokenKind); STNode stmt = parseStamentStartWithExpr(expression); endContext(); return stmt; } /** * Parse statements that starts with an expression. * * @return Parsed node */ private STNode parseStamentStartWithExpr(STNode expression) { STToken nextToken = peek(); return parseStamentStartWithExpr(nextToken.kind, expression); } /** * Parse the component followed by the expression, at the beginning of a statement. * * @param nextTokenKind Kind of the next token * @return Parsed node */ private STNode parseStamentStartWithExpr(SyntaxKind nextTokenKind, STNode expression) { switch (nextTokenKind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(expression); case SEMICOLON_TOKEN: return getExpressionAsStatement(expression); default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, expression); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStamentStartWithExpr(solution.tokenKind, expression); } } private STNode getExpressionAsStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: return parseCallStatement(expression); case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: return parseActionStatement(expression); default: this.errorHandler.reportInvalidNode(null, "left hand side of an assignment must be a variable reference"); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID, expression, semicolon); } } /** * <p> * Parse call 
statement, given the call expression. * <p> * <code> * call-stmt := call-expr ; * <br/> * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr * </code> * * @param expression Call expression associated with the call statement * @return Call statement node */ private STNode parseCallStatement(STNode expression) { validateExprInCallStmt(expression); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon); } private void validateExprInCallStmt(STNode expression) { switch (expression.kind) { case FUNCTION_CALL: case METHOD_CALL: break; case CHECK_EXPRESSION: validateExprInCallStmt(((STCheckExpressionNode) expression).expression); break; case REMOTE_METHOD_CALL_ACTION: break; case BRACED_EXPRESSION: validateExprInCallStmt(((STBracedExpressionNode) expression).expression); break; default: if (isMissingNode(expression)) { break; } this.errorHandler.reportInvalidNode(null, "expression followed by the checking keyword must be a " + "func-call, a method-call or a check-expr"); break; } } /** * Check whether a node is a missing node. * * @param node Node to check * @return <code>true</code> if the node is a missing node. <code>false</code> otherwise */ private boolean isMissingNode(STNode node) { return node instanceof STMissingToken; } private STNode parseActionStatement(STNode action) { STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon); } private STNode parseAction(SyntaxKind tokenKind, STNode lhsExpr) { switch (tokenKind) { case RIGHT_ARROW_TOKEN: return parseRemoteMethodCallAction(lhsExpr); default: return null; } } /** * Parse remote method call action, given the starting expression. 
* <p> * <code>remote-method-call-action := expression -> method-name ( arg-list )</code> * * @param expression LHS expression * @return */ private STNode parseRemoteMethodCallAction(STNode expression) { STNode rightArrow = parseRightArrow(); STNode methodName = parseFunctionName(); STNode openParenToken = parseOpenParenthesis(); STNode arguments = parseArgsList(); STNode closeParenToken = parseCloseParenthesis(); return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, methodName, openParenToken, arguments, closeParenToken); } /** * Parse right arrow (<code>-></code>) token. * * @return Parsed node */ private STNode parseRightArrow() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW); return sol.recoveredNode; } } /** * Check whether this is a valid lhs expression. * * @param tokenKind Kind of the next token * @return <code>true</code>if this is a start of an expression. <code>false</code> otherwise */ private boolean isValidLHSExpression(SyntaxKind tokenKind) { switch (tokenKind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case IDENTIFIER_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case TYPEOF_KEYWORD: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: return true; case PLUS_TOKEN: case MINUS_TOKEN: return !isCompoundBinaryOperator(tokenKind); case OPEN_PAREN_TOKEN: default: return false; } } /** * Parse parameterized type descriptor. 
 * parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter
 *
 * @return Parsed node
 */
private STNode parseParameterizedTypeDescriptor() {
    startContext(ParserRuleContext.PARAMETERIZED_TYPE_DESCRIPTOR);
    STNode parameterizedTypeKeyword = parseParameterizedTypeKeyword();
    STNode ltToken = parseLTToken();
    STNode typeNode = parseTypeDescriptor();
    STNode gtToken = parseGTToken();
    endContext();
    return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, ltToken, typeNode,
            gtToken);
}

/**
 * Parse <code>map</code> or <code>future</code> or <code>typedesc</code> keyword token.
 *
 * @return Parsed node
 */
private STNode parseParameterizedTypeKeyword() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case MAP_KEYWORD:
        case FUTURE_KEYWORD:
        case TYPEDESC_KEYWORD:
            return consume();
        default:
            Solution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE);
            return sol.recoveredNode;
    }
}

/**
 * Parse <code> > </code> token.
 *
 * @return Parsed node
 */
private STNode parseGTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.GT_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(nextToken, ParserRuleContext.GT);
        return sol.recoveredNode;
    }
}

/**
 * Parse <code> < </code> token.
 *
 * @return Parsed node
 */
private STNode parseLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.LT_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(nextToken, ParserRuleContext.LT);
        return sol.recoveredNode;
    }
}

/**
 * Parse nil literal. Here nil literal is only referred to ( ).
 *
 * @return Parsed node
 */
private STNode parseNilLiteral() {
    startContext(ParserRuleContext.NIL_LITERAL);
    STNode openParenthesisToken = parseOpenParenthesis();
    STNode closeParenthesisToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);
}

/**
 * Parse annotation declaration, given the qualifier.
* * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @param constKeyword Const keyword * @return Parsed node */ private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) { startContext(ParserRuleContext.ANNOTATION_DECL); STNode annotationKeyword = parseAnnotationKeyword(); STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); endContext(); return annotDecl; } /** * Parse annotation keyword. * * @return Parsed node */ private STNode parseAnnotationKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ANNOTATION_KEYWORD); return sol.recoveredNode; } } /** * Parse the components that follows after the annotation keyword of a annotation declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param annotationKeyword * @return Parsed node */ private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STToken nextToken = peek(); return parseAnnotationDeclFromType(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword); } private STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword); default: if (isTypeStartingToken(nextTokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword, annotationKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclFromType(solution.tokenKind, metadata, 
qualifier, constKeyword, annotationKeyword); } STNode typeDesc = parseTypeDescriptor(); STNode annotTag = parseAnnotationTag(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, annotTag, equalsToken, initializer, semicolonToken); } /** * Parse annotation tag. * <p> * <code>annot-tag := identifier</code> * * @return */ private STNode parseAnnotationTag() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.ANNOTATION_TAG); return sol.recoveredNode; } } private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STNode typeDescOrAnnotTag = parseAnnotationTag(); if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag, annotTag); } return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } /** * Parse the component that follows the first identifier in an annotation decl. The identifier * can be either the type-name (a user defined type) or the annot-tag, where the type-name * is not present. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the annotation decl * @param constKeyword Const keyword * @param annotationKeyword Annotation keyword * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword * @return Parsed node */ private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STToken token = peek(); return parseAnnotationDeclRhs(token.kind, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } private STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STNode typeDesc; STNode annotTag; switch (nextTokenKind) { case IDENTIFIER_TOKEN: typeDesc = typeDescOrAnnotTag; annotTag = parseAnnotationTag(); break; case SEMICOLON_TOKEN: case ON_KEYWORD: typeDesc = STNodeFactory.createEmptyNode(); annotTag = typeDescOrAnnotTag; break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDesc, STNode annotTag) { STToken nextToken = peek(); return parseAnnotationDeclAttachPoints(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode 
typeDesc, STNode annotTag) { STNode onKeyword; STNode attachPoints; switch (nextTokenKind) { case SEMICOLON_TOKEN: onKeyword = STNodeFactory.createEmptyNode(); attachPoints = STNodeFactory.createEmptyNode(); break; case ON_KEYWORD: onKeyword = parseOnKeyword(); attachPoints = parseAnnotationAttachPoints(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode semicolonToken = parseSemicolon(); return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag, onKeyword, attachPoints, semicolonToken); } /** * Parse annotation attach points. * <p> * <code> * annot-attach-points := annot-attach-point (, annot-attach-point)* * <br/><br/> * annot-attach-point := dual-attach-point | source-only-attach-point * <br/><br/> * dual-attach-point := [source] dual-attach-point-ident * <br/><br/> * dual-attach-point-ident := * [object] type * | [object|resource] function * | parameter * | return * | service * | [object|record] field * <br/><br/> * source-only-attach-point := source source-only-attach-point-ident * <br/><br/> * source-only-attach-point-ident := * annotation * | external * | var * | const * | listener * | worker * </code> * * @return Parsed node */ private STNode parseAnnotationAttachPoints() { startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST); List<STNode> attachPoints = new ArrayList<>(); STToken nextToken = peek(); if (isEndAnnotAttachPointList(nextToken.kind)) { endContext(); this.errorHandler.reportMissingTokenError("missing attach point"); return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode attachPoint = 
parseAnnotationAttachPoint(); attachPoints.add(attachPoint); nextToken = peek(); STNode leadingComma; while (!isEndAnnotAttachPointList(nextToken.kind)) { leadingComma = parseAttachPointEnd(); if (leadingComma == null) { break; } attachPoints.add(leadingComma); attachPoint = parseAnnotationAttachPoint(); if (attachPoint == null) { this.errorHandler.reportMissingTokenError("missing attach point"); attachPoint = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); attachPoints.add(attachPoint); break; } attachPoints.add(attachPoint); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(attachPoints); } /** * Parse annotation attach point end. * * @return Parsed node */ private STNode parseAttachPointEnd() { STToken nextToken = peek(); return parseAttachPointEnd(nextToken.kind); } private STNode parseAttachPointEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case SEMICOLON_TOKEN: return null; case COMMA_TOKEN: return consume(); default: Solution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END); if (sol.action == Action.REMOVE) { return sol.recoveredNode; } return sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null; } } private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse annotation attach point. 
* * @return Parsed node */ private STNode parseAnnotationAttachPoint() { return parseAnnotationAttachPoint(peek().kind); } private STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: return null; case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: case SOURCE_KEYWORD: STNode sourceKeyword = parseSourceKeyword(); return parseAttachPointIdent(sourceKeyword); case OBJECT_KEYWORD: case TYPE_KEYWORD: case RESOURCE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: case RECORD_KEYWORD: sourceKeyword = STNodeFactory.createEmptyNode(); STNode firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT); return solution.recoveredNode; } } /** * Parse source keyword. * * @return Parsed node */ private STNode parseSourceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SOURCE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SOURCE_KEYWORD); return sol.recoveredNode; } } /** * Parse attach point ident gievn. 
* <p> * <code> * source-only-attach-point-ident := annotation | external | var | const | listener | worker * <br/><br/> * dual-attach-point-ident := [object] type | [object|resource] function | parameter * | return | service | [object|record] field * </code> * * @param sourceKeyword Source keyword * @return Parsed node */ private STNode parseAttachPointIdent(STNode sourceKeyword) { return parseAttachPointIdent(peek().kind, sourceKeyword); } private STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) { switch (nextTokenKind) { case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: STNode firstIdent = consume(); STNode secondIdent = STNodeFactory.createEmptyNode(); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); case OBJECT_KEYWORD: case RESOURCE_KEYWORD: case RECORD_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } firstIdent = solution.recoveredNode; return parseDualAttachPointIdent(sourceKeyword, firstIdent); } } /** * Parse dual-attach-point ident. 
* * @param sourceKeyword Source keyword * @param firstIdent first part of the dual attach-point * @return Parsed node */ private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) { STNode secondIdent; switch (firstIdent.kind) { case OBJECT_KEYWORD: secondIdent = parseIdentAfterObjectIdent(); break; case RESOURCE_KEYWORD: secondIdent = parseFunctionIdent(); break; case RECORD_KEYWORD: secondIdent = parseFieldIdent(); break; case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: default: secondIdent = STNodeFactory.createEmptyNode(); break; } return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); } /** * Parse the idents that are supported after object-ident. * * @return Parsed node */ private STNode parseIdentAfterObjectIdent() { STToken token = peek(); switch (token.kind) { case TYPE_KEYWORD: case FUNCTION_KEYWORD: case FIELD_KEYWORD: return consume(); default: Solution sol = recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT); return sol.recoveredNode; } } /** * Parse function ident. * * @return Parsed node */ private STNode parseFunctionIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_IDENT); return sol.recoveredNode; } } /** * Parse field ident. * * @return Parsed node */ private STNode parseFieldIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FIELD_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FIELD_IDENT); return sol.recoveredNode; } } /** * Parse XML namespace declaration. 
* <p> * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ; * <br/> * xml-namespace-uri := simple-const-expr * <br/> * xml-namespace-prefix := identifier * </code> * * @return */ private STNode parseXMLNamepsaceDeclaration() { startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION); STNode xmlnsKeyword = parseXMLNSKeyword(); STNode namespaceUri = parseXMLNamespaceUri(); STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri); endContext(); return xmlnsDecl; } /** * Parse xmlns keyword. * * @return Parsed node */ private STNode parseXMLNSKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.XMLNS_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.XMLNS_KEYWORD); return sol.recoveredNode; } } /** * Parse namespace uri. * * @return Parsed node */ private STNode parseXMLNamespaceUri() { STNode expr = parseConstExpr(); switch (expr.kind) { case STRING_LITERAL: case IDENTIFIER_TOKEN: case QUALIFIED_NAME_REFERENCE: break; default: this.errorHandler.reportInvalidNode(null, "namespace uri must be a subtype of string"); } return expr; } /** * Parse constants expr. * * @return Parsed node */ private STNode parseConstExpr() { startContext(ParserRuleContext.CONSTANT_EXPRESSION); STToken nextToken = peek(); STNode expr; switch (nextToken.kind) { case STRING_LITERAL: case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: expr = consume(); break; case IDENTIFIER_TOKEN: expr = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); break; case OPEN_BRACE_TOKEN: default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START); expr = solution.recoveredNode; break; } endContext(); return expr; } /** * Parse the portion after the namsepsace-uri of an XML declaration. 
* * @param xmlnsKeyword XMLNS keyword * @param namespaceUri Namespace URI * @return Parsed node */ private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri) { return parseXMLDeclRhs(peek().kind, xmlnsKeyword, namespaceUri); } private STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri) { STNode asKeyword = STNodeFactory.createEmptyNode(); STNode namespacePrefix = STNodeFactory.createEmptyNode(); switch (nextTokenKind) { case AS_KEYWORD: asKeyword = parseAsKeyword(); namespacePrefix = parseNamespacePrefix(); break; case SEMICOLON_TOKEN: break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri); } STNode semicolon = parseSemicolon(); return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } /** * Parse import prefix. * * @return Parsed node */ private STNode parseNamespacePrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.NAMESPACE_PREFIX); return sol.recoveredNode; } } /** * Parse named worker declaration. 
* <p> * <code>named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }</code> * * @param annots Annotations attached to the worker decl * @return Parsed node */ private STNode parseNamedWorkerDeclaration(STNode annots) { startContext(ParserRuleContext.NAMED_WORKER_DECL); STNode workerKeyword = parseWorkerKeyword(); STNode workerName = parseWorkerName(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode workerBody = parseBlockNode(); endContext(); return STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc, workerBody); } /** * Parse worker keyword. * * @return Parsed node */ private STNode parseWorkerKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.WORKER_KEYWORD); return sol.recoveredNode; } } /** * Parse worker name. * <p> * <code>worker-name := identifier</code> * * @return Parsed node */ private STNode parseWorkerName() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.WORKER_NAME); return sol.recoveredNode; } } /** * Parse documentation string. * <p> * <code>DocumentationString := DocumentationLine +</code> * <p> * Refer {@link BallerinaLexer * * @return Parsed node */ private STNode parseDocumentationString() { List<STNode> docLines = new ArrayList<>(); STToken nextToken = peek(); while (nextToken.kind == SyntaxKind.DOCUMENTATION_LINE) { docLines.add(consume()); nextToken = peek(); } STNode documentationLines = STNodeFactory.createNodeList(docLines); return STNodeFactory.createDocumentationStringNode(documentationLines); } /** * Parse lock statement. 
* <code>lock-stmt := lock block-stmt ;</code> * * @return Lock statement */ private STNode parseLockStatement() { startContext(ParserRuleContext.LOCK_STMT); STNode lockKeyword = parseLockKeyword(); STNode blockStatement = parseBlockNode(); endContext(); return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement); } /** * Parse lock-keyword. * * @return lock-keyword node */ private STNode parseLockKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LOCK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LOCK_KEYWORD); return sol.recoveredNode; } } /** * Parse union type descriptor. * union-type-descriptor := type-descriptor | type-descriptor * * @return parsed union type desc node */ private STNode parseUnionTypeDescriptor(STNode leftTypeDesc) { STNode pipeToken = parsePipeToken(); STNode rightTypeDesc = parseTypeDescriptor(); return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc); } /** * Parse pipe token. 
* * @return parsed pipe token node */ private STNode parsePipeToken() { STToken token = peek(); if (token.kind == SyntaxKind.PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PIPE); return sol.recoveredNode; } } private boolean isTypeStartingToken(SyntaxKind nodeKind) { switch (nodeKind) { case IDENTIFIER_TOKEN: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return true; default: return isSimpleType(nodeKind); } } static boolean isSimpleType(SyntaxKind nodeKind) { switch (nodeKind) { case INT_KEYWORD: case FLOAT_KEYWORD: case DECIMAL_KEYWORD: case BOOLEAN_KEYWORD: case STRING_KEYWORD: case BYTE_KEYWORD: case XML_KEYWORD: case JSON_KEYWORD: case HANDLE_KEYWORD: case ANY_KEYWORD: case ANYDATA_KEYWORD: case NEVER_KEYWORD: case SERVICE_KEYWORD: case VAR_KEYWORD: return true; case TYPE_DESC: return true; default: return false; } } private SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) { switch (typeKeyword) { case INT_KEYWORD: return SyntaxKind.INT_TYPE_DESC; case FLOAT_KEYWORD: return SyntaxKind.FLOAT_TYPE_DESC; case DECIMAL_KEYWORD: return SyntaxKind.DECIMAL_TYPE_DESC; case BOOLEAN_KEYWORD: return SyntaxKind.BOOLEAN_TYPE_DESC; case STRING_KEYWORD: return SyntaxKind.STRING_TYPE_DESC; case BYTE_KEYWORD: return SyntaxKind.BYTE_TYPE_DESC; case XML_KEYWORD: return SyntaxKind.XML_TYPE_DESC; case JSON_KEYWORD: return SyntaxKind.JSON_TYPE_DESC; case HANDLE_KEYWORD: return SyntaxKind.HANDLE_TYPE_DESC; case ANY_KEYWORD: return SyntaxKind.ANY_TYPE_DESC; case ANYDATA_KEYWORD: return SyntaxKind.ANYDATA_TYPE_DESC; case NEVER_KEYWORD: return SyntaxKind.NEVER_TYPE_DESC; case SERVICE_KEYWORD: return SyntaxKind.SERVICE_TYPE_DESC; case VAR_KEYWORD: return SyntaxKind.VAR_TYPE_DESC; default: return SyntaxKind.TYPE_DESC; } } /** * Parse fork-keyword. 
* * @return Fork-keyword node */ private STNode parseForkKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FORK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FORK_KEYWORD); return sol.recoveredNode; } } /** * Parse multiple named worker declarations. * * @return named-worker-declarations node array */ private STNode parseMultileNamedWorkerDeclarations() { STToken token = peek(); ArrayList<STNode> workers = new ArrayList<>(); while (!isEndOfStatements(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (stmt.kind) { case NAMED_WORKER_DECLARATION: workers.add(stmt); break; default: this.errorHandler.reportInvalidNode(null, "Only named-workers are allowed here"); break; } token = peek(); } if (workers.isEmpty()) { this.errorHandler.reportInvalidNode(null, "Fork Statement must contain atleast one named-worker"); } STNode namedWorkers = STNodeFactory.createNodeList(workers); return namedWorkers; } /** * Parse fork statement. * <code>fork-stmt := fork { named-worker-decl+ }</code> * * @return Fork statement */ private STNode parseForkStatement() { startContext(ParserRuleContext.FORK_STMT); STNode forkKeyword = parseForkKeyword(); STNode openBrace = parseOpenBrace(); STNode namedWorkerDeclarations = parseMultileNamedWorkerDeclarations(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace); } /** * Parse decimal floating point literal. * * @return Parsed node */ private STNode parseDecimalFloatingPointLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.DECIMAL_FLOATING_POINT_LITERAL); return sol.recoveredNode; } } /** * Parse hex floating point literal. 
* * @return Parsed node */ private STNode parseHexFloatingPointLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.HEX_FLOATING_POINT_LITERAL); return sol.recoveredNode; } } /** * Parse trap expression. * <p> * <code> * trap-expr := trap expression * </code> * * @param isRhsExpr * @return Trap expression node */ private STNode parseTrapExpression(boolean isRhsExpr) { STNode trapKeyword = parseTrapKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createTrapExpressionNode(trapKeyword, expr); } /** * Parse trap-keyword. * * @return Trap-keyword node */ private STNode parseTrapKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRAP_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TRAP_KEYWORD); return sol.recoveredNode; } } /** * Parse list constructor expression. * <p> * <code> * list-constructor-expr := [ [ expr-list ] ] * <br/> * expr-list := expression (, expression)* * </code> * * @return Parsed node */ private STNode parseListConstructorExpr() { startContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode expressions = parseOptionalExpressionsList(); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); } /** * Parse optional expression list. 
* * @return Parsed node */ private STNode parseOptionalExpressionsList() { List<STNode> expressions = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfExpressionsList(nextToken.kind)) { return STNodeFactory.createNodeList(new ArrayList<>()); } STNode expr = parseExpression(); expressions.add(expr); nextToken = peek(); STNode leadingComma; while (!isEndOfExpressionsList(nextToken.kind)) { leadingComma = parseComma(); expressions.add(leadingComma); expr = parseExpression(); expressions.add(expr); nextToken = peek(); } return STNodeFactory.createNodeList(expressions); } /** * Parse foreach statement. * <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt</code> * * @return foreach statement */ private STNode parseForEachStatement() { startContext(ParserRuleContext.FOREACH_STMT); STNode forEachKeyword = parseForEachKeyword(); STNode type = parseTypeDescriptor(); STNode varName = parseVariableName(); STNode inKeyword = parseInKeyword(); STNode actionOrExpr = parseActionOrExpression(); STNode blockStatement = parseBlockNode(); endContext(); return STNodeFactory.createForEachStatementNode(forEachKeyword, type, varName, inKeyword, actionOrExpr, blockStatement); } /** * Parse foreach-keyword. * * @return ForEach-keyword node */ private STNode parseForEachKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FOREACH_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FOREACH_KEYWORD); return sol.recoveredNode; } } /** * Parse in-keyword. * * @return In-keyword node */ private STNode parseInKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IN_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IN_KEYWORD); return sol.recoveredNode; } } /** * Parse type cast expression. 
* <p> * <code> * type-cast-expr := < type-cast-param > expression * <br/> * type-cast-param := [annots] type-descriptor | annots * </code> * * @return Parsed node */ private STNode parseTypeCastExpr() { startContext(ParserRuleContext.TYPE_CAST_EXPRESSION); STNode ltToken = parseLTToken(); STNode typeCastParam = parseTypeCastParam(); STNode gtToken = parseGTToken(); STNode expression = parseExpression(); endContext(); return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression); } private STNode parseTypeCastParam() { STNode annot; STNode type; STToken token = peek(); switch (token.kind) { case AT_TOKEN: annot = parseAnnotations(); token = peek(); if (isTypeStartingToken(token.kind)) { type = parseTypeDescriptor(); } else { type = STNodeFactory.createEmptyNode(); } break; default: annot = STNodeFactory.createEmptyNode(); type = parseTypeDescriptor(); break; } return STNodeFactory.createTypeCastParamNode(annot, type); } /** * Parse table constructor expression. * <p> * <code> * table-constructor-expr := table [key-specifier] [ [row-list] ] * </code> * * @return Parsed node */ private STNode parseTableConstructorExpr() { startContext(ParserRuleContext.TABLE_CONSTRUCTOR); STNode tableKeyword = parseTableKeyword(); STNode keySpecifier = STNodeFactory.createEmptyNode(); return parseTableConstructorExpr(tableKeyword, keySpecifier); } private STNode parseTableConstructorExpr(STNode tableKeyword, STNode keySpecifier) { return parseTableConstructorExpr(peek().kind, tableKeyword, keySpecifier); } /** * Parse table-keyword. * * @return Table-keyword node */ private STNode parseTableKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TABLE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TABLE_KEYWORD); return sol.recoveredNode; } } /** * Parse table rows. 
* <p> * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code> * * @return Parsed node */ private STNode parseRowList() { List<STNode> mappings = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfMappingConstructorsList(nextToken.kind)) { return STNodeFactory.createNodeList(new ArrayList<>()); } STNode mapExpr = parseMappingConstructorExpr(); mappings.add(mapExpr); nextToken = peek(); STNode leadingComma; while (!isEndOfMappingConstructorsList(nextToken.kind)) { leadingComma = parseComma(); mappings.add(leadingComma); mapExpr = parseMappingConstructorExpr(); mappings.add(mapExpr); nextToken = peek(); } return STNodeFactory.createNodeList(mappings); } private boolean isEndOfMappingConstructorsList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case OPEN_BRACE_TOKEN: return false; default: return isEndOfMappingConstructor(tokenKind); } } /** * Parse key specifier. * <p> * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code> * * @return Parsed node */ private STNode parseKeySpecifier() { startContext(ParserRuleContext.KEY_SPECIFIER); STNode keyKeyword = parseKeyKeyword(); STNode openParen = parseOpenParenthesis(); STNode fieldNames = parseFieldNames(); STNode closeParen = parseCloseParenthesis(); endContext(); return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen); } /** * Parse key-keyword. * * @return Key-keyword node */ private STNode parseKeyKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.KEY_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.KEY_KEYWORD); return sol.recoveredNode; } } /** * Parse field names. 
* <p> * <code>field-name-list := [ field-name (, field-name)* ]</code> * * @return Parsed node */ private STNode parseFieldNames() { List<STNode> fieldNames = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfFieldNamesList(nextToken.kind)) { return STNodeFactory.createNodeList(new ArrayList<>()); } STNode fieldName = parseVariableName(); fieldNames.add(fieldName); nextToken = peek(); STNode leadingComma; while (!isEndOfFieldNamesList(nextToken.kind)) { leadingComma = parseComma(); fieldNames.add(leadingComma); fieldName = parseVariableName(); fieldNames.add(fieldName); nextToken = peek(); } return STNodeFactory.createNodeList(fieldNames); } private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case IDENTIFIER_TOKEN: return false; default: return true; } } }
class BallerinaParser {

    // Fallback operator precedence used when no enclosing operator context is known.
    private static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.ACTION;

    // Error handler that drives token-level recovery (insert/remove/keep decisions).
    private final BallerinaParserErrorHandler errorHandler;

    // Source of lexed tokens; also supports lexer-mode switching (e.g. IMPORT mode).
    private final AbstractTokenReader tokenReader;

    // Mutable parse state: the kind of parameter currently being parsed. Parameters
    // must appear in required -> defaultable -> rest order within a signature.
    private ParserRuleContext currentParamKind = ParserRuleContext.REQUIRED_PARAM;

    protected BallerinaParser(AbstractTokenReader tokenReader) {
        this.tokenReader = tokenReader;
        this.errorHandler = new BallerinaParserErrorHandler(tokenReader, this);
    }

    /**
     * Start parsing the given input.
     *
     * @return Parsed node
     */
    public STNode parse() {
        return parseCompUnit();
    }

    /**
     * Start parsing the input from a given context. Supported starting points are:
     * <ul>
     * <li>Module part (a file)</li>
     * <li>Top level node</li>
     * <li>Statement</li>
     * <li>Expression</li>
     * </ul>
     *
     * @param context Context to start parsing
     * @return Parsed node
     */
    public STNode parse(ParserRuleContext context) {
        switch (context) {
            case COMP_UNIT:
                return parseCompUnit();
            case TOP_LEVEL_NODE:
                startContext(ParserRuleContext.COMP_UNIT);
                return parseTopLevelNode();
            case STATEMENT:
                // Push the enclosing contexts a statement would normally live in.
                startContext(ParserRuleContext.COMP_UNIT);
                startContext(ParserRuleContext.FUNC_DEFINITION);
                startContext(ParserRuleContext.FUNC_BODY_BLOCK);
                return parseStatement();
            case EXPRESSION:
                startContext(ParserRuleContext.COMP_UNIT);
                startContext(ParserRuleContext.FUNC_DEFINITION);
                startContext(ParserRuleContext.FUNC_BODY_BLOCK);
                startContext(ParserRuleContext.STATEMENT);
                return parseExpression();
            default:
                throw new UnsupportedOperationException("Cannot start parsing from: " + context);
        }
    }

    /**
     * Resume the parsing from the given context. This is the re-entry dispatch used
     * by the error handler after a recovery action: each context maps back to the
     * parse method that was interrupted, with {@code args} carrying the partially
     * parsed nodes that method needs to continue.
     *
     * @param context Context to resume parsing
     * @param args Arguments required to continue parsing from the given parser context
     * @return Parsed node
     */
    public STNode resumeParsing(ParserRuleContext context, Object... args) {
        switch (context) {
            case COMP_UNIT: return parseCompUnit();
            case EXTERNAL_FUNC_BODY: return parseExternalFunctionBody();
            case FUNC_BODY: return parseFunctionBody();
            case OPEN_BRACE: return parseOpenBrace();
            case CLOSE_BRACE: return parseCloseBrace();
            case FUNC_NAME: return parseFunctionName();
            case OPEN_PARENTHESIS: return parseOpenParenthesis();
            case PARAM_LIST: return parseParamList();
            case RETURN_TYPE_DESCRIPTOR: return parseReturnTypeDescriptor();
            case SIMPLE_TYPE_DESCRIPTOR: return parseTypeDescriptor();
            case ASSIGN_OP: return parseAssignOp();
            case EXTERNAL_KEYWORD: return parseExternalKeyword();
            case FUNC_BODY_BLOCK: return parseFunctionBodyBlock();
            case SEMICOLON: return parseSemicolon();
            case CLOSE_PARENTHESIS: return parseCloseParenthesis();
            case VARIABLE_NAME: return parseVariableName();
            case TERMINAL_EXPRESSION: return parseTerminalExpression((boolean) args[0], (boolean) args[1]);
            case STATEMENT: return parseStatement();
            case STATEMENT_WITHOUT_ANNOTS: return parseStatement((STNode) args[0]);
            // NOTE: args[1] (precedence) deliberately precedes args[0] (lhs) here.
            case EXPRESSION_RHS: return parseExpressionRhs((OperatorPrecedence) args[1], (STNode) args[0], (boolean) args[2], (boolean) args[3]);
            case PARAMETER: return parseParameter((STNode) args[0], (int) args[1]);
            case PARAMETER_WITHOUT_ANNOTS: return parseParamGivenAnnots((STNode) args[0], (STNode) args[1], (int) args[2]);
            case AFTER_PARAMETER_TYPE: return parseAfterParamType((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]);
            case PARAMETER_RHS: return parseParameterRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4]);
            case TOP_LEVEL_NODE: return parseTopLevelNode();
            case TOP_LEVEL_NODE_WITHOUT_METADATA: return parseTopLevelNode((STNode) args[0]);
            case TOP_LEVEL_NODE_WITHOUT_MODIFIER: return parseTopLevelNode((STNode) args[0], (STNode) args[1]);
            case STATEMENT_START_IDENTIFIER: return parseStatementStartIdentifier();
            case VAR_DECL_STMT_RHS: return parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (boolean) args[4]);
            case TYPE_REFERENCE: return parseTypeReference();
            case FIELD_DESCRIPTOR_RHS: return parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2]);
            case NAMED_OR_POSITIONAL_ARG_RHS: return parseNamedOrPositionalArg((STNode) args[0]);
            case RECORD_BODY_START: return parseRecordBodyStartDelimiter();
            case TYPE_DESCRIPTOR: return parseTypeDescriptor();
            case OBJECT_MEMBER: return parseObjectMember();
            case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY: return parseObjectMethodOrField((STNode) args[0], (STNode) args[1]);
            case OBJECT_FIELD_RHS: return parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]);
            case OBJECT_TYPE_FIRST_QUALIFIER: return parseObjectTypeQualifiers();
            case OBJECT_TYPE_SECOND_QUALIFIER: return parseObjectTypeSecondQualifier((STNode) args[0]);
            case OBJECT_KEYWORD: return parseObjectKeyword();
            case TYPE_NAME: return parseTypeName();
            case IF_KEYWORD: return parseIfKeyword();
            case ELSE_KEYWORD: return parseElseKeyword();
            case ELSE_BODY: return parseElseBody();
            case WHILE_KEYWORD: return parseWhileKeyword();
            case PANIC_KEYWORD: return parsePanicKeyword();
            case MAJOR_VERSION: return parseMajorVersion();
            case IMPORT_DECL_RHS: return parseImportDecl((STNode) args[0], (STNode) args[1]);
            case IMPORT_PREFIX: return parseImportPrefix();
            // These contexts all reduce to a plain identifier parse.
            case IMPORT_MODULE_NAME:
            case IMPORT_ORG_OR_MODULE_NAME:
            case VARIABLE_REF:
            case FIELD_OR_FUNC_NAME:
            case SERVICE_NAME:
                return parseIdentifier(context);
            case IMPORT_KEYWORD: return parseImportKeyword();
            case SLASH: return parseSlashToken();
            case DOT: return parseDotToken();
            case IMPORT_VERSION_DECL: return parseVersion();
            case VERSION_KEYWORD: return parseVersionKeywrod();
            case VERSION_NUMBER: return parseVersionNumber();
            case DECIMAL_INTEGER_LITERAL: return parseDecimalIntLiteral(context);
            case IMPORT_SUB_VERSION: return parseSubVersion(context);
            case IMPORT_PREFIX_DECL: return parseImportPrefixDecl();
            case AS_KEYWORD: return parseAsKeyword();
            case CONTINUE_KEYWORD: return parseContinueKeyword();
            case BREAK_KEYWORD: return parseBreakKeyword();
            case RETURN_KEYWORD: return parseReturnKeyword();
            case MAPPING_FIELD: return parseMappingField((STNode) args[0]);
            case SPECIFIC_FIELD_RHS: return parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]);
            case STRING_LITERAL: return parseStringLiteral();
            case COLON: return parseColon();
            case OPEN_BRACKET: return parseOpenBracket();
            case RESOURCE_DEF: return parseResource();
            case OPTIONAL_SERVICE_NAME: return parseServiceName();
            case SERVICE_KEYWORD: return parseServiceKeyword();
            case ON_KEYWORD: return parseOnKeyword();
            case RESOURCE_KEYWORD: return parseResourceKeyword();
            case LISTENER_KEYWORD: return parseListenerKeyword();
            case NIL_TYPE_DESCRIPTOR: return parseNilTypeDescriptor();
            case COMPOUND_ASSIGNMENT_STMT: return parseCompoundAssignmentStmt();
            case TYPEOF_KEYWORD: return parseTypeofKeyword();
            case ARRAY_TYPE_DESCRIPTOR: return parseArrayTypeDescriptor((STNode) args[0]);
            case ARRAY_LENGTH: return parseArrayLength();
            case FUNC_DEFINITION:
            case REQUIRED_PARAM:
            case ANNOT_REFERENCE:
                return parseIdentifier(context);
            case IS_KEYWORD: return parseIsKeyword();
            case STMT_START_WITH_EXPR_RHS: return parseStamentStartWithExpr((STNode) args[0]);
            case COMMA: return parseComma();
            case CONST_DECL_TYPE: return parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]);
            case STMT_START_WITH_IDENTIFIER: return parseStatementStartsWithIdentifier((STNode) args[0], (STNode) args[1]);
            case PARAMETERIZED_TYPE_DESCRIPTOR: return parseParameterizedTypeDescriptor();
            case LT: return parseLTToken();
            case GT: return parseGTToken();
            case NIL_LITERAL: return parseNilLiteral();
            case RECORD_FIELD_OR_RECORD_END: return parseFieldOrRestDescriptor((boolean) args[0]);
            case ANNOTATION_KEYWORD: return parseAnnotationKeyword();
            case ANNOT_DECL_OPTIONAL_TYPE: return parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]);
            case ANNOT_DECL_RHS: return parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4]);
            case ANNOT_OPTIONAL_ATTACH_POINTS: return parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3], (STNode) args[4], (STNode) args[5]);
            case SOURCE_KEYWORD: return parseSourceKeyword();
            case ATTACH_POINT_IDENT: return parseAttachPointIdent((STNode) args[0]);
            case IDENT_AFTER_OBJECT_IDENT: return parseIdentAfterObjectIdent();
            case FUNCTION_IDENT: return parseFunctionIdent();
            case FIELD_IDENT: return parseFieldIdent();
            case ATTACH_POINT_END: return parseAttachPointEnd();
            case XMLNS_KEYWORD: return parseXMLNSKeyword();
            case XML_NAMESPACE_PREFIX_DECL: return parseXMLDeclRhs((STNode) args[0], (STNode) args[1]);
            case NAMESPACE_PREFIX: return parseNamespacePrefix();
            case WORKER_KEYWORD: return parseWorkerKeyword();
            case WORKER_NAME: return parseWorkerName();
            case FORK_KEYWORD: return parseForkKeyword();
            case DECIMAL_FLOATING_POINT_LITERAL: return parseDecimalFloatingPointLiteral();
            case HEX_FLOATING_POINT_LITERAL: return parseHexFloatingPointLiteral();
            case TRAP_KEYWORD: return parseTrapKeyword();
            case IN_KEYWORD: return parseInKeyword();
            case FOREACH_KEYWORD: return parseForEachKeyword();
            case TABLE_KEYWORD: return parseTableKeyword();
            case KEY_KEYWORD: return parseKeyKeyword();
            case TABLE_KEYWORD_RHS: return parseTableConstructorExpr((STNode) args[0], (STNode) args[1]);
            default: throw new IllegalStateException("Cannot re-parse rule: " + context);
        }
    }

    /*
     * Private methods
     */

    // Look at the next token without consuming it.
    private STToken peek() {
        return this.tokenReader.peek();
    }

    // Look ahead k tokens without consuming any.
    private STToken peek(int k) {
        return this.tokenReader.peek(k);
    }

    // Consume and return the next token.
    private STToken consume() {
        return this.tokenReader.read();
    }

    private Solution recover(STToken token, ParserRuleContext currentCtx, Object...
parsedNodes) { return this.errorHandler.recover(currentCtx, token, parsedNodes); } private void startContext(ParserRuleContext context) { this.errorHandler.startContext(context); } private void endContext() { this.errorHandler.endContext(); } /** * Switch the current context to the provided one. This will replace the * existing context. * * @param context Context to switch to. */ private void switchContext(ParserRuleContext context) { this.errorHandler.switchContext(context); } /** * Parse a given input and returns the AST. Starts parsing from the top of a compilation unit. * * @return Parsed node */ private STNode parseCompUnit() { startContext(ParserRuleContext.COMP_UNIT); STToken token = peek(); List<STNode> otherDecls = new ArrayList<>(); List<STNode> importDecls = new ArrayList<>(); boolean processImports = true; while (token.kind != SyntaxKind.EOF_TOKEN) { STNode decl = parseTopLevelNode(token.kind); if (decl.kind == SyntaxKind.IMPORT_DECLARATION) { if (processImports) { importDecls.add(decl); } else { otherDecls.add(decl); this.errorHandler.reportInvalidNode(token, "imports must be declared before other declarations"); } } else { if (processImports) { processImports = false; } otherDecls.add(decl); } token = peek(); } STToken eof = consume(); endContext(); return STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls), STNodeFactory.createNodeList(otherDecls), eof); } /** * Parse top level node having an optional modifier preceding it. 
*
 * @return Parsed node
 */
    private STNode parseTopLevelNode() {
        STToken token = peek();
        return parseTopLevelNode(token.kind);
    }

    protected STNode parseTopLevelNode(SyntaxKind tokenKind) {
        STNode metadata;
        switch (tokenKind) {
            case EOF_TOKEN:
                return consume();
            case DOCUMENTATION_LINE:
            case AT_TOKEN:
                // Metadata (doc lines / annotations) precedes the actual node.
                metadata = parseMetaData(tokenKind);
                return parseTopLevelNode(metadata);
            case IMPORT_KEYWORD:
            case FINAL_KEYWORD:
            case PUBLIC_KEYWORD:
            case FUNCTION_KEYWORD:
            case TYPE_KEYWORD:
            case LISTENER_KEYWORD:
            case CONST_KEYWORD:
            case ANNOTATION_KEYWORD:
            case XMLNS_KEYWORD:
            case SERVICE_KEYWORD:
                metadata = createEmptyMetadata();
                break;
            case IDENTIFIER_TOKEN:
                if (isModuleVarDeclStart(1)) {
                    return parseModuleVarDecl(createEmptyMetadata(), null);
                }
                // else: deliberate fall-through to the recovery path below.
            default:
                if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                    metadata = createEmptyMetadata();
                    break;
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE);

                // KEEP: proceed with the current token and empty metadata.
                if (solution.action == Action.KEEP) {
                    metadata = STNodeFactory.createNodeList(new ArrayList<>());
                    break;
                }

                // REMOVE: the recovered node replaces the top-level node entirely.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseTopLevelNode(solution.tokenKind);
        }

        return parseTopLevelNode(tokenKind, metadata);
    }

    /**
     * Parse top level node having an optional modifier preceding it, given the next token kind.
     *
     * @param tokenKind Next token kind
     * @return Parsed node
     */
    private STNode parseTopLevelNode(STNode metadata) {
        STToken nextToken = peek();
        return parseTopLevelNode(nextToken.kind, metadata);
    }

    private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) {
        STNode qualifier = null;
        switch (tokenKind) {
            case EOF_TOKEN:
                // Metadata with nothing following it is dangling.
                if (metadata != null) {
                    this.errorHandler.reportInvalidNode(null, "invalid metadata");
                }
                return consume();
            case PUBLIC_KEYWORD:
                qualifier = parseQualifier();
                tokenKind = peek().kind;
                break;
            case FUNCTION_KEYWORD:
            case TYPE_KEYWORD:
            case LISTENER_KEYWORD:
            case CONST_KEYWORD:
            case FINAL_KEYWORD:
            case IMPORT_KEYWORD:
            case ANNOTATION_KEYWORD:
            case XMLNS_KEYWORD:
                break;
            case IDENTIFIER_TOKEN:
                if (isModuleVarDeclStart(1)) {
                    return parseModuleVarDecl(metadata, null);
                }
                // else: deliberate fall-through to the recovery path below.
            default:
                if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                    break;
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                if (solution.action == Action.KEEP) {
                    qualifier = STNodeFactory.createEmptyNode();
                    break;
                }

                return parseTopLevelNode(solution.tokenKind, metadata);
        }

        return parseTopLevelNode(tokenKind, metadata, qualifier);
    }

    /**
     * Check whether the cursor is at the start of a module level var-decl.
     *
     * @param lookahead Offset of the token to check
     * @return <code>true</code> if the cursor is at the start of a module level var-decl.
     *         <code>false</code> otherwise.
*/ private boolean isModuleVarDeclStart(int lookahead) { STToken nextToken = peek(lookahead + 1); switch (nextToken.kind) { case EQUAL_TOKEN: case OPEN_BRACKET_TOKEN: case QUESTION_MARK_TOKEN: case PIPE_TOKEN: return true; case IDENTIFIER_TOKEN: switch (peek(lookahead + 2).kind) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } case COLON_TOKEN: if (lookahead > 1) { return false; } if (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) { return false; } return isModuleVarDeclStart(lookahead + 2); default: return false; } } /** * Parse import declaration. * <p> * <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code> * * @return Parsed node */ private STNode parseImportDecl() { startContext(ParserRuleContext.IMPORT_DECL); this.tokenReader.switchMode(ParserMode.IMPORT); STNode importKeyword = parseImportKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME); STToken token = peek(); STNode importDecl = parseImportDecl(token.kind, importKeyword, identifier); this.tokenReader.resetMode(); endContext(); return importDecl; } /** * Parse import keyword. * * @return Parsed node */ private STNode parseImportKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IMPORT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD); return sol.recoveredNode; } } /** * Parse identifier. * * @return Parsed node */ private STNode parseIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse RHS of the import declaration. This includes the components after the * starting identifier (org-name/module-name) of the import decl. 
* * @param importKeyword Import keyword * @param identifier Org-name or the module name * @return Parsed node */ private STNode parseImportDecl(STNode importKeyword, STNode identifier) { STToken nextToken = peek(); return parseImportDecl(nextToken.kind, importKeyword, identifier); } private STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) { STNode orgName; STNode moduleName; STNode version; STNode alias; switch (tokenKind) { case SLASH_TOKEN: STNode slash = parseSlashToken(); orgName = STNodeFactory.createImportOrgNameNode(identifier, slash); moduleName = parseModuleName(); version = parseVersion(); alias = parseImportPrefixDecl(); break; case DOT_TOKEN: case VERSION_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = parseVersion(); alias = parseImportPrefixDecl(); break; case AS_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = parseImportPrefixDecl(); break; case SEMICOLON_TOKEN: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(tokenKind, identifier); version = STNodeFactory.createEmptyNode(); alias = STNodeFactory.createEmptyNode(); break; default: Solution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportDecl(solution.tokenKind, importKeyword, identifier); } STNode semicolon = parseSemicolon(); return STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon); } /** * parse slash token. * * @return Parsed node */ private STNode parseSlashToken() { STToken token = peek(); if (token.kind == SyntaxKind.SLASH_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SLASH); return sol.recoveredNode; } } /** * Parse dot token. 
* * @return Parsed node */ private STNode parseDotToken() { STToken nextToken = peek(); return parseDotToken(nextToken.kind); } private STNode parseDotToken(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.DOT_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.DOT); return sol.recoveredNode; } } /** * Parse module name of a import declaration. * * @return Parsed node */ private STNode parseModuleName() { STNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME); return parseModuleName(peek().kind, moduleNameStart); } /** * Parse import module name of a import declaration, given the module name start identifier. * * @param moduleNameStart Starting identifier of the module name * @return Parsed node */ private STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) { List<STNode> moduleNameParts = new ArrayList<>(); moduleNameParts.add(moduleNameStart); while (!isEndOfImportModuleName(nextTokenKind)) { moduleNameParts.add(parseDotToken()); moduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME)); nextTokenKind = peek().kind; } return STNodeFactory.createNodeList(moduleNameParts); } private boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) { return nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN; } private boolean isEndOfImportDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case ABSTRACT_KEYWORD: case CONST_KEYWORD: case EOF_TOKEN: case SERVICE_KEYWORD: case IMPORT_KEYWORD: case FINAL_KEYWORD: return true; default: return false; } } /** * Parse version component of a import declaration. 
* <p> * <code>version-decl := version sem-ver</code> * * @return Parsed node */ private STNode parseVersion() { STToken nextToken = peek(); return parseVersion(nextToken.kind); } private STNode parseVersion(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case VERSION_KEYWORD: STNode versionKeyword = parseVersionKeywrod(); STNode versionNumber = parseVersionNumber(); return STNodeFactory.createImportVersionNode(versionKeyword, versionNumber); case AS_KEYWORD: case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersion(solution.tokenKind); } } /** * Parse version keywrod. * * @return Parsed node */ private STNode parseVersionKeywrod() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.VERSION_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD); return sol.recoveredNode; } } /** * Parse version number. * <p> * <code>sem-ver := major-num [. minor-num [. 
patch-num]] * <br/> * major-num := DecimalNumber * <br/> * minor-num := DecimalNumber * <br/> * patch-num := DecimalNumber * </code> * * @return Parsed node */ private STNode parseVersionNumber() { STToken nextToken = peek(); return parseVersionNumber(nextToken.kind); } private STNode parseVersionNumber(SyntaxKind nextTokenKind) { STNode majorVersion; switch (nextTokenKind) { case DECIMAL_INTEGER_LITERAL: majorVersion = parseMajorVersion(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VERSION_NUMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVersionNumber(solution.tokenKind); } List<STNode> versionParts = new ArrayList<>(); versionParts.add(majorVersion); STNode minorVersion = parseMinorVersion(); if (minorVersion != null) { versionParts.add(minorVersion); STNode patchVersion = parsePatchVersion(); if (patchVersion != null) { versionParts.add(patchVersion); } } return STNodeFactory.createNodeList(versionParts); } private STNode parseMajorVersion() { return parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION); } private STNode parseMinorVersion() { return parseSubVersion(ParserRuleContext.MINOR_VERSION); } private STNode parsePatchVersion() { return parseSubVersion(ParserRuleContext.PATCH_VERSION); } /** * Parse decimal literal. * * @param context Context in which the decimal literal is used. * @return Parsed node */ private STNode parseDecimalIntLiteral(ParserRuleContext context) { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) { return consume(); } else { Solution sol = recover(peek(), context); return sol.recoveredNode; } } /** * Parse sub version. i.e: minor-version/patch-version. * * @param context Context indicating what kind of sub-version is being parsed. 
* @return Parsed node */ private STNode parseSubVersion(ParserRuleContext context) { STToken nextToken = peek(); return parseSubVersion(nextToken.kind, context); } private STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) { switch (nextTokenKind) { case AS_KEYWORD: case SEMICOLON_TOKEN: return null; case DOT_TOKEN: STNode leadingDot = parseDotToken(); STNode versionNumber = parseDecimalIntLiteral(context); return STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSubVersion(solution.tokenKind, context); } } /** * Parse import prefix declaration. * <p> * <code>import-prefix-decl := as import-prefix * <br/> * import-prefix := a identifier | _ * </code> * * @return Parsed node */ private STNode parseImportPrefixDecl() { STToken token = peek(); return parseImportPrefixDecl(token.kind); } private STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case AS_KEYWORD: STNode asKeyword = parseAsKeyword(); STNode prefix = parseImportPrefix(); return STNodeFactory.createImportPrefixNode(asKeyword, prefix); case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextTokenKind)) { return STNodeFactory.createEmptyNode(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseImportPrefixDecl(solution.tokenKind); } } /** * Parse <code>as</code> keyword. * * @return Parsed node */ private STNode parseAsKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AS_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.AS_KEYWORD); return sol.recoveredNode; } } /** * Parse import prefix. 
*
 * @return Parsed node
 */
    private STNode parseImportPrefix() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            Solution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse top level node, given the modifier that precedes it.
     *
     * @param qualifier Qualifier that precedes the top level node
     * @return Parsed node
     */
    private STNode parseTopLevelNode(STNode metadata, STNode qualifier) {
        STToken token = peek();
        return parseTopLevelNode(token.kind, metadata, qualifier);
    }

    /**
     * Parse top level node given the next token kind and the modifier that precedes it.
     *
     * @param tokenKind Next token kind
     * @param qualifier Qualifier that precedes the top level node
     * @return Parsed top-level node
     */
    private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) {
        switch (tokenKind) {
            case FUNCTION_KEYWORD:
                return parseFunctionDefinition(metadata, getQualifier(qualifier));
            case TYPE_KEYWORD:
                return parseModuleTypeDefinition(metadata, getQualifier(qualifier));
            case LISTENER_KEYWORD:
                return parseListenerDeclaration(metadata, getQualifier(qualifier));
            case CONST_KEYWORD:
                return parseConstantDeclaration(metadata, getQualifier(qualifier));
            case ANNOTATION_KEYWORD:
                STNode constKeyword = STNodeFactory.createEmptyNode();
                return parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword);
            case IMPORT_KEYWORD:
                // These constructs cannot carry a visibility qualifier.
                reportInvalidQualifier(qualifier);
                return parseImportDecl();
            case XMLNS_KEYWORD:
                reportInvalidQualifier(qualifier);
                return parseXMLNamepsaceDeclaration();
            case FINAL_KEYWORD:
                reportInvalidQualifier(qualifier);
                STNode finalKeyword = parseFinalKeyword();
                return parseVariableDecl(metadata, finalKeyword, true);
            case SERVICE_KEYWORD:
                // "service" can start either a service decl or a service-typed var-decl.
                if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) {
                    reportInvalidQualifier(qualifier);
                    return parseServiceDecl(metadata);
                }
                return parseModuleVarDecl(metadata, qualifier);
            case IDENTIFIER_TOKEN:
                if (isModuleVarDeclStart(1)) {
                    return parseModuleVarDecl(metadata, qualifier);
                }
                // else: deliberate fall-through to the recovery path below.
            default:
                if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                    return parseModuleVarDecl(metadata, qualifier);
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                if (solution.action == Action.KEEP) {
                    return parseModuleVarDecl(metadata, qualifier);
                }

                return parseTopLevelNode(solution.tokenKind, metadata, qualifier);
        }
    }

    // Module-level var-decl with no "final" keyword; a qualifier is invalid here.
    private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) {
        reportInvalidQualifier(qualifier);
        STNode finalKeyword = STNodeFactory.createEmptyNode();
        return parseVariableDecl(metadata, finalKeyword, true);
    }

    // Normalize a possibly-null qualifier into an (empty) node.
    private STNode getQualifier(STNode qualifier) {
        return qualifier == null ? STNodeFactory.createEmptyNode() : qualifier;
    }

    private void reportInvalidQualifier(STNode qualifier) {
        if (qualifier != null && qualifier.kind != SyntaxKind.NONE) {
            this.errorHandler.reportInvalidNode((STToken) qualifier,
                    "invalid qualifier '" + qualifier.toString().trim() + "'");
        }
    }

    /**
     * Parse access modifiers.
     *
     * @return Parsed node
     */
    private STNode parseQualifier() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PUBLIC_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * <p>
     * Parse function definition. A function definition has the following structure.
* </p> * <code> * function-defn := FUNCTION identifier function-signature function-body * </code> * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @return Parsed node */ private STNode parseFunctionDefinition(STNode metadata, STNode visibilityQualifier) { startContext(ParserRuleContext.FUNC_DEFINITION); STNode functionKeyword = parseFunctionKeyword(); STNode name = parseFunctionName(); STNode openParenthesis = parseOpenParenthesis(); STNode parameters = parseParamList(); STNode closeParenthesis = parseCloseParenthesis(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode body = parseFunctionBody(); endContext(); return STNodeFactory.createFunctionDefinitionNode(metadata, visibilityQualifier, functionKeyword, name, openParenthesis, parameters, closeParenthesis, returnTypeDesc, body); } /** * Parse function keyword. Need to validate the token before consuming, * since we can reach here while recovering. * * @return Parsed node */ private STNode parseFunctionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD); return sol.recoveredNode; } } /** * Parse function name. * * @return Parsed node */ private STNode parseFunctionName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNC_NAME); return sol.recoveredNode; } } /** * Parse open parenthesis. * * @return Parsed node */ private STNode parseOpenParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_PARENTHESIS); return sol.recoveredNode; } } /** * Parse close parenthesis. 
* * @return Parsed node */ private STNode parseCloseParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS); return sol.recoveredNode; } } /** * <p> * Parse parameter list. * </p> * <code> * param-list := required-params [, defaultable-params] [, rest-param] * <br/>&nbsp;| defaultable-params [, rest-param] * <br/>&nbsp;| [rest-param] * <br/><br/> * required-params := required-param (, required-param)* * <br/><br/> * required-param := [annots] [public] type-descriptor [param-name] * <br/><br/> * defaultable-params := defaultable-param (, defaultable-param)* * <br/><br/> * defaultable-param := [annots] [public] type-descriptor [param-name] default-value * <br/><br/> * rest-param := [annots] type-descriptor ... [param-name] * <br/><br/> * param-name := identifier * </code> * * @return Parsed node */ private STNode parseParamList() { startContext(ParserRuleContext.PARAM_LIST); ArrayList<STNode> paramsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } STNode startingComma = STNodeFactory.createEmptyNode(); this.currentParamKind = ParserRuleContext.REQUIRED_PARAM; paramsList.add(parseParameter(startingComma)); token = peek(); while (!isEndOfParametersList(token.kind)) { STNode leadingComma = parseComma(); STNode param = parseParameter(leadingComma); paramsList.add(param); token = peek(); } STNode params = STNodeFactory.createNodeList(paramsList); endContext(); return params; } /** * Parse a single parameter. Parameter can be a required parameter, a defaultable * parameter, or a rest parameter. 
*
 * @param leadingComma Comma that occurs before the param
 * @return Parsed node
 */
    private STNode parseParameter(STNode leadingComma) {
        STToken token = peek();
        // A rest-param must be last; any parameter after it is invalid but is
        // still parsed (as a required param) so parsing can continue.
        if (this.currentParamKind == ParserRuleContext.REST_PARAM) {
            this.errorHandler.reportInvalidNode(token, "cannot have more parameters after the rest-parameter");
            startContext(ParserRuleContext.REQUIRED_PARAM);
        } else {
            startContext(this.currentParamKind);
        }
        return parseParameter(token.kind, leadingComma, 1);
    }

    private STNode parseParameter(STNode leadingComma, int nextTokenOffset) {
        return parseParameter(peek().kind, leadingComma, nextTokenOffset);
    }

    private STNode parseParameter(SyntaxKind nextTokenKind, STNode leadingComma, int nextTokenOffset) {
        STNode annots;
        switch (nextTokenKind) {
            case AT_TOKEN:
                annots = parseAnnotations(nextTokenKind);
                nextTokenKind = peek().kind;
                break;
            case PUBLIC_KEYWORD:
                annots = STNodeFactory.createNodeList(new ArrayList<>());
                break;
            case IDENTIFIER_TOKEN:
                // Identifier that clearly starts an un-annotated param: shortcut
                // straight past annotation/qualifier parsing.
                if (isParamWithoutAnnotStart(nextTokenOffset)) {
                    annots = STNodeFactory.createNodeList(new ArrayList<>());
                    STNode qualifier = STNodeFactory.createEmptyNode();
                    return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier);
                }
                // else: deliberate fall-through to the recovery path below.
            default:
                if (nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN && isTypeStartingToken(nextTokenKind)) {
                    annots = STNodeFactory.createNodeList(new ArrayList<>());
                    break;
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.PARAMETER, leadingComma, nextTokenOffset);

                if (solution.action == Action.KEEP) {
                    annots = STNodeFactory.createNodeList(new ArrayList<>());
                    break;
                }

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseParameter(solution.tokenKind, leadingComma, 0);
        }

        return parseParamGivenAnnots(nextTokenKind, leadingComma, annots, 1);
    }

    private STNode parseParamGivenAnnots(STNode leadingComma, STNode annots, int nextNextTokenOffset) {
        return parseParamGivenAnnots(peek().kind, leadingComma, annots, nextNextTokenOffset);
    }

    private STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, STNode leadingComma, STNode annots,
                                         int nextTokenOffset) {
        STNode qualifier;
        switch (nextTokenKind) {
            case PUBLIC_KEYWORD:
                qualifier = parseQualifier();
                break;
            case IDENTIFIER_TOKEN:
                if (isParamWithoutAnnotStart(nextTokenOffset)) {
                    qualifier = STNodeFactory.createEmptyNode();
                    break;
                }
                // else: deliberate fall-through into the AT_TOKEN/default arm.
            case AT_TOKEN:
            default:
                if (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                    qualifier = STNodeFactory.createEmptyNode();
                    break;
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, leadingComma, annots,
                        nextTokenOffset);

                if (solution.action == Action.KEEP) {
                    qualifier = STNodeFactory.createEmptyNode();
                    break;
                }

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseParamGivenAnnots(solution.tokenKind, leadingComma, annots, 0);
        }

        return parseParamGivenAnnotsAndQualifier(leadingComma, annots, qualifier);
    }

    // Annotations and qualifier are settled; parse the type and the remainder,
    // then pop the per-parameter context pushed in parseParameter(STNode).
    private STNode parseParamGivenAnnotsAndQualifier(STNode leadingComma, STNode annots, STNode qualifier) {
        STNode type = parseTypeDescriptor();
        STNode param = parseAfterParamType(leadingComma, annots, qualifier, type);
        endContext();
        return param;
    }

    /**
     * Check whether the cursor is at the start of a parameter that doesn't have annotations.
     *
     * @param tokenOffset Offset of the token to check
     * @return <code>true</code> if the cursor is at the start of a parameter. <code>false</code> otherwise.
*/ private boolean isParamWithoutAnnotStart(int tokenOffset) { STToken nextToken = peek(tokenOffset + 1); switch (nextToken.kind) { case PUBLIC_KEYWORD: return isParamWithoutAnnotStart(tokenOffset + 1); case ELLIPSIS_TOKEN: return true; case IDENTIFIER_TOKEN: return true; default: return false; } } private STNode parseAfterParamType(STNode leadingComma, STNode annots, STNode qualifier, STNode type) { STToken token = peek(); return parseAfterParamType(token.kind, leadingComma, annots, qualifier, type); } private STNode parseAfterParamType(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type) { switch (tokenKind) { case ELLIPSIS_TOKEN: this.currentParamKind = ParserRuleContext.REST_PARAM; switchContext(ParserRuleContext.REST_PARAM); reportInvalidQualifier(qualifier); STNode ellipsis = parseEllipsis(); STNode paramName = parseVariableName(); return STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName); case IDENTIFIER_TOKEN: paramName = parseVariableName(); return parseParameterRhs(leadingComma, annots, qualifier, type, paramName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, leadingComma, annots, qualifier, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAfterParamType(solution.tokenKind, leadingComma, annots, qualifier, type); } } /** * Parse ellipsis. * * @return Parsed node */ private STNode parseEllipsis() { STToken token = peek(); if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ELLIPSIS); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a required/defaultable parameter. 
* </p> * <code>parameter-rhs := [= expression]</code> * * @param leadingComma Comma that precedes this parameter * @param annots Annotations attached to the parameter * @param qualifier Visibility qualifier * @param type Type descriptor * @param paramName Name of the parameter * @return Parsed parameter node */ private STNode parseParameterRhs(STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { STToken token = peek(); return parseParameterRhs(token.kind, leadingComma, annots, qualifier, type, paramName); } private STNode parseParameterRhs(SyntaxKind tokenKind, STNode leadingComma, STNode annots, STNode qualifier, STNode type, STNode paramName) { if (isEndOfParameter(tokenKind)) { if (this.currentParamKind == ParserRuleContext.DEFAULTABLE_PARAM) { this.errorHandler.reportInvalidNode(peek(), "cannot have a required parameter after a defaultable parameter"); } return STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName); } else if (tokenKind == SyntaxKind.EQUAL_TOKEN) { if (this.currentParamKind == ParserRuleContext.REQUIRED_PARAM) { this.currentParamKind = ParserRuleContext.DEFAULTABLE_PARAM; switchContext(ParserRuleContext.DEFAULTABLE_PARAM); } STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal, expr); } else { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_RHS, leadingComma, annots, qualifier, type, paramName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseParameterRhs(solution.tokenKind, leadingComma, annots, qualifier, type, paramName); } } /** * Parse comma. 
* * @return Parsed node */ private STNode parseComma() { STToken token = peek(); if (token.kind == SyntaxKind.COMMA_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COMMA); return sol.recoveredNode; } } /** * Check whether the given token is an end of a parameter. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter. <code>false</code> otherwise */ private boolean isEndOfParameter(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case AT_TOKEN: return true; default: return false; } } /** * Check whether the given token is an end of a parameter-list. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise */ private boolean isEndOfParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case FUNCTION_KEYWORD: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case OPEN_BRACE_TOKEN: return true; default: return false; } } /** * Parse return type descriptor of a function. A return type descriptor has the following structure. 
* * <code>return-type-descriptor := [ returns annots type-descriptor ]</code> * * @return Parsed node */ private STNode parseReturnTypeDescriptor() { startContext(ParserRuleContext.RETURN_TYPE_DESCRIPTOR); STToken token = peek(); if (token.kind != SyntaxKind.RETURNS_KEYWORD) { endContext(); return STNodeFactory.createEmptyNode(); } STNode returnsKeyword = consume(); STNode annot = parseAnnotations(); STNode type = parseTypeDescriptor(); endContext(); return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type); } /** * <p> * Parse a type descriptor. A type descriptor has the following structure. * </p> * <code>type-descriptor := * &nbsp;simple-type-descriptor<br/> * &nbsp;| structured-type-descriptor<br/> * &nbsp;| behavioral-type-descriptor<br/> * &nbsp;| singleton-type-descriptor<br/> * &nbsp;| union-type-descriptor<br/> * &nbsp;| optional-type-descriptor<br/> * &nbsp;| any-type-descriptor<br/> * &nbsp;| anydata-type-descriptor<br/> * &nbsp;| byte-type-descriptor<br/> * &nbsp;| json-type-descriptor<br/> * &nbsp;| type-descriptor-reference<br/> * &nbsp;| ( type-descriptor ) * <br/> * type-descriptor-reference := qualified-identifier</code> * * @return Parsed node */ private STNode parseTypeDescriptor() { STToken token = peek(); STNode typeDesc = parseTypeDescriptor(token.kind); return parseComplexTypeDescriptor(typeDesc); } /** * This will handle the parsing of optional,array,union type desc to infinite length. 
* * @param typeDesc * * @return Parsed type descriptor node */ private STNode parseComplexTypeDescriptor(STNode typeDesc) { STToken nextToken = peek(); switch (nextToken.kind) { case QUESTION_MARK_TOKEN: return parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc)); case OPEN_BRACKET_TOKEN: return parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc)); case PIPE_TOKEN: return parseComplexTypeDescriptor(parseUnionTypeDescriptor(typeDesc)); default: return typeDesc; } } /** * <p> * Parse a type descriptor, given the next token kind. * </p> * If the preceding token is <code>?</code> then it is an optional type descriptor * * @param tokenKind Next token kind * @return Parsed node */ private STNode parseTypeDescriptor(SyntaxKind tokenKind) { switch (tokenKind) { case IDENTIFIER_TOKEN: return parseTypeReference(); case RECORD_KEYWORD: return parseRecordTypeDescriptor(); case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: return parseObjectTypeDescriptor(); case OPEN_PAREN_TOKEN: return parseNilTypeDescriptor(); case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return parseParameterizedTypeDescriptor(); default: if (isSimpleType(tokenKind)) { return parseSimpleTypeDescriptor(); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseTypeDescriptor(solution.tokenKind); } } /** * Parse simple type descriptor. * * @return Parsed node */ private STNode parseSimpleTypeDescriptor() { STToken node = peek(); if (isSimpleType(node.kind)) { STToken token = consume(); SyntaxKind typeKind = getTypeSyntaxKind(token.kind); return STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token); } else { Solution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR); return sol.recoveredNode; } } /** * <p> * Parse function body. A function body has the following structure. 
* </p> * <code> * function-body := function-body-block | external-function-body * external-function-body := = annots external ; * function-body-block := { [default-worker-init, named-worker-decl+] default-worker } * </code> * * @return Parsed node */ private STNode parseFunctionBody() { STToken token = peek(); return parseFunctionBody(token.kind); } /** * Parse function body, given the next token kind. * * @param tokenKind Next token kind * @return Parsed node */ protected STNode parseFunctionBody(SyntaxKind tokenKind) { switch (tokenKind) { case EQUAL_TOKEN: return parseExternalFunctionBody(); case OPEN_BRACE_TOKEN: return parseFunctionBodyBlock(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FUNC_BODY); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.tokenKind == SyntaxKind.NONE) { return STNodeFactory.createMissingToken(solution.tokenKind); } return parseFunctionBody(solution.tokenKind); } } /** * <p> * Parse function body block. A function body block has the following structure. 
* </p> * * <code> * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/> * default-worker-init := sequence-stmt<br/> * default-worker := sequence-stmt<br/> * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/> * worker-name := identifier<br/> * </code> * * @return Parsed node */ private STNode parseFunctionBodyBlock() { startContext(ParserRuleContext.FUNC_BODY_BLOCK); STNode openBrace = parseOpenBrace(); STToken token = peek(); ArrayList<STNode> firstStmtList = new ArrayList<>(); ArrayList<STNode> workers = new ArrayList<>(); ArrayList<STNode> secondStmtList = new ArrayList<>(); ParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT; boolean hasNamedWorkers = false; while (!isEndOfStatements(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (currentCtx) { case DEFAULT_WORKER_INIT: if (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) { firstStmtList.add(stmt); break; } currentCtx = ParserRuleContext.NAMED_WORKERS; hasNamedWorkers = true; case NAMED_WORKERS: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { workers.add(stmt); break; } currentCtx = ParserRuleContext.DEFAULT_WORKER; case DEFAULT_WORKER: default: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here"); break; } secondStmtList.add(stmt); break; } token = peek(); } STNode namedWorkersList; STNode statements; if (hasNamedWorkers) { STNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList); STNode namedWorkers = STNodeFactory.createNodeList(workers); namedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers); statements = STNodeFactory.createNodeList(secondStmtList); } else { namedWorkersList = STNodeFactory.createEmptyNode(); statements = STNodeFactory.createNodeList(firstStmtList); } STNode closeBrace = parseCloseBrace(); endContext(); return 
STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace); } private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PUBLIC_KEYWORD: case LISTENER_KEYWORD: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.RECORD_FIELD, 1); default: return false; } } private boolean isEndOfObjectTypeNode(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case IMPORT_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1); default: return false; } } /** * Parse type reference or variable reference. * * @return Parsed node */ private STNode parseStatementStartIdentifier() { return parseQualifiedIdentifier(ParserRuleContext.STATEMENT_START_IDENTIFIER); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName() { STToken token = peek(); return parseVariableName(token.kind); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName(SyntaxKind tokenKind) { if (tokenKind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME); return sol.recoveredNode; } } /** * Parse open brace. * * @return Parsed node */ private STNode parseOpenBrace() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACE); return sol.recoveredNode; } } /** * Parse close brace. 
* * @return Parsed node */ private STNode parseCloseBrace() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACE); return sol.recoveredNode; } } /** * <p> * Parse external function body. An external function body has the following structure. * </p> * <code> * external-function-body := = annots external ; * </code> * * @return Parsed node */ private STNode parseExternalFunctionBody() { startContext(ParserRuleContext.EXTERNAL_FUNC_BODY); STNode assign = parseAssignOp(); STNode annotation = parseAnnotations(); STNode externalKeyword = parseExternalKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon); } /** * Parse semicolon. * * @return Parsed node */ private STNode parseSemicolon() { STToken token = peek(); if (token.kind == SyntaxKind.SEMICOLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.SEMICOLON); return sol.recoveredNode; } } /** * Parse <code>external</code> keyword. * * @return Parsed node */ private STNode parseExternalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EXTERNAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD); return sol.recoveredNode; } } /* * Operators */ /** * Parse assign operator. * * @return Parsed node */ private STNode parseAssignOp() { STToken token = peek(); if (token.kind == SyntaxKind.EQUAL_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ASSIGN_OP); return sol.recoveredNode; } } /** * Parse binary operator. 
* * @return Parsed node */ private STNode parseBinaryOperator() { STToken token = peek(); if (isBinaryOperator(token.kind)) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.BINARY_OPERATOR); return sol.recoveredNode; } } /** * Check whether the given token kind is a binary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isBinaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case GT_TOKEN: case LT_TOKEN: case EQUAL_GT_TOKEN: case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case GT_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: case LOGICAL_AND_TOKEN: case LOGICAL_OR_TOKEN: return true; default: return false; } } /** * Get the precedence of a given operator. * * @param binaryOpKind Operator kind * @return Precedence of the given operator */ private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) { switch (binaryOpKind) { case ASTERISK_TOKEN: case SLASH_TOKEN: return OperatorPrecedence.MULTIPLICATIVE; case PLUS_TOKEN: case MINUS_TOKEN: return OperatorPrecedence.ADDITIVE; case GT_TOKEN: case LT_TOKEN: case GT_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case IS_KEYWORD: return OperatorPrecedence.BINARY_COMPARE; case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case OPEN_PAREN_TOKEN: return OperatorPrecedence.MEMBER_ACCESS; case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: return OperatorPrecedence.EQUALITY; case BITWISE_AND_TOKEN: return OperatorPrecedence.BITWISE_AND; case BITWISE_XOR_TOKEN: return OperatorPrecedence.BITWISE_XOR; case PIPE_TOKEN: return OperatorPrecedence.BITWISE_OR; case LOGICAL_AND_TOKEN: return OperatorPrecedence.LOGICAL_AND; case LOGICAL_OR_TOKEN: return OperatorPrecedence.LOGICAL_OR; case 
RIGHT_ARROW_TOKEN: return OperatorPrecedence.ACTION; default: throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'"); } } /** * <p> * Get the operator kind to insert during recovery, given the precedence level. * </p> * * @param opPrecedenceLevel Precedence of the given operator * @return Kind of the operator to insert */ private SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) { switch (opPrecedenceLevel) { case UNARY: case ACTION: case MULTIPLICATIVE: return SyntaxKind.ASTERISK_TOKEN; case ADDITIVE: return SyntaxKind.PLUS_TOKEN; case BINARY_COMPARE: return SyntaxKind.LT_TOKEN; case EQUALITY: return SyntaxKind.DOUBLE_EQUAL_TOKEN; case BITWISE_AND: return SyntaxKind.BITWISE_AND_TOKEN; case BITWISE_XOR: return SyntaxKind.BITWISE_XOR_TOKEN; case BITWISE_OR: return SyntaxKind.PIPE_TOKEN; case LOGICAL_AND: return SyntaxKind.LOGICAL_AND_TOKEN; case LOGICAL_OR: return SyntaxKind.LOGICAL_OR_TOKEN; default: throw new UnsupportedOperationException( "Unsupported operator precedence level'" + opPrecedenceLevel + "'"); } } /** * <p> * Parse a module type definition. * </p> * <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code> * * @param metadata Metadata * @param qualifier Visibility qualifier * @return Parsed node */ private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_TYPE_DEFINITION); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse type keyword. 
* * @return Parsed node */ private STNode parseTypeKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_KEYWORD); return sol.recoveredNode; } } /** * Parse type name. * * @return Parsed node */ private STNode parseTypeName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TYPE_NAME); return sol.recoveredNode; } } /** * <p> * Parse record type descriptor. A record type descriptor body has the following structure. * </p> * * <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor * <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* } * <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |} * </code> * * @return Parsed node */ private STNode parseRecordTypeDescriptor() { startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR); STNode recordKeyword = parseRecordKeyword(); STNode bodyStartDelimiter = parseRecordBodyStartDelimiter(); boolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN; STNode fields = parseFieldDescriptors(isInclusive); STNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind); endContext(); return STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields, bodyEndDelimiter); } /** * Parse record body start delimiter. 
* * @return Parsed node */ private STNode parseRecordBodyStartDelimiter() { STToken token = peek(); return parseRecordBodyStartDelimiter(token.kind); } private STNode parseRecordBodyStartDelimiter(SyntaxKind kind) { switch (kind) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyStart(); case OPEN_BRACE_TOKEN: return parseOpenBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_START); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyStartDelimiter(solution.tokenKind); } } /** * Parse closed-record body start delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyStart() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START); return sol.recoveredNode; } } /** * Parse record body close delimiter. * * @return Parsed node */ private STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) { switch (startingDelimeter) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyEnd(); case OPEN_BRACE_TOKEN: return parseCloseBrace(); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_BODY_END); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseRecordBodyCloseDelimiter(solution.tokenKind); } } /** * Parse closed-record body end delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyEnd() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END); return sol.recoveredNode; } } /** * Parse record keyword. 
* * @return Parsed node */ private STNode parseRecordKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RECORD_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.RECORD_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse field descriptors. * </p> * * @return Parsed node */ private STNode parseFieldDescriptors(boolean isInclusive) { ArrayList<STNode> recordFields = new ArrayList<>(); STToken token = peek(); boolean endOfFields = false; while (!isEndOfRecordTypeNode(token.kind)) { STNode field = parseFieldOrRestDescriptor(isInclusive); if (field == null) { endOfFields = true; break; } recordFields.add(field); token = peek(); if (field.kind == SyntaxKind.RECORD_REST_TYPE) { break; } } while (!endOfFields && !isEndOfRecordTypeNode(token.kind)) { parseFieldOrRestDescriptor(isInclusive); this.errorHandler.reportInvalidNode(token, "cannot have more fields after the rest type descriptor"); token = peek(); } return STNodeFactory.createNodeList(recordFields); } /** * <p> * Parse field descriptor or rest descriptor. * </p> * * <code> * <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference * <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ; * <br/><br/>field-name := identifier * <br/><br/>default-value := = expression * <br/><br/>record-type-reference := * type-reference ; * <br/><br/>record-rest-descriptor := type-descriptor ... 
; * </code> * * @return Parsed node */ private STNode parseFieldOrRestDescriptor(boolean isInclusive) { return parseFieldOrRestDescriptor(peek().kind, isInclusive); } private STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) { switch (nextTokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: return null; case ASTERISK_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); case AT_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode metadata = parseMetaData(nextTokenKind); type = parseTypeDescriptor(); STNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata); endContext(); return fieldOrRestDesc; default: if (isTypeStartingToken(nextTokenKind)) { startContext(ParserRuleContext.RECORD_FIELD); metadata = createEmptyMetadata(); type = parseTypeDescriptor(nextTokenKind); fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata); endContext(); return fieldOrRestDesc; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldOrRestDescriptor(solution.tokenKind, isInclusive); } } private STNode parseFieldDescriptor(boolean isInclusive, STNode type, STNode metadata) { if (isInclusive) { STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, type, fieldName); } else { return parseFieldOrRestDescriptorRhs(metadata, type); } } /** * Parse type reference. * <code>type-reference := identifier | qualified-identifier</code> * * @return Type reference node */ private STNode parseTypeReference() { return parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE); } /** * Parse identifier or qualified identifier. 
* * @return Identifier node */ private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode typeRefOrPkgRef = consume(); return parseQualifiedIdentifier(typeRefOrPkgRef); } else { Solution sol = recover(token, currentCtx); return sol.recoveredNode; } } /** * Parse identifier or qualified identifier, given the starting identifier. * * @param identifier Starting identifier * @return Parse node */ private STNode parseQualifiedIdentifier(STNode identifier) { STToken nextToken = peek(1); if (nextToken.kind != SyntaxKind.COLON_TOKEN) { return STNodeFactory.createSimpleNameReferenceNode(identifier); } STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { STToken colon = consume(); STToken varOrFuncName = consume(); return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName); } else { this.errorHandler.removeInvalidToken(); return parseQualifiedIdentifier(identifier); } } /** * Parse RHS of a field or rest type descriptor. 
* * @param metadata Metadata * @param type Type descriptor * @return Parsed node */ private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) { STToken token = peek(); return parseFieldOrRestDescriptorRhs(token.kind, metadata, type); } private STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) { switch (kind) { case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken); case IDENTIFIER_TOKEN: STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, type, fieldName); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type); } } /** * <p> * Parse field descriptor rhs. * </p> * * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(STNode metadata, STNode type, STNode fieldName) { STToken token = peek(); return parseFieldDescriptorRhs(token.kind, metadata, type, fieldName); } /** * <p> * Parse field descriptor rhs. * </p> * * <code> * field-descriptor := [? 
| default-value] ; * <br/>default-value := = expression * </code> * * @param kind Kind of the next token * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type, STNode fieldName) { switch (kind) { case SEMICOLON_TOKEN: STNode questionMarkToken = STNodeFactory.createEmptyNode(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken, semicolonToken); case QUESTION_MARK_TOKEN: questionMarkToken = parseQuestionMark(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken, semicolonToken); case EQUAL_TOKEN: STNode equalsToken = parseAssignOp(); STNode expression = parseExpression(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, type, fieldName, equalsToken, expression, semicolonToken); default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseFieldDescriptorRhs(solution.tokenKind, metadata, type, fieldName); } } /** * Parse question mark. * * @return Parsed node */ private STNode parseQuestionMark() { STToken token = peek(); if (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.QUESTION_MARK); return sol.recoveredNode; } } /* * Statements */ /** * Parse statements, until an end of a block is reached. 
 *
 * @return Parsed node
 */
private STNode parseStatements() {
    STToken token = peek();
    ArrayList<STNode> stmts = new ArrayList<>();
    while (!isEndOfStatements(token.kind)) {
        STNode stmt = parseStatement();
        if (stmt == null) {
            // parseStatement() signals end-of-block (e.g. a close-brace) by returning null.
            break;
        }
        if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
            // Named workers are only valid directly inside a function body block,
            // which is parsed separately in parseFunctionBodyBlock().
            this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here");
            break;
        }
        stmts.add(stmt);
        token = peek();
    }
    return STNodeFactory.createNodeList(stmts);
}

/**
 * Check whether the given token kind terminates a statement sequence.
 *
 * @param tokenKind Next token kind
 * @return <code>true</code> if the token ends the statement list. <code>false</code> otherwise
 */
private boolean isEndOfStatements(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            return true;
        case SERVICE_KEYWORD:
            // 'service' may start either a service declaration or a service-constructor
            // expression; only the former ends the enclosing statement list.
            return isServiceDeclStart(ParserRuleContext.STATEMENT, 1);
        default:
            return false;
    }
}

/**
 * Parse a single statement.
 *
 * @return Parsed node
 */
protected STNode parseStatement() {
    STToken token = peek();
    return parseStatement(token.kind);
}

/**
 * Parse a single statement, given the next token kind. Consumes any leading
 * annotations, then dispatches to the annotation-aware overload.
 *
 * @param tokenKind Next token kind
 * @return Parsed node, or <code>null</code> at the end of the enclosing block
 */
private STNode parseStatement(SyntaxKind tokenKind) {
    STNode annots = null;
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
            // Returning null marks the end of the statement list for the caller.
            return null;
        case SEMICOLON_TOKEN:
            // A stray semicolon: discard it and re-peek for the real statement start.
            this.errorHandler.removeInvalidToken();
            return parseStatement();
        case AT_TOKEN:
            annots = parseAnnotations(tokenKind);
            tokenKind = peek().kind;
            break;
        case FINAL_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case PANIC_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case CONTINUE_KEYWORD:
        case BREAK_KEYWORD:
        case RETURN_KEYWORD:
        case TYPE_KEYWORD:
        case LOCK_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case FORK_KEYWORD:
        case FOREACH_KEYWORD:
        case WORKER_KEYWORD:
            // Known statement-starting keywords: no annotations were present.
            break;
        default:
            if (isTypeStartingToken(tokenKind)) {
                // Local variable declaration starting with a type descriptor.
                break;
            }
            if (isValidLHSExpression(tokenKind)) {
                // Expression statement / assignment statement.
                break;
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STATEMENT);
            // If the parser recovered by removing a token, simply re-parse the next statement.
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseStatement(solution.tokenKind);
    }
    return parseStatement(tokenKind, annots);
}

/**
 * Normalize a possibly-null annotation node to a (possibly empty) node list.
 *
 * @param nullbaleAnnot Annotations node, or <code>null</code> when no annotations were parsed
 * @return The given annotations, or an empty node list when <code>null</code>
 */
private STNode getAnnotations(STNode nullbaleAnnot) {
    if (nullbaleAnnot != null) {
        return nullbaleAnnot;
    }
    return STNodeFactory.createNodeList(new ArrayList<>());
}
private STNode parseStatement(STNode annots) { return parseStatement(peek().kind, annots); } /** * Parse a single statement, given the next token kind. * * @param tokenKind Next token kind * @return Parsed node */ private STNode parseStatement(SyntaxKind tokenKind, STNode annots) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: this.errorHandler.reportInvalidNode(null, "invalid annotations"); return null; case SEMICOLON_TOKEN: this.errorHandler.removeInvalidToken(); return parseStatement(tokenKind, annots); case FINAL_KEYWORD: STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); case IF_KEYWORD: return parseIfElseBlock(); case WHILE_KEYWORD: return parseWhileStatement(); case PANIC_KEYWORD: return parsePanicStatement(); case CONTINUE_KEYWORD: return parseContinueStatement(); case BREAK_KEYWORD: return parseBreakStatement(); case RETURN_KEYWORD: return parseReturnStatement(); case TYPE_KEYWORD: return parseLocalTypeDefinitionStatement(getAnnotations(annots)); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseStamentStartsWithExpr(tokenKind); case IDENTIFIER_TOKEN: return parseStatementStartsWithIdentifier(getAnnotations(annots)); case LOCK_KEYWORD: return parseLockStatement(); case OPEN_BRACE_TOKEN: return parseBlockNode(); case WORKER_KEYWORD: return parseNamedWorkerDeclaration(getAnnotations(annots)); case FORK_KEYWORD: return parseForkStatement(); case FOREACH_KEYWORD: return parseForEachStatement(); default: if (isTypeStartingToken(tokenKind)) { finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatement(solution.tokenKind, annots); } } private STNode getNextNextToken(SyntaxKind tokenKind) { return peek(1).kind == tokenKind ? 
peek(2) : peek(1); } /** * <p> * Parse variable declaration. Variable declaration can be a local or module level. * </p> * * <code> * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt * <br/><br/> * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ; * <br/><br/> * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ; * </code> * * @param annots Annotations or metadata * @param finalKeyword Final keyword * @return Parsed node */ private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) { startContext(ParserRuleContext.VAR_DECL_STMT); STNode type = parseTypeDescriptor(); STNode varName = parseVariableName(); STNode varDecl = parseVarDeclRhs(annots, finalKeyword, type, varName, isModuleVar); endContext(); return varDecl; } /** * Parse final keyword. * * @return Parsed node */ private STNode parseFinalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FINAL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FINAL_KEYWORD); return sol.recoveredNode; } } /** * <p> * Parse the right hand side of a variable declaration statement. * </p> * <code> * var-decl-rhs := ; | = action-or-expr ; * </code> * * @param metadata metadata * @param finalKeyword Final keyword * @param type Type descriptor * @param varName Variable name * @return Parsed node */ private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode type, STNode varName, boolean isModuleVar) { STToken token = peek(); return parseVarDeclRhs(token.kind, metadata, finalKeyword, type, varName, isModuleVar); } /** * Parse the right hand side of a variable declaration statement, given the * next token kind. 
* * @param tokenKind Next token kind * @param metadata Metadata * @param finalKeyword Final keyword * @param type Type descriptor * @param varName Variable name * @param isModuleVar flag indicating whether the var is module level * @return Parsed node */ private STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword, STNode type, STNode varName, boolean isModuleVar) { STNode assign; STNode expr; STNode semicolon; switch (tokenKind) { case EQUAL_TOKEN: assign = parseAssignOp(); if (isModuleVar) { expr = parseExpression(); } else { expr = parseActionOrExpression(); } semicolon = parseSemicolon(); break; case SEMICOLON_TOKEN: if (isModuleVar) { this.errorHandler.reportMissingTokenError("assignment required"); } assign = STNodeFactory.createEmptyNode(); expr = STNodeFactory.createEmptyNode(); semicolon = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword, type, varName, isModuleVar); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, type, varName, isModuleVar); } if (isModuleVar) { return STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr, semicolon); } return STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr, semicolon); } /** * <p> * Parse the RHS portion of the assignment. * </p> * <code>assignment-stmt-rhs := = action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseAssignmentStmtRhs(STNode lvExpr) { validateLVExpr(lvExpr); STNode assign = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon); } /* * Expressions */ /** * Parse expression. 
This will start parsing expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, false); } /** * Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseActionOrExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, true); } private STNode parseActionOrExpression(SyntaxKind tokenKind) { return parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, true, true); } private STNode parseActionOrExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true); } /** * Parse expression. * * @param isRhsExpr Flag indicating whether this is a rhs expression * @return Parsed node */ private STNode parseExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false); } private void validateLVExpr(STNode expression) { if (isValidLVExpr(expression)) { return; } this.errorHandler.reportInvalidNode(null, "invalid expression for assignment lhs"); } private boolean isValidLVExpr(STNode expression) { switch (expression.kind) { case IDENTIFIER_TOKEN: case QUALIFIED_NAME_REFERENCE: case SIMPLE_NAME_REFERENCE: return true; case FIELD_ACCESS: return isValidLVExpr(((STFieldAccessExpressionNode) expression).expression); case MEMBER_ACCESS: return isValidLVExpr(((STMemberAccessExpressionNode) expression).containerExpression); default: return false; } } /** * Parse an expression that has an equal or higher precedence than a given level. 
* * @param precedenceLevel Precedence level of expression to be parsed * @param isRhsExpr Flag indicating whether this is a rhs expression * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STToken token = peek(); return parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions); } private STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { STNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions); return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions); } /** * Parse terminal expressions. A terminal expression has the highest precedence level * out of all expressions, and will be at the leaves of an expression tree. * * @param isRhsExpr Is a rhs expression * @param allowActions Allow actions * @return Parsed node */ private STNode parseTerminalExpression(boolean isRhsExpr, boolean allowActions) { return parseTerminalExpression(peek().kind, isRhsExpr, allowActions); } private STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions) { switch (kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: return parseBasicLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); case OPEN_PAREN_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteral(); } return parseBracedExpression(isRhsExpr, allowActions); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseCheckExpression(isRhsExpr, allowActions); case OPEN_BRACE_TOKEN: return parseMappingConstructorExpr(); case TYPEOF_KEYWORD: return 
parseTypeofExpression(isRhsExpr); case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return parseUnaryExpression(isRhsExpr); case TRAP_KEYWORD: return parseTrapExpression(isRhsExpr); case OPEN_BRACKET_TOKEN: return parseListConstructorExpr(); case LT_TOKEN: return parseTypeCastExpr(); case TABLE_KEYWORD: return parseTableConstructorExpr(); default: Solution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, isRhsExpr, allowActions); if (solution.recoveredNode.kind == SyntaxKind.IDENTIFIER_TOKEN) { return parseQualifiedIdentifier(solution.recoveredNode); } if (solution.recoveredNode.kind == SyntaxKind.OPEN_PAREN_TOKEN && peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteral(); } if (solution.recoveredNode.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return parseListConstructorExpr(); } if (solution.recoveredNode.kind == SyntaxKind.LT_TOKEN) { return parseTypeCastExpr(); } if (solution.recoveredNode.kind == SyntaxKind.TABLE_KEYWORD) { return parseTableConstructorExpr(); } return solution.recoveredNode; } } private STNode parseActionOrExpressionInLhs(STNode lhsExpr) { return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, lhsExpr, false, true); } /** * <p> * Parse the right-hand-side of an expression. 
* </p> * <code>expr-rhs := (binary-op expression * | dot identifier * | open-bracket expression close-bracket * )*</code> * * @param precedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression of the expression * @param isLVExpr Flag indicating whether this is on a lhsExpr of a statement * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isLVExpr, boolean allowActions) { STToken token = peek(); return parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isLVExpr, allowActions); } /** * Parse the right hand side of an expression given the next token kind. * * @param tokenKind Next token kind * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression * @param isRhsExpr Flag indicating whether this is a rhs expr or not * @param allowActions Flag indicating whether to allow actions or not * @return Parsed node */ private STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions) { if (isEndOfExpression(tokenKind, isRhsExpr)) { return lhsExpr; } if (!isValidExprRhsStart(tokenKind)) { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) { SyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel); return parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); } else { return parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions); } } OperatorPrecedence nextOperatorPrecedence = 
getOpPrecedence(tokenKind); if (currentPrecedenceLevel.isHigherThan(nextOperatorPrecedence)) { return lhsExpr; } STNode newLhsExpr; switch (tokenKind) { case OPEN_PAREN_TOKEN: newLhsExpr = parseFuncCall(lhsExpr); break; case OPEN_BRACKET_TOKEN: newLhsExpr = parseMemberAccessExpr(lhsExpr); break; case DOT_TOKEN: newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr); break; case IS_KEYWORD: newLhsExpr = parseTypeTestExpression(lhsExpr); break; case RIGHT_ARROW_TOKEN: newLhsExpr = parseAction(tokenKind, lhsExpr); if (!allowActions) { this.errorHandler.reportInvalidNode(null, "actions are not allowed here"); } break; default: STNode operator = parseBinaryOperator(); STNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false); newLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator, rhsExpr); break; } return parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions); } private boolean isValidExprRhsStart(SyntaxKind tokenKind) { switch (tokenKind) { case OPEN_PAREN_TOKEN: case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case IS_KEYWORD: case RIGHT_ARROW_TOKEN: return true; default: return isBinaryOperator(tokenKind); } } /** * Parse member access expression. * * @param lhsExpr Container expression * @return Member access expression */ private STNode parseMemberAccessExpr(STNode lhsExpr) { STNode openBracket = consume(); STNode keyExpr; if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { this.errorHandler.reportMissingTokenError("missing expression"); keyExpr = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { keyExpr = parseExpression(); } STNode closeBracket = parseCloseBracket(); return STNodeFactory.createMemberAccessExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket); } /** * Parse close bracket. 
* * @return Parsed node */ private STNode parseCloseBracket() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLOSE_BRACKET); return sol.recoveredNode; } } /** * Parse field access expression and method call expression. * * @param lhsExpr Preceding expression of the field access or method call * @return One of <code>field-access-expression</code> or <code>method-call-expression</code>. */ private STNode parseFieldAccessOrMethodCall(STNode lhsExpr) { STNode dotToken = parseDotToken(); STNode fieldOrMethodName = parseIdentifier(ParserRuleContext.FIELD_OR_FUNC_NAME); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) { STNode openParen = parseOpenParenthesis(); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args, closeParen); } return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName); } /** * <p> * Parse braced expression. * </p> * <code>braced-expr := ( expression )</code> * * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @param allowActions Allow actions * @return Parsed node */ private STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) { STNode openParen = parseOpenParenthesis(); STNode expr; if (allowActions) { expr = parseActionOrExpression(isRhsExpr); } else { expr = parseExpression(isRhsExpr); } STNode closeParen = parseCloseParenthesis(); if (isAction(expr)) { return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen); } else { return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen); } } /** * Check whether a given node is an action node. 
* * @param node Node to check * @return <code>true</code> if the node is an action node. <code>false</code> otherwise */ private boolean isAction(STNode node) { switch (node.kind) { case REMOTE_METHOD_CALL_ACTION: case BRACED_ACTION: case CHECK_ACTION: return true; default: return false; } } /** * Check whether the given token is an end of a expression. * * @param tokenKind Token to check * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @return <code>true</code> if the token represents an end of a block. <code>false</code> otherwise */ private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr) { if (!isRhsExpr) { if (isCompoundBinaryOperator(tokenKind)) { return true; } return !isValidExprRhsStart(tokenKind); } switch (tokenKind) { case CLOSE_BRACE_TOKEN: case OPEN_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case CONST_KEYWORD: case LISTENER_KEYWORD: case EQUAL_TOKEN: case AT_TOKEN: case DOCUMENTATION_LINE: case AS_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse basic literals. It is assumed that we come here after validation. * * @return Parsed node */ private STNode parseBasicLiteral() { STToken literalToken = consume(); return STNodeFactory.createBasicLiteralNode(literalToken.kind, literalToken); } /** * Parse function call expression. * <code>function-call-expr := function-reference ( arg-list ) * function-reference := variable-reference</code> * * @param identifier Function name * @return Function call expression */ private STNode parseFuncCall(STNode identifier) { STNode openParen = parseOpenParenthesis(); STNode args = parseArgsList(); STNode closeParen = parseCloseParenthesis(); return STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen); } /** * Parse function call argument list. 
* * @return Parsed agrs list */ private STNode parseArgsList() { startContext(ParserRuleContext.ARG_LIST); ArrayList<STNode> argsList = new ArrayList<>(); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } SyntaxKind lastProcessedArgKind = parseFirstArg(argsList); parseFollowUpArg(argsList, lastProcessedArgKind); STNode args = STNodeFactory.createNodeList(argsList); endContext(); return args; } /** * Parse the first argument of a function call. * * @param argsList Arguments list to which the parsed argument must be added * @return Kind of the argument first argument. */ private SyntaxKind parseFirstArg(ArrayList<STNode> argsList) { startContext(ParserRuleContext.ARG); STNode leadingComma = STNodeFactory.createEmptyNode(); STNode arg = parseArg(leadingComma); endContext(); if (SyntaxKind.POSITIONAL_ARG.ordinal() <= arg.kind.ordinal()) { argsList.add(arg); return arg.kind; } else { reportInvalidOrderOfArgs(peek(), SyntaxKind.POSITIONAL_ARG, arg.kind); return SyntaxKind.POSITIONAL_ARG; } } /** * Parse follow up arguments. 
* * @param argsList Arguments list to which the parsed argument must be added * @param lastProcessedArgKind Kind of the argument processed prior to this */ private void parseFollowUpArg(ArrayList<STNode> argsList, SyntaxKind lastProcessedArgKind) { STToken nextToken = peek(); while (!isEndOfParametersList(nextToken.kind)) { startContext(ParserRuleContext.ARG); STNode leadingComma = parseComma(); nextToken = peek(); if (isEndOfParametersList(nextToken.kind)) { this.errorHandler.reportInvalidNode((STToken) leadingComma, "invalid token " + leadingComma); endContext(); break; } STNode arg = parseArg(nextToken.kind, leadingComma); if (lastProcessedArgKind.ordinal() <= arg.kind.ordinal()) { if (lastProcessedArgKind == SyntaxKind.REST_ARG && arg.kind == SyntaxKind.REST_ARG) { this.errorHandler.reportInvalidNode(nextToken, "cannot more than one rest arg"); } else { argsList.add(arg); lastProcessedArgKind = arg.kind; } } else { reportInvalidOrderOfArgs(nextToken, lastProcessedArgKind, arg.kind); } nextToken = peek(); endContext(); } } /** * Report invalid order of args. * * @param token Staring token of the arg. * @param lastArgKind Kind of the previously processed arg * @param argKind Current arg */ private void reportInvalidOrderOfArgs(STToken token, SyntaxKind lastArgKind, SyntaxKind argKind) { this.errorHandler.reportInvalidNode(token, "cannot have a " + argKind + " after the " + lastArgKind); } /** * Parse function call argument. 
* * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseArg(STNode leadingComma) { STToken token = peek(); return parseArg(token.kind, leadingComma); } private STNode parseArg(SyntaxKind kind, STNode leadingComma) { STNode arg; switch (kind) { case ELLIPSIS_TOKEN: STToken ellipsis = consume(); STNode expr = parseExpression(); arg = STNodeFactory.createRestArgumentNode(leadingComma, ellipsis, expr); break; case IDENTIFIER_TOKEN: arg = parseNamedOrPositionalArg(leadingComma); break; case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: default: expr = parseExpression(); arg = STNodeFactory.createPositionalArgumentNode(leadingComma, expr); break; } return arg; } /** * Parse positional or named arg. This method assumed peek()/peek(1) * is always an identifier. * * @param leadingComma Comma that occurs before the param * @return Parsed argument node */ private STNode parseNamedOrPositionalArg(STNode leadingComma) { STToken secondToken = peek(2); switch (secondToken.kind) { case EQUAL_TOKEN: STNode argNameOrVarRef = consume(); STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createNamedArgumentNode(leadingComma, argNameOrVarRef, equal, expr); case COMMA_TOKEN: case CLOSE_PAREN_TOKEN: argNameOrVarRef = consume(); return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrVarRef); case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case IDENTIFIER_TOKEN: case OPEN_PAREN_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: default: expr = parseExpression(); return STNodeFactory.createPositionalArgumentNode(leadingComma, expr); } } /** * Parse object type descriptor. 
* * @return Parsed node */ private STNode parseObjectTypeDescriptor() { startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR); STNode objectTypeQualifiers = parseObjectTypeQualifiers(); STNode objectKeyword = parseObjectKeyword(); STNode openBrace = parseOpenBrace(); STNode objectMembers = parseObjectMembers(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace, objectMembers, closeBrace); } /** * Parse object type qualifiers. * * @return Parsed node */ private STNode parseObjectTypeQualifiers() { STToken nextToken = peek(); return parseObjectTypeQualifiers(nextToken.kind); } private STNode parseObjectTypeQualifiers(SyntaxKind kind) { List<STNode> qualifiers = new ArrayList<>(); STNode firstQualifier; switch (kind) { case CLIENT_KEYWORD: STNode clientKeyword = parseClientKeyword(); firstQualifier = clientKeyword; break; case ABSTRACT_KEYWORD: STNode abstractKeyword = parseAbstractKeyword(); firstQualifier = abstractKeyword; break; case OBJECT_KEYWORD: return STNodeFactory.createNodeList(qualifiers); default: Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeQualifiers(solution.tokenKind); } STNode secondQualifier = parseObjectTypeSecondQualifier(firstQualifier); qualifiers.add(firstQualifier); if (secondQualifier != null) { qualifiers.add(secondQualifier); } return STNodeFactory.createNodeList(qualifiers); } private STNode parseObjectTypeSecondQualifier(STNode firstQualifier) { STToken nextToken = peek(); return parseObjectTypeSecondQualifier(nextToken.kind, firstQualifier); } private STNode parseObjectTypeSecondQualifier(SyntaxKind kind, STNode firstQualifier) { if (firstQualifier.kind != kind) { switch (kind) { case CLIENT_KEYWORD: return parseClientKeyword(); case ABSTRACT_KEYWORD: return parseAbstractKeyword(); case OBJECT_KEYWORD: 
return null; default: break; } } Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_SECOND_QUALIFIER, firstQualifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectTypeSecondQualifier(solution.tokenKind, firstQualifier); } /** * Parse client keyword. * * @return Parsed node */ private STNode parseClientKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.CLIENT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD); return sol.recoveredNode; } } /** * Parse abstract keyword. * * @return Parsed node */ private STNode parseAbstractKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ABSTRACT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD); return sol.recoveredNode; } } /** * Parse object keyword. * * @return Parsed node */ private STNode parseObjectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.OBJECT_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD); return sol.recoveredNode; } } /** * Parse object members. 
* * @return Parsed node */ private STNode parseObjectMembers() { ArrayList<STNode> objectMembers = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfObjectTypeNode(nextToken.kind)) { startContext(ParserRuleContext.OBJECT_MEMBER); STNode member = parseObjectMember(nextToken.kind); endContext(); if (member == null) { break; } objectMembers.add(member); nextToken = peek(); } return STNodeFactory.createNodeList(objectMembers); } private STNode parseObjectMember() { STToken nextToken = peek(); return parseObjectMember(nextToken.kind); } private STNode parseObjectMember(SyntaxKind nextTokenKind) { STNode metadata; switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: case REMOTE_KEYWORD: case FUNCTION_KEYWORD: metadata = createEmptyMetadata(); break; case DOCUMENTATION_LINE: case AT_TOKEN: metadata = parseMetaData(nextTokenKind); nextTokenKind = peek().kind; break; default: if (isTypeStartingToken(nextTokenKind)) { metadata = createEmptyMetadata(); break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMember(solution.tokenKind); } return parseObjectMember(nextTokenKind, metadata); } private STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) { STNode member; switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return null; case ASTERISK_TOKEN: STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); member = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); break; case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: STNode visibilityQualifier = parseObjectMemberVisibility(); member = parseObjectMethodOrField(metadata, visibilityQualifier); break; case REMOTE_KEYWORD: member = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode()); break; case 
FUNCTION_KEYWORD: member = parseObjectMethod(metadata, STNodeFactory.createEmptyNode()); break; default: if (isTypeStartingToken(nextTokenKind)) { member = parseObjectField(metadata, STNodeFactory.createEmptyNode()); break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMember(solution.tokenKind); } return member; } private STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) { STToken nextToken = peek(1); STToken nextNextToken = peek(2); return parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers); } /** * Parse an object member, given the visibility modifier. Object member can have * only one visibility qualifier. This mean the methodQualifiers list can have * one qualifier at-most. * * @param visibilityQualifiers Visibility qualifiers. A modifier can be * a syntax node with either 'PUBLIC' or 'PRIVATE'. 
* @param nextTokenKind Next token kind * @param nextNextTokenKind Kind of the token after the * @param metadata Metadata * @param visibilityQualifiers Visibility qualifiers * @return Parse object member node */ private STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata, STNode visibilityQualifiers) { switch (nextTokenKind) { case REMOTE_KEYWORD: STNode remoteKeyword = parseRemoteKeyword(); ArrayList<STNode> methodQualifiers = new ArrayList<>(); if (visibilityQualifiers.kind != SyntaxKind.NONE) { methodQualifiers.add(visibilityQualifiers); } methodQualifiers.add(remoteKeyword); return parseObjectMethod(metadata, STNodeFactory.createNodeList(methodQualifiers)); case FUNCTION_KEYWORD: return parseObjectMethod(metadata, visibilityQualifiers); case IDENTIFIER_TOKEN: if (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) { return parseObjectField(metadata, visibilityQualifiers); } break; default: if (isTypeStartingToken(nextTokenKind)) { return parseObjectField(metadata, visibilityQualifiers); } break; } Solution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata, visibilityQualifiers); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifiers); } /** * Parse object visibility. Visibility can be <code>public</code> or <code>private</code>. 
* * @return Parsed node */ private STNode parseObjectMemberVisibility() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD); return sol.recoveredNode; } } private STNode parseRemoteKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.REMOTE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD); return sol.recoveredNode; } } private STNode parseObjectField(STNode metadata, STNode methodQualifiers) { STNode type = parseTypeDescriptor(); STNode fieldName = parseVariableName(); return parseObjectFieldRhs(metadata, methodQualifiers, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. * * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STToken nextToken = peek(); return parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, type, fieldName); } /** * Parse object field rhs, and complete the object field parsing. Returns the parsed object field. 
* * @param nextTokenKind Kind of the next token * @param metadata Metadata * @param visibilityQualifier Visibility qualifier * @param type Type descriptor * @param fieldName Field name * @return Parsed object field */ private STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) { STNode equalsToken; STNode expression; STNode semicolonToken; switch (nextTokenKind) { case SEMICOLON_TOKEN: equalsToken = STNodeFactory.createEmptyNode(); expression = STNodeFactory.createEmptyNode(); semicolonToken = parseSemicolon(); break; case EQUAL_TOKEN: equalsToken = parseAssignOp(); expression = parseExpression(); semicolonToken = parseSemicolon(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier, type, fieldName); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, type, fieldName); } return STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, type, fieldName, equalsToken, expression, semicolonToken); } private STNode parseObjectMethod(STNode metadata, STNode methodQualifiers) { return parseFunctionDefinition(metadata, methodQualifiers); } /** * Parse if-else statement. * <code> * if-else-stmt := if expression block-stmt [else-block] * </code> * * @return If-else block */ private STNode parseIfElseBlock() { startContext(ParserRuleContext.IF_BLOCK); STNode ifKeyword = parseIfKeyword(); STNode condition = parseExpression(); STNode ifBody = parseBlockNode(); endContext(); STNode elseBody = parseElseBlock(); return STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody); } /** * Parse if-keyword. 
     * @return Parsed if-keyword node
     */
    private STNode parseIfKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.IF_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.IF_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse else-keyword.
     *
     * @return Parsed else keyword node
     */
    private STNode parseElseKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ELSE_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.ELSE_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse block node.
     * <code>
     * block-stmt := { sequence-stmt }
     * sequence-stmt := statement*
     * </code>
     *
     * @return Parse block node
     */
    private STNode parseBlockNode() {
        startContext(ParserRuleContext.BLOCK_STMT);
        STNode openBrace = parseOpenBrace();
        STNode stmts = parseStatements();
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace);
    }

    /**
     * Parse else block.
     * <code>else-block := else (if-else-stmt | block-stmt)</code>
     *
     * @return Else block
     */
    private STNode parseElseBlock() {
        STToken nextToken = peek();
        // The else-block is optional: return an empty node when there is no 'else'.
        if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) {
            return STNodeFactory.createEmptyNode();
        }

        STNode elseKeyword = parseElseKeyword();
        STNode elseBody = parseElseBody();
        return STNodeFactory.createElseBlockNode(elseKeyword, elseBody);
    }

    /**
     * Parse else node body.
     * <code>else-body := if-else-stmt | block-stmt</code>
     *
     * @return Else node body
     */
    private STNode parseElseBody() {
        STToken nextToken = peek();
        return parseElseBody(nextToken.kind);
    }

    private STNode parseElseBody(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case IF_KEYWORD:
                // 'else if ...' chain
                return parseIfElseBlock();
            case OPEN_BRACE_TOKEN:
                // plain 'else { ... }'
                return parseBlockNode();
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.ELSE_BODY);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseElseBody(solution.tokenKind);
        }
    }

    /**
     * Parse while statement.
     * <code>while-stmt := while expression block-stmt</code>
     *
     * @return While statement
     */
    private STNode parseWhileStatement() {
        startContext(ParserRuleContext.WHILE_BLOCK);
        STNode whileKeyword = parseWhileKeyword();
        STNode condition = parseExpression();
        STNode whileBody = parseBlockNode();
        endContext();
        return STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody);
    }

    /**
     * Parse while-keyword.
     *
     * @return While-keyword node
     */
    private STNode parseWhileKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.WHILE_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.WHILE_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse panic statement.
     * <code>panic-stmt := panic expression ;</code>
     *
     * @return Panic statement
     */
    private STNode parsePanicStatement() {
        startContext(ParserRuleContext.PANIC_STMT);
        STNode panicKeyword = parsePanicKeyword();
        STNode expression = parseExpression();
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon);
    }

    /**
     * Parse panic-keyword.
     * @return Panic-keyword node
     */
    private STNode parsePanicKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PANIC_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.PANIC_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse check expression. This method is used to parse both check expression
     * as well as check action.
     *
     * <p>
     * <code>
     * checking-expr := checking-keyword expression
     * checking-action := checking-keyword action
     * </code>
     *
     * @param allowActions Allow actions
     * @param isRhsExpr Is rhs expression
     * @return Check expression node
     */
    private STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions) {
        STNode checkingKeyword = parseCheckingKeyword();
        STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, allowActions);

        // The node kind depends on whether the operand turned out to be an action or an expression.
        if (isAction(expr)) {
            return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr);
        } else {
            return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr);
        }
    }

    /**
     * Parse checking keyword.
     * <p>
     * <code>
     * checking-keyword := check | checkpanic
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseCheckingKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     *
     * Parse continue statement.
     * <code>continue-stmt := continue ; </code>
     *
     * @return continue statement
     */
    private STNode parseContinueStatement() {
        startContext(ParserRuleContext.CONTINUE_STATEMENT);
        STNode continueKeyword = parseContinueKeyword();
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createContinueStatementNode(continueKeyword, semicolon);
    }

    /**
     * Parse continue-keyword.
     *
     * @return continue-keyword node
     */
    private STNode parseContinueKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.CONTINUE_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse return statement.
     * <code>return-stmt := return [ action-or-expr ] ;</code>
     *
     * @return Return statement
     */
    private STNode parseReturnStatement() {
        startContext(ParserRuleContext.RETURN_STMT);
        STNode returnKeyword = parseReturnKeyword();
        // The rhs (optional expression plus semicolon) completes the statement node.
        STNode returnRhs = parseReturnStatementRhs(returnKeyword);
        endContext();
        return returnRhs;
    }

    /**
     * Parse return-keyword.
     *
     * @return Return-keyword node
     */
    private STNode parseReturnKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RETURN_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.RETURN_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse break statement.
     * <code>break-stmt := break ; </code>
     *
     * @return break statement
     */
    private STNode parseBreakStatement() {
        startContext(ParserRuleContext.BREAK_STATEMENT);
        STNode breakKeyword = parseBreakKeyword();
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createBreakStatementNode(breakKeyword, semicolon);
    }

    /**
     * Parse break-keyword.
     *
     * @return break-keyword node
     */
    private STNode parseBreakKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.BREAK_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.BREAK_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * <p>
     * Parse the right hand side of a return statement.
     * </p>
     * <code>
     * return-stmt-rhs := ; | action-or-expr ;
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseReturnStatementRhs(STNode returnKeyword) {
        STNode expr;
        STNode semicolon;
        STToken token = peek();

        switch (token.kind) {
            case SEMICOLON_TOKEN:
                // Bare 'return;' — no expression.
                expr = STNodeFactory.createEmptyNode();
                break;
            default:
                expr = parseActionOrExpression();
                break;
        }

        semicolon = parseSemicolon();
        return STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon);
    }

    /**
     * Parse mapping constructor expression.
     * <p>
     * <code>mapping-constructor-expr := { [field (, field)*] }</code>
     *
     * @return Parsed node
     */
    private STNode parseMappingConstructorExpr() {
        startContext(ParserRuleContext.MAPPING_CONSTRUCTOR);
        STNode openBrace = parseOpenBrace();
        STNode fields = parseMappingConstructorFields();
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);
    }

    /**
     * Parse mapping constructor fields.
     *
     * @return Parsed node
     */
    private STNode parseMappingConstructorFields() {
        List<STNode> fields = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfMappingConstructor(nextToken.kind)) {
            // Empty mapping constructor: '{}'.
            return STNodeFactory.createNodeList(fields);
        }

        // Parse the first field without a leading comma, then 'comma field' repeatedly.
        STNode leadingComma = STNodeFactory.createEmptyNode();
        STNode field = parseMappingField(leadingComma);
        fields.add(field);

        nextToken = peek();
        while (!isEndOfMappingConstructor(nextToken.kind)) {
            leadingComma = parseComma();
            field = parseMappingField(leadingComma);
            fields.add(field);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(fields);
    }

    /**
     * Check whether the given token kind terminates a mapping constructor.
     * Tokens that start a new construct (e.g. a declaration keyword) also count
     * as terminators, so a missing close-brace does not swallow the next construct.
     *
     * @param tokenKind Kind of the token to check
     * @return <code>true</code> if the token ends the mapping constructor. <code>false</code> otherwise
     */
    private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case IDENTIFIER_TOKEN:
                return false;
            case EOF_TOKEN:
            case AT_TOKEN:
            case DOCUMENTATION_LINE:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case PUBLIC_KEYWORD:
            case PRIVATE_KEYWORD:
            case FUNCTION_KEYWORD:
            case RETURNS_KEYWORD:
            case SERVICE_KEYWORD:
            case TYPE_KEYWORD:
            case LISTENER_KEYWORD:
            case CONST_KEYWORD:
            case FINAL_KEYWORD:
            case RESOURCE_KEYWORD:
                return true;
            default:
                return isSimpleType(tokenKind);
        }
    }

    /**
     * Parse mapping constructor field.
     * <p>
     * <code>field := specific-field | computed-name-field | spread-field</code>
     *
     * @param leadingComma Leading comma
     * @return Parsed node
     */
    private STNode parseMappingField(STNode leadingComma) {
        STToken nextToken = peek();
        return parseMappingField(nextToken.kind, leadingComma);
    }

    private STNode parseMappingField(SyntaxKind tokenKind, STNode leadingComma) {
        switch (tokenKind) {
            case IDENTIFIER_TOKEN:
                // specific-field with identifier key; the value is optional.
                return parseSpecificFieldWithOptionValue(leadingComma);
            case STRING_LITERAL:
                // specific-field with a string-literal key: '"key" : expr'.
                STNode key = parseStringLiteral();
                STNode colon = parseColon();
                STNode valueExpr = parseExpression();
                return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr);
            case OPEN_BRACKET_TOKEN:
                // computed-name-field: '[expr] : expr'.
                return parseComputedField(leadingComma);
            case ELLIPSIS_TOKEN:
                // spread-field: '...expr'.
                STNode ellipsis = parseEllipsis();
                STNode expr = parseExpression();
                return STNodeFactory.createSpreadFieldNode(leadingComma, ellipsis, expr);
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.MAPPING_FIELD, leadingComma);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseMappingField(solution.tokenKind, leadingComma);
        }
    }

    /**
     * Parse mapping constructor specific-field with an optional value.
* * @param leadingComma * @return Parsed node */ private STNode parseSpecificFieldWithOptionValue(STNode leadingComma) { STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME); return parseSpecificFieldRhs(leadingComma, key); } private STNode parseSpecificFieldRhs(STNode leadingComma, STNode key) { STToken nextToken = peek(); return parseSpecificFieldRhs(nextToken.kind, leadingComma, key); } private STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode leadingComma, STNode key) { STNode colon; STNode valueExpr; switch (tokenKind) { case COLON_TOKEN: colon = parseColon(); valueExpr = parseExpression(); break; case COMMA_TOKEN: colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; default: if (isEndOfMappingConstructor(tokenKind)) { colon = STNodeFactory.createEmptyNode(); valueExpr = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, leadingComma, key); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseSpecificFieldRhs(solution.tokenKind, leadingComma, key); } return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr); } /** * Parse string literal. * * @return Parsed node */ private STNode parseStringLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.STRING_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.STRING_LITERAL); return sol.recoveredNode; } } /** * Parse colon token. * * @return Parsed node */ private STNode parseColon() { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.COLON); return sol.recoveredNode; } } /** * Parse computed-name-field of a mapping constructor expression. 
* <p> * <code>computed-name-field := [ field-name-expr ] : value-expr</code> * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseComputedField(STNode leadingComma) { startContext(ParserRuleContext.COMPUTED_FIELD_NAME); STNode openBracket = parseOpenBracket(); STNode fieldNameExpr = parseExpression(); STNode closeBracket = parseCloseBracket(); endContext(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createComputedNameFieldNode(leadingComma, openBracket, fieldNameExpr, closeBracket, colon, valueExpr); } /** * Parse open bracket. * * @return Parsed node */ private STNode parseOpenBracket() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.OPEN_BRACKET); return sol.recoveredNode; } } /** * <p> * Parse compound assignment statement, which takes the following format. * </p> * <code>assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;</code> * * @return Parsed node */ private STNode parseCompoundAssignmentStmt() { startContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT); STNode varName = parseVariableName(); STNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName); endContext(); return compoundAssignmentStmt; } /** * <p> * Parse the RHS portion of the compound assignment. * </p> * <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) { validateLVExpr(lvExpr); STNode binaryOperator = parseCompoundBinaryOperator(); STNode equalsToken = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); return STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr, semicolon); } /** * Parse compound binary operator. 
     * <code>BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>></code>
     *
     * @return Parsed node
     */
    private STNode parseCompoundBinaryOperator() {
        STToken token = peek();
        if (isCompoundBinaryOperator(token.kind)) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse service declaration.
     * <p>
     * <code>
     * service-decl := metadata service [variable-name] on expression-list service-body-block
     * <br/>
     * expression-list := expression (, expression)*
     * </code>
     *
     * @param metadata Metadata
     * @return Parsed node
     */
    private STNode parseServiceDecl(STNode metadata) {
        startContext(ParserRuleContext.SERVICE_DECL);
        STNode serviceKeyword = parseServiceKeyword();
        STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword);
        endContext();
        return serviceDecl;
    }

    /**
     * Parse rhs of the service declaration.
     * <p>
     * <code>
     * service-rhs := [variable-name] on expression-list service-body-block
     * </code>
     *
     * @param metadata Metadata
     * @param serviceKeyword Service keyword
     * @return Parsed node
     */
    private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) {
        STNode serviceName = parseServiceName();
        STNode onKeyword = parseOnKeyword();
        STNode expressionList = parseListeners();
        STNode serviceBody = parseServiceBody();
        STNode service = STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword,
                expressionList, serviceBody);
        return service;
    }

    /**
     * Parse the optional service name.
     *
     * @return Service name identifier, or an empty node when the name is omitted
     */
    private STNode parseServiceName() {
        STToken nextToken = peek();
        return parseServiceName(nextToken.kind);
    }

    private STNode parseServiceName(SyntaxKind kind) {
        switch (kind) {
            case IDENTIFIER_TOKEN:
                return parseIdentifier(ParserRuleContext.SERVICE_NAME);
            case ON_KEYWORD:
                // Anonymous service: 'service on listener { ... }'.
                return STNodeFactory.createEmptyNode();
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseServiceName(solution.tokenKind);
        }
    }

    /**
     * Parse service keyword.
     *
     * @return Parsed node
     */
    private STNode parseServiceKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.SERVICE_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Check whether the given token kind is a compound binary operator.
     * <p>
     * <code>compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>></code>
     *
     * @param tokenKind STToken kind
     * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise
     */
    private boolean isCompoundBinaryOperator(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case SLASH_TOKEN:
            case ASTERISK_TOKEN:
            case BITWISE_AND_TOKEN:
            case BITWISE_XOR_TOKEN:
            case PIPE_TOKEN:
                // Only a compound operator when immediately followed by '=' (e.g. '+=').
                return getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN;
            default:
                return false;
        }
    }

    /**
     * Parse on keyword.
     *
     * @return Parsed node
     */
    private STNode parseOnKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ON_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.ON_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse listener references.
* <p> * <code>expression-list := expression (, expression)*</code> * * @return Parsed node */ private STNode parseListeners() { startContext(ParserRuleContext.LISTENERS_LIST); List<STNode> listeners = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfExpressionsList(nextToken.kind)) { endContext(); this.errorHandler.reportMissingTokenError("missing expression"); return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode leadingComma = STNodeFactory.createEmptyNode(); STNode exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); while (!isEndOfExpressionsList(nextToken.kind)) { leadingComma = parseComma(); exprListItem = parseExpressionListItem(leadingComma); listeners.add(exprListItem); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(listeners); } private boolean isEndOfExpressionsList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case IDENTIFIER_TOKEN: return false; case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case RESOURCE_KEYWORD: case LISTENER_KEYWORD: case AT_TOKEN: case DOCUMENTATION_LINE: case PRIVATE_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * Parse expression list item. * * @param leadingComma Leading comma * @return Parsed node */ private STNode parseExpressionListItem(STNode leadingComma) { STNode expr = parseExpression(); return STNodeFactory.createExpressionListItemNode(leadingComma, expr); } /** * Parse service body. 
* <p> * <code> * service-body-block := { service-method-defn* } * </code> * * @return Parsed node */ private STNode parseServiceBody() { STNode openBrace = parseOpenBrace(); STNode resources = parseResources(); STNode closeBrace = parseCloseBrace(); return STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace); } /** * Parse service resource definitions. * * @return Parsed node */ private STNode parseResources() { List<STNode> resources = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfServiceDecl(nextToken.kind)) { STNode serviceMethod = parseResource(); if (serviceMethod == null) { break; } resources.add(serviceMethod); nextToken = peek(); } return STNodeFactory.createNodeList(resources); } private boolean isEndOfServiceDecl(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case EOF_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case SERVICE_KEYWORD: return true; default: return false; } } /** * Parse resource definition (i.e. service-method-defn). 
     * <p>
     * <code>
     * service-body-block := { service-method-defn* }
     * <br/>
     * service-method-defn := metadata [resource] function identifier function-signature method-defn-body
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseResource() {
        STToken nextToken = peek();
        return parseResource(nextToken.kind);
    }

    private STNode parseResource(SyntaxKind nextTokenKind) {
        STNode metadata;
        switch (nextTokenKind) {
            case RESOURCE_KEYWORD:
            case FUNCTION_KEYWORD:
                // No doc string or annotations before the resource.
                metadata = createEmptyMetadata();
                break;
            case DOCUMENTATION_LINE:
            case AT_TOKEN:
                metadata = parseMetaData(nextTokenKind);
                nextTokenKind = peek().kind;
                break;
            default:
                // Null signals to the caller that the service body has ended.
                if (isEndOfServiceDecl(nextTokenKind)) {
                    return null;
                }

                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseResource(solution.tokenKind);
        }

        return parseResource(nextTokenKind, metadata);
    }

    private STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) {
        switch (nextTokenKind) {
            case RESOURCE_KEYWORD:
                STNode resourceKeyword = parseResourceKeyword();
                return parseFunctionDefinition(metadata, resourceKeyword);
            case FUNCTION_KEYWORD:
                // 'resource' qualifier is optional.
                return parseFunctionDefinition(metadata, STNodeFactory.createEmptyNode());
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseResource(solution.tokenKind, metadata);
        }
    }

    /**
     * Parse resource keyword.
     *
     * @return Parsed node
     */
    private STNode parseResourceKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RESOURCE_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Check whether next construct is a service declaration or not. This method is
     * used to determine whether an end-of-block is reached, if the next token is
     * a service-keyword, because service-keyword can be used in statements as well
     * as in a top-level node (service-decl). If we have reached a service-decl, then
     * it could be due to a missing close-brace at the end of the current block.
     *
     * @return <code>true</code> if the next construct is a service declaration.
     *         <code>false</code> otherwise
     */
    private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {
        switch (peek(lookahead + 1).kind) {
            case IDENTIFIER_TOKEN:
                SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
                switch (tokenAfterIdentifier) {
                    case EQUAL_TOKEN:
                    case SEMICOLON_TOKEN:
                        // 'service x = ...' / 'service x;' — a statement, not a service-decl.
                        return false;
                    case ON_KEYWORD:
                        // 'service x on ...' — definitely a service-decl.
                        return true;
                    default:
                        // Ambiguous: let the error handler pick the cheapest recovery path.
                        ParserRuleContext sol = this.errorHandler.findBestPath(currentContext);
                        return sol == ParserRuleContext.SERVICE_DECL || sol == ParserRuleContext.CLOSE_BRACE;
                }
            case ON_KEYWORD:
                // Anonymous service: 'service on ...'.
                return true;
            default:
                this.errorHandler.removeInvalidToken();
                return false;
        }
    }

    /**
     * Parse listener declaration, given the qualifier.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the listener declaration
     * @return Parsed node
     */
    private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.LISTENER_DECL);
        STNode listenerKeyword = parseListenerKeyword();
        STNode typeDesc = parseTypeDescriptor();
        STNode variableName = parseVariableName();
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        endContext();
        return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc,
                variableName, equalsToken, initializer, semicolonToken);
    }

    /**
     * Parse listener keyword.
     *
     * @return Parsed node
     */
    private STNode parseListenerKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LISTENER_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse constant declaration, given the qualifier.
     * <p>
     * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the listener declaration
     * @return Parsed node
     */
    private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.CONSTANT_DECL);
        STNode constKeyword = parseConstantKeyword();
        STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword);
        endContext();
        return constDecl;
    }

    /**
     * Parse the components that follows after the const keyword of a constant declaration.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param constKeyword Const keyword
     * @return Parsed node
     */
    private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
        STToken nextToken = peek();
        return parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword);
    }

    private STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                          STNode constKeyword) {
        switch (nextTokenKind) {
            case ANNOTATION_KEYWORD:
                // 'const annotation ...' — this is actually an annotation declaration.
                switchContext(ParserRuleContext.ANNOTATION_DECL);
                return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
            case IDENTIFIER_TOKEN:
                // Could be either the type name or the variable name; disambiguate later.
                return parseConstantDeclWithOptionalType(metadata, qualifier, constKeyword);
            default:
                if (isTypeStartingToken(nextTokenKind)) {
                    // Falls through to the explicitly-typed const parsing below.
                    break;
                }

                STToken token = peek();
                Solution solution =
                        recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseConstDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword);
        }

        STNode typeDesc = parseTypeDescriptor();
        STNode variableName = parseVariableName();
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc,
                variableName, equalsToken, initializer, semicolonToken);
    }

    private STNode parseConstantDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword) {
        STNode varNameOrTypeName = parseStatementStartIdentifier();
        STNode constDecl = parseConstantDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName);
        return constDecl;
    }

    /**
     * Parse the component that follows the first identifier in a const decl. The identifier
     * can be either the type-name (a user defined type) or the var-name where the type-name
     * is not present.
     *
     * @param qualifier Qualifier that precedes the constant decl
     * @param constKeyword Const keyword
     * @param typeOrVarName Identifier that follows the const-keyword
     * @return Parsed node
     */
    private STNode parseConstantDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                        STNode typeOrVarName) {
        STToken token = peek();
        return parseConstantDeclRhs(token.kind, metadata, qualifier, constKeyword, typeOrVarName);
    }

    private STNode parseConstantDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                        STNode constKeyword, STNode typeOrVarName) {
        STNode type;
        STNode variableName;
        switch (nextTokenKind) {
            case IDENTIFIER_TOKEN:
                // 'const TypeName varName = ...' — the first identifier was the type.
                type = typeOrVarName;
                variableName = parseVariableName();
                break;
            case EQUAL_TOKEN:
                // 'const varName = ...' — no explicit type.
                variableName = typeOrVarName;
                type = STNodeFactory.createEmptyNode();
                break;
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier,
                        constKeyword, typeOrVarName);

                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }

                return parseConstantDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, typeOrVarName);
        }

        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, type, variableName,
                equalsToken, initializer, semicolonToken);
    }

    /**
     * Parse const keyword.
     *
     * @return Parsed node
     */
    private STNode parseConstantKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.CONST_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.CONST_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse nil type descriptor.
     * <p>
     * <code>nil-type-descriptor := ( ) </code>
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseNilTypeDescriptor() {
        startContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR);
        STNode openParenthesisToken = parseOpenParenthesis();
        STNode closeParenthesisToken = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken);
    }

    /**
     * Parse typeof expression.
     * <p>
     * <code>
     * typeof-expr := typeof expression
     * </code>
     *
     * @param isRhsExpr Whether this is a rhs expression
     * @return Typeof expression node
     */
    private STNode parseTypeofExpression(boolean isRhsExpr) {
        STNode typeofKeyword = parseTypeofKeyword();

        // The operand binds with unary precedence.
        STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
        return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);
    }

    /**
     * Parse typeof-keyword.
     *
     * @return Typeof-keyword node
     */
    private STNode parseTypeofKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TYPEOF_KEYWORD) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD);
            return sol.recoveredNode;
        }
    }

    /**
     * Parse optional type descriptor.
     * <p>
     * <code>optional-type-descriptor := type-descriptor ? </code>
     * </p>
     *
     * @param typeDescriptorNode Type descriptor that precedes the question mark
     * @return Parsed node
     */
    private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
        startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
        STNode questionMarkToken = parseQuestionMark();
        endContext();
        return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken);
    }

    /**
     * Parse unary expression.
     * <p>
     * <code>
     * unary-expr := + expression | - expression | ~ expression | ! expression
     * </code>
     *
     * @param isRhsExpr Whether this is a rhs expression
     * @return Unary expression node
     */
    private STNode parseUnaryExpression(boolean isRhsExpr) {
        STNode unaryOperator = parseUnaryOperator();
        STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
        return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);
    }

    /**
     * Parse unary operator.
     * <code>UnaryOperator := + | - | ~ | !</code>
     *
     * @return Parsed node
     */
    private STNode parseUnaryOperator() {
        STToken token = peek();
        if (isUnaryOperator(token.kind)) {
            return consume();
        } else {
            Solution sol = recover(token, ParserRuleContext.UNARY_OPERATOR);
            return sol.recoveredNode;
        }
    }

    /**
     * Check whether the given token kind is a unary operator.
     *
     * @param kind STToken kind
     * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise
     */
    private boolean isUnaryOperator(SyntaxKind kind) {
        switch (kind) {
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case NEGATION_TOKEN:
            case EXCLAMATION_MARK_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse array type descriptor.
     * <p>
     * <code>
     * array-type-descriptor := member-type-descriptor [ [ array-length ] ]
     * member-type-descriptor := type-descriptor
     * array-length :=
     *    int-literal
     *    | constant-reference-expr
     *    | inferred-array-length
     * inferred-array-length := *
     * </code>
     * </p>
     *
     * @param typeDescriptorNode Member type descriptor that precedes the brackets
     * @return Parsed Node
     */
    private STNode parseArrayTypeDescriptor(STNode typeDescriptorNode) {
        startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
        STNode openBracketToken = parseOpenBracket();
        STNode arrayLengthNode = parseArrayLength();
        STNode closeBracketToken = parseCloseBracket();
        endContext();
        return STNodeFactory.createArrayTypeDescriptorNode(typeDescriptorNode, openBracketToken, arrayLengthNode,
                closeBracketToken);
    }

    /**
     * Parse array length.
* <p> * <code> * array-length := * int-literal * | constant-reference-expr * | inferred-array-length * constant-reference-expr := variable-reference-expr * </code> * </p> * * @return Parsed array length */ private STNode parseArrayLength() { STToken token = peek(); switch (token.kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case ASTERISK_TOKEN: return consume(); case CLOSE_BRACKET_TOKEN: return STNodeFactory.createEmptyNode(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH); default: Solution sol = recover(token, ParserRuleContext.ARRAY_LENGTH); return sol.recoveredNode; } } /** * Parse annotations. * <p> * <i>Note: In the ballerina spec ({@link https: * annotations-list is specified as one-or-more annotations. And the usage is marked as * optional annotations-list. However, for the consistency of the tree, here we make the * annotation-list as zero-or-more annotations, and the usage is not-optional.</i> * <p> * <code>annots := annotation*</code> * * @return Parsed node */ private STNode parseAnnotations() { STToken nextToken = peek(); return parseAnnotations(nextToken.kind); } private STNode parseAnnotations(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.ANNOTATIONS); List<STNode> annotList = new ArrayList<>(); while (nextTokenKind == SyntaxKind.AT_TOKEN) { annotList.add(parseAnnotation()); nextTokenKind = peek().kind; } endContext(); return STNodeFactory.createNodeList(annotList); } /** * Parse annotation attachment. 
* <p> * <code>annotation := @ annot-tag-reference annot-value</code> * * @return Parsed node */ private STNode parseAnnotation() { STNode atToken = parseAtToken(); STNode annotReference; if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } else { annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE); } STNode annotValue = parseMappingConstructorExpr(); return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue); } /** * Parse '@' token. * * @return Parsed node */ private STNode parseAtToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.AT); return sol.recoveredNode; } } /** * Parse metadata. Meta data consist of optional doc string and * an annotations list. * <p> * <code>metadata := [DocumentationString] annots</code> * * @return Parse node */ private STNode parseMetaData(SyntaxKind nextTokenKind) { STNode docString; STNode annotations; switch (nextTokenKind) { case DOCUMENTATION_LINE: docString = parseDocumentationString(); annotations = parseAnnotations(); break; case AT_TOKEN: docString = STNodeFactory.createEmptyNode(); annotations = parseAnnotations(nextTokenKind); break; default: return createEmptyMetadata(); } return STNodeFactory.createMetadataNode(docString, annotations); } /** * Create empty metadata node. * * @return A metadata node with no doc string and no annotations */ private STNode createEmptyMetadata() { return STNodeFactory.createMetadataNode(STNodeFactory.createEmptyNode(), STNodeFactory.createNodeList(new ArrayList<>())); } /** * Parse is expression. 
 * <code>
 * is-expr := expression is type-descriptor
 * </code>
 *
 * @param lhsExpr Preceding expression of the is expression
 * @return Is expression node
 */
private STNode parseTypeTestExpression(STNode lhsExpr) {
    startContext(ParserRuleContext.TYPE_TEST_EXPRESSION);
    STNode isKeyword = parseIsKeyword();
    STNode typeDescriptor = parseTypeDescriptor();
    endContext();
    return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor);
}

/**
 * Parse is-keyword.
 *
 * @return Is-keyword node
 */
private STNode parseIsKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.IS_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.IS_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse local type definition statement.
 * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
 *
 * @param annots Annotations attached to the statement
 * @return Local type definition statement node
 */
private STNode parseLocalTypeDefinitionStatement(STNode annots) {
    startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
    STNode typeKeyword = parseTypeKeyword();
    STNode typeName = parseTypeName();
    STNode typeDescriptor = parseTypeDescriptor();
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,
            semicolon);
}

/**
 * Parse statements that start with an identifier.
* * @param tokenKind Next token kind * @return Parsed node */ private STNode parseStatementStartsWithIdentifier(STNode annots) { startContext(ParserRuleContext.STMT_START_WITH_IDENTIFIER); STNode identifier = parseStatementStartIdentifier(); STToken nextToken = peek(); STNode stmt = parseStatementStartsWithIdentifier(nextToken.kind, annots, identifier); endContext(); return stmt; } private STNode parseStatementStartsWithIdentifier(STNode annots, STNode identifier) { return parseStatementStartsWithIdentifier(peek().kind, annots, identifier); } private STNode parseStatementStartsWithIdentifier(SyntaxKind nextTokenKind, STNode annots, STNode identifier) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: case QUESTION_MARK_TOKEN: return parseTypeDescStartsWithIdentifier(identifier, annots); case EQUAL_TOKEN: case SEMICOLON_TOKEN: return parseStamentStartWithExpr(nextTokenKind, identifier); case PIPE_TOKEN: STToken nextNextToken = peek(2); if (nextNextToken.kind != SyntaxKind.EQUAL_TOKEN) { return parseTypeDescStartsWithIdentifier(identifier, annots); } default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(identifier); } if (isValidExprRhsStart(nextTokenKind)) { STNode expression = parseActionOrExpressionInLhs(identifier); return parseStamentStartWithExpr(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_IDENTIFIER, annots, identifier); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStatementStartsWithIdentifier(solution.tokenKind, annots, identifier); } } private STNode parseTypeDescStartsWithIdentifier(STNode typeDesc, STNode annots) { switchContext(ParserRuleContext.VAR_DECL_STMT); typeDesc = parseComplexTypeDescriptor(typeDesc); STNode varName = parseVariableName(); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVarDeclRhs(annots, finalKeyword, typeDesc, varName, false); } /** * Parse statement which is only 
consists of an action or expression. * * @param nextTokenKind Next token kind * @return Parsed node */ private STNode parseStamentStartsWithExpr(SyntaxKind nextTokenKind) { startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode expression = parseActionOrExpression(nextTokenKind); STNode stmt = parseStamentStartWithExpr(expression); endContext(); return stmt; } /** * Parse statements that starts with an expression. * * @return Parsed node */ private STNode parseStamentStartWithExpr(STNode expression) { STToken nextToken = peek(); return parseStamentStartWithExpr(nextToken.kind, expression); } /** * Parse the component followed by the expression, at the beginning of a statement. * * @param nextTokenKind Kind of the next token * @return Parsed node */ private STNode parseStamentStartWithExpr(SyntaxKind nextTokenKind, STNode expression) { switch (nextTokenKind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(expression); case SEMICOLON_TOKEN: return getExpressionAsStatement(expression); default: if (isCompoundBinaryOperator(nextTokenKind)) { return parseCompoundAssignmentStmtRhs(expression); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, expression); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseStamentStartWithExpr(solution.tokenKind, expression); } } private STNode getExpressionAsStatement(STNode expression) { switch (expression.kind) { case METHOD_CALL: case FUNCTION_CALL: case CHECK_EXPRESSION: return parseCallStatement(expression); case REMOTE_METHOD_CALL_ACTION: case CHECK_ACTION: case BRACED_ACTION: return parseActionStatement(expression); default: this.errorHandler.reportInvalidNode(null, "left hand side of an assignment must be a variable reference"); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID, expression, semicolon); } } /** * <p> * Parse call 
statement, given the call expression. * <p> * <code> * call-stmt := call-expr ; * <br/> * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr * </code> * * @param expression Call expression associated with the call statement * @return Call statement node */ private STNode parseCallStatement(STNode expression) { validateExprInCallStmt(expression); STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon); } private void validateExprInCallStmt(STNode expression) { switch (expression.kind) { case FUNCTION_CALL: case METHOD_CALL: break; case CHECK_EXPRESSION: validateExprInCallStmt(((STCheckExpressionNode) expression).expression); break; case REMOTE_METHOD_CALL_ACTION: break; case BRACED_EXPRESSION: validateExprInCallStmt(((STBracedExpressionNode) expression).expression); break; default: if (isMissingNode(expression)) { break; } this.errorHandler.reportInvalidNode(null, "expression followed by the checking keyword must be a " + "func-call, a method-call or a check-expr"); break; } } /** * Check whether a node is a missing node. * * @param node Node to check * @return <code>true</code> if the node is a missing node. <code>false</code> otherwise */ private boolean isMissingNode(STNode node) { return node instanceof STMissingToken; } private STNode parseActionStatement(STNode action) { STNode semicolon = parseSemicolon(); return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon); } private STNode parseAction(SyntaxKind tokenKind, STNode lhsExpr) { switch (tokenKind) { case RIGHT_ARROW_TOKEN: return parseRemoteMethodCallAction(lhsExpr); default: return null; } } /** * Parse remote method call action, given the starting expression. 
* <p> * <code>remote-method-call-action := expression -> method-name ( arg-list )</code> * * @param expression LHS expression * @return */ private STNode parseRemoteMethodCallAction(STNode expression) { STNode rightArrow = parseRightArrow(); STNode methodName = parseFunctionName(); STNode openParenToken = parseOpenParenthesis(); STNode arguments = parseArgsList(); STNode closeParenToken = parseCloseParenthesis(); return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, methodName, openParenToken, arguments, closeParenToken); } /** * Parse right arrow (<code>-></code>) token. * * @return Parsed node */ private STNode parseRightArrow() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW); return sol.recoveredNode; } } /** * Check whether this is a valid lhs expression. * * @param tokenKind Kind of the next token * @return <code>true</code>if this is a start of an expression. <code>false</code> otherwise */ private boolean isValidLHSExpression(SyntaxKind tokenKind) { switch (tokenKind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case IDENTIFIER_TOKEN: case TRUE_KEYWORD: case FALSE_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case TYPEOF_KEYWORD: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: return true; case PLUS_TOKEN: case MINUS_TOKEN: return !isCompoundBinaryOperator(tokenKind); case OPEN_PAREN_TOKEN: default: return false; } } /** * Parse parameterized type descriptor. 
* parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter * * @return Parsed node */ private STNode parseParameterizedTypeDescriptor() { startContext(ParserRuleContext.PARAMETERIZED_TYPE_DESCRIPTOR); STNode parameterizedTypeKeyword = parseParameterizedTypeKeyword(); STNode ltToken = parseLTToken(); STNode typeNode = parseTypeDescriptor(); STNode gtToken = parseGTToken(); endContext(); return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, ltToken, typeNode, gtToken); } /** * Parse <code>map</code> or <code>future</code> or <code>typedesc</code> keyword token. * * @return Parsed node */ private STNode parseParameterizedTypeKeyword() { STToken nextToken = peek(); switch (nextToken.kind) { case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return consume(); default: Solution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE); return sol.recoveredNode; } } /** * Parse <code> < </code> token. * * @return Parsed node */ private STNode parseGTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.GT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.GT); return sol.recoveredNode; } } /** * Parse <code> > </code> token. * * @return Parsed node */ private STNode parseLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { return consume(); } else { Solution sol = recover(nextToken, ParserRuleContext.LT); return sol.recoveredNode; } } /** * Parse nil literal. Here nil literal is only referred to ( ). * * @return Parsed node */ private STNode parseNilLiteral() { startContext(ParserRuleContext.NIL_LITERAL); STNode openParenthesisToken = parseOpenParenthesis(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken); } /** * Parse annotation declaration, given the qualifier. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the listener declaration * @param constKeyword Const keyword * @return Parsed node */ private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) { startContext(ParserRuleContext.ANNOTATION_DECL); STNode annotationKeyword = parseAnnotationKeyword(); STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword); endContext(); return annotDecl; } /** * Parse annotation keyword. * * @return Parsed node */ private STNode parseAnnotationKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.ANNOTATION_KEYWORD); return sol.recoveredNode; } } /** * Parse the components that follows after the annotation keyword of a annotation declaration. * * @param metadata Metadata * @param qualifier Qualifier that precedes the constant decl * @param constKeyword Const keyword * @param annotationKeyword * @return Parsed node */ private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STToken nextToken = peek(); return parseAnnotationDeclFromType(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword); } private STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { switch (nextTokenKind) { case IDENTIFIER_TOKEN: return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword); default: if (isTypeStartingToken(nextTokenKind)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword, annotationKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclFromType(solution.tokenKind, metadata, 
qualifier, constKeyword, annotationKeyword); } STNode typeDesc = parseTypeDescriptor(); STNode annotTag = parseAnnotationTag(); STNode equalsToken = parseAssignOp(); STNode initializer = parseExpression(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, annotTag, equalsToken, initializer, semicolonToken); } /** * Parse annotation tag. * <p> * <code>annot-tag := identifier</code> * * @return */ private STNode parseAnnotationTag() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.ANNOTATION_TAG); return sol.recoveredNode; } } private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword) { STNode typeDescOrAnnotTag = parseAnnotationTag(); if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STNode annotTag = parseAnnotationTag(); return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag, annotTag); } return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } /** * Parse the component that follows the first identifier in an annotation decl. The identifier * can be either the type-name (a user defined type) or the annot-tag, where the type-name * is not present. 
* * @param metadata Metadata * @param qualifier Qualifier that precedes the annotation decl * @param constKeyword Const keyword * @param annotationKeyword Annotation keyword * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword * @return Parsed node */ private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STToken token = peek(); return parseAnnotationDeclRhs(token.kind, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } private STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) { STNode typeDesc; STNode annotTag; switch (nextTokenKind) { case IDENTIFIER_TOKEN: typeDesc = typeDescOrAnnotTag; annotTag = parseAnnotationTag(); break; case SEMICOLON_TOKEN: case ON_KEYWORD: typeDesc = STNodeFactory.createEmptyNode(); annotTag = typeDescOrAnnotTag; break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag); } return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode typeDesc, STNode annotTag) { STToken nextToken = peek(); return parseAnnotationDeclAttachPoints(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } private STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier, STNode constKeyword, STNode annotationKeyword, STNode 
typeDesc, STNode annotTag) { STNode onKeyword; STNode attachPoints; switch (nextTokenKind) { case SEMICOLON_TOKEN: onKeyword = STNodeFactory.createEmptyNode(); attachPoints = STNodeFactory.createEmptyNode(); break; case ON_KEYWORD: onKeyword = parseOnKeyword(); attachPoints = parseAnnotationAttachPoints(); break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag); } STNode semicolonToken = parseSemicolon(); return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag, onKeyword, attachPoints, semicolonToken); } /** * Parse annotation attach points. * <p> * <code> * annot-attach-points := annot-attach-point (, annot-attach-point)* * <br/><br/> * annot-attach-point := dual-attach-point | source-only-attach-point * <br/><br/> * dual-attach-point := [source] dual-attach-point-ident * <br/><br/> * dual-attach-point-ident := * [object] type * | [object|resource] function * | parameter * | return * | service * | [object|record] field * <br/><br/> * source-only-attach-point := source source-only-attach-point-ident * <br/><br/> * source-only-attach-point-ident := * annotation * | external * | var * | const * | listener * | worker * </code> * * @return Parsed node */ private STNode parseAnnotationAttachPoints() { startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST); List<STNode> attachPoints = new ArrayList<>(); STToken nextToken = peek(); if (isEndAnnotAttachPointList(nextToken.kind)) { endContext(); this.errorHandler.reportMissingTokenError("missing attach point"); return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); } STNode attachPoint = 
parseAnnotationAttachPoint(); attachPoints.add(attachPoint); nextToken = peek(); STNode leadingComma; while (!isEndAnnotAttachPointList(nextToken.kind)) { leadingComma = parseAttachPointEnd(); if (leadingComma == null) { break; } attachPoints.add(leadingComma); attachPoint = parseAnnotationAttachPoint(); if (attachPoint == null) { this.errorHandler.reportMissingTokenError("missing attach point"); attachPoint = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); attachPoints.add(attachPoint); break; } attachPoints.add(attachPoint); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(attachPoints); } /** * Parse annotation attach point end. * * @return Parsed node */ private STNode parseAttachPointEnd() { STToken nextToken = peek(); return parseAttachPointEnd(nextToken.kind); } private STNode parseAttachPointEnd(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case SEMICOLON_TOKEN: return null; case COMMA_TOKEN: return consume(); default: Solution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END); if (sol.action == Action.REMOVE) { return sol.recoveredNode; } return sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null; } } private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse annotation attach point. 
 *
 * @return Parsed node, or <code>null</code> on reaching EOF
 */
private STNode parseAnnotationAttachPoint() {
    return parseAnnotationAttachPoint(peek().kind);
}

private STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
            return null;

        // Source-only attach-point idents. Per the grammar these require a preceding
        // 'source' keyword, so when the ident appears without it, parseSourceKeyword()
        // goes into recovery (presumably inserting the missing 'source' token).
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
        case SOURCE_KEYWORD:
            STNode sourceKeyword = parseSourceKeyword();
            return parseAttachPointIdent(sourceKeyword);

        // Dual attach points: the 'source' keyword is absent here.
        case OBJECT_KEYWORD:
        case TYPE_KEYWORD:
        case RESOURCE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case RECORD_KEYWORD:
            sourceKeyword = STNodeFactory.createEmptyNode();
            STNode firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT);
            return solution.recoveredNode;
    }
}

/**
 * Parse source keyword.
 *
 * @return Parsed node
 */
private STNode parseSourceKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SOURCE_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.SOURCE_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse attach point ident, given the optional source keyword.
* <p> * <code> * source-only-attach-point-ident := annotation | external | var | const | listener | worker * <br/><br/> * dual-attach-point-ident := [object] type | [object|resource] function | parameter * | return | service | [object|record] field * </code> * * @param sourceKeyword Source keyword * @return Parsed node */ private STNode parseAttachPointIdent(STNode sourceKeyword) { return parseAttachPointIdent(peek().kind, sourceKeyword); } private STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) { switch (nextTokenKind) { case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: STNode firstIdent = consume(); STNode secondIdent = STNodeFactory.createEmptyNode(); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); case OBJECT_KEYWORD: case RESOURCE_KEYWORD: case RECORD_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } firstIdent = solution.recoveredNode; return parseDualAttachPointIdent(sourceKeyword, firstIdent); } } /** * Parse dual-attach-point ident. 
* * @param sourceKeyword Source keyword * @param firstIdent first part of the dual attach-point * @return Parsed node */ private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) { STNode secondIdent; switch (firstIdent.kind) { case OBJECT_KEYWORD: secondIdent = parseIdentAfterObjectIdent(); break; case RESOURCE_KEYWORD: secondIdent = parseFunctionIdent(); break; case RECORD_KEYWORD: secondIdent = parseFieldIdent(); break; case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: default: secondIdent = STNodeFactory.createEmptyNode(); break; } return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); } /** * Parse the idents that are supported after object-ident. * * @return Parsed node */ private STNode parseIdentAfterObjectIdent() { STToken token = peek(); switch (token.kind) { case TYPE_KEYWORD: case FUNCTION_KEYWORD: case FIELD_KEYWORD: return consume(); default: Solution sol = recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT); return sol.recoveredNode; } } /** * Parse function ident. * * @return Parsed node */ private STNode parseFunctionIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FUNCTION_IDENT); return sol.recoveredNode; } } /** * Parse field ident. * * @return Parsed node */ private STNode parseFieldIdent() { STToken token = peek(); if (token.kind == SyntaxKind.FIELD_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FIELD_IDENT); return sol.recoveredNode; } } /** * Parse XML namespace declaration. 
 * <p>
 * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
 * <br/>
 * xml-namespace-uri := simple-const-expr
 * <br/>
 * xml-namespace-prefix := identifier
 * </code>
 *
 * @return Parsed XML namespace declaration node
 */
private STNode parseXMLNamepsaceDeclaration() {
    startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
    STNode xmlnsKeyword = parseXMLNSKeyword();
    STNode namespaceUri = parseXMLNamespaceUri();
    STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri);
    endContext();
    return xmlnsDecl;
}

/**
 * Parse xmlns keyword.
 *
 * @return Parsed node
 */
private STNode parseXMLNSKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.XMLNS_KEYWORD) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.XMLNS_KEYWORD);
        return sol.recoveredNode;
    }
}

/**
 * Parse namespace uri.
 *
 * @return Parsed node
 */
private STNode parseXMLNamespaceUri() {
    STNode expr = parseConstExpr();
    // The uri must be a subtype of string; anything else is reported but the
    // parsed expression is still returned so that parsing can continue.
    switch (expr.kind) {
        case STRING_LITERAL:
        case IDENTIFIER_TOKEN:
        case QUALIFIED_NAME_REFERENCE:
            break;
        default:
            this.errorHandler.reportInvalidNode(null, "namespace uri must be a subtype of string");
    }
    return expr;
}

/**
 * Parse constant expression.
 *
 * @return Parsed node
 */
private STNode parseConstExpr() {
    startContext(ParserRuleContext.CONSTANT_EXPRESSION);
    STToken nextToken = peek();
    STNode expr;
    switch (nextToken.kind) {
        case STRING_LITERAL:
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            expr = consume();
            break;
        case IDENTIFIER_TOKEN:
            expr = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            break;
        // NOTE(review): OPEN_BRACE_TOKEN falls through to recovery — mapping
        // constructors are not handled here; confirm this is deliberate.
        case OPEN_BRACE_TOKEN:
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);
            expr = solution.recoveredNode;
            break;
    }
    endContext();
    return expr;
}

/**
 * Parse the portion after the namespace-uri of an XML declaration.
* * @param xmlnsKeyword XMLNS keyword * @param namespaceUri Namespace URI * @return Parsed node */ private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri) { return parseXMLDeclRhs(peek().kind, xmlnsKeyword, namespaceUri); } private STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri) { STNode asKeyword = STNodeFactory.createEmptyNode(); STNode namespacePrefix = STNodeFactory.createEmptyNode(); switch (nextTokenKind) { case AS_KEYWORD: asKeyword = parseAsKeyword(); namespacePrefix = parseNamespacePrefix(); break; case SEMICOLON_TOKEN: break; default: STToken token = peek(); Solution solution = recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri); if (solution.action == Action.REMOVE) { return solution.recoveredNode; } return parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri); } STNode semicolon = parseSemicolon(); return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } /** * Parse import prefix. * * @return Parsed node */ private STNode parseNamespacePrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.NAMESPACE_PREFIX); return sol.recoveredNode; } } /** * Parse named worker declaration. 
* <p> * <code>named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }</code> * * @param annots Annotations attached to the worker decl * @return Parsed node */ private STNode parseNamedWorkerDeclaration(STNode annots) { startContext(ParserRuleContext.NAMED_WORKER_DECL); STNode workerKeyword = parseWorkerKeyword(); STNode workerName = parseWorkerName(); STNode returnTypeDesc = parseReturnTypeDescriptor(); STNode workerBody = parseBlockNode(); endContext(); return STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc, workerBody); } /** * Parse worker keyword. * * @return Parsed node */ private STNode parseWorkerKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.WORKER_KEYWORD); return sol.recoveredNode; } } /** * Parse worker name. * <p> * <code>worker-name := identifier</code> * * @return Parsed node */ private STNode parseWorkerName() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { Solution sol = recover(peek(), ParserRuleContext.WORKER_NAME); return sol.recoveredNode; } } /** * Parse documentation string. * <p> * <code>DocumentationString := DocumentationLine +</code> * <p> * Refer {@link BallerinaLexer * * @return Parsed node */ private STNode parseDocumentationString() { List<STNode> docLines = new ArrayList<>(); STToken nextToken = peek(); while (nextToken.kind == SyntaxKind.DOCUMENTATION_LINE) { docLines.add(consume()); nextToken = peek(); } STNode documentationLines = STNodeFactory.createNodeList(docLines); return STNodeFactory.createDocumentationStringNode(documentationLines); } /** * Parse lock statement. 
* <code>lock-stmt := lock block-stmt ;</code> * * @return Lock statement */ private STNode parseLockStatement() { startContext(ParserRuleContext.LOCK_STMT); STNode lockKeyword = parseLockKeyword(); STNode blockStatement = parseBlockNode(); endContext(); return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement); } /** * Parse lock-keyword. * * @return lock-keyword node */ private STNode parseLockKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.LOCK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.LOCK_KEYWORD); return sol.recoveredNode; } } /** * Parse union type descriptor. * union-type-descriptor := type-descriptor | type-descriptor * * @return parsed union type desc node */ private STNode parseUnionTypeDescriptor(STNode leftTypeDesc) { STNode pipeToken = parsePipeToken(); STNode rightTypeDesc = parseTypeDescriptor(); return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc); } /** * Parse pipe token. 
 *
 * @return parsed pipe token node
 */
private STNode parsePipeToken() {
    STToken token = peek();
    if (token.kind == SyntaxKind.PIPE_TOKEN) {
        return consume();
    } else {
        Solution sol = recover(token, ParserRuleContext.PIPE);
        return sol.recoveredNode;
    }
}

/**
 * Check whether the given token kind can start a type descriptor.
 *
 * @param nodeKind Token kind to check
 * @return <code>true</code> if the token can start a type descriptor, <code>false</code> otherwise
 */
private boolean isTypeStartingToken(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case IDENTIFIER_TOKEN:
        case SERVICE_KEYWORD:
        case RECORD_KEYWORD:
        case OBJECT_KEYWORD:
        case ABSTRACT_KEYWORD:
        case CLIENT_KEYWORD:
        case OPEN_PAREN_TOKEN:
        case MAP_KEYWORD:
        case FUTURE_KEYWORD:
        case TYPEDESC_KEYWORD:
            return true;
        default:
            // Any built-in simple type keyword also starts a type descriptor.
            return isSimpleType(nodeKind);
    }
}

/**
 * Check whether the given kind denotes a simple (built-in) type.
 *
 * @param nodeKind Kind to check
 * @return <code>true</code> for simple types, <code>false</code> otherwise
 */
static boolean isSimpleType(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case INT_KEYWORD:
        case FLOAT_KEYWORD:
        case DECIMAL_KEYWORD:
        case BOOLEAN_KEYWORD:
        case STRING_KEYWORD:
        case BYTE_KEYWORD:
        case XML_KEYWORD:
        case JSON_KEYWORD:
        case HANDLE_KEYWORD:
        case ANY_KEYWORD:
        case ANYDATA_KEYWORD:
        case NEVER_KEYWORD:
        case SERVICE_KEYWORD:
        case VAR_KEYWORD:
            return true;
        // An already-constructed type-desc node is also accepted here.
        case TYPE_DESC:
            return true;
        default:
            return false;
    }
}

/**
 * Map a built-in type keyword to the corresponding type-descriptor syntax kind.
 *
 * @param typeKeyword Keyword token kind
 * @return Matching type-descriptor kind, or <code>TYPE_DESC</code> as the fallback
 */
private SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {
    switch (typeKeyword) {
        case INT_KEYWORD:
            return SyntaxKind.INT_TYPE_DESC;
        case FLOAT_KEYWORD:
            return SyntaxKind.FLOAT_TYPE_DESC;
        case DECIMAL_KEYWORD:
            return SyntaxKind.DECIMAL_TYPE_DESC;
        case BOOLEAN_KEYWORD:
            return SyntaxKind.BOOLEAN_TYPE_DESC;
        case STRING_KEYWORD:
            return SyntaxKind.STRING_TYPE_DESC;
        case BYTE_KEYWORD:
            return SyntaxKind.BYTE_TYPE_DESC;
        case XML_KEYWORD:
            return SyntaxKind.XML_TYPE_DESC;
        case JSON_KEYWORD:
            return SyntaxKind.JSON_TYPE_DESC;
        case HANDLE_KEYWORD:
            return SyntaxKind.HANDLE_TYPE_DESC;
        case ANY_KEYWORD:
            return SyntaxKind.ANY_TYPE_DESC;
        case ANYDATA_KEYWORD:
            return SyntaxKind.ANYDATA_TYPE_DESC;
        case NEVER_KEYWORD:
            return SyntaxKind.NEVER_TYPE_DESC;
        case SERVICE_KEYWORD:
            return SyntaxKind.SERVICE_TYPE_DESC;
        case VAR_KEYWORD:
            return SyntaxKind.VAR_TYPE_DESC;
        default:
            return SyntaxKind.TYPE_DESC;
    }
}

/**
 * Parse fork-keyword.
* * @return Fork-keyword node */ private STNode parseForkKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FORK_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FORK_KEYWORD); return sol.recoveredNode; } } /** * Parse multiple named worker declarations. * * @return named-worker-declarations node array */ private STNode parseMultileNamedWorkerDeclarations() { STToken token = peek(); ArrayList<STNode> workers = new ArrayList<>(); while (!isEndOfStatements(token.kind)) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (stmt.kind) { case NAMED_WORKER_DECLARATION: workers.add(stmt); break; default: this.errorHandler.reportInvalidNode(null, "Only named-workers are allowed here"); break; } token = peek(); } if (workers.isEmpty()) { this.errorHandler.reportInvalidNode(null, "Fork Statement must contain atleast one named-worker"); } STNode namedWorkers = STNodeFactory.createNodeList(workers); return namedWorkers; } /** * Parse fork statement. * <code>fork-stmt := fork { named-worker-decl+ }</code> * * @return Fork statement */ private STNode parseForkStatement() { startContext(ParserRuleContext.FORK_STMT); STNode forkKeyword = parseForkKeyword(); STNode openBrace = parseOpenBrace(); STNode namedWorkerDeclarations = parseMultileNamedWorkerDeclarations(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace); } /** * Parse decimal floating point literal. * * @return Parsed node */ private STNode parseDecimalFloatingPointLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.DECIMAL_FLOATING_POINT_LITERAL); return sol.recoveredNode; } } /** * Parse hex floating point literal. 
* * @return Parsed node */ private STNode parseHexFloatingPointLiteral() { STToken token = peek(); if (token.kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.HEX_FLOATING_POINT_LITERAL); return sol.recoveredNode; } } /** * Parse trap expression. * <p> * <code> * trap-expr := trap expression * </code> * * @param isRhsExpr * @return Trap expression node */ private STNode parseTrapExpression(boolean isRhsExpr) { STNode trapKeyword = parseTrapKeyword(); STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false); return STNodeFactory.createTrapExpressionNode(trapKeyword, expr); } /** * Parse trap-keyword. * * @return Trap-keyword node */ private STNode parseTrapKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRAP_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TRAP_KEYWORD); return sol.recoveredNode; } } /** * Parse list constructor expression. * <p> * <code> * list-constructor-expr := [ [ expr-list ] ] * <br/> * expr-list := expression (, expression)* * </code> * * @return Parsed node */ private STNode parseListConstructorExpr() { startContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode expressions = parseOptionalExpressionsList(); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); } /** * Parse optional expression list. 
* * @return Parsed node */ private STNode parseOptionalExpressionsList() { List<STNode> expressions = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfExpressionsList(nextToken.kind)) { return STNodeFactory.createNodeList(new ArrayList<>()); } STNode expr = parseExpression(); expressions.add(expr); nextToken = peek(); STNode leadingComma; while (!isEndOfExpressionsList(nextToken.kind)) { leadingComma = parseComma(); expressions.add(leadingComma); expr = parseExpression(); expressions.add(expr); nextToken = peek(); } return STNodeFactory.createNodeList(expressions); } /** * Parse foreach statement. * <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt</code> * * @return foreach statement */ private STNode parseForEachStatement() { startContext(ParserRuleContext.FOREACH_STMT); STNode forEachKeyword = parseForEachKeyword(); STNode type = parseTypeDescriptor(); STNode varName = parseVariableName(); STNode inKeyword = parseInKeyword(); STNode actionOrExpr = parseActionOrExpression(); STNode blockStatement = parseBlockNode(); endContext(); return STNodeFactory.createForEachStatementNode(forEachKeyword, type, varName, inKeyword, actionOrExpr, blockStatement); } /** * Parse foreach-keyword. * * @return ForEach-keyword node */ private STNode parseForEachKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FOREACH_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FOREACH_KEYWORD); return sol.recoveredNode; } } /** * Parse in-keyword. * * @return In-keyword node */ private STNode parseInKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IN_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.IN_KEYWORD); return sol.recoveredNode; } } /** * Parse type cast expression. 
* <p> * <code> * type-cast-expr := < type-cast-param > expression * <br/> * type-cast-param := [annots] type-descriptor | annots * </code> * * @return Parsed node */ private STNode parseTypeCastExpr() { startContext(ParserRuleContext.TYPE_CAST_EXPRESSION); STNode ltToken = parseLTToken(); STNode typeCastParam = parseTypeCastParam(); STNode gtToken = parseGTToken(); STNode expression = parseExpression(); endContext(); return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression); } private STNode parseTypeCastParam() { STNode annot; STNode type; STToken token = peek(); switch (token.kind) { case AT_TOKEN: annot = parseAnnotations(); token = peek(); if (isTypeStartingToken(token.kind)) { type = parseTypeDescriptor(); } else { type = STNodeFactory.createEmptyNode(); } break; default: annot = STNodeFactory.createEmptyNode(); type = parseTypeDescriptor(); break; } return STNodeFactory.createTypeCastParamNode(annot, type); } /** * Parse table constructor expression. * <p> * <code> * table-constructor-expr := table [key-specifier] [ [row-list] ] * </code> * * @return Parsed node */ private STNode parseTableConstructorExpr() { startContext(ParserRuleContext.TABLE_CONSTRUCTOR); STNode tableKeyword = parseTableKeyword(); STNode keySpecifier = STNodeFactory.createEmptyNode(); return parseTableConstructorExpr(tableKeyword, keySpecifier); } private STNode parseTableConstructorExpr(STNode tableKeyword, STNode keySpecifier) { return parseTableConstructorExpr(peek().kind, tableKeyword, keySpecifier); } /** * Parse table-keyword. * * @return Table-keyword node */ private STNode parseTableKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TABLE_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.TABLE_KEYWORD); return sol.recoveredNode; } } /** * Parse table rows. 
* <p> * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code> * * @return Parsed node */ private STNode parseRowList() { List<STNode> mappings = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfMappingConstructorsList(nextToken.kind)) { return STNodeFactory.createNodeList(new ArrayList<>()); } STNode mapExpr = parseMappingConstructorExpr(); mappings.add(mapExpr); nextToken = peek(); STNode leadingComma; while (!isEndOfMappingConstructorsList(nextToken.kind)) { leadingComma = parseComma(); mappings.add(leadingComma); mapExpr = parseMappingConstructorExpr(); mappings.add(mapExpr); nextToken = peek(); } return STNodeFactory.createNodeList(mappings); } private boolean isEndOfMappingConstructorsList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case OPEN_BRACE_TOKEN: return false; default: return isEndOfMappingConstructor(tokenKind); } } /** * Parse key specifier. * <p> * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code> * * @return Parsed node */ private STNode parseKeySpecifier() { startContext(ParserRuleContext.KEY_SPECIFIER); STNode keyKeyword = parseKeyKeyword(); STNode openParen = parseOpenParenthesis(); STNode fieldNames = parseFieldNames(); STNode closeParen = parseCloseParenthesis(); endContext(); return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen); } /** * Parse key-keyword. * * @return Key-keyword node */ private STNode parseKeyKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.KEY_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.KEY_KEYWORD); return sol.recoveredNode; } } /** * Parse field names. 
* <p> * <code>field-name-list := [ field-name (, field-name)* ]</code> * * @return Parsed node */ private STNode parseFieldNames() { List<STNode> fieldNames = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfFieldNamesList(nextToken.kind)) { return STNodeFactory.createNodeList(new ArrayList<>()); } STNode fieldName = parseVariableName(); fieldNames.add(fieldName); nextToken = peek(); STNode leadingComma; while (!isEndOfFieldNamesList(nextToken.kind)) { leadingComma = parseComma(); fieldNames.add(leadingComma); fieldName = parseVariableName(); fieldNames.add(fieldName); nextToken = peek(); } return STNodeFactory.createNodeList(fieldNames); } private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case IDENTIFIER_TOKEN: return false; default: return true; } } }
```suggestion } if (symbol.restParam != null) { ``` Shouldn't there be a new line here?
private BInvokableSymbol resolveInvokableSymbol(BInvokableSymbol symbol, BType newInvokableType, BType boundType) { BInvokableSymbol newInvokableSymbol = duplicateSymbol(symbol); newInvokableSymbol.type = newInvokableType; for (BVarSymbol param : symbol.params) { BType newParamType = resolve(param.type, boundType); BVarSymbol newVarSymbol = createNewVarSymbol(param, newParamType); newInvokableSymbol.params.add(newVarSymbol); } if (symbol.restParam != null) { BType newReturnType = resolve(symbol.restParam.type, boundType); newInvokableSymbol.restParam = createNewVarSymbol(symbol.restParam, newReturnType); } newInvokableSymbol.retType = resolve(symbol.retType, boundType); return newInvokableSymbol; }
newInvokableSymbol.params.add(newVarSymbol);
private BInvokableSymbol resolveInvokableSymbol(BInvokableSymbol symbol, BType newInvokableType, BType boundType) { BInvokableSymbol newInvokableSymbol = duplicateSymbol(symbol); newInvokableSymbol.type = newInvokableType; for (BVarSymbol param : symbol.params) { BType newParamType = resolve(param.type, boundType); BVarSymbol newVarSymbol = createNewVarSymbol(param, newParamType); newInvokableSymbol.params.add(newVarSymbol); } if (symbol.restParam != null) { BType newReturnType = resolve(symbol.restParam.type, boundType); newInvokableSymbol.restParam = createNewVarSymbol(symbol.restParam, newReturnType); } newInvokableSymbol.retType = resolve(symbol.retType, boundType); return newInvokableSymbol; }
class TypeParamResolver implements BTypeVisitor<BType, BType> { private final Map<BType, BType> boundTypes = new HashMap<>(); private final BType typeParam; public TypeParamResolver(BType typeParam) { this.typeParam = typeParam; } /** * Given a type containing a type param component, this method will return a new type of the same kind with the type * param components bound to the specified bound type. This only works for a single type param, specified when * creating the TypeParamResolver instance. If the type doesn't contain any type param components, it'll return the * same type instance. * * @param typeParam The type containing the type param component * @param boundType The type to bind the type param to * @return The type param resolved type instance */ public BType resolve(BType typeParam, BType boundType) { if (boundTypes.containsKey(typeParam)) { return boundTypes.get(typeParam); } if (isTypeParam(typeParam)) { this.boundTypes.put(typeParam, boundType); return boundType; } BType type = typeParam.accept(this, boundType); boundTypes.put(typeParam, type); return type; } @Override public BType visit(BType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BBuiltInRefType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BAnyType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BAnydataType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BMapType typeInSymbol, BType boundType) { BType boundConstraintType = resolve(typeInSymbol.constraint, boundType); if (boundConstraintType == typeInSymbol) { return typeInSymbol; } return new BMapType(typeInSymbol.tag, boundConstraintType, typeInSymbol.tsymbol, typeInSymbol.flags); } @Override public BType visit(BXMLType typeInSymbol, BType boundType) { BType boundConstraintType = resolve(typeInSymbol.constraint, boundType); if (boundConstraintType == typeInSymbol) { return typeInSymbol; } 
return new BXMLType(boundConstraintType, typeInSymbol.tsymbol, typeInSymbol.flags); } @Override public BType visit(BJSONType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BArrayType typeInSymbol, BType boundType) { BType boundElemType = resolve(typeInSymbol.eType, boundType); if (boundElemType == typeInSymbol) { return typeInSymbol; } return new BArrayType(boundElemType, typeInSymbol.tsymbol, typeInSymbol.size, typeInSymbol.state, typeInSymbol.flags); } @Override public BType visit(BObjectType typeInSymbol, BType boundType) { BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) typeInSymbol.tsymbol; LinkedHashMap<String, BField> newObjectFields = new LinkedHashMap<>(); Set<Map.Entry<String, BField>> entries = typeInSymbol.getFields().entrySet(); for (Map.Entry<String, BField> entry: entries) { BType newType = resolve(entry.getValue().type, boundType); BVarSymbol newVarSymbol = createNewVarSymbol(entry.getValue().symbol, newType); BField newField = new BField(newVarSymbol.getName(), newVarSymbol.getPosition(), newVarSymbol); newObjectFields.put(entry.getKey(), newField); } List<BAttachedFunction> newAttachedFuncs = new ArrayList<>(); for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) { BType newType = resolve(attachedFunc.type, boundType); BInvokableSymbol newInvokableSymbol = resolveInvokableSymbol(attachedFunc.symbol, newType, boundType); BAttachedFunction newAttachedFunc = new BAttachedFunction(attachedFunc.funcName, newInvokableSymbol, (BInvokableType) newType, attachedFunc.pos); newAttachedFuncs.add(newAttachedFunc); } BObjectTypeSymbol newTypeSymbol = new BObjectTypeSymbol(objectTypeSymbol.tag, objectTypeSymbol.flags, objectTypeSymbol.name, objectTypeSymbol.pkgID, objectTypeSymbol.getType(), objectTypeSymbol.owner, objectTypeSymbol.pos, objectTypeSymbol.origin); BObjectType newObjectType = new BObjectType(newTypeSymbol, typeInSymbol.flags); newObjectType.fields = newObjectFields; 
newTypeSymbol.attachedFuncs = newAttachedFuncs; return newObjectType; } @Override public BType visit(BRecordType typeInSymbol, BType boundType) { LinkedHashMap<String, BField> newRecordFields = new LinkedHashMap<>(); Set<Map.Entry<String, BField>> entries = typeInSymbol.fields.entrySet(); for (Map.Entry<String, BField> entry: entries) { BType newType = resolve(entry.getValue().type, boundType); BVarSymbol newVarSymbol = createNewVarSymbol(entry.getValue().symbol, newType); BField newField = new BField(newVarSymbol.getName(), newVarSymbol.getPosition(), newVarSymbol); newRecordFields.put(entry.getKey(), newField); } BType newRestType = resolve(typeInSymbol.restFieldType, boundType); BRecordType newRecordType = new BRecordType(typeInSymbol.tsymbol, typeInSymbol.flags); newRecordType.fields = newRecordFields; newRecordType.restFieldType = newRestType; return newRecordType; } @Override public BType visit(BTupleType typeInSymbol, BType boundType) { List<BType> newTupleTypes = new ArrayList<>(); List<BType> tupleTypes = typeInSymbol.tupleTypes; boolean areAllSameType = true; for (BType type : tupleTypes) { BType newType = resolve(type, boundType); areAllSameType &= newType == type; newTupleTypes.add(newType); } BType newRestType = typeInSymbol.restType != null ? 
resolve(typeInSymbol.restType, boundType) : null; if (areAllSameType && newRestType == typeInSymbol.restType) { return typeInSymbol; } return new BTupleType(typeInSymbol.tsymbol, newTupleTypes, newRestType, typeInSymbol.flags, typeInSymbol.isCyclic); } @Override public BType visit(BStreamType typeInSymbol, BType boundType) { BType boundConstraintType = resolve(typeInSymbol.constraint, boundType); if (boundConstraintType == typeInSymbol) { return typeInSymbol; } return new BStreamType(typeInSymbol.tag, boundConstraintType, typeInSymbol.completionType, typeInSymbol.tsymbol); } @Override public BType visit(BTableType typeInSymbol, BType boundType) { BType boundConstraintType = resolve(typeInSymbol.constraint, boundType); if (boundConstraintType == typeInSymbol) { return typeInSymbol; } return new BTableType(typeInSymbol.tag, boundConstraintType, typeInSymbol.tsymbol, typeInSymbol.flags); } @Override public BType visit(BInvokableType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BUnionType typeInSymbol, BType boundType) { LinkedHashSet<BType> newMembers = new LinkedHashSet<>(); boolean areAllSameType = true; for (BType memberType : typeInSymbol.getOriginalMemberTypes()) { BType newType = resolve(memberType, boundType); areAllSameType &= newType == memberType; newMembers.add(newType); } if (areAllSameType) { return typeInSymbol; } return BUnionType.create(typeInSymbol.tsymbol, newMembers); } @Override public BType visit(BIntersectionType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BErrorType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BFutureType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BFiniteType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BTypedescType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BParameterizedType 
typeInSymbol, BType boundType) { return typeInSymbol; } public BType visit(BTypeReferenceType typeInSymbol, BType s) { return typeInSymbol; } private boolean isTypeParam(BType type) { return type == this.typeParam; } private BVarSymbol createNewVarSymbol(BVarSymbol symbol, BType newType) { if (symbol == null) { return null; } BType originalType = symbol.getType(); if (originalType == newType) { return symbol; } BVarSymbol duplicateSymbol = duplicateSymbol(symbol); duplicateSymbol.type = newType; return duplicateSymbol; } private BVarSymbol duplicateSymbol(BVarSymbol original) { BVarSymbol duplicate = new BVarSymbol(original.flags, original.isWildcard, original.name, original.originalName, original.pkgID, original.type, original.owner, original.pos, original.origin); duplicate.markdownDocumentation = original.markdownDocumentation; for (AnnotationAttachmentSymbol annot : original.getAnnotations()) { duplicate.addAnnotation(annot); } duplicate.isDefaultable = original.isDefaultable; duplicate.state = original.state; return duplicate; } private BInvokableSymbol duplicateSymbol(BInvokableSymbol original) { BInvokableSymbol duplicate = Symbols.createInvokableSymbol(original.tag, original.flags, original.name, original.originalName, original.pkgID, original.type, original.owner, original.pos, original.origin); ((List<AnnotationAttachmentSymbol>) duplicate.getAnnotations()).addAll(original.getAnnotations()); duplicate.bodyExist = original.bodyExist; duplicate.markdownDocumentation = original.markdownDocumentation; duplicate.receiverSymbol = original.receiverSymbol; return duplicate; } }
class TypeParamResolver implements BTypeVisitor<BType, BType> { private final Map<BType, BType> boundTypes = new HashMap<>(); private final BType typeParam; public TypeParamResolver(BType typeParam) { this.typeParam = typeParam; } /** * Given a type containing a type param component, this method will return a new type of the same kind with the type * param components bound to the specified bound type. This only works for a single type param, specified when * creating the TypeParamResolver instance. If the type doesn't contain any type param components, it'll return the * same type instance. * * @param typeParam The type containing the type param component * @param boundType The type to bind the type param to * @return The type param resolved type instance */ public BType resolve(BType typeParam, BType boundType) { if (boundTypes.containsKey(typeParam)) { return boundTypes.get(typeParam); } if (isTypeParam(typeParam)) { this.boundTypes.put(typeParam, boundType); return boundType; } BType type = typeParam.accept(this, boundType); boundTypes.put(typeParam, type); return type; } @Override public BType visit(BType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BBuiltInRefType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BAnyType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BAnydataType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BMapType typeInSymbol, BType boundType) { BType boundConstraintType = resolve(typeInSymbol.constraint, boundType); if (boundConstraintType == typeInSymbol) { return typeInSymbol; } return new BMapType(typeInSymbol.tag, boundConstraintType, typeInSymbol.tsymbol, typeInSymbol.flags); } @Override public BType visit(BXMLType typeInSymbol, BType boundType) { BType boundConstraintType = resolve(typeInSymbol.constraint, boundType); if (boundConstraintType == typeInSymbol) { return typeInSymbol; } 
return new BXMLType(boundConstraintType, typeInSymbol.tsymbol, typeInSymbol.flags); } @Override public BType visit(BJSONType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BArrayType typeInSymbol, BType boundType) { BType boundElemType = resolve(typeInSymbol.eType, boundType); if (boundElemType == typeInSymbol) { return typeInSymbol; } return new BArrayType(boundElemType, typeInSymbol.tsymbol, typeInSymbol.size, typeInSymbol.state, typeInSymbol.flags); } @Override public BType visit(BObjectType typeInSymbol, BType boundType) { BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) typeInSymbol.tsymbol; LinkedHashMap<String, BField> newObjectFields = new LinkedHashMap<>(); Set<Map.Entry<String, BField>> entries = typeInSymbol.getFields().entrySet(); for (Map.Entry<String, BField> entry: entries) { BType newType = resolve(entry.getValue().type, boundType); BVarSymbol newVarSymbol = createNewVarSymbol(entry.getValue().symbol, newType); BField newField = new BField(newVarSymbol.getName(), newVarSymbol.getPosition(), newVarSymbol); newObjectFields.put(entry.getKey(), newField); } List<BAttachedFunction> newAttachedFuncs = new ArrayList<>(); for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) { BType newType = resolve(attachedFunc.type, boundType); BInvokableSymbol newInvokableSymbol = resolveInvokableSymbol(attachedFunc.symbol, newType, boundType); BAttachedFunction newAttachedFunc = new BAttachedFunction(attachedFunc.funcName, newInvokableSymbol, (BInvokableType) newType, attachedFunc.pos); newAttachedFuncs.add(newAttachedFunc); } BObjectTypeSymbol newTypeSymbol = new BObjectTypeSymbol(objectTypeSymbol.tag, objectTypeSymbol.flags, objectTypeSymbol.name, objectTypeSymbol.pkgID, objectTypeSymbol.getType(), objectTypeSymbol.owner, objectTypeSymbol.pos, objectTypeSymbol.origin); BObjectType newObjectType = new BObjectType(newTypeSymbol, typeInSymbol.flags); newObjectType.fields = newObjectFields; 
newTypeSymbol.attachedFuncs = newAttachedFuncs; return newObjectType; } @Override public BType visit(BRecordType typeInSymbol, BType boundType) { LinkedHashMap<String, BField> newRecordFields = new LinkedHashMap<>(); Set<Map.Entry<String, BField>> entries = typeInSymbol.fields.entrySet(); for (Map.Entry<String, BField> entry: entries) { BType newType = resolve(entry.getValue().type, boundType); BVarSymbol newVarSymbol = createNewVarSymbol(entry.getValue().symbol, newType); BField newField = new BField(newVarSymbol.getName(), newVarSymbol.getPosition(), newVarSymbol); newRecordFields.put(entry.getKey(), newField); } BType newRestType = resolve(typeInSymbol.restFieldType, boundType); BRecordType newRecordType = new BRecordType(typeInSymbol.tsymbol, typeInSymbol.flags); newRecordType.fields = newRecordFields; newRecordType.restFieldType = newRestType; return newRecordType; } @Override public BType visit(BTupleType typeInSymbol, BType boundType) { List<BType> newTupleTypes = new ArrayList<>(); List<BType> tupleTypes = typeInSymbol.tupleTypes; boolean areAllSameType = true; for (BType type : tupleTypes) { BType newType = resolve(type, boundType); areAllSameType &= newType == type; newTupleTypes.add(newType); } BType newRestType = typeInSymbol.restType != null ? 
resolve(typeInSymbol.restType, boundType) : null; if (areAllSameType && newRestType == typeInSymbol.restType) { return typeInSymbol; } return new BTupleType(typeInSymbol.tsymbol, newTupleTypes, newRestType, typeInSymbol.flags, typeInSymbol.isCyclic); } @Override public BType visit(BStreamType typeInSymbol, BType boundType) { BType boundConstraintType = resolve(typeInSymbol.constraint, boundType); if (boundConstraintType == typeInSymbol) { return typeInSymbol; } return new BStreamType(typeInSymbol.tag, boundConstraintType, typeInSymbol.completionType, typeInSymbol.tsymbol); } @Override public BType visit(BTableType typeInSymbol, BType boundType) { BType boundConstraintType = resolve(typeInSymbol.constraint, boundType); if (boundConstraintType == typeInSymbol) { return typeInSymbol; } return new BTableType(typeInSymbol.tag, boundConstraintType, typeInSymbol.tsymbol, typeInSymbol.flags); } @Override public BType visit(BInvokableType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BUnionType typeInSymbol, BType boundType) { LinkedHashSet<BType> newMembers = new LinkedHashSet<>(); boolean areAllSameType = true; for (BType memberType : typeInSymbol.getOriginalMemberTypes()) { BType newType = resolve(memberType, boundType); areAllSameType &= newType == memberType; newMembers.add(newType); } if (areAllSameType) { return typeInSymbol; } return BUnionType.create(typeInSymbol.tsymbol, newMembers); } @Override public BType visit(BIntersectionType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BErrorType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BFutureType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BFiniteType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BTypedescType typeInSymbol, BType boundType) { return typeInSymbol; } @Override public BType visit(BParameterizedType 
typeInSymbol, BType boundType) { return typeInSymbol; } public BType visit(BTypeReferenceType typeInSymbol, BType s) { return typeInSymbol; } private boolean isTypeParam(BType type) { return type == this.typeParam; } private BVarSymbol createNewVarSymbol(BVarSymbol symbol, BType newType) { if (symbol == null) { return null; } BType originalType = symbol.getType(); if (originalType == newType) { return symbol; } BVarSymbol duplicateSymbol = duplicateSymbol(symbol); duplicateSymbol.type = newType; return duplicateSymbol; } private BVarSymbol duplicateSymbol(BVarSymbol original) { BVarSymbol duplicate = new BVarSymbol(original.flags, original.isWildcard, original.name, original.originalName, original.pkgID, original.type, original.owner, original.pos, original.origin); duplicate.markdownDocumentation = original.markdownDocumentation; for (AnnotationAttachmentSymbol annot : original.getAnnotations()) { duplicate.addAnnotation(annot); } duplicate.isDefaultable = original.isDefaultable; duplicate.state = original.state; return duplicate; } private BInvokableSymbol duplicateSymbol(BInvokableSymbol original) { BInvokableSymbol duplicate = Symbols.createInvokableSymbol(original.tag, original.flags, original.name, original.originalName, original.pkgID, original.type, original.owner, original.pos, original.origin); ((List<AnnotationAttachmentSymbol>) duplicate.getAnnotations()).addAll(original.getAnnotations()); duplicate.bodyExist = original.bodyExist; duplicate.markdownDocumentation = original.markdownDocumentation; duplicate.receiverSymbol = original.receiverSymbol; return duplicate; } }
Doesn't this mean that a given deployment will send one request per host in the node repository to the orchestrator? In larger zones that is ~400.
public boolean canAllocateTenantNodeTo(Node host, boolean dynamicProvisioning) { if ( ! host.type().canRun(NodeType.tenant)) return false; if (host.status().wantToRetire()) return false; if (host.allocation().map(alloc -> alloc.membership().retired()).orElse(false)) return false; if (suspended(host)) return false; if (dynamicProvisioning) return EnumSet.of(Node.State.active, Node.State.ready, Node.State.provisioned).contains(host.state()); else return host.state() == Node.State.active; }
if (suspended(host)) return false;
public boolean canAllocateTenantNodeTo(Node host, boolean dynamicProvisioning) { if ( ! host.type().canRun(NodeType.tenant)) return false; if (host.status().wantToRetire()) return false; if (host.allocation().map(alloc -> alloc.membership().retired()).orElse(false)) return false; if (suspended(host)) return false; if (dynamicProvisioning) return EnumSet.of(Node.State.active, Node.State.ready, Node.State.provisioned).contains(host.state()); else return host.state() == Node.State.active; }
class Nodes { private static final Logger log = Logger.getLogger(Nodes.class.getName()); private final CuratorDatabaseClient db; private final Zone zone; private final Clock clock; private final Orchestrator orchestrator; public Nodes(CuratorDatabaseClient db, Zone zone, Clock clock, Orchestrator orchestrator) { this.zone = zone; this.clock = clock; this.db = db; this.orchestrator = orchestrator; } /** Read and write all nodes to make sure they are stored in the latest version of the serialized format */ public void rewrite() { Instant start = clock.instant(); int nodesWritten = 0; for (Node.State state : Node.State.values()) { List<Node> nodes = db.readNodes(state); db.writeTo(state, nodes, Agent.system, Optional.empty()); nodesWritten += nodes.size(); } Instant end = clock.instant(); log.log(Level.INFO, String.format("Rewrote %d nodes in %s", nodesWritten, Duration.between(start, end))); } /** * Finds and returns the node with the hostname in any of the given states, or empty if not found * * @param hostname the full host name of the node * @param inState the states the node may be in. If no states are given, it will be returned from any state * @return the node, or empty if it was not found in any of the given states */ public Optional<Node> node(String hostname, Node.State... inState) { return db.readNode(hostname, inState); } /** * Returns a list of nodes in this repository in any of the given states * * @param inState the states to return nodes from. If no states are given, all nodes of the given type are returned */ public NodeList list(Node.State... inState) { return NodeList.copyOf(db.readNodes(inState)); } /** Returns a locked list of all nodes in this repository */ public LockedNodeList list(Mutex lock) { return new LockedNodeList(list().asList(), lock); } /** * Returns whether the zone managed by this node repository seems to be working. 
* If too many nodes are not responding, there is probably some zone-wide issue * and we should probably refrain from making changes to it. */ public boolean isWorking() { NodeList activeNodes = list(Node.State.active); if (activeNodes.size() <= 5) return true; NodeList downNodes = activeNodes.down(); return ! ( (double)downNodes.size() / (double)activeNodes.size() > 0.2 ); } /** Adds a list of newly created reserved nodes to the node repository */ public List<Node> addReservedNodes(LockedNodeList nodes) { for (Node node : nodes) { if ( node.flavor().getType() != Flavor.Type.DOCKER_CONTAINER) illegal("Cannot add " + node + ": This is not a child node"); if (node.allocation().isEmpty()) illegal("Cannot add " + node + ": Child nodes need to be allocated"); Optional<Node> existing = node(node.hostname()); if (existing.isPresent()) illegal("Cannot add " + node + ": A node with this name already exists (" + existing.get() + ", " + existing.get().history() + "). Node to be added: " + node + ", " + node.history()); } return db.addNodesInState(nodes.asList(), Node.State.reserved, Agent.system); } /** * Adds a list of (newly created) nodes to the node repository as provisioned nodes. * If any of the nodes already exists in the deprovisioned state, the new node will be merged * with the history of that node. 
*/ public List<Node> addNodes(List<Node> nodes, Agent agent) { try (Mutex lock = lockUnallocated()) { List<Node> nodesToAdd = new ArrayList<>(); List<Node> nodesToRemove = new ArrayList<>(); for (int i = 0; i < nodes.size(); i++) { var node = nodes.get(i); for (int j = 0; j < i; j++) { if (node.equals(nodes.get(j))) illegal("Cannot add nodes: " + node + " is duplicated in the argument list"); } Optional<Node> existing = node(node.hostname()); if (existing.isPresent()) { if (existing.get().state() != Node.State.deprovisioned) illegal("Cannot add " + node + ": A node with this name already exists"); node = node.with(existing.get().history()); node = node.with(existing.get().reports()); node = node.with(node.status().withFailCount(existing.get().status().failCount())); if (existing.get().status().firmwareVerifiedAt().isPresent()) node = node.with(node.status().withFirmwareVerifiedAt(existing.get().status().firmwareVerifiedAt().get())); boolean rebuilding = existing.get().status().wantToRebuild(); if (rebuilding) { node = node.with(node.status().withWantToRetire(existing.get().status().wantToRetire(), false, rebuilding)); } nodesToRemove.add(existing.get()); } nodesToAdd.add(node); } NestedTransaction transaction = new NestedTransaction(); List<Node> resultingNodes = db.addNodesInState(IP.Config.verify(nodesToAdd, list(lock)), Node.State.provisioned, agent, transaction); db.removeNodes(nodesToRemove, transaction); transaction.commit(); return resultingNodes; } } /** Sets a list of nodes ready and returns the nodes in the ready state */ public List<Node> setReady(List<Node> nodes, Agent agent, String reason) { try (Mutex lock = lockUnallocated()) { List<Node> nodesWithResetFields = nodes.stream() .map(node -> { if (node.state() != Node.State.provisioned && node.state() != Node.State.dirty) illegal("Can not set " + node + " ready. 
It is not provisioned or dirty."); return node.withWantToRetire(false, false, false, Agent.system, clock.instant()); }) .collect(Collectors.toList()); return db.writeTo(Node.State.ready, nodesWithResetFields, agent, Optional.of(reason)); } } public Node setReady(String hostname, Agent agent, String reason) { Node nodeToReady = requireNode(hostname); if (nodeToReady.state() == Node.State.ready) return nodeToReady; return setReady(List.of(nodeToReady), agent, reason).get(0); } /** Reserve nodes. This method does <b>not</b> lock the node repository */ public List<Node> reserve(List<Node> nodes) { return db.writeTo(Node.State.reserved, nodes, Agent.application, Optional.empty()); } /** Activate nodes. This method does <b>not</b> lock the node repository */ public List<Node> activate(List<Node> nodes, NestedTransaction transaction) { return db.writeTo(Node.State.active, nodes, Agent.application, Optional.empty(), transaction); } /** * Sets a list of nodes to have their allocation removable (active to inactive) in the node repository. * * @param application the application the nodes belong to * @param nodes the nodes to make removable. These nodes MUST be in the active state. */ public void setRemovable(ApplicationId application, List<Node> nodes) { try (Mutex lock = lock(application)) { List<Node> removableNodes = nodes.stream() .map(node -> node.with(node.allocation().get().removable(true))) .collect(Collectors.toList()); write(removableNodes, lock); } } /** * Deactivates these nodes in a transaction and returns the nodes in the new state which will hold if the * transaction commits. */ public List<Node> deactivate(List<Node> nodes, ApplicationTransaction transaction) { if ( ! 
zone.environment().isProduction() || zone.system().isCd()) return deallocate(nodes, Agent.application, "Deactivated by application", transaction.nested()); var stateless = NodeList.copyOf(nodes).stateless(); var stateful = NodeList.copyOf(nodes).stateful(); List<Node> written = new ArrayList<>(); written.addAll(deallocate(stateless.asList(), Agent.application, "Deactivated by application", transaction.nested())); written.addAll(db.writeTo(Node.State.inactive, stateful.asList(), Agent.application, Optional.empty(), transaction.nested())); return written; } /** * Fails these nodes in a transaction and returns the nodes in the new state which will hold if the * transaction commits. */ public List<Node> fail(List<Node> nodes, ApplicationTransaction transaction) { return fail(nodes, Agent.application, "Failed by application", transaction.nested()); } public List<Node> fail(List<Node> nodes, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); nodes = fail(nodes, agent, reason, transaction); transaction.commit(); return nodes; } private List<Node> fail(List<Node> nodes, Agent agent, String reason, NestedTransaction transaction) { nodes = nodes.stream() .map(n -> n.withWantToFail(false, agent, clock.instant())) .collect(Collectors.toList()); return db.writeTo(Node.State.failed, nodes, agent, Optional.of(reason), transaction); } /** Move nodes to the dirty state */ public List<Node> deallocate(List<Node> nodes, Agent agent, String reason) { return performOn(NodeList.copyOf(nodes), (node, lock) -> deallocate(node, agent, reason)); } public List<Node> deallocateRecursively(String hostname, Agent agent, String reason) { Node nodeToDirty = node(hostname).orElseThrow(() -> new IllegalArgumentException("Could not deallocate " + hostname + ": Node not found")); List<Node> nodesToDirty = (nodeToDirty.type().isHost() ? 
Stream.concat(list().childrenOf(hostname).asList().stream(), Stream.of(nodeToDirty)) : Stream.of(nodeToDirty)) .filter(node -> node.state() != Node.State.dirty) .collect(Collectors.toList()); List<String> hostnamesNotAllowedToDirty = nodesToDirty.stream() .filter(node -> node.state() != Node.State.provisioned) .filter(node -> node.state() != Node.State.failed) .filter(node -> node.state() != Node.State.parked) .filter(node -> node.state() != Node.State.breakfixed) .map(Node::hostname) .collect(Collectors.toList()); if ( ! hostnamesNotAllowedToDirty.isEmpty()) illegal("Could not deallocate " + nodeToDirty + ": " + hostnamesNotAllowedToDirty + " are not in states [provisioned, failed, parked, breakfixed]"); return nodesToDirty.stream().map(node -> deallocate(node, agent, reason)).collect(Collectors.toList()); } /** * Set a node dirty or parked, allowed if it is in the provisioned, inactive, failed or parked state. * Use this to clean newly provisioned nodes or to recycle failed nodes which have been repaired or put on hold. */ public Node deallocate(Node node, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); Node deallocated = deallocate(node, agent, reason, transaction); transaction.commit(); return deallocated; } public List<Node> deallocate(List<Node> nodes, Agent agent, String reason, NestedTransaction transaction) { return nodes.stream().map(node -> deallocate(node, agent, reason, transaction)).collect(Collectors.toList()); } public Node deallocate(Node node, Agent agent, String reason, NestedTransaction transaction) { if (parkOnDeallocationOf(node, agent)) { return park(node.hostname(), false, agent, reason, transaction); } else { return db.writeTo(Node.State.dirty, List.of(node), agent, Optional.of(reason), transaction).get(0); } } /** * Fails this node and returns it in its new state. 
* * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node fail(String hostname, Agent agent, String reason) { return fail(hostname, true, agent, reason); } public Node fail(String hostname, boolean keepAllocation, Agent agent, String reason) { return move(hostname, Node.State.failed, agent, keepAllocation, Optional.of(reason)); } /** * Fails all the nodes that are children of hostname before finally failing the hostname itself. * Non-active nodes are failed immediately, while active nodes are marked as wantToFail. * The host is failed if it has no active nodes and marked wantToFail if it has. * * @return all the nodes that were changed by this request */ public List<Node> failOrMarkRecursively(String hostname, Agent agent, String reason) { NodeList children = list().childrenOf(hostname); List<Node> changed = performOn(children, (node, lock) -> failOrMark(node, agent, reason, lock)); if (children.state(Node.State.active).isEmpty()) changed.add(move(hostname, Node.State.failed, agent, true, Optional.of(reason))); else changed.addAll(performOn(NodeList.of(node(hostname).orElseThrow()), (node, lock) -> failOrMark(node, agent, reason, lock))); return changed; } private Node failOrMark(Node node, Agent agent, String reason, Mutex lock) { if (node.state() == Node.State.active) { node = node.withWantToFail(true, agent, clock.instant()); write(node, lock); return node; } else { return move(node.hostname(), Node.State.failed, agent, true, Optional.of(reason)); } } /** * Parks this node and returns it in its new state. 
* * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node park(String hostname, boolean keepAllocation, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); Node parked = park(hostname, keepAllocation, agent, reason, transaction); transaction.commit(); return parked; } private Node park(String hostname, boolean keepAllocation, Agent agent, String reason, NestedTransaction transaction) { return move(hostname, Node.State.parked, agent, keepAllocation, Optional.of(reason), transaction); } /** * Parks all the nodes that are children of hostname before finally parking the hostname itself. * * @return List of all the parked nodes in their new state */ public List<Node> parkRecursively(String hostname, Agent agent, String reason) { return moveRecursively(hostname, Node.State.parked, agent, Optional.of(reason)); } /** * Moves a previously failed or parked node back to the active state. * * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node reactivate(String hostname, Agent agent, String reason) { return move(hostname, Node.State.active, agent, true, Optional.of(reason)); } /** * Moves a host to breakfixed state, removing any children. 
*/ public List<Node> breakfixRecursively(String hostname, Agent agent, String reason) { Node node = requireNode(hostname); try (Mutex lock = lockUnallocated()) { requireBreakfixable(node); NestedTransaction transaction = new NestedTransaction(); List<Node> removed = removeChildren(node, false, transaction); removed.add(move(node.hostname(), Node.State.breakfixed, agent, true, Optional.of(reason), transaction)); transaction.commit(); return removed; } } private List<Node> moveRecursively(String hostname, Node.State toState, Agent agent, Optional<String> reason) { NestedTransaction transaction = new NestedTransaction(); List<Node> moved = list().childrenOf(hostname).asList().stream() .map(child -> move(child.hostname(), toState, agent, true, reason, transaction)) .collect(Collectors.toList()); moved.add(move(hostname, toState, agent, true, reason, transaction)); transaction.commit(); return moved; } /** Move a node to given state */ private Node move(String hostname, Node.State toState, Agent agent, boolean keepAllocation, Optional<String> reason) { NestedTransaction transaction = new NestedTransaction(); Node moved = move(hostname, toState, agent, keepAllocation, reason, transaction); transaction.commit(); return moved; } /** Move a node to given state as part of a transaction */ private Node move(String hostname, Node.State toState, Agent agent, boolean keepAllocation, Optional<String> reason, NestedTransaction transaction) { try (NodeMutex lock = lockAndGetRequired(hostname)) { Node node = lock.node(); if (toState == Node.State.active) { if (node.allocation().isEmpty()) illegal("Could not set " + node + " active: It has no allocation"); if (!keepAllocation) illegal("Could not set " + node + " active: Requested to discard allocation"); for (Node currentActive : list(Node.State.active).owner(node.allocation().get().owner())) { if (node.allocation().get().membership().cluster().equals(currentActive.allocation().get().membership().cluster()) && 
node.allocation().get().membership().index() == currentActive.allocation().get().membership().index()) illegal("Could not set " + node + " active: Same cluster and index as " + currentActive); } } if (!keepAllocation && node.allocation().isPresent()) { node = node.withoutAllocation(); } if (toState == Node.State.deprovisioned) { node = node.with(IP.Config.EMPTY); } return db.writeTo(toState, List.of(node), agent, reason, transaction).get(0); } } /* * This method is used by the REST API to handle readying nodes for new allocations. For Linux * containers this will remove the node from node repository, otherwise the node will be moved to state ready. */ public Node markNodeAvailableForNewAllocation(String hostname, Agent agent, String reason) { Node node = requireNode(hostname); if (node.flavor().getType() == Flavor.Type.DOCKER_CONTAINER && node.type() == NodeType.tenant) { if (node.state() != Node.State.dirty) illegal("Cannot make " + node + " available for new allocation as it is not in state [dirty]"); return removeRecursively(node, true).get(0); } if (node.state() == Node.State.ready) return node; Node parentHost = node.parentHostname().flatMap(this::node).orElse(node); List<String> failureReasons = NodeFailer.reasonsToFailHost(parentHost); if ( ! failureReasons.isEmpty()) illegal(node + " cannot be readied because it has hard failures: " + failureReasons); return setReady(List.of(node), agent, reason).get(0); } /** * Removes all the nodes that are children of hostname before finally removing the hostname itself. 
* * @return a List of all the nodes that have been removed or (for hosts) deprovisioned */ public List<Node> removeRecursively(String hostname) { Node node = requireNode(hostname); return removeRecursively(node, false); } public List<Node> removeRecursively(Node node, boolean force) { try (Mutex lock = lockUnallocated()) { requireRemovable(node, false, force); NestedTransaction transaction = new NestedTransaction(); final List<Node> removed; if (!node.type().isHost()) { removed = List.of(node); db.removeNodes(removed, transaction); } else { removed = removeChildren(node, force, transaction); if (zone.getCloud().dynamicProvisioning()) { db.removeNodes(List.of(node), transaction); } else { move(node.hostname(), Node.State.deprovisioned, Agent.system, false, Optional.empty(), transaction); } removed.add(node); } transaction.commit(); return removed; } } /** Forgets a deprovisioned node. This removes all traces of the node in the node repository. */ public void forget(Node node) { if (node.state() != Node.State.deprovisioned) throw new IllegalArgumentException(node + " must be deprovisioned before it can be forgotten"); if (node.status().wantToRebuild()) throw new IllegalArgumentException(node + " is rebuilding and cannot be forgotten"); NestedTransaction transaction = new NestedTransaction(); db.removeNodes(List.of(node), transaction); transaction.commit(); } private List<Node> removeChildren(Node node, boolean force, NestedTransaction transaction) { List<Node> children = list().childrenOf(node).asList(); children.forEach(child -> requireRemovable(child, true, force)); db.removeNodes(children, transaction); return new ArrayList<>(children); } /** * Throws if the given node cannot be removed. 
Removal is allowed if: * - Tenant node: * - non-recursively: node is unallocated * - recursively: node is unallocated or node is in failed|parked * - Host node: iff in state provisioned|failed|parked * - Child node: * - non-recursively: node in state ready * - recursively: child is in state provisioned|failed|parked|dirty|ready */ private void requireRemovable(Node node, boolean removingRecursively, boolean force) { if (force) return; if (node.type() == NodeType.tenant && node.allocation().isPresent()) { EnumSet<Node.State> removableStates = EnumSet.of(Node.State.failed, Node.State.parked); if (!removingRecursively || !removableStates.contains(node.state())) illegal(node + " is currently allocated and cannot be removed while in " + node.state()); } final Set<Node.State> removableStates; if (node.type().isHost()) { removableStates = EnumSet.of(Node.State.provisioned, Node.State.failed, Node.State.parked); } else { removableStates = removingRecursively ? EnumSet.of(Node.State.provisioned, Node.State.failed, Node.State.parked, Node.State.dirty, Node.State.ready) : EnumSet.of(Node.State.ready); } if (!removableStates.contains(node.state())) illegal(node + " can not be removed while in " + node.state()); } /** * Throws if given node cannot be breakfixed. * Breakfix is allowed if the following is true: * - Node is tenant host * - Node is in zone without dynamic provisioning * - Node is in parked or failed state */ private void requireBreakfixable(Node node) { if (zone.getCloud().dynamicProvisioning()) { illegal("Can not breakfix in zone: " + zone); } if (node.type() != NodeType.host) { illegal(node + " can not be breakfixed as it is not a tenant host"); } Set<Node.State> legalStates = EnumSet.of(Node.State.failed, Node.State.parked); if (! legalStates.contains(node.state())) { illegal(node + " can not be removed as it is not in the states " + legalStates); } } /** * Increases the restart generation of the active nodes matching given filter. 
* * @return the nodes in their new state */ public List<Node> restartActive(Predicate<Node> filter) { return restart(NodeFilter.in(Set.of(Node.State.active)).and(filter)); } /** * Increases the restart generation of the any nodes matching given filter. * * @return the nodes in their new state */ public List<Node> restart(Predicate<Node> filter) { return performOn(filter, (node, lock) -> write(node.withRestart(node.allocation().get().restartGeneration().withIncreasedWanted()), lock)); } /** * Increases the reboot generation of the nodes matching the filter. * * @return the nodes in their new state */ public List<Node> reboot(Predicate<Node> filter) { return performOn(filter, (node, lock) -> write(node.withReboot(node.status().reboot().withIncreasedWanted()), lock)); } /** * Set target OS version of all nodes matching given filter. * * @return the nodes in their new state */ public List<Node> upgradeOs(Predicate<Node> filter, Optional<Version> version) { return performOn(filter, (node, lock) -> { var newStatus = node.status().withOsVersion(node.status().osVersion().withWanted(version)); return write(node.with(newStatus), lock); }); } /** Retire nodes matching given filter */ public List<Node> retire(Predicate<Node> filter, Agent agent, Instant instant) { return performOn(filter, (node, lock) -> write(node.withWantToRetire(true, agent, instant), lock)); } /** Retire and deprovision given host and all of its children */ public List<Node> deprovision(String hostname, Agent agent, Instant instant) { return decommission(hostname, DecommissionOperation.deprovision, agent, instant); } /** Retire and rebuild given host and all of its children */ public List<Node> rebuild(String hostname, Agent agent, Instant instant) { return decommission(hostname, DecommissionOperation.rebuild, agent, instant); } private List<Node> decommission(String hostname, DecommissionOperation op, Agent agent, Instant instant) { Optional<NodeMutex> nodeMutex = lockAndGet(hostname); if 
(nodeMutex.isEmpty()) return List.of(); Node host = nodeMutex.get().node(); if (!host.type().isHost()) throw new IllegalArgumentException("Cannot " + op + " non-host " + host); List<Node> result; boolean wantToDeprovision = op == DecommissionOperation.deprovision; boolean wantToRebuild = op == DecommissionOperation.rebuild; try (NodeMutex lock = nodeMutex.get(); Mutex allocationLock = lockUnallocated()) { host = lock.node(); result = performOn(list(allocationLock).childrenOf(host), (node, nodeLock) -> { Node newNode = node.withWantToRetire(true, wantToDeprovision, wantToRebuild, agent, instant); return write(newNode, nodeLock); }); Node newHost = host.withWantToRetire(true, wantToDeprovision, wantToRebuild, agent, instant); result.add(write(newHost, lock)); } return result; } /** * Writes this node after it has changed some internal state but NOT changed its state field. * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock. * * @param lock already acquired lock * @return the written node for convenience */ public Node write(Node node, Mutex lock) { return write(List.of(node), lock).get(0); } /** * Writes these nodes after they have changed some internal state but NOT changed their state field. * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock. * * @param lock already acquired lock * @return the written nodes for convenience */ public List<Node> write(List<Node> nodes, @SuppressWarnings("unused") Mutex lock) { return db.writeTo(nodes, Agent.system, Optional.empty()); } private List<Node> performOn(Predicate<Node> filter, BiFunction<Node, Mutex, Node> action) { return performOn(list().matching(filter), action); } /** * Performs an operation requiring locking on all nodes matching some filter. 
* * @param action the action to perform * @return the set of nodes on which the action was performed, as they became as a result of the operation */ private List<Node> performOn(NodeList nodes, BiFunction<Node, Mutex, Node> action) { List<Node> unallocatedNodes = new ArrayList<>(); ListMap<ApplicationId, Node> allocatedNodes = new ListMap<>(); for (Node node : nodes) { if (node.allocation().isPresent()) allocatedNodes.put(node.allocation().get().owner(), node); else unallocatedNodes.add(node); } List<Node> resultingNodes = new ArrayList<>(); try (Mutex lock = lockUnallocated()) { for (Node node : unallocatedNodes) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } for (Map.Entry<ApplicationId, List<Node>> applicationNodes : allocatedNodes.entrySet()) { try (Mutex lock = lock(applicationNodes.getKey())) { for (Node node : applicationNodes.getValue()) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } } return resultingNodes; } public boolean canAllocateTenantNodeTo(Node host) { return canAllocateTenantNodeTo(host, zone.getCloud().dynamicProvisioning()); } public boolean suspended(Node node) { try { return orchestrator.getNodeStatus(new HostName(node.hostname())).isSuspended(); } catch (HostNameNotFoundException e) { return false; } } /** Create a lock which provides exclusive rights to making changes to the given application */ public Mutex lock(ApplicationId application) { return db.lock(application); } /** Create a lock with a timeout which provides exclusive rights to making changes to the given application */ public Mutex lock(ApplicationId application, Duration timeout) { return db.lock(application, timeout); } /** Create a lock which provides exclusive rights to modifying unallocated nodes */ public Mutex lockUnallocated() { return 
db.lockInactive(); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public Optional<NodeMutex> lockAndGet(Node node) { Node staleNode = node; final int maxRetries = 4; for (int i = 0; i < maxRetries; ++i) { Mutex lockToClose = lock(staleNode); try { Optional<Node> freshNode = node(staleNode.hostname(), staleNode.state()); if (freshNode.isEmpty()) { freshNode = node(staleNode.hostname()); if (freshNode.isEmpty()) { return Optional.empty(); } } if (Objects.equals(freshNode.get().allocation().map(Allocation::owner), staleNode.allocation().map(Allocation::owner))) { NodeMutex nodeMutex = new NodeMutex(freshNode.get(), lockToClose); lockToClose = null; return Optional.of(nodeMutex); } staleNode = freshNode.get(); } finally { if (lockToClose != null) lockToClose.close(); } } throw new IllegalStateException("Giving up (after " + maxRetries + " attempts) " + "fetching an up to date node under lock: " + node.hostname()); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public Optional<NodeMutex> lockAndGet(String hostname) { return node(hostname).flatMap(this::lockAndGet); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public NodeMutex lockAndGetRequired(Node node) { return lockAndGet(node).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + node.hostname() + "'")); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public NodeMutex lockAndGetRequired(String hostname) { return lockAndGet(hostname).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + hostname + "'")); } private Mutex lock(Node node) { return node.allocation().isPresent() ? 
lock(node.allocation().get().owner()) : lockUnallocated(); } private Node requireNode(String hostname) { return node(hostname).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + hostname + "'")); } private void illegal(String message) { throw new IllegalArgumentException(message); } /** Returns whether node should be parked when deallocated by given agent */ private static boolean parkOnDeallocationOf(Node node, Agent agent) { if (node.state() == Node.State.parked) return false; if (agent == Agent.operator) return false; if (!node.type().isHost() && node.status().wantToDeprovision()) return false; boolean retirementRequestedByOperator = node.status().wantToRetire() && node.history().event(History.Event.Type.wantToRetire) .map(History.Event::agent) .map(a -> a == Agent.operator) .orElse(false); return node.status().wantToDeprovision() || node.status().wantToRebuild() || retirementRequestedByOperator; } /** The different ways a host can be decommissioned */ private enum DecommissionOperation { deprovision, rebuild, } }
class Nodes { private static final Logger log = Logger.getLogger(Nodes.class.getName()); private final CuratorDatabaseClient db; private final Zone zone; private final Clock clock; private final Orchestrator orchestrator; public Nodes(CuratorDatabaseClient db, Zone zone, Clock clock, Orchestrator orchestrator) { this.zone = zone; this.clock = clock; this.db = db; this.orchestrator = orchestrator; } /** Read and write all nodes to make sure they are stored in the latest version of the serialized format */ public void rewrite() { Instant start = clock.instant(); int nodesWritten = 0; for (Node.State state : Node.State.values()) { List<Node> nodes = db.readNodes(state); db.writeTo(state, nodes, Agent.system, Optional.empty()); nodesWritten += nodes.size(); } Instant end = clock.instant(); log.log(Level.INFO, String.format("Rewrote %d nodes in %s", nodesWritten, Duration.between(start, end))); } /** * Finds and returns the node with the hostname in any of the given states, or empty if not found * * @param hostname the full host name of the node * @param inState the states the node may be in. If no states are given, it will be returned from any state * @return the node, or empty if it was not found in any of the given states */ public Optional<Node> node(String hostname, Node.State... inState) { return db.readNode(hostname, inState); } /** * Returns a list of nodes in this repository in any of the given states * * @param inState the states to return nodes from. If no states are given, all nodes of the given type are returned */ public NodeList list(Node.State... inState) { return NodeList.copyOf(db.readNodes(inState)); } /** Returns a locked list of all nodes in this repository */ public LockedNodeList list(Mutex lock) { return new LockedNodeList(list().asList(), lock); } /** * Returns whether the zone managed by this node repository seems to be working. 
* If too many nodes are not responding, there is probably some zone-wide issue * and we should probably refrain from making changes to it. */ public boolean isWorking() { NodeList activeNodes = list(Node.State.active); if (activeNodes.size() <= 5) return true; NodeList downNodes = activeNodes.down(); return ! ( (double)downNodes.size() / (double)activeNodes.size() > 0.2 ); } /** Adds a list of newly created reserved nodes to the node repository */ public List<Node> addReservedNodes(LockedNodeList nodes) { for (Node node : nodes) { if ( node.flavor().getType() != Flavor.Type.DOCKER_CONTAINER) illegal("Cannot add " + node + ": This is not a child node"); if (node.allocation().isEmpty()) illegal("Cannot add " + node + ": Child nodes need to be allocated"); Optional<Node> existing = node(node.hostname()); if (existing.isPresent()) illegal("Cannot add " + node + ": A node with this name already exists (" + existing.get() + ", " + existing.get().history() + "). Node to be added: " + node + ", " + node.history()); } return db.addNodesInState(nodes.asList(), Node.State.reserved, Agent.system); } /** * Adds a list of (newly created) nodes to the node repository as provisioned nodes. * If any of the nodes already exists in the deprovisioned state, the new node will be merged * with the history of that node. 
*/ public List<Node> addNodes(List<Node> nodes, Agent agent) { try (Mutex lock = lockUnallocated()) { List<Node> nodesToAdd = new ArrayList<>(); List<Node> nodesToRemove = new ArrayList<>(); for (int i = 0; i < nodes.size(); i++) { var node = nodes.get(i); for (int j = 0; j < i; j++) { if (node.equals(nodes.get(j))) illegal("Cannot add nodes: " + node + " is duplicated in the argument list"); } Optional<Node> existing = node(node.hostname()); if (existing.isPresent()) { if (existing.get().state() != Node.State.deprovisioned) illegal("Cannot add " + node + ": A node with this name already exists"); node = node.with(existing.get().history()); node = node.with(existing.get().reports()); node = node.with(node.status().withFailCount(existing.get().status().failCount())); if (existing.get().status().firmwareVerifiedAt().isPresent()) node = node.with(node.status().withFirmwareVerifiedAt(existing.get().status().firmwareVerifiedAt().get())); boolean rebuilding = existing.get().status().wantToRebuild(); if (rebuilding) { node = node.with(node.status().withWantToRetire(existing.get().status().wantToRetire(), false, rebuilding)); } nodesToRemove.add(existing.get()); } nodesToAdd.add(node); } NestedTransaction transaction = new NestedTransaction(); List<Node> resultingNodes = db.addNodesInState(IP.Config.verify(nodesToAdd, list(lock)), Node.State.provisioned, agent, transaction); db.removeNodes(nodesToRemove, transaction); transaction.commit(); return resultingNodes; } } /** Sets a list of nodes ready and returns the nodes in the ready state */ public List<Node> setReady(List<Node> nodes, Agent agent, String reason) { try (Mutex lock = lockUnallocated()) { List<Node> nodesWithResetFields = nodes.stream() .map(node -> { if (node.state() != Node.State.provisioned && node.state() != Node.State.dirty) illegal("Can not set " + node + " ready. 
It is not provisioned or dirty."); return node.withWantToRetire(false, false, false, Agent.system, clock.instant()); }) .collect(Collectors.toList()); return db.writeTo(Node.State.ready, nodesWithResetFields, agent, Optional.of(reason)); } } public Node setReady(String hostname, Agent agent, String reason) { Node nodeToReady = requireNode(hostname); if (nodeToReady.state() == Node.State.ready) return nodeToReady; return setReady(List.of(nodeToReady), agent, reason).get(0); } /** Reserve nodes. This method does <b>not</b> lock the node repository */ public List<Node> reserve(List<Node> nodes) { return db.writeTo(Node.State.reserved, nodes, Agent.application, Optional.empty()); } /** Activate nodes. This method does <b>not</b> lock the node repository */ public List<Node> activate(List<Node> nodes, NestedTransaction transaction) { return db.writeTo(Node.State.active, nodes, Agent.application, Optional.empty(), transaction); } /** * Sets a list of nodes to have their allocation removable (active to inactive) in the node repository. * * @param application the application the nodes belong to * @param nodes the nodes to make removable. These nodes MUST be in the active state. */ public void setRemovable(ApplicationId application, List<Node> nodes) { try (Mutex lock = lock(application)) { List<Node> removableNodes = nodes.stream() .map(node -> node.with(node.allocation().get().removable(true))) .collect(Collectors.toList()); write(removableNodes, lock); } } /** * Deactivates these nodes in a transaction and returns the nodes in the new state which will hold if the * transaction commits. */ public List<Node> deactivate(List<Node> nodes, ApplicationTransaction transaction) { if ( ! 
zone.environment().isProduction() || zone.system().isCd()) return deallocate(nodes, Agent.application, "Deactivated by application", transaction.nested()); var stateless = NodeList.copyOf(nodes).stateless(); var stateful = NodeList.copyOf(nodes).stateful(); List<Node> written = new ArrayList<>(); written.addAll(deallocate(stateless.asList(), Agent.application, "Deactivated by application", transaction.nested())); written.addAll(db.writeTo(Node.State.inactive, stateful.asList(), Agent.application, Optional.empty(), transaction.nested())); return written; } /** * Fails these nodes in a transaction and returns the nodes in the new state which will hold if the * transaction commits. */ public List<Node> fail(List<Node> nodes, ApplicationTransaction transaction) { return fail(nodes, Agent.application, "Failed by application", transaction.nested()); } public List<Node> fail(List<Node> nodes, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); nodes = fail(nodes, agent, reason, transaction); transaction.commit(); return nodes; } private List<Node> fail(List<Node> nodes, Agent agent, String reason, NestedTransaction transaction) { nodes = nodes.stream() .map(n -> n.withWantToFail(false, agent, clock.instant())) .collect(Collectors.toList()); return db.writeTo(Node.State.failed, nodes, agent, Optional.of(reason), transaction); } /** Move nodes to the dirty state */ public List<Node> deallocate(List<Node> nodes, Agent agent, String reason) { return performOn(NodeList.copyOf(nodes), (node, lock) -> deallocate(node, agent, reason)); } public List<Node> deallocateRecursively(String hostname, Agent agent, String reason) { Node nodeToDirty = node(hostname).orElseThrow(() -> new IllegalArgumentException("Could not deallocate " + hostname + ": Node not found")); List<Node> nodesToDirty = (nodeToDirty.type().isHost() ? 
Stream.concat(list().childrenOf(hostname).asList().stream(), Stream.of(nodeToDirty)) : Stream.of(nodeToDirty)) .filter(node -> node.state() != Node.State.dirty) .collect(Collectors.toList()); List<String> hostnamesNotAllowedToDirty = nodesToDirty.stream() .filter(node -> node.state() != Node.State.provisioned) .filter(node -> node.state() != Node.State.failed) .filter(node -> node.state() != Node.State.parked) .filter(node -> node.state() != Node.State.breakfixed) .map(Node::hostname) .collect(Collectors.toList()); if ( ! hostnamesNotAllowedToDirty.isEmpty()) illegal("Could not deallocate " + nodeToDirty + ": " + hostnamesNotAllowedToDirty + " are not in states [provisioned, failed, parked, breakfixed]"); return nodesToDirty.stream().map(node -> deallocate(node, agent, reason)).collect(Collectors.toList()); } /** * Set a node dirty or parked, allowed if it is in the provisioned, inactive, failed or parked state. * Use this to clean newly provisioned nodes or to recycle failed nodes which have been repaired or put on hold. */ public Node deallocate(Node node, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); Node deallocated = deallocate(node, agent, reason, transaction); transaction.commit(); return deallocated; } public List<Node> deallocate(List<Node> nodes, Agent agent, String reason, NestedTransaction transaction) { return nodes.stream().map(node -> deallocate(node, agent, reason, transaction)).collect(Collectors.toList()); } public Node deallocate(Node node, Agent agent, String reason, NestedTransaction transaction) { if (parkOnDeallocationOf(node, agent)) { return park(node.hostname(), false, agent, reason, transaction); } else { return db.writeTo(Node.State.dirty, List.of(node), agent, Optional.of(reason), transaction).get(0); } } /** * Fails this node and returns it in its new state. 
* * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node fail(String hostname, Agent agent, String reason) { return fail(hostname, true, agent, reason); } public Node fail(String hostname, boolean keepAllocation, Agent agent, String reason) { return move(hostname, Node.State.failed, agent, keepAllocation, Optional.of(reason)); } /** * Fails all the nodes that are children of hostname before finally failing the hostname itself. * Non-active nodes are failed immediately, while active nodes are marked as wantToFail. * The host is failed if it has no active nodes and marked wantToFail if it has. * * @return all the nodes that were changed by this request */ public List<Node> failOrMarkRecursively(String hostname, Agent agent, String reason) { NodeList children = list().childrenOf(hostname); List<Node> changed = performOn(children, (node, lock) -> failOrMark(node, agent, reason, lock)); if (children.state(Node.State.active).isEmpty()) changed.add(move(hostname, Node.State.failed, agent, true, Optional.of(reason))); else changed.addAll(performOn(NodeList.of(node(hostname).orElseThrow()), (node, lock) -> failOrMark(node, agent, reason, lock))); return changed; } private Node failOrMark(Node node, Agent agent, String reason, Mutex lock) { if (node.state() == Node.State.active) { node = node.withWantToFail(true, agent, clock.instant()); write(node, lock); return node; } else { return move(node.hostname(), Node.State.failed, agent, true, Optional.of(reason)); } } /** * Parks this node and returns it in its new state. 
* * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node park(String hostname, boolean keepAllocation, Agent agent, String reason) { NestedTransaction transaction = new NestedTransaction(); Node parked = park(hostname, keepAllocation, agent, reason, transaction); transaction.commit(); return parked; } private Node park(String hostname, boolean keepAllocation, Agent agent, String reason, NestedTransaction transaction) { return move(hostname, Node.State.parked, agent, keepAllocation, Optional.of(reason), transaction); } /** * Parks all the nodes that are children of hostname before finally parking the hostname itself. * * @return List of all the parked nodes in their new state */ public List<Node> parkRecursively(String hostname, Agent agent, String reason) { return moveRecursively(hostname, Node.State.parked, agent, Optional.of(reason)); } /** * Moves a previously failed or parked node back to the active state. * * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node reactivate(String hostname, Agent agent, String reason) { return move(hostname, Node.State.active, agent, true, Optional.of(reason)); } /** * Moves a host to breakfixed state, removing any children. 
*/ public List<Node> breakfixRecursively(String hostname, Agent agent, String reason) { Node node = requireNode(hostname); try (Mutex lock = lockUnallocated()) { requireBreakfixable(node); NestedTransaction transaction = new NestedTransaction(); List<Node> removed = removeChildren(node, false, transaction); removed.add(move(node.hostname(), Node.State.breakfixed, agent, true, Optional.of(reason), transaction)); transaction.commit(); return removed; } } private List<Node> moveRecursively(String hostname, Node.State toState, Agent agent, Optional<String> reason) { NestedTransaction transaction = new NestedTransaction(); List<Node> moved = list().childrenOf(hostname).asList().stream() .map(child -> move(child.hostname(), toState, agent, true, reason, transaction)) .collect(Collectors.toList()); moved.add(move(hostname, toState, agent, true, reason, transaction)); transaction.commit(); return moved; } /** Move a node to given state */ private Node move(String hostname, Node.State toState, Agent agent, boolean keepAllocation, Optional<String> reason) { NestedTransaction transaction = new NestedTransaction(); Node moved = move(hostname, toState, agent, keepAllocation, reason, transaction); transaction.commit(); return moved; } /** Move a node to given state as part of a transaction */ private Node move(String hostname, Node.State toState, Agent agent, boolean keepAllocation, Optional<String> reason, NestedTransaction transaction) { try (NodeMutex lock = lockAndGetRequired(hostname)) { Node node = lock.node(); if (toState == Node.State.active) { if (node.allocation().isEmpty()) illegal("Could not set " + node + " active: It has no allocation"); if (!keepAllocation) illegal("Could not set " + node + " active: Requested to discard allocation"); for (Node currentActive : list(Node.State.active).owner(node.allocation().get().owner())) { if (node.allocation().get().membership().cluster().equals(currentActive.allocation().get().membership().cluster()) && 
node.allocation().get().membership().index() == currentActive.allocation().get().membership().index()) illegal("Could not set " + node + " active: Same cluster and index as " + currentActive); } } if (!keepAllocation && node.allocation().isPresent()) { node = node.withoutAllocation(); } if (toState == Node.State.deprovisioned) { node = node.with(IP.Config.EMPTY); } return db.writeTo(toState, List.of(node), agent, reason, transaction).get(0); } } /* * This method is used by the REST API to handle readying nodes for new allocations. For Linux * containers this will remove the node from node repository, otherwise the node will be moved to state ready. */ public Node markNodeAvailableForNewAllocation(String hostname, Agent agent, String reason) { Node node = requireNode(hostname); if (node.flavor().getType() == Flavor.Type.DOCKER_CONTAINER && node.type() == NodeType.tenant) { if (node.state() != Node.State.dirty) illegal("Cannot make " + node + " available for new allocation as it is not in state [dirty]"); return removeRecursively(node, true).get(0); } if (node.state() == Node.State.ready) return node; Node parentHost = node.parentHostname().flatMap(this::node).orElse(node); List<String> failureReasons = NodeFailer.reasonsToFailHost(parentHost); if ( ! failureReasons.isEmpty()) illegal(node + " cannot be readied because it has hard failures: " + failureReasons); return setReady(List.of(node), agent, reason).get(0); } /** * Removes all the nodes that are children of hostname before finally removing the hostname itself. 
* * @return a List of all the nodes that have been removed or (for hosts) deprovisioned */ public List<Node> removeRecursively(String hostname) { Node node = requireNode(hostname); return removeRecursively(node, false); } public List<Node> removeRecursively(Node node, boolean force) { try (Mutex lock = lockUnallocated()) { requireRemovable(node, false, force); NestedTransaction transaction = new NestedTransaction(); final List<Node> removed; if (!node.type().isHost()) { removed = List.of(node); db.removeNodes(removed, transaction); } else { removed = removeChildren(node, force, transaction); if (zone.getCloud().dynamicProvisioning()) { db.removeNodes(List.of(node), transaction); } else { move(node.hostname(), Node.State.deprovisioned, Agent.system, false, Optional.empty(), transaction); } removed.add(node); } transaction.commit(); return removed; } } /** Forgets a deprovisioned node. This removes all traces of the node in the node repository. */ public void forget(Node node) { if (node.state() != Node.State.deprovisioned) throw new IllegalArgumentException(node + " must be deprovisioned before it can be forgotten"); if (node.status().wantToRebuild()) throw new IllegalArgumentException(node + " is rebuilding and cannot be forgotten"); NestedTransaction transaction = new NestedTransaction(); db.removeNodes(List.of(node), transaction); transaction.commit(); } private List<Node> removeChildren(Node node, boolean force, NestedTransaction transaction) { List<Node> children = list().childrenOf(node).asList(); children.forEach(child -> requireRemovable(child, true, force)); db.removeNodes(children, transaction); return new ArrayList<>(children); } /** * Throws if the given node cannot be removed. 
Removal is allowed if: * - Tenant node: * - non-recursively: node is unallocated * - recursively: node is unallocated or node is in failed|parked * - Host node: iff in state provisioned|failed|parked * - Child node: * - non-recursively: node in state ready * - recursively: child is in state provisioned|failed|parked|dirty|ready */ private void requireRemovable(Node node, boolean removingRecursively, boolean force) { if (force) return; if (node.type() == NodeType.tenant && node.allocation().isPresent()) { EnumSet<Node.State> removableStates = EnumSet.of(Node.State.failed, Node.State.parked); if (!removingRecursively || !removableStates.contains(node.state())) illegal(node + " is currently allocated and cannot be removed while in " + node.state()); } final Set<Node.State> removableStates; if (node.type().isHost()) { removableStates = EnumSet.of(Node.State.provisioned, Node.State.failed, Node.State.parked); } else { removableStates = removingRecursively ? EnumSet.of(Node.State.provisioned, Node.State.failed, Node.State.parked, Node.State.dirty, Node.State.ready) : EnumSet.of(Node.State.ready); } if (!removableStates.contains(node.state())) illegal(node + " can not be removed while in " + node.state()); } /** * Throws if given node cannot be breakfixed. * Breakfix is allowed if the following is true: * - Node is tenant host * - Node is in zone without dynamic provisioning * - Node is in parked or failed state */ private void requireBreakfixable(Node node) { if (zone.getCloud().dynamicProvisioning()) { illegal("Can not breakfix in zone: " + zone); } if (node.type() != NodeType.host) { illegal(node + " can not be breakfixed as it is not a tenant host"); } Set<Node.State> legalStates = EnumSet.of(Node.State.failed, Node.State.parked); if (! legalStates.contains(node.state())) { illegal(node + " can not be removed as it is not in the states " + legalStates); } } /** * Increases the restart generation of the active nodes matching given filter. 
* * @return the nodes in their new state */ public List<Node> restartActive(Predicate<Node> filter) { return restart(NodeFilter.in(Set.of(Node.State.active)).and(filter)); } /** * Increases the restart generation of the any nodes matching given filter. * * @return the nodes in their new state */ public List<Node> restart(Predicate<Node> filter) { return performOn(filter, (node, lock) -> write(node.withRestart(node.allocation().get().restartGeneration().withIncreasedWanted()), lock)); } /** * Increases the reboot generation of the nodes matching the filter. * * @return the nodes in their new state */ public List<Node> reboot(Predicate<Node> filter) { return performOn(filter, (node, lock) -> write(node.withReboot(node.status().reboot().withIncreasedWanted()), lock)); } /** * Set target OS version of all nodes matching given filter. * * @return the nodes in their new state */ public List<Node> upgradeOs(Predicate<Node> filter, Optional<Version> version) { return performOn(filter, (node, lock) -> { var newStatus = node.status().withOsVersion(node.status().osVersion().withWanted(version)); return write(node.with(newStatus), lock); }); } /** Retire nodes matching given filter */ public List<Node> retire(Predicate<Node> filter, Agent agent, Instant instant) { return performOn(filter, (node, lock) -> write(node.withWantToRetire(true, agent, instant), lock)); } /** Retire and deprovision given host and all of its children */ public List<Node> deprovision(String hostname, Agent agent, Instant instant) { return decommission(hostname, DecommissionOperation.deprovision, agent, instant); } /** Retire and rebuild given host and all of its children */ public List<Node> rebuild(String hostname, Agent agent, Instant instant) { return decommission(hostname, DecommissionOperation.rebuild, agent, instant); } private List<Node> decommission(String hostname, DecommissionOperation op, Agent agent, Instant instant) { Optional<NodeMutex> nodeMutex = lockAndGet(hostname); if 
(nodeMutex.isEmpty()) return List.of(); Node host = nodeMutex.get().node(); if (!host.type().isHost()) throw new IllegalArgumentException("Cannot " + op + " non-host " + host); List<Node> result; boolean wantToDeprovision = op == DecommissionOperation.deprovision; boolean wantToRebuild = op == DecommissionOperation.rebuild; try (NodeMutex lock = nodeMutex.get(); Mutex allocationLock = lockUnallocated()) { host = lock.node(); result = performOn(list(allocationLock).childrenOf(host), (node, nodeLock) -> { Node newNode = node.withWantToRetire(true, wantToDeprovision, wantToRebuild, agent, instant); return write(newNode, nodeLock); }); Node newHost = host.withWantToRetire(true, wantToDeprovision, wantToRebuild, agent, instant); result.add(write(newHost, lock)); } return result; } /** * Writes this node after it has changed some internal state but NOT changed its state field. * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock. * * @param lock already acquired lock * @return the written node for convenience */ public Node write(Node node, Mutex lock) { return write(List.of(node), lock).get(0); } /** * Writes these nodes after they have changed some internal state but NOT changed their state field. * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock. * * @param lock already acquired lock * @return the written nodes for convenience */ public List<Node> write(List<Node> nodes, @SuppressWarnings("unused") Mutex lock) { return db.writeTo(nodes, Agent.system, Optional.empty()); } private List<Node> performOn(Predicate<Node> filter, BiFunction<Node, Mutex, Node> action) { return performOn(list().matching(filter), action); } /** * Performs an operation requiring locking on all nodes matching some filter. 
* * @param action the action to perform * @return the set of nodes on which the action was performed, as they became as a result of the operation */ private List<Node> performOn(NodeList nodes, BiFunction<Node, Mutex, Node> action) { List<Node> unallocatedNodes = new ArrayList<>(); ListMap<ApplicationId, Node> allocatedNodes = new ListMap<>(); for (Node node : nodes) { if (node.allocation().isPresent()) allocatedNodes.put(node.allocation().get().owner(), node); else unallocatedNodes.add(node); } List<Node> resultingNodes = new ArrayList<>(); try (Mutex lock = lockUnallocated()) { for (Node node : unallocatedNodes) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } for (Map.Entry<ApplicationId, List<Node>> applicationNodes : allocatedNodes.entrySet()) { try (Mutex lock = lock(applicationNodes.getKey())) { for (Node node : applicationNodes.getValue()) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } } return resultingNodes; } public boolean canAllocateTenantNodeTo(Node host) { return canAllocateTenantNodeTo(host, zone.getCloud().dynamicProvisioning()); } public boolean suspended(Node node) { try { return orchestrator.getNodeStatus(new HostName(node.hostname())).isSuspended(); } catch (HostNameNotFoundException e) { return false; } } /** Create a lock which provides exclusive rights to making changes to the given application */ public Mutex lock(ApplicationId application) { return db.lock(application); } /** Create a lock with a timeout which provides exclusive rights to making changes to the given application */ public Mutex lock(ApplicationId application, Duration timeout) { return db.lock(application, timeout); } /** Create a lock which provides exclusive rights to modifying unallocated nodes */ public Mutex lockUnallocated() { return 
db.lockInactive(); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public Optional<NodeMutex> lockAndGet(Node node) { Node staleNode = node; final int maxRetries = 4; for (int i = 0; i < maxRetries; ++i) { Mutex lockToClose = lock(staleNode); try { Optional<Node> freshNode = node(staleNode.hostname(), staleNode.state()); if (freshNode.isEmpty()) { freshNode = node(staleNode.hostname()); if (freshNode.isEmpty()) { return Optional.empty(); } } if (Objects.equals(freshNode.get().allocation().map(Allocation::owner), staleNode.allocation().map(Allocation::owner))) { NodeMutex nodeMutex = new NodeMutex(freshNode.get(), lockToClose); lockToClose = null; return Optional.of(nodeMutex); } staleNode = freshNode.get(); } finally { if (lockToClose != null) lockToClose.close(); } } throw new IllegalStateException("Giving up (after " + maxRetries + " attempts) " + "fetching an up to date node under lock: " + node.hostname()); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public Optional<NodeMutex> lockAndGet(String hostname) { return node(hostname).flatMap(this::lockAndGet); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public NodeMutex lockAndGetRequired(Node node) { return lockAndGet(node).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + node.hostname() + "'")); } /** Returns the unallocated/application lock, and the node acquired under that lock. */ public NodeMutex lockAndGetRequired(String hostname) { return lockAndGet(hostname).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + hostname + "'")); } private Mutex lock(Node node) { return node.allocation().isPresent() ? 
lock(node.allocation().get().owner()) : lockUnallocated(); } private Node requireNode(String hostname) { return node(hostname).orElseThrow(() -> new NoSuchNodeException("No node with hostname '" + hostname + "'")); } private void illegal(String message) { throw new IllegalArgumentException(message); } /** Returns whether node should be parked when deallocated by given agent */ private static boolean parkOnDeallocationOf(Node node, Agent agent) { if (node.state() == Node.State.parked) return false; if (agent == Agent.operator) return false; if (!node.type().isHost() && node.status().wantToDeprovision()) return false; boolean retirementRequestedByOperator = node.status().wantToRetire() && node.history().event(History.Event.Type.wantToRetire) .map(History.Event::agent) .map(a -> a == Agent.operator) .orElse(false); return node.status().wantToDeprovision() || node.status().wantToRebuild() || retirementRequestedByOperator; } /** The different ways a host can be decommissioned */ private enum DecommissionOperation { deprovision, rebuild, } }