comment
stringlengths
22
3.02k
method_body
stringlengths
46
368k
target_code
stringlengths
0
181
method_body_after
stringlengths
12
368k
context_before
stringlengths
11
634k
context_after
stringlengths
11
632k
Don't use `+ exception`. Instead pass it as param `LOGGER.logExceptionAsError(new IllegalArgumentException( "Can't parse Feature Flag configuration setting value.", exception))`. Or use `exception.getMessage()`.
private void updateSettingValue() { try { super.setValue(writeFeatureFlagConfigurationSetting(this)); } catch (IOException exception) { LOGGER.logExceptionAsError(new IllegalArgumentException( "Can't parse Feature Flag configuration setting value. Exception:" + exception)); } }
"Can't parse Feature Flag configuration setting value. Exception:" + exception));
private void updateSettingValue() { try { super.setValue(writeFeatureFlagConfigurationSetting(this)); } catch (IOException exception) { LOGGER.logExceptionAsError(new IllegalArgumentException( "Can't parse Feature Flag configuration setting value.", exception)); } }
class FeatureFlagConfigurationSetting extends ConfigurationSetting { private static final ClientLogger LOGGER = new ClientLogger(FeatureFlagConfigurationSetting.class); private static final String FEATURE_FLAG_CONTENT_TYPE = "application/vnd.microsoft.appconfig.ff+json;charset=utf-8"; private String featureId; private boolean isEnabled; private String description; private String displayName; private List<FeatureFlagFilter> clientFilters; /** * A prefix is used to construct a feature flag configuration setting's key. */ public static final String KEY_PREFIX = ".appconfig.featureflag/"; /** * The constructor for a feature flag configuration setting. * * @param featureId A feature flag identification value that used to construct in setting's key. The key of setting * is {@code KEY_PREFIX} concatenate {@code featureId}. * @param isEnabled A boolean value to turn on/off the feature flag setting. */ public FeatureFlagConfigurationSetting(String featureId, boolean isEnabled) { this.featureId = featureId; this.isEnabled = isEnabled; super.setKey(KEY_PREFIX + featureId); super.setContentType(FEATURE_FLAG_CONTENT_TYPE); } /** * Sets the key of this setting. * * @param key The key to associate with this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setKey(String key) { super.setKey(key); return this; } /** * Sets the value of this setting. * * @param value The value to associate with this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. 
*/ @Override public FeatureFlagConfigurationSetting setValue(String value) { super.setValue(value); final FeatureFlagConfigurationSetting updatedSetting = readFeatureFlagConfigurationSettingValue(value); this.featureId = updatedSetting.getFeatureId(); this.description = updatedSetting.getDescription(); this.isEnabled = updatedSetting.isEnabled(); this.displayName = updatedSetting.getDisplayName(); if (updatedSetting.getClientFilters() != null) { this.clientFilters = StreamSupport.stream(updatedSetting.getClientFilters().spliterator(), false) .collect(Collectors.toList()); } else { this.clientFilters = null; } return this; } /** * Sets the label of this configuration setting. {@link * set. * * @param label The label of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setLabel(String label) { super.setLabel(label); return this; } /** * Sets the content type. By default, the content type is null. * * @param contentType The content type of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setContentType(String contentType) { super.setContentType(contentType); return this; } /** * Sets the ETag for this configuration setting. * * @param etag The ETag for the configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setETag(String etag) { super.setETag(etag); return this; } /** * Sets the tags for this configuration setting. * * @param tags The tags to add to this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setTags(Map<String, String> tags) { super.setTags(tags); return this; } /** * Get the feature ID of this configuration setting. 
* * @return the feature ID of this configuration setting. */ public String getFeatureId() { return featureId; } /** * Set the feature ID of this configuration setting. * * @param featureId the feature ID of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setFeatureId(String featureId) { this.featureId = featureId; super.setKey(KEY_PREFIX + featureId); updateSettingValue(); return this; } /** * Get the boolean indicator to show if the setting is turn on or off. * * @return the boolean indicator to show if the setting is turn on or off. */ public boolean isEnabled() { return this.isEnabled; } /** * Set the boolean indicator to show if the setting is turn on or off. * * @param isEnabled the boolean indicator to show if the setting is turn on or off. * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setEnabled(boolean isEnabled) { this.isEnabled = isEnabled; updateSettingValue(); return this; } /** * Get the description of this configuration setting. * * @return the description of this configuration setting. */ public String getDescription() { return description; } /** * Set the description of this configuration setting. * * @param description the description of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setDescription(String description) { this.description = description; updateSettingValue(); return this; } /** * Get the display name of this configuration setting. * * @return the display name of this configuration setting. 
*/ public String getDisplayName() { return displayName; } /** * Set the display name of this configuration setting. * * @param displayName the display name of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setDisplayName(String displayName) { this.displayName = displayName; updateSettingValue(); return this; } /** * Gets the feature flag filters of this configuration setting. * * @return the feature flag filters of this configuration setting. */ public List<FeatureFlagFilter> getClientFilters() { return clientFilters; } /** * Sets the feature flag filters of this configuration setting. * * @param clientFilters the feature flag filters of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setClientFilters(List<FeatureFlagFilter> clientFilters) { this.clientFilters = clientFilters; updateSettingValue(); return this; } /** * Add a feature flag filter to this configuration setting. * * @param clientFilter a feature flag filter to add to this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ public FeatureFlagConfigurationSetting addClientFilter(FeatureFlagFilter clientFilter) { clientFilters.add(clientFilter); updateSettingValue(); return this; } }
class FeatureFlagConfigurationSetting extends ConfigurationSetting { private static final ClientLogger LOGGER = new ClientLogger(FeatureFlagConfigurationSetting.class); private static final String FEATURE_FLAG_CONTENT_TYPE = "application/vnd.microsoft.appconfig.ff+json;charset=utf-8"; private String featureId; private boolean isEnabled; private String description; private String displayName; private List<FeatureFlagFilter> clientFilters; /** * A prefix is used to construct a feature flag configuration setting's key. */ public static final String KEY_PREFIX = ".appconfig.featureflag/"; /** * The constructor for a feature flag configuration setting. * * @param featureId A feature flag identification value that used to construct in setting's key. The key of setting * is {@code KEY_PREFIX} concatenate {@code featureId}. * @param isEnabled A boolean value to turn on/off the feature flag setting. */ public FeatureFlagConfigurationSetting(String featureId, boolean isEnabled) { this.featureId = featureId; this.isEnabled = isEnabled; super.setKey(KEY_PREFIX + featureId); super.setContentType(FEATURE_FLAG_CONTENT_TYPE); } /** * Sets the key of this setting. * * @param key The key to associate with this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setKey(String key) { super.setKey(key); return this; } /** * Sets the value of this setting. * * @param value The value to associate with this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. 
*/ @Override public FeatureFlagConfigurationSetting setValue(String value) { super.setValue(value); final FeatureFlagConfigurationSetting updatedSetting = readFeatureFlagConfigurationSettingValue(value); this.featureId = updatedSetting.getFeatureId(); this.description = updatedSetting.getDescription(); this.isEnabled = updatedSetting.isEnabled(); this.displayName = updatedSetting.getDisplayName(); if (updatedSetting.getClientFilters() != null) { this.clientFilters = StreamSupport.stream(updatedSetting.getClientFilters().spliterator(), false) .collect(Collectors.toList()); } else { this.clientFilters = null; } return this; } /** * Sets the label of this configuration setting. {@link * set. * * @param label The label of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setLabel(String label) { super.setLabel(label); return this; } /** * Sets the content type. By default, the content type is null. * * @param contentType The content type of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setContentType(String contentType) { super.setContentType(contentType); return this; } /** * Sets the ETag for this configuration setting. * * @param etag The ETag for the configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setETag(String etag) { super.setETag(etag); return this; } /** * Sets the tags for this configuration setting. * * @param tags The tags to add to this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setTags(Map<String, String> tags) { super.setTags(tags); return this; } /** * Get the feature ID of this configuration setting. 
* * @return the feature ID of this configuration setting. */ public String getFeatureId() { return featureId; } /** * Set the feature ID of this configuration setting. * * @param featureId the feature ID of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setFeatureId(String featureId) { this.featureId = featureId; super.setKey(KEY_PREFIX + featureId); updateSettingValue(); return this; } /** * Get the boolean indicator to show if the setting is turn on or off. * * @return the boolean indicator to show if the setting is turn on or off. */ public boolean isEnabled() { return this.isEnabled; } /** * Set the boolean indicator to show if the setting is turn on or off. * * @param isEnabled the boolean indicator to show if the setting is turn on or off. * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setEnabled(boolean isEnabled) { this.isEnabled = isEnabled; updateSettingValue(); return this; } /** * Get the description of this configuration setting. * * @return the description of this configuration setting. */ public String getDescription() { return description; } /** * Set the description of this configuration setting. * * @param description the description of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setDescription(String description) { this.description = description; updateSettingValue(); return this; } /** * Get the display name of this configuration setting. * * @return the display name of this configuration setting. 
*/ public String getDisplayName() { return displayName; } /** * Set the display name of this configuration setting. * * @param displayName the display name of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setDisplayName(String displayName) { this.displayName = displayName; updateSettingValue(); return this; } /** * Gets the feature flag filters of this configuration setting. * * @return the feature flag filters of this configuration setting. */ public List<FeatureFlagFilter> getClientFilters() { return clientFilters; } /** * Sets the feature flag filters of this configuration setting. * * @param clientFilters the feature flag filters of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setClientFilters(List<FeatureFlagFilter> clientFilters) { this.clientFilters = clientFilters; updateSettingValue(); return this; } /** * Add a feature flag filter to this configuration setting. * * @param clientFilter a feature flag filter to add to this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ public FeatureFlagConfigurationSetting addClientFilter(FeatureFlagFilter clientFilter) { clientFilters.add(clientFilter); updateSettingValue(); return this; } }
Also printing that website is created - can be moved to map part of individual observables.
public static boolean runSample(final Azure azure) { final String suffix = ".azurewebsites.net"; final String app1Name = SdkContext.randomResourceName("webapp1-", 20); final String app2Name = SdkContext.randomResourceName("webapp2-", 20); final String app3Name = SdkContext.randomResourceName("webapp3-", 20); final String app4Name = SdkContext.randomResourceName("webapp4-", 20); final String app1Url = app1Name + suffix; final String app2Url = app2Name + suffix; final String app3Url = app3Name + suffix; final String app4Url = app4Name + suffix; final String planName = SdkContext.randomResourceName("jplan_", 15); final String rgName = SdkContext.randomResourceName("rg1NEMV_", 24); try { System.out.println("Creating web app " + app1Name + " in resource group " + rgName + "..."); Observable<?> app1Observable = azure.webApps().define(app1Name) .withRegion(Region.US_WEST) .withNewResourceGroup(rgName) .withNewWindowsPlan(PricingTier.STANDARD_S1) .withJavaVersion(JavaVersion.JAVA_8_NEWEST) .withWebContainer(WebContainer.TOMCAT_8_0_NEWEST) .createAsync() .flatMap(new Func1<Indexable, Observable<?>>() { @Override public Observable<?> call(Indexable indexable) { if (indexable instanceof WebApp) { WebApp app = (WebApp) indexable; System.out.println("Created web app " + app.name()); return Observable.merge( Observable.just(indexable), app.getPublishingProfileAsync() .map(new Func1<PublishingProfile, PublishingProfile>() { @Override public PublishingProfile call(PublishingProfile publishingProfile) { System.out.println("Deploying helloworld.war to " + app1Name + " through FTP..."); Utils.uploadFileToFtp(publishingProfile, "helloworld.war", ManageWebAppSourceControlAsync.class.getResourceAsStream("/helloworld.war")); System.out.println("Deployment helloworld.war to web app " + app1Name + " completed"); return publishingProfile; } })); } return Observable.just(indexable); } }); System.out.println("Creating another web app " + app2Name + " in resource group " + rgName + "..."); 
System.out.println("Creating another web app " + app3Name + "..."); System.out.println("Creating another web app " + app4Name + "..."); Observable<?> app234Observable = azure.appServices().appServicePlans() .getByResourceGroupAsync(rgName, planName) .flatMap(new Func1<AppServicePlan, Observable<Indexable>>() { @Override public Observable<Indexable> call(AppServicePlan plan) { return Observable.merge( azure.webApps().define(app2Name) .withExistingWindowsPlan(plan) .withExistingResourceGroup(rgName) .withLocalGitSourceControl() .withJavaVersion(JavaVersion.JAVA_8_NEWEST) .withWebContainer(WebContainer.TOMCAT_8_0_NEWEST) .createAsync(), azure.webApps().define(app3Name) .withExistingWindowsPlan(plan) .withNewResourceGroup(rgName) .defineSourceControl() .withPublicGitRepository( "https: .withBranch("master") .attach() .createAsync(), azure.webApps() .define(app4Name) .withExistingWindowsPlan(plan) .withExistingResourceGroup(rgName) .createAsync()); } }) .flatMap(new Func1<Indexable, Observable<?>>() { @Override public Observable<?> call(Indexable indexable) { if (indexable instanceof WebApp) { WebApp app = (WebApp) indexable; System.out.println("Created web app " + app.name()); if (!app.name().equals(app2Name)) { return Observable.just(indexable); } return app.getPublishingProfileAsync() .map(new Func1<PublishingProfile, PublishingProfile>() { @Override public PublishingProfile call(PublishingProfile profile) { System.out.println("Deploying a local Tomcat source to " + app2Name + " through Git..."); Git git = null; try { git = Git .init() .setDirectory(new File( ManageWebAppSourceControlAsync.class.getResource( "/azure-samples-appservice-helloworld/") .getPath())) .call(); git.add().addFilepattern(".").call(); git.commit().setMessage("Initial commit").call(); PushCommand command = git.push(); command.setRemote(profile.gitUrl()); command.setCredentialsProvider(new UsernamePasswordCredentialsProvider(profile.gitUsername(), profile.gitPassword())); command.setRefSpecs(new 
RefSpec("master:master")); command.setForce(true); command.call(); } catch (GitAPIException e) { e.printStackTrace(); } System.out.println("Deployment to web app " + app2Name + " completed"); return profile; } }); } return Observable.just(indexable); } }); Observable.merge(app1Observable, app234Observable).toBlocking().subscribe(); System.out.println("Warming up " + app1Url + "/helloworld..."); curl("http: System.out.println("Warming up " + app2Url + "/helloworld..."); curl("http: System.out.println("Warming up " + app3Url + "..."); curl("http: System.out.println("Warming up " + app4Url + "..."); curl("http: Thread.sleep(5000); System.out.println("CURLing " + app1Url + "/helloworld..."); System.out.println(curl("http: System.out.println("CURLing " + app2Url + "/helloworld..."); System.out.println(curl("http: System.out.println("CURLing " + app3Url + "..."); System.out.println(curl("http: System.out.println("CURLing " + app4Url + "..."); System.out.println(curl("http: return true; } catch (Exception e) { System.err.println(e.getMessage()); e.printStackTrace(); } finally { try { System.out.println("Deleting Resource Group: " + rgName); azure.resourceGroups().beginDeleteByNameAsync(rgName).toBlocking().subscribe(); System.out.println("Deleted Resource Group: " + rgName); } catch (NullPointerException npe) { System.out.println("Did not create any resources in Azure. No clean up is necessary"); } catch (Exception g) { g.printStackTrace(); } } return false; }
Observable<?> app234Observable = azure.appServices().appServicePlans()
public static boolean runSample(final Azure azure) { final String suffix = ".azurewebsites.net"; final String app1Name = SdkContext.randomResourceName("webapp1-", 20); final String app2Name = SdkContext.randomResourceName("webapp2-", 20); final String app3Name = SdkContext.randomResourceName("webapp3-", 20); final String app4Name = SdkContext.randomResourceName("webapp4-", 20); final String app1Url = app1Name + suffix; final String app2Url = app2Name + suffix; final String app3Url = app3Name + suffix; final String app4Url = app4Name + suffix; final String planName = SdkContext.randomResourceName("jplan_", 15); final String rgName = SdkContext.randomResourceName("rg1NEMV_", 24); try { System.out.println("Creating web app " + app1Name + " in resource group " + rgName + "..."); Observable<?> app1Observable = azure.webApps().define(app1Name) .withRegion(Region.US_WEST) .withNewResourceGroup(rgName) .withNewWindowsPlan(PricingTier.STANDARD_S1) .withJavaVersion(JavaVersion.JAVA_8_NEWEST) .withWebContainer(WebContainer.TOMCAT_8_0_NEWEST) .createAsync() .flatMap(new Func1<Indexable, Observable<?>>() { @Override public Observable<?> call(Indexable indexable) { if (indexable instanceof WebApp) { WebApp app = (WebApp) indexable; System.out.println("Created web app " + app.name()); return Observable.merge( Observable.just(indexable), app.getPublishingProfileAsync() .map(new Func1<PublishingProfile, PublishingProfile>() { @Override public PublishingProfile call(PublishingProfile publishingProfile) { System.out.println("Deploying helloworld.war to " + app1Name + " through FTP..."); Utils.uploadFileToFtp(publishingProfile, "helloworld.war", ManageWebAppSourceControlAsync.class.getResourceAsStream("/helloworld.war")); System.out.println("Deployment helloworld.war to web app " + app1Name + " completed"); return publishingProfile; } })); } return Observable.just(indexable); } }); System.out.println("Creating another web app " + app2Name + " in resource group " + rgName + "..."); 
System.out.println("Creating another web app " + app3Name + "..."); System.out.println("Creating another web app " + app4Name + "..."); Observable<?> app234Observable = azure.appServices().appServicePlans() .getByResourceGroupAsync(rgName, planName) .flatMap(new Func1<AppServicePlan, Observable<Indexable>>() { @Override public Observable<Indexable> call(AppServicePlan plan) { return Observable.merge( azure.webApps().define(app2Name) .withExistingWindowsPlan(plan) .withExistingResourceGroup(rgName) .withLocalGitSourceControl() .withJavaVersion(JavaVersion.JAVA_8_NEWEST) .withWebContainer(WebContainer.TOMCAT_8_0_NEWEST) .createAsync(), azure.webApps().define(app3Name) .withExistingWindowsPlan(plan) .withNewResourceGroup(rgName) .defineSourceControl() .withPublicGitRepository( "https: .withBranch("master") .attach() .createAsync(), azure.webApps() .define(app4Name) .withExistingWindowsPlan(plan) .withExistingResourceGroup(rgName) .createAsync()); } }) .flatMap(new Func1<Indexable, Observable<?>>() { @Override public Observable<?> call(Indexable indexable) { if (indexable instanceof WebApp) { WebApp app = (WebApp) indexable; System.out.println("Created web app " + app.name()); if (!app.name().equals(app2Name)) { return Observable.just(indexable); } return app.getPublishingProfileAsync() .map(new Func1<PublishingProfile, PublishingProfile>() { @Override public PublishingProfile call(PublishingProfile profile) { System.out.println("Deploying a local Tomcat source to " + app2Name + " through Git..."); Git git = null; try { git = Git .init() .setDirectory(new File( ManageWebAppSourceControlAsync.class.getResource( "/azure-samples-appservice-helloworld/") .getPath())) .call(); git.add().addFilepattern(".").call(); git.commit().setMessage("Initial commit").call(); PushCommand command = git.push(); command.setRemote(profile.gitUrl()); command.setCredentialsProvider(new UsernamePasswordCredentialsProvider(profile.gitUsername(), profile.gitPassword())); command.setRefSpecs(new 
RefSpec("master:master")); command.setForce(true); command.call(); } catch (GitAPIException e) { e.printStackTrace(); } System.out.println("Deployment to web app " + app2Name + " completed"); return profile; } }); } return Observable.just(indexable); } }); Observable.merge(app1Observable, app234Observable).toBlocking().subscribe(); System.out.println("Warming up " + app1Url + "/helloworld..."); curl("http: System.out.println("Warming up " + app2Url + "/helloworld..."); curl("http: System.out.println("Warming up " + app3Url + "..."); curl("http: System.out.println("Warming up " + app4Url + "..."); curl("http: Thread.sleep(5000); System.out.println("CURLing " + app1Url + "/helloworld..."); System.out.println(curl("http: System.out.println("CURLing " + app2Url + "/helloworld..."); System.out.println(curl("http: System.out.println("CURLing " + app3Url + "..."); System.out.println(curl("http: System.out.println("CURLing " + app4Url + "..."); System.out.println(curl("http: return true; } catch (Exception e) { System.err.println(e.getMessage()); e.printStackTrace(); } finally { try { System.out.println("Deleting Resource Group: " + rgName); azure.resourceGroups().beginDeleteByNameAsync(rgName).toBlocking().subscribe(); System.out.println("Deleted Resource Group: " + rgName); } catch (NullPointerException npe) { System.out.println("Did not create any resources in Azure. No clean up is necessary"); } catch (Exception g) { g.printStackTrace(); } } return false; }
class ManageWebAppSourceControlAsync { private static OkHttpClient httpClient; /** * Main function which runs the actual sample. * @param azure instance of the azure client * @return true if sample runs successfully */ /** * Main entry point. * @param args the parameters */ public static void main(String[] args) { try { final File credFile = new File(System.getenv("AZURE_AUTH_LOCATION")); Azure azure = Azure .configure() .withLogLevel(LogLevel.BASIC) .authenticate(credFile) .withDefaultSubscription(); System.out.println("Selected subscription: " + azure.subscriptionId()); runSample(azure); } catch (Exception e) { System.out.println(e.getMessage()); e.printStackTrace(); } } private static String curl(String url) { Request request = new Request.Builder().url(url).get().build(); try { return httpClient.newCall(request).execute().body().string(); } catch (IOException e) { return null; } } static { httpClient = new OkHttpClient.Builder().readTimeout(1, TimeUnit.MINUTES).build(); } }
class ManageWebAppSourceControlAsync { private static OkHttpClient httpClient; /** * Main function which runs the actual sample. * @param azure instance of the azure client * @return true if sample runs successfully */ /** * Main entry point. * @param args the parameters */ public static void main(String[] args) { try { final File credFile = new File(System.getenv("AZURE_AUTH_LOCATION")); Azure azure = Azure .configure() .withLogLevel(LogLevel.BASIC) .authenticate(credFile) .withDefaultSubscription(); System.out.println("Selected subscription: " + azure.subscriptionId()); runSample(azure); } catch (Exception e) { System.out.println(e.getMessage()); e.printStackTrace(); } } private static String curl(String url) { Request request = new Request.Builder().url(url).get().build(); try { return httpClient.newCall(request).execute().body().string(); } catch (IOException e) { return null; } } static { httpClient = new OkHttpClient.Builder().readTimeout(1, TimeUnit.MINUTES).build(); } }
The exception is not clear after change the comparison in the if branch.
public void renamePath(String srcPath, String destPath, Map<String, String> loadProperties) throws UserException { WildcardURI srcPathUri = new WildcardURI(srcPath); WildcardURI destPathUri = new WildcardURI(destPath); if ((srcPathUri.getAuthority() == null && destPathUri.getAuthority() != null) || (srcPathUri.getAuthority() != null && destPathUri.getAuthority() == null) || (srcPathUri.getAuthority() != null && destPathUri.getAuthority() != null && (!srcPathUri.getAuthority().trim().equals(destPathUri.getAuthority().trim())))) { throw new UserException( "only allow rename in same file system"); } HdfsFs fileSystem = getFileSystem(srcPath, loadProperties, null); Path srcfilePath = new Path(srcPathUri.getPath()); Path destfilePath = new Path(destPathUri.getPath()); try { boolean isRenameSuccess = fileSystem.getDFSFileSystem().rename(srcfilePath, destfilePath); if (!isRenameSuccess) { throw new UserException("failed to rename path from " + srcPath + " to " + destPath); } } catch (IOException e) { LOG.error("errors while rename path from " + srcPath + " to " + destPath); throw new UserException("errors while rename " + srcPath + "to " + destPath); } }
HdfsFs fileSystem = getFileSystem(srcPath, loadProperties, null);
public void renamePath(String srcPath, String destPath, Map<String, String> loadProperties) throws UserException { WildcardURI srcPathUri = new WildcardURI(srcPath); WildcardURI destPathUri = new WildcardURI(destPath); boolean srcAuthorityNull = (srcPathUri.getAuthority() == null); boolean destAuthorityNull = (destPathUri.getAuthority() == null); if (!srcAuthorityNull || !destAuthorityNull) { if (!srcAuthorityNull && !destAuthorityNull && !srcPathUri.getAuthority().trim().equals(destPathUri.getAuthority().trim())) { throw new UserException( "only allow rename in same file system"); } else { throw new UserException("Different authority info between srcPath: " + srcPath + " and destPath: " + destPath); } } HdfsFs fileSystem = getFileSystem(srcPath, loadProperties, null); Path srcfilePath = new Path(srcPathUri.getPath()); Path destfilePath = new Path(destPathUri.getPath()); try { boolean isRenameSuccess = fileSystem.getDFSFileSystem().rename(srcfilePath, destfilePath); if (!isRenameSuccess) { throw new UserException("failed to rename path from " + srcPath + " to " + destPath); } } catch (IOException e) { LOG.error("errors while rename path from " + srcPath + " to " + destPath); throw new UserException("errors while rename " + srcPath + "to " + destPath); } }
// ---------------------------------------------------------------------------
// NOTE(review): this region is a whitespace-mangled paste; several string
// literals appear truncated where a "://" sequence was stripped as if it were
// a line comment (e.g. `String host = scheme + ": ` -- presumably
// `scheme + "://" + pathUri.getAuthority();` -- TODO confirm against the
// upstream file). The code below is kept byte-identical; only these comment
// lines were added.
//
// Contents of this span:
//  - HdfsFsManager class header, logger, and the scheme / configuration-key
//    constants for hdfs, viewfs, s3/s3a, oss, cos, ks3, obs, tos, azure
//    (abfs/abfss/adl/wasb/wasbs) and gcs back-ends.
//  - Constructor: creates the file-system cache and stream manager, sizes the
//    read/write buffers from Config.hdfs_read/write_buffer_size_kb, and
//    schedules a FileSystemExpirationChecker on a single-thread pool.
//  - Two static helpers that convert a (wrapped) Hadoop Configuration into
//    THdfsProperties for thrift consumers.
//  - getFileSystem(): dispatches on the URI scheme to the per-backend
//    getXxxFileSystem() factories; unknown schemes fall through to
//    getUniversalFileSystem().
//  - getDistributedFileSystem() (most of it): hdfs://viewfs handling. Handles
//    missing authority via fs.defaultFS, validates the disable-cache flag,
//    rejects dfs.nameservices / hadoop.security.authentication passed through
//    load properties, and caches one HdfsFs per host + "username,password"
//    identity, double-checking membership under the per-entry lock.
// ---------------------------------------------------------------------------
class HdfsFsManager { private static final Logger LOG = LogManager.getLogger(HdfsFsManager.class); private static final String HDFS_SCHEME = "hdfs"; private static final String VIEWFS_SCHEME = "viewfs"; private static final String S3_SCHEMA = "s3"; private static final String S3A_SCHEME = "s3a"; private static final String OSS_SCHEME = "oss"; private static final String COS_SCHEME = "cosn"; private static final String KS3_SCHEME = "ks3"; private static final String OBS_SCHEME = "obs"; private static final String TOS_SCHEME = "tos"; private static final String ABFS_SCHEMA = "abfs"; private static final String ABFSS_SCHEMA = "abfss"; private static final String ADL_SCHEMA = "adl"; private static final String WASB_SCHEMA = "wasb"; private static final String WASBS_SCHEMA = "wasbs"; private static final String GCS_SCHEMA = "gs"; private static final String USER_NAME_KEY = "username"; private static final String PASSWORD_KEY = "password"; private static final String DFS_NAMESERVICES_KEY = "dfs.nameservices"; private static final String FS_DEFAULTFS_KEY = "fs.defaultFS"; protected static final String FS_HDFS_IMPL_DISABLE_CACHE = "fs.hdfs.impl.disable.cache"; protected static final String FS_S3A_ACCESS_KEY = "fs.s3a.access.key"; protected static final String FS_S3A_SECRET_KEY = "fs.s3a.secret.key"; protected static final String FS_S3A_ENDPOINT = "fs.s3a.endpoint"; protected static final String FS_S3A_IMPL_DISABLE_CACHE = "fs.s3a.impl.disable.cache"; protected static final String FS_S3_IMPL_DISABLE_CACHE = "fs.s3.impl.disable.cache"; protected static final String FS_S3A_CONNECTION_SSL_ENABLED = "fs.s3a.connection.ssl.enabled"; protected static final String FS_S3A_MAX_CONNECTION = "fs.s3a.connection.maximum"; protected static final String FS_S3A_AWS_CRED_PROVIDER = "fs.s3a.aws.credentials.provider"; protected static final String FS_KS3_ACCESS_KEY = "fs.ks3.AccessKey"; protected static final String FS_KS3_SECRET_KEY = "fs.ks3.AccessSecret"; protected static final String 
FS_KS3_ENDPOINT = "fs.ks3.endpoint"; protected static final String FS_KS3_IMPL = "fs.ks3.impl"; protected static final String FS_KS3_CONNECTION_SSL_ENABLED = "fs.ks3.connection.ssl.enabled"; protected static final String FS_KS3_IMPL_DISABLE_CACHE = "fs.ks3.impl.disable.cache"; protected static final String FS_OSS_ACCESS_KEY = "fs.oss.accessKeyId"; protected static final String FS_OSS_SECRET_KEY = "fs.oss.accessKeySecret"; protected static final String FS_OSS_ENDPOINT = "fs.oss.endpoint"; protected static final String FS_OSS_IMPL_DISABLE_CACHE = "fs.oss.impl.disable.cache"; protected static final String FS_OSS_CONNECTION_SSL_ENABLED = "fs.oss.connection.secure.enabled"; protected static final String FS_OSS_IMPL = "fs.oss.impl"; protected static final String FS_COS_ACCESS_KEY = "fs.cosn.userinfo.secretId"; protected static final String FS_COS_SECRET_KEY = "fs.cosn.userinfo.secretKey"; protected static final String FS_COS_ENDPOINT = "fs.cosn.bucket.endpoint_suffix"; protected static final String FS_COS_IMPL_DISABLE_CACHE = "fs.cosn.impl.disable.cache"; protected static final String FS_COS_CONNECTION_SSL_ENABLED = "fs.cos.connection.ssl.enabled"; protected static final String FS_COS_IMPL = "fs.cosn.impl"; protected static final String FS_OBS_ACCESS_KEY = "fs.obs.access.key"; protected static final String FS_OBS_SECRET_KEY = "fs.obs.secret.key"; protected static final String FS_OBS_ENDPOINT = "fs.obs.endpoint"; protected static final String FS_OBS_IMPL_DISABLE_CACHE = "fs.obs.impl.disable.cache"; protected static final String FS_OBS_CONNECTION_SSL_ENABLED = "fs.obs.connection.ssl.enabled"; protected static final String FS_OBS_IMPL = "fs.obs.impl"; protected static final String FS_ABFS_IMPL_DISABLE_CACHE = "fs.abfs.impl.disable.cache"; protected static final String FS_ABFSS_IMPL_DISABLE_CACHE = "fs.abfss.impl.disable.cache"; protected static final String FS_ADL_IMPL_DISABLE_CACHE = "fs.adl.impl.disable.cache"; protected static final String FS_WASB_IMPL_DISABLE_CACHE = 
"fs.wasb.impl.disable.cache"; protected static final String FS_WASBS_IMPL_DISABLE_CACHE = "fs.wasbs.impl.disable.cache"; protected static final String FS_GS_IMPL_DISABLE_CACHE = "fs.gs.impl.disable.cache"; protected static final String FS_TOS_ACCESS_KEY = "fs.tos.access.key"; protected static final String FS_TOS_SECRET_KEY = "fs.tos.secret.key"; protected static final String FS_TOS_ENDPOINT = "fs.tos.endpoint"; protected static final String FS_TOS_IMPL_DISABLE_CACHE = "fs.tos.impl.disable.cache"; protected static final String FS_TOS_CONNECTION_SSL_ENABLED = "fs.tos.connection.ssl.enabled"; protected static final String FS_TOS_IMPL = "fs.tos.impl"; protected static final String FS_TOS_REGION = "fs.tos.region"; private final ScheduledExecutorService handleManagementPool = Executors.newScheduledThreadPool(1); private int readBufferSize = 128 << 10; private int writeBufferSize = 128 << 10; private final ConcurrentHashMap<HdfsFsIdentity, HdfsFs> cachedFileSystem; private final HdfsFsStreamManager ioStreamManager; public HdfsFsManager() { cachedFileSystem = new ConcurrentHashMap<>(); ioStreamManager = new HdfsFsStreamManager(); readBufferSize = Config.hdfs_read_buffer_size_kb << 10; writeBufferSize = Config.hdfs_write_buffer_size_kb << 10; handleManagementPool.schedule(new FileSystemExpirationChecker(), 0, TimeUnit.SECONDS); } private static void convertHDFSConfToProperties(Configuration conf, THdfsProperties tProperties) { ((HDFSConfigurationWrap) conf).convertHDFSConfToProperties(tProperties); } private static void convertObjectStoreConfToProperties(String path, Configuration conf, THdfsProperties tProperties, TObjectStoreType tObjectStoreType) { ((ConfigurationWrap) conf).convertObjectStoreConfToProperties(path, tProperties, tObjectStoreType); } /** * visible for test * * @return BrokerFileSystem with different FileSystem based on scheme */ public HdfsFs getFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { 
WildcardURI pathUri = new WildcardURI(path); String scheme = pathUri.getUri().getScheme(); if (Strings.isNullOrEmpty(scheme)) { throw new UserException("invalid path. scheme is null"); } switch (scheme) { case HDFS_SCHEME: case VIEWFS_SCHEME: return getDistributedFileSystem(scheme, path, loadProperties, tProperties); case S3A_SCHEME: return getS3AFileSystem(path, loadProperties, tProperties); case S3_SCHEMA: return getS3FileSystem(path, loadProperties, tProperties); case OSS_SCHEME: return getOSSFileSystem(path, loadProperties, tProperties); case COS_SCHEME: return getCOSFileSystem(path, loadProperties, tProperties); case KS3_SCHEME: return getKS3FileSystem(path, loadProperties, tProperties); case OBS_SCHEME: return getOBSFileSystem(path, loadProperties, tProperties); case TOS_SCHEME: return getTOSFileSystem(path, loadProperties, tProperties); case ABFS_SCHEMA: case ABFSS_SCHEMA: case ADL_SCHEMA: case WASB_SCHEMA: case WASBS_SCHEMA: return getAzureFileSystem(path, loadProperties, tProperties); case GCS_SCHEMA: return getGoogleFileSystem(path, loadProperties, tProperties); default: return getUniversalFileSystem(path, loadProperties, tProperties); } } /** * visible for test * <p> * file system handle is cached, the identity is host + username_password * it will have safety problem if only hostname is used because one user may * specify username and password * and then access hdfs, another user may not specify username and password but * could also access data * <p> * Configs related to viewfs in core-site.xml and hdfs-site.xml should be copied * to the broker conf directory. 
*/ public HdfsFs getDistributedFileSystem(String scheme, String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { WildcardURI pathUri = new WildcardURI(path); String host = scheme + ": if (Strings.isNullOrEmpty(pathUri.getAuthority())) { if (loadProperties.containsKey(FS_DEFAULTFS_KEY)) { host = loadProperties.get(FS_DEFAULTFS_KEY); LOG.info("no schema and authority in path. use fs.defaultFs"); } else { LOG.warn("invalid hdfs path. authority is null,path:" + path); throw new UserException("invalid hdfs path. authority is null"); } } String username = loadProperties.getOrDefault(USER_NAME_KEY, ""); String password = loadProperties.getOrDefault(PASSWORD_KEY, ""); String dfsNameServices = loadProperties.getOrDefault(DFS_NAMESERVICES_KEY, ""); String authentication = loadProperties.getOrDefault(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, ""); String disableCache = loadProperties.getOrDefault(FS_HDFS_IMPL_DISABLE_CACHE, "true"); String disableCacheLowerCase = disableCache.toLowerCase(); if (!(disableCacheLowerCase.equals("true") || disableCacheLowerCase.equals("false"))) { LOG.warn("invalid disable cache: " + disableCache); throw new UserException("invalid disable cache: " + disableCache); } if (!dfsNameServices.equals("")) { LOG.warn("Invalid load_properties, namenode HA should be set in hdfs/core-site.xml for" + "broker load without broke. For broker load with broker, you can set namenode HA in the load_properties"); throw new UserException("invalid load_properties, namenode HA should be set in hdfs/core-site.xml" + "for load without broker. For broker load with broker, you can set namenode HA in the load_properties"); } if (!authentication.equals("")) { LOG.warn("Invalid load_properties, kerberos should be set in hdfs/core-site.xml for broker " + "load without broker. 
For broker load with broker, you can set namenode HA in the load_properties"); throw new UserException("invalid load_properties, kerberos should be set in hdfs/core-site.xml " + "for load without broker. For broker load with broker, you can set namenode HA in the load_properties"); } String hdfsUgi = username + "," + password; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, hdfsUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new HDFSConfigurationWrap(); conf.set(FS_HDFS_IMPL_DISABLE_CACHE, disableCache); UserGroupInformation ugi = null; if (!Strings.isNullOrEmpty(username) && conf.get("hadoop.security.authentication").equals("simple")) { ugi = UserGroupInformation.createRemoteUser(username); } FileSystem dfsFileSystem = null; if (ugi != null) { dfsFileSystem = ugi.doAs( (PrivilegedExceptionAction<FileSystem>) () -> FileSystem.get(pathUri.getUri(), conf)); } else { dfsFileSystem = FileSystem.get(pathUri.getUri(), conf); } fileSystem.setFileSystem(dfsFileSystem); fileSystem.setConfiguration(conf); if (ugi != null) { fileSystem.setUserName(username); } if (tProperties != null) { convertHDFSConfToProperties(conf, tProperties); if (ugi != null) { tProperties.setHdfs_username(username); } } } else { if (tProperties != null) { convertHDFSConfToProperties(fileSystem.getConfiguration(), tProperties); if (fileSystem.getUserName() != null) { tProperties.setHdfs_username(fileSystem.getUserName()); } } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e.getMessage()); } finally { fileSystem.getLock().unlock(); 
// NOTE(review): continues the mangled region; same truncated "://" string
// literals as above (e.g. `host = S3A_SCHEME + ": ` -- TODO confirm upstream).
// Code kept byte-identical; only these comment lines were added.
//
// Contents of this span:
//  - Closing braces of getDistributedFileSystem().
//  - getS3AFileSystem(): delegates to getFileSystemByCloudConfiguration() when
//    a non-DEFAULT CloudConfiguration is built from loadProperties; otherwise
//    reads fs.s3a.* access/secret/endpoint/ssl/credential-provider keys and
//    caches one HdfsFs per host + "accessKey,secretKey" identity.
//  - getS3FileSystem(), getAzureFileSystem(), getGoogleFileSystem(): thin
//    delegations to getFileSystemByCloudConfiguration(); the Azure variant
//    first stashes the path under AZURE_PATH_KEY in loadProperties.
//  - Signature (first token) of getFileSystemByCloudConfiguration(); its body
//    continues on the following span.
} } /** * visible for test * <p> * file system handle is cached, the identity is endpoint + bucket + * accessKey_secretKey */ public HdfsFs getS3AFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_S3A_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_S3A_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_S3A_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_S3A_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_S3A_CONNECTION_SSL_ENABLED, "false"); String awsCredProvider = loadProperties.getOrDefault(FS_S3A_AWS_CRED_PROVIDER, null); String host = S3A_SCHEME + ": String s3aUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, s3aUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); if (!accessKey.isEmpty()) { conf.set(FS_S3A_ACCESS_KEY, accessKey); } if (!secretKey.isEmpty()) { conf.set(FS_S3A_SECRET_KEY, secretKey); } if (!endpoint.isEmpty()) { conf.set(FS_S3A_ENDPOINT, endpoint); } conf.set(FS_S3A_CONNECTION_SSL_ENABLED, connectionSSLEnabled); if (awsCredProvider != null) { conf.set(FS_S3A_AWS_CRED_PROVIDER, 
awsCredProvider); } conf.set(FS_S3A_IMPL_DISABLE_CACHE, disableCache); FileSystem s3AFileSystem = FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(s3AFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.S3); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.S3); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } public HdfsFs getS3FileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } /** * Support for Azure Storage File System * Support abfs: */ public HdfsFs getAzureFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { loadProperties.put(AzureCloudConfigurationFactory.AZURE_PATH_KEY, path); CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } /** * Support for Google Cloud Storage File System * Support gs: */ public HdfsFs getGoogleFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } /** * This function create FileSystem by CloudConfiguration * Support s3: * tos: */ private HdfsFs getFileSystemByCloudConfiguration(CloudConfiguration 
// NOTE(review): continues the mangled region (truncated "://" literals).
// Code kept byte-identical; only these comment lines were added.
//
// Contents of this span:
//  - Body of getFileSystemByCloudConfiguration(): caches one HdfsFs per
//    scheme-host + CloudConfiguration credential string; on first use applies
//    the CloudConfiguration to a fresh Configuration, force-disables the
//    Hadoop FileSystem cache for every supported scheme, and opens the inner
//    FileSystem. When tProperties is non-null the CloudConfiguration is also
//    thrift-serialized into it (on every call, not only on creation).
//  - getKS3FileSystem() (most of it): same delegate-or-build pattern as the
//    S3A variant, using fs.ks3.* keys and the Ks3FileSystem implementation.
cloudConfiguration, String path, THdfsProperties tProperties) throws UserException { Preconditions.checkArgument(cloudConfiguration != null); WildcardURI pathUri = new WildcardURI(path); String host = pathUri.getUri().getScheme() + ": HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, cloudConfiguration.getCredentialString()); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); cloudConfiguration.applyToConfiguration(conf); conf.set(FS_S3_IMPL_DISABLE_CACHE, "true"); conf.set(FS_S3A_IMPL_DISABLE_CACHE, "true"); conf.set(FS_ABFS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_ABFSS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_ADL_IMPL_DISABLE_CACHE, "true"); conf.set(FS_WASB_IMPL_DISABLE_CACHE, "true"); conf.set(FS_WASBS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_GS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_OSS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_OBS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_COS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_TOS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_KS3_IMPL_DISABLE_CACHE, "true"); FileSystem innerFileSystem = FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(innerFileSystem); fileSystem.setConfiguration(conf); } if (tProperties != null) { TCloudConfiguration tCloudConfiguration = new TCloudConfiguration(); cloudConfiguration.toThrift(tCloudConfiguration); tProperties.setCloud_configuration(tCloudConfiguration); } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } /** * visible for test * <p> * file 
system handle is cached, the identity is endpoint + bucket + * accessKey_secretKey */ public HdfsFs getKS3FileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_KS3_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_KS3_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_KS3_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_KS3_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_KS3_CONNECTION_SSL_ENABLED, "false"); String host = KS3_SCHEME + ": String ks3aUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, ks3aUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); if (!accessKey.isEmpty()) { conf.set(FS_KS3_ACCESS_KEY, accessKey); } if (!secretKey.isEmpty()) { conf.set(FS_KS3_SECRET_KEY, secretKey); } if (!endpoint.isEmpty()) { conf.set(FS_KS3_ENDPOINT, endpoint); } conf.set(FS_KS3_IMPL, "com.ksyun.kmr.hadoop.fs.ks3.Ks3FileSystem"); conf.set(FS_KS3_IMPL_DISABLE_CACHE, disableCache); conf.set(FS_KS3_CONNECTION_SSL_ENABLED, connectionSSLEnabled); FileSystem ks3FileSystem = FileSystem.get(pathUri.getUri(), conf); 
// NOTE(review): continues the mangled region (truncated "://" literals, and
// `path.indexOf(": ` in getUniversalFileSystem was presumably
// `path.indexOf("://")` -- the `+ 3` skip below matches that reading; TODO
// confirm upstream). Code kept byte-identical; only these comments added.
//
// Contents of this span:
//  - Tail of getKS3FileSystem().
//  - getOBSFileSystem(): delegate-or-build pattern with fs.obs.* keys and
//    org.apache.hadoop.fs.obs.OBSFileSystem.
//  - getUniversalFileSystem(): fallback for unknown schemes; validates the
//    disable-cache flags, derives a cache host from the path prefix up to the
//    first "/" after the scheme separator, and opens a generic FileSystem.
//    NOTE(review): it reads FS_HDFS_IMPL_DISABLE_CACHE for BOTH
//    disableCacheHDFS and disableCacheS3, and then sets
//    FS_S3A_IMPL_DISABLE_CACHE twice (the first set is immediately
//    overwritten) -- looks like a copy-paste slip; confirm intended keys
//    before changing.
//  - Head of getOSSFileSystem() (delegation check).
fileSystem.setFileSystem(ks3FileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.KS3); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.KS3); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } /** * visible for test * <p> * file system handle is cached, the identity is endpoint + bucket + * accessKey_secretKey */ public HdfsFs getOBSFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_OBS_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_OBS_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_OBS_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_OBS_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_OBS_CONNECTION_SSL_ENABLED, "false"); String host = OBS_SCHEME + ": String obsUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, obsUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " 
create a new one"); Configuration conf = new ConfigurationWrap(); if (!accessKey.isEmpty()) { conf.set(FS_OBS_ACCESS_KEY, accessKey); } if (!secretKey.isEmpty()) { conf.set(FS_OBS_SECRET_KEY, secretKey); } if (!endpoint.isEmpty()) { conf.set(FS_OBS_ENDPOINT, endpoint); } conf.set(FS_OBS_IMPL, "org.apache.hadoop.fs.obs.OBSFileSystem"); conf.set(FS_OBS_IMPL_DISABLE_CACHE, disableCache); conf.set(FS_OBS_CONNECTION_SSL_ENABLED, connectionSSLEnabled); FileSystem obsFileSystem = FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(obsFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.OBS); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.OBS); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } /** * visible for test * <p> * file system handle is cached, the identity is endpoint + bucket + accessKey_secretKey */ public HdfsFs getUniversalFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { String disableCacheHDFS = loadProperties.getOrDefault(FS_HDFS_IMPL_DISABLE_CACHE, "true"); String disableCacheHDFSLowerCase = disableCacheHDFS.toLowerCase(); if (!(disableCacheHDFSLowerCase.equals("true") || disableCacheHDFSLowerCase.equals("false"))) { LOG.warn("invalid disable cache: " + disableCacheHDFS); throw new UserException("invalid disable cache: " + disableCacheHDFS); } String disableCacheS3 = loadProperties.getOrDefault(FS_HDFS_IMPL_DISABLE_CACHE, "true"); String disableCacheS3LowerCase = disableCacheS3.toLowerCase(); if (!(disableCacheS3LowerCase.equals("true") || disableCacheS3LowerCase.equals("false"))) { LOG.warn("invalid disable cache: " + disableCacheS3); throw new UserException("invalid 
disable cache: " + disableCacheS3); } int bucketEndIndex = path.indexOf(": if (bucketEndIndex != -1) { bucketEndIndex = path.indexOf("/", bucketEndIndex + 3); } String host = path; if (bucketEndIndex != -1) { host = path.substring(0, bucketEndIndex); } HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, ""); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); conf.set(FS_S3A_IMPL_DISABLE_CACHE, disableCacheHDFS); conf.set(FS_S3A_IMPL_DISABLE_CACHE, disableCacheS3); FileSystem genericFileSystem = FileSystem.get(new Path(path).toUri(), conf); fileSystem.setFileSystem(genericFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.UNIVERSAL_FS); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.UNIVERSAL_FS); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } /** * visible for test * <p> * file system handle is cached, the identity is endpoint + bucket + * accessKey_secretKey */ public HdfsFs getOSSFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, 
// NOTE(review): continues the mangled region (truncated "://" literals).
// Code kept byte-identical; only these comment lines were added.
//
// Contents of this span:
//  - Body of getOSSFileSystem(): fs.oss.* keys, AliyunOSSFileSystem impl,
//    same per-identity cache + per-entry lock pattern as the other backends.
//  - getCOSFileSystem(): fs.cosn.* keys, CosFileSystem impl, same pattern.
//  - Head of getTOSFileSystem(): unlike the other backends, TOS requires
//    access_key, secret_key and endpoint to be present in loadProperties and
//    fails fast with a UserException when any is missing.
tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_OSS_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_OSS_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_OSS_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_OSS_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_OSS_CONNECTION_SSL_ENABLED, "false"); String host = OSS_SCHEME + ": String ossUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, ossUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); if (!accessKey.isEmpty()) { conf.set(FS_OSS_ACCESS_KEY, accessKey); } if (!secretKey.isEmpty()) { conf.set(FS_OSS_SECRET_KEY, secretKey); } if (!endpoint.isEmpty()) { conf.set(FS_OSS_ENDPOINT, endpoint); } conf.set(FS_OSS_IMPL, "org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem"); conf.set(FS_OSS_IMPL_DISABLE_CACHE, disableCache); conf.set(FS_OSS_CONNECTION_SSL_ENABLED, connectionSSLEnabled); FileSystem ossFileSystem = FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(ossFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.OSS); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.OSS); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { 
fileSystem.getLock().unlock(); } } /** * file system handle is cached, the identity is endpoint + bucket + * accessKey_secretKey * for cos */ public HdfsFs getCOSFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_COS_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_COS_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_COS_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_COS_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_COS_CONNECTION_SSL_ENABLED, "false"); String host = COS_SCHEME + ": String cosUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, cosUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); if (!accessKey.isEmpty()) { conf.set(FS_COS_ACCESS_KEY, accessKey); } if (!secretKey.isEmpty()) { conf.set(FS_COS_SECRET_KEY, secretKey); } if (!endpoint.isEmpty()) { conf.set(FS_COS_ENDPOINT, endpoint); } conf.set(FS_COS_IMPL, "org.apache.hadoop.fs.CosFileSystem"); conf.set(FS_COS_IMPL_DISABLE_CACHE, disableCache); conf.set(FS_COS_CONNECTION_SSL_ENABLED, connectionSSLEnabled); FileSystem cosFileSystem = 
FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(cosFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.COS); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.COS); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } /** * file system handle is cached, the identity is endpoint + bucket + * accessKey secretKey * for tos */ public HdfsFs getTOSFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_TOS_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_TOS_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_TOS_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_TOS_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_TOS_CONNECTION_SSL_ENABLED, "false"); String region = loadProperties.getOrDefault(FS_TOS_REGION, ""); if (accessKey.equals("")) { LOG.warn("Invalid load_properties, TOS must provide access_key"); throw new UserException("Invalid load_properties, TOS must provide access_key"); } if (secretKey.equals("")) { LOG.warn("Invalid load_properties, TOS must provide secret_key"); throw new UserException("Invalid load_properties, TOS must provide secret_key"); } if (endpoint.equals("")) { LOG.warn("Invalid load_properties, TOS must provide endpoint"); throw 
// NOTE(review): continues the mangled region (truncated "://" literals).
// Code kept byte-identical; only these comment lines were added.
//
// Contents of this span:
//  - Tail of getTOSFileSystem(): builds the TOS Configuration (access/secret/
//    endpoint/region, TosFileSystem impl, ssl flags -- note it also sets the
//    S3A ssl flag) and caches the handle like the other backends.
//  - getTProperties(): resolves the file system only for its side effect of
//    filling tProperties.
//  - listPath(): globStatus over the path pattern; directories are reported
//    with size -1 and not splitable; fileNameOnly controls name vs full path.
//  - deletePath(): recursive delete. NOTE(review): the error message
//    "delete path " + path + "error" is missing a space before "error".
//  - checkPathExist(): exists() wrapper.
//  - openReader(): opens an FSDataInputStream with readBufferSize, seeks to
//    startOffset, registers it under a fresh UUID-derived TBrokerFD.
//  - pread(): TRUNCATED in this view -- the method continues past the end of
//    this chunk; left verbatim, do not edit from this excerpt alone.
new UserException("Invalid load_properties, TOS must provide endpoint"); } String host = TOS_SCHEME + ": String tosUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, tosUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); conf.set(FS_TOS_ACCESS_KEY, accessKey); conf.set(FS_TOS_SECRET_KEY, secretKey); conf.set(FS_TOS_ENDPOINT, endpoint); conf.set(FS_TOS_IMPL, "com.volcengine.cloudfs.fs.TosFileSystem"); conf.set(FS_TOS_IMPL_DISABLE_CACHE, disableCache); conf.set(FS_TOS_CONNECTION_SSL_ENABLED, connectionSSLEnabled); conf.set(FS_S3A_CONNECTION_SSL_ENABLED, connectionSSLEnabled); conf.set(FS_TOS_REGION, region); FileSystem tosFileSystem = FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(tosFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.TOS); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.TOS); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } public void getTProperties(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { getFileSystem(path, loadProperties, tProperties); } public List<TBrokerFileStatus> listPath(String path, boolean fileNameOnly, Map<String, String> loadProperties) throws UserException { List<TBrokerFileStatus> resultFileStatus 
 = null; WildcardURI pathUri = new WildcardURI(path); HdfsFs fileSystem = getFileSystem(path, loadProperties, null); Path pathPattern = new Path(pathUri.getPath()); try { FileStatus[] files = fileSystem.getDFSFileSystem().globStatus(pathPattern); if (files == null) { resultFileStatus = new ArrayList<>(0); return resultFileStatus; } resultFileStatus = new ArrayList<>(files.length); for (FileStatus fileStatus : files) { TBrokerFileStatus brokerFileStatus = new TBrokerFileStatus(); brokerFileStatus.setIsDir(fileStatus.isDirectory()); if (fileStatus.isDirectory()) { brokerFileStatus.setIsSplitable(false); brokerFileStatus.setSize(-1); } else { brokerFileStatus.setSize(fileStatus.getLen()); brokerFileStatus.setIsSplitable(true); } if (fileNameOnly) { brokerFileStatus.setPath(fileStatus.getPath().getName()); } else { brokerFileStatus.setPath(fileStatus.getPath().toString()); } resultFileStatus.add(brokerFileStatus); } } catch (FileNotFoundException e) { LOG.info("file not found: " + e.getMessage()); throw new UserException("file not found: " + e.getMessage()); } catch (Exception e) { LOG.error("errors while get file status ", e); throw new UserException("unknown error when get file status: " + e.getMessage()); } return resultFileStatus; } public void deletePath(String path, Map<String, String> loadProperties) throws UserException { WildcardURI pathUri = new WildcardURI(path); HdfsFs fileSystem = getFileSystem(path, loadProperties, null); Path filePath = new Path(pathUri.getPath()); try { fileSystem.getDFSFileSystem().delete(filePath, true); } catch (IOException e) { LOG.error("errors while delete path " + path); throw new UserException("delete path " + path + "error"); } } public boolean checkPathExist(String path, Map<String, String> loadProperties) throws UserException { WildcardURI pathUri = new WildcardURI(path); HdfsFs fileSystem = getFileSystem(path, loadProperties, null); Path filePath = new Path(pathUri.getPath()); try { return 
fileSystem.getDFSFileSystem().exists(filePath); } catch (IOException e) { LOG.error("errors while check path exist: " + path); throw new UserException("errors while check if path " + path + " exist"); } } public TBrokerFD openReader(String path, long startOffset, Map<String, String> loadProperties) throws UserException { WildcardURI pathUri = new WildcardURI(path); Path inputFilePath = new Path(pathUri.getPath()); HdfsFs fileSystem = getFileSystem(path, loadProperties, null); try { FSDataInputStream fsDataInputStream = fileSystem.getDFSFileSystem().open(inputFilePath, readBufferSize); fsDataInputStream.seek(startOffset); UUID uuid = UUID.randomUUID(); TBrokerFD fd = parseUUIDToFD(uuid); ioStreamManager.putNewInputStream(fd, fsDataInputStream, fileSystem); return fd; } catch (IOException e) { LOG.error("errors while open path", e); throw new UserException("could not open file " + path); } } public byte[] pread(TBrokerFD fd, long offset, long length) throws UserException { FSDataInputStream fsDataInputStream = ioStreamManager.getFsDataInputStream(fd); synchronized (fsDataInputStream) { long currentStreamOffset; try { currentStreamOffset = fsDataInputStream.getPos(); } catch (IOException e) { LOG.error("errors while get file pos from output stream", e); throw new UserException("errors while get file pos from output stream"); } if (currentStreamOffset != offset) { LOG.debug("invalid offset, current read offset is " + currentStreamOffset + " is not equal to request offset " + offset + " seek to it"); try { fsDataInputStream.seek(offset); } catch (IOException e) { throw new UserException("current read offset " + currentStreamOffset + " is not equal to " + offset + ", and could not seek to it"); } } byte[] buf; if (length > readBufferSize) { buf = new byte[readBufferSize]; } else { buf = new byte[(int) length]; } try { int readLength = readByteArrayFully(fsDataInputStream, buf); if (readLength < 0) { throw new UserException("end of file reached"); } if 
(LOG.isDebugEnabled()) { LOG.debug( "read buffer from input stream, buffer size:" + buf.length + ", read length:" + readLength); } if (readLength == readBufferSize) { return buf; } else { byte[] smallerBuf = new byte[readLength]; System.arraycopy(buf, 0, smallerBuf, 0, readLength); return smallerBuf; } } catch (IOException e) { LOG.error("errors while read data from stream", e); throw new UserException("errors while read data from stream"); } } } public void seek(TBrokerFD fd, long offset) throws NotImplementedException { throw new NotImplementedException("seek this method is not supported"); } public void closeReader(TBrokerFD fd) throws UserException { FSDataInputStream fsDataInputStream = ioStreamManager.getFsDataInputStream(fd); synchronized (fsDataInputStream) { try { fsDataInputStream.close(); } catch (IOException e) { LOG.error("errors while close file input stream", e); throw new UserException("errors while close file input stream"); } finally { ioStreamManager.removeInputStream(fd); } } } public TBrokerFD openWriter(String path, Map<String, String> loadProperties) throws UserException { WildcardURI pathUri = new WildcardURI(path); Path inputFilePath = new Path(pathUri.getPath()); HdfsFs fileSystem = getFileSystem(path, loadProperties, null); try { FSDataOutputStream fsDataOutputStream = fileSystem.getDFSFileSystem().create(inputFilePath, true, writeBufferSize); UUID uuid = UUID.randomUUID(); TBrokerFD fd = parseUUIDToFD(uuid); LOG.info("finish a open writer request. 
fd: " + fd); ioStreamManager.putNewOutputStream(fd, fsDataOutputStream, fileSystem); return fd; } catch (IOException e) { LOG.error("errors while open path", e); throw new UserException("could not open file " + path); } } public void pwrite(TBrokerFD fd, long offset, byte[] data) throws UserException { FSDataOutputStream fsDataOutputStream = ioStreamManager.getFsDataOutputStream(fd); synchronized (fsDataOutputStream) { long currentStreamOffset = fsDataOutputStream.getPos(); if (currentStreamOffset != offset) { throw new UserException("current outputstream offset is " + currentStreamOffset + " not equal to request " + offset); } try { fsDataOutputStream.write(data); } catch (IOException e) { LOG.error("errors while write file " + fd + " to output stream", e); throw new UserException("errors while write data to output stream"); } } } public void closeWriter(TBrokerFD fd) throws UserException { FSDataOutputStream fsDataOutputStream = ioStreamManager.getFsDataOutputStream(fd); synchronized (fsDataOutputStream) { try { fsDataOutputStream.hsync(); fsDataOutputStream.close(); } catch (IOException e) { LOG.error("errors while close file " + fd + " output stream", e); throw new UserException("errors while close file output stream"); } finally { ioStreamManager.removeOutputStream(fd); } } } private static TBrokerFD parseUUIDToFD(UUID uuid) { return new TBrokerFD(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()); } private int readByteArrayFully(FSDataInputStream is, byte[] dest) throws IOException { int readLength = 0; while (readLength < dest.length) { int n = is.read(dest, readLength, dest.length - readLength); if (n <= 0) { break; } readLength += n; } return readLength; } class FileSystemExpirationChecker implements Runnable { @Override public void run() { try { for (HdfsFs fileSystem : cachedFileSystem.values()) { if (fileSystem.isExpired(Config.hdfs_file_system_expire_seconds)) { LOG.info("file system " + fileSystem + " is expired, close and remove it"); 
fileSystem.getLock().lock(); try { fileSystem.closeFileSystem(); } catch (Throwable t) { LOG.error("errors while close file system", t); } finally { cachedFileSystem.remove(fileSystem.getIdentity()); fileSystem.getLock().unlock(); } } } } finally { HdfsFsManager.this.handleManagementPool.schedule(this, 60, TimeUnit.SECONDS); } } } }
class HdfsFsManager { private static final Logger LOG = LogManager.getLogger(HdfsFsManager.class); private static final String HDFS_SCHEME = "hdfs"; private static final String VIEWFS_SCHEME = "viewfs"; private static final String S3_SCHEMA = "s3"; private static final String S3A_SCHEME = "s3a"; private static final String OSS_SCHEME = "oss"; private static final String COS_SCHEME = "cosn"; private static final String KS3_SCHEME = "ks3"; private static final String OBS_SCHEME = "obs"; private static final String TOS_SCHEME = "tos"; private static final String ABFS_SCHEMA = "abfs"; private static final String ABFSS_SCHEMA = "abfss"; private static final String ADL_SCHEMA = "adl"; private static final String WASB_SCHEMA = "wasb"; private static final String WASBS_SCHEMA = "wasbs"; private static final String GCS_SCHEMA = "gs"; private static final String USER_NAME_KEY = "username"; private static final String PASSWORD_KEY = "password"; private static final String DFS_NAMESERVICES_KEY = "dfs.nameservices"; private static final String FS_DEFAULTFS_KEY = "fs.defaultFS"; protected static final String FS_HDFS_IMPL_DISABLE_CACHE = "fs.hdfs.impl.disable.cache"; protected static final String FS_S3A_ACCESS_KEY = "fs.s3a.access.key"; protected static final String FS_S3A_SECRET_KEY = "fs.s3a.secret.key"; protected static final String FS_S3A_ENDPOINT = "fs.s3a.endpoint"; protected static final String FS_S3A_IMPL_DISABLE_CACHE = "fs.s3a.impl.disable.cache"; protected static final String FS_S3_IMPL_DISABLE_CACHE = "fs.s3.impl.disable.cache"; protected static final String FS_S3A_CONNECTION_SSL_ENABLED = "fs.s3a.connection.ssl.enabled"; protected static final String FS_S3A_MAX_CONNECTION = "fs.s3a.connection.maximum"; protected static final String FS_S3A_AWS_CRED_PROVIDER = "fs.s3a.aws.credentials.provider"; protected static final String FS_KS3_ACCESS_KEY = "fs.ks3.AccessKey"; protected static final String FS_KS3_SECRET_KEY = "fs.ks3.AccessSecret"; protected static final String 
FS_KS3_ENDPOINT = "fs.ks3.endpoint"; protected static final String FS_KS3_IMPL = "fs.ks3.impl"; protected static final String FS_KS3_CONNECTION_SSL_ENABLED = "fs.ks3.connection.ssl.enabled"; protected static final String FS_KS3_IMPL_DISABLE_CACHE = "fs.ks3.impl.disable.cache"; protected static final String FS_OSS_ACCESS_KEY = "fs.oss.accessKeyId"; protected static final String FS_OSS_SECRET_KEY = "fs.oss.accessKeySecret"; protected static final String FS_OSS_ENDPOINT = "fs.oss.endpoint"; protected static final String FS_OSS_IMPL_DISABLE_CACHE = "fs.oss.impl.disable.cache"; protected static final String FS_OSS_CONNECTION_SSL_ENABLED = "fs.oss.connection.secure.enabled"; protected static final String FS_OSS_IMPL = "fs.oss.impl"; protected static final String FS_COS_ACCESS_KEY = "fs.cosn.userinfo.secretId"; protected static final String FS_COS_SECRET_KEY = "fs.cosn.userinfo.secretKey"; protected static final String FS_COS_ENDPOINT = "fs.cosn.bucket.endpoint_suffix"; protected static final String FS_COS_IMPL_DISABLE_CACHE = "fs.cosn.impl.disable.cache"; protected static final String FS_COS_CONNECTION_SSL_ENABLED = "fs.cos.connection.ssl.enabled"; protected static final String FS_COS_IMPL = "fs.cosn.impl"; protected static final String FS_OBS_ACCESS_KEY = "fs.obs.access.key"; protected static final String FS_OBS_SECRET_KEY = "fs.obs.secret.key"; protected static final String FS_OBS_ENDPOINT = "fs.obs.endpoint"; protected static final String FS_OBS_IMPL_DISABLE_CACHE = "fs.obs.impl.disable.cache"; protected static final String FS_OBS_CONNECTION_SSL_ENABLED = "fs.obs.connection.ssl.enabled"; protected static final String FS_OBS_IMPL = "fs.obs.impl"; protected static final String FS_ABFS_IMPL_DISABLE_CACHE = "fs.abfs.impl.disable.cache"; protected static final String FS_ABFSS_IMPL_DISABLE_CACHE = "fs.abfss.impl.disable.cache"; protected static final String FS_ADL_IMPL_DISABLE_CACHE = "fs.adl.impl.disable.cache"; protected static final String FS_WASB_IMPL_DISABLE_CACHE = 
"fs.wasb.impl.disable.cache"; protected static final String FS_WASBS_IMPL_DISABLE_CACHE = "fs.wasbs.impl.disable.cache"; protected static final String FS_GS_IMPL_DISABLE_CACHE = "fs.gs.impl.disable.cache"; protected static final String FS_TOS_ACCESS_KEY = "fs.tos.access.key"; protected static final String FS_TOS_SECRET_KEY = "fs.tos.secret.key"; protected static final String FS_TOS_ENDPOINT = "fs.tos.endpoint"; protected static final String FS_TOS_IMPL_DISABLE_CACHE = "fs.tos.impl.disable.cache"; protected static final String FS_TOS_CONNECTION_SSL_ENABLED = "fs.tos.connection.ssl.enabled"; protected static final String FS_TOS_IMPL = "fs.tos.impl"; protected static final String FS_TOS_REGION = "fs.tos.region"; private final ScheduledExecutorService handleManagementPool = Executors.newScheduledThreadPool(1); private int readBufferSize = 128 << 10; private int writeBufferSize = 128 << 10; private final ConcurrentHashMap<HdfsFsIdentity, HdfsFs> cachedFileSystem; private final HdfsFsStreamManager ioStreamManager; public HdfsFsManager() { cachedFileSystem = new ConcurrentHashMap<>(); ioStreamManager = new HdfsFsStreamManager(); readBufferSize = Config.hdfs_read_buffer_size_kb << 10; writeBufferSize = Config.hdfs_write_buffer_size_kb << 10; handleManagementPool.schedule(new FileSystemExpirationChecker(), 0, TimeUnit.SECONDS); } private static void convertHDFSConfToProperties(Configuration conf, THdfsProperties tProperties) { ((HDFSConfigurationWrap) conf).convertHDFSConfToProperties(tProperties); } private static void convertObjectStoreConfToProperties(String path, Configuration conf, THdfsProperties tProperties, TObjectStoreType tObjectStoreType) { ((ConfigurationWrap) conf).convertObjectStoreConfToProperties(path, tProperties, tObjectStoreType); } /** * visible for test * * @return BrokerFileSystem with different FileSystem based on scheme */ public HdfsFs getFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { 
WildcardURI pathUri = new WildcardURI(path); String scheme = pathUri.getUri().getScheme(); if (Strings.isNullOrEmpty(scheme)) { throw new UserException("invalid path. scheme is null"); } switch (scheme) { case HDFS_SCHEME: case VIEWFS_SCHEME: return getDistributedFileSystem(scheme, path, loadProperties, tProperties); case S3A_SCHEME: return getS3AFileSystem(path, loadProperties, tProperties); case S3_SCHEMA: return getS3FileSystem(path, loadProperties, tProperties); case OSS_SCHEME: return getOSSFileSystem(path, loadProperties, tProperties); case COS_SCHEME: return getCOSFileSystem(path, loadProperties, tProperties); case KS3_SCHEME: return getKS3FileSystem(path, loadProperties, tProperties); case OBS_SCHEME: return getOBSFileSystem(path, loadProperties, tProperties); case TOS_SCHEME: return getTOSFileSystem(path, loadProperties, tProperties); case ABFS_SCHEMA: case ABFSS_SCHEMA: case ADL_SCHEMA: case WASB_SCHEMA: case WASBS_SCHEMA: return getAzureFileSystem(path, loadProperties, tProperties); case GCS_SCHEMA: return getGoogleFileSystem(path, loadProperties, tProperties); default: return getUniversalFileSystem(path, loadProperties, tProperties); } } /** * visible for test * <p> * file system handle is cached, the identity is host + username_password * it will have safety problem if only hostname is used because one user may * specify username and password * and then access hdfs, another user may not specify username and password but * could also access data * <p> * Configs related to viewfs in core-site.xml and hdfs-site.xml should be copied * to the broker conf directory. 
*/ public HdfsFs getDistributedFileSystem(String scheme, String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { WildcardURI pathUri = new WildcardURI(path); String host = scheme + ": if (Strings.isNullOrEmpty(pathUri.getAuthority())) { if (loadProperties.containsKey(FS_DEFAULTFS_KEY)) { host = loadProperties.get(FS_DEFAULTFS_KEY); LOG.info("no schema and authority in path. use fs.defaultFs"); } else { LOG.warn("invalid hdfs path. authority is null,path:" + path); throw new UserException("invalid hdfs path. authority is null"); } } String username = loadProperties.getOrDefault(USER_NAME_KEY, ""); String password = loadProperties.getOrDefault(PASSWORD_KEY, ""); String dfsNameServices = loadProperties.getOrDefault(DFS_NAMESERVICES_KEY, ""); String authentication = loadProperties.getOrDefault(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, ""); String disableCache = loadProperties.getOrDefault(FS_HDFS_IMPL_DISABLE_CACHE, "true"); String disableCacheLowerCase = disableCache.toLowerCase(); if (!(disableCacheLowerCase.equals("true") || disableCacheLowerCase.equals("false"))) { LOG.warn("invalid disable cache: " + disableCache); throw new UserException("invalid disable cache: " + disableCache); } if (!dfsNameServices.equals("")) { LOG.warn("Invalid load_properties, namenode HA should be set in hdfs/core-site.xml for" + "broker load without broke. For broker load with broker, you can set namenode HA in the load_properties"); throw new UserException("invalid load_properties, namenode HA should be set in hdfs/core-site.xml" + "for load without broker. For broker load with broker, you can set namenode HA in the load_properties"); } if (!authentication.equals("")) { LOG.warn("Invalid load_properties, kerberos should be set in hdfs/core-site.xml for broker " + "load without broker. 
For broker load with broker, you can set namenode HA in the load_properties"); throw new UserException("invalid load_properties, kerberos should be set in hdfs/core-site.xml " + "for load without broker. For broker load with broker, you can set namenode HA in the load_properties"); } String hdfsUgi = username + "," + password; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, hdfsUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new HDFSConfigurationWrap(); conf.set(FS_HDFS_IMPL_DISABLE_CACHE, disableCache); UserGroupInformation ugi = null; if (!Strings.isNullOrEmpty(username) && conf.get("hadoop.security.authentication").equals("simple")) { ugi = UserGroupInformation.createRemoteUser(username); } FileSystem dfsFileSystem = null; if (ugi != null) { dfsFileSystem = ugi.doAs( (PrivilegedExceptionAction<FileSystem>) () -> FileSystem.get(pathUri.getUri(), conf)); } else { dfsFileSystem = FileSystem.get(pathUri.getUri(), conf); } fileSystem.setFileSystem(dfsFileSystem); fileSystem.setConfiguration(conf); if (ugi != null) { fileSystem.setUserName(username); } if (tProperties != null) { convertHDFSConfToProperties(conf, tProperties); if (ugi != null) { tProperties.setHdfs_username(username); } } } else { if (tProperties != null) { convertHDFSConfToProperties(fileSystem.getConfiguration(), tProperties); if (fileSystem.getUserName() != null) { tProperties.setHdfs_username(fileSystem.getUserName()); } } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e.getMessage()); } finally { fileSystem.getLock().unlock(); 
} } /** * visible for test * <p> * file system handle is cached, the identity is endpoint + bucket + * accessKey_secretKey */ public HdfsFs getS3AFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_S3A_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_S3A_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_S3A_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_S3A_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_S3A_CONNECTION_SSL_ENABLED, "false"); String awsCredProvider = loadProperties.getOrDefault(FS_S3A_AWS_CRED_PROVIDER, null); String host = S3A_SCHEME + ": String s3aUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, s3aUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); if (!accessKey.isEmpty()) { conf.set(FS_S3A_ACCESS_KEY, accessKey); } if (!secretKey.isEmpty()) { conf.set(FS_S3A_SECRET_KEY, secretKey); } if (!endpoint.isEmpty()) { conf.set(FS_S3A_ENDPOINT, endpoint); } conf.set(FS_S3A_CONNECTION_SSL_ENABLED, connectionSSLEnabled); if (awsCredProvider != null) { conf.set(FS_S3A_AWS_CRED_PROVIDER, 
awsCredProvider); } conf.set(FS_S3A_IMPL_DISABLE_CACHE, disableCache); FileSystem s3AFileSystem = FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(s3AFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.S3); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.S3); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } public HdfsFs getS3FileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } /** * Support for Azure Storage File System * Support abfs: */ public HdfsFs getAzureFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { loadProperties.put(AzureCloudConfigurationFactory.AZURE_PATH_KEY, path); CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } /** * Support for Google Cloud Storage File System * Support gs: */ public HdfsFs getGoogleFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } /** * This function create FileSystem by CloudConfiguration * Support s3: * tos: */ private HdfsFs getFileSystemByCloudConfiguration(CloudConfiguration 
cloudConfiguration, String path, THdfsProperties tProperties) throws UserException { Preconditions.checkArgument(cloudConfiguration != null); WildcardURI pathUri = new WildcardURI(path); String host = pathUri.getUri().getScheme() + ": HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, cloudConfiguration.getCredentialString()); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); cloudConfiguration.applyToConfiguration(conf); conf.set(FS_S3_IMPL_DISABLE_CACHE, "true"); conf.set(FS_S3A_IMPL_DISABLE_CACHE, "true"); conf.set(FS_ABFS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_ABFSS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_ADL_IMPL_DISABLE_CACHE, "true"); conf.set(FS_WASB_IMPL_DISABLE_CACHE, "true"); conf.set(FS_WASBS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_GS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_OSS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_OBS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_COS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_TOS_IMPL_DISABLE_CACHE, "true"); conf.set(FS_KS3_IMPL_DISABLE_CACHE, "true"); FileSystem innerFileSystem = FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(innerFileSystem); fileSystem.setConfiguration(conf); } if (tProperties != null) { TCloudConfiguration tCloudConfiguration = new TCloudConfiguration(); cloudConfiguration.toThrift(tCloudConfiguration); tProperties.setCloud_configuration(tCloudConfiguration); } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } /** * visible for test * <p> * file 
system handle is cached, the identity is endpoint + bucket + * accessKey_secretKey */ public HdfsFs getKS3FileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_KS3_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_KS3_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_KS3_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_KS3_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_KS3_CONNECTION_SSL_ENABLED, "false"); String host = KS3_SCHEME + ": String ks3aUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, ks3aUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); if (!accessKey.isEmpty()) { conf.set(FS_KS3_ACCESS_KEY, accessKey); } if (!secretKey.isEmpty()) { conf.set(FS_KS3_SECRET_KEY, secretKey); } if (!endpoint.isEmpty()) { conf.set(FS_KS3_ENDPOINT, endpoint); } conf.set(FS_KS3_IMPL, "com.ksyun.kmr.hadoop.fs.ks3.Ks3FileSystem"); conf.set(FS_KS3_IMPL_DISABLE_CACHE, disableCache); conf.set(FS_KS3_CONNECTION_SSL_ENABLED, connectionSSLEnabled); FileSystem ks3FileSystem = FileSystem.get(pathUri.getUri(), conf); 
fileSystem.setFileSystem(ks3FileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.KS3); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.KS3); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } /** * visible for test * <p> * file system handle is cached, the identity is endpoint + bucket + * accessKey_secretKey */ public HdfsFs getOBSFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_OBS_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_OBS_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_OBS_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_OBS_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_OBS_CONNECTION_SSL_ENABLED, "false"); String host = OBS_SCHEME + ": String obsUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, obsUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " 
create a new one"); Configuration conf = new ConfigurationWrap(); if (!accessKey.isEmpty()) { conf.set(FS_OBS_ACCESS_KEY, accessKey); } if (!secretKey.isEmpty()) { conf.set(FS_OBS_SECRET_KEY, secretKey); } if (!endpoint.isEmpty()) { conf.set(FS_OBS_ENDPOINT, endpoint); } conf.set(FS_OBS_IMPL, "org.apache.hadoop.fs.obs.OBSFileSystem"); conf.set(FS_OBS_IMPL_DISABLE_CACHE, disableCache); conf.set(FS_OBS_CONNECTION_SSL_ENABLED, connectionSSLEnabled); FileSystem obsFileSystem = FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(obsFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.OBS); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.OBS); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } /** * visible for test * <p> * file system handle is cached, the identity is endpoint + bucket + accessKey_secretKey */ public HdfsFs getUniversalFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { String disableCacheHDFS = loadProperties.getOrDefault(FS_HDFS_IMPL_DISABLE_CACHE, "true"); String disableCacheHDFSLowerCase = disableCacheHDFS.toLowerCase(); if (!(disableCacheHDFSLowerCase.equals("true") || disableCacheHDFSLowerCase.equals("false"))) { LOG.warn("invalid disable cache: " + disableCacheHDFS); throw new UserException("invalid disable cache: " + disableCacheHDFS); } String disableCacheS3 = loadProperties.getOrDefault(FS_HDFS_IMPL_DISABLE_CACHE, "true"); String disableCacheS3LowerCase = disableCacheS3.toLowerCase(); if (!(disableCacheS3LowerCase.equals("true") || disableCacheS3LowerCase.equals("false"))) { LOG.warn("invalid disable cache: " + disableCacheS3); throw new UserException("invalid 
disable cache: " + disableCacheS3); } int bucketEndIndex = path.indexOf(": if (bucketEndIndex != -1) { bucketEndIndex = path.indexOf("/", bucketEndIndex + 3); } String host = path; if (bucketEndIndex != -1) { host = path.substring(0, bucketEndIndex); } HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, ""); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); conf.set(FS_S3A_IMPL_DISABLE_CACHE, disableCacheHDFS); conf.set(FS_S3A_IMPL_DISABLE_CACHE, disableCacheS3); FileSystem genericFileSystem = FileSystem.get(new Path(path).toUri(), conf); fileSystem.setFileSystem(genericFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.UNIVERSAL_FS); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.UNIVERSAL_FS); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } /** * visible for test * <p> * file system handle is cached, the identity is endpoint + bucket + * accessKey_secretKey */ public HdfsFs getOSSFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, 
tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_OSS_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_OSS_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_OSS_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_OSS_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_OSS_CONNECTION_SSL_ENABLED, "false"); String host = OSS_SCHEME + ": String ossUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, ossUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); if (!accessKey.isEmpty()) { conf.set(FS_OSS_ACCESS_KEY, accessKey); } if (!secretKey.isEmpty()) { conf.set(FS_OSS_SECRET_KEY, secretKey); } if (!endpoint.isEmpty()) { conf.set(FS_OSS_ENDPOINT, endpoint); } conf.set(FS_OSS_IMPL, "org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem"); conf.set(FS_OSS_IMPL_DISABLE_CACHE, disableCache); conf.set(FS_OSS_CONNECTION_SSL_ENABLED, connectionSSLEnabled); FileSystem ossFileSystem = FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(ossFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.OSS); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.OSS); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { 
fileSystem.getLock().unlock(); } } /** * file system handle is cached, the identity is endpoint + bucket + * accessKey_secretKey * for cos */ public HdfsFs getCOSFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_COS_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_COS_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_COS_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_COS_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_COS_CONNECTION_SSL_ENABLED, "false"); String host = COS_SCHEME + ": String cosUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, cosUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); if (!accessKey.isEmpty()) { conf.set(FS_COS_ACCESS_KEY, accessKey); } if (!secretKey.isEmpty()) { conf.set(FS_COS_SECRET_KEY, secretKey); } if (!endpoint.isEmpty()) { conf.set(FS_COS_ENDPOINT, endpoint); } conf.set(FS_COS_IMPL, "org.apache.hadoop.fs.CosFileSystem"); conf.set(FS_COS_IMPL_DISABLE_CACHE, disableCache); conf.set(FS_COS_CONNECTION_SSL_ENABLED, connectionSSLEnabled); FileSystem cosFileSystem = 
FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(cosFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.COS); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.COS); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } /** * file system handle is cached, the identity is endpoint + bucket + * accessKey secretKey * for tos */ public HdfsFs getTOSFileSystem(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { CloudConfiguration cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(loadProperties); if (cloudConfiguration.getCloudType() != CloudType.DEFAULT) { return getFileSystemByCloudConfiguration(cloudConfiguration, path, tProperties); } WildcardURI pathUri = new WildcardURI(path); String accessKey = loadProperties.getOrDefault(FS_TOS_ACCESS_KEY, ""); String secretKey = loadProperties.getOrDefault(FS_TOS_SECRET_KEY, ""); String endpoint = loadProperties.getOrDefault(FS_TOS_ENDPOINT, ""); String disableCache = loadProperties.getOrDefault(FS_TOS_IMPL_DISABLE_CACHE, "true"); String connectionSSLEnabled = loadProperties.getOrDefault(FS_TOS_CONNECTION_SSL_ENABLED, "false"); String region = loadProperties.getOrDefault(FS_TOS_REGION, ""); if (accessKey.equals("")) { LOG.warn("Invalid load_properties, TOS must provide access_key"); throw new UserException("Invalid load_properties, TOS must provide access_key"); } if (secretKey.equals("")) { LOG.warn("Invalid load_properties, TOS must provide secret_key"); throw new UserException("Invalid load_properties, TOS must provide secret_key"); } if (endpoint.equals("")) { LOG.warn("Invalid load_properties, TOS must provide endpoint"); throw 
new UserException("Invalid load_properties, TOS must provide endpoint"); } String host = TOS_SCHEME + ": String tosUgi = accessKey + "," + secretKey; HdfsFsIdentity fileSystemIdentity = new HdfsFsIdentity(host, tosUgi); cachedFileSystem.putIfAbsent(fileSystemIdentity, new HdfsFs(fileSystemIdentity)); HdfsFs fileSystem = cachedFileSystem.get(fileSystemIdentity); if (fileSystem == null) { return null; } fileSystem.getLock().lock(); try { if (!cachedFileSystem.containsKey(fileSystemIdentity)) { return null; } if (fileSystem.getDFSFileSystem() == null) { LOG.info("could not find file system for path " + path + " create a new one"); Configuration conf = new ConfigurationWrap(); conf.set(FS_TOS_ACCESS_KEY, accessKey); conf.set(FS_TOS_SECRET_KEY, secretKey); conf.set(FS_TOS_ENDPOINT, endpoint); conf.set(FS_TOS_IMPL, "com.volcengine.cloudfs.fs.TosFileSystem"); conf.set(FS_TOS_IMPL_DISABLE_CACHE, disableCache); conf.set(FS_TOS_CONNECTION_SSL_ENABLED, connectionSSLEnabled); conf.set(FS_S3A_CONNECTION_SSL_ENABLED, connectionSSLEnabled); conf.set(FS_TOS_REGION, region); FileSystem tosFileSystem = FileSystem.get(pathUri.getUri(), conf); fileSystem.setFileSystem(tosFileSystem); fileSystem.setConfiguration(conf); if (tProperties != null) { convertObjectStoreConfToProperties(path, conf, tProperties, TObjectStoreType.TOS); } } else { if (tProperties != null) { convertObjectStoreConfToProperties(path, fileSystem.getConfiguration(), tProperties, TObjectStoreType.TOS); } } return fileSystem; } catch (Exception e) { LOG.error("errors while connect to " + path, e); throw new UserException(e); } finally { fileSystem.getLock().unlock(); } } public void getTProperties(String path, Map<String, String> loadProperties, THdfsProperties tProperties) throws UserException { getFileSystem(path, loadProperties, tProperties); } public List<TBrokerFileStatus> listPath(String path, boolean fileNameOnly, Map<String, String> loadProperties) throws UserException { List<TBrokerFileStatus> resultFileStatus 
= null; WildcardURI pathUri = new WildcardURI(path); HdfsFs fileSystem = getFileSystem(path, loadProperties, null); Path pathPattern = new Path(pathUri.getPath()); try { FileStatus[] files = fileSystem.getDFSFileSystem().globStatus(pathPattern); if (files == null) { resultFileStatus = new ArrayList<>(0); return resultFileStatus; } resultFileStatus = new ArrayList<>(files.length); for (FileStatus fileStatus : files) { TBrokerFileStatus brokerFileStatus = new TBrokerFileStatus(); brokerFileStatus.setIsDir(fileStatus.isDirectory()); if (fileStatus.isDirectory()) { brokerFileStatus.setIsSplitable(false); brokerFileStatus.setSize(-1); } else { brokerFileStatus.setSize(fileStatus.getLen()); brokerFileStatus.setIsSplitable(true); } if (fileNameOnly) { brokerFileStatus.setPath(fileStatus.getPath().getName()); } else { brokerFileStatus.setPath(fileStatus.getPath().toString()); } resultFileStatus.add(brokerFileStatus); } } catch (FileNotFoundException e) { LOG.info("file not found: " + e.getMessage()); throw new UserException("file not found: " + e.getMessage()); } catch (Exception e) { LOG.error("errors while get file status ", e); throw new UserException("unknown error when get file status: " + e.getMessage()); } return resultFileStatus; } public void deletePath(String path, Map<String, String> loadProperties) throws UserException { WildcardURI pathUri = new WildcardURI(path); HdfsFs fileSystem = getFileSystem(path, loadProperties, null); Path filePath = new Path(pathUri.getPath()); try { fileSystem.getDFSFileSystem().delete(filePath, true); } catch (IOException e) { LOG.error("errors while delete path " + path); throw new UserException("delete path " + path + "error"); } } public boolean checkPathExist(String path, Map<String, String> loadProperties) throws UserException { WildcardURI pathUri = new WildcardURI(path); HdfsFs fileSystem = getFileSystem(path, loadProperties, null); Path filePath = new Path(pathUri.getPath()); try { return 
fileSystem.getDFSFileSystem().exists(filePath); } catch (IOException e) { LOG.error("errors while check path exist: " + path); throw new UserException("errors while check if path " + path + " exist"); } } public TBrokerFD openReader(String path, long startOffset, Map<String, String> loadProperties) throws UserException { WildcardURI pathUri = new WildcardURI(path); Path inputFilePath = new Path(pathUri.getPath()); HdfsFs fileSystem = getFileSystem(path, loadProperties, null); try { FSDataInputStream fsDataInputStream = fileSystem.getDFSFileSystem().open(inputFilePath, readBufferSize); fsDataInputStream.seek(startOffset); UUID uuid = UUID.randomUUID(); TBrokerFD fd = parseUUIDToFD(uuid); ioStreamManager.putNewInputStream(fd, fsDataInputStream, fileSystem); return fd; } catch (IOException e) { LOG.error("errors while open path", e); throw new UserException("could not open file " + path); } } public byte[] pread(TBrokerFD fd, long offset, long length) throws UserException { FSDataInputStream fsDataInputStream = ioStreamManager.getFsDataInputStream(fd); synchronized (fsDataInputStream) { long currentStreamOffset; try { currentStreamOffset = fsDataInputStream.getPos(); } catch (IOException e) { LOG.error("errors while get file pos from output stream", e); throw new UserException("errors while get file pos from output stream"); } if (currentStreamOffset != offset) { LOG.debug("invalid offset, current read offset is " + currentStreamOffset + " is not equal to request offset " + offset + " seek to it"); try { fsDataInputStream.seek(offset); } catch (IOException e) { throw new UserException("current read offset " + currentStreamOffset + " is not equal to " + offset + ", and could not seek to it"); } } byte[] buf; if (length > readBufferSize) { buf = new byte[readBufferSize]; } else { buf = new byte[(int) length]; } try { int readLength = readByteArrayFully(fsDataInputStream, buf); if (readLength < 0) { throw new UserException("end of file reached"); } if 
(LOG.isDebugEnabled()) { LOG.debug( "read buffer from input stream, buffer size:" + buf.length + ", read length:" + readLength); } if (readLength == readBufferSize) { return buf; } else { byte[] smallerBuf = new byte[readLength]; System.arraycopy(buf, 0, smallerBuf, 0, readLength); return smallerBuf; } } catch (IOException e) { LOG.error("errors while read data from stream", e); throw new UserException("errors while read data from stream"); } } } public void seek(TBrokerFD fd, long offset) throws NotImplementedException { throw new NotImplementedException("seek this method is not supported"); } public void closeReader(TBrokerFD fd) throws UserException { FSDataInputStream fsDataInputStream = ioStreamManager.getFsDataInputStream(fd); synchronized (fsDataInputStream) { try { fsDataInputStream.close(); } catch (IOException e) { LOG.error("errors while close file input stream", e); throw new UserException("errors while close file input stream"); } finally { ioStreamManager.removeInputStream(fd); } } } public TBrokerFD openWriter(String path, Map<String, String> loadProperties) throws UserException { WildcardURI pathUri = new WildcardURI(path); Path inputFilePath = new Path(pathUri.getPath()); HdfsFs fileSystem = getFileSystem(path, loadProperties, null); try { FSDataOutputStream fsDataOutputStream = fileSystem.getDFSFileSystem().create(inputFilePath, true, writeBufferSize); UUID uuid = UUID.randomUUID(); TBrokerFD fd = parseUUIDToFD(uuid); LOG.info("finish a open writer request. 
fd: " + fd); ioStreamManager.putNewOutputStream(fd, fsDataOutputStream, fileSystem); return fd; } catch (IOException e) { LOG.error("errors while open path", e); throw new UserException("could not open file " + path); } } public void pwrite(TBrokerFD fd, long offset, byte[] data) throws UserException { FSDataOutputStream fsDataOutputStream = ioStreamManager.getFsDataOutputStream(fd); synchronized (fsDataOutputStream) { long currentStreamOffset = fsDataOutputStream.getPos(); if (currentStreamOffset != offset) { throw new UserException("current outputstream offset is " + currentStreamOffset + " not equal to request " + offset); } try { fsDataOutputStream.write(data); } catch (IOException e) { LOG.error("errors while write file " + fd + " to output stream", e); throw new UserException("errors while write data to output stream"); } } } public void closeWriter(TBrokerFD fd) throws UserException { FSDataOutputStream fsDataOutputStream = ioStreamManager.getFsDataOutputStream(fd); synchronized (fsDataOutputStream) { try { fsDataOutputStream.hsync(); fsDataOutputStream.close(); } catch (IOException e) { LOG.error("errors while close file " + fd + " output stream", e); throw new UserException("errors while close file output stream"); } finally { ioStreamManager.removeOutputStream(fd); } } } private static TBrokerFD parseUUIDToFD(UUID uuid) { return new TBrokerFD(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()); } private int readByteArrayFully(FSDataInputStream is, byte[] dest) throws IOException { int readLength = 0; while (readLength < dest.length) { int n = is.read(dest, readLength, dest.length - readLength); if (n <= 0) { break; } readLength += n; } return readLength; } class FileSystemExpirationChecker implements Runnable { @Override public void run() { try { for (HdfsFs fileSystem : cachedFileSystem.values()) { if (fileSystem.isExpired(Config.hdfs_file_system_expire_seconds)) { LOG.info("file system " + fileSystem + " is expired, close and remove it"); 
fileSystem.getLock().lock(); try { fileSystem.closeFileSystem(); } catch (Throwable t) { LOG.error("errors while close file system", t); } finally { cachedFileSystem.remove(fileSystem.getIdentity()); fileSystem.getLock().unlock(); } } } } finally { HdfsFsManager.this.handleManagementPool.schedule(this, 60, TimeUnit.SECONDS); } } } }
I wanted to say that what the user ultimately needs to do is _not_ setting our config property, but make sure that the app's service account has access to secrets. Obviously `quarkus.kubernetes-config.secrets.enabled` is the easiest way, if people use the Kubernetes extension. But that doesn't always have to be the case.
public void warnAboutSecrets(KubernetesConfigSourceConfig config, KubernetesConfigBuildTimeConfig buildTimeConfig) { if (config.enabled && config.secrets.isPresent() && !config.secrets.get().isEmpty() && !buildTimeConfig.secretsEnabled) { log.warn("Configuration is read from Secrets " + config.secrets.get() + ", but quarkus.kubernetes-config.secrets.enabled is false." + " Check if your application's service account has enough permissions to read secrets."); } }
+ " Check if your application's service account has enough permissions to read secrets.");
public void warnAboutSecrets(KubernetesConfigSourceConfig config, KubernetesConfigBuildTimeConfig buildTimeConfig) { if (config.enabled && config.secrets.isPresent() && !config.secrets.get().isEmpty() && !buildTimeConfig.secretsEnabled) { log.warn("Configuration is read from Secrets " + config.secrets.get() + ", but quarkus.kubernetes-config.secrets.enabled is false." + " Check if your application's service account has enough permissions to read secrets."); } }
class KubernetesConfigRecorder { private static final Logger log = Logger.getLogger(KubernetesConfigRecorder.class); public RuntimeValue<ConfigSourceProvider> configSources(KubernetesConfigSourceConfig kubernetesConfigSourceConfig, KubernetesClientBuildConfig clientConfig) { if (!kubernetesConfigSourceConfig.enabled) { log.debug( "No attempt will be made to obtain configuration from the Kubernetes API server because the functionality has been disabled via configuration"); return emptyRuntimeValue(); } return new RuntimeValue<>(new KubernetesConfigSourceProvider(kubernetesConfigSourceConfig, KubernetesClientUtils.createClient(clientConfig))); } private RuntimeValue<ConfigSourceProvider> emptyRuntimeValue() { return new RuntimeValue<>(new EmptyConfigSourceProvider()); } private static class EmptyConfigSourceProvider implements ConfigSourceProvider { @Override public Iterable<ConfigSource> getConfigSources(ClassLoader forClassLoader) { return Collections.emptyList(); } } }
class KubernetesConfigRecorder { private static final Logger log = Logger.getLogger(KubernetesConfigRecorder.class); public RuntimeValue<ConfigSourceProvider> configSources(KubernetesConfigSourceConfig kubernetesConfigSourceConfig, KubernetesClientBuildConfig clientConfig) { if (!kubernetesConfigSourceConfig.enabled) { log.debug( "No attempt will be made to obtain configuration from the Kubernetes API server because the functionality has been disabled via configuration"); return emptyRuntimeValue(); } return new RuntimeValue<>(new KubernetesConfigSourceProvider(kubernetesConfigSourceConfig, KubernetesClientUtils.createClient(clientConfig))); } private RuntimeValue<ConfigSourceProvider> emptyRuntimeValue() { return new RuntimeValue<>(new EmptyConfigSourceProvider()); } private static class EmptyConfigSourceProvider implements ConfigSourceProvider { @Override public Iterable<ConfigSource> getConfigSources(ClassLoader forClassLoader) { return Collections.emptyList(); } } }
why we can insert null partition? There are many points that need to be adapted if we support it.
public static PartitionData partitionDataFromPath(String relativePartitionPath, PartitionSpec spec) { PartitionData data = new PartitionData(spec.fields().size()); String[] partitions = relativePartitionPath.split("/", -1); List<PartitionField> partitionFields = spec.fields(); for (int i = 0; i < partitions.length; i++) { PartitionField field = partitionFields.get(i); String[] parts = partitions[i].split("=", 2); Preconditions.checkArgument(parts.length == 2 && parts[0] != null && field.name().equals(parts[0]), "Invalid partition: %s", partitions[i]); org.apache.iceberg.types.Type sourceType = spec.partitionType().fields().get(i).type(); if (parts[1].equals("null")) { data.set(i, null); } else { data.set(i, Conversions.fromPartitionString(sourceType, parts[1])); } } return data; }
if (parts[1].equals("null")) {
public static PartitionData partitionDataFromPath(String relativePartitionPath, PartitionSpec spec) { PartitionData data = new PartitionData(spec.fields().size()); String[] partitions = relativePartitionPath.split("/", -1); List<PartitionField> partitionFields = spec.fields(); for (int i = 0; i < partitions.length; i++) { PartitionField field = partitionFields.get(i); String[] parts = partitions[i].split("=", 2); Preconditions.checkArgument(parts.length == 2 && parts[0] != null && field.name().equals(parts[0]), "Invalid partition: %s", partitions[i]); org.apache.iceberg.types.Type sourceType = spec.partitionType().fields().get(i).type(); if (parts[1].equals("null")) { data.set(i, null); } else { data.set(i, Conversions.fromPartitionString(sourceType, parts[1])); } } return data; }
class IcebergMetadata implements ConnectorMetadata { private static final Logger LOG = LogManager.getLogger(IcebergMetadata.class); public static final String LOCATION_PROPERTY = "location"; public static final String FILE_FORMAT = "file_format"; public static final String COMPRESSION_CODEC = "compression_codec"; public static final String COMMENT = "comment"; private final String catalogName; private final HdfsEnvironment hdfsEnvironment; private final IcebergCatalog icebergCatalog; private final IcebergStatisticProvider statisticProvider = new IcebergStatisticProvider(); private final Map<TableIdentifier, Table> tables = new ConcurrentHashMap<>(); private final Map<String, Database> databases = new ConcurrentHashMap<>(); private final Map<IcebergFilter, List<FileScanTask>> splitTasks = new ConcurrentHashMap<>(); private final Set<IcebergFilter> scannedTables = new HashSet<>(); private final Set<IcebergFilter> preparedTables = ConcurrentHashMap.newKeySet(); private final Map<FileScanTaskSchema, Pair<String, String>> fileScanTaskSchemas = new ConcurrentHashMap<>(); private final ExecutorService jobPlanningExecutor; private final ExecutorService refreshOtherFeExecutor; private final IcebergMetricsReporter metricsReporter; public IcebergMetadata(String catalogName, HdfsEnvironment hdfsEnvironment, IcebergCatalog icebergCatalog, ExecutorService jobPlanningExecutor, ExecutorService refreshOtherFeExecutor) { this.catalogName = catalogName; this.hdfsEnvironment = hdfsEnvironment; this.icebergCatalog = icebergCatalog; this.metricsReporter = new IcebergMetricsReporter(); this.jobPlanningExecutor = jobPlanningExecutor; this.refreshOtherFeExecutor = refreshOtherFeExecutor; } @Override public Table.TableType getTableType() { return ICEBERG; } @Override public List<String> listDbNames() { return icebergCatalog.listAllDatabases(); } @Override public void createDb(String dbName, Map<String, String> properties) throws AlreadyExistsException { if (dbExists(dbName)) { throw new 
AlreadyExistsException("Database Already Exists"); } icebergCatalog.createDb(dbName, properties); } @Override public void dropDb(String dbName, boolean isForceDrop) throws MetaNotFoundException { if (listTableNames(dbName).size() != 0) { throw new StarRocksConnectorException("Database %s not empty", dbName); } icebergCatalog.dropDb(dbName); databases.remove(dbName); } @Override public Database getDb(String dbName) { if (databases.containsKey(dbName)) { return databases.get(dbName); } Database db; try { db = icebergCatalog.getDB(dbName); } catch (NoSuchNamespaceException e) { LOG.error("Database {} not found", dbName, e); return null; } databases.put(dbName, db); return db; } @Override public List<String> listTableNames(String dbName) { return icebergCatalog.listTables(dbName); } @Override public boolean createTable(CreateTableStmt stmt) throws DdlException { String dbName = stmt.getDbName(); String tableName = stmt.getTableName(); Schema schema = toIcebergApiSchema(stmt.getColumns()); PartitionDesc partitionDesc = stmt.getPartitionDesc(); List<String> partitionColNames = partitionDesc == null ? Lists.newArrayList() : ((ListPartitionDesc) partitionDesc).getPartitionColNames(); PartitionSpec partitionSpec = parsePartitionFields(schema, partitionColNames); Map<String, String> properties = stmt.getProperties() == null ? 
new HashMap<>() : stmt.getProperties(); String tableLocation = properties.get(LOCATION_PROPERTY); String comment = stmt.getComment(); if (comment != null && !comment.isEmpty()) { properties.put(COMMENT, comment); } Map<String, String> createTableProperties = IcebergApiConverter.rebuildCreateTableProperties(properties); return icebergCatalog.createTable(dbName, tableName, schema, partitionSpec, tableLocation, createTableProperties); } @Override public void alterTable(AlterTableStmt stmt) throws UserException { String dbName = stmt.getDbName(); String tableName = stmt.getTableName(); org.apache.iceberg.Table table = icebergCatalog.getTable(dbName, tableName); if (table == null) { throw new StarRocksConnectorException( "Failed to load iceberg table: " + stmt.getTbl().toString()); } IcebergAlterTableExecutor executor = new IcebergAlterTableExecutor(stmt, table, icebergCatalog); executor.execute(); synchronized (this) { tables.remove(TableIdentifier.of(dbName, tableName)); try { icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor); } catch (Exception exception) { LOG.error("Failed to refresh caching iceberg table."); icebergCatalog.invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName)); } asyncRefreshOthersFeMetadataCache(dbName, tableName); } } @Override public void dropTable(DropTableStmt stmt) { Table icebergTable = getTable(stmt.getDbName(), stmt.getTableName()); if (icebergTable == null) { return; } icebergCatalog.dropTable(stmt.getDbName(), stmt.getTableName(), stmt.isForceDrop()); tables.remove(TableIdentifier.of(stmt.getDbName(), stmt.getTableName())); StatisticUtils.dropStatisticsAfterDropTable(icebergTable); asyncRefreshOthersFeMetadataCache(stmt.getDbName(), stmt.getTableName()); } @Override public Table getTable(String dbName, String tblName) { TableIdentifier identifier = TableIdentifier.of(dbName, tblName); if (tables.containsKey(identifier)) { return tables.get(identifier); } try { IcebergCatalogType catalogType = 
icebergCatalog.getIcebergCatalogType(); org.apache.iceberg.Table icebergTable = icebergCatalog.getTable(dbName, tblName); Table table = IcebergApiConverter.toIcebergTable(icebergTable, catalogName, dbName, tblName, catalogType.name()); table.setComment(icebergTable.properties().getOrDefault(COMMENT, "")); tables.put(identifier, table); return table; } catch (StarRocksConnectorException | NoSuchTableException e) { LOG.error("Failed to get iceberg table {}", identifier, e); return null; } } @Override public boolean tableExists(String dbName, String tblName) { return icebergCatalog.tableExists(dbName, tblName); } @Override public List<String> listPartitionNames(String dbName, String tblName) { IcebergCatalogType nativeType = icebergCatalog.getIcebergCatalogType(); if (nativeType != HIVE_CATALOG && nativeType != REST_CATALOG && nativeType != GLUE_CATALOG) { throw new StarRocksConnectorException( "Do not support get partitions from catalog type: " + nativeType); } return icebergCatalog.listPartitionNames(dbName, tblName, jobPlanningExecutor); } @Override public List<RemoteFileInfo> getRemoteFileInfos(Table table, List<PartitionKey> partitionKeys, long snapshotId, ScalarOperator predicate, List<String> fieldNames, long limit) { return getRemoteFileInfos((IcebergTable) table, snapshotId, predicate, limit); } private List<RemoteFileInfo> getRemoteFileInfos(IcebergTable table, long snapshotId, ScalarOperator predicate, long limit) { RemoteFileInfo remoteFileInfo = new RemoteFileInfo(); String dbName = table.getRemoteDbName(); String tableName = table.getRemoteTableName(); IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshotId, predicate); triggerIcebergPlanFilesIfNeeded(key, table, predicate, limit); List<FileScanTask> icebergScanTasks = splitTasks.get(key); if (icebergScanTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. 
predicate:[{}]", dbName, tableName, predicate); } List<RemoteFileDesc> remoteFileDescs = Lists.newArrayList(RemoteFileDesc.createIcebergRemoteFileDesc(icebergScanTasks)); remoteFileInfo.setFiles(remoteFileDescs); return Lists.newArrayList(remoteFileInfo); } @Override public List<PartitionInfo> getPartitions(Table table, List<String> partitionNames) { Map<String, Partition> partitionMap = Maps.newHashMap(); IcebergTable icebergTable = (IcebergTable) table; PartitionsTable partitionsTable = (PartitionsTable) MetadataTableUtils. createMetadataTableInstance(icebergTable.getNativeTable(), MetadataTableType.PARTITIONS); if (icebergTable.isUnPartitioned()) { try (CloseableIterable<FileScanTask> tasks = partitionsTable.newScan().planFiles()) { for (FileScanTask task : tasks) { CloseableIterable<StructLike> rows = task.asDataTask().rows(); for (StructLike row : rows) { long lastUpdated = row.get(7, Long.class); Partition partition = new Partition(lastUpdated); return ImmutableList.of(partition); } } return ImmutableList.of(new Partition(-1)); } catch (IOException e) { throw new StarRocksConnectorException("Failed to get partitions for table: " + table.getName(), e); } } else { try (CloseableIterable<FileScanTask> tasks = partitionsTable.newScan().planFiles()) { for (FileScanTask task : tasks) { CloseableIterable<StructLike> rows = task.asDataTask().rows(); for (StructLike row : rows) { StructProjection partitionData = row.get(0, StructProjection.class); int specId = row.get(1, Integer.class); PartitionSpec spec = icebergTable.getNativeTable().specs().get(specId); String partitionName = PartitionUtil.convertIcebergPartitionToPartitionName(spec, partitionData); long lastUpdated = -1; try { lastUpdated = row.get(9, Long.class); } catch (NullPointerException e) { LOG.error("The table [{}.{}] snapshot [{}] has been expired", icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), partitionName, e); } Partition partition = new Partition(lastUpdated); 
partitionMap.put(partitionName, partition); } } } catch (IOException e) { throw new StarRocksConnectorException("Failed to get partitions for table: " + table.getName(), e); } } ImmutableList.Builder<PartitionInfo> partitions = ImmutableList.builder(); partitionNames.forEach(partitionName -> partitions.add(partitionMap.get(partitionName))); return partitions.build(); } @Override public boolean prepareMetadata(MetaPreparationItem item, Tracers tracers) { IcebergFilter key; IcebergTable icebergTable; icebergTable = (IcebergTable) item.getTable(); String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); Optional<Snapshot> snapshot = icebergTable.getSnapshot(); if (snapshot.isEmpty()) { return true; } key = IcebergFilter.of(dbName, tableName, snapshot.get().snapshotId(), item.getPredicate()); if (!preparedTables.add(key)) { return true; } triggerIcebergPlanFilesIfNeeded(key, icebergTable, item.getPredicate(), item.getLimit(), tracers); return true; } private void triggerIcebergPlanFilesIfNeeded(IcebergFilter key, IcebergTable table, ScalarOperator predicate, long limit) { triggerIcebergPlanFilesIfNeeded(key, table, predicate, limit, null); } private void triggerIcebergPlanFilesIfNeeded(IcebergFilter key, IcebergTable table, ScalarOperator predicate, long limit, Tracers tracers) { if (!scannedTables.contains(key)) { try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.processSplit." 
+ key)) { collectTableStatisticsAndCacheIcebergSplit(table, predicate, limit, tracers); } } } public List<PartitionKey> getPrunedPartitions(Table table, ScalarOperator predicate, long limit) { IcebergTable icebergTable = (IcebergTable) table; String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); Optional<Snapshot> snapshot = icebergTable.getSnapshot(); if (!snapshot.isPresent()) { return new ArrayList<>(); } IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshot.get().snapshotId(), predicate); triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit); List<PartitionKey> partitionKeys = new ArrayList<>(); List<FileScanTask> icebergSplitTasks = splitTasks.get(key); if (icebergSplitTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. predicate:[{}]", dbName, tableName, predicate); } Set<List<String>> scannedPartitions = new HashSet<>(); PartitionSpec spec = icebergTable.getNativeTable().spec(); List<Column> partitionColumns = icebergTable.getPartitionColumnsIncludeTransformed(); for (FileScanTask fileScanTask : icebergSplitTasks) { org.apache.iceberg.PartitionData partitionData = (org.apache.iceberg.PartitionData) fileScanTask.file().partition(); List<String> values = PartitionUtil.getIcebergPartitionValues(spec, partitionData); if (values.size() != partitionColumns.size()) { continue; } if (scannedPartitions.contains(values)) { continue; } else { scannedPartitions.add(values); } try { List<com.starrocks.catalog.Type> srTypes = new ArrayList<>(); for (PartitionField partitionField : spec.fields()) { if (partitionField.transform().isVoid()) { continue; } if (!partitionField.transform().isIdentity()) { Type sourceType = spec.schema().findType(partitionField.sourceId()); Type resultType = partitionField.transform().getResultType(sourceType); if (resultType == Types.DateType.get()) { resultType = Types.IntegerType.get(); } 
srTypes.add(fromIcebergType(resultType)); continue; } srTypes.add(icebergTable.getColumn(icebergTable.getPartitionSourceName(spec.schema(), partitionField)).getType()); } if (icebergTable.hasPartitionTransformedEvolution()) { srTypes = partitionColumns.stream() .map(Column::getType) .collect(Collectors.toList()); } partitionKeys.add(createPartitionKeyWithType(values, srTypes, table.getType())); } catch (Exception e) { LOG.error("create partition key failed.", e); throw new StarRocksConnectorException(e.getMessage()); } } return partitionKeys; } private void collectTableStatisticsAndCacheIcebergSplit(Table table, ScalarOperator predicate, long limit, Tracers tracers) { IcebergTable icebergTable = (IcebergTable) table; Optional<Snapshot> snapshot = icebergTable.getSnapshot(); if (!snapshot.isPresent()) { return; } long snapshotId = snapshot.get().snapshotId(); String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshotId, predicate); org.apache.iceberg.Table nativeTbl = icebergTable.getNativeTable(); Types.StructType schema = nativeTbl.schema().asStruct(); Map<String, MetricsModes.MetricsMode> fieldToMetricsMode = getIcebergMetricsConfig(icebergTable); if (!fieldToMetricsMode.isEmpty()) { Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." + nativeTbl + ".write_metrics_mode_default", DEFAULT_WRITE_METRICS_MODE_DEFAULT); Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." + nativeTbl + ".non-default.size", String.valueOf(fieldToMetricsMode.size())); Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." 
+ nativeTbl + ".non-default.columns", fieldToMetricsMode.toString()); } List<ScalarOperator> scalarOperators = Utils.extractConjuncts(predicate); ScalarOperatorToIcebergExpr.IcebergContext icebergContext = new ScalarOperatorToIcebergExpr.IcebergContext(schema); Expression icebergPredicate = new ScalarOperatorToIcebergExpr().convert(scalarOperators, icebergContext); TableScan scan = nativeTbl.newScan() .useSnapshot(snapshotId) .metricsReporter(metricsReporter) .planWith(jobPlanningExecutor); if (enableCollectColumnStatistics()) { scan = scan.includeColumnStats(); } if (icebergPredicate.op() != Expression.Operation.TRUE) { scan = scan.filter(icebergPredicate); } CloseableIterable<FileScanTask> fileScanTaskIterable = TableScanUtil.splitFiles( scan.planFiles(), scan.targetSplitSize()); CloseableIterator<FileScanTask> fileScanTaskIterator = fileScanTaskIterable.iterator(); Iterator<FileScanTask> fileScanTasks; boolean canPruneManifests = limit != -1 && !icebergTable.isV2Format() && onlyHasPartitionPredicate(table, predicate) && limit < Integer.MAX_VALUE && nativeTbl.spec().specId() == 0 && enablePruneManifest(); if (canPruneManifests) { fileScanTasks = Iterators.limit(fileScanTaskIterator, (int) limit); } else { fileScanTasks = fileScanTaskIterator; } List<FileScanTask> icebergScanTasks = Lists.newArrayList(); long totalReadCount = 0; Set<String> filePaths = new HashSet<>(); while (fileScanTasks.hasNext()) { FileScanTask scanTask = fileScanTasks.next(); FileScanTask icebergSplitScanTask = scanTask; if (enableCollectColumnStatistics()) { try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.buildSplitScanTask")) { icebergSplitScanTask = buildIcebergSplitScanTask(scanTask, icebergPredicate, key); } List<Types.NestedField> fullColumns = nativeTbl.schema().columns(); Map<Integer, Type.PrimitiveType> idToTypeMapping = fullColumns.stream() .filter(column -> column.type().isPrimitiveType()) .collect(Collectors.toMap(Types.NestedField::fieldId, column -> 
column.type().asPrimitiveType())); Set<Integer> identityPartitionIds = nativeTbl.spec().fields().stream() .filter(x -> x.transform().isIdentity()) .map(PartitionField::sourceId) .collect(Collectors.toSet()); List<Types.NestedField> nonPartitionPrimitiveColumns = fullColumns.stream() .filter(column -> !identityPartitionIds.contains(column.fieldId()) && column.type().isPrimitiveType()) .collect(toImmutableList()); try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.updateIcebergFileStats")) { statisticProvider.updateIcebergFileStats( icebergTable, scanTask, idToTypeMapping, nonPartitionPrimitiveColumns, key); } } icebergScanTasks.add(icebergSplitScanTask); if (canPruneManifests) { String filePath = icebergSplitScanTask.file().path().toString(); if (!filePaths.contains(filePath)) { filePaths.add(filePath); totalReadCount += scanTask.file().recordCount(); } if (totalReadCount >= limit) { break; } } } try { fileScanTaskIterable.close(); fileScanTaskIterator.close(); } catch (IOException e) { } Optional<ScanReport> metrics = metricsReporter.getReporter(catalogName, dbName, tableName, snapshotId, icebergPredicate); if (metrics.isPresent()) { Tracers.Module module = Tracers.Module.EXTERNAL; String name = "ICEBERG.ScanMetrics." + metrics.get().tableName() + "[" + icebergPredicate + "]"; String value = metrics.get().scanMetrics().toString(); if (tracers == null) { Tracers.record(module, name, value); } else { synchronized (this) { Tracers.record(tracers, module, name, value); } } } splitTasks.put(key, icebergScanTasks); scannedTables.add(key); } /** * To optimize the MetricsModes of the Iceberg tables, it's necessary to display the columns MetricsMode in the * ICEBERG query profile. * <br> * None: * <p> * Under this mode, value_counts, null_value_counts, nan_value_counts, lower_bounds, upper_bounds * are not persisted. * </p> * Counts: * <p> * Under this mode, only value_counts, null_value_counts, nan_value_counts are persisted. 
* </p> * Truncate: * <p> * Under this mode, value_counts, null_value_counts, nan_value_counts and truncated lower_bounds, * upper_bounds are persisted. * </p> * Full: * <p> * Under this mode, value_counts, null_value_counts, nan_value_counts and full lower_bounds, * upper_bounds are persisted. * </p> */ public static Map<String, MetricsModes.MetricsMode> getIcebergMetricsConfig(IcebergTable table) { MetricsModes.MetricsMode defaultMode = MetricsModes.fromString(DEFAULT_WRITE_METRICS_MODE_DEFAULT); MetricsConfig metricsConf = MetricsConfig.forTable(table.getNativeTable()); Map<String, MetricsModes.MetricsMode> filedToMetricsMode = Maps.newHashMap(); for (Types.NestedField field : table.getNativeTable().schema().columns()) { MetricsModes.MetricsMode mode = metricsConf.columnMode(field.name()); if (!mode.equals(defaultMode)) { filedToMetricsMode.put(field.name(), mode); } } return filedToMetricsMode; } @Override public Statistics getTableStatistics(OptimizerContext session, Table table, Map<ColumnRefOperator, Column> columns, List<PartitionKey> partitionKeys, ScalarOperator predicate, long limit) { IcebergTable icebergTable = (IcebergTable) table; Optional<Snapshot> snapshot = icebergTable.getSnapshot(); long snapshotId; if (snapshot.isPresent()) { snapshotId = snapshot.get().snapshotId(); } else { Statistics.Builder statisticsBuilder = Statistics.builder(); statisticsBuilder.setOutputRowCount(1); statisticsBuilder.addColumnStatistics(statisticProvider.buildUnknownColumnStatistics(columns.keySet())); return statisticsBuilder.build(); } IcebergFilter key = IcebergFilter.of( icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), snapshotId, predicate); triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit); if (!session.getSessionVariable().enableIcebergColumnStatistics()) { List<FileScanTask> icebergScanTasks = splitTasks.get(key); if (icebergScanTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for 
table:[{}.{}]. predicate:[{}]", icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), predicate); } try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.calculateCardinality" + key)) { return statisticProvider.getCardinalityStats(columns, icebergScanTasks); } } else { return statisticProvider.getTableStatistics(icebergTable, columns, session, predicate); } } private IcebergSplitScanTask buildIcebergSplitScanTask( FileScanTask fileScanTask, Expression icebergPredicate, IcebergFilter filter) { long offset = fileScanTask.start(); long length = fileScanTask.length(); DataFile dataFileWithoutStats = fileScanTask.file().copyWithoutStats(); DeleteFile[] deleteFiles = fileScanTask.deletes().stream() .map(DeleteFile::copyWithoutStats) .toArray(DeleteFile[]::new); PartitionSpec taskSpec = fileScanTask.spec(); Schema taskSchema = fileScanTask.spec().schema(); String schemaString; String partitionString; FileScanTaskSchema schemaKey = new FileScanTaskSchema(filter.getDatabaseName(), filter.getTableName(), taskSchema.schemaId(), taskSpec.specId()); Pair<String, String> schema = fileScanTaskSchemas.get(schemaKey); if (schema == null) { schemaString = SchemaParser.toJson(fileScanTask.spec().schema()); partitionString = PartitionSpecParser.toJson(fileScanTask.spec()); fileScanTaskSchemas.put(schemaKey, Pair.create(schemaString, partitionString)); } else { schemaString = schema.first; partitionString = schema.second; } ResidualEvaluator residualEvaluator = ResidualEvaluator.of(taskSpec, icebergPredicate, true); BaseFileScanTask baseFileScanTask = new BaseFileScanTask( dataFileWithoutStats, deleteFiles, schemaString, partitionString, residualEvaluator); return new IcebergSplitScanTask(offset, length, baseFileScanTask); } @Override public void refreshTable(String srDbName, Table table, List<String> partitionNames, boolean onlyCachedPartitions) { if (isResourceMappingCatalog(catalogName)) { refreshTableWithResource(table); } else { IcebergTable icebergTable = 
(IcebergTable) table; String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); tables.remove(TableIdentifier.of(dbName, tableName)); try { icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor); } catch (Exception e) { LOG.error("Failed to refresh table {}.{}.{}. invalidate cache", catalogName, dbName, tableName, e); icebergCatalog.invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName)); } } } private void refreshTableWithResource(Table table) { IcebergTable icebergTable = (IcebergTable) table; org.apache.iceberg.Table nativeTable = icebergTable.getNativeTable(); try { if (nativeTable instanceof BaseTable) { BaseTable baseTable = (BaseTable) nativeTable; if (baseTable.operations().refresh() == null) { throw new NoSuchTableException("No such table: %s", nativeTable.name()); } } else { throw new StarRocksConnectorException("Invalid table type of %s, it should be a BaseTable!", nativeTable.name()); } } catch (NoSuchTableException e) { throw new StarRocksConnectorException("No such table %s", nativeTable.name()); } catch (IllegalStateException ei) { throw new StarRocksConnectorException("Refresh table %s with failure, the table under hood" + " may have been dropped. You should re-create the external table. 
cause %s", nativeTable.name(), ei.getMessage()); } icebergTable.resetSnapshot(); } @Override public void finishSink(String dbName, String tableName, List<TSinkCommitInfo> commitInfos) { boolean isOverwrite = false; if (!commitInfos.isEmpty()) { TSinkCommitInfo sinkCommitInfo = commitInfos.get(0); if (sinkCommitInfo.isSetIs_overwrite()) { isOverwrite = sinkCommitInfo.is_overwrite; } } List<TIcebergDataFile> dataFiles = commitInfos.stream() .map(TSinkCommitInfo::getIceberg_data_file).collect(Collectors.toList()); IcebergTable table = (IcebergTable) getTable(dbName, tableName); org.apache.iceberg.Table nativeTbl = table.getNativeTable(); Transaction transaction = nativeTbl.newTransaction(); BatchWrite batchWrite = getBatchWrite(transaction, isOverwrite); PartitionSpec partitionSpec = nativeTbl.spec(); for (TIcebergDataFile dataFile : dataFiles) { Metrics metrics = IcebergApiConverter.buildDataFileMetrics(dataFile); DataFiles.Builder builder = DataFiles.builder(partitionSpec) .withMetrics(metrics) .withPath(dataFile.path) .withFormat(dataFile.format) .withRecordCount(dataFile.record_count) .withFileSizeInBytes(dataFile.file_size_in_bytes) .withSplitOffsets(dataFile.split_offsets); if (partitionSpec.isPartitioned()) { String relativePartitionLocation = getIcebergRelativePartitionPath( nativeTbl.location(), dataFile.partition_path); PartitionData partitionData = partitionDataFromPath( relativePartitionLocation, partitionSpec); builder.withPartition(partitionData); } batchWrite.addFile(builder.build()); } try { batchWrite.commit(); transaction.commitTransaction(); asyncRefreshOthersFeMetadataCache(dbName, tableName); } catch (Exception e) { List<String> toDeleteFiles = dataFiles.stream() .map(TIcebergDataFile::getPath) .collect(Collectors.toList()); icebergCatalog.deleteUncommittedDataFiles(toDeleteFiles); LOG.error("Failed to commit iceberg transaction on {}.{}", dbName, tableName, e); throw new StarRocksConnectorException(e.getMessage()); } finally { 
icebergCatalog.invalidateCacheWithoutTable(new CachingIcebergCatalog.IcebergTableName(dbName, tableName)); } } private void asyncRefreshOthersFeMetadataCache(String dbName, String tableName) { refreshOtherFeExecutor.execute(() -> { LOG.info("Start to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName); try { GlobalStateMgr.getCurrentState().refreshOthersFeTable( new TableName(catalogName, dbName, tableName), new ArrayList<>(), false); } catch (DdlException e) { LOG.error("Failed to refresh others fe iceberg metadata cache {}.{}.{}", catalogName, dbName, tableName, e); throw new StarRocksConnectorException(e.getMessage()); } LOG.info("Finish to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName); }); } public BatchWrite getBatchWrite(Transaction transaction, boolean isOverwrite) { return isOverwrite ? new DynamicOverwrite(transaction) : new Append(transaction); } public static String getIcebergRelativePartitionPath(String tableLocation, String partitionLocation) { tableLocation = tableLocation.endsWith("/") ? 
tableLocation.substring(0, tableLocation.length() - 1) : tableLocation; String tableLocationWithData = tableLocation + "/data/"; String path = PartitionUtil.getSuffixName(tableLocationWithData, partitionLocation); if (path.startsWith("/")) { path = path.substring(1); } if (path.endsWith("/")) { path = path.substring(0, path.length() - 1); } return path; } public static boolean onlyHasPartitionPredicate(Table table, ScalarOperator predicate) { if (predicate == null) { return true; } List<ColumnRefOperator> columnRefOperators = predicate.getColumnRefs(); List<String> partitionColNames = table.getPartitionColumnNames(); for (ColumnRefOperator c : columnRefOperators) { if (!partitionColNames.contains(c.getName())) { return false; } } return true; } private boolean enablePruneManifest() { if (ConnectContext.get() == null) { return false; } if (ConnectContext.get().getSessionVariable() == null) { return false; } return ConnectContext.get().getSessionVariable().isEnablePruneIcebergManifest(); } private boolean enableCollectColumnStatistics() { if (ConnectContext.get() == null) { return false; } if (ConnectContext.get().getSessionVariable() == null) { return false; } return ConnectContext.get().getSessionVariable().enableIcebergColumnStatistics(); } @Override public void clear() { splitTasks.clear(); databases.clear(); tables.clear(); scannedTables.clear(); metricsReporter.clear(); } interface BatchWrite { void addFile(DataFile file); void commit(); } static class Append implements BatchWrite { private final AppendFiles append; public Append(Transaction txn) { append = txn.newAppend(); } @Override public void addFile(DataFile file) { append.appendFile(file); } @Override public void commit() { append.commit(); } } static class DynamicOverwrite implements BatchWrite { private final ReplacePartitions replace; public DynamicOverwrite(Transaction txn) { replace = txn.newReplacePartitions(); } @Override public void addFile(DataFile file) { replace.addFile(file); } @Override 
public void commit() { replace.commit(); } } public static class PartitionData implements StructLike { private final Object[] values; private PartitionData(int size) { this.values = new Object[size]; } @Override public int size() { return values.length; } @Override public <T> T get(int pos, Class<T> javaClass) { return javaClass.cast(values[pos]); } @Override public <T> void set(int pos, T value) { if (value instanceof ByteBuffer) { ByteBuffer buffer = (ByteBuffer) value; byte[] bytes = new byte[buffer.remaining()]; buffer.duplicate().get(bytes); values[pos] = bytes; } else { values[pos] = value; } } @Override public boolean equals(Object other) { if (this == other) { return true; } if (other == null || getClass() != other.getClass()) { return false; } PartitionData that = (PartitionData) other; return Arrays.equals(values, that.values); } @Override public int hashCode() { return Arrays.hashCode(values); } } @Override public CloudConfiguration getCloudConfiguration() { return hdfsEnvironment.getCloudConfiguration(); } private static class FileScanTaskSchema { private final String dbName; private final String tableName; private final int schemaId; private final int specId; public FileScanTaskSchema(String dbName, String tableName, int schemaId, int specId) { this.dbName = dbName; this.tableName = tableName; this.schemaId = schemaId; this.specId = specId; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } FileScanTaskSchema that = (FileScanTaskSchema) o; return schemaId == that.schemaId && specId == that.specId && Objects.equals(dbName, that.dbName) && Objects.equals(tableName, that.tableName); } @Override public int hashCode() { return Objects.hash(dbName, tableName, schemaId, specId); } } }
class IcebergMetadata implements ConnectorMetadata {
    private static final Logger LOG = LogManager.getLogger(IcebergMetadata.class);

    public static final String LOCATION_PROPERTY = "location";
    public static final String FILE_FORMAT = "file_format";
    public static final String COMPRESSION_CODEC = "compression_codec";
    public static final String COMMENT = "comment";

    private final String catalogName;
    private final HdfsEnvironment hdfsEnvironment;
    private final IcebergCatalog icebergCatalog;
    private final IcebergStatisticProvider statisticProvider = new IcebergStatisticProvider();

    // Caches keyed by table / db / (table, snapshot, predicate) respectively.
    private final Map<TableIdentifier, Table> tables = new ConcurrentHashMap<>();
    private final Map<String, Database> databases = new ConcurrentHashMap<>();
    private final Map<IcebergFilter, List<FileScanTask>> splitTasks = new ConcurrentHashMap<>();
    private final Set<IcebergFilter> scannedTables = new HashSet<>();
    private final Set<IcebergFilter> preparedTables = ConcurrentHashMap.newKeySet();

    // Cache of serialized (schema JSON, partition-spec JSON) per (db, table, schemaId, specId).
    private final Map<FileScanTaskSchema, Pair<String, String>> fileScanTaskSchemas = new ConcurrentHashMap<>();
    private final ExecutorService jobPlanningExecutor;
    private final ExecutorService refreshOtherFeExecutor;
    private final IcebergMetricsReporter metricsReporter;

    public IcebergMetadata(String catalogName, HdfsEnvironment hdfsEnvironment, IcebergCatalog icebergCatalog,
                           ExecutorService jobPlanningExecutor, ExecutorService refreshOtherFeExecutor) {
        this.catalogName = catalogName;
        this.hdfsEnvironment = hdfsEnvironment;
        this.icebergCatalog = icebergCatalog;
        this.metricsReporter = new IcebergMetricsReporter();
        this.jobPlanningExecutor = jobPlanningExecutor;
        this.refreshOtherFeExecutor = refreshOtherFeExecutor;
    }

    @Override
    public Table.TableType getTableType() {
        return ICEBERG;
    }

    @Override
    public List<String> listDbNames() {
        return icebergCatalog.listAllDatabases();
    }

    @Override
    public void createDb(String dbName, Map<String, String> properties) throws AlreadyExistsException {
        if (dbExists(dbName)) {
            throw new AlreadyExistsException("Database Already Exists");
        }

        icebergCatalog.createDb(dbName, properties);
    }

    @Override
    public void dropDb(String dbName, boolean isForceDrop) throws MetaNotFoundException {
        // Refuse to drop a non-empty database.
        if (!listTableNames(dbName).isEmpty()) {
            throw new StarRocksConnectorException("Database %s not empty", dbName);
        }

        icebergCatalog.dropDb(dbName);
        databases.remove(dbName);
    }

    @Override
    public Database getDb(String dbName) {
        if (databases.containsKey(dbName)) {
            return databases.get(dbName);
        }
        Database db;
        try {
            db = icebergCatalog.getDB(dbName);
        } catch (NoSuchNamespaceException e) {
            LOG.error("Database {} not found", dbName, e);
            return null;
        }

        databases.put(dbName, db);
        return db;
    }

    @Override
    public List<String> listTableNames(String dbName) {
        return icebergCatalog.listTables(dbName);
    }

    @Override
    public boolean createTable(CreateTableStmt stmt) throws DdlException {
        String dbName = stmt.getDbName();
        String tableName = stmt.getTableName();

        Schema schema = toIcebergApiSchema(stmt.getColumns());
        PartitionDesc partitionDesc = stmt.getPartitionDesc();
        // Iceberg tables only support list-style partition column declarations here.
        List<String> partitionColNames = partitionDesc == null ? Lists.newArrayList() :
                ((ListPartitionDesc) partitionDesc).getPartitionColNames();
        PartitionSpec partitionSpec = parsePartitionFields(schema, partitionColNames);
        Map<String, String> properties = stmt.getProperties() == null ? new HashMap<>() : stmt.getProperties();
        String tableLocation = properties.get(LOCATION_PROPERTY);
        String comment = stmt.getComment();
        if (comment != null && !comment.isEmpty()) {
            properties.put(COMMENT, comment);
        }
        Map<String, String> createTableProperties = IcebergApiConverter.rebuildCreateTableProperties(properties);
        return icebergCatalog.createTable(dbName, tableName, schema, partitionSpec, tableLocation,
                createTableProperties);
    }

    @Override
    public void alterTable(AlterTableStmt stmt) throws UserException {
        String dbName = stmt.getDbName();
        String tableName = stmt.getTableName();
        org.apache.iceberg.Table table = icebergCatalog.getTable(dbName, tableName);

        if (table == null) {
            throw new StarRocksConnectorException(
                    "Failed to load iceberg table: " + stmt.getTbl().toString());
        }

        IcebergAlterTableExecutor executor = new IcebergAlterTableExecutor(stmt, table, icebergCatalog);
        executor.execute();

        synchronized (this) {
            tables.remove(TableIdentifier.of(dbName, tableName));
            try {
                icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor);
            } catch (Exception exception) {
                // Fix: include the exception in the log record; it was previously dropped.
                LOG.error("Failed to refresh caching iceberg table.", exception);
                icebergCatalog.invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName));
            }
            asyncRefreshOthersFeMetadataCache(dbName, tableName);
        }
    }

    @Override
    public void dropTable(DropTableStmt stmt) {
        Table icebergTable = getTable(stmt.getDbName(), stmt.getTableName());
        if (icebergTable == null) {
            return;
        }
        icebergCatalog.dropTable(stmt.getDbName(), stmt.getTableName(), stmt.isForceDrop());
        tables.remove(TableIdentifier.of(stmt.getDbName(), stmt.getTableName()));
        StatisticUtils.dropStatisticsAfterDropTable(icebergTable);
        asyncRefreshOthersFeMetadataCache(stmt.getDbName(), stmt.getTableName());
    }

    /**
     * Returns the cached table, or loads and caches it from the catalog.
     * Returns {@code null} when the table cannot be loaded (logged).
     */
    @Override
    public Table getTable(String dbName, String tblName) {
        TableIdentifier identifier = TableIdentifier.of(dbName, tblName);
        if (tables.containsKey(identifier)) {
            return tables.get(identifier);
        }

        try {
            IcebergCatalogType catalogType =
icebergCatalog.getIcebergCatalogType(); org.apache.iceberg.Table icebergTable = icebergCatalog.getTable(dbName, tblName); Table table = IcebergApiConverter.toIcebergTable(icebergTable, catalogName, dbName, tblName, catalogType.name()); table.setComment(icebergTable.properties().getOrDefault(COMMENT, "")); tables.put(identifier, table); return table; } catch (StarRocksConnectorException | NoSuchTableException e) { LOG.error("Failed to get iceberg table {}", identifier, e); return null; } } @Override public boolean tableExists(String dbName, String tblName) { return icebergCatalog.tableExists(dbName, tblName); } @Override public List<String> listPartitionNames(String dbName, String tblName) { IcebergCatalogType nativeType = icebergCatalog.getIcebergCatalogType(); if (nativeType != HIVE_CATALOG && nativeType != REST_CATALOG && nativeType != GLUE_CATALOG) { throw new StarRocksConnectorException( "Do not support get partitions from catalog type: " + nativeType); } return icebergCatalog.listPartitionNames(dbName, tblName, jobPlanningExecutor); } @Override public List<RemoteFileInfo> getRemoteFileInfos(Table table, List<PartitionKey> partitionKeys, long snapshotId, ScalarOperator predicate, List<String> fieldNames, long limit) { return getRemoteFileInfos((IcebergTable) table, snapshotId, predicate, limit); } private List<RemoteFileInfo> getRemoteFileInfos(IcebergTable table, long snapshotId, ScalarOperator predicate, long limit) { RemoteFileInfo remoteFileInfo = new RemoteFileInfo(); String dbName = table.getRemoteDbName(); String tableName = table.getRemoteTableName(); IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshotId, predicate); triggerIcebergPlanFilesIfNeeded(key, table, predicate, limit); List<FileScanTask> icebergScanTasks = splitTasks.get(key); if (icebergScanTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. 
predicate:[{}]", dbName, tableName, predicate); } List<RemoteFileDesc> remoteFileDescs = Lists.newArrayList(RemoteFileDesc.createIcebergRemoteFileDesc(icebergScanTasks)); remoteFileInfo.setFiles(remoteFileDescs); return Lists.newArrayList(remoteFileInfo); } @Override public List<PartitionInfo> getPartitions(Table table, List<String> partitionNames) { Map<String, Partition> partitionMap = Maps.newHashMap(); IcebergTable icebergTable = (IcebergTable) table; PartitionsTable partitionsTable = (PartitionsTable) MetadataTableUtils. createMetadataTableInstance(icebergTable.getNativeTable(), MetadataTableType.PARTITIONS); if (icebergTable.isUnPartitioned()) { try (CloseableIterable<FileScanTask> tasks = partitionsTable.newScan().planFiles()) { for (FileScanTask task : tasks) { CloseableIterable<StructLike> rows = task.asDataTask().rows(); for (StructLike row : rows) { long lastUpdated = row.get(7, Long.class); Partition partition = new Partition(lastUpdated); return ImmutableList.of(partition); } } return ImmutableList.of(new Partition(-1)); } catch (IOException e) { throw new StarRocksConnectorException("Failed to get partitions for table: " + table.getName(), e); } } else { try (CloseableIterable<FileScanTask> tasks = partitionsTable.newScan().planFiles()) { for (FileScanTask task : tasks) { CloseableIterable<StructLike> rows = task.asDataTask().rows(); for (StructLike row : rows) { StructProjection partitionData = row.get(0, StructProjection.class); int specId = row.get(1, Integer.class); PartitionSpec spec = icebergTable.getNativeTable().specs().get(specId); String partitionName = PartitionUtil.convertIcebergPartitionToPartitionName(spec, partitionData); long lastUpdated = -1; try { lastUpdated = row.get(9, Long.class); } catch (NullPointerException e) { LOG.error("The table [{}.{}] snapshot [{}] has been expired", icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), partitionName, e); } Partition partition = new Partition(lastUpdated); 
partitionMap.put(partitionName, partition); } } } catch (IOException e) { throw new StarRocksConnectorException("Failed to get partitions for table: " + table.getName(), e); } } ImmutableList.Builder<PartitionInfo> partitions = ImmutableList.builder(); partitionNames.forEach(partitionName -> partitions.add(partitionMap.get(partitionName))); return partitions.build(); } @Override public boolean prepareMetadata(MetaPreparationItem item, Tracers tracers) { IcebergFilter key; IcebergTable icebergTable; icebergTable = (IcebergTable) item.getTable(); String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); Optional<Snapshot> snapshot = icebergTable.getSnapshot(); if (snapshot.isEmpty()) { return true; } key = IcebergFilter.of(dbName, tableName, snapshot.get().snapshotId(), item.getPredicate()); if (!preparedTables.add(key)) { return true; } triggerIcebergPlanFilesIfNeeded(key, icebergTable, item.getPredicate(), item.getLimit(), tracers); return true; } private void triggerIcebergPlanFilesIfNeeded(IcebergFilter key, IcebergTable table, ScalarOperator predicate, long limit) { triggerIcebergPlanFilesIfNeeded(key, table, predicate, limit, null); } private void triggerIcebergPlanFilesIfNeeded(IcebergFilter key, IcebergTable table, ScalarOperator predicate, long limit, Tracers tracers) { if (!scannedTables.contains(key)) { try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.processSplit." 
+ key)) { collectTableStatisticsAndCacheIcebergSplit(table, predicate, limit, tracers); } } } public List<PartitionKey> getPrunedPartitions(Table table, ScalarOperator predicate, long limit) { IcebergTable icebergTable = (IcebergTable) table; String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); Optional<Snapshot> snapshot = icebergTable.getSnapshot(); if (!snapshot.isPresent()) { return new ArrayList<>(); } IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshot.get().snapshotId(), predicate); triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit); List<PartitionKey> partitionKeys = new ArrayList<>(); List<FileScanTask> icebergSplitTasks = splitTasks.get(key); if (icebergSplitTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. predicate:[{}]", dbName, tableName, predicate); } Set<List<String>> scannedPartitions = new HashSet<>(); PartitionSpec spec = icebergTable.getNativeTable().spec(); List<Column> partitionColumns = icebergTable.getPartitionColumnsIncludeTransformed(); for (FileScanTask fileScanTask : icebergSplitTasks) { org.apache.iceberg.PartitionData partitionData = (org.apache.iceberg.PartitionData) fileScanTask.file().partition(); List<String> values = PartitionUtil.getIcebergPartitionValues(spec, partitionData); if (values.size() != partitionColumns.size()) { continue; } if (scannedPartitions.contains(values)) { continue; } else { scannedPartitions.add(values); } try { List<com.starrocks.catalog.Type> srTypes = new ArrayList<>(); for (PartitionField partitionField : spec.fields()) { if (partitionField.transform().isVoid()) { continue; } if (!partitionField.transform().isIdentity()) { Type sourceType = spec.schema().findType(partitionField.sourceId()); Type resultType = partitionField.transform().getResultType(sourceType); if (resultType == Types.DateType.get()) { resultType = Types.IntegerType.get(); } 
srTypes.add(fromIcebergType(resultType)); continue; } srTypes.add(icebergTable.getColumn(icebergTable.getPartitionSourceName(spec.schema(), partitionField)).getType()); } if (icebergTable.hasPartitionTransformedEvolution()) { srTypes = partitionColumns.stream() .map(Column::getType) .collect(Collectors.toList()); } partitionKeys.add(createPartitionKeyWithType(values, srTypes, table.getType())); } catch (Exception e) { LOG.error("create partition key failed.", e); throw new StarRocksConnectorException(e.getMessage()); } } return partitionKeys; } private void collectTableStatisticsAndCacheIcebergSplit(Table table, ScalarOperator predicate, long limit, Tracers tracers) { IcebergTable icebergTable = (IcebergTable) table; Optional<Snapshot> snapshot = icebergTable.getSnapshot(); if (!snapshot.isPresent()) { return; } long snapshotId = snapshot.get().snapshotId(); String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshotId, predicate); org.apache.iceberg.Table nativeTbl = icebergTable.getNativeTable(); Types.StructType schema = nativeTbl.schema().asStruct(); Map<String, MetricsModes.MetricsMode> fieldToMetricsMode = getIcebergMetricsConfig(icebergTable); if (!fieldToMetricsMode.isEmpty()) { Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." + nativeTbl + ".write_metrics_mode_default", DEFAULT_WRITE_METRICS_MODE_DEFAULT); Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." + nativeTbl + ".non-default.size", String.valueOf(fieldToMetricsMode.size())); Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." 
+ nativeTbl + ".non-default.columns", fieldToMetricsMode.toString()); } List<ScalarOperator> scalarOperators = Utils.extractConjuncts(predicate); ScalarOperatorToIcebergExpr.IcebergContext icebergContext = new ScalarOperatorToIcebergExpr.IcebergContext(schema); Expression icebergPredicate = new ScalarOperatorToIcebergExpr().convert(scalarOperators, icebergContext); TableScan scan = nativeTbl.newScan() .useSnapshot(snapshotId) .metricsReporter(metricsReporter) .planWith(jobPlanningExecutor); if (enableCollectColumnStatistics()) { scan = scan.includeColumnStats(); } if (icebergPredicate.op() != Expression.Operation.TRUE) { scan = scan.filter(icebergPredicate); } CloseableIterable<FileScanTask> fileScanTaskIterable = TableScanUtil.splitFiles( scan.planFiles(), scan.targetSplitSize()); CloseableIterator<FileScanTask> fileScanTaskIterator = fileScanTaskIterable.iterator(); Iterator<FileScanTask> fileScanTasks; boolean canPruneManifests = limit != -1 && !icebergTable.isV2Format() && onlyHasPartitionPredicate(table, predicate) && limit < Integer.MAX_VALUE && nativeTbl.spec().specId() == 0 && enablePruneManifest(); if (canPruneManifests) { fileScanTasks = Iterators.limit(fileScanTaskIterator, (int) limit); } else { fileScanTasks = fileScanTaskIterator; } List<FileScanTask> icebergScanTasks = Lists.newArrayList(); long totalReadCount = 0; Set<String> filePaths = new HashSet<>(); while (fileScanTasks.hasNext()) { FileScanTask scanTask = fileScanTasks.next(); FileScanTask icebergSplitScanTask = scanTask; if (enableCollectColumnStatistics()) { try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.buildSplitScanTask")) { icebergSplitScanTask = buildIcebergSplitScanTask(scanTask, icebergPredicate, key); } List<Types.NestedField> fullColumns = nativeTbl.schema().columns(); Map<Integer, Type.PrimitiveType> idToTypeMapping = fullColumns.stream() .filter(column -> column.type().isPrimitiveType()) .collect(Collectors.toMap(Types.NestedField::fieldId, column -> 
column.type().asPrimitiveType())); Set<Integer> identityPartitionIds = nativeTbl.spec().fields().stream() .filter(x -> x.transform().isIdentity()) .map(PartitionField::sourceId) .collect(Collectors.toSet()); List<Types.NestedField> nonPartitionPrimitiveColumns = fullColumns.stream() .filter(column -> !identityPartitionIds.contains(column.fieldId()) && column.type().isPrimitiveType()) .collect(toImmutableList()); try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.updateIcebergFileStats")) { statisticProvider.updateIcebergFileStats( icebergTable, scanTask, idToTypeMapping, nonPartitionPrimitiveColumns, key); } } icebergScanTasks.add(icebergSplitScanTask); if (canPruneManifests) { String filePath = icebergSplitScanTask.file().path().toString(); if (!filePaths.contains(filePath)) { filePaths.add(filePath); totalReadCount += scanTask.file().recordCount(); } if (totalReadCount >= limit) { break; } } } try { fileScanTaskIterable.close(); fileScanTaskIterator.close(); } catch (IOException e) { } Optional<ScanReport> metrics = metricsReporter.getReporter(catalogName, dbName, tableName, snapshotId, icebergPredicate); if (metrics.isPresent()) { Tracers.Module module = Tracers.Module.EXTERNAL; String name = "ICEBERG.ScanMetrics." + metrics.get().tableName() + "[" + icebergPredicate + "]"; String value = metrics.get().scanMetrics().toString(); if (tracers == null) { Tracers.record(module, name, value); } else { synchronized (this) { Tracers.record(tracers, module, name, value); } } } splitTasks.put(key, icebergScanTasks); scannedTables.add(key); } /** * To optimize the MetricsModes of the Iceberg tables, it's necessary to display the columns MetricsMode in the * ICEBERG query profile. * <br> * None: * <p> * Under this mode, value_counts, null_value_counts, nan_value_counts, lower_bounds, upper_bounds * are not persisted. * </p> * Counts: * <p> * Under this mode, only value_counts, null_value_counts, nan_value_counts are persisted. 
* </p> * Truncate: * <p> * Under this mode, value_counts, null_value_counts, nan_value_counts and truncated lower_bounds, * upper_bounds are persisted. * </p> * Full: * <p> * Under this mode, value_counts, null_value_counts, nan_value_counts and full lower_bounds, * upper_bounds are persisted. * </p> */ public static Map<String, MetricsModes.MetricsMode> getIcebergMetricsConfig(IcebergTable table) { MetricsModes.MetricsMode defaultMode = MetricsModes.fromString(DEFAULT_WRITE_METRICS_MODE_DEFAULT); MetricsConfig metricsConf = MetricsConfig.forTable(table.getNativeTable()); Map<String, MetricsModes.MetricsMode> filedToMetricsMode = Maps.newHashMap(); for (Types.NestedField field : table.getNativeTable().schema().columns()) { MetricsModes.MetricsMode mode = metricsConf.columnMode(field.name()); if (!mode.equals(defaultMode)) { filedToMetricsMode.put(field.name(), mode); } } return filedToMetricsMode; } @Override public Statistics getTableStatistics(OptimizerContext session, Table table, Map<ColumnRefOperator, Column> columns, List<PartitionKey> partitionKeys, ScalarOperator predicate, long limit) { IcebergTable icebergTable = (IcebergTable) table; Optional<Snapshot> snapshot = icebergTable.getSnapshot(); long snapshotId; if (snapshot.isPresent()) { snapshotId = snapshot.get().snapshotId(); } else { Statistics.Builder statisticsBuilder = Statistics.builder(); statisticsBuilder.setOutputRowCount(1); statisticsBuilder.addColumnStatistics(statisticProvider.buildUnknownColumnStatistics(columns.keySet())); return statisticsBuilder.build(); } IcebergFilter key = IcebergFilter.of( icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), snapshotId, predicate); triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit); if (!session.getSessionVariable().enableIcebergColumnStatistics()) { List<FileScanTask> icebergScanTasks = splitTasks.get(key); if (icebergScanTasks == null) { throw new StarRocksConnectorException("Missing iceberg split task for 
table:[{}.{}]. predicate:[{}]", icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), predicate); } try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.calculateCardinality" + key)) { return statisticProvider.getCardinalityStats(columns, icebergScanTasks); } } else { return statisticProvider.getTableStatistics(icebergTable, columns, session, predicate); } } private IcebergSplitScanTask buildIcebergSplitScanTask( FileScanTask fileScanTask, Expression icebergPredicate, IcebergFilter filter) { long offset = fileScanTask.start(); long length = fileScanTask.length(); DataFile dataFileWithoutStats = fileScanTask.file().copyWithoutStats(); DeleteFile[] deleteFiles = fileScanTask.deletes().stream() .map(DeleteFile::copyWithoutStats) .toArray(DeleteFile[]::new); PartitionSpec taskSpec = fileScanTask.spec(); Schema taskSchema = fileScanTask.spec().schema(); String schemaString; String partitionString; FileScanTaskSchema schemaKey = new FileScanTaskSchema(filter.getDatabaseName(), filter.getTableName(), taskSchema.schemaId(), taskSpec.specId()); Pair<String, String> schema = fileScanTaskSchemas.get(schemaKey); if (schema == null) { schemaString = SchemaParser.toJson(fileScanTask.spec().schema()); partitionString = PartitionSpecParser.toJson(fileScanTask.spec()); fileScanTaskSchemas.put(schemaKey, Pair.create(schemaString, partitionString)); } else { schemaString = schema.first; partitionString = schema.second; } ResidualEvaluator residualEvaluator = ResidualEvaluator.of(taskSpec, icebergPredicate, true); BaseFileScanTask baseFileScanTask = new BaseFileScanTask( dataFileWithoutStats, deleteFiles, schemaString, partitionString, residualEvaluator); return new IcebergSplitScanTask(offset, length, baseFileScanTask); } @Override public void refreshTable(String srDbName, Table table, List<String> partitionNames, boolean onlyCachedPartitions) { if (isResourceMappingCatalog(catalogName)) { refreshTableWithResource(table); } else { IcebergTable icebergTable = 
(IcebergTable) table; String dbName = icebergTable.getRemoteDbName(); String tableName = icebergTable.getRemoteTableName(); tables.remove(TableIdentifier.of(dbName, tableName)); try { icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor); } catch (Exception e) { LOG.error("Failed to refresh table {}.{}.{}. invalidate cache", catalogName, dbName, tableName, e); icebergCatalog.invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName)); } } } private void refreshTableWithResource(Table table) { IcebergTable icebergTable = (IcebergTable) table; org.apache.iceberg.Table nativeTable = icebergTable.getNativeTable(); try { if (nativeTable instanceof BaseTable) { BaseTable baseTable = (BaseTable) nativeTable; if (baseTable.operations().refresh() == null) { throw new NoSuchTableException("No such table: %s", nativeTable.name()); } } else { throw new StarRocksConnectorException("Invalid table type of %s, it should be a BaseTable!", nativeTable.name()); } } catch (NoSuchTableException e) { throw new StarRocksConnectorException("No such table %s", nativeTable.name()); } catch (IllegalStateException ei) { throw new StarRocksConnectorException("Refresh table %s with failure, the table under hood" + " may have been dropped. You should re-create the external table. 
cause %s", nativeTable.name(), ei.getMessage()); } icebergTable.resetSnapshot(); } @Override public void finishSink(String dbName, String tableName, List<TSinkCommitInfo> commitInfos) { boolean isOverwrite = false; if (!commitInfos.isEmpty()) { TSinkCommitInfo sinkCommitInfo = commitInfos.get(0); if (sinkCommitInfo.isSetIs_overwrite()) { isOverwrite = sinkCommitInfo.is_overwrite; } } List<TIcebergDataFile> dataFiles = commitInfos.stream() .map(TSinkCommitInfo::getIceberg_data_file).collect(Collectors.toList()); IcebergTable table = (IcebergTable) getTable(dbName, tableName); org.apache.iceberg.Table nativeTbl = table.getNativeTable(); Transaction transaction = nativeTbl.newTransaction(); BatchWrite batchWrite = getBatchWrite(transaction, isOverwrite); PartitionSpec partitionSpec = nativeTbl.spec(); for (TIcebergDataFile dataFile : dataFiles) { Metrics metrics = IcebergApiConverter.buildDataFileMetrics(dataFile); DataFiles.Builder builder = DataFiles.builder(partitionSpec) .withMetrics(metrics) .withPath(dataFile.path) .withFormat(dataFile.format) .withRecordCount(dataFile.record_count) .withFileSizeInBytes(dataFile.file_size_in_bytes) .withSplitOffsets(dataFile.split_offsets); if (partitionSpec.isPartitioned()) { String relativePartitionLocation = getIcebergRelativePartitionPath( nativeTbl.location(), dataFile.partition_path); PartitionData partitionData = partitionDataFromPath( relativePartitionLocation, partitionSpec); builder.withPartition(partitionData); } batchWrite.addFile(builder.build()); } try { batchWrite.commit(); transaction.commitTransaction(); asyncRefreshOthersFeMetadataCache(dbName, tableName); } catch (Exception e) { List<String> toDeleteFiles = dataFiles.stream() .map(TIcebergDataFile::getPath) .collect(Collectors.toList()); icebergCatalog.deleteUncommittedDataFiles(toDeleteFiles); LOG.error("Failed to commit iceberg transaction on {}.{}", dbName, tableName, e); throw new StarRocksConnectorException(e.getMessage()); } finally { 
icebergCatalog.invalidateCacheWithoutTable(new CachingIcebergCatalog.IcebergTableName(dbName, tableName)); } } private void asyncRefreshOthersFeMetadataCache(String dbName, String tableName) { refreshOtherFeExecutor.execute(() -> { LOG.info("Start to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName); try { GlobalStateMgr.getCurrentState().refreshOthersFeTable( new TableName(catalogName, dbName, tableName), new ArrayList<>(), false); } catch (DdlException e) { LOG.error("Failed to refresh others fe iceberg metadata cache {}.{}.{}", catalogName, dbName, tableName, e); throw new StarRocksConnectorException(e.getMessage()); } LOG.info("Finish to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName); }); } public BatchWrite getBatchWrite(Transaction transaction, boolean isOverwrite) { return isOverwrite ? new DynamicOverwrite(transaction) : new Append(transaction); } public static String getIcebergRelativePartitionPath(String tableLocation, String partitionLocation) { tableLocation = tableLocation.endsWith("/") ? 
tableLocation.substring(0, tableLocation.length() - 1) : tableLocation; String tableLocationWithData = tableLocation + "/data/"; String path = PartitionUtil.getSuffixName(tableLocationWithData, partitionLocation); if (path.startsWith("/")) { path = path.substring(1); } if (path.endsWith("/")) { path = path.substring(0, path.length() - 1); } return path; } public static boolean onlyHasPartitionPredicate(Table table, ScalarOperator predicate) { if (predicate == null) { return true; } List<ColumnRefOperator> columnRefOperators = predicate.getColumnRefs(); List<String> partitionColNames = table.getPartitionColumnNames(); for (ColumnRefOperator c : columnRefOperators) { if (!partitionColNames.contains(c.getName())) { return false; } } return true; } private boolean enablePruneManifest() { if (ConnectContext.get() == null) { return false; } if (ConnectContext.get().getSessionVariable() == null) { return false; } return ConnectContext.get().getSessionVariable().isEnablePruneIcebergManifest(); } private boolean enableCollectColumnStatistics() { if (ConnectContext.get() == null) { return false; } if (ConnectContext.get().getSessionVariable() == null) { return false; } return ConnectContext.get().getSessionVariable().enableIcebergColumnStatistics(); } @Override public void clear() { splitTasks.clear(); databases.clear(); tables.clear(); scannedTables.clear(); metricsReporter.clear(); } interface BatchWrite { void addFile(DataFile file); void commit(); } static class Append implements BatchWrite { private final AppendFiles append; public Append(Transaction txn) { append = txn.newAppend(); } @Override public void addFile(DataFile file) { append.appendFile(file); } @Override public void commit() { append.commit(); } } static class DynamicOverwrite implements BatchWrite { private final ReplacePartitions replace; public DynamicOverwrite(Transaction txn) { replace = txn.newReplacePartitions(); } @Override public void addFile(DataFile file) { replace.addFile(file); } @Override 
public void commit() { replace.commit(); } } public static class PartitionData implements StructLike { private final Object[] values; private PartitionData(int size) { this.values = new Object[size]; } @Override public int size() { return values.length; } @Override public <T> T get(int pos, Class<T> javaClass) { return javaClass.cast(values[pos]); } @Override public <T> void set(int pos, T value) { if (value instanceof ByteBuffer) { ByteBuffer buffer = (ByteBuffer) value; byte[] bytes = new byte[buffer.remaining()]; buffer.duplicate().get(bytes); values[pos] = bytes; } else { values[pos] = value; } } @Override public boolean equals(Object other) { if (this == other) { return true; } if (other == null || getClass() != other.getClass()) { return false; } PartitionData that = (PartitionData) other; return Arrays.equals(values, that.values); } @Override public int hashCode() { return Arrays.hashCode(values); } } @Override public CloudConfiguration getCloudConfiguration() { return hdfsEnvironment.getCloudConfiguration(); } private static class FileScanTaskSchema { private final String dbName; private final String tableName; private final int schemaId; private final int specId; public FileScanTaskSchema(String dbName, String tableName, int schemaId, int specId) { this.dbName = dbName; this.tableName = tableName; this.schemaId = schemaId; this.specId = specId; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } FileScanTaskSchema that = (FileScanTaskSchema) o; return schemaId == that.schemaId && specId == that.specId && Objects.equals(dbName, that.dbName) && Objects.equals(tableName, that.tableName); } @Override public int hashCode() { return Objects.hash(dbName, tableName, schemaId, specId); } } }
I guess empty String is better.
public String getSchema() throws SQLException { return null; }
return null;
public String getSchema() throws SQLException { return ""; }
class CircuitBreakerConnection extends AbstractUnsupportedOperationConnection { @Override public DatabaseMetaData getMetaData() { return new CircuitBreakerDatabaseMetaData(); } @Override public void setReadOnly(final boolean readOnly) { } @Override public boolean isReadOnly() { return false; } @Override public void setCatalog(final String catalog) throws SQLException { } @Override public String getCatalog() throws SQLException { return null; } @Override public void setTransactionIsolation(final int level) { } @Override public int getTransactionIsolation() { return Connection.TRANSACTION_NONE; } @Override public SQLWarning getWarnings() { return null; } @Override public void clearWarnings() { } @Override public void setAutoCommit(final boolean autoCommit) { } @Override public boolean getAutoCommit() { return false; } @Override public void commit() { } @Override public void rollback() { } @Override public void setHoldability(final int holdability) { } @Override public int getHoldability() { return 0; } @Override public PreparedStatement prepareStatement(final String sql) { return new CircuitBreakerPreparedStatement(); } @Override public PreparedStatement prepareStatement(final String sql, final int resultSetType, final int resultSetConcurrency) { return new CircuitBreakerPreparedStatement(); } @Override public PreparedStatement prepareStatement(final String sql, final int resultSetType, final int resultSetConcurrency, final int resultSetHoldability) { return new CircuitBreakerPreparedStatement(); } @Override public PreparedStatement prepareStatement(final String sql, final int autoGeneratedKeys) { return new CircuitBreakerPreparedStatement(); } @Override public PreparedStatement prepareStatement(final String sql, final int[] columnIndexes) { return new CircuitBreakerPreparedStatement(); } @Override public PreparedStatement prepareStatement(final String sql, final String[] columnNames) { return new CircuitBreakerPreparedStatement(); } @Override public boolean 
isValid(final int timeout) { return true; } @Override public void setSchema(final String schema) throws SQLException { } @Override @Override public Statement createStatement() { return new CircuitBreakerStatement(); } @Override public Statement createStatement(final int resultSetType, final int resultSetConcurrency) { return new CircuitBreakerStatement(); } @Override public Statement createStatement(final int resultSetType, final int resultSetConcurrency, final int resultSetHoldability) { return new CircuitBreakerStatement(); } @Override public void close() { } @Override public boolean isClosed() { return false; } }
class CircuitBreakerConnection extends AbstractUnsupportedOperationConnection { @Override public DatabaseMetaData getMetaData() { return new CircuitBreakerDatabaseMetaData(); } @Override public void setReadOnly(final boolean readOnly) { } @Override public boolean isReadOnly() { return false; } @Override public void setCatalog(final String catalog) throws SQLException { } @Override public String getCatalog() throws SQLException { return ""; } @Override public void setTransactionIsolation(final int level) { } @Override public int getTransactionIsolation() { return Connection.TRANSACTION_NONE; } @Override public SQLWarning getWarnings() { return null; } @Override public void clearWarnings() { } @Override public void setAutoCommit(final boolean autoCommit) { } @Override public boolean getAutoCommit() { return false; } @Override public void commit() { } @Override public void rollback() { } @Override public void setHoldability(final int holdability) { } @Override public int getHoldability() { return 0; } @Override public PreparedStatement prepareStatement(final String sql) { return new CircuitBreakerPreparedStatement(); } @Override public PreparedStatement prepareStatement(final String sql, final int resultSetType, final int resultSetConcurrency) { return new CircuitBreakerPreparedStatement(); } @Override public PreparedStatement prepareStatement(final String sql, final int resultSetType, final int resultSetConcurrency, final int resultSetHoldability) { return new CircuitBreakerPreparedStatement(); } @Override public PreparedStatement prepareStatement(final String sql, final int autoGeneratedKeys) { return new CircuitBreakerPreparedStatement(); } @Override public PreparedStatement prepareStatement(final String sql, final int[] columnIndexes) { return new CircuitBreakerPreparedStatement(); } @Override public PreparedStatement prepareStatement(final String sql, final String[] columnNames) { return new CircuitBreakerPreparedStatement(); } @Override public boolean 
isValid(final int timeout) { return true; } @Override public void setSchema(final String schema) throws SQLException { } @Override @Override public Statement createStatement() { return new CircuitBreakerStatement(); } @Override public Statement createStatement(final int resultSetType, final int resultSetConcurrency) { return new CircuitBreakerStatement(); } @Override public Statement createStatement(final int resultSetType, final int resultSetConcurrency, final int resultSetHoldability) { return new CircuitBreakerStatement(); } @Override public void close() { } @Override public boolean isClosed() { return false; } }
If all the user-provided tablet_ids are invalid, will here throw an `Unknown Error` to the MySQL client?
public PlanFragment visitPhysicalOlapScan(OptExpression optExpr, ExecPlan context) { PhysicalOlapScanOperator node = (PhysicalOlapScanOperator) optExpr.getOp(); OlapTable referenceTable = (OlapTable) node.getTable(); context.getDescTbl().addReferencedTable(referenceTable); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(referenceTable); OlapScanNode scanNode = new OlapScanNode(context.getNextNodeId(), tupleDescriptor, "OlapScanNode"); scanNode.setLimit(node.getLimit()); scanNode.computeStatistics(optExpr.getStatistics()); try { scanNode.updateScanInfo(node.getSelectedPartitionId(), node.getSelectedTabletId(), node.getSelectedIndexId()); long selectedIndexId = node.getSelectedIndexId(); long totalTabletsNum = 0; long localBeId = -1; if (Config.enable_local_replica_selection) { localBeId = GlobalStateMgr.getCurrentSystemInfo() .getBackendIdByHost(FrontendOptions.getLocalHostAddress()); } List<Long> selectedNonEmptyPartitionIds = node.getSelectedPartitionId().stream().filter(p -> { List<Long> selectTabletIds = scanNode.getPartitionToScanTabletMap().get(p); return selectTabletIds != null && !selectTabletIds.isEmpty(); }).collect(Collectors.toList()); scanNode.setSelectedPartitionIds(selectedNonEmptyPartitionIds); for (Long partitionId : scanNode.getSelectedPartitionIds()) { List<Long> selectTabletIds = scanNode.getPartitionToScanTabletMap().get(partitionId); Preconditions.checkState(selectTabletIds != null && !selectTabletIds.isEmpty()); final Partition partition = referenceTable.getPartition(partitionId); final MaterializedIndex selectedTable = partition.getIndex(selectedIndexId); List<Long> allTabletIds = selectedTable.getTabletIdsInOrder(); Map<Long, Integer> tabletId2BucketSeq = Maps.newHashMap(); for (int i = 0; i < allTabletIds.size(); i++) { tabletId2BucketSeq.put(allTabletIds.get(i), i); } totalTabletsNum += selectedTable.getTablets().size(); scanNode.setTabletId2BucketSeq(tabletId2BucketSeq); 
List<Tablet> tablets = selectTabletIds.stream().map(selectedTable::getTablet).collect(Collectors.toList()); scanNode.addScanRangeLocations(partition, selectedTable, tablets, localBeId); } scanNode.setTotalTabletsNum(totalTabletsNum); } catch (UserException e) { throw new StarRocksPlannerException( "Build Exec OlapScanNode fail, scan info is invalid," + e.getMessage(), INTERNAL_ERROR); } for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } for (ColumnRefOperator entry : node.getGlobalDictStringColumns()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getId())); slotDescriptor.setIsNullable(entry.isNullable()); slotDescriptor.setType(entry.getType()); slotDescriptor.setIsMaterialized(false); context.getColRefToExpr().put(entry, new SlotRef(entry.toString(), slotDescriptor)); } List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator predicate : predicates) { scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } tupleDescriptor.computeMemLayout(); setUnUsedOutputColumns(node, scanNode, predicates, referenceTable); scanNode.setIsSortedByKeyPerTablet(node.needSortedByKeyPerTablet()); scanNode.setIsPreAggregation(node.isPreAggregation(), node.getTurnOffReason()); scanNode.setDictStringIdToIntIds(node.getDictStringIdToIntIds()); 
scanNode.updateAppliedDictStringColumns(node.getGlobalDicts().stream(). map(entry -> entry.first).collect(Collectors.toSet())); List<ColumnRefOperator> bucketColumns = getShuffleColumns(node.getDistributionSpec()); boolean useAllBucketColumns = bucketColumns.stream().allMatch(c -> node.getColRefToColumnMetaMap().containsKey(c)); if (useAllBucketColumns) { List<Expr> bucketExprs = bucketColumns.stream() .map(e -> ScalarOperatorToExpr.buildExecExpression(e, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()))) .collect(Collectors.toList()); scanNode.setBucketExprs(bucketExprs); scanNode.setBucketColumns(bucketColumns); } context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM); fragment.setQueryGlobalDicts(node.getGlobalDicts()); context.getFragments().add(fragment); return fragment; }
Preconditions.checkState(selectTabletIds != null && !selectTabletIds.isEmpty());
public PlanFragment visitPhysicalOlapScan(OptExpression optExpr, ExecPlan context) { PhysicalOlapScanOperator node = (PhysicalOlapScanOperator) optExpr.getOp(); OlapTable referenceTable = (OlapTable) node.getTable(); context.getDescTbl().addReferencedTable(referenceTable); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(referenceTable); OlapScanNode scanNode = new OlapScanNode(context.getNextNodeId(), tupleDescriptor, "OlapScanNode"); scanNode.setLimit(node.getLimit()); scanNode.computeStatistics(optExpr.getStatistics()); try { scanNode.updateScanInfo(node.getSelectedPartitionId(), node.getSelectedTabletId(), node.getSelectedIndexId()); long selectedIndexId = node.getSelectedIndexId(); long totalTabletsNum = 0; long localBeId = -1; if (Config.enable_local_replica_selection) { localBeId = GlobalStateMgr.getCurrentSystemInfo() .getBackendIdByHost(FrontendOptions.getLocalHostAddress()); } List<Long> selectedNonEmptyPartitionIds = node.getSelectedPartitionId().stream().filter(p -> { List<Long> selectTabletIds = scanNode.getPartitionToScanTabletMap().get(p); return selectTabletIds != null && !selectTabletIds.isEmpty(); }).collect(Collectors.toList()); scanNode.setSelectedPartitionIds(selectedNonEmptyPartitionIds); for (Long partitionId : scanNode.getSelectedPartitionIds()) { List<Long> selectTabletIds = scanNode.getPartitionToScanTabletMap().get(partitionId); Preconditions.checkState(selectTabletIds != null && !selectTabletIds.isEmpty()); final Partition partition = referenceTable.getPartition(partitionId); final MaterializedIndex selectedTable = partition.getIndex(selectedIndexId); List<Long> allTabletIds = selectedTable.getTabletIdsInOrder(); Map<Long, Integer> tabletId2BucketSeq = Maps.newHashMap(); for (int i = 0; i < allTabletIds.size(); i++) { tabletId2BucketSeq.put(allTabletIds.get(i), i); } totalTabletsNum += selectedTable.getTablets().size(); scanNode.setTabletId2BucketSeq(tabletId2BucketSeq); 
List<Tablet> tablets = selectTabletIds.stream().map(selectedTable::getTablet).collect(Collectors.toList()); scanNode.addScanRangeLocations(partition, selectedTable, tablets, localBeId); } scanNode.setTotalTabletsNum(totalTabletsNum); } catch (UserException e) { throw new StarRocksPlannerException( "Build Exec OlapScanNode fail, scan info is invalid," + e.getMessage(), INTERNAL_ERROR); } for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } for (ColumnRefOperator entry : node.getGlobalDictStringColumns()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getId())); slotDescriptor.setIsNullable(entry.isNullable()); slotDescriptor.setType(entry.getType()); slotDescriptor.setIsMaterialized(false); context.getColRefToExpr().put(entry, new SlotRef(entry.toString(), slotDescriptor)); } List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator predicate : predicates) { scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } tupleDescriptor.computeMemLayout(); setUnUsedOutputColumns(node, scanNode, predicates, referenceTable); scanNode.setIsSortedByKeyPerTablet(node.needSortedByKeyPerTablet()); scanNode.setIsPreAggregation(node.isPreAggregation(), node.getTurnOffReason()); scanNode.setDictStringIdToIntIds(node.getDictStringIdToIntIds()); 
scanNode.updateAppliedDictStringColumns(node.getGlobalDicts().stream(). map(entry -> entry.first).collect(Collectors.toSet())); List<ColumnRefOperator> bucketColumns = getShuffleColumns(node.getDistributionSpec()); boolean useAllBucketColumns = bucketColumns.stream().allMatch(c -> node.getColRefToColumnMetaMap().containsKey(c)); if (useAllBucketColumns) { List<Expr> bucketExprs = bucketColumns.stream() .map(e -> ScalarOperatorToExpr.buildExecExpression(e, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()))) .collect(Collectors.toList()); scanNode.setBucketExprs(bucketExprs); scanNode.setBucketColumns(bucketColumns); } context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM); fragment.setQueryGlobalDicts(node.getGlobalDicts()); context.getFragments().add(fragment); return fragment; }
class PhysicalPlanTranslator extends OptExpressionVisitor<PlanFragment, ExecPlan> { private final ColumnRefFactory columnRefFactory; private final IdGenerator<RuntimeFilterId> runtimeFilterIdIdGenerator = RuntimeFilterId.createGenerator(); public PhysicalPlanTranslator(ColumnRefFactory columnRefFactory) { this.columnRefFactory = columnRefFactory; } public PlanFragment translate(OptExpression optExpression, ExecPlan context) { return visit(optExpression, context); } @Override public PlanFragment visit(OptExpression optExpression, ExecPlan context) { PlanFragment fragment = optExpression.getOp().accept(this, optExpression, context); Projection projection = (optExpression.getOp()).getProjection(); if (projection == null) { return fragment; } else { return buildProjectNode(optExpression, projection, fragment, context); } } private void setUnUsedOutputColumns(PhysicalOlapScanOperator node, OlapScanNode scanNode, List<ScalarOperator> predicates, OlapTable referenceTable) { if (!ConnectContext.get().getSessionVariable().isEnableFilterUnusedColumnsInScanStage()) { return; } if (referenceTable.getKeysType().isAggregationFamily() && !node.isPreAggregation()) { return; } List<ColumnRefOperator> outputColumns = node.getOutputColumns(); if (outputColumns.isEmpty()) { return; } Set<Integer> outputColumnIds = new HashSet<Integer>(); for (ColumnRefOperator colref : outputColumns) { outputColumnIds.add(colref.getId()); } Set<Integer> singlePredColumnIds = new HashSet<Integer>(); Set<Integer> complexPredColumnIds = new HashSet<Integer>(); Set<String> aggOrPrimaryKeyTableValueColumnNames = new HashSet<String>(); if (referenceTable.getKeysType().isAggregationFamily() || referenceTable.getKeysType() == KeysType.PRIMARY_KEYS) { aggOrPrimaryKeyTableValueColumnNames = referenceTable.getFullSchema().stream() .filter(col -> !col.isKey()) .map(col -> col.getName()) .collect(Collectors.toSet()); } for (ScalarOperator predicate : predicates) { ColumnRefSet usedColumns = 
predicate.getUsedColumns(); if (DecodeVisitor.isSimpleStrictPredicate(predicate)) { for (int cid : usedColumns.getColumnIds()) { singlePredColumnIds.add(cid); } } else { for (int cid : usedColumns.getColumnIds()) { complexPredColumnIds.add(cid); } } } Set<Integer> unUsedOutputColumnIds = new HashSet<Integer>(); Map<Integer, Integer> dictStringIdToIntIds = node.getDictStringIdToIntIds(); for (Integer cid : singlePredColumnIds) { Integer newCid = cid; if (dictStringIdToIntIds.containsKey(cid)) { newCid = dictStringIdToIntIds.get(cid); } if (!complexPredColumnIds.contains(newCid) && !outputColumnIds.contains(newCid)) { unUsedOutputColumnIds.add(newCid); } } scanNode.setUnUsedOutputStringColumns(unUsedOutputColumnIds, aggOrPrimaryKeyTableValueColumnNames); } @Override public PlanFragment visitPhysicalProject(OptExpression optExpr, ExecPlan context) { PhysicalProjectOperator node = (PhysicalProjectOperator) optExpr.getOp(); PlanFragment inputFragment = visit(optExpr.inputAt(0), context); Preconditions.checkState(!node.getColumnRefMap().isEmpty()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); Map<SlotId, Expr> commonSubOperatorMap = Maps.newHashMap(); for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getCommonSubOperatorMap().entrySet()) { Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getCommonSubOperatorMap())); commonSubOperatorMap.put(new SlotId(entry.getKey().getId()), expr); SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setIsNullable(expr.isNullable()); slotDescriptor.setIsMaterialized(false); slotDescriptor.setType(expr.getType()); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } Map<SlotId, Expr> projectMap = Maps.newHashMap(); for (Map.Entry<ColumnRefOperator, 
ScalarOperator> entry : node.getColumnRefMap().entrySet()) { Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getColumnRefMap())); projectMap.put(new SlotId(entry.getKey().getId()), expr); SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setIsNullable(expr.isNullable()); slotDescriptor.setIsMaterialized(true); slotDescriptor.setType(expr.getType()); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } ProjectNode projectNode = new ProjectNode(context.getNextNodeId(), tupleDescriptor, inputFragment.getPlanRoot(), projectMap, commonSubOperatorMap); projectNode.setHasNullableGenerateChild(); projectNode.computeStatistics(optExpr.getStatistics()); for (SlotId sid : projectMap.keySet()) { SlotDescriptor slotDescriptor = tupleDescriptor.getSlot(sid.asInt()); slotDescriptor.setIsNullable(slotDescriptor.getIsNullable() | projectNode.isHasNullableGenerateChild()); } tupleDescriptor.computeMemLayout(); projectNode.setLimit(inputFragment.getPlanRoot().getLimit()); inputFragment.setPlanRoot(projectNode); return inputFragment; } public PlanFragment buildProjectNode(OptExpression optExpression, Projection node, PlanFragment inputFragment, ExecPlan context) { if (node == null) { return inputFragment; } Preconditions.checkState(!node.getColumnRefMap().isEmpty()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); Map<SlotId, Expr> commonSubOperatorMap = Maps.newHashMap(); for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getCommonSubOperatorMap().entrySet()) { Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getCommonSubOperatorMap())); commonSubOperatorMap.put(new SlotId(entry.getKey().getId()), expr); 
SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setIsNullable(expr.isNullable()); slotDescriptor.setIsMaterialized(false); slotDescriptor.setType(expr.getType()); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } Map<SlotId, Expr> projectMap = Maps.newHashMap(); for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getColumnRefMap().entrySet()) { Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getColumnRefMap())); projectMap.put(new SlotId(entry.getKey().getId()), expr); SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setIsNullable(expr.isNullable()); slotDescriptor.setIsMaterialized(true); slotDescriptor.setType(expr.getType()); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } ProjectNode projectNode = new ProjectNode(context.getNextNodeId(), tupleDescriptor, inputFragment.getPlanRoot(), projectMap, commonSubOperatorMap); projectNode.setHasNullableGenerateChild(); Statistics statistics = optExpression.getStatistics(); Statistics.Builder b = Statistics.builder(); b.setOutputRowCount(statistics.getOutputRowCount()); b.addColumnStatistics(statistics.getOutputColumnsStatistics(new ColumnRefSet(node.getOutputColumns()))); projectNode.computeStatistics(b.build()); for (SlotId sid : projectMap.keySet()) { SlotDescriptor slotDescriptor = tupleDescriptor.getSlot(sid.asInt()); slotDescriptor.setIsNullable(slotDescriptor.getIsNullable() | projectNode.isHasNullableGenerateChild()); } tupleDescriptor.computeMemLayout(); projectNode.setLimit(inputFragment.getPlanRoot().getLimit()); inputFragment.setPlanRoot(projectNode); return inputFragment; } @Override public PlanFragment 
visitPhysicalDecode(OptExpression optExpression, ExecPlan context) { PhysicalDecodeOperator node = (PhysicalDecodeOperator) optExpression.getOp(); PlanFragment inputFragment = visit(optExpression.inputAt(0), context); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); for (TupleId tupleId : inputFragment.getPlanRoot().getTupleIds()) { TupleDescriptor childTuple = context.getDescTbl().getTupleDesc(tupleId); ArrayList<SlotDescriptor> slots = childTuple.getSlots(); for (SlotDescriptor slot : slots) { int slotId = slot.getId().asInt(); boolean isNullable = slot.getIsNullable(); if (node.getDictToStrings().containsKey(slotId)) { Integer stringSlotId = node.getDictToStrings().get(slotId); SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(stringSlotId)); slotDescriptor.setIsNullable(isNullable); slotDescriptor.setIsMaterialized(true); slotDescriptor.setType(Type.VARCHAR); context.getColRefToExpr().put(new ColumnRefOperator(stringSlotId, Type.VARCHAR, "<dict-code>", slotDescriptor.getIsNullable()), new SlotRef(stringSlotId.toString(), slotDescriptor)); } else { SlotDescriptor slotDescriptor = new SlotDescriptor(slot.getId(), tupleDescriptor, slot); tupleDescriptor.addSlot(slotDescriptor); } } } Map<SlotId, Expr> projectMap = Maps.newHashMap(); for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getStringFunctions().entrySet()) { Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getStringFunctions())); projectMap.put(new SlotId(entry.getKey().getId()), expr); Preconditions.checkState(context.getColRefToExpr().containsKey(entry.getKey())); } tupleDescriptor.computeMemLayout(); DecodeNode decodeNode = new DecodeNode(context.getNextNodeId(), tupleDescriptor, inputFragment.getPlanRoot(), node.getDictToStrings(), projectMap); decodeNode.computeStatistics(optExpression.getStatistics()); 
decodeNode.setLimit(node.getLimit()); inputFragment.setPlanRoot(decodeNode); return inputFragment; } @Override @Override public PlanFragment visitPhysicalMetaScan(OptExpression optExpression, ExecPlan context) { PhysicalMetaScanOperator scan = (PhysicalMetaScanOperator) optExpression.getOp(); context.getDescTbl().addReferencedTable(scan.getTable()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(scan.getTable()); MetaScanNode scanNode = new MetaScanNode(context.getNextNodeId(), tupleDescriptor, (OlapTable) scan.getTable(), scan.getAggColumnIdToNames()); scanNode.computeRangeLocations(); for (Map.Entry<ColumnRefOperator, Column> entry : scan.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().getName(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } private void prepareContextSlots(PhysicalScanOperator node, ExecPlan context, TupleDescriptor tupleDescriptor) { for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); if (slotDescriptor.getType().isComplexType()) { slotDescriptor.setUsedSubfieldPosGroup(entry.getKey().getUsedSubfieldPosGroup()); } 
context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } } private void prepareCommonExpr(HDFSScanNodePredicates scanNodePredicates, ScanOperatorPredicates predicates, ExecPlan context) { List<ScalarOperator> noEvalPartitionConjuncts = predicates.getNoEvalPartitionConjuncts(); List<ScalarOperator> nonPartitionConjuncts = predicates.getNonPartitionConjuncts(); List<ScalarOperator> partitionConjuncts = predicates.getPartitionConjuncts(); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator partitionConjunct : partitionConjuncts) { scanNodePredicates.getPartitionConjuncts(). add(ScalarOperatorToExpr.buildExecExpression(partitionConjunct, formatterContext)); } for (ScalarOperator noEvalPartitionConjunct : noEvalPartitionConjuncts) { scanNodePredicates.getNoEvalPartitionConjuncts(). add(ScalarOperatorToExpr.buildExecExpression(noEvalPartitionConjunct, formatterContext)); } for (ScalarOperator nonPartitionConjunct : nonPartitionConjuncts) { scanNodePredicates.getNonPartitionConjuncts(). 
add(ScalarOperatorToExpr.buildExecExpression(nonPartitionConjunct, formatterContext)); } } private void prepareMinMaxExpr(HDFSScanNodePredicates scanNodePredicates, ScanOperatorPredicates predicates, ExecPlan context) { /* * populates 'minMaxTuple' with slots for statistics values, * and populates 'minMaxConjuncts' with conjuncts pointing into the 'minMaxTuple' */ List<ScalarOperator> minMaxConjuncts = predicates.getMinMaxConjuncts(); TupleDescriptor minMaxTuple = context.getDescTbl().createTupleDescriptor(); for (ScalarOperator minMaxConjunct : minMaxConjuncts) { for (ColumnRefOperator columnRefOperator : Utils.extractColumnRef(minMaxConjunct)) { SlotDescriptor slotDescriptor = context.getDescTbl() .addSlotDescriptor(minMaxTuple, new SlotId(columnRefOperator.getId())); Column column = predicates.getMinMaxColumnRefMap().get(columnRefOperator); slotDescriptor.setColumn(column); slotDescriptor.setIsNullable(column.isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr() .put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDescriptor)); } } minMaxTuple.computeMemLayout(); scanNodePredicates.setMinMaxTuple(minMaxTuple); ScalarOperatorToExpr.FormatterContext minMaxFormatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator minMaxConjunct : minMaxConjuncts) { scanNodePredicates.getMinMaxConjuncts(). 
add(ScalarOperatorToExpr.buildExecExpression(minMaxConjunct, minMaxFormatterContext));
        }
    }

    /**
     * Translates a Hudi scan operator into a {@link HudiScanNode} in its own fragment.
     * Partition and min/max predicates are prepared via the shared helpers.
     */
    @Override
    public PlanFragment visitPhysicalHudiScan(OptExpression optExpression, ExecPlan context) {
        PhysicalHudiScanOperator node = (PhysicalHudiScanOperator) optExpression.getOp();
        ScanOperatorPredicates predicates = node.getScanOperatorPredicates();

        Table referenceTable = node.getTable();
        context.getDescTbl().addReferencedTable(referenceTable);
        TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
        tupleDescriptor.setTable(referenceTable);

        prepareContextSlots(node, context, tupleDescriptor);

        HudiScanNode hudiScanNode =
                new HudiScanNode(context.getNextNodeId(), tupleDescriptor, "HudiScanNode");
        hudiScanNode.computeStatistics(optExpression.getStatistics());
        try {
            HDFSScanNodePredicates scanNodePredicates = hudiScanNode.getScanNodePredicates();
            scanNodePredicates.setSelectedPartitionIds(predicates.getSelectedPartitionIds());
            scanNodePredicates.setIdToPartitionKey(predicates.getIdToPartitionKey());

            hudiScanNode.setupScanRangeLocations(context.getDescTbl());

            prepareCommonExpr(scanNodePredicates, predicates, context);
            prepareMinMaxExpr(scanNodePredicates, predicates, context);
        } catch (Exception e) {
            // Fix: log once, passing the exception as the throwable argument (the
            // original concatenated it into the message AND logged it a second time).
            LOG.warn("Hudi scan node get scan range locations failed : ", e);
            throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
        }

        hudiScanNode.setLimit(node.getLimit());
        tupleDescriptor.computeMemLayout();
        context.getScanNodes().add(hudiScanNode);

        PlanFragment fragment =
                new PlanFragment(context.getNextFragmentId(), hudiScanNode, DataPartition.RANDOM);
        context.getFragments().add(fragment);
        return fragment;
    }

    /**
     * Translates a Hive scan operator into a {@link HdfsScanNode} in its own fragment.
     */
    @Override
    public PlanFragment visitPhysicalHiveScan(OptExpression optExpression, ExecPlan context) {
        PhysicalHiveScanOperator node = (PhysicalHiveScanOperator) optExpression.getOp();
        ScanOperatorPredicates predicates = node.getScanOperatorPredicates();

        Table referenceTable = node.getTable();
context.getDescTbl().addReferencedTable(referenceTable); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(referenceTable); prepareContextSlots(node, context, tupleDescriptor); HdfsScanNode hdfsScanNode = new HdfsScanNode(context.getNextNodeId(), tupleDescriptor, "HdfsScanNode"); hdfsScanNode.computeStatistics(optExpression.getStatistics()); try { HDFSScanNodePredicates scanNodePredicates = hdfsScanNode.getScanNodePredicates(); scanNodePredicates.setSelectedPartitionIds(predicates.getSelectedPartitionIds()); scanNodePredicates.setIdToPartitionKey(predicates.getIdToPartitionKey()); hdfsScanNode.setupScanRangeLocations(context.getDescTbl()); prepareCommonExpr(scanNodePredicates, predicates, context); prepareMinMaxExpr(scanNodePredicates, predicates, context); } catch (Exception e) { LOG.warn("Hdfs scan node get scan range locations failed : " + e); throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR); } hdfsScanNode.setLimit(node.getLimit()); tupleDescriptor.computeMemLayout(); context.getScanNodes().add(hdfsScanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), hdfsScanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalFileScan(OptExpression optExpression, ExecPlan context) { PhysicalFileScanOperator node = (PhysicalFileScanOperator) optExpression.getOp(); ScanOperatorPredicates predicates = node.getScanOperatorPredicates(); Table referenceTable = node.getTable(); context.getDescTbl().addReferencedTable(referenceTable); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(referenceTable); prepareContextSlots(node, context, tupleDescriptor); FileTableScanNode fileTableScanNode = new FileTableScanNode(context.getNextNodeId(), tupleDescriptor, "FileTableScanNode"); fileTableScanNode.computeStatistics(optExpression.getStatistics()); try { 
HDFSScanNodePredicates scanNodePredicates = fileTableScanNode.getScanNodePredicates(); fileTableScanNode.setupScanRangeLocations(); prepareCommonExpr(scanNodePredicates, predicates, context); prepareMinMaxExpr(scanNodePredicates, predicates, context); } catch (Exception e) { LOG.warn("Hdfs scan node get scan range locations failed : ", e); throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR); } fileTableScanNode.setLimit(node.getLimit()); tupleDescriptor.computeMemLayout(); context.getScanNodes().add(fileTableScanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), fileTableScanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalDeltaLakeScan(OptExpression optExpression, ExecPlan context) { PhysicalDeltaLakeScanOperator node = (PhysicalDeltaLakeScanOperator) optExpression.getOp(); Table referenceTable = node.getTable(); context.getDescTbl().addReferencedTable(referenceTable); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(referenceTable); for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } DeltaLakeScanNode deltaLakeScanNode = new DeltaLakeScanNode(context.getNextNodeId(), tupleDescriptor, "DeltaLakeScanNode"); deltaLakeScanNode.computeStatistics(optExpression.getStatistics()); try { ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); List<ScalarOperator> predicates = 
Utils.extractConjuncts(node.getPredicate()); for (ScalarOperator predicate : predicates) { deltaLakeScanNode.getConjuncts() .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } deltaLakeScanNode.setupScanRangeLocations(context.getDescTbl()); HDFSScanNodePredicates scanNodePredicates = deltaLakeScanNode.getScanNodePredicates(); prepareMinMaxExpr(scanNodePredicates, node.getScanOperatorPredicates(), context); } catch (AnalysisException e) { LOG.warn("Delta lake scan node get scan range locations failed : " + e); throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR); } deltaLakeScanNode.setLimit(node.getLimit()); tupleDescriptor.computeMemLayout(); context.getScanNodes().add(deltaLakeScanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), deltaLakeScanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalIcebergScan(OptExpression optExpression, ExecPlan context) { PhysicalIcebergScanOperator node = (PhysicalIcebergScanOperator) optExpression.getOp(); Table referenceTable = node.getTable(); context.getDescTbl().addReferencedTable(referenceTable); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(referenceTable); prepareContextSlots(node, context, tupleDescriptor); IcebergScanNode icebergScanNode = new IcebergScanNode(context.getNextNodeId(), tupleDescriptor, "IcebergScanNode"); icebergScanNode.computeStatistics(optExpression.getStatistics()); try { ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); for (ScalarOperator predicate : predicates) { icebergScanNode.getConjuncts() .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } icebergScanNode.preProcessIcebergPredicate(predicates); 
icebergScanNode.setupScanRangeLocations(); icebergScanNode.appendEqualityColumns(node, columnRefFactory, context); HDFSScanNodePredicates scanNodePredicates = icebergScanNode.getScanNodePredicates(); prepareMinMaxExpr(scanNodePredicates, node.getScanOperatorPredicates(), context); } catch (UserException e) { LOG.warn("Iceberg scan node get scan range locations failed : " + e); throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR); } icebergScanNode.setLimit(node.getLimit()); tupleDescriptor.computeMemLayout(); context.getScanNodes().add(icebergScanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), icebergScanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalSchemaScan(OptExpression optExpression, ExecPlan context) { PhysicalSchemaScanOperator node = (PhysicalSchemaScanOperator) optExpression.getOp(); context.getDescTbl().addReferencedTable(node.getTable()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(node.getTable()); for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); SchemaScanNode scanNode = new SchemaScanNode(context.getNextNodeId(), tupleDescriptor); scanNode.setFrontendIP(FrontendOptions.getLocalHostAddress()); scanNode.setFrontendPort(Config.rpc_port); scanNode.setUser(context.getConnectContext().getQualifiedUser()); scanNode.setUserIp(context.getConnectContext().getRemoteIP()); scanNode.setLimit(node.getLimit()); List<ScalarOperator> 
predicates = Utils.extractConjuncts(node.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator predicate : predicates) { scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); if (predicate instanceof BinaryPredicateOperator) { if (((BinaryPredicateOperator) predicate).getBinaryType() == BinaryPredicateOperator.BinaryType.EQ) { if (predicate.getChildren().get(0) instanceof ColumnRefOperator && predicate.getChildren().get(1) instanceof ConstantOperator) { ColumnRefOperator columnRefOperator = (ColumnRefOperator) predicate.getChildren().get(0); ConstantOperator constantOperator = (ConstantOperator) predicate.getChildren().get(1); switch (columnRefOperator.getName()) { case "TABLE_SCHEMA": scanNode.setSchemaDb(constantOperator.getVarchar()); break; case "TABLE_NAME": scanNode.setSchemaTable(constantOperator.getVarchar()); break; default: break; } } } } } context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.UNPARTITIONED); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalMysqlScan(OptExpression optExpression, ExecPlan context) { PhysicalMysqlScanOperator node = (PhysicalMysqlScanOperator) optExpression.getOp(); context.getDescTbl().addReferencedTable(node.getTable()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(node.getTable()); for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); 
context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().getName(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); MysqlScanNode scanNode = new MysqlScanNode(context.getNextNodeId(), tupleDescriptor, (MysqlTable) node.getTable()); if (node.getTemporalClause() != null) { scanNode.setTemporalClause(node.getTemporalClause()); } List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); formatterContext.setImplicitCast(true); for (ScalarOperator predicate : predicates) { scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } scanNode.setLimit(node.getLimit()); scanNode.computeColumnsAndFilters(); scanNode.computeStatistics(optExpression.getStatistics()); context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.UNPARTITIONED); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalEsScan(OptExpression optExpression, ExecPlan context) { PhysicalEsScanOperator node = (PhysicalEsScanOperator) optExpression.getOp(); context.getDescTbl().addReferencedTable(node.getTable()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(node.getTable()); for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); EsScanNode scanNode = new 
EsScanNode(context.getNextNodeId(), tupleDescriptor, "EsScanNode"); List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator predicate : predicates) { scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } scanNode.setLimit(node.getLimit()); scanNode.computeStatistics(optExpression.getStatistics()); try { scanNode.assignBackends(); } catch (UserException e) { throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR); } scanNode.setShardScanRanges(scanNode.computeShardLocations(node.getSelectedIndex())); context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalJDBCScan(OptExpression optExpression, ExecPlan context) { PhysicalJDBCScanOperator node = (PhysicalJDBCScanOperator) optExpression.getOp(); context.getDescTbl().addReferencedTable(node.getTable()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(node.getTable()); for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().getName(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); JDBCScanNode scanNode = new JDBCScanNode(context.getNextNodeId(), tupleDescriptor, (JDBCTable) node.getTable()); List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); 
ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); formatterContext.setImplicitCast(true); for (ScalarOperator predicate : predicates) { scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } scanNode.setLimit(node.getLimit()); scanNode.computeColumnsAndFilters(); scanNode.computeStatistics(optExpression.getStatistics()); context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.UNPARTITIONED); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalValues(OptExpression optExpr, ExecPlan context) { PhysicalValuesOperator valuesOperator = (PhysicalValuesOperator) optExpr.getOp(); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); for (ColumnRefOperator columnRefOperator : valuesOperator.getColumnRefSet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(columnRefOperator.getId())); slotDescriptor.setIsNullable(columnRefOperator.isNullable()); slotDescriptor.setIsMaterialized(true); slotDescriptor.setType(columnRefOperator.getType()); context.getColRefToExpr() .put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); if (valuesOperator.getRows().isEmpty()) { EmptySetNode emptyNode = new EmptySetNode(context.getNextNodeId(), Lists.newArrayList(tupleDescriptor.getId())); emptyNode.computeStatistics(optExpr.getStatistics()); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), emptyNode, DataPartition.UNPARTITIONED); context.getFragments().add(fragment); return fragment; } else { UnionNode unionNode = new UnionNode(context.getNextNodeId(), tupleDescriptor.getId()); unionNode.setLimit(valuesOperator.getLimit()); List<List<Expr>> consts = new ArrayList<>(); for 
(List<ScalarOperator> row : valuesOperator.getRows()) {
            List<Expr> exprRow = new ArrayList<>();
            for (ScalarOperator field : row) {
                exprRow.add(ScalarOperatorToExpr.buildExecExpression(
                        field, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())));
            }
            consts.add(exprRow);
        }
        unionNode.setMaterializedConstExprLists_(consts);
        unionNode.computeStatistics(optExpr.getStatistics());
        /*
         * TODO(lhy):
         * It doesn't make sense for vectorized execution engines, but it will appear in explain.
         * we can delete this when refactoring explain in the future,
         */
        consts.forEach(unionNode::addConstExprList);
        PlanFragment fragment =
                new PlanFragment(context.getNextFragmentId(), unionNode, DataPartition.UNPARTITIONED);
        context.getFragments().add(fragment);
        return fragment;
    }
}

/**
 * Returns true if no node in the plan subtree rooted at {@code root} is an ExchangeNode.
 */
public static boolean hasNoExchangeNodes(PlanNode root) {
    if (root instanceof ExchangeNode) {
        return false;
    }
    for (PlanNode childNode : root.getChildren()) {
        if (!hasNoExchangeNodes(childNode)) {
            return false;
        }
    }
    return true;
}

/**
 * Whether all the nodes of the plan tree only contain the specific node types.
 *
 * @param root The plan tree root.
 * @param requiredNodeTypes The specific node types.
 * @return true if all the nodes belong to the node types, otherwise false.
 */
private boolean onlyContainNodeTypes(PlanNode root, List<Class<? extends PlanNode>> requiredNodeTypes) {
    boolean rootMatched = requiredNodeTypes.stream().anyMatch(type -> type.isInstance(root));
    if (!rootMatched) {
        return false;
    }
    for (PlanNode child : root.getChildren()) {
        if (!onlyContainNodeTypes(child, requiredNodeTypes)) {
            return false;
        }
    }
    return true;
}

/**
 * Remove ExchangeNode between AggNode and ScanNode for the single backend.
 * <p>
 * This is used to generate "ScanNode->LocalShuffle->OnePhaseLocalAgg" for the single backend,
 * which contains two steps:
 * 1. Ignore the network cost for ExchangeNode when estimating cost model.
 * 2. Remove ExchangeNode between AggNode and ScanNode when building fragments.
 * <p>
 * Specifically, transfer
 * (AggNode->ExchangeNode)->([ProjectNode->]ScanNode)
 * (inputFragment and sourceFragment, respectively)
 * to
 * (AggNode->[ProjectNode->]ScanNode)
 * (sourceFragment)
 * That is, when matching this fragment pattern, remove inputFragment and return sourceFragment.
 *
 * @param inputFragment The input fragment to match the above pattern.
 * @param context The context of building fragment, which contains all the fragments.
 * @return SourceFragment if it matches the pattern, otherwise the original inputFragment.
 */
private PlanFragment removeExchangeNodeForLocalShuffleAgg(PlanFragment inputFragment, ExecPlan context) {
    if (ConnectContext.get() == null) {
        return inputFragment;
    }
    // Only applies when the local-shuffle agg optimization is on, the pipeline engine is
    // enabled, and the cluster consists of a single backend/compute node.
    SessionVariable sessionVariable = ConnectContext.get().getSessionVariable();
    boolean enableLocalShuffleAgg = sessionVariable.isEnableLocalShuffleAgg()
            && sessionVariable.isEnablePipelineEngine()
            && GlobalStateMgr.getCurrentSystemInfo().isSingleBackendAndComputeNode();
    if (!enableLocalShuffleAgg) {
        return inputFragment;
    }
    if (!(inputFragment.getPlanRoot() instanceof ExchangeNode)) {
        return inputFragment;
    }
    // The source fragment below the exchange may only contain scan/project nodes.
    PlanNode sourceFragmentRoot = inputFragment.getPlanRoot().getChild(0);
    if (!onlyContainNodeTypes(sourceFragmentRoot, ImmutableList.of(ScanNode.class, ProjectNode.class))) {
        return inputFragment;
    }
    PlanFragment sourceFragment = sourceFragmentRoot.getFragment();
    if (sourceFragment instanceof MultiCastPlanFragment) {
        return inputFragment;
    }
    // Drop the now-redundant exchange fragment from the plan's fragment list.
    ArrayList<PlanFragment> fragments = context.getFragments();
    for (int i = fragments.size() - 1; i >= 0; --i) {
        if (fragments.get(i).equals(inputFragment)) {
            fragments.remove(i);
            break;
        }
    }
    return sourceFragment;
}

/**
 * Clear partitionExprs of OlapScanNode (the bucket keys to pass to BE), if they don't satisfy
 * the required hash property of blocking aggregation.
 * <p>
 * When partitionExprs of OlapScanNode are passed to BE, the post operators will use them as
 * local shuffle partition exprs.
 * Otherwise, the operators will use the original partition exprs (group by keys or join on keys).
 * <p>
 * The bucket keys can satisfy the required hash property of blocking aggregation except two scenarios:
 * - OlapScanNode only has one tablet after pruned.
 * - It is executed on the single BE.
 * As for these two scenarios, which will generate ScanNode(k1)->LocalShuffle(c1)->BlockingAgg(c1),
 * partitionExprs of OlapScanNode must be cleared to make BE use group by keys not bucket keys as
 * local shuffle partition exprs.
 *
 * @param fragment The fragment which need to check whether to clear bucket keys of OlapScanNode.
 * @param aggOp The aggregate which need to check whether OlapScanNode satisfies its required hash property.
 */
private void clearOlapScanNodePartitionsIfNotSatisfy(PlanFragment fragment, PhysicalHashAggregateOperator aggOp) {
    if (!aggOp.isOnePhaseAgg() && !aggOp.isMergedLocalAgg()) {
        return;
    }
    if (aggOp.getPartitionByColumns().isEmpty()) {
        return;
    }
    // Only relevant when the fragment bottoms out in an OLAP scan.
    PlanNode leafNode = fragment.getLeftMostLeafNode();
    if (!(leafNode instanceof OlapScanNode)) {
        return;
    }
    OlapScanNode olapScanNode = (OlapScanNode) leafNode;
    // Bucket keys satisfy the agg's hash requirement only if they are a subset of
    // the aggregate's partition-by columns.
    Set<ColumnRefOperator> requiredPartColumns = new HashSet<>(aggOp.getPartitionByColumns());
    boolean satisfy = requiredPartColumns.containsAll(olapScanNode.getBucketColumns());
    if (satisfy) {
        return;
    }
    // Clear bucket keys so BE falls back to the aggregate's own partition exprs.
    olapScanNode.setBucketExprs(Lists.newArrayList());
    olapScanNode.setBucketColumns(Lists.newArrayList());
}

/**
 * Value holder for the expression lists produced by buildAggregateTuple:
 * grouping exprs, aggregate calls, partition exprs, and intermediate slot refs.
 */
private static class AggregateExprInfo {
    public final ArrayList<Expr> groupExpr;
    public final ArrayList<FunctionCallExpr> aggregateExpr;
    public final ArrayList<Expr> partitionExpr;
    public final ArrayList<Expr> intermediateExpr;

    public AggregateExprInfo(ArrayList<Expr> groupExpr, ArrayList<FunctionCallExpr> aggregateExpr,
                             ArrayList<Expr> partitionExpr, ArrayList<Expr> intermediateExpr) {
        this.groupExpr = groupExpr;
        this.aggregateExpr = aggregateExpr;
        this.partitionExpr = partitionExpr;
        this.intermediateExpr = intermediateExpr;
    }
}

// Translates the aggregate operator's group-by/aggregate/partition column refs into
// executable expressions, registering output slots in outputTupleDesc. (Continues below.)
private
AggregateExprInfo buildAggregateTuple(
        Map<ColumnRefOperator, CallOperator> aggregations,
        List<ColumnRefOperator> groupBys,
        List<ColumnRefOperator> partitionBys,
        TupleDescriptor outputTupleDesc,
        ExecPlan context) {
    ArrayList<Expr> groupingExpressions = Lists.newArrayList();
    // Special case for exchange_bytes/exchange_speed profiling aggregates in 1-stage mode:
    // grouping expressions are not materialized for them.
    boolean forExchangePerf = aggregations.values().stream().anyMatch(aggFunc ->
            aggFunc.getFnName().equals(FunctionSet.EXCHANGE_BYTES) ||
                    aggFunc.getFnName().equals(FunctionSet.EXCHANGE_SPEED)) &&
            ConnectContext.get().getSessionVariable().getNewPlannerAggStage() == 1;
    if (!forExchangePerf) {
        for (ColumnRefOperator grouping : CollectionUtils.emptyIfNull(groupBys)) {
            Expr groupingExpr = ScalarOperatorToExpr.buildExecExpression(grouping,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
            groupingExpressions.add(groupingExpr);
            SlotDescriptor slotDesc =
                    context.getDescTbl().addSlotDescriptor(outputTupleDesc, new SlotId(grouping.getId()));
            slotDesc.setType(groupingExpr.getType());
            slotDesc.setIsNullable(groupingExpr.isNullable());
            slotDesc.setIsMaterialized(true);
        }
    }
    ArrayList<FunctionCallExpr> aggregateExprList = Lists.newArrayList();
    ArrayList<Expr> intermediateAggrExprs = Lists.newArrayList();
    for (Map.Entry<ColumnRefOperator, CallOperator> aggregation : aggregations.entrySet()) {
        FunctionCallExpr aggExpr = (FunctionCallExpr) ScalarOperatorToExpr.buildExecExpression(
                aggregation.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
        aggregateExprList.add(aggExpr);
        SlotDescriptor slotDesc = context.getDescTbl()
                .addSlotDescriptor(outputTupleDesc, new SlotId(aggregation.getKey().getId()));
        slotDesc.setType(aggregation.getValue().getType());
        slotDesc.setIsNullable(aggExpr.isNullable());
        slotDesc.setIsMaterialized(true);
        context.getColRefToExpr()
                .put(aggregation.getKey(), new SlotRef(aggregation.getKey().toString(), slotDesc));
        // Intermediate slot mirrors the output slot but uses the aggregate function's
        // intermediate type (for multi-phase aggregation).
        SlotDescriptor intermediateSlotDesc = new SlotDescriptor(slotDesc.getId(), slotDesc.getParent());
        AggregateFunction aggrFn =
(AggregateFunction) aggExpr.getFn();
        // Fall back to the return type when no dedicated intermediate type exists.
        Type intermediateType = aggrFn.getIntermediateType() != null ?
                aggrFn.getIntermediateType() : aggrFn.getReturnType();
        intermediateSlotDesc.setType(intermediateType);
        intermediateSlotDesc.setIsNullable(aggrFn.isNullable());
        intermediateSlotDesc.setIsMaterialized(true);
        SlotRef intermediateSlotRef = new SlotRef(aggregation.getKey().toString(), intermediateSlotDesc);
        intermediateAggrExprs.add(intermediateSlotRef);
    }
    ArrayList<Expr> partitionExpressions = Lists.newArrayList();
    for (ColumnRefOperator column : CollectionUtils.emptyIfNull(partitionBys)) {
        Expr partitionExpr = ScalarOperatorToExpr.buildExecExpression(column,
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
        SlotDescriptor slotDesc =
                context.getDescTbl().addSlotDescriptor(outputTupleDesc, new SlotId(column.getId()));
        slotDesc.setType(partitionExpr.getType());
        slotDesc.setIsNullable(partitionExpr.isNullable());
        slotDesc.setIsMaterialized(true);
        context.getColRefToExpr().put(column, new SlotRef(column.toString(), slotDesc));
        partitionExpressions.add(new SlotRef(slotDesc));
    }
    outputTupleDesc.computeMemLayout();
    return new AggregateExprInfo(groupingExpressions, aggregateExprList, partitionExpressions,
            intermediateAggrExprs);
}

/**
 * Builds the AggregationNode for a physical hash aggregate, handling each aggregation
 * phase (local / global / distinct-local / distinct-global) and the single-backend
 * local-shuffle optimization (exchange removal).
 */
@Override
public PlanFragment visitPhysicalHashAggregate(OptExpression optExpr, ExecPlan context) {
    PhysicalHashAggregateOperator node = (PhysicalHashAggregateOperator) optExpr.getOp();
    PlanFragment originalInputFragment = visit(optExpr.inputAt(0), context);
    // If the exchange below was elided, the agg runs with a local shuffle instead.
    PlanFragment inputFragment = removeExchangeNodeForLocalShuffleAgg(originalInputFragment, context);
    boolean withLocalShuffle = inputFragment != originalInputFragment;
    Map<ColumnRefOperator, CallOperator> aggregations = node.getAggregations();
    List<ColumnRefOperator> groupBys = node.getGroupBys();
    List<ColumnRefOperator> partitionBys = node.getPartitionByColumns();
    TupleDescriptor outputTupleDesc = context.getDescTbl().createTupleDescriptor();
    AggregateExprInfo aggExpr =
buildAggregateTuple(aggregations, groupBys, partitionBys, outputTupleDesc, context);
    ArrayList<Expr> groupingExpressions = aggExpr.groupExpr;
    ArrayList<FunctionCallExpr> aggregateExprList = aggExpr.aggregateExpr;
    ArrayList<Expr> partitionExpressions = aggExpr.partitionExpr;
    ArrayList<Expr> intermediateAggrExprs = aggExpr.intermediateExpr;
    AggregationNode aggregationNode;
    if (node.getType().isLocal()) {
        // Phase 1 (local / pre-aggregation): no finalize, intermediate tuple output.
        AggregateInfo aggInfo = AggregateInfo.create(
                groupingExpressions, aggregateExprList, outputTupleDesc, outputTupleDesc,
                AggregateInfo.AggPhase.FIRST);
        aggregationNode =
                new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        aggregationNode.unsetNeedsFinalize();
        aggregationNode.setIsPreagg(node.isUseStreamingPreAgg());
        aggregationNode.setIntermediateTuple();
        if (!partitionExpressions.isEmpty()) {
            inputFragment.setOutputPartition(DataPartition.hashPartitioned(partitionExpressions));
        }
        // Colocate agg only for a non-streaming local agg over colocated OLAP scans.
        if (!node.isUseStreamingPreAgg() && hasColocateOlapScanChildInFragment(aggregationNode)) {
            aggregationNode.setColocate(true);
        }
    } else if (node.getType().isGlobal()) {
        if (node.hasSingleDistinct()) {
            // Merge all non-distinct aggregates; the single distinct one stays as-is.
            for (int i = 0; i < aggregateExprList.size(); i++) {
                if (i != node.getSingleDistinctFunctionPos()) {
                    aggregateExprList.get(i).setMergeAggFn();
                }
            }
            AggregateInfo aggInfo = AggregateInfo.create(
                    groupingExpressions, aggregateExprList, outputTupleDesc, outputTupleDesc,
                    AggregateInfo.AggPhase.SECOND);
            aggregationNode =
                    new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        } else if (!node.isSplit()) {
            // One-phase aggregation: rewrite a single distinct agg to its multi_distinct form.
            rewriteAggDistinctFirstStageFunction(aggregateExprList);
            AggregateInfo aggInfo = AggregateInfo.create(
                    groupingExpressions, aggregateExprList, outputTupleDesc, outputTupleDesc,
                    AggregateInfo.AggPhase.FIRST);
            aggregationNode =
                    new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        } else {
            // Second (merge) phase of a split aggregation.
            aggregateExprList.forEach(FunctionCallExpr::setMergeAggFn);
            AggregateInfo aggInfo = AggregateInfo.create(
                    groupingExpressions,
aggregateExprList, outputTupleDesc, outputTupleDesc,
                    AggregateInfo.AggPhase.SECOND_MERGE);
            aggregationNode =
                    new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        }
        // HAVING-style predicates are evaluated on the global aggregation node.
        List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
        ScalarOperatorToExpr.FormatterContext formatterContext =
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
        for (ScalarOperator predicate : predicates) {
            aggregationNode.getConjuncts()
                    .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        }
        aggregationNode.setLimit(node.getLimit());
        if (hasColocateOlapScanChildInFragment(aggregationNode)) {
            aggregationNode.setColocate(true);
        }
    } else if (node.getType().isDistinctGlobal()) {
        // Merge phase of the distinct aggregation pipeline; emits intermediate tuple.
        aggregateExprList.forEach(FunctionCallExpr::setMergeAggFn);
        AggregateInfo aggInfo = AggregateInfo.create(
                groupingExpressions, aggregateExprList, outputTupleDesc, outputTupleDesc,
                AggregateInfo.AggPhase.FIRST_MERGE);
        aggregationNode =
                new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        aggregationNode.unsetNeedsFinalize();
        aggregationNode.setIntermediateTuple();
    } else if (node.getType().isDistinctLocal()) {
        // Local phase around the single distinct function: merge the other aggregates.
        for (int i = 0; i < aggregateExprList.size(); i++) {
            if (i != node.getSingleDistinctFunctionPos()) {
                aggregateExprList.get(i).setMergeAggFn();
            }
        }
        AggregateInfo aggInfo = AggregateInfo.create(
                groupingExpressions, aggregateExprList, outputTupleDesc, outputTupleDesc,
                AggregateInfo.AggPhase.SECOND);
        aggregationNode =
                new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);
        aggregationNode.unsetNeedsFinalize();
        aggregationNode.setIsPreagg(node.isUseStreamingPreAgg());
        aggregationNode.setIntermediateTuple();
    } else {
        throw unsupportedException("Not support aggregate type : " + node.getType());
    }
    aggregationNode.setUseSortAgg(node.isUseSortAgg());
    aggregationNode.setStreamingPreaggregationMode(context.getConnectContext().
getSessionVariable().getStreamingPreaggregationMode());
    aggregationNode.setHasNullableGenerateChild();
    aggregationNode.computeStatistics(optExpr.getStatistics());
    if (node.isOnePhaseAgg() || node.isMergedLocalAgg()) {
        // Bucket keys that don't satisfy the agg's hash requirement must be cleared
        // so BE uses the group-by keys for the local shuffle.
        clearOlapScanNodePartitionsIfNotSatisfy(inputFragment, node);
        inputFragment.setAssignScanRangesPerDriverSeq(!withLocalShuffle);
        aggregationNode.setWithLocalShuffle(withLocalShuffle);
    }
    aggregationNode.getAggInfo().setIntermediateAggrExprs(intermediateAggrExprs);
    inputFragment.setPlanRoot(aggregationNode);
    return inputFragment;
}

/**
 * Returns true if, within the current fragment (recursion stops at exchanges),
 * there is an OlapScanNode over a colocated table below {@code node}.
 */
public boolean hasColocateOlapScanChildInFragment(PlanNode node) {
    if (node instanceof OlapScanNode) {
        ColocateTableIndex colocateIndex = GlobalStateMgr.getCurrentColocateIndex();
        OlapScanNode scanNode = (OlapScanNode) node;
        if (colocateIndex.isColocateTable(scanNode.getOlapTable().getId())) {
            return true;
        }
    }
    // Exchanges mark fragment boundaries; don't look past them.
    if (node instanceof ExchangeNode) {
        return false;
    }
    boolean hasOlapScanChild = false;
    for (PlanNode child : node.getChildren()) {
        hasOlapScanChild |= hasColocateOlapScanChildInFragment(child);
    }
    return hasOlapScanChild;
}

/**
 * If exactly one aggregate in the list is DISTINCT, rewrites count(distinct)/sum(distinct)
 * in place to their multi_distinct_count / multi_distinct_sum equivalents so the
 * aggregation can run in a single stage.
 */
public void rewriteAggDistinctFirstStageFunction(List<FunctionCallExpr> aggregateExprList) {
    int singleDistinctCount = 0;
    int singleDistinctIndex = 0;
    FunctionCallExpr functionCallExpr = null;
    for (int i = 0; i < aggregateExprList.size(); ++i) {
        FunctionCallExpr callExpr = aggregateExprList.get(i);
        if (callExpr.isDistinct()) {
            ++singleDistinctCount;
            functionCallExpr = callExpr;
            singleDistinctIndex = i;
        }
    }
    if (singleDistinctCount == 1) {
        FunctionCallExpr replaceExpr = null;
        final String functionName = functionCallExpr.getFnName().getFunction();
        if (functionName.equalsIgnoreCase(FunctionSet.COUNT)) {
            replaceExpr = new FunctionCallExpr(FunctionSet.MULTI_DISTINCT_COUNT,
                    functionCallExpr.getParams());
            replaceExpr.setFn(Expr.getBuiltinFunction(FunctionSet.MULTI_DISTINCT_COUNT,
                    new Type[] {functionCallExpr.getChild(0).getType()}, IS_NONSTRICT_SUPERTYPE_OF));
            replaceExpr.getParams().setIsDistinct(false);
        }
else if (functionName.equalsIgnoreCase(FunctionSet.SUM)) {
            replaceExpr = new FunctionCallExpr(FunctionSet.MULTI_DISTINCT_SUM,
                    functionCallExpr.getParams());
            // Decimal sum needs a type-aware conversion to its multi_distinct variant.
            Function multiDistinctSum = DecimalV3FunctionAnalyzer.convertSumToMultiDistinctSum(
                    functionCallExpr.getFn(), functionCallExpr.getChild(0).getType());
            replaceExpr.setFn(multiDistinctSum);
            replaceExpr.getParams().setIsDistinct(false);
        }
        // Only count/sum reach here; anything else indicates a planner bug.
        Preconditions.checkState(replaceExpr != null);
        ExpressionAnalyzer.analyzeExpressionIgnoreSlot(replaceExpr, ConnectContext.get());
        aggregateExprList.set(singleDistinctIndex, replaceExpr);
    }
}

/**
 * Builds an ExchangeNode fragment for a physical distribution operator
 * (GATHER / BROADCAST / SHUFFLE) and wires the input fragment's destination to it.
 */
@Override
public PlanFragment visitPhysicalDistribution(OptExpression optExpr, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
    PhysicalDistributionOperator distribution = (PhysicalDistributionOperator) optExpr.getOp();
    ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(), inputFragment.getPlanRoot(),
            distribution.getDistributionSpec().getType());
    DataPartition dataPartition;
    if (DistributionSpec.DistributionType.GATHER.equals(distribution.getDistributionSpec().getType())) {
        // Gather: everything converges to a single instance.
        exchangeNode.setNumInstances(1);
        dataPartition = DataPartition.UNPARTITIONED;
        GatherDistributionSpec spec = (GatherDistributionSpec) distribution.getDistributionSpec();
        if (spec.hasLimit()) {
            exchangeNode.setLimit(spec.getLimit());
        }
    } else if (DistributionSpec.DistributionType.BROADCAST
            .equals(distribution.getDistributionSpec().getType())) {
        exchangeNode.setNumInstances(inputFragment.getPlanRoot().getNumInstances());
        dataPartition = DataPartition.UNPARTITIONED;
    } else if (DistributionSpec.DistributionType.SHUFFLE.equals(
            distribution.getDistributionSpec().getType())) {
        exchangeNode.setNumInstances(inputFragment.getPlanRoot().getNumInstances());
        List<ColumnRefOperator> partitionColumns =
                getShuffleColumns((HashDistributionSpec) distribution.getDistributionSpec());
        List<Expr> distributeExpressions =
                partitionColumns.stream().map(e ->
ScalarOperatorToExpr.buildExecExpression(e,
                                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                        .collect(Collectors.toList());
        dataPartition = DataPartition.hashPartitioned(distributeExpressions);
    } else {
        throw new StarRocksPlannerException("Unsupport exchange type : " +
                distribution.getDistributionSpec().getType(), INTERNAL_ERROR);
    }
    exchangeNode.setDataPartition(dataPartition);
    PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), exchangeNode, dataPartition);
    // Propagate low-cardinality global dictionaries across the exchange.
    fragment.setQueryGlobalDicts(distribution.getGlobalDicts());
    inputFragment.setDestination(exchangeNode);
    inputFragment.setOutputPartition(dataPartition);
    context.getFragments().add(fragment);
    return fragment;
}

/**
 * Builds either a partial (per-instance) top-N or the final merging top-N,
 * depending on whether the operator was split by the optimizer.
 */
@Override
public PlanFragment visitPhysicalTopN(OptExpression optExpr, ExecPlan context) {
    PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
    PhysicalTopNOperator topN = (PhysicalTopNOperator) optExpr.getOp();
    Preconditions.checkState(topN.getOffset() >= 0);
    if (!topN.isSplit()) {
        return buildPartialTopNFragment(optExpr, context, topN.getPartitionByColumns(),
                topN.getPartitionLimit(), topN.getOrderSpec(),
                topN.getTopNType(), topN.getLimit(), topN.getOffset(), inputFragment);
    } else {
        return buildFinalTopNFragment(context, topN.getTopNType(), topN.getLimit(), topN.getOffset(),
                inputFragment, optExpr);
    }
}

/**
 * Builds the final top-N stage: a GATHER exchange that merge-sorts the partial
 * sort outputs, applying offset (and limit only for ROW_NUMBER-style top-N).
 */
private PlanFragment buildFinalTopNFragment(ExecPlan context, TopNType topNType, long limit, long offset,
                                            PlanFragment inputFragment,
                                            OptExpression optExpr) {
    ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(), inputFragment.getPlanRoot(),
            DistributionSpec.DistributionType.GATHER);
    exchangeNode.setNumInstances(1);
    DataPartition dataPartition = DataPartition.UNPARTITIONED;
    exchangeNode.setDataPartition(dataPartition);
    // The final stage always sits on top of a partial SortNode.
    Preconditions.checkState(inputFragment.getPlanRoot() instanceof SortNode);
    SortNode sortNode = (SortNode) inputFragment.getPlanRoot();
    sortNode.setTopNType(topNType);
    exchangeNode.setMergeInfo(sortNode.getSortInfo(),
offset);
    exchangeNode.computeStatistics(optExpr.getStatistics());
    if (TopNType.ROW_NUMBER.equals(topNType)) {
        exchangeNode.setLimit(limit);
    } else {
        // RANK/DENSE_RANK style top-N cannot cut rows by a plain limit at the exchange.
        exchangeNode.unsetLimit();
    }
    PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), exchangeNode, dataPartition);
    inputFragment.setDestination(exchangeNode);
    inputFragment.setOutputPartition(dataPartition);
    fragment.setQueryGlobalDicts(inputFragment.getQueryGlobalDicts());
    context.getFragments().add(fragment);
    return fragment;
}

/**
 * Builds the partial top-N stage: materializes the sort tuple (sort keys first, then
 * any remaining output columns), constructs SortInfo and places a SortNode on top
 * of the input fragment.
 */
private PlanFragment buildPartialTopNFragment(OptExpression optExpr, ExecPlan context,
                                              List<ColumnRefOperator> partitionByColumns,
                                              long partitionLimit, OrderSpec orderSpec,
                                              TopNType topNType, long limit, long offset,
                                              PlanFragment inputFragment) {
    List<Expr> resolvedTupleExprs = Lists.newArrayList();
    List<Expr> partitionExprs = Lists.newArrayList();
    List<Expr> sortExprs = Lists.newArrayList();
    TupleDescriptor sortTuple = context.getDescTbl().createTupleDescriptor();
    if (CollectionUtils.isNotEmpty(partitionByColumns)) {
        for (ColumnRefOperator partitionByColumn : partitionByColumns) {
            Expr expr = ScalarOperatorToExpr.buildExecExpression(partitionByColumn,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
            partitionExprs.add(expr);
        }
    }
    // Materialize one slot per ordering column and record the SlotRef mapping.
    for (Ordering ordering : orderSpec.getOrderDescs()) {
        Expr sortExpr = ScalarOperatorToExpr.buildExecExpression(ordering.getColumnRef(),
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
        SlotDescriptor slotDesc =
                context.getDescTbl().addSlotDescriptor(sortTuple, new SlotId(ordering.getColumnRef().getId()));
        slotDesc.initFromExpr(sortExpr);
        slotDesc.setIsMaterialized(true);
        slotDesc.setIsNullable(sortExpr.isNullable());
        slotDesc.setType(sortExpr.getType());
        context.getColRefToExpr()
                .put(ordering.getColumnRef(), new SlotRef(ordering.getColumnRef().toString(), slotDesc));
        resolvedTupleExprs.add(sortExpr);
        sortExprs.add(new SlotRef(slotDesc));
    }
    ColumnRefSet columnRefSet =
optExpr.inputAt(0).getLogicalProperty().getOutputColumns();
    for (int i = 0; i < columnRefSet.getColumnIds().length; ++i) {
        /*
         * Add column not be used in ordering
         */
        ColumnRefOperator columnRef = columnRefFactory.getColumnRef(columnRefSet.getColumnIds()[i]);
        if (orderSpec.getOrderDescs().stream().map(Ordering::getColumnRef)
                .noneMatch(c -> c.equals(columnRef))) {
            Expr outputExpr = ScalarOperatorToExpr.buildExecExpression(columnRef,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
            SlotDescriptor slotDesc =
                    context.getDescTbl().addSlotDescriptor(sortTuple, new SlotId(columnRef.getId()));
            slotDesc.initFromExpr(outputExpr);
            slotDesc.setIsMaterialized(true);
            slotDesc.setIsNullable(outputExpr.isNullable());
            slotDesc.setType(outputExpr.getType());
            context.getColRefToExpr().put(columnRef, new SlotRef(columnRef.toString(), slotDesc));
            resolvedTupleExprs.add(outputExpr);
        }
    }
    sortTuple.computeMemLayout();
    SortInfo sortInfo = new SortInfo(partitionExprs, partitionLimit, sortExprs,
            orderSpec.getOrderDescs().stream().map(Ordering::isAscending).collect(Collectors.toList()),
            orderSpec.getOrderDescs().stream().map(Ordering::isNullsFirst).collect(Collectors.toList()));
    sortInfo.setMaterializedTupleInfo(sortTuple, resolvedTupleExprs);
    // With no limit this is a full sort; with a limit it's a top-N.
    SortNode sortNode = new SortNode(
            context.getNextNodeId(),
            inputFragment.getPlanRoot(),
            sortInfo,
            limit != Operator.DEFAULT_LIMIT,
            limit == Operator.DEFAULT_LIMIT,
            0);
    sortNode.setTopNType(topNType);
    sortNode.setLimit(limit);
    sortNode.setOffset(offset);
    sortNode.resolvedTupleExprs = resolvedTupleExprs;
    sortNode.setHasNullableGenerateChild();
    sortNode.computeStatistics(optExpr.getStatistics());
    if (shouldBuildGlobalRuntimeFilter()) {
        sortNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl());
    }
    inputFragment.setPlanRoot(sortNode);
    return inputFragment;
}

// Marks a join as eligible for pushing down the right (build) table, for the join
// types where that is valid, when the session enables it.
private void setJoinPushDown(JoinNode node) {
    node.setIsPushDown(ConnectContext.get().getSessionVariable().isHashJoinPushDownRightTable() &&
(node.getJoinOp().isInnerJoin() || node.getJoinOp().isLeftSemiJoin() ||
                    node.getJoinOp().isRightJoin()));
}

// Global runtime filters are built when explicitly enabled or when the pipeline
// engine is on.
private boolean shouldBuildGlobalRuntimeFilter() {
    return ConnectContext.get() != null &&
            (ConnectContext.get().getSessionVariable().getEnableGlobalRuntimeFilter() ||
                    ConnectContext.get().getSessionVariable().isEnablePipelineEngine());
}

@Override
public PlanFragment visitPhysicalHashJoin(OptExpression optExpr, ExecPlan context) {
    PlanFragment leftFragment = visit(optExpr.inputAt(0), context);
    PlanFragment rightFragment = visit(optExpr.inputAt(1), context);
    // Shared fragment-building logic for hash and merge joins.
    return visitPhysicalJoin(leftFragment, rightFragment, optExpr, context);
}

// Splits a predicate into AND-conjuncts and translates each to an executable Expr.
private List<Expr> extractConjuncts(ScalarOperator predicate, ExecPlan context) {
    return Utils.extractConjuncts(predicate).stream()
            .map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
            .collect(Collectors.toList());
}

/**
 * Marks tuples that may produce NULLs under the given join type (the inner side of
 * outer joins, both sides for full outer) as nullable and recomputes their layout.
 */
private void setNullableForJoin(JoinOperator joinOperator,
                                PlanFragment leftFragment, PlanFragment rightFragment, ExecPlan context) {
    Set<TupleId> nullableTupleIds = new HashSet<>();
    nullableTupleIds.addAll(leftFragment.getPlanRoot().getNullableTupleIds());
    nullableTupleIds.addAll(rightFragment.getPlanRoot().getNullableTupleIds());
    if (joinOperator.isLeftOuterJoin()) {
        nullableTupleIds.addAll(rightFragment.getPlanRoot().getTupleIds());
    } else if (joinOperator.isRightOuterJoin()) {
        nullableTupleIds.addAll(leftFragment.getPlanRoot().getTupleIds());
    } else if (joinOperator.isFullOuterJoin()) {
        nullableTupleIds.addAll(leftFragment.getPlanRoot().getTupleIds());
        nullableTupleIds.addAll(rightFragment.getPlanRoot().getTupleIds());
    }
    for (TupleId tupleId : nullableTupleIds) {
        TupleDescriptor tupleDescriptor = context.getDescTbl().getTupleDesc(tupleId);
        tupleDescriptor.getSlots().forEach(slot -> slot.setIsNullable(true));
        tupleDescriptor.computeMemLayout();
    }
}

/**
 * Builds a NestLoopJoinNode fragment; the right (build) side is merged into the
 * left fragment, and the join is marked replicated when the build side is local.
 */
@Override
public PlanFragment visitPhysicalNestLoopJoin(OptExpression optExpr, ExecPlan context) {
PhysicalJoinOperator node = (PhysicalJoinOperator) optExpr.getOp();
    PlanFragment leftFragment = visit(optExpr.inputAt(0), context);
    PlanFragment rightFragment = visit(optExpr.inputAt(1), context);
    List<Expr> conjuncts = extractConjuncts(node.getPredicate(), context);
    List<Expr> joinOnConjuncts = extractConjuncts(node.getOnPredicate(), context);
    List<Expr> probePartitionByExprs = Lists.newArrayList();
    DistributionSpec leftDistributionSpec =
            optExpr.getRequiredProperties().get(0).getDistributionProperty().getSpec();
    DistributionSpec rightDistributionSpec =
            optExpr.getRequiredProperties().get(1).getDistributionProperty().getSpec();
    if (leftDistributionSpec instanceof HashDistributionSpec &&
            rightDistributionSpec instanceof HashDistributionSpec) {
        probePartitionByExprs = getShuffleExprs((HashDistributionSpec) leftDistributionSpec, context);
    }
    setNullableForJoin(node.getJoinType(), leftFragment, rightFragment, context);
    NestLoopJoinNode joinNode = new NestLoopJoinNode(context.getNextNodeId(),
            leftFragment.getPlanRoot(), rightFragment.getPlanRoot(),
            null, node.getJoinType(), Lists.newArrayList(), joinOnConjuncts);
    joinNode.setLimit(node.getLimit());
    joinNode.computeStatistics(optExpr.getStatistics());
    joinNode.addConjuncts(conjuncts);
    joinNode.setProbePartitionByExprs(probePartitionByExprs);
    // Fold the right fragment into the left one; the left fragment carries the join.
    rightFragment.getPlanRoot().setFragment(leftFragment);
    context.getFragments().remove(rightFragment);
    // Re-append the left fragment so it keeps a consistent position in the list.
    context.getFragments().remove(leftFragment);
    context.getFragments().add(leftFragment);
    leftFragment.setPlanRoot(joinNode);
    leftFragment.addChildren(rightFragment.getChildren());
    // Build side without an exchange means the build data is replicated locally.
    if (!(joinNode.getChild(1) instanceof ExchangeNode)) {
        joinNode.setReplicated(true);
    }
    if (shouldBuildGlobalRuntimeFilter()) {
        joinNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl());
    }
    leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
    return leftFragment;
}

/**
 * Builds a merge-join fragment. When both inputs carry their own sorts, the sort
 * nodes are temporarily unhooked before delegating to visitPhysicalJoin and
 * re-attached afterwards.
 */
@Override
public PlanFragment visitPhysicalMergeJoin(OptExpression optExpr, ExecPlan context) {
    PlanFragment
leftFragment = visit(optExpr.inputAt(0), context);
    PlanFragment rightFragment = visit(optExpr.inputAt(1), context);
    PlanNode leftPlanRoot = leftFragment.getPlanRoot();
    PlanNode rightPlanRoot = rightFragment.getPlanRoot();
    OptExpression leftExpression = optExpr.inputAt(0);
    OptExpression rightExpression = optExpr.inputAt(1);
    boolean needDealSort = leftExpression.getInputs().size() > 0 && rightExpression.getInputs().size() > 0;
    if (needDealSort) {
        // Temporarily strip the sort nodes so the generic join builder sees their inputs.
        optExpr.setChild(0, leftExpression.inputAt(0));
        optExpr.setChild(1, rightExpression.inputAt(0));
        leftFragment.setPlanRoot(leftPlanRoot.getChild(0));
        rightFragment.setPlanRoot(rightPlanRoot.getChild(0));
    }
    PlanFragment planFragment = visitPhysicalJoin(leftFragment, rightFragment, optExpr, context);
    if (needDealSort) {
        // Restore the original tree shape and re-attach the sorts under the join.
        leftExpression.setChild(0, optExpr.inputAt(0));
        rightExpression.setChild(0, optExpr.inputAt(1));
        optExpr.setChild(0, leftExpression);
        optExpr.setChild(1, rightExpression);
        planFragment.getPlanRoot().setChild(0, leftPlanRoot);
        planFragment.getPlanRoot().setChild(1, rightPlanRoot);
    }
    return planFragment;
}

// Resolves the shuffle column ids of a hash distribution spec to ColumnRefOperators.
private List<ColumnRefOperator> getShuffleColumns(HashDistributionSpec spec) {
    List<Integer> columnRefs = spec.getShuffleColumns();
    Preconditions.checkState(!columnRefs.isEmpty());
    List<ColumnRefOperator> shuffleColumns = new ArrayList<>();
    for (int columnId : columnRefs) {
        shuffleColumns.add(columnRefFactory.getColumnRef(columnId));
    }
    return shuffleColumns;
}

// Translates the shuffle columns of a hash distribution into executable exprs.
private List<Expr> getShuffleExprs(HashDistributionSpec hashDistributionSpec, ExecPlan context) {
    List<ColumnRefOperator> shuffleColumns = getShuffleColumns(hashDistributionSpec);
    return shuffleColumns.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
            .collect(Collectors.toList());
}

/**
 * Shared fragment construction for hash/merge joins: infers the distribution mode,
 * builds the join node with eq/other/filter conjuncts, and assembles the fragments.
 */
private PlanFragment visitPhysicalJoin(PlanFragment leftFragment, PlanFragment rightFragment,
                                       OptExpression optExpr, ExecPlan context) {
    PhysicalJoinOperator node = (PhysicalJoinOperator)
optExpr.getOp();
    ColumnRefSet leftChildColumns = optExpr.inputAt(0).getLogicalProperty().getOutputColumns();
    ColumnRefSet rightChildColumns = optExpr.inputAt(1).getLogicalProperty().getOutputColumns();
    JoinOperator joinOperator = node.getJoinType();
    // Cross joins take the nest-loop path, never this one.
    Preconditions.checkState(!joinOperator.isCrossJoin(), "should not be cross join");
    PlanNode leftFragmentPlanRoot = leftFragment.getPlanRoot();
    PlanNode rightFragmentPlanRoot = rightFragment.getPlanRoot();
    // Look through dictionary-decode nodes when inferring the distribution mode.
    if (leftFragmentPlanRoot instanceof DecodeNode) {
        leftFragmentPlanRoot = leftFragmentPlanRoot.getChild(0);
    }
    if (rightFragmentPlanRoot instanceof DecodeNode) {
        rightFragmentPlanRoot = rightFragmentPlanRoot.getChild(0);
    }
    List<Expr> probePartitionByExprs = Lists.newArrayList();
    DistributionSpec leftDistributionSpec =
            optExpr.getRequiredProperties().get(0).getDistributionProperty().getSpec();
    DistributionSpec rightDistributionSpec =
            optExpr.getRequiredProperties().get(1).getDistributionProperty().getSpec();
    if (leftDistributionSpec instanceof HashDistributionSpec &&
            rightDistributionSpec instanceof HashDistributionSpec) {
        probePartitionByExprs = getShuffleExprs((HashDistributionSpec) leftDistributionSpec, context);
    }
    JoinNode.DistributionMode distributionMode =
            inferDistributionMode(optExpr, leftFragmentPlanRoot, rightFragmentPlanRoot);
    JoinExprInfo joinExpr = buildJoinExpr(optExpr, context);
    List<Expr> eqJoinConjuncts = joinExpr.eqJoinConjuncts;
    List<Expr> otherJoinConjuncts = joinExpr.otherJoin;
    List<Expr> conjuncts = joinExpr.conjuncts;
    setNullableForJoin(joinOperator, leftFragment, rightFragment, context);
    JoinNode joinNode;
    if (node instanceof PhysicalHashJoinOperator) {
        joinNode = new HashJoinNode(
                context.getNextNodeId(),
                leftFragment.getPlanRoot(), rightFragment.getPlanRoot(),
                joinOperator, eqJoinConjuncts, otherJoinConjuncts);
    } else if (node instanceof PhysicalMergeJoinOperator) {
        joinNode = new MergeJoinNode(
                context.getNextNodeId(),
                leftFragment.getPlanRoot(), rightFragment.getPlanRoot(),
                joinOperator,
eqJoinConjuncts, otherJoinConjuncts);
    } else {
        throw new StarRocksPlannerException("unknown join operator: " + node, INTERNAL_ERROR);
    }
    if (node.getProjection() != null) {
        // Restrict the join's output slots to the columns actually consumed by the
        // projection (common sub-expressions' own keys excluded).
        ColumnRefSet outputColumns = new ColumnRefSet();
        for (ScalarOperator s : node.getProjection().getColumnRefMap().values()) {
            outputColumns.union(s.getUsedColumns());
        }
        for (ScalarOperator s : node.getProjection().getCommonSubOperatorMap().values()) {
            outputColumns.union(s.getUsedColumns());
        }
        outputColumns.except(new ArrayList<>(node.getProjection().getCommonSubOperatorMap().keySet()));
        joinNode.setOutputSlots(outputColumns.getStream().boxed().collect(Collectors.toList()));
    }
    joinNode.setDistributionMode(distributionMode);
    joinNode.getConjuncts().addAll(conjuncts);
    joinNode.setLimit(node.getLimit());
    joinNode.computeStatistics(optExpr.getStatistics());
    joinNode.setProbePartitionByExprs(probePartitionByExprs);
    if (shouldBuildGlobalRuntimeFilter()) {
        joinNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl());
    }
    return buildJoinFragment(context, leftFragment, rightFragment, distributionMode, joinNode);
}

// True when the node is an ExchangeNode with the expected distribution type.
private boolean isExchangeWithDistributionType(PlanNode node,
                                               DistributionSpec.DistributionType expectedType) {
    if (!(node instanceof ExchangeNode)) {
        return false;
    }
    ExchangeNode exchangeNode = (ExchangeNode) node;
    return Objects.equals(exchangeNode.getDistributionType(), expectedType);
}

// A colocate join requires every child's required distribution to be a LOCAL
// hash shuffle.
private boolean isColocateJoin(OptExpression optExpression) {
    return optExpression.getRequiredProperties().stream().allMatch(
            physicalPropertySet -> {
                if (!physicalPropertySet.getDistributionProperty().isShuffle()) {
                    return false;
                }
                HashDistributionDesc.SourceType hashSourceType =
                        ((HashDistributionSpec) (physicalPropertySet.getDistributionProperty().getSpec()))
                                .getHashDistributionDesc().getSourceType();
                return hashSourceType.equals(HashDistributionDesc.SourceType.LOCAL);
            });
}

// A shuffle join requires every child's required distribution to come from a
// shuffle source (join, enforce, or agg). (Continues below.)
public boolean isShuffleJoin(OptExpression optExpression) {
    return
// (continuation of isShuffleJoin(OptExpression) — header is on the previous source line)
optExpression.getRequiredProperties().stream().allMatch(
                physicalPropertySet -> {
                    if (!physicalPropertySet.getDistributionProperty().isShuffle()) {
                        return false;
                    }
                    HashDistributionDesc.SourceType hashSourceType =
                            ((HashDistributionSpec) (physicalPropertySet.getDistributionProperty().getSpec()))
                                    .getHashDistributionDesc().getSourceType();
                    // Any shuffle flavor (join / enforce / agg) qualifies as a shuffle join.
                    return hashSourceType.equals(HashDistributionDesc.SourceType.SHUFFLE_JOIN) ||
                            hashSourceType.equals(HashDistributionDesc.SourceType.SHUFFLE_ENFORCE) ||
                            hashSourceType.equals(HashDistributionDesc.SourceType.SHUFFLE_AGG);
                });
    }

    // Merges the removed fragment into the staying one for a (local) bucket-shuffle join:
    // the removed side is re-partitioned with BUCKET_SHUFFLE_HASH_PARTITIONED and its children,
    // plan root, and global dicts are absorbed by the staying fragment.
    public PlanFragment computeBucketShufflePlanFragment(ExecPlan context,
                                                         PlanFragment stayFragment,
                                                         PlanFragment removeFragment,
                                                         JoinNode hashJoinNode) {
        hashJoinNode.setLocalHashBucket(true);
        hashJoinNode.setPartitionExprs(removeFragment.getDataPartition().getPartitionExprs());
        removeFragment.getChild(0)
                .setOutputPartition(new DataPartition(TPartitionType.BUCKET_SHUFFLE_HASH_PARTITIONED,
                        removeFragment.getDataPartition().getPartitionExprs()));
        // Remove then re-add so the staying fragment ends up last in the fragment list.
        context.getFragments().remove(removeFragment);
        context.getFragments().remove(stayFragment);
        context.getFragments().add(stayFragment);
        stayFragment.setPlanRoot(hashJoinNode);
        stayFragment.addChildren(removeFragment.getChildren());
        stayFragment.mergeQueryGlobalDicts(removeFragment.getQueryGlobalDicts());
        return stayFragment;
    }

    // Same fragment-merging scheme as above, but for a shuffle hash-bucket join: the removed
    // side is re-partitioned with plain HASH_PARTITIONED instead of bucket shuffle.
    public PlanFragment computeShuffleHashBucketPlanFragment(ExecPlan context,
                                                             PlanFragment stayFragment,
                                                             PlanFragment removeFragment,
                                                             JoinNode hashJoinNode) {
        hashJoinNode.setPartitionExprs(removeFragment.getDataPartition().getPartitionExprs());
        DataPartition dataPartition = new DataPartition(TPartitionType.HASH_PARTITIONED,
                removeFragment.getDataPartition().getPartitionExprs());
        removeFragment.getChild(0).setOutputPartition(dataPartition);
        context.getFragments().remove(removeFragment);
        context.getFragments().remove(stayFragment);
        context.getFragments().add(stayFragment);
        stayFragment.setPlanRoot(hashJoinNode);
        stayFragment.addChildren(removeFragment.getChildren());
        stayFragment.mergeQueryGlobalDicts(removeFragment.getQueryGlobalDicts());
        return stayFragment;
    }

    // Translates PhysicalAssertOneRow into an AssertNumRowsNode on top of the child fragment.
    @Override
    public PlanFragment visitPhysicalAssertOneRow(OptExpression optExpression, ExecPlan context) {
        PlanFragment inputFragment = visit(optExpression.inputAt(0), context);
        // All child slots become nullable: the assert node may emit NULLs for the empty case.
        for (TupleId id : inputFragment.getPlanRoot().getTupleIds()) {
            context.getDescTbl().getTupleDesc(id).getSlots().forEach(s -> s.setIsNullable(true));
        }
        PhysicalAssertOneRowOperator assertOneRow = (PhysicalAssertOneRowOperator) optExpression.getOp();
        AssertNumRowsNode node =
                new AssertNumRowsNode(context.getNextNodeId(), inputFragment.getPlanRoot(),
                        new AssertNumRowsElement(assertOneRow.getCheckRows(), assertOneRow.getTips(),
                                assertOneRow.getAssertion()));
        node.computeStatistics(optExpression.getStatistics());
        inputFragment.setPlanRoot(node);
        return inputFragment;
    }

    // Translates a window (analytic) operator into an AnalyticEvalNode.
    // (Method continues on the next source line.)
    @Override
    public PlanFragment visitPhysicalAnalytic(OptExpression optExpr, ExecPlan context) {
        PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
        PhysicalWindowOperator node = (PhysicalWindowOperator) optExpr.getOp();

        List<Expr> analyticFnCalls = new ArrayList<>();
        // One output slot per analytic call, keyed by the call's column ref id.
        TupleDescriptor outputTupleDesc = context.getDescTbl().createTupleDescriptor();
        for (Map.Entry<ColumnRefOperator, CallOperator> analyticCall : node.getAnalyticCall().entrySet()) {
            Expr analyticFunction = ScalarOperatorToExpr.buildExecExpression(analyticCall.getValue(),
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
            analyticFnCalls.add(analyticFunction);

            SlotDescriptor slotDesc = context.getDescTbl()
                    .addSlotDescriptor(outputTupleDesc, new SlotId(analyticCall.getKey().getId()));
            slotDesc.setType(analyticFunction.getType());
            slotDesc.setIsNullable(analyticFunction.isNullable());
            slotDesc.setIsMaterialized(true);
            context.getColRefToExpr()
                    .put(analyticCall.getKey(), new SlotRef(analyticCall.getKey().toString(), slotDesc));
        }

        List<Expr> partitionExprs =
// (continuation of visitPhysicalAnalytic — header is on the previous source line)
node.getPartitionExpressions().stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                        new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                .collect(Collectors.toList());

        List<OrderByElement> orderByElements = node.getOrderByElements().stream().map(e -> new OrderByElement(
                ScalarOperatorToExpr.buildExecExpression(e.getColumnRef(),
                        new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())),
                e.isAscending(), e.isNullsFirst())).collect(Collectors.toList());

        AnalyticEvalNode analyticEvalNode = new AnalyticEvalNode(
                context.getNextNodeId(),
                inputFragment.getPlanRoot(),
                analyticFnCalls,
                partitionExprs,
                orderByElements,
                node.getAnalyticWindow(),
                null, outputTupleDesc, null, null,
                context.getDescTbl().createTupleDescriptor());
        analyticEvalNode.setSubstitutedPartitionExprs(partitionExprs);
        analyticEvalNode.setLimit(node.getLimit());
        analyticEvalNode.setHasNullableGenerateChild();
        analyticEvalNode.computeStatistics(optExpr.getStatistics());
        // Colocate execution is possible when a colocated OLAP scan feeds this fragment.
        if (hasColocateOlapScanChildInFragment(analyticEvalNode)) {
            analyticEvalNode.setColocate(true);
        }

        List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
        ScalarOperatorToExpr.FormatterContext formatterContext =
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
        for (ScalarOperator predicate : predicates) {
            analyticEvalNode.getConjuncts()
                    .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        }

        // Let a child SortNode know the analytic partition exprs (enables partition-sorted input).
        PlanNode root = inputFragment.getPlanRoot();
        if (root instanceof SortNode) {
            SortNode sortNode = (SortNode) root;
            sortNode.setAnalyticPartitionExprs(analyticEvalNode.getPartitionExprs());
        }

        inputFragment.setPlanRoot(analyticEvalNode);
        return inputFragment;
    }

    // Shared translation for UNION / EXCEPT / INTERSECT. Builds the set-operation node, its
    // output tuple, and an exchange from each child fragment into the new fragment.
    // (Method continues over the following source lines.)
    private PlanFragment buildSetOperation(OptExpression optExpr, ExecPlan context, OperatorType operatorType) {
        PhysicalSetOperation setOperation = (PhysicalSetOperation) optExpr.getOp();
        TupleDescriptor setOperationTuple = context.getDescTbl().createTupleDescriptor();

        // One materialized output slot per set-operation output column.
        for (ColumnRefOperator columnRefOperator : setOperation.getOutputColumnRefOp()) {
            SlotDescriptor slotDesc = context.getDescTbl()
                    .addSlotDescriptor(setOperationTuple, new SlotId(columnRefOperator.getId()));
            slotDesc.setType(columnRefOperator.getType());
            slotDesc.setIsMaterialized(true);
            slotDesc.setIsNullable(columnRefOperator.isNullable());

            context.getColRefToExpr().put(columnRefOperator,
                    new SlotRef(columnRefOperator.toString(), slotDesc));
        }

        SetOperationNode setOperationNode;
        boolean isUnion = false;
        if (operatorType.equals(OperatorType.PHYSICAL_UNION)) {
            isUnion = true;
            setOperationNode = new UnionNode(context.getNextNodeId(), setOperationTuple.getId());
            setOperationNode.setFirstMaterializedChildIdx_(optExpr.arity());
        } else if (operatorType.equals(OperatorType.PHYSICAL_EXCEPT)) {
            setOperationNode = new ExceptNode(context.getNextNodeId(), setOperationTuple.getId());
        } else if (operatorType.equals(OperatorType.PHYSICAL_INTERSECT)) {
            setOperationNode = new IntersectNode(context.getNextNodeId(), setOperationTuple.getId());
        } else {
            throw new StarRocksPlannerException("Unsupported set operation", INTERNAL_ERROR);
        }

        // Per child: map each output slot id to the corresponding child slot id.
        List<Map<Integer, Integer>> outputSlotIdToChildSlotIdMaps = new ArrayList<>();
        for (int childIdx = 0; childIdx < optExpr.arity(); ++childIdx) {
            Map<Integer, Integer> slotIdMap = new HashMap<>();
            List<ColumnRefOperator> childOutput = setOperation.getChildOutputColumns().get(childIdx);
            Preconditions.checkState(childOutput.size() == setOperation.getOutputColumnRefOp().size());
            for (int columnIdx = 0; columnIdx < setOperation.getOutputColumnRefOp().size(); ++columnIdx) {
                Integer resultColumnIdx = setOperation.getOutputColumnRefOp().get(columnIdx).getId();
                slotIdMap.put(resultColumnIdx, childOutput.get(columnIdx).getId());
            }
            outputSlotIdToChildSlotIdMaps.add(slotIdMap);
            Preconditions.checkState(slotIdMap.size() == setOperation.getOutputColumnRefOp().size());
        }
        setOperationNode.setOutputSlotIdToChildSlotIdMaps(outputSlotIdToChildSlotIdMaps);
        Preconditions.checkState(optExpr.getInputs().size() == setOperation.getChildOutputColumns().size());

        PlanFragment setOperationFragment =
                new PlanFragment(context.getNextFragmentId(), setOperationNode, DataPartition.RANDOM);
        List<List<Expr>> materializedResultExprLists = Lists.newArrayList();

        // Visit each child, collect its materialized result exprs, and hook it to the new
        // fragment through an ExchangeNode. UNION children stay RANDOM-partitioned; EXCEPT /
        // INTERSECT children are hash-partitioned on their result exprs.
        for (int i = 0; i < optExpr.getInputs().size(); i++) {
            List<ColumnRefOperator> childOutput = setOperation.getChildOutputColumns().get(i);
            PlanFragment fragment = visit(optExpr.getInputs().get(i), context);

            List<Expr> materializedExpressions = Lists.newArrayList();
            for (ColumnRefOperator ref : childOutput) {
                SlotDescriptor slotDescriptor = context.getDescTbl().getSlotDesc(new SlotId(ref.getId()));
                materializedExpressions.add(new SlotRef(slotDescriptor));
            }

            materializedResultExprLists.add(materializedExpressions);

            if (isUnion) {
                fragment.setOutputPartition(DataPartition.RANDOM);
            } else {
                fragment.setOutputPartition(DataPartition.hashPartitioned(materializedExpressions));
            }

            ExchangeNode exchangeNode =
                    new ExchangeNode(context.getNextNodeId(), fragment.getPlanRoot(), fragment.getDataPartition());
            exchangeNode.setFragment(setOperationFragment);
            fragment.setDestination(exchangeNode);
            setOperationNode.addChild(exchangeNode);
        }

        // Output slots inherit nullability from any nullable-generating child.
        setOperationNode.setHasNullableGenerateChild();
        List<Expr> setOutputList = Lists.newArrayList();
        for (ColumnRefOperator columnRefOperator : setOperation.getOutputColumnRefOp()) {
            SlotDescriptor slotDesc = context.getDescTbl().getSlotDesc(new SlotId(columnRefOperator.getId()));
            slotDesc.setIsNullable(slotDesc.getIsNullable() | setOperationNode.isHasNullableGenerateChild());
            setOutputList.add(new SlotRef(String.valueOf(columnRefOperator.getId()), slotDesc));
        }
        setOperationTuple.computeMemLayout();
        setOperationNode.setSetOperationOutputList(setOutputList);
        setOperationNode.setMaterializedResultExprLists_(materializedResultExprLists);
        setOperationNode.setLimit(setOperation.getLimit());
        setOperationNode.computeStatistics(optExpr.getStatistics());
// (tail of buildSetOperation — the body starts on the previous source lines)
context.getFragments().add(setOperationFragment);
        return setOperationFragment;
    }

    // UNION / EXCEPT / INTERSECT all delegate to the shared buildSetOperation translator.
    @Override
    public PlanFragment visitPhysicalUnion(OptExpression optExpr, ExecPlan context) {
        return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_UNION);
    }

    @Override
    public PlanFragment visitPhysicalExcept(OptExpression optExpr, ExecPlan context) {
        return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_EXCEPT);
    }

    @Override
    public PlanFragment visitPhysicalIntersect(OptExpression optExpr, ExecPlan context) {
        return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_INTERSECT);
    }

    // Translates GROUPING SETS / ROLLUP / CUBE into a RepeatNode that replays each input row
    // once per grouping set.
    @Override
    public PlanFragment visitPhysicalRepeat(OptExpression optExpr, ExecPlan context) {
        PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
        PhysicalRepeatOperator repeatOperator = (PhysicalRepeatOperator) optExpr.getOp();

        // Output tuple for the generated grouping columns (e.g. GROUPING_ID).
        TupleDescriptor outputGroupingTuple = context.getDescTbl().createTupleDescriptor();
        for (ColumnRefOperator columnRefOperator : repeatOperator.getOutputGrouping()) {
            SlotDescriptor slotDesc = context.getDescTbl()
                    .addSlotDescriptor(outputGroupingTuple, new SlotId(columnRefOperator.getId()));
            slotDesc.setType(columnRefOperator.getType());
            slotDesc.setIsMaterialized(true);
            slotDesc.setIsNullable(columnRefOperator.isNullable());

            context.getColRefToExpr().put(columnRefOperator,
                    new SlotRef(columnRefOperator.toString(), slotDesc));
        }
        outputGroupingTuple.computeMemLayout();

        // One slot-id set per grouping set.
        List<Set<Integer>> repeatSlotIdList = new ArrayList<>();
        for (List<ColumnRefOperator> repeat : repeatOperator.getRepeatColumnRef()) {
            repeatSlotIdList.add(
                    repeat.stream().map(ColumnRefOperator::getId).collect(Collectors.toSet()));
        }

        RepeatNode repeatNode = new RepeatNode(
                context.getNextNodeId(),
                inputFragment.getPlanRoot(), outputGroupingTuple, repeatSlotIdList,
                repeatOperator.getGroupingIds());
        List<ScalarOperator> predicates = Utils.extractConjuncts(repeatOperator.getPredicate());
        ScalarOperatorToExpr.FormatterContext formatterContext =
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
        for (ScalarOperator predicate : predicates) {
            repeatNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        }
        repeatNode.computeStatistics(optExpr.getStatistics());

        inputFragment.setPlanRoot(repeatNode);
        return inputFragment;
    }

    // Translates a residual filter into a SelectNode over the child fragment.
    @Override
    public PlanFragment visitPhysicalFilter(OptExpression optExpr, ExecPlan context) {
        PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
        PhysicalFilterOperator filter = (PhysicalFilterOperator) optExpr.getOp();

        List<Expr> predicates = Utils.extractConjuncts(filter.getPredicate()).stream()
                .map(d -> ScalarOperatorToExpr.buildExecExpression(d,
                        new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                .collect(Collectors.toList());

        SelectNode selectNode =
                new SelectNode(context.getNextNodeId(), inputFragment.getPlanRoot(), predicates);
        selectNode.setLimit(filter.getLimit());
        selectNode.computeStatistics(optExpr.getStatistics());
        inputFragment.setPlanRoot(selectNode);
        return inputFragment;
    }

    // Translates a table function (UDTF / lateral) into a TableFunctionNode.
    // (Method continues on the next source line.)
    @Override
    public PlanFragment visitPhysicalTableFunction(OptExpression optExpression, ExecPlan context) {
        PlanFragment inputFragment = visit(optExpression.inputAt(0), context);
        PhysicalTableFunctionOperator physicalTableFunction = (PhysicalTableFunctionOperator) optExpression.getOp();

        // Output tuple covering every column this operator produces.
        TupleDescriptor udtfOutputTuple = context.getDescTbl().createTupleDescriptor();
        for (int columnId : physicalTableFunction.getOutputColumns().getColumnIds()) {
            ColumnRefOperator columnRefOperator = columnRefFactory.getColumnRef(columnId);
            SlotDescriptor slotDesc = context.getDescTbl().addSlotDescriptor(udtfOutputTuple,
                    new SlotId(columnRefOperator.getId()));
            slotDesc.setType(columnRefOperator.getType());
            slotDesc.setIsMaterialized(true);
            slotDesc.setIsNullable(columnRefOperator.isNullable());

            context.getColRefToExpr().put(columnRefOperator,
                    new SlotRef(columnRefOperator.toString(), slotDesc));
        }
        udtfOutputTuple.computeMemLayout();
// (tail of visitPhysicalTableFunction — the body starts on the previous source line)
TableFunctionNode tableFunctionNode = new TableFunctionNode(context.getNextNodeId(),
                inputFragment.getPlanRoot(),
                udtfOutputTuple,
                physicalTableFunction.getFn(),
                physicalTableFunction.getFnParamColumnRef().stream().map(ColumnRefOperator::getId)
                        .collect(Collectors.toList()),
                Arrays.stream(physicalTableFunction.getOuterColumnRefSet().getColumnIds()).boxed()
                        .collect(Collectors.toList()),
                Arrays.stream(physicalTableFunction.getFnResultColumnRefSet().getColumnIds()).boxed()
                        .collect(Collectors.toList()));
        tableFunctionNode.computeStatistics(optExpression.getStatistics());
        tableFunctionNode.setLimit(physicalTableFunction.getLimit());
        inputFragment.setPlanRoot(tableFunctionNode);
        return inputFragment;
    }

    // The physical limit is already enforced by the child plan; nothing extra to build here.
    @Override
    public PlanFragment visitPhysicalLimit(OptExpression optExpression, ExecPlan context) {
        return visit(optExpression.inputAt(0), context);
    }

    // Translates a CTE consumer: wires an ExchangeNode from the producer's MultiCast fragment,
    // projects the producer's columns onto the consumer's refs, then applies any pushed-down
    // predicate and limit.
    @Override
    public PlanFragment visitPhysicalCTEConsume(OptExpression optExpression, ExecPlan context) {
        PhysicalCTEConsumeOperator consume = (PhysicalCTEConsumeOperator) optExpression.getOp();
        int cteId = consume.getCteId();

        MultiCastPlanFragment cteFragment = (MultiCastPlanFragment) context.getCteProduceFragments().get(cteId);
        ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(),
                cteFragment.getPlanRoot(), DistributionSpec.DistributionType.SHUFFLE);
        exchangeNode.setReceiveColumns(consume.getCteOutputColumnRefMap().values().stream()
                .map(ColumnRefOperator::getId).collect(Collectors.toList()));
        exchangeNode.setDataPartition(cteFragment.getDataPartition());
        exchangeNode.setNumInstances(cteFragment.getPlanRoot().getNumInstances());

        PlanFragment consumeFragment = new PlanFragment(context.getNextFragmentId(), exchangeNode,
                cteFragment.getDataPartition());

        // Rename producer columns to the consumer's column refs via a projection.
        Map<ColumnRefOperator, ScalarOperator> projectMap = Maps.newHashMap();
        projectMap.putAll(consume.getCteOutputColumnRefMap());
        consumeFragment = buildProjectNode(optExpression, new Projection(projectMap), consumeFragment, context);
        consumeFragment.setQueryGlobalDicts(cteFragment.getQueryGlobalDicts());
        consumeFragment.setLoadGlobalDicts(cteFragment.getLoadGlobalDicts());

        // Predicate pushed into the consumer becomes a SelectNode on top of the projection.
        if (consume.getPredicate() != null) {
            List<Expr> predicates = Utils.extractConjuncts(consume.getPredicate()).stream()
                    .map(d -> ScalarOperatorToExpr.buildExecExpression(d,
                            new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                    .collect(Collectors.toList());
            SelectNode selectNode =
                    new SelectNode(context.getNextNodeId(), consumeFragment.getPlanRoot(), predicates);
            selectNode.computeStatistics(optExpression.getStatistics());
            consumeFragment.setPlanRoot(selectNode);
        }

        if (consume.hasLimit()) {
            consumeFragment.getPlanRoot().setLimit(consume.getLimit());
        }

        cteFragment.getDestNodeList().add(exchangeNode);
        consumeFragment.addChild(cteFragment);
        context.getFragments().add(consumeFragment);
        return consumeFragment;
    }

    // Translates a CTE producer: wraps the child fragment into a MultiCastPlanFragment and
    // registers it so each consumer can attach an exchange to it.
    @Override
    public PlanFragment visitPhysicalCTEProduce(OptExpression optExpression, ExecPlan context) {
        PlanFragment child = visit(optExpression.inputAt(0), context);
        int cteId = ((PhysicalCTEProduceOperator) optExpression.getOp()).getCteId();
        context.getFragments().remove(child);
        MultiCastPlanFragment cteProduce = new MultiCastPlanFragment(child);

        List<Expr> outputs = Lists.newArrayList();
        optExpression.getOutputColumns().getStream()
                .forEach(i -> outputs.add(context.getColRefToExpr().get(columnRefFactory.getColumnRef(i))));

        cteProduce.setOutputExprs(outputs);
        context.getCteProduceFragments().put(cteId, cteProduce);
        context.getFragments().add(cteProduce);
        return child;
    }

    // Anchor: build the producer side first, then return the consumer-side plan.
    @Override
    public PlanFragment visitPhysicalCTEAnchor(OptExpression optExpression, ExecPlan context) {
        visit(optExpression.inputAt(0), context);
        return visit(optExpression.inputAt(1), context);
    }

    @Override
    public PlanFragment visitPhysicalNoCTE(OptExpression optExpression, ExecPlan context) {
        return visit(optExpression.inputAt(0), context);
    }

    // Value holder for the three predicate groups produced by buildJoinExpr:
    // equi-join conjuncts, remaining ON-clause conjuncts, and post-join predicates.
    // (Class body continues on the next source line.)
    static class JoinExprInfo {
        public final List<Expr> eqJoinConjuncts;
        public final List<Expr>
// (continuation of static class JoinExprInfo — declaration starts on the previous source line)
otherJoin;
        public final List<Expr> conjuncts;

        public JoinExprInfo(List<Expr> eqJoinConjuncts, List<Expr> otherJoin, List<Expr> conjuncts) {
            this.eqJoinConjuncts = eqJoinConjuncts;
            this.otherJoin = otherJoin;
            this.conjuncts = conjuncts;
        }
    }

    // Decomposes a join's ON clause and residual predicate into the three groups carried by
    // JoinExprInfo. Works for both regular physical joins and stream joins.
    private JoinExprInfo buildJoinExpr(OptExpression optExpr, ExecPlan context) {
        ScalarOperator predicate = optExpr.getOp().getPredicate();
        ScalarOperator onPredicate;
        if (optExpr.getOp() instanceof PhysicalJoinOperator) {
            onPredicate = ((PhysicalJoinOperator) optExpr.getOp()).getOnPredicate();
        } else if (optExpr.getOp() instanceof PhysicalStreamJoinOperator) {
            onPredicate = ((PhysicalStreamJoinOperator) optExpr.getOp()).getOnPredicate();
        } else {
            throw new IllegalStateException("not supported join " + optExpr.getOp());
        }

        List<ScalarOperator> onPredicates = Utils.extractConjuncts(onPredicate);

        ColumnRefSet leftChildColumns = optExpr.inputAt(0).getOutputColumns();
        ColumnRefSet rightChildColumns = optExpr.inputAt(1).getOutputColumns();

        List<BinaryPredicateOperator> eqOnPredicates = JoinHelper.getEqualsPredicate(
                leftChildColumns, rightChildColumns, onPredicates);
        Preconditions.checkState(!eqOnPredicates.isEmpty(), "must be eq-join");

        // Normalize each equi-conjunct so its left operand references the left child.
        for (BinaryPredicateOperator s : eqOnPredicates) {
            if (!optExpr.inputAt(0).getLogicalProperty().getOutputColumns()
                    .containsAll(s.getChild(0).getUsedColumns())) {
                s.swap();
            }
        }

        List<Expr> eqJoinConjuncts =
                eqOnPredicates.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                        .collect(Collectors.toList());

        // A constant equi-conjunct would degenerate the hash join; reject it for now.
        for (Expr expr : eqJoinConjuncts) {
            if (expr.isConstant()) {
                throw unsupportedException("Support join on constant predicate later");
            }
        }

        // Everything in the ON clause that is not an equi-conjunct is an "other join" conjunct.
        List<ScalarOperator> otherJoin = Utils.extractConjuncts(onPredicate);
        otherJoin.removeAll(eqOnPredicates);
        List<Expr> otherJoinConjuncts = otherJoin.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                        new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                .collect(Collectors.toList());

        // The operator's own predicate is applied after the join.
        List<ScalarOperator> predicates = Utils.extractConjuncts(predicate);
        List<Expr> conjuncts = predicates.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                        new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                .collect(Collectors.toList());

        return new JoinExprInfo(eqJoinConjuncts, otherJoinConjuncts, conjuncts);
    }

    // Translates an incremental/stream join into a StreamJoinNode.
    // (Method continues on the next source line.)
    @Override
    public PlanFragment visitPhysicalStreamJoin(OptExpression optExpr, ExecPlan context) {
        PhysicalStreamJoinOperator node = (PhysicalStreamJoinOperator) optExpr.getOp();
        PlanFragment leftFragment = visit(optExpr.inputAt(0), context);
        PlanFragment rightFragment = visit(optExpr.inputAt(1), context);

        ColumnRefSet leftChildColumns = optExpr.inputAt(0).getLogicalProperty().getOutputColumns();
        ColumnRefSet rightChildColumns = optExpr.inputAt(1).getLogicalProperty().getOutputColumns();

        if (!node.getJoinType().isInnerJoin()) {
            throw new NotImplementedException("Only inner join is supported");
        }

        JoinOperator joinOperator = node.getJoinType();
        PlanNode leftFragmentPlanRoot = leftFragment.getPlanRoot();
        PlanNode rightFragmentPlanRoot = rightFragment.getPlanRoot();

        // Stream joins currently always use shuffle hash-bucket distribution.
        JoinNode.DistributionMode distributionMode = JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET;

        JoinExprInfo joinExpr = buildJoinExpr(optExpr, context);
        List<Expr> eqJoinConjuncts = joinExpr.eqJoinConjuncts;
        List<Expr> otherJoinConjuncts = joinExpr.otherJoin;
        List<Expr> conjuncts = joinExpr.conjuncts;

        // Mark slots on the non-preserved side(s) nullable for outer joins.
        // NOTE(review): only the inner-join path is reachable here given the check above.
        List<PlanFragment> nullablePlanFragments = new ArrayList<>();
        if (joinOperator.isLeftOuterJoin()) {
            nullablePlanFragments.add(rightFragment);
        } else if (joinOperator.isRightOuterJoin()) {
            nullablePlanFragments.add(leftFragment);
        } else if (joinOperator.isFullOuterJoin()) {
            nullablePlanFragments.add(leftFragment);
            nullablePlanFragments.add(rightFragment);
        }
        for (PlanFragment planFragment : nullablePlanFragments) {
            for (TupleId tupleId : planFragment.getPlanRoot().getTupleIds()) {
                context.getDescTbl().getTupleDesc(tupleId).getSlots().forEach(slot ->
// (tail of visitPhysicalStreamJoin — the body starts on the previous source line)
slot.setIsNullable(true));
            }
        }

        JoinNode joinNode = new StreamJoinNode(context.getNextNodeId(), leftFragmentPlanRoot, rightFragmentPlanRoot,
                node.getJoinType(), eqJoinConjuncts, otherJoinConjuncts);

        // Same output-slot pruning as the regular join path: keep only columns the projection
        // uses, excluding common sub-operator keys produced by the projection itself.
        if (node.getProjection() != null) {
            ColumnRefSet outputColumns = new ColumnRefSet();
            for (ScalarOperator s : node.getProjection().getColumnRefMap().values()) {
                outputColumns.union(s.getUsedColumns());
            }
            for (ScalarOperator s : node.getProjection().getCommonSubOperatorMap().values()) {
                outputColumns.union(s.getUsedColumns());
            }
            outputColumns.except(new ArrayList<>(node.getProjection().getCommonSubOperatorMap().keySet()));
            joinNode.setOutputSlots(outputColumns.getStream().boxed().collect(Collectors.toList()));
        }
        joinNode.setDistributionMode(distributionMode);
        joinNode.getConjuncts().addAll(conjuncts);
        joinNode.setLimit(node.getLimit());
        joinNode.computeStatistics(optExpr.getStatistics());

        return buildJoinFragment(context, leftFragment, rightFragment, distributionMode, joinNode);
    }

    // Assembles the final fragment layout for a join, merging or creating fragments according
    // to the chosen distribution mode (broadcast / partitioned / colocate / replicated /
    // shuffle hash-bucket / local hash-bucket).
    @NotNull
    private PlanFragment buildJoinFragment(ExecPlan context, PlanFragment leftFragment, PlanFragment rightFragment,
                                           JoinNode.DistributionMode distributionMode, JoinNode joinNode) {
        if (distributionMode.equals(JoinNode.DistributionMode.BROADCAST)) {
            // Broadcast: fold the right fragment into the left one.
            setJoinPushDown(joinNode);
            rightFragment.getPlanRoot().setFragment(leftFragment);
            context.getFragments().remove(rightFragment);

            // Remove and re-add so the surviving fragment is last in the fragment list.
            context.getFragments().remove(leftFragment);
            context.getFragments().add(leftFragment);

            leftFragment.setPlanRoot(joinNode);
            leftFragment.addChildren(rightFragment.getChildren());
            leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
            return leftFragment;
        } else if (distributionMode.equals(JoinNode.DistributionMode.PARTITIONED)) {
            // Partitioned (shuffle-shuffle): hash-repartition both sides into a new fragment.
            DataPartition lhsJoinPartition = new DataPartition(TPartitionType.HASH_PARTITIONED,
                    leftFragment.getDataPartition().getPartitionExprs());
            DataPartition rhsJoinPartition = new DataPartition(TPartitionType.HASH_PARTITIONED,
                    rightFragment.getDataPartition().getPartitionExprs());
            leftFragment.getChild(0).setOutputPartition(lhsJoinPartition);
            rightFragment.getChild(0).setOutputPartition(rhsJoinPartition);

            context.getFragments().remove(leftFragment);
            context.getFragments().remove(rightFragment);

            PlanFragment joinFragment = new PlanFragment(context.getNextFragmentId(),
                    joinNode, lhsJoinPartition);
            joinFragment.addChildren(leftFragment.getChildren());
            joinFragment.addChildren(rightFragment.getChildren());

            joinFragment.mergeQueryGlobalDicts(leftFragment.getQueryGlobalDicts());
            joinFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
            context.getFragments().add(joinFragment);

            return joinFragment;
        } else if (distributionMode.equals(JoinNode.DistributionMode.COLOCATE) ||
                distributionMode.equals(JoinNode.DistributionMode.REPLICATED)) {
            // Colocate / replicated: both children run locally; merge right into left.
            if (distributionMode.equals(JoinNode.DistributionMode.COLOCATE)) {
                joinNode.setColocate(true, "");
            } else {
                joinNode.setReplicated(true);
            }
            setJoinPushDown(joinNode);

            joinNode.setChild(0, leftFragment.getPlanRoot());
            joinNode.setChild(1, rightFragment.getPlanRoot());
            leftFragment.setPlanRoot(joinNode);
            leftFragment.addChildren(rightFragment.getChildren());
            context.getFragments().remove(rightFragment);

            context.getFragments().remove(leftFragment);
            context.getFragments().add(leftFragment);

            leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
            return leftFragment;
        } else if (distributionMode.equals(JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET)) {
            setJoinPushDown(joinNode);
            if (!(leftFragment.getPlanRoot() instanceof ExchangeNode) &&
                    !(rightFragment.getPlanRoot() instanceof ExchangeNode)) {
                // Neither side shuffles: merge right into left directly.
                joinNode.setChild(0, leftFragment.getPlanRoot());
                joinNode.setChild(1, rightFragment.getPlanRoot());
                leftFragment.setPlanRoot(joinNode);
                leftFragment.addChildren(rightFragment.getChildren());
                context.getFragments().remove(rightFragment);

                context.getFragments().remove(leftFragment);
                context.getFragments().add(leftFragment);

                leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts());
                return leftFragment;
            } else if (leftFragment.getPlanRoot() instanceof ExchangeNode &&
                    !(rightFragment.getPlanRoot() instanceof ExchangeNode)) {
                // Exactly one side shuffles: the non-exchange side "stays".
                return computeShuffleHashBucketPlanFragment(context, rightFragment,
                        leftFragment, joinNode);
            } else {
                return computeShuffleHashBucketPlanFragment(context, leftFragment,
                        rightFragment, joinNode);
            }
        } else {
            // LOCAL_HASH_BUCKET: bucket-shuffle the exchange side into the scan side.
            setJoinPushDown(joinNode);
            if (leftFragment.getPlanRoot() instanceof ExchangeNode &&
                    !(rightFragment.getPlanRoot() instanceof ExchangeNode)) {
                leftFragment = computeBucketShufflePlanFragment(context, rightFragment,
                        leftFragment, joinNode);
            } else {
                leftFragment = computeBucketShufflePlanFragment(context, leftFragment,
                        rightFragment, joinNode);
            }
            return leftFragment;
        }
    }

    // Derives the join distribution mode from the shapes of the two child plan roots and the
    // optimizer's required properties. (Method continues on the next source line.)
    @NotNull
    private JoinNode.DistributionMode inferDistributionMode(OptExpression optExpr, PlanNode leftFragmentPlanRoot,
                                                            PlanNode rightFragmentPlanRoot) {
        JoinNode.DistributionMode distributionMode;
        if (isExchangeWithDistributionType(leftFragmentPlanRoot, DistributionSpec.DistributionType.SHUFFLE) &&
                isExchangeWithDistributionType(rightFragmentPlanRoot,
                        DistributionSpec.DistributionType.SHUFFLE)) {
            distributionMode = JoinNode.DistributionMode.PARTITIONED;
        } else if (isExchangeWithDistributionType(rightFragmentPlanRoot,
                DistributionSpec.DistributionType.BROADCAST)) {
            distributionMode = JoinNode.DistributionMode.BROADCAST;
        } else if (!(leftFragmentPlanRoot instanceof ExchangeNode) &&
                !(rightFragmentPlanRoot instanceof ExchangeNode)) {
            // No exchange on either side: colocate, replicated, or bucket-local shuffle.
            if (isColocateJoin(optExpr)) {
                distributionMode = HashJoinNode.DistributionMode.COLOCATE;
            } else if (ConnectContext.get().getSessionVariable().isEnableReplicationJoin() &&
                    rightFragmentPlanRoot.canDoReplicatedJoin()) {
                distributionMode = JoinNode.DistributionMode.REPLICATED;
            } else if (isShuffleJoin(optExpr)) {
                distributionMode = JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET;
            } else {
                Preconditions.checkState(false, "Must be colocate/bucket/replicate join");
                distributionMode = JoinNode.DistributionMode.COLOCATE;
            }
        } else if
// (tail of inferDistributionMode — the chain starts on the previous source line)
(isShuffleJoin(optExpr)) {
            distributionMode = JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET;
        } else {
            // Exactly one side has an exchange and it is not a shuffle join: bucket-local.
            distributionMode = JoinNode.DistributionMode.LOCAL_HASH_BUCKET;
        }
        return distributionMode;
    }

    // Translates a streaming aggregation into a single-phase StreamAggNode.
    @Override
    public PlanFragment visitPhysicalStreamAgg(OptExpression optExpr, ExecPlan context) {
        PhysicalStreamAggOperator node = (PhysicalStreamAggOperator) optExpr.getOp();
        PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
        TupleDescriptor outputTupleDesc = context.getDescTbl().createTupleDescriptor();
        AggregateExprInfo aggExpr =
                buildAggregateTuple(node.getAggregations(), node.getGroupBys(), null, outputTupleDesc, context);
        // Stream agg runs in one phase; input and output use the same tuple descriptor.
        AggregateInfo aggInfo = AggregateInfo.create(aggExpr.groupExpr, aggExpr.aggregateExpr, outputTupleDesc,
                outputTupleDesc, AggregateInfo.AggPhase.FIRST);
        StreamAggNode aggNode = new StreamAggNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo);

        aggNode.setHasNullableGenerateChild();
        aggNode.computeStatistics(optExpr.getStatistics());
        inputFragment.setPlanRoot(aggNode);
        return inputFragment;
    }

    // Translates a stream scan into a BinlogScanNode over the referenced OLAP table.
    @Override
    public PlanFragment visitPhysicalStreamScan(OptExpression optExpr, ExecPlan context) {
        PhysicalStreamScanOperator node = (PhysicalStreamScanOperator) optExpr.getOp();
        OlapTable scanTable = (OlapTable) node.getTable();
        context.getDescTbl().addReferencedTable(scanTable);

        TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
        tupleDescriptor.setTable(scanTable);

        BinlogScanNode binlogScanNode = new BinlogScanNode(context.getNextNodeId(), tupleDescriptor);
        binlogScanNode.computeStatistics(optExpr.getStatistics());
        try {
            binlogScanNode.computeScanRanges();
        } catch (UserException e) {
            // Surface scan-range failures as planner errors; the original cause message is kept.
            throw new StarRocksPlannerException(
                    "Failed to compute scan ranges for StreamScanNode, " + e.getMessage(), INTERNAL_ERROR);
        }

        // One slot per referenced column, nullability taken from the column definition.
        for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) {
            SlotDescriptor slotDescriptor =
                    context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId()));
            slotDescriptor.setColumn(entry.getValue());
            slotDescriptor.setIsNullable(entry.getValue().isAllowNull());
            slotDescriptor.setIsMaterialized(true);
            context.getColRefToExpr().put(entry.getKey(),
                    new SlotRef(entry.getKey().toString(), slotDescriptor));
        }

        List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate());
        ScalarOperatorToExpr.FormatterContext formatterContext =
                new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr());
        for (ScalarOperator predicate : predicates) {
            binlogScanNode.getConjuncts()
                    .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext));
        }
        tupleDescriptor.computeMemLayout();

        context.getScanNodes().add(binlogScanNode);
        PlanFragment fragment =
                new PlanFragment(context.getNextFragmentId(), binlogScanNode, DataPartition.RANDOM);
        context.getFragments().add(fragment);
        return fragment;
    }
}
// Translates an optimized physical operator tree (OptExpression) into executable
// PlanFragments. One visit* method per physical operator; fragments, scan nodes and
// column-ref -> expression mappings are accumulated in the ExecPlan context.
class PhysicalPlanTranslator extends OptExpressionVisitor<PlanFragment, ExecPlan> {
    private final ColumnRefFactory columnRefFactory;
    // Generates unique ids for runtime filters built while translating joins.
    private final IdGenerator<RuntimeFilterId> runtimeFilterIdIdGenerator = RuntimeFilterId.createGenerator();

    public PhysicalPlanTranslator(ColumnRefFactory columnRefFactory) {
        this.columnRefFactory = columnRefFactory;
    }

    // Entry point: translate the root of the optimized expression tree.
    public PlanFragment translate(OptExpression optExpression, ExecPlan context) {
        return visit(optExpression, context);
    }

    // Dispatches to the operator-specific visit method, then stacks a ProjectNode on
    // top of the produced fragment when the operator carries a projection.
    @Override
    public PlanFragment visit(OptExpression optExpression, ExecPlan context) {
        PlanFragment fragment = optExpression.getOp().accept(this, optExpression, context);
        Projection projection = (optExpression.getOp()).getProjection();

        if (projection == null) {
            return fragment;
        } else {
            return buildProjectNode(optExpression, projection, fragment, context);
        }
    }

    // Tells the scan node which string columns are referenced only by simple strict
    // predicates (after dict mapping) and are not in the output, so the scan can skip
    // materializing them after filtering. Gated by a session variable.
    private void setUnUsedOutputColumns(PhysicalOlapScanOperator node, OlapScanNode scanNode,
                                        List<ScalarOperator> predicates, OlapTable referenceTable) {
        if (!ConnectContext.get().getSessionVariable().isEnableFilterUnusedColumnsInScanStage()) {
            return;
        }

        // Pruning is unsafe for aggregate-family tables unless the scan is pre-aggregated.
        if (referenceTable.getKeysType().isAggregationFamily() && !node.isPreAggregation()) {
            return;
        }

        List<ColumnRefOperator> outputColumns = node.getOutputColumns();
        if (outputColumns.isEmpty()) {
            return;
        }
        Set<Integer> outputColumnIds = new HashSet<Integer>();
        for (ColumnRefOperator colref : outputColumns) {
            outputColumnIds.add(colref.getId());
        }

        // Partition predicate columns: those used only by simple strict predicates can be
        // pruned; columns touched by complex predicates cannot.
        Set<Integer> singlePredColumnIds = new HashSet<Integer>();
        Set<Integer> complexPredColumnIds = new HashSet<Integer>();
        // Value columns of AGG/PRIMARY-KEY tables; the scan node treats them specially.
        Set<String> aggOrPrimaryKeyTableValueColumnNames = new HashSet<String>();
        if (referenceTable.getKeysType().isAggregationFamily() ||
                referenceTable.getKeysType() == KeysType.PRIMARY_KEYS) {
            aggOrPrimaryKeyTableValueColumnNames =
                    referenceTable.getFullSchema().stream()
                            .filter(col -> !col.isKey())
                            .map(col -> col.getName())
                            .collect(Collectors.toSet());
        }

        for (ScalarOperator predicate : predicates) {
            ColumnRefSet usedColumns =
predicate.getUsedColumns(); if (DecodeVisitor.isSimpleStrictPredicate(predicate)) { for (int cid : usedColumns.getColumnIds()) { singlePredColumnIds.add(cid); } } else { for (int cid : usedColumns.getColumnIds()) { complexPredColumnIds.add(cid); } } } Set<Integer> unUsedOutputColumnIds = new HashSet<Integer>(); Map<Integer, Integer> dictStringIdToIntIds = node.getDictStringIdToIntIds(); for (Integer cid : singlePredColumnIds) { Integer newCid = cid; if (dictStringIdToIntIds.containsKey(cid)) { newCid = dictStringIdToIntIds.get(cid); } if (!complexPredColumnIds.contains(newCid) && !outputColumnIds.contains(newCid)) { unUsedOutputColumnIds.add(newCid); } } scanNode.setUnUsedOutputStringColumns(unUsedOutputColumnIds, aggOrPrimaryKeyTableValueColumnNames); } @Override public PlanFragment visitPhysicalProject(OptExpression optExpr, ExecPlan context) { PhysicalProjectOperator node = (PhysicalProjectOperator) optExpr.getOp(); PlanFragment inputFragment = visit(optExpr.inputAt(0), context); Preconditions.checkState(!node.getColumnRefMap().isEmpty()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); Map<SlotId, Expr> commonSubOperatorMap = Maps.newHashMap(); for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getCommonSubOperatorMap().entrySet()) { Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getCommonSubOperatorMap())); commonSubOperatorMap.put(new SlotId(entry.getKey().getId()), expr); SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setIsNullable(expr.isNullable()); slotDescriptor.setIsMaterialized(false); slotDescriptor.setType(expr.getType()); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } Map<SlotId, Expr> projectMap = Maps.newHashMap(); for (Map.Entry<ColumnRefOperator, 
ScalarOperator> entry : node.getColumnRefMap().entrySet()) { Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getColumnRefMap())); projectMap.put(new SlotId(entry.getKey().getId()), expr); SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setIsNullable(expr.isNullable()); slotDescriptor.setIsMaterialized(true); slotDescriptor.setType(expr.getType()); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } ProjectNode projectNode = new ProjectNode(context.getNextNodeId(), tupleDescriptor, inputFragment.getPlanRoot(), projectMap, commonSubOperatorMap); projectNode.setHasNullableGenerateChild(); projectNode.computeStatistics(optExpr.getStatistics()); for (SlotId sid : projectMap.keySet()) { SlotDescriptor slotDescriptor = tupleDescriptor.getSlot(sid.asInt()); slotDescriptor.setIsNullable(slotDescriptor.getIsNullable() | projectNode.isHasNullableGenerateChild()); } tupleDescriptor.computeMemLayout(); projectNode.setLimit(inputFragment.getPlanRoot().getLimit()); inputFragment.setPlanRoot(projectNode); return inputFragment; } public PlanFragment buildProjectNode(OptExpression optExpression, Projection node, PlanFragment inputFragment, ExecPlan context) { if (node == null) { return inputFragment; } Preconditions.checkState(!node.getColumnRefMap().isEmpty()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); Map<SlotId, Expr> commonSubOperatorMap = Maps.newHashMap(); for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getCommonSubOperatorMap().entrySet()) { Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getCommonSubOperatorMap())); commonSubOperatorMap.put(new SlotId(entry.getKey().getId()), expr); 
SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setIsNullable(expr.isNullable()); slotDescriptor.setIsMaterialized(false); slotDescriptor.setType(expr.getType()); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } Map<SlotId, Expr> projectMap = Maps.newHashMap(); for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getColumnRefMap().entrySet()) { Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getColumnRefMap())); projectMap.put(new SlotId(entry.getKey().getId()), expr); SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setIsNullable(expr.isNullable()); slotDescriptor.setIsMaterialized(true); slotDescriptor.setType(expr.getType()); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } ProjectNode projectNode = new ProjectNode(context.getNextNodeId(), tupleDescriptor, inputFragment.getPlanRoot(), projectMap, commonSubOperatorMap); projectNode.setHasNullableGenerateChild(); Statistics statistics = optExpression.getStatistics(); Statistics.Builder b = Statistics.builder(); b.setOutputRowCount(statistics.getOutputRowCount()); b.addColumnStatistics(statistics.getOutputColumnsStatistics(new ColumnRefSet(node.getOutputColumns()))); projectNode.computeStatistics(b.build()); for (SlotId sid : projectMap.keySet()) { SlotDescriptor slotDescriptor = tupleDescriptor.getSlot(sid.asInt()); slotDescriptor.setIsNullable(slotDescriptor.getIsNullable() | projectNode.isHasNullableGenerateChild()); } tupleDescriptor.computeMemLayout(); projectNode.setLimit(inputFragment.getPlanRoot().getLimit()); inputFragment.setPlanRoot(projectNode); return inputFragment; } @Override public PlanFragment 
visitPhysicalDecode(OptExpression optExpression, ExecPlan context) { PhysicalDecodeOperator node = (PhysicalDecodeOperator) optExpression.getOp(); PlanFragment inputFragment = visit(optExpression.inputAt(0), context); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); for (TupleId tupleId : inputFragment.getPlanRoot().getTupleIds()) { TupleDescriptor childTuple = context.getDescTbl().getTupleDesc(tupleId); ArrayList<SlotDescriptor> slots = childTuple.getSlots(); for (SlotDescriptor slot : slots) { int slotId = slot.getId().asInt(); boolean isNullable = slot.getIsNullable(); if (node.getDictToStrings().containsKey(slotId)) { Integer stringSlotId = node.getDictToStrings().get(slotId); SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(stringSlotId)); slotDescriptor.setIsNullable(isNullable); slotDescriptor.setIsMaterialized(true); slotDescriptor.setType(Type.VARCHAR); context.getColRefToExpr().put(new ColumnRefOperator(stringSlotId, Type.VARCHAR, "<dict-code>", slotDescriptor.getIsNullable()), new SlotRef(stringSlotId.toString(), slotDescriptor)); } else { SlotDescriptor slotDescriptor = new SlotDescriptor(slot.getId(), tupleDescriptor, slot); tupleDescriptor.addSlot(slotDescriptor); } } } Map<SlotId, Expr> projectMap = Maps.newHashMap(); for (Map.Entry<ColumnRefOperator, ScalarOperator> entry : node.getStringFunctions().entrySet()) { Expr expr = ScalarOperatorToExpr.buildExecExpression(entry.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr(), node.getStringFunctions())); projectMap.put(new SlotId(entry.getKey().getId()), expr); Preconditions.checkState(context.getColRefToExpr().containsKey(entry.getKey())); } tupleDescriptor.computeMemLayout(); DecodeNode decodeNode = new DecodeNode(context.getNextNodeId(), tupleDescriptor, inputFragment.getPlanRoot(), node.getDictToStrings(), projectMap); decodeNode.computeStatistics(optExpression.getStatistics()); 
decodeNode.setLimit(node.getLimit()); inputFragment.setPlanRoot(decodeNode); return inputFragment; } @Override @Override public PlanFragment visitPhysicalMetaScan(OptExpression optExpression, ExecPlan context) { PhysicalMetaScanOperator scan = (PhysicalMetaScanOperator) optExpression.getOp(); context.getDescTbl().addReferencedTable(scan.getTable()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(scan.getTable()); MetaScanNode scanNode = new MetaScanNode(context.getNextNodeId(), tupleDescriptor, (OlapTable) scan.getTable(), scan.getAggColumnIdToNames()); scanNode.computeRangeLocations(); for (Map.Entry<ColumnRefOperator, Column> entry : scan.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().getName(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } private void prepareContextSlots(PhysicalScanOperator node, ExecPlan context, TupleDescriptor tupleDescriptor) { for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); if (slotDescriptor.getType().isComplexType()) { slotDescriptor.setUsedSubfieldPosGroup(entry.getKey().getUsedSubfieldPosGroup()); } 
context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } } private void prepareCommonExpr(HDFSScanNodePredicates scanNodePredicates, ScanOperatorPredicates predicates, ExecPlan context) { List<ScalarOperator> noEvalPartitionConjuncts = predicates.getNoEvalPartitionConjuncts(); List<ScalarOperator> nonPartitionConjuncts = predicates.getNonPartitionConjuncts(); List<ScalarOperator> partitionConjuncts = predicates.getPartitionConjuncts(); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator partitionConjunct : partitionConjuncts) { scanNodePredicates.getPartitionConjuncts(). add(ScalarOperatorToExpr.buildExecExpression(partitionConjunct, formatterContext)); } for (ScalarOperator noEvalPartitionConjunct : noEvalPartitionConjuncts) { scanNodePredicates.getNoEvalPartitionConjuncts(). add(ScalarOperatorToExpr.buildExecExpression(noEvalPartitionConjunct, formatterContext)); } for (ScalarOperator nonPartitionConjunct : nonPartitionConjuncts) { scanNodePredicates.getNonPartitionConjuncts(). 
add(ScalarOperatorToExpr.buildExecExpression(nonPartitionConjunct, formatterContext)); } } private void prepareMinMaxExpr(HDFSScanNodePredicates scanNodePredicates, ScanOperatorPredicates predicates, ExecPlan context) { /* * populates 'minMaxTuple' with slots for statistics values, * and populates 'minMaxConjuncts' with conjuncts pointing into the 'minMaxTuple' */ List<ScalarOperator> minMaxConjuncts = predicates.getMinMaxConjuncts(); TupleDescriptor minMaxTuple = context.getDescTbl().createTupleDescriptor(); for (ScalarOperator minMaxConjunct : minMaxConjuncts) { for (ColumnRefOperator columnRefOperator : Utils.extractColumnRef(minMaxConjunct)) { SlotDescriptor slotDescriptor = context.getDescTbl() .addSlotDescriptor(minMaxTuple, new SlotId(columnRefOperator.getId())); Column column = predicates.getMinMaxColumnRefMap().get(columnRefOperator); slotDescriptor.setColumn(column); slotDescriptor.setIsNullable(column.isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr() .put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDescriptor)); } } minMaxTuple.computeMemLayout(); scanNodePredicates.setMinMaxTuple(minMaxTuple); ScalarOperatorToExpr.FormatterContext minMaxFormatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator minMaxConjunct : minMaxConjuncts) { scanNodePredicates.getMinMaxConjuncts(). 
add(ScalarOperatorToExpr.buildExecExpression(minMaxConjunct, minMaxFormatterContext));
}
}

// Builds a single-fragment plan for a Hudi table scan: sets up tuple/slot
// descriptors, partition-pruning state, scan ranges, and the partition and
// min-max predicates pushed down to the scan node.
@Override
public PlanFragment visitPhysicalHudiScan(OptExpression optExpression, ExecPlan context) {
    PhysicalHudiScanOperator node = (PhysicalHudiScanOperator) optExpression.getOp();
    ScanOperatorPredicates predicates = node.getScanOperatorPredicates();

    Table referenceTable = node.getTable();
    context.getDescTbl().addReferencedTable(referenceTable);
    TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor();
    tupleDescriptor.setTable(referenceTable);

    prepareContextSlots(node, context, tupleDescriptor);

    HudiScanNode hudiScanNode = new HudiScanNode(context.getNextNodeId(), tupleDescriptor, "HudiScanNode");
    hudiScanNode.computeStatistics(optExpression.getStatistics());
    try {
        HDFSScanNodePredicates scanNodePredicates = hudiScanNode.getScanNodePredicates();
        scanNodePredicates.setSelectedPartitionIds(predicates.getSelectedPartitionIds());
        scanNodePredicates.setIdToPartitionKey(predicates.getIdToPartitionKey());

        hudiScanNode.setupScanRangeLocations(context.getDescTbl());

        prepareCommonExpr(scanNodePredicates, predicates, context);
        prepareMinMaxExpr(scanNodePredicates, predicates, context);
    } catch (Exception e) {
        // FIX: pass the exception as a parameter instead of concatenating it into the
        // message (keeps the stack trace), and drop the redundant second LOG.warn(e).
        // Matches the logging style used by visitPhysicalFileScan.
        LOG.warn("Hudi scan node get scan range locations failed : ", e);
        throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR);
    }

    hudiScanNode.setLimit(node.getLimit());
    tupleDescriptor.computeMemLayout();
    context.getScanNodes().add(hudiScanNode);

    PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), hudiScanNode, DataPartition.RANDOM);
    context.getFragments().add(fragment);
    return fragment;
}

// Builds a single-fragment plan for a Hive table scan (continues on the next
// source span).
@Override
public PlanFragment visitPhysicalHiveScan(OptExpression optExpression, ExecPlan context) {
    PhysicalHiveScanOperator node = (PhysicalHiveScanOperator) optExpression.getOp();
    ScanOperatorPredicates predicates = node.getScanOperatorPredicates();

    Table referenceTable = node.getTable();
context.getDescTbl().addReferencedTable(referenceTable); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(referenceTable); prepareContextSlots(node, context, tupleDescriptor); HdfsScanNode hdfsScanNode = new HdfsScanNode(context.getNextNodeId(), tupleDescriptor, "HdfsScanNode"); hdfsScanNode.computeStatistics(optExpression.getStatistics()); try { HDFSScanNodePredicates scanNodePredicates = hdfsScanNode.getScanNodePredicates(); scanNodePredicates.setSelectedPartitionIds(predicates.getSelectedPartitionIds()); scanNodePredicates.setIdToPartitionKey(predicates.getIdToPartitionKey()); hdfsScanNode.setupScanRangeLocations(context.getDescTbl()); prepareCommonExpr(scanNodePredicates, predicates, context); prepareMinMaxExpr(scanNodePredicates, predicates, context); } catch (Exception e) { LOG.warn("Hdfs scan node get scan range locations failed : " + e); throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR); } hdfsScanNode.setLimit(node.getLimit()); tupleDescriptor.computeMemLayout(); context.getScanNodes().add(hdfsScanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), hdfsScanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalFileScan(OptExpression optExpression, ExecPlan context) { PhysicalFileScanOperator node = (PhysicalFileScanOperator) optExpression.getOp(); ScanOperatorPredicates predicates = node.getScanOperatorPredicates(); Table referenceTable = node.getTable(); context.getDescTbl().addReferencedTable(referenceTable); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(referenceTable); prepareContextSlots(node, context, tupleDescriptor); FileTableScanNode fileTableScanNode = new FileTableScanNode(context.getNextNodeId(), tupleDescriptor, "FileTableScanNode"); fileTableScanNode.computeStatistics(optExpression.getStatistics()); try { 
HDFSScanNodePredicates scanNodePredicates = fileTableScanNode.getScanNodePredicates(); fileTableScanNode.setupScanRangeLocations(); prepareCommonExpr(scanNodePredicates, predicates, context); prepareMinMaxExpr(scanNodePredicates, predicates, context); } catch (Exception e) { LOG.warn("Hdfs scan node get scan range locations failed : ", e); throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR); } fileTableScanNode.setLimit(node.getLimit()); tupleDescriptor.computeMemLayout(); context.getScanNodes().add(fileTableScanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), fileTableScanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalDeltaLakeScan(OptExpression optExpression, ExecPlan context) { PhysicalDeltaLakeScanOperator node = (PhysicalDeltaLakeScanOperator) optExpression.getOp(); Table referenceTable = node.getTable(); context.getDescTbl().addReferencedTable(referenceTable); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(referenceTable); for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } DeltaLakeScanNode deltaLakeScanNode = new DeltaLakeScanNode(context.getNextNodeId(), tupleDescriptor, "DeltaLakeScanNode"); deltaLakeScanNode.computeStatistics(optExpression.getStatistics()); try { ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); List<ScalarOperator> predicates = 
Utils.extractConjuncts(node.getPredicate()); for (ScalarOperator predicate : predicates) { deltaLakeScanNode.getConjuncts() .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } deltaLakeScanNode.setupScanRangeLocations(context.getDescTbl()); HDFSScanNodePredicates scanNodePredicates = deltaLakeScanNode.getScanNodePredicates(); prepareMinMaxExpr(scanNodePredicates, node.getScanOperatorPredicates(), context); } catch (AnalysisException e) { LOG.warn("Delta lake scan node get scan range locations failed : " + e); throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR); } deltaLakeScanNode.setLimit(node.getLimit()); tupleDescriptor.computeMemLayout(); context.getScanNodes().add(deltaLakeScanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), deltaLakeScanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalIcebergScan(OptExpression optExpression, ExecPlan context) { PhysicalIcebergScanOperator node = (PhysicalIcebergScanOperator) optExpression.getOp(); Table referenceTable = node.getTable(); context.getDescTbl().addReferencedTable(referenceTable); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(referenceTable); prepareContextSlots(node, context, tupleDescriptor); IcebergScanNode icebergScanNode = new IcebergScanNode(context.getNextNodeId(), tupleDescriptor, "IcebergScanNode"); icebergScanNode.computeStatistics(optExpression.getStatistics()); try { ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); for (ScalarOperator predicate : predicates) { icebergScanNode.getConjuncts() .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } icebergScanNode.preProcessIcebergPredicate(predicates); 
icebergScanNode.setupScanRangeLocations(); icebergScanNode.appendEqualityColumns(node, columnRefFactory, context); HDFSScanNodePredicates scanNodePredicates = icebergScanNode.getScanNodePredicates(); prepareMinMaxExpr(scanNodePredicates, node.getScanOperatorPredicates(), context); } catch (UserException e) { LOG.warn("Iceberg scan node get scan range locations failed : " + e); throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR); } icebergScanNode.setLimit(node.getLimit()); tupleDescriptor.computeMemLayout(); context.getScanNodes().add(icebergScanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), icebergScanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalSchemaScan(OptExpression optExpression, ExecPlan context) { PhysicalSchemaScanOperator node = (PhysicalSchemaScanOperator) optExpression.getOp(); context.getDescTbl().addReferencedTable(node.getTable()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(node.getTable()); for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); SchemaScanNode scanNode = new SchemaScanNode(context.getNextNodeId(), tupleDescriptor); scanNode.setFrontendIP(FrontendOptions.getLocalHostAddress()); scanNode.setFrontendPort(Config.rpc_port); scanNode.setUser(context.getConnectContext().getQualifiedUser()); scanNode.setUserIp(context.getConnectContext().getRemoteIP()); scanNode.setLimit(node.getLimit()); List<ScalarOperator> 
predicates = Utils.extractConjuncts(node.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator predicate : predicates) { scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); if (predicate instanceof BinaryPredicateOperator) { if (((BinaryPredicateOperator) predicate).getBinaryType() == BinaryPredicateOperator.BinaryType.EQ) { if (predicate.getChildren().get(0) instanceof ColumnRefOperator && predicate.getChildren().get(1) instanceof ConstantOperator) { ColumnRefOperator columnRefOperator = (ColumnRefOperator) predicate.getChildren().get(0); ConstantOperator constantOperator = (ConstantOperator) predicate.getChildren().get(1); switch (columnRefOperator.getName()) { case "TABLE_SCHEMA": scanNode.setSchemaDb(constantOperator.getVarchar()); break; case "TABLE_NAME": scanNode.setSchemaTable(constantOperator.getVarchar()); break; default: break; } } } } } context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.UNPARTITIONED); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalMysqlScan(OptExpression optExpression, ExecPlan context) { PhysicalMysqlScanOperator node = (PhysicalMysqlScanOperator) optExpression.getOp(); context.getDescTbl().addReferencedTable(node.getTable()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(node.getTable()); for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); 
context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().getName(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); MysqlScanNode scanNode = new MysqlScanNode(context.getNextNodeId(), tupleDescriptor, (MysqlTable) node.getTable()); if (node.getTemporalClause() != null) { scanNode.setTemporalClause(node.getTemporalClause()); } List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); formatterContext.setImplicitCast(true); for (ScalarOperator predicate : predicates) { scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } scanNode.setLimit(node.getLimit()); scanNode.computeColumnsAndFilters(); scanNode.computeStatistics(optExpression.getStatistics()); context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.UNPARTITIONED); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalEsScan(OptExpression optExpression, ExecPlan context) { PhysicalEsScanOperator node = (PhysicalEsScanOperator) optExpression.getOp(); context.getDescTbl().addReferencedTable(node.getTable()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(node.getTable()); for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); EsScanNode scanNode = new 
EsScanNode(context.getNextNodeId(), tupleDescriptor, "EsScanNode"); List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator predicate : predicates) { scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } scanNode.setLimit(node.getLimit()); scanNode.computeStatistics(optExpression.getStatistics()); try { scanNode.assignBackends(); } catch (UserException e) { throw new StarRocksPlannerException(e.getMessage(), INTERNAL_ERROR); } scanNode.setShardScanRanges(scanNode.computeShardLocations(node.getSelectedIndex())); context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalJDBCScan(OptExpression optExpression, ExecPlan context) { PhysicalJDBCScanOperator node = (PhysicalJDBCScanOperator) optExpression.getOp(); context.getDescTbl().addReferencedTable(node.getTable()); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(node.getTable()); for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().getName(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); JDBCScanNode scanNode = new JDBCScanNode(context.getNextNodeId(), tupleDescriptor, (JDBCTable) node.getTable()); List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); 
ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); formatterContext.setImplicitCast(true); for (ScalarOperator predicate : predicates) { scanNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } scanNode.setLimit(node.getLimit()); scanNode.computeColumnsAndFilters(); scanNode.computeStatistics(optExpression.getStatistics()); context.getScanNodes().add(scanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), scanNode, DataPartition.UNPARTITIONED); context.getFragments().add(fragment); return fragment; } @Override public PlanFragment visitPhysicalValues(OptExpression optExpr, ExecPlan context) { PhysicalValuesOperator valuesOperator = (PhysicalValuesOperator) optExpr.getOp(); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); for (ColumnRefOperator columnRefOperator : valuesOperator.getColumnRefSet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new SlotId(columnRefOperator.getId())); slotDescriptor.setIsNullable(columnRefOperator.isNullable()); slotDescriptor.setIsMaterialized(true); slotDescriptor.setType(columnRefOperator.getType()); context.getColRefToExpr() .put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDescriptor)); } tupleDescriptor.computeMemLayout(); if (valuesOperator.getRows().isEmpty()) { EmptySetNode emptyNode = new EmptySetNode(context.getNextNodeId(), Lists.newArrayList(tupleDescriptor.getId())); emptyNode.computeStatistics(optExpr.getStatistics()); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), emptyNode, DataPartition.UNPARTITIONED); context.getFragments().add(fragment); return fragment; } else { UnionNode unionNode = new UnionNode(context.getNextNodeId(), tupleDescriptor.getId()); unionNode.setLimit(valuesOperator.getLimit()); List<List<Expr>> consts = new ArrayList<>(); for 
(List<ScalarOperator> row : valuesOperator.getRows()) {
                // Translate each constant row into a row of executable exprs.
                List<Expr> exprRow = new ArrayList<>();
                for (ScalarOperator field : row) {
                    exprRow.add(ScalarOperatorToExpr.buildExecExpression(
                            field, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())));
                }
                consts.add(exprRow);
            }
            unionNode.setMaterializedConstExprLists_(consts);
            unionNode.computeStatistics(optExpr.getStatistics());
            /*
             * TODO(lhy):
             * It doesn't make sense for vectorized execution engines, but it will appear in explain.
             * we can delete this when refactoring explain in the future,
             */
            consts.forEach(unionNode::addConstExprList);
            PlanFragment fragment =
                    new PlanFragment(context.getNextFragmentId(), unionNode, DataPartition.UNPARTITIONED);
            context.getFragments().add(fragment);
            return fragment;
        }
    }

    // Returns true iff the subtree rooted at `root` contains no ExchangeNode,
    // i.e. the whole subtree executes inside a single fragment.
    public static boolean hasNoExchangeNodes(PlanNode root) {
        if (root instanceof ExchangeNode) {
            return false;
        }
        for (PlanNode childNode : root.getChildren()) {
            if (!hasNoExchangeNodes(childNode)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Whether all the nodes of the plan tree only contain the specific node types.
     *
     * @param root The plan tree root.
     * @param requiredNodeTypes The allowed node types.
     * @return true if every node of the tree is an instance of one of the required types, otherwise false.
     */
    private boolean onlyContainNodeTypes(PlanNode root, List<Class<? extends PlanNode>> requiredNodeTypes) {
        boolean rootMatched = requiredNodeTypes.stream().anyMatch(type -> type.isInstance(root));
        if (!rootMatched) {
            return false;
        }
        for (PlanNode child : root.getChildren()) {
            if (!onlyContainNodeTypes(child, requiredNodeTypes)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Remove ExchangeNode between AggNode and ScanNode for the single backend.
     * <p>
     * This is used to generate "ScanNode->LocalShuffle->OnePhaseLocalAgg" for the single backend,
     * which contains two steps:
     * 1. Ignore the network cost for ExchangeNode when estimating cost model.
     * 2. Remove ExchangeNode between AggNode and ScanNode when building fragments.
     * <p>
     * Specifically, transfer
     * (AggNode->ExchangeNode)->([ProjectNode->]ScanNode)
     * -      *inputFragment      sourceFragment
     * to
     * (AggNode->[ProjectNode->]ScanNode)
     * -      *sourceFragment
     * That is, when matching this fragment pattern, remove inputFragment and return sourceFragment.
     *
     * @param inputFragment The input fragment to match the above pattern.
     * @param context The context of building fragment, which contains all the fragments.
     * @return sourceFragment if it matches the pattern, otherwise the original inputFragment.
     */
    private PlanFragment removeExchangeNodeForLocalShuffleAgg(PlanFragment inputFragment, ExecPlan context) {
        if (ConnectContext.get() == null) {
            return inputFragment;
        }
        // The rewrite only applies when local shuffle agg and pipeline are enabled
        // and the cluster has exactly one backend/compute node.
        SessionVariable sessionVariable = ConnectContext.get().getSessionVariable();
        boolean enableLocalShuffleAgg = sessionVariable.isEnableLocalShuffleAgg()
                && sessionVariable.isEnablePipelineEngine()
                && GlobalStateMgr.getCurrentSystemInfo().isSingleBackendAndComputeNode();
        if (!enableLocalShuffleAgg) {
            return inputFragment;
        }

        // The fragment must be exactly an exchange over a scan(+project)-only source fragment.
        if (!(inputFragment.getPlanRoot() instanceof ExchangeNode)) {
            return inputFragment;
        }
        PlanNode sourceFragmentRoot = inputFragment.getPlanRoot().getChild(0);
        if (!onlyContainNodeTypes(sourceFragmentRoot, ImmutableList.of(ScanNode.class, ProjectNode.class))) {
            return inputFragment;
        }

        // MultiCastPlanFragment has multiple consumers, so its exchange cannot be elided.
        PlanFragment sourceFragment = sourceFragmentRoot.getFragment();
        if (sourceFragment instanceof MultiCastPlanFragment) {
            return inputFragment;
        }

        // Drop the now-redundant exchange fragment from the plan's fragment list.
        ArrayList<PlanFragment> fragments = context.getFragments();
        for (int i = fragments.size() - 1; i >= 0; --i) {
            if (fragments.get(i).equals(inputFragment)) {
                fragments.remove(i);
                break;
            }
        }
        return sourceFragment;
    }

    /**
     * Clear partitionExprs of OlapScanNode (the bucket keys to pass to BE), if they don't satisfy
     * the required hash property of blocking aggregation.
     * <p>
     * When partitionExprs of OlapScanNode are passed to BE, the post operators will use them as
     * local shuffle partition exprs.
     * Otherwise, the operators will use the original partition exprs (group by keys or join on keys).
     * <p>
     * The bucket keys can satisfy the required hash property of blocking aggregation except two scenarios:
     * - OlapScanNode only has one tablet after pruned.
     * - It is executed on the single BE.
     * As for these two scenarios, which will generate ScanNode(k1)->LocalShuffle(c1)->BlockingAgg(c1),
     * partitionExprs of OlapScanNode must be cleared to make BE use group by keys not bucket keys as
     * local shuffle partition exprs.
     *
     * @param fragment The fragment which need to check whether to clear bucket keys of OlapScanNode.
     * @param aggOp The aggregate which need to check whether OlapScanNode satisfies its required hash property.
     */
    private void clearOlapScanNodePartitionsIfNotSatisfy(PlanFragment fragment,
                                                         PhysicalHashAggregateOperator aggOp) {
        if (!aggOp.isOnePhaseAgg() && !aggOp.isMergedLocalAgg()) {
            return;
        }
        if (aggOp.getPartitionByColumns().isEmpty()) {
            return;
        }
        PlanNode leafNode = fragment.getLeftMostLeafNode();
        if (!(leafNode instanceof OlapScanNode)) {
            return;
        }

        // Bucket keys satisfy the agg only when they are a subset of its partition-by columns.
        OlapScanNode olapScanNode = (OlapScanNode) leafNode;
        Set<ColumnRefOperator> requiredPartColumns = new HashSet<>(aggOp.getPartitionByColumns());
        boolean satisfy = requiredPartColumns.containsAll(olapScanNode.getBucketColumns());
        if (satisfy) {
            return;
        }
        olapScanNode.setBucketExprs(Lists.newArrayList());
        olapScanNode.setBucketColumns(Lists.newArrayList());
    }

    // Bundle of the expr lists produced when building an aggregation's output tuple.
    private static class AggregateExprInfo {
        public final ArrayList<Expr> groupExpr;
        public final ArrayList<FunctionCallExpr> aggregateExpr;
        public final ArrayList<Expr> partitionExpr;
        public final ArrayList<Expr> intermediateExpr;

        public AggregateExprInfo(ArrayList<Expr> groupExpr, ArrayList<FunctionCallExpr> aggregateExpr,
                                 ArrayList<Expr> partitionExpr, ArrayList<Expr> intermediateExpr) {
            this.groupExpr = groupExpr;
            this.aggregateExpr = aggregateExpr;
            this.partitionExpr = partitionExpr;
            this.intermediateExpr = intermediateExpr;
        }
    }

    private
AggregateExprInfo buildAggregateTuple( Map<ColumnRefOperator, CallOperator> aggregations, List<ColumnRefOperator> groupBys, List<ColumnRefOperator> partitionBys, TupleDescriptor outputTupleDesc, ExecPlan context) { ArrayList<Expr> groupingExpressions = Lists.newArrayList(); boolean forExchangePerf = aggregations.values().stream().anyMatch(aggFunc -> aggFunc.getFnName().equals(FunctionSet.EXCHANGE_BYTES) || aggFunc.getFnName().equals(FunctionSet.EXCHANGE_SPEED)) && ConnectContext.get().getSessionVariable().getNewPlannerAggStage() == 1; if (!forExchangePerf) { for (ColumnRefOperator grouping : CollectionUtils.emptyIfNull(groupBys)) { Expr groupingExpr = ScalarOperatorToExpr.buildExecExpression(grouping, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())); groupingExpressions.add(groupingExpr); SlotDescriptor slotDesc = context.getDescTbl().addSlotDescriptor(outputTupleDesc, new SlotId(grouping.getId())); slotDesc.setType(groupingExpr.getType()); slotDesc.setIsNullable(groupingExpr.isNullable()); slotDesc.setIsMaterialized(true); } } ArrayList<FunctionCallExpr> aggregateExprList = Lists.newArrayList(); ArrayList<Expr> intermediateAggrExprs = Lists.newArrayList(); for (Map.Entry<ColumnRefOperator, CallOperator> aggregation : aggregations.entrySet()) { FunctionCallExpr aggExpr = (FunctionCallExpr) ScalarOperatorToExpr.buildExecExpression( aggregation.getValue(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())); aggregateExprList.add(aggExpr); SlotDescriptor slotDesc = context.getDescTbl() .addSlotDescriptor(outputTupleDesc, new SlotId(aggregation.getKey().getId())); slotDesc.setType(aggregation.getValue().getType()); slotDesc.setIsNullable(aggExpr.isNullable()); slotDesc.setIsMaterialized(true); context.getColRefToExpr() .put(aggregation.getKey(), new SlotRef(aggregation.getKey().toString(), slotDesc)); SlotDescriptor intermediateSlotDesc = new SlotDescriptor(slotDesc.getId(), slotDesc.getParent()); AggregateFunction aggrFn = 
(AggregateFunction) aggExpr.getFn(); Type intermediateType = aggrFn.getIntermediateType() != null ? aggrFn.getIntermediateType() : aggrFn.getReturnType(); intermediateSlotDesc.setType(intermediateType); intermediateSlotDesc.setIsNullable(aggrFn.isNullable()); intermediateSlotDesc.setIsMaterialized(true); SlotRef intermediateSlotRef = new SlotRef(aggregation.getKey().toString(), intermediateSlotDesc); intermediateAggrExprs.add(intermediateSlotRef); } ArrayList<Expr> partitionExpressions = Lists.newArrayList(); for (ColumnRefOperator column : CollectionUtils.emptyIfNull(partitionBys)) { Expr partitionExpr = ScalarOperatorToExpr.buildExecExpression(column, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())); SlotDescriptor slotDesc = context.getDescTbl().addSlotDescriptor(outputTupleDesc, new SlotId(column.getId())); slotDesc.setType(partitionExpr.getType()); slotDesc.setIsNullable(partitionExpr.isNullable()); slotDesc.setIsMaterialized(true); context.getColRefToExpr().put(column, new SlotRef(column.toString(), slotDesc)); partitionExpressions.add(new SlotRef(slotDesc)); } outputTupleDesc.computeMemLayout(); return new AggregateExprInfo(groupingExpressions, aggregateExprList, partitionExpressions, intermediateAggrExprs); } @Override public PlanFragment visitPhysicalHashAggregate(OptExpression optExpr, ExecPlan context) { PhysicalHashAggregateOperator node = (PhysicalHashAggregateOperator) optExpr.getOp(); PlanFragment originalInputFragment = visit(optExpr.inputAt(0), context); PlanFragment inputFragment = removeExchangeNodeForLocalShuffleAgg(originalInputFragment, context); boolean withLocalShuffle = inputFragment != originalInputFragment; Map<ColumnRefOperator, CallOperator> aggregations = node.getAggregations(); List<ColumnRefOperator> groupBys = node.getGroupBys(); List<ColumnRefOperator> partitionBys = node.getPartitionByColumns(); TupleDescriptor outputTupleDesc = context.getDescTbl().createTupleDescriptor(); AggregateExprInfo aggExpr = 
buildAggregateTuple(aggregations, groupBys, partitionBys, outputTupleDesc, context); ArrayList<Expr> groupingExpressions = aggExpr.groupExpr; ArrayList<FunctionCallExpr> aggregateExprList = aggExpr.aggregateExpr; ArrayList<Expr> partitionExpressions = aggExpr.partitionExpr; ArrayList<Expr> intermediateAggrExprs = aggExpr.intermediateExpr; AggregationNode aggregationNode; if (node.getType().isLocal()) { AggregateInfo aggInfo = AggregateInfo.create( groupingExpressions, aggregateExprList, outputTupleDesc, outputTupleDesc, AggregateInfo.AggPhase.FIRST); aggregationNode = new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo); aggregationNode.unsetNeedsFinalize(); aggregationNode.setIsPreagg(node.isUseStreamingPreAgg()); aggregationNode.setIntermediateTuple(); if (!partitionExpressions.isEmpty()) { inputFragment.setOutputPartition(DataPartition.hashPartitioned(partitionExpressions)); } if (!node.isUseStreamingPreAgg() && hasColocateOlapScanChildInFragment(aggregationNode)) { aggregationNode.setColocate(true); } } else if (node.getType().isGlobal()) { if (node.hasSingleDistinct()) { for (int i = 0; i < aggregateExprList.size(); i++) { if (i != node.getSingleDistinctFunctionPos()) { aggregateExprList.get(i).setMergeAggFn(); } } AggregateInfo aggInfo = AggregateInfo.create( groupingExpressions, aggregateExprList, outputTupleDesc, outputTupleDesc, AggregateInfo.AggPhase.SECOND); aggregationNode = new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo); } else if (!node.isSplit()) { rewriteAggDistinctFirstStageFunction(aggregateExprList); AggregateInfo aggInfo = AggregateInfo.create( groupingExpressions, aggregateExprList, outputTupleDesc, outputTupleDesc, AggregateInfo.AggPhase.FIRST); aggregationNode = new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo); } else { aggregateExprList.forEach(FunctionCallExpr::setMergeAggFn); AggregateInfo aggInfo = AggregateInfo.create( groupingExpressions, 
aggregateExprList, outputTupleDesc, outputTupleDesc, AggregateInfo.AggPhase.SECOND_MERGE); aggregationNode = new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo); } List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator predicate : predicates) { aggregationNode.getConjuncts() .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } aggregationNode.setLimit(node.getLimit()); if (hasColocateOlapScanChildInFragment(aggregationNode)) { aggregationNode.setColocate(true); } } else if (node.getType().isDistinctGlobal()) { aggregateExprList.forEach(FunctionCallExpr::setMergeAggFn); AggregateInfo aggInfo = AggregateInfo.create( groupingExpressions, aggregateExprList, outputTupleDesc, outputTupleDesc, AggregateInfo.AggPhase.FIRST_MERGE); aggregationNode = new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo); aggregationNode.unsetNeedsFinalize(); aggregationNode.setIntermediateTuple(); } else if (node.getType().isDistinctLocal()) { for (int i = 0; i < aggregateExprList.size(); i++) { if (i != node.getSingleDistinctFunctionPos()) { aggregateExprList.get(i).setMergeAggFn(); } } AggregateInfo aggInfo = AggregateInfo.create( groupingExpressions, aggregateExprList, outputTupleDesc, outputTupleDesc, AggregateInfo.AggPhase.SECOND); aggregationNode = new AggregationNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo); aggregationNode.unsetNeedsFinalize(); aggregationNode.setIsPreagg(node.isUseStreamingPreAgg()); aggregationNode.setIntermediateTuple(); } else { throw unsupportedException("Not support aggregate type : " + node.getType()); } aggregationNode.setUseSortAgg(node.isUseSortAgg()); aggregationNode.setStreamingPreaggregationMode(context.getConnectContext(). 
getSessionVariable().getStreamingPreaggregationMode());
        aggregationNode.setHasNullableGenerateChild();
        aggregationNode.computeStatistics(optExpr.getStatistics());

        // For one-phase (or merged local) aggregation, make sure the scan's bucket keys do not
        // override the agg's local shuffle partition exprs, and mark local-shuffle execution.
        if (node.isOnePhaseAgg() || node.isMergedLocalAgg()) {
            clearOlapScanNodePartitionsIfNotSatisfy(inputFragment, node);
            inputFragment.setAssignScanRangesPerDriverSeq(!withLocalShuffle);
            aggregationNode.setWithLocalShuffle(withLocalShuffle);
        }

        aggregationNode.getAggInfo().setIntermediateAggrExprs(intermediateAggrExprs);
        inputFragment.setPlanRoot(aggregationNode);
        return inputFragment;
    }

    // Returns true if this fragment-local subtree (search stops at exchanges) contains an
    // OlapScanNode whose table is registered in the colocate table index.
    public boolean hasColocateOlapScanChildInFragment(PlanNode node) {
        if (node instanceof OlapScanNode) {
            ColocateTableIndex colocateIndex = GlobalStateMgr.getCurrentColocateIndex();
            OlapScanNode scanNode = (OlapScanNode) node;
            if (colocateIndex.isColocateTable(scanNode.getOlapTable().getId())) {
                return true;
            }
        }
        if (node instanceof ExchangeNode) {
            // Do not look across fragment boundaries.
            return false;
        }
        boolean hasOlapScanChild = false;
        for (PlanNode child : node.getChildren()) {
            hasOlapScanChild |= hasColocateOlapScanChildInFragment(child);
        }
        return hasOlapScanChild;
    }

    // When exactly one aggregate is DISTINCT, rewrite count(distinct)/sum(distinct) into the
    // corresponding multi_distinct_count/multi_distinct_sum first-stage functions in place.
    public void rewriteAggDistinctFirstStageFunction(List<FunctionCallExpr> aggregateExprList) {
        int singleDistinctCount = 0;
        int singleDistinctIndex = 0;
        FunctionCallExpr functionCallExpr = null;
        for (int i = 0; i < aggregateExprList.size(); ++i) {
            FunctionCallExpr callExpr = aggregateExprList.get(i);
            if (callExpr.isDistinct()) {
                ++singleDistinctCount;
                functionCallExpr = callExpr;
                singleDistinctIndex = i;
            }
        }
        if (singleDistinctCount == 1) {
            FunctionCallExpr replaceExpr = null;
            final String functionName = functionCallExpr.getFnName().getFunction();
            if (functionName.equalsIgnoreCase(FunctionSet.COUNT)) {
                replaceExpr = new FunctionCallExpr(FunctionSet.MULTI_DISTINCT_COUNT, functionCallExpr.getParams());
                replaceExpr.setFn(Expr.getBuiltinFunction(FunctionSet.MULTI_DISTINCT_COUNT,
                        new Type[] {functionCallExpr.getChild(0).getType()},
                        IS_NONSTRICT_SUPERTYPE_OF));
                replaceExpr.getParams().setIsDistinct(false);
            }
            else if (functionName.equalsIgnoreCase(FunctionSet.SUM)) {
                replaceExpr = new FunctionCallExpr(FunctionSet.MULTI_DISTINCT_SUM, functionCallExpr.getParams());
                Function multiDistinctSum = DecimalV3FunctionAnalyzer.convertSumToMultiDistinctSum(
                        functionCallExpr.getFn(), functionCallExpr.getChild(0).getType());
                replaceExpr.setFn(multiDistinctSum);
                replaceExpr.getParams().setIsDistinct(false);
            }
            // Only COUNT/SUM distinct is expected here; anything else indicates a planner bug.
            Preconditions.checkState(replaceExpr != null);
            ExpressionAnalyzer.analyzeExpressionIgnoreSlot(replaceExpr, ConnectContext.get());
            aggregateExprList.set(singleDistinctIndex, replaceExpr);
        }
    }

    // Lower a distribution (exchange) operator: build an ExchangeNode in a new fragment and
    // wire the input fragment's output partition/destination to it.
    @Override
    public PlanFragment visitPhysicalDistribution(OptExpression optExpr, ExecPlan context) {
        PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
        PhysicalDistributionOperator distribution = (PhysicalDistributionOperator) optExpr.getOp();

        ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(),
                inputFragment.getPlanRoot(), distribution.getDistributionSpec().getType());
        DataPartition dataPartition;
        if (DistributionSpec.DistributionType.GATHER.equals(distribution.getDistributionSpec().getType())) {
            // Gather: a single receiving instance; a gather-with-limit propagates the limit.
            exchangeNode.setNumInstances(1);
            dataPartition = DataPartition.UNPARTITIONED;
            GatherDistributionSpec spec = (GatherDistributionSpec) distribution.getDistributionSpec();
            if (spec.hasLimit()) {
                exchangeNode.setLimit(spec.getLimit());
            }
        } else if (DistributionSpec.DistributionType.BROADCAST
                .equals(distribution.getDistributionSpec().getType())) {
            exchangeNode.setNumInstances(inputFragment.getPlanRoot().getNumInstances());
            dataPartition = DataPartition.UNPARTITIONED;
        } else if (DistributionSpec.DistributionType.SHUFFLE.equals(distribution.getDistributionSpec().getType())) {
            // Shuffle: hash-partition on the spec's shuffle columns.
            exchangeNode.setNumInstances(inputFragment.getPlanRoot().getNumInstances());
            List<ColumnRefOperator> partitionColumns =
                    getShuffleColumns((HashDistributionSpec) distribution.getDistributionSpec());
            List<Expr> distributeExpressions =
                    partitionColumns.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                            .collect(Collectors.toList());
            dataPartition = DataPartition.hashPartitioned(distributeExpressions);
        } else {
            throw new StarRocksPlannerException("Unsupport exchange type : " +
                    distribution.getDistributionSpec().getType(), INTERNAL_ERROR);
        }
        exchangeNode.setDataPartition(dataPartition);

        PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), exchangeNode, dataPartition);
        fragment.setQueryGlobalDicts(distribution.getGlobalDicts());
        inputFragment.setDestination(exchangeNode);
        inputFragment.setOutputPartition(dataPartition);

        context.getFragments().add(fragment);
        return fragment;
    }

    // Lower a TopN operator: an unsplit TopN becomes a partial (partition-local) sort,
    // a split TopN becomes the final merge stage over an already-sorted input.
    @Override
    public PlanFragment visitPhysicalTopN(OptExpression optExpr, ExecPlan context) {
        PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
        PhysicalTopNOperator topN = (PhysicalTopNOperator) optExpr.getOp();
        Preconditions.checkState(topN.getOffset() >= 0);
        if (!topN.isSplit()) {
            return buildPartialTopNFragment(optExpr, context, topN.getPartitionByColumns(),
                    topN.getPartitionLimit(), topN.getOrderSpec(),
                    topN.getTopNType(), topN.getLimit(), topN.getOffset(), inputFragment);
        } else {
            return buildFinalTopNFragment(context, topN.getTopNType(), topN.getLimit(), topN.getOffset(),
                    inputFragment, optExpr);
        }
    }

    // Build the final TopN stage: gather the pre-sorted input into one instance and merge.
    private PlanFragment buildFinalTopNFragment(ExecPlan context, TopNType topNType, long limit, long offset,
                                                PlanFragment inputFragment,
                                                OptExpression optExpr) {
        ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(),
                inputFragment.getPlanRoot(),
                DistributionSpec.DistributionType.GATHER);

        exchangeNode.setNumInstances(1);
        DataPartition dataPartition = DataPartition.UNPARTITIONED;
        exchangeNode.setDataPartition(dataPartition);

        // The input fragment root must be the partial SortNode produced earlier.
        Preconditions.checkState(inputFragment.getPlanRoot() instanceof SortNode);
        SortNode sortNode = (SortNode) inputFragment.getPlanRoot();
        sortNode.setTopNType(topNType);
        exchangeNode.setMergeInfo(sortNode.getSortInfo(),
offset); exchangeNode.computeStatistics(optExpr.getStatistics()); if (TopNType.ROW_NUMBER.equals(topNType)) { exchangeNode.setLimit(limit); } else { exchangeNode.unsetLimit(); } PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), exchangeNode, dataPartition); inputFragment.setDestination(exchangeNode); inputFragment.setOutputPartition(dataPartition); fragment.setQueryGlobalDicts(inputFragment.getQueryGlobalDicts()); context.getFragments().add(fragment); return fragment; } private PlanFragment buildPartialTopNFragment(OptExpression optExpr, ExecPlan context, List<ColumnRefOperator> partitionByColumns, long partitionLimit, OrderSpec orderSpec, TopNType topNType, long limit, long offset, PlanFragment inputFragment) { List<Expr> resolvedTupleExprs = Lists.newArrayList(); List<Expr> partitionExprs = Lists.newArrayList(); List<Expr> sortExprs = Lists.newArrayList(); TupleDescriptor sortTuple = context.getDescTbl().createTupleDescriptor(); if (CollectionUtils.isNotEmpty(partitionByColumns)) { for (ColumnRefOperator partitionByColumn : partitionByColumns) { Expr expr = ScalarOperatorToExpr.buildExecExpression(partitionByColumn, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())); partitionExprs.add(expr); } } for (Ordering ordering : orderSpec.getOrderDescs()) { Expr sortExpr = ScalarOperatorToExpr.buildExecExpression(ordering.getColumnRef(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())); SlotDescriptor slotDesc = context.getDescTbl().addSlotDescriptor(sortTuple, new SlotId(ordering.getColumnRef().getId())); slotDesc.initFromExpr(sortExpr); slotDesc.setIsMaterialized(true); slotDesc.setIsNullable(sortExpr.isNullable()); slotDesc.setType(sortExpr.getType()); context.getColRefToExpr() .put(ordering.getColumnRef(), new SlotRef(ordering.getColumnRef().toString(), slotDesc)); resolvedTupleExprs.add(sortExpr); sortExprs.add(new SlotRef(slotDesc)); } ColumnRefSet columnRefSet = 
optExpr.inputAt(0).getLogicalProperty().getOutputColumns(); for (int i = 0; i < columnRefSet.getColumnIds().length; ++i) { /* * Add column not be used in ordering */ ColumnRefOperator columnRef = columnRefFactory.getColumnRef(columnRefSet.getColumnIds()[i]); if (orderSpec.getOrderDescs().stream().map(Ordering::getColumnRef) .noneMatch(c -> c.equals(columnRef))) { Expr outputExpr = ScalarOperatorToExpr.buildExecExpression(columnRef, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())); SlotDescriptor slotDesc = context.getDescTbl().addSlotDescriptor(sortTuple, new SlotId(columnRef.getId())); slotDesc.initFromExpr(outputExpr); slotDesc.setIsMaterialized(true); slotDesc.setIsNullable(outputExpr.isNullable()); slotDesc.setType(outputExpr.getType()); context.getColRefToExpr().put(columnRef, new SlotRef(columnRef.toString(), slotDesc)); resolvedTupleExprs.add(outputExpr); } } sortTuple.computeMemLayout(); SortInfo sortInfo = new SortInfo(partitionExprs, partitionLimit, sortExprs, orderSpec.getOrderDescs().stream().map(Ordering::isAscending).collect(Collectors.toList()), orderSpec.getOrderDescs().stream().map(Ordering::isNullsFirst).collect(Collectors.toList())); sortInfo.setMaterializedTupleInfo(sortTuple, resolvedTupleExprs); SortNode sortNode = new SortNode( context.getNextNodeId(), inputFragment.getPlanRoot(), sortInfo, limit != Operator.DEFAULT_LIMIT, limit == Operator.DEFAULT_LIMIT, 0); sortNode.setTopNType(topNType); sortNode.setLimit(limit); sortNode.setOffset(offset); sortNode.resolvedTupleExprs = resolvedTupleExprs; sortNode.setHasNullableGenerateChild(); sortNode.computeStatistics(optExpr.getStatistics()); if (shouldBuildGlobalRuntimeFilter()) { sortNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl()); } inputFragment.setPlanRoot(sortNode); return inputFragment; } private void setJoinPushDown(JoinNode node) { node.setIsPushDown(ConnectContext.get().getSessionVariable().isHashJoinPushDownRightTable() && 
(node.getJoinOp().isInnerJoin() || node.getJoinOp().isLeftSemiJoin() ||
                        node.getJoinOp().isRightJoin()));
    }

    // Global runtime filters are built when explicitly enabled or when the pipeline engine is on.
    private boolean shouldBuildGlobalRuntimeFilter() {
        return ConnectContext.get() != null &&
                (ConnectContext.get().getSessionVariable().getEnableGlobalRuntimeFilter() ||
                        ConnectContext.get().getSessionVariable().isEnablePipelineEngine());
    }

    @Override
    public PlanFragment visitPhysicalHashJoin(OptExpression optExpr, ExecPlan context) {
        PlanFragment leftFragment = visit(optExpr.inputAt(0), context);
        PlanFragment rightFragment = visit(optExpr.inputAt(1), context);
        return visitPhysicalJoin(leftFragment, rightFragment, optExpr, context);
    }

    // Split a predicate into its AND-conjuncts and translate each into an executable Expr.
    private List<Expr> extractConjuncts(ScalarOperator predicate, ExecPlan context) {
        return Utils.extractConjuncts(predicate).stream()
                .map(e -> ScalarOperatorToExpr.buildExecExpression(e,
                        new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())))
                .collect(Collectors.toList());
    }

    // Mark slots nullable for outer joins: the null-producing side(s) of the join
    // (plus any tuples already nullable in either child) get nullable slots.
    private void setNullableForJoin(JoinOperator joinOperator,
                                    PlanFragment leftFragment, PlanFragment rightFragment, ExecPlan context) {
        Set<TupleId> nullableTupleIds = new HashSet<>();
        nullableTupleIds.addAll(leftFragment.getPlanRoot().getNullableTupleIds());
        nullableTupleIds.addAll(rightFragment.getPlanRoot().getNullableTupleIds());
        if (joinOperator.isLeftOuterJoin()) {
            nullableTupleIds.addAll(rightFragment.getPlanRoot().getTupleIds());
        } else if (joinOperator.isRightOuterJoin()) {
            nullableTupleIds.addAll(leftFragment.getPlanRoot().getTupleIds());
        } else if (joinOperator.isFullOuterJoin()) {
            nullableTupleIds.addAll(leftFragment.getPlanRoot().getTupleIds());
            nullableTupleIds.addAll(rightFragment.getPlanRoot().getTupleIds());
        }
        for (TupleId tupleId : nullableTupleIds) {
            TupleDescriptor tupleDescriptor = context.getDescTbl().getTupleDesc(tupleId);
            tupleDescriptor.getSlots().forEach(slot -> slot.setIsNullable(true));
            // Nullability changes the slot layout, so recompute it.
            tupleDescriptor.computeMemLayout();
        }
    }

    @Override
    public PlanFragment visitPhysicalNestLoopJoin(OptExpression optExpr, ExecPlan context) {
PhysicalJoinOperator node = (PhysicalJoinOperator) optExpr.getOp(); PlanFragment leftFragment = visit(optExpr.inputAt(0), context); PlanFragment rightFragment = visit(optExpr.inputAt(1), context); List<Expr> conjuncts = extractConjuncts(node.getPredicate(), context); List<Expr> joinOnConjuncts = extractConjuncts(node.getOnPredicate(), context); List<Expr> probePartitionByExprs = Lists.newArrayList(); DistributionSpec leftDistributionSpec = optExpr.getRequiredProperties().get(0).getDistributionProperty().getSpec(); DistributionSpec rightDistributionSpec = optExpr.getRequiredProperties().get(1).getDistributionProperty().getSpec(); if (leftDistributionSpec instanceof HashDistributionSpec && rightDistributionSpec instanceof HashDistributionSpec) { probePartitionByExprs = getShuffleExprs((HashDistributionSpec) leftDistributionSpec, context); } setNullableForJoin(node.getJoinType(), leftFragment, rightFragment, context); NestLoopJoinNode joinNode = new NestLoopJoinNode(context.getNextNodeId(), leftFragment.getPlanRoot(), rightFragment.getPlanRoot(), null, node.getJoinType(), Lists.newArrayList(), joinOnConjuncts); joinNode.setLimit(node.getLimit()); joinNode.computeStatistics(optExpr.getStatistics()); joinNode.addConjuncts(conjuncts); joinNode.setProbePartitionByExprs(probePartitionByExprs); rightFragment.getPlanRoot().setFragment(leftFragment); context.getFragments().remove(rightFragment); context.getFragments().remove(leftFragment); context.getFragments().add(leftFragment); leftFragment.setPlanRoot(joinNode); leftFragment.addChildren(rightFragment.getChildren()); if (!(joinNode.getChild(1) instanceof ExchangeNode)) { joinNode.setReplicated(true); } if (shouldBuildGlobalRuntimeFilter()) { joinNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl()); } leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts()); return leftFragment; } @Override public PlanFragment visitPhysicalMergeJoin(OptExpression optExpr, ExecPlan context) { PlanFragment 
leftFragment = visit(optExpr.inputAt(0), context); PlanFragment rightFragment = visit(optExpr.inputAt(1), context); PlanNode leftPlanRoot = leftFragment.getPlanRoot(); PlanNode rightPlanRoot = rightFragment.getPlanRoot(); OptExpression leftExpression = optExpr.inputAt(0); OptExpression rightExpression = optExpr.inputAt(1); boolean needDealSort = leftExpression.getInputs().size() > 0 && rightExpression.getInputs().size() > 0; if (needDealSort) { optExpr.setChild(0, leftExpression.inputAt(0)); optExpr.setChild(1, rightExpression.inputAt(0)); leftFragment.setPlanRoot(leftPlanRoot.getChild(0)); rightFragment.setPlanRoot(rightPlanRoot.getChild(0)); } PlanFragment planFragment = visitPhysicalJoin(leftFragment, rightFragment, optExpr, context); if (needDealSort) { leftExpression.setChild(0, optExpr.inputAt(0)); rightExpression.setChild(0, optExpr.inputAt(1)); optExpr.setChild(0, leftExpression); optExpr.setChild(1, rightExpression); planFragment.getPlanRoot().setChild(0, leftPlanRoot); planFragment.getPlanRoot().setChild(1, rightPlanRoot); } return planFragment; } private List<ColumnRefOperator> getShuffleColumns(HashDistributionSpec spec) { List<Integer> columnRefs = spec.getShuffleColumns(); Preconditions.checkState(!columnRefs.isEmpty()); List<ColumnRefOperator> shuffleColumns = new ArrayList<>(); for (int columnId : columnRefs) { shuffleColumns.add(columnRefFactory.getColumnRef(columnId)); } return shuffleColumns; } private List<Expr> getShuffleExprs(HashDistributionSpec hashDistributionSpec, ExecPlan context) { List<ColumnRefOperator> shuffleColumns = getShuffleColumns(hashDistributionSpec); return shuffleColumns.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()))) .collect(Collectors.toList()); } private PlanFragment visitPhysicalJoin(PlanFragment leftFragment, PlanFragment rightFragment, OptExpression optExpr, ExecPlan context) { PhysicalJoinOperator node = (PhysicalJoinOperator) 
optExpr.getOp(); ColumnRefSet leftChildColumns = optExpr.inputAt(0).getLogicalProperty().getOutputColumns(); ColumnRefSet rightChildColumns = optExpr.inputAt(1).getLogicalProperty().getOutputColumns(); JoinOperator joinOperator = node.getJoinType(); Preconditions.checkState(!joinOperator.isCrossJoin(), "should not be cross join"); PlanNode leftFragmentPlanRoot = leftFragment.getPlanRoot(); PlanNode rightFragmentPlanRoot = rightFragment.getPlanRoot(); if (leftFragmentPlanRoot instanceof DecodeNode) { leftFragmentPlanRoot = leftFragmentPlanRoot.getChild(0); } if (rightFragmentPlanRoot instanceof DecodeNode) { rightFragmentPlanRoot = rightFragmentPlanRoot.getChild(0); } List<Expr> probePartitionByExprs = Lists.newArrayList(); DistributionSpec leftDistributionSpec = optExpr.getRequiredProperties().get(0).getDistributionProperty().getSpec(); DistributionSpec rightDistributionSpec = optExpr.getRequiredProperties().get(1).getDistributionProperty().getSpec(); if (leftDistributionSpec instanceof HashDistributionSpec && rightDistributionSpec instanceof HashDistributionSpec) { probePartitionByExprs = getShuffleExprs((HashDistributionSpec) leftDistributionSpec, context); } JoinNode.DistributionMode distributionMode = inferDistributionMode(optExpr, leftFragmentPlanRoot, rightFragmentPlanRoot); JoinExprInfo joinExpr = buildJoinExpr(optExpr, context); List<Expr> eqJoinConjuncts = joinExpr.eqJoinConjuncts; List<Expr> otherJoinConjuncts = joinExpr.otherJoin; List<Expr> conjuncts = joinExpr.conjuncts; setNullableForJoin(joinOperator, leftFragment, rightFragment, context); JoinNode joinNode; if (node instanceof PhysicalHashJoinOperator) { joinNode = new HashJoinNode( context.getNextNodeId(), leftFragment.getPlanRoot(), rightFragment.getPlanRoot(), joinOperator, eqJoinConjuncts, otherJoinConjuncts); } else if (node instanceof PhysicalMergeJoinOperator) { joinNode = new MergeJoinNode( context.getNextNodeId(), leftFragment.getPlanRoot(), rightFragment.getPlanRoot(), joinOperator, 
eqJoinConjuncts, otherJoinConjuncts);
        } else {
            throw new StarRocksPlannerException("unknown join operator: " + node, INTERNAL_ERROR);
        }

        // Restrict the join's output slots to the columns actually referenced by the
        // projection (common sub-operator outputs are produced later, so they are excluded).
        if (node.getProjection() != null) {
            ColumnRefSet outputColumns = new ColumnRefSet();
            for (ScalarOperator s : node.getProjection().getColumnRefMap().values()) {
                outputColumns.union(s.getUsedColumns());
            }
            for (ScalarOperator s : node.getProjection().getCommonSubOperatorMap().values()) {
                outputColumns.union(s.getUsedColumns());
            }
            outputColumns.except(new ArrayList<>(node.getProjection().getCommonSubOperatorMap().keySet()));
            joinNode.setOutputSlots(outputColumns.getStream().boxed().collect(Collectors.toList()));
        }

        joinNode.setDistributionMode(distributionMode);
        joinNode.getConjuncts().addAll(conjuncts);
        joinNode.setLimit(node.getLimit());
        joinNode.computeStatistics(optExpr.getStatistics());
        joinNode.setProbePartitionByExprs(probePartitionByExprs);

        if (shouldBuildGlobalRuntimeFilter()) {
            joinNode.buildRuntimeFilters(runtimeFilterIdIdGenerator, context.getDescTbl());
        }

        return buildJoinFragment(context, leftFragment, rightFragment, distributionMode, joinNode);
    }

    // Whether `node` is an ExchangeNode of the given distribution type.
    private boolean isExchangeWithDistributionType(PlanNode node, DistributionSpec.DistributionType expectedType) {
        if (!(node instanceof ExchangeNode)) {
            return false;
        }
        ExchangeNode exchangeNode = (ExchangeNode) node;
        return Objects.equals(exchangeNode.getDistributionType(), expectedType);
    }

    // A colocate join requires every child's required distribution to be a LOCAL hash shuffle.
    private boolean isColocateJoin(OptExpression optExpression) {
        return optExpression.getRequiredProperties().stream().allMatch(
                physicalPropertySet -> {
                    if (!physicalPropertySet.getDistributionProperty().isShuffle()) {
                        return false;
                    }
                    HashDistributionDesc.SourceType hashSourceType =
                            ((HashDistributionSpec) (physicalPropertySet.getDistributionProperty().getSpec()))
                                    .getHashDistributionDesc().getSourceType();
                    return hashSourceType.equals(HashDistributionDesc.SourceType.LOCAL);
                });
    }

    // A shuffle join requires every child's required hash distribution to come from a
    // shuffle source (join / enforce / agg).
    public boolean isShuffleJoin(OptExpression optExpression) {
        return optExpression.getRequiredProperties().stream().allMatch(
                physicalPropertySet -> {
                    if (!physicalPropertySet.getDistributionProperty().isShuffle()) {
                        return false;
                    }
                    HashDistributionDesc.SourceType hashSourceType =
                            ((HashDistributionSpec) (physicalPropertySet.getDistributionProperty().getSpec()))
                                    .getHashDistributionDesc().getSourceType();
                    return hashSourceType.equals(HashDistributionDesc.SourceType.SHUFFLE_JOIN) ||
                            hashSourceType.equals(HashDistributionDesc.SourceType.SHUFFLE_ENFORCE) ||
                            hashSourceType.equals(HashDistributionDesc.SourceType.SHUFFLE_AGG);
                });
    }

    // Merge the build-side fragment into the probe-side fragment for a bucket-shuffle join:
    // the removed fragment's child re-targets its output to BUCKET_SHUFFLE_HASH_PARTITIONED.
    public PlanFragment computeBucketShufflePlanFragment(ExecPlan context,
                                                         PlanFragment stayFragment,
                                                         PlanFragment removeFragment, JoinNode hashJoinNode) {
        hashJoinNode.setLocalHashBucket(true);
        hashJoinNode.setPartitionExprs(removeFragment.getDataPartition().getPartitionExprs());
        removeFragment.getChild(0)
                .setOutputPartition(new DataPartition(TPartitionType.BUCKET_SHUFFLE_HASH_PARTITIONED,
                        removeFragment.getDataPartition().getPartitionExprs()));

        // Remove and re-add stayFragment to keep the fragment list ordered correctly.
        context.getFragments().remove(removeFragment);
        context.getFragments().remove(stayFragment);
        context.getFragments().add(stayFragment);

        stayFragment.setPlanRoot(hashJoinNode);
        stayFragment.addChildren(removeFragment.getChildren());
        stayFragment.mergeQueryGlobalDicts(removeFragment.getQueryGlobalDicts());
        return stayFragment;
    }

    // Merge the build-side fragment into the probe-side fragment for a shuffle hash-bucket join:
    // the removed fragment's child re-targets its output to HASH_PARTITIONED.
    public PlanFragment computeShuffleHashBucketPlanFragment(ExecPlan context,
                                                             PlanFragment stayFragment,
                                                             PlanFragment removeFragment,
                                                             JoinNode hashJoinNode) {
        hashJoinNode.setPartitionExprs(removeFragment.getDataPartition().getPartitionExprs());
        DataPartition dataPartition = new DataPartition(TPartitionType.HASH_PARTITIONED,
                removeFragment.getDataPartition().getPartitionExprs());
        removeFragment.getChild(0).setOutputPartition(dataPartition);

        // Remove and re-add stayFragment to keep the fragment list ordered correctly.
        context.getFragments().remove(removeFragment);
        context.getFragments().remove(stayFragment);
        context.getFragments().add(stayFragment);

        stayFragment.setPlanRoot(hashJoinNode);
        stayFragment.addChildren(removeFragment.getChildren());
        stayFragment.mergeQueryGlobalDicts(removeFragment.getQueryGlobalDicts());
        return stayFragment;
    }

    // Lower an assert-one-row operator; all input slots become nullable because the
    // assert node may emit a null row (e.g. for empty scalar subqueries).
    @Override
    public PlanFragment visitPhysicalAssertOneRow(OptExpression optExpression, ExecPlan context) {
        PlanFragment inputFragment = visit(optExpression.inputAt(0), context);

        for (TupleId id : inputFragment.getPlanRoot().getTupleIds()) {
            context.getDescTbl().getTupleDesc(id).getSlots().forEach(s -> s.setIsNullable(true));
        }

        PhysicalAssertOneRowOperator assertOneRow = (PhysicalAssertOneRowOperator) optExpression.getOp();
        AssertNumRowsNode node =
                new AssertNumRowsNode(context.getNextNodeId(), inputFragment.getPlanRoot(),
                        new AssertNumRowsElement(assertOneRow.getCheckRows(), assertOneRow.getTips(),
                                assertOneRow.getAssertion()));
        node.computeStatistics(optExpression.getStatistics());
        inputFragment.setPlanRoot(node);
        return inputFragment;
    }

    @Override
    public PlanFragment visitPhysicalAnalytic(OptExpression optExpr, ExecPlan context) {
        PlanFragment inputFragment = visit(optExpr.inputAt(0), context);
        PhysicalWindowOperator node = (PhysicalWindowOperator) optExpr.getOp();

        List<Expr> analyticFnCalls = new ArrayList<>();
        TupleDescriptor outputTupleDesc = context.getDescTbl().createTupleDescriptor();
        // Translate each window function call and register an output slot for it.
        for (Map.Entry<ColumnRefOperator, CallOperator> analyticCall : node.getAnalyticCall().entrySet()) {
            Expr analyticFunction = ScalarOperatorToExpr.buildExecExpression(analyticCall.getValue(),
                    new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()));
            analyticFnCalls.add(analyticFunction);

            SlotDescriptor slotDesc = context.getDescTbl()
                    .addSlotDescriptor(outputTupleDesc, new SlotId(analyticCall.getKey().getId()));
            slotDesc.setType(analyticFunction.getType());
            slotDesc.setIsNullable(analyticFunction.isNullable());
            slotDesc.setIsMaterialized(true);
            context.getColRefToExpr()
                    .put(analyticCall.getKey(), new SlotRef(analyticCall.getKey().toString(), slotDesc));
        }

        List<Expr> partitionExprs =
node.getPartitionExpressions().stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()))) .collect(Collectors.toList()); List<OrderByElement> orderByElements = node.getOrderByElements().stream().map(e -> new OrderByElement( ScalarOperatorToExpr.buildExecExpression(e.getColumnRef(), new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr())), e.isAscending(), e.isNullsFirst())).collect(Collectors.toList()); AnalyticEvalNode analyticEvalNode = new AnalyticEvalNode( context.getNextNodeId(), inputFragment.getPlanRoot(), analyticFnCalls, partitionExprs, orderByElements, node.getAnalyticWindow(), null, outputTupleDesc, null, null, context.getDescTbl().createTupleDescriptor()); analyticEvalNode.setSubstitutedPartitionExprs(partitionExprs); analyticEvalNode.setLimit(node.getLimit()); analyticEvalNode.setHasNullableGenerateChild(); analyticEvalNode.computeStatistics(optExpr.getStatistics()); if (hasColocateOlapScanChildInFragment(analyticEvalNode)) { analyticEvalNode.setColocate(true); } List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator predicate : predicates) { analyticEvalNode.getConjuncts() .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } PlanNode root = inputFragment.getPlanRoot(); if (root instanceof SortNode) { SortNode sortNode = (SortNode) root; sortNode.setAnalyticPartitionExprs(analyticEvalNode.getPartitionExprs()); } inputFragment.setPlanRoot(analyticEvalNode); return inputFragment; } private PlanFragment buildSetOperation(OptExpression optExpr, ExecPlan context, OperatorType operatorType) { PhysicalSetOperation setOperation = (PhysicalSetOperation) optExpr.getOp(); TupleDescriptor setOperationTuple = context.getDescTbl().createTupleDescriptor(); for (ColumnRefOperator 
columnRefOperator : setOperation.getOutputColumnRefOp()) { SlotDescriptor slotDesc = context.getDescTbl() .addSlotDescriptor(setOperationTuple, new SlotId(columnRefOperator.getId())); slotDesc.setType(columnRefOperator.getType()); slotDesc.setIsMaterialized(true); slotDesc.setIsNullable(columnRefOperator.isNullable()); context.getColRefToExpr().put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDesc)); } SetOperationNode setOperationNode; boolean isUnion = false; if (operatorType.equals(OperatorType.PHYSICAL_UNION)) { isUnion = true; setOperationNode = new UnionNode(context.getNextNodeId(), setOperationTuple.getId()); setOperationNode.setFirstMaterializedChildIdx_(optExpr.arity()); } else if (operatorType.equals(OperatorType.PHYSICAL_EXCEPT)) { setOperationNode = new ExceptNode(context.getNextNodeId(), setOperationTuple.getId()); } else if (operatorType.equals(OperatorType.PHYSICAL_INTERSECT)) { setOperationNode = new IntersectNode(context.getNextNodeId(), setOperationTuple.getId()); } else { throw new StarRocksPlannerException("Unsupported set operation", INTERNAL_ERROR); } List<Map<Integer, Integer>> outputSlotIdToChildSlotIdMaps = new ArrayList<>(); for (int childIdx = 0; childIdx < optExpr.arity(); ++childIdx) { Map<Integer, Integer> slotIdMap = new HashMap<>(); List<ColumnRefOperator> childOutput = setOperation.getChildOutputColumns().get(childIdx); Preconditions.checkState(childOutput.size() == setOperation.getOutputColumnRefOp().size()); for (int columnIdx = 0; columnIdx < setOperation.getOutputColumnRefOp().size(); ++columnIdx) { Integer resultColumnIdx = setOperation.getOutputColumnRefOp().get(columnIdx).getId(); slotIdMap.put(resultColumnIdx, childOutput.get(columnIdx).getId()); } outputSlotIdToChildSlotIdMaps.add(slotIdMap); Preconditions.checkState(slotIdMap.size() == setOperation.getOutputColumnRefOp().size()); } setOperationNode.setOutputSlotIdToChildSlotIdMaps(outputSlotIdToChildSlotIdMaps); 
Preconditions.checkState(optExpr.getInputs().size() == setOperation.getChildOutputColumns().size()); PlanFragment setOperationFragment = new PlanFragment(context.getNextFragmentId(), setOperationNode, DataPartition.RANDOM); List<List<Expr>> materializedResultExprLists = Lists.newArrayList(); for (int i = 0; i < optExpr.getInputs().size(); i++) { List<ColumnRefOperator> childOutput = setOperation.getChildOutputColumns().get(i); PlanFragment fragment = visit(optExpr.getInputs().get(i), context); List<Expr> materializedExpressions = Lists.newArrayList(); for (ColumnRefOperator ref : childOutput) { SlotDescriptor slotDescriptor = context.getDescTbl().getSlotDesc(new SlotId(ref.getId())); materializedExpressions.add(new SlotRef(slotDescriptor)); } materializedResultExprLists.add(materializedExpressions); if (isUnion) { fragment.setOutputPartition(DataPartition.RANDOM); } else { fragment.setOutputPartition(DataPartition.hashPartitioned(materializedExpressions)); } ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(), fragment.getPlanRoot(), fragment.getDataPartition()); exchangeNode.setFragment(setOperationFragment); fragment.setDestination(exchangeNode); setOperationNode.addChild(exchangeNode); } setOperationNode.setHasNullableGenerateChild(); List<Expr> setOutputList = Lists.newArrayList(); for (ColumnRefOperator columnRefOperator : setOperation.getOutputColumnRefOp()) { SlotDescriptor slotDesc = context.getDescTbl().getSlotDesc(new SlotId(columnRefOperator.getId())); slotDesc.setIsNullable(slotDesc.getIsNullable() | setOperationNode.isHasNullableGenerateChild()); setOutputList.add(new SlotRef(String.valueOf(columnRefOperator.getId()), slotDesc)); } setOperationTuple.computeMemLayout(); setOperationNode.setSetOperationOutputList(setOutputList); setOperationNode.setMaterializedResultExprLists_(materializedResultExprLists); setOperationNode.setLimit(setOperation.getLimit()); setOperationNode.computeStatistics(optExpr.getStatistics()); 
context.getFragments().add(setOperationFragment); return setOperationFragment; } @Override public PlanFragment visitPhysicalUnion(OptExpression optExpr, ExecPlan context) { return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_UNION); } @Override public PlanFragment visitPhysicalExcept(OptExpression optExpr, ExecPlan context) { return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_EXCEPT); } @Override public PlanFragment visitPhysicalIntersect(OptExpression optExpr, ExecPlan context) { return buildSetOperation(optExpr, context, OperatorType.PHYSICAL_INTERSECT); } @Override public PlanFragment visitPhysicalRepeat(OptExpression optExpr, ExecPlan context) { PlanFragment inputFragment = visit(optExpr.inputAt(0), context); PhysicalRepeatOperator repeatOperator = (PhysicalRepeatOperator) optExpr.getOp(); TupleDescriptor outputGroupingTuple = context.getDescTbl().createTupleDescriptor(); for (ColumnRefOperator columnRefOperator : repeatOperator.getOutputGrouping()) { SlotDescriptor slotDesc = context.getDescTbl() .addSlotDescriptor(outputGroupingTuple, new SlotId(columnRefOperator.getId())); slotDesc.setType(columnRefOperator.getType()); slotDesc.setIsMaterialized(true); slotDesc.setIsNullable(columnRefOperator.isNullable()); context.getColRefToExpr().put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDesc)); } outputGroupingTuple.computeMemLayout(); List<Set<Integer>> repeatSlotIdList = new ArrayList<>(); for (List<ColumnRefOperator> repeat : repeatOperator.getRepeatColumnRef()) { repeatSlotIdList.add( repeat.stream().map(ColumnRefOperator::getId).collect(Collectors.toSet())); } RepeatNode repeatNode = new RepeatNode( context.getNextNodeId(), inputFragment.getPlanRoot(), outputGroupingTuple, repeatSlotIdList, repeatOperator.getGroupingIds()); List<ScalarOperator> predicates = Utils.extractConjuncts(repeatOperator.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new 
ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator predicate : predicates) { repeatNode.getConjuncts().add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } repeatNode.computeStatistics(optExpr.getStatistics()); inputFragment.setPlanRoot(repeatNode); return inputFragment; } @Override public PlanFragment visitPhysicalFilter(OptExpression optExpr, ExecPlan context) { PlanFragment inputFragment = visit(optExpr.inputAt(0), context); PhysicalFilterOperator filter = (PhysicalFilterOperator) optExpr.getOp(); List<Expr> predicates = Utils.extractConjuncts(filter.getPredicate()).stream() .map(d -> ScalarOperatorToExpr.buildExecExpression(d, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()))) .collect(Collectors.toList()); SelectNode selectNode = new SelectNode(context.getNextNodeId(), inputFragment.getPlanRoot(), predicates); selectNode.setLimit(filter.getLimit()); selectNode.computeStatistics(optExpr.getStatistics()); inputFragment.setPlanRoot(selectNode); return inputFragment; } @Override public PlanFragment visitPhysicalTableFunction(OptExpression optExpression, ExecPlan context) { PlanFragment inputFragment = visit(optExpression.inputAt(0), context); PhysicalTableFunctionOperator physicalTableFunction = (PhysicalTableFunctionOperator) optExpression.getOp(); TupleDescriptor udtfOutputTuple = context.getDescTbl().createTupleDescriptor(); for (int columnId : physicalTableFunction.getOutputColumns().getColumnIds()) { ColumnRefOperator columnRefOperator = columnRefFactory.getColumnRef(columnId); SlotDescriptor slotDesc = context.getDescTbl().addSlotDescriptor(udtfOutputTuple, new SlotId(columnRefOperator.getId())); slotDesc.setType(columnRefOperator.getType()); slotDesc.setIsMaterialized(true); slotDesc.setIsNullable(columnRefOperator.isNullable()); context.getColRefToExpr().put(columnRefOperator, new SlotRef(columnRefOperator.toString(), slotDesc)); } udtfOutputTuple.computeMemLayout(); 
TableFunctionNode tableFunctionNode = new TableFunctionNode(context.getNextNodeId(), inputFragment.getPlanRoot(), udtfOutputTuple, physicalTableFunction.getFn(), physicalTableFunction.getFnParamColumnRef().stream().map(ColumnRefOperator::getId) .collect(Collectors.toList()), Arrays.stream(physicalTableFunction.getOuterColumnRefSet().getColumnIds()).boxed() .collect(Collectors.toList()), Arrays.stream(physicalTableFunction.getFnResultColumnRefSet().getColumnIds()).boxed() .collect(Collectors.toList())); tableFunctionNode.computeStatistics(optExpression.getStatistics()); tableFunctionNode.setLimit(physicalTableFunction.getLimit()); inputFragment.setPlanRoot(tableFunctionNode); return inputFragment; } @Override public PlanFragment visitPhysicalLimit(OptExpression optExpression, ExecPlan context) { return visit(optExpression.inputAt(0), context); } @Override public PlanFragment visitPhysicalCTEConsume(OptExpression optExpression, ExecPlan context) { PhysicalCTEConsumeOperator consume = (PhysicalCTEConsumeOperator) optExpression.getOp(); int cteId = consume.getCteId(); MultiCastPlanFragment cteFragment = (MultiCastPlanFragment) context.getCteProduceFragments().get(cteId); ExchangeNode exchangeNode = new ExchangeNode(context.getNextNodeId(), cteFragment.getPlanRoot(), DistributionSpec.DistributionType.SHUFFLE); exchangeNode.setReceiveColumns(consume.getCteOutputColumnRefMap().values().stream() .map(ColumnRefOperator::getId).collect(Collectors.toList())); exchangeNode.setDataPartition(cteFragment.getDataPartition()); exchangeNode.setNumInstances(cteFragment.getPlanRoot().getNumInstances()); PlanFragment consumeFragment = new PlanFragment(context.getNextFragmentId(), exchangeNode, cteFragment.getDataPartition()); Map<ColumnRefOperator, ScalarOperator> projectMap = Maps.newHashMap(); projectMap.putAll(consume.getCteOutputColumnRefMap()); consumeFragment = buildProjectNode(optExpression, new Projection(projectMap), consumeFragment, context); 
consumeFragment.setQueryGlobalDicts(cteFragment.getQueryGlobalDicts()); consumeFragment.setLoadGlobalDicts(cteFragment.getLoadGlobalDicts()); if (consume.getPredicate() != null) { List<Expr> predicates = Utils.extractConjuncts(consume.getPredicate()).stream() .map(d -> ScalarOperatorToExpr.buildExecExpression(d, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()))) .collect(Collectors.toList()); SelectNode selectNode = new SelectNode(context.getNextNodeId(), consumeFragment.getPlanRoot(), predicates); selectNode.computeStatistics(optExpression.getStatistics()); consumeFragment.setPlanRoot(selectNode); } if (consume.hasLimit()) { consumeFragment.getPlanRoot().setLimit(consume.getLimit()); } cteFragment.getDestNodeList().add(exchangeNode); consumeFragment.addChild(cteFragment); context.getFragments().add(consumeFragment); return consumeFragment; } @Override public PlanFragment visitPhysicalCTEProduce(OptExpression optExpression, ExecPlan context) { PlanFragment child = visit(optExpression.inputAt(0), context); int cteId = ((PhysicalCTEProduceOperator) optExpression.getOp()).getCteId(); context.getFragments().remove(child); MultiCastPlanFragment cteProduce = new MultiCastPlanFragment(child); List<Expr> outputs = Lists.newArrayList(); optExpression.getOutputColumns().getStream() .forEach(i -> outputs.add(context.getColRefToExpr().get(columnRefFactory.getColumnRef(i)))); cteProduce.setOutputExprs(outputs); context.getCteProduceFragments().put(cteId, cteProduce); context.getFragments().add(cteProduce); return child; } @Override public PlanFragment visitPhysicalCTEAnchor(OptExpression optExpression, ExecPlan context) { visit(optExpression.inputAt(0), context); return visit(optExpression.inputAt(1), context); } @Override public PlanFragment visitPhysicalNoCTE(OptExpression optExpression, ExecPlan context) { return visit(optExpression.inputAt(0), context); } static class JoinExprInfo { public final List<Expr> eqJoinConjuncts; public final List<Expr> 
otherJoin; public final List<Expr> conjuncts; public JoinExprInfo(List<Expr> eqJoinConjuncts, List<Expr> otherJoin, List<Expr> conjuncts) { this.eqJoinConjuncts = eqJoinConjuncts; this.otherJoin = otherJoin; this.conjuncts = conjuncts; } } private JoinExprInfo buildJoinExpr(OptExpression optExpr, ExecPlan context) { ScalarOperator predicate = optExpr.getOp().getPredicate(); ScalarOperator onPredicate; if (optExpr.getOp() instanceof PhysicalJoinOperator) { onPredicate = ((PhysicalJoinOperator) optExpr.getOp()).getOnPredicate(); } else if (optExpr.getOp() instanceof PhysicalStreamJoinOperator) { onPredicate = ((PhysicalStreamJoinOperator) optExpr.getOp()).getOnPredicate(); } else { throw new IllegalStateException("not supported join " + optExpr.getOp()); } List<ScalarOperator> onPredicates = Utils.extractConjuncts(onPredicate); ColumnRefSet leftChildColumns = optExpr.inputAt(0).getOutputColumns(); ColumnRefSet rightChildColumns = optExpr.inputAt(1).getOutputColumns(); List<BinaryPredicateOperator> eqOnPredicates = JoinHelper.getEqualsPredicate( leftChildColumns, rightChildColumns, onPredicates); Preconditions.checkState(!eqOnPredicates.isEmpty(), "must be eq-join"); for (BinaryPredicateOperator s : eqOnPredicates) { if (!optExpr.inputAt(0).getLogicalProperty().getOutputColumns() .containsAll(s.getChild(0).getUsedColumns())) { s.swap(); } } List<Expr> eqJoinConjuncts = eqOnPredicates.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()))) .collect(Collectors.toList()); for (Expr expr : eqJoinConjuncts) { if (expr.isConstant()) { throw unsupportedException("Support join on constant predicate later"); } } List<ScalarOperator> otherJoin = Utils.extractConjuncts(onPredicate); otherJoin.removeAll(eqOnPredicates); List<Expr> otherJoinConjuncts = otherJoin.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()))) 
.collect(Collectors.toList()); List<ScalarOperator> predicates = Utils.extractConjuncts(predicate); List<Expr> conjuncts = predicates.stream().map(e -> ScalarOperatorToExpr.buildExecExpression(e, new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()))) .collect(Collectors.toList()); return new JoinExprInfo(eqJoinConjuncts, otherJoinConjuncts, conjuncts); } @Override public PlanFragment visitPhysicalStreamJoin(OptExpression optExpr, ExecPlan context) { PhysicalStreamJoinOperator node = (PhysicalStreamJoinOperator) optExpr.getOp(); PlanFragment leftFragment = visit(optExpr.inputAt(0), context); PlanFragment rightFragment = visit(optExpr.inputAt(1), context); ColumnRefSet leftChildColumns = optExpr.inputAt(0).getLogicalProperty().getOutputColumns(); ColumnRefSet rightChildColumns = optExpr.inputAt(1).getLogicalProperty().getOutputColumns(); if (!node.getJoinType().isInnerJoin()) { throw new NotImplementedException("Only inner join is supported"); } JoinOperator joinOperator = node.getJoinType(); PlanNode leftFragmentPlanRoot = leftFragment.getPlanRoot(); PlanNode rightFragmentPlanRoot = rightFragment.getPlanRoot(); JoinNode.DistributionMode distributionMode = JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET; JoinExprInfo joinExpr = buildJoinExpr(optExpr, context); List<Expr> eqJoinConjuncts = joinExpr.eqJoinConjuncts; List<Expr> otherJoinConjuncts = joinExpr.otherJoin; List<Expr> conjuncts = joinExpr.conjuncts; List<PlanFragment> nullablePlanFragments = new ArrayList<>(); if (joinOperator.isLeftOuterJoin()) { nullablePlanFragments.add(rightFragment); } else if (joinOperator.isRightOuterJoin()) { nullablePlanFragments.add(leftFragment); } else if (joinOperator.isFullOuterJoin()) { nullablePlanFragments.add(leftFragment); nullablePlanFragments.add(rightFragment); } for (PlanFragment planFragment : nullablePlanFragments) { for (TupleId tupleId : planFragment.getPlanRoot().getTupleIds()) { context.getDescTbl().getTupleDesc(tupleId).getSlots().forEach(slot -> 
slot.setIsNullable(true)); } } JoinNode joinNode = new StreamJoinNode(context.getNextNodeId(), leftFragmentPlanRoot, rightFragmentPlanRoot, node.getJoinType(), eqJoinConjuncts, otherJoinConjuncts); if (node.getProjection() != null) { ColumnRefSet outputColumns = new ColumnRefSet(); for (ScalarOperator s : node.getProjection().getColumnRefMap().values()) { outputColumns.union(s.getUsedColumns()); } for (ScalarOperator s : node.getProjection().getCommonSubOperatorMap().values()) { outputColumns.union(s.getUsedColumns()); } outputColumns.except(new ArrayList<>(node.getProjection().getCommonSubOperatorMap().keySet())); joinNode.setOutputSlots(outputColumns.getStream().boxed().collect(Collectors.toList())); } joinNode.setDistributionMode(distributionMode); joinNode.getConjuncts().addAll(conjuncts); joinNode.setLimit(node.getLimit()); joinNode.computeStatistics(optExpr.getStatistics()); return buildJoinFragment(context, leftFragment, rightFragment, distributionMode, joinNode); } @NotNull private PlanFragment buildJoinFragment(ExecPlan context, PlanFragment leftFragment, PlanFragment rightFragment, JoinNode.DistributionMode distributionMode, JoinNode joinNode) { if (distributionMode.equals(JoinNode.DistributionMode.BROADCAST)) { setJoinPushDown(joinNode); rightFragment.getPlanRoot().setFragment(leftFragment); context.getFragments().remove(rightFragment); context.getFragments().remove(leftFragment); context.getFragments().add(leftFragment); leftFragment.setPlanRoot(joinNode); leftFragment.addChildren(rightFragment.getChildren()); leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts()); return leftFragment; } else if (distributionMode.equals(JoinNode.DistributionMode.PARTITIONED)) { DataPartition lhsJoinPartition = new DataPartition(TPartitionType.HASH_PARTITIONED, leftFragment.getDataPartition().getPartitionExprs()); DataPartition rhsJoinPartition = new DataPartition(TPartitionType.HASH_PARTITIONED, rightFragment.getDataPartition().getPartitionExprs()); 
leftFragment.getChild(0).setOutputPartition(lhsJoinPartition); rightFragment.getChild(0).setOutputPartition(rhsJoinPartition); context.getFragments().remove(leftFragment); context.getFragments().remove(rightFragment); PlanFragment joinFragment = new PlanFragment(context.getNextFragmentId(), joinNode, lhsJoinPartition); joinFragment.addChildren(leftFragment.getChildren()); joinFragment.addChildren(rightFragment.getChildren()); joinFragment.mergeQueryGlobalDicts(leftFragment.getQueryGlobalDicts()); joinFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts()); context.getFragments().add(joinFragment); return joinFragment; } else if (distributionMode.equals(JoinNode.DistributionMode.COLOCATE) || distributionMode.equals(JoinNode.DistributionMode.REPLICATED)) { if (distributionMode.equals(JoinNode.DistributionMode.COLOCATE)) { joinNode.setColocate(true, ""); } else { joinNode.setReplicated(true); } setJoinPushDown(joinNode); joinNode.setChild(0, leftFragment.getPlanRoot()); joinNode.setChild(1, rightFragment.getPlanRoot()); leftFragment.setPlanRoot(joinNode); leftFragment.addChildren(rightFragment.getChildren()); context.getFragments().remove(rightFragment); context.getFragments().remove(leftFragment); context.getFragments().add(leftFragment); leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts()); return leftFragment; } else if (distributionMode.equals(JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET)) { setJoinPushDown(joinNode); if (!(leftFragment.getPlanRoot() instanceof ExchangeNode) && !(rightFragment.getPlanRoot() instanceof ExchangeNode)) { joinNode.setChild(0, leftFragment.getPlanRoot()); joinNode.setChild(1, rightFragment.getPlanRoot()); leftFragment.setPlanRoot(joinNode); leftFragment.addChildren(rightFragment.getChildren()); context.getFragments().remove(rightFragment); context.getFragments().remove(leftFragment); context.getFragments().add(leftFragment); leftFragment.mergeQueryGlobalDicts(rightFragment.getQueryGlobalDicts()); return 
leftFragment; } else if (leftFragment.getPlanRoot() instanceof ExchangeNode && !(rightFragment.getPlanRoot() instanceof ExchangeNode)) { return computeShuffleHashBucketPlanFragment(context, rightFragment, leftFragment, joinNode); } else { return computeShuffleHashBucketPlanFragment(context, leftFragment, rightFragment, joinNode); } } else { setJoinPushDown(joinNode); if (leftFragment.getPlanRoot() instanceof ExchangeNode && !(rightFragment.getPlanRoot() instanceof ExchangeNode)) { leftFragment = computeBucketShufflePlanFragment(context, rightFragment, leftFragment, joinNode); } else { leftFragment = computeBucketShufflePlanFragment(context, leftFragment, rightFragment, joinNode); } return leftFragment; } } @NotNull private JoinNode.DistributionMode inferDistributionMode(OptExpression optExpr, PlanNode leftFragmentPlanRoot, PlanNode rightFragmentPlanRoot) { JoinNode.DistributionMode distributionMode; if (isExchangeWithDistributionType(leftFragmentPlanRoot, DistributionSpec.DistributionType.SHUFFLE) && isExchangeWithDistributionType(rightFragmentPlanRoot, DistributionSpec.DistributionType.SHUFFLE)) { distributionMode = JoinNode.DistributionMode.PARTITIONED; } else if (isExchangeWithDistributionType(rightFragmentPlanRoot, DistributionSpec.DistributionType.BROADCAST)) { distributionMode = JoinNode.DistributionMode.BROADCAST; } else if (!(leftFragmentPlanRoot instanceof ExchangeNode) && !(rightFragmentPlanRoot instanceof ExchangeNode)) { if (isColocateJoin(optExpr)) { distributionMode = HashJoinNode.DistributionMode.COLOCATE; } else if (ConnectContext.get().getSessionVariable().isEnableReplicationJoin() && rightFragmentPlanRoot.canDoReplicatedJoin()) { distributionMode = JoinNode.DistributionMode.REPLICATED; } else if (isShuffleJoin(optExpr)) { distributionMode = JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET; } else { Preconditions.checkState(false, "Must be colocate/bucket/replicate join"); distributionMode = JoinNode.DistributionMode.COLOCATE; } } else if 
(isShuffleJoin(optExpr)) { distributionMode = JoinNode.DistributionMode.SHUFFLE_HASH_BUCKET; } else { distributionMode = JoinNode.DistributionMode.LOCAL_HASH_BUCKET; } return distributionMode; } @Override public PlanFragment visitPhysicalStreamAgg(OptExpression optExpr, ExecPlan context) { PhysicalStreamAggOperator node = (PhysicalStreamAggOperator) optExpr.getOp(); PlanFragment inputFragment = visit(optExpr.inputAt(0), context); TupleDescriptor outputTupleDesc = context.getDescTbl().createTupleDescriptor(); AggregateExprInfo aggExpr = buildAggregateTuple(node.getAggregations(), node.getGroupBys(), null, outputTupleDesc, context); AggregateInfo aggInfo = AggregateInfo.create(aggExpr.groupExpr, aggExpr.aggregateExpr, outputTupleDesc, outputTupleDesc, AggregateInfo.AggPhase.FIRST); StreamAggNode aggNode = new StreamAggNode(context.getNextNodeId(), inputFragment.getPlanRoot(), aggInfo); aggNode.setHasNullableGenerateChild(); aggNode.computeStatistics(optExpr.getStatistics()); inputFragment.setPlanRoot(aggNode); return inputFragment; } @Override public PlanFragment visitPhysicalStreamScan(OptExpression optExpr, ExecPlan context) { PhysicalStreamScanOperator node = (PhysicalStreamScanOperator) optExpr.getOp(); OlapTable scanTable = (OlapTable) node.getTable(); context.getDescTbl().addReferencedTable(scanTable); TupleDescriptor tupleDescriptor = context.getDescTbl().createTupleDescriptor(); tupleDescriptor.setTable(scanTable); BinlogScanNode binlogScanNode = new BinlogScanNode(context.getNextNodeId(), tupleDescriptor); binlogScanNode.computeStatistics(optExpr.getStatistics()); try { binlogScanNode.computeScanRanges(); } catch (UserException e) { throw new StarRocksPlannerException( "Failed to compute scan ranges for StreamScanNode, " + e.getMessage(), INTERNAL_ERROR); } for (Map.Entry<ColumnRefOperator, Column> entry : node.getColRefToColumnMetaMap().entrySet()) { SlotDescriptor slotDescriptor = context.getDescTbl().addSlotDescriptor(tupleDescriptor, new 
SlotId(entry.getKey().getId())); slotDescriptor.setColumn(entry.getValue()); slotDescriptor.setIsNullable(entry.getValue().isAllowNull()); slotDescriptor.setIsMaterialized(true); context.getColRefToExpr().put(entry.getKey(), new SlotRef(entry.getKey().toString(), slotDescriptor)); } List<ScalarOperator> predicates = Utils.extractConjuncts(node.getPredicate()); ScalarOperatorToExpr.FormatterContext formatterContext = new ScalarOperatorToExpr.FormatterContext(context.getColRefToExpr()); for (ScalarOperator predicate : predicates) { binlogScanNode.getConjuncts() .add(ScalarOperatorToExpr.buildExecExpression(predicate, formatterContext)); } tupleDescriptor.computeMemLayout(); context.getScanNodes().add(binlogScanNode); PlanFragment fragment = new PlanFragment(context.getNextFragmentId(), binlogScanNode, DataPartition.RANDOM); context.getFragments().add(fragment); return fragment; } }
The value of that is passed to the ctor of `VertxHttpExporter` by `OtlpRecorder` (same as for the gRPC exporter). Furthermore, the tests already cover this
public void handle(HttpClientRequest request) { HttpClientRequest clientRequest = request.response(new Handler<>() { @Override public void handle(AsyncResult<HttpClientResponse> callResult) { if (callResult.succeeded()) { HttpClientResponse clientResponse = callResult.result(); clientResponse.body(new Handler<>() { @Override public void handle(AsyncResult<Buffer> bodyResult) { if (bodyResult.succeeded()) { onResponse.accept(new Response() { @Override public int statusCode() { return clientResponse.statusCode(); } @Override public String statusMessage() { return clientResponse.statusMessage(); } @Override public byte[] responseBody() { return bodyResult.result().getBytes(); } }); } else { onError.accept(bodyResult.cause()); } } }); } else { onError.accept(callResult.cause()); } } }) .putHeader("Content-Type", contentType); ByteArrayOutputStream os; if (compressionEnabled) { os = new ByteArrayOutputStream(contentLength); clientRequest.putHeader("Content-Encoding", "gzip"); try (var gzos = new GZIPOutputStream(os)) { marshaler.accept(gzos); } catch (IOException e) { throw new IllegalStateException(e); } } else { os = new NonCopyingByteArrayOutputStream(contentLength); marshaler.accept(os); } if (!headers.isEmpty()) { for (var entry : headers.entrySet()) { clientRequest.putHeader(entry.getKey(), entry.getValue()); } } clientRequest.send(Buffer.buffer(os.toByteArray())); }
clientRequest.putHeader("Content-Encoding", "gzip");
public void handle(HttpClientRequest request) { HttpClientRequest clientRequest = request.response(new Handler<>() { @Override public void handle(AsyncResult<HttpClientResponse> callResult) { if (callResult.succeeded()) { HttpClientResponse clientResponse = callResult.result(); clientResponse.body(new Handler<>() { @Override public void handle(AsyncResult<Buffer> bodyResult) { if (bodyResult.succeeded()) { onResponse.accept(new Response() { @Override public int statusCode() { return clientResponse.statusCode(); } @Override public String statusMessage() { return clientResponse.statusMessage(); } @Override public byte[] responseBody() { return bodyResult.result().getBytes(); } }); } else { onError.accept(bodyResult.cause()); } } }); } else { onError.accept(callResult.cause()); } } }) .putHeader("Content-Type", contentType); ByteArrayOutputStream os; if (compressionEnabled) { os = new ByteArrayOutputStream(contentLength); clientRequest.putHeader("Content-Encoding", "gzip"); try (var gzos = new GZIPOutputStream(os)) { marshaler.accept(gzos); } catch (IOException e) { throw new IllegalStateException(e); } } else { os = new NonCopyingByteArrayOutputStream(contentLength); marshaler.accept(os); } if (!headers.isEmpty()) { for (var entry : headers.entrySet()) { clientRequest.putHeader(entry.getKey(), entry.getValue()); } } clientRequest.send(Buffer.buffer(os.toByteArray())); }
class VertxHttpSender implements HttpSender { private static final String TRACES_PATH = "/v1/traces"; private final boolean compressionEnabled; private final Map<String, String> headers; private final String contentType; private final HttpClient client; VertxHttpSender( URI baseUri, boolean compressionEnabled, Duration timeout, Map<String, String> headersMap, String contentType, Consumer<HttpClientOptions> clientOptionsCustomizer, Vertx vertx) { this.compressionEnabled = compressionEnabled; this.headers = headersMap; this.contentType = contentType; var httpClientOptions = new HttpClientOptions() .setReadIdleTimeout((int) timeout.getSeconds()) .setDefaultHost(baseUri.getHost()) .setDefaultPort(getPort(baseUri)) .setTracingPolicy(TracingPolicy.IGNORE); clientOptionsCustomizer.accept(httpClientOptions); this.client = vertx.createHttpClient(httpClientOptions); } @Override public void send(Consumer<OutputStream> marshaler, int contentLength, Consumer<Response> onResponse, Consumer<Throwable> onError) { client.request(HttpMethod.POST, TRACES_PATH) .onSuccess(new Handler<>() { @Override }) .onFailure(onError::accept); } @Override public CompletableResultCode shutdown() { client.close(); return CompletableResultCode.ofSuccess(); } }
class VertxHttpSender implements HttpSender { private static final String TRACES_PATH = "/v1/traces"; private final boolean compressionEnabled; private final Map<String, String> headers; private final String contentType; private final HttpClient client; VertxHttpSender( URI baseUri, boolean compressionEnabled, Duration timeout, Map<String, String> headersMap, String contentType, Consumer<HttpClientOptions> clientOptionsCustomizer, Vertx vertx) { this.compressionEnabled = compressionEnabled; this.headers = headersMap; this.contentType = contentType; var httpClientOptions = new HttpClientOptions() .setReadIdleTimeout((int) timeout.getSeconds()) .setDefaultHost(baseUri.getHost()) .setDefaultPort(getPort(baseUri)) .setTracingPolicy(TracingPolicy.IGNORE); clientOptionsCustomizer.accept(httpClientOptions); this.client = vertx.createHttpClient(httpClientOptions); } @Override public void send(Consumer<OutputStream> marshaler, int contentLength, Consumer<Response> onResponse, Consumer<Throwable> onError) { client.request(HttpMethod.POST, TRACES_PATH) .onSuccess(new Handler<>() { @Override }) .onFailure(onError::accept); } @Override public CompletableResultCode shutdown() { client.close(); return CompletableResultCode.ofSuccess(); } }
fair enough ... I don't have a preference here.
public void testConcurrentGetAndIncrement() throws Exception { final int numThreads = 8; final CountDownLatch startLatch = new CountDownLatch(1); final CheckpointIDCounter counter = createCheckpointIdCounter(); counter.start(); ExecutorService executor = null; try { executor = Executors.newFixedThreadPool(numThreads); List<Future<List<Long>>> resultFutures = new ArrayList<>(numThreads); for (int i = 0; i < numThreads; i++) { resultFutures.add(executor.submit(new Incrementer(startLatch, counter))); } startLatch.countDown(); final int expectedTotal = numThreads * Incrementer.NumIncrements; List<Long> all = new ArrayList<>(expectedTotal); for (Future<List<Long>> result : resultFutures) { List<Long> counts = result.get(); assertStrictlyMonotonous(counts); all.addAll(counts); } Collections.sort(all); assertThat(all.size()).isEqualTo(expectedTotal); assertStrictlyMonotonous(all); final long lastCheckpointId = all.get(all.size() - 1); assertThat(lastCheckpointId).isLessThan(counter.get()); assertThat(lastCheckpointId).isLessThan(counter.getAndIncrement()); } finally { if (executor != null) { executor.shutdown(); } counter.shutdown(JobStatus.FINISHED); } }
if (executor != null) {
public void testConcurrentGetAndIncrement() throws Exception { final int numThreads = 8; final CountDownLatch startLatch = new CountDownLatch(1); final CheckpointIDCounter counter = createCheckpointIdCounter(); counter.start(); ExecutorService executor = null; try { executor = Executors.newFixedThreadPool(numThreads); List<Future<List<Long>>> resultFutures = new ArrayList<>(numThreads); for (int i = 0; i < numThreads; i++) { resultFutures.add(executor.submit(new Incrementer(startLatch, counter))); } startLatch.countDown(); final int expectedTotal = numThreads * Incrementer.NumIncrements; List<Long> all = new ArrayList<>(expectedTotal); for (Future<List<Long>> result : resultFutures) { List<Long> counts = result.get(); assertStrictlyMonotonous(counts); all.addAll(counts); } Collections.sort(all); assertThat(all.size()).isEqualTo(expectedTotal); assertStrictlyMonotonous(all); final long lastCheckpointId = all.get(all.size() - 1); assertThat(lastCheckpointId).isLessThan(counter.get()); assertThat(lastCheckpointId).isLessThan(counter.getAndIncrement()); } finally { if (executor != null) { executor.shutdown(); } counter.shutdown(JobStatus.FINISHED); } }
class CheckpointIDCounterTestBase extends TestLogger { protected abstract CheckpointIDCounter createCheckpointIdCounter() throws Exception; /** * This test guards an assumption made in the notifications in the {@link * org.apache.flink.runtime.operators.coordination.OperatorCoordinator}. The coordinator is * notified of a reset/restore and if no checkpoint yet exists (failure was before the first * checkpoint), a negative ID is passed. */ @Test public void testCounterIsNeverNegative() throws Exception { final CheckpointIDCounter counter = createCheckpointIdCounter(); try { counter.start(); assertThat(counter.get()).isGreaterThanOrEqualTo(0L); } finally { counter.shutdown(JobStatus.FINISHED); } } /** Tests serial increment and get calls. */ @Test public void testSerialIncrementAndGet() throws Exception { final CheckpointIDCounter counter = createCheckpointIdCounter(); try { counter.start(); assertThat(counter.getAndIncrement()).isEqualTo(1); assertThat(counter.get()).isEqualTo(2); assertThat(counter.getAndIncrement()).isEqualTo(2); assertThat(counter.get()).isEqualTo(3); assertThat(counter.getAndIncrement()).isEqualTo(3); assertThat(counter.get()).isEqualTo(4); assertThat(counter.getAndIncrement()).isEqualTo(4); } finally { counter.shutdown(JobStatus.FINISHED); } } /** * Tests concurrent increment and get calls from multiple Threads and verifies that the numbers * counts strictly increasing. 
*/ @Test private static void assertStrictlyMonotonous(List<Long> checkpointIds) { long current = -1; for (long checkpointId : checkpointIds) { assertThat(current).isLessThan(checkpointId); current = checkpointId; } } /** Tests a simple {@link CheckpointIDCounter @Test public void testSetCount() throws Exception { final CheckpointIDCounter counter = createCheckpointIdCounter(); counter.start(); counter.setCount(1337); assertThat(counter.get()).isEqualTo(1337); assertThat(counter.getAndIncrement()).isEqualTo(1337); assertThat(counter.get()).isEqualTo(1338); assertThat(counter.getAndIncrement()).isEqualTo(1338); counter.shutdown(JobStatus.FINISHED); } /** Task repeatedly incrementing the {@link CheckpointIDCounter}. */ private static class Incrementer implements Callable<List<Long>> { /** Total number of {@link CheckpointIDCounter private static final int NumIncrements = 128; private final CountDownLatch startLatch; private final CheckpointIDCounter counter; public Incrementer(CountDownLatch startLatch, CheckpointIDCounter counter) { this.startLatch = startLatch; this.counter = counter; } @Override public List<Long> call() throws Exception { final Random rand = new Random(); final List<Long> counts = new ArrayList<>(); this.startLatch.await(); for (int i = 0; i < NumIncrements; i++) { counts.add(counter.getAndIncrement()); Thread.sleep(rand.nextInt(20)); } return counts; } } }
class CheckpointIDCounterTestBase { protected abstract CheckpointIDCounter createCheckpointIdCounter() throws Exception; /** * This test guards an assumption made in the notifications in the {@link * org.apache.flink.runtime.operators.coordination.OperatorCoordinator}. The coordinator is * notified of a reset/restore and if no checkpoint yet exists (failure was before the first * checkpoint), a negative ID is passed. */ @Test public void testCounterIsNeverNegative() throws Exception { final CheckpointIDCounter counter = createCheckpointIdCounter(); try { counter.start(); assertThat(counter.get()).isGreaterThanOrEqualTo(0L); } finally { counter.shutdown(JobStatus.FINISHED); } } /** Tests serial increment and get calls. */ @Test public void testSerialIncrementAndGet() throws Exception { final CheckpointIDCounter counter = createCheckpointIdCounter(); try { counter.start(); assertThat(counter.getAndIncrement()).isEqualTo(1); assertThat(counter.get()).isEqualTo(2); assertThat(counter.getAndIncrement()).isEqualTo(2); assertThat(counter.get()).isEqualTo(3); assertThat(counter.getAndIncrement()).isEqualTo(3); assertThat(counter.get()).isEqualTo(4); assertThat(counter.getAndIncrement()).isEqualTo(4); } finally { counter.shutdown(JobStatus.FINISHED); } } /** * Tests concurrent increment and get calls from multiple Threads and verifies that the numbers * counts strictly increasing. 
*/ @Test private static void assertStrictlyMonotonous(List<Long> checkpointIds) { long current = -1; for (long checkpointId : checkpointIds) { assertThat(current).isLessThan(checkpointId); current = checkpointId; } } /** Tests a simple {@link CheckpointIDCounter @Test public void testSetCount() throws Exception { final CheckpointIDCounter counter = createCheckpointIdCounter(); counter.start(); counter.setCount(1337); assertThat(counter.get()).isEqualTo(1337); assertThat(counter.getAndIncrement()).isEqualTo(1337); assertThat(counter.get()).isEqualTo(1338); assertThat(counter.getAndIncrement()).isEqualTo(1338); counter.shutdown(JobStatus.FINISHED); } /** Task repeatedly incrementing the {@link CheckpointIDCounter}. */ private static class Incrementer implements Callable<List<Long>> { /** Total number of {@link CheckpointIDCounter private static final int NumIncrements = 128; private final CountDownLatch startLatch; private final CheckpointIDCounter counter; public Incrementer(CountDownLatch startLatch, CheckpointIDCounter counter) { this.startLatch = startLatch; this.counter = counter; } @Override public List<Long> call() throws Exception { final Random rand = new Random(); final List<Long> counts = new ArrayList<>(); this.startLatch.await(); for (int i = 0; i < NumIncrements; i++) { counts.add(counter.getAndIncrement()); Thread.sleep(rand.nextInt(20)); } return counts; } } }
Maybe `TableExtractor` could provide a `extractTablesFromDML` or `extractTablesFromSQLStatement` function to hide these calls.
private Collection<SimpleTableSegment> extractTablesFromExplain(final ExplainStatement sqlStatement) { Collection<SimpleTableSegment> result = new LinkedList<>(); ExplainStatementHandler.getSimpleTableSegment(sqlStatement).ifPresent(result::add); SQLStatement explainableStatement = sqlStatement.getStatement().orElse(null); TableExtractor extractor = new TableExtractor(); if (explainableStatement instanceof SelectStatement) { extractor.extractTablesFromSelect((SelectStatement) explainableStatement); } else if (explainableStatement instanceof InsertStatement) { extractor.extractTablesFromInsert((InsertStatement) explainableStatement); } else if (explainableStatement instanceof UpdateStatement) { extractor.extractTablesFromUpdate((UpdateStatement) explainableStatement); } else if (explainableStatement instanceof DeleteStatement) { extractor.extractTablesFromDelete((DeleteStatement) explainableStatement); } result.addAll(extractor.getRewriteTables()); return result; }
extractor.extractTablesFromDelete((DeleteStatement) explainableStatement);
private Collection<SimpleTableSegment> extractTablesFromExplain(final ExplainStatement sqlStatement) { Collection<SimpleTableSegment> result = new LinkedList<>(); ExplainStatementHandler.getSimpleTableSegment(sqlStatement).ifPresent(result::add); SQLStatement explainableStatement = sqlStatement.getStatement().orElse(null); TableExtractor extractor = new TableExtractor(); extractor.extractTablesFromSQLStatement(explainableStatement); result.addAll(extractor.getRewriteTables()); return result; }
class ExplainStatementContext extends CommonSQLStatementContext<ExplainStatement> implements TableAvailable { private final TablesContext tablesContext; public ExplainStatementContext(final ExplainStatement sqlStatement) { super(sqlStatement); tablesContext = new TablesContext(extractTablesFromExplain(sqlStatement)); } @Override public Collection<SimpleTableSegment> getAllTables() { return extractTablesFromExplain(getSqlStatement()); } }
class ExplainStatementContext extends CommonSQLStatementContext<ExplainStatement> implements TableAvailable { private final TablesContext tablesContext; public ExplainStatementContext(final ExplainStatement sqlStatement) { super(sqlStatement); tablesContext = new TablesContext(extractTablesFromExplain(sqlStatement)); } @Override public Collection<SimpleTableSegment> getAllTables() { return extractTablesFromExplain(getSqlStatement()); } }
thank you, this operator chain looks what we want!
Mono<ServiceBusReceiveLink> getActiveLink() { if (this.receiveLink != null) { return Mono.just(this.receiveLink); } return Mono.defer(() -> createSessionReceiveLink() .flatMap(link -> link.getEndpointStates() .filter(e -> e == AmqpEndpointState.ACTIVE) .next() .switchIfEmpty(Mono.error(() -> new AmqpException(true, "Session receive link completed without being active", null))) .timeout(operationTimeout) .then(Mono.just(link)))) .retryWhen(Retry.from(retrySignals -> retrySignals.flatMap(signal -> { final Throwable failure = signal.failure(); LOGGER.atInfo() .addKeyValue(ENTITY_PATH_KEY, entityPath) .addKeyValue("attempt", signal.totalRetriesInARow()) .log("Error occurred while getting unnamed session.", failure); if (isDisposed.get()) { return Mono.<Long>error(new AmqpException(false, "SessionManager is already disposed.", failure, getErrorContext())); } else if (failure instanceof TimeoutException) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else if (failure instanceof AmqpException && ((AmqpException) failure).getErrorCondition() == AmqpErrorCondition.TIMEOUT_ERROR) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else if (failure instanceof AmqpException && ((AmqpException) failure).isTransient()) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else { return Mono.<Long>error(failure); } }))); }
new AmqpException(true, "Session receive link completed without being active", null)))
Mono<ServiceBusReceiveLink> getActiveLink() { if (this.receiveLink != null) { return Mono.just(this.receiveLink); } return Mono.defer(() -> createSessionReceiveLink() .flatMap(link -> link.getEndpointStates() .filter(e -> e == AmqpEndpointState.ACTIVE) .next() .switchIfEmpty(Mono.error(() -> new AmqpException(true, "Session receive link completed without being active", null))) .timeout(operationTimeout) .then(Mono.just(link)))) .retryWhen(Retry.from(retrySignals -> retrySignals.flatMap(signal -> { final Throwable failure = signal.failure(); LOGGER.atInfo() .addKeyValue(ENTITY_PATH_KEY, entityPath) .addKeyValue("attempt", signal.totalRetriesInARow()) .log("Error occurred while getting unnamed session.", failure); if (isDisposed.get()) { return Mono.<Long>error(new AmqpException(false, "SessionManager is already disposed.", failure, getErrorContext())); } else if (failure instanceof TimeoutException) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else if (failure instanceof AmqpException && ((AmqpException) failure).getErrorCondition() == AmqpErrorCondition.TIMEOUT_ERROR) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else { final long id = System.nanoTime(); LOGGER.atInfo() .addKeyValue(TRACKING_ID_KEY, id) .log("Unable to acquire new session.", failure); return Mono.<Long>error(failure) .publishOn(Schedulers.boundedElastic()) .doOnError(e -> LOGGER.atInfo() .addKeyValue(TRACKING_ID_KEY, id) .log("Emitting the error signal received for session acquire attempt.", e) ); } }))); }
class ServiceBusSessionManager implements AutoCloseable { private static final Duration SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION = Duration.ofMinutes(1); private static final ClientLogger LOGGER = new ClientLogger(ServiceBusSessionManager.class); private final String entityPath; private final MessagingEntityType entityType; private final ReceiverOptions receiverOptions; private final ServiceBusReceiveLink receiveLink; private final ServiceBusConnectionProcessor connectionProcessor; private final Duration operationTimeout; private final MessageSerializer messageSerializer; private final String identifier; private final AtomicBoolean isDisposed = new AtomicBoolean(); private final AtomicBoolean isStarted = new AtomicBoolean(); private final List<Scheduler> schedulers; private final Deque<Scheduler> availableSchedulers = new ConcurrentLinkedDeque<>(); private final Duration maxSessionLockRenewDuration; /** * SessionId to receiver mapping. */ private final ConcurrentHashMap<String, ServiceBusSessionReceiver> sessionReceivers = new ConcurrentHashMap<>(); private final EmitterProcessor<Flux<ServiceBusMessageContext>> processor; private final FluxSink<Flux<ServiceBusMessageContext>> sessionReceiveSink; private volatile Flux<ServiceBusMessageContext> receiveFlux; ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, ServiceBusReceiveLink receiveLink, String identifier) { this.entityPath = entityPath; this.entityType = entityType; this.receiverOptions = receiverOptions; this.connectionProcessor = connectionProcessor; this.operationTimeout = connectionProcessor.getRetryOptions().getTryTimeout(); this.messageSerializer = messageSerializer; this.maxSessionLockRenewDuration = receiverOptions.getMaxLockRenewDuration(); this.identifier = identifier; final int numberOfSchedulers = receiverOptions.isRollingSessionReceiver() ? 
receiverOptions.getMaxConcurrentSessions() : 1; final List<Scheduler> schedulerList = IntStream.range(0, numberOfSchedulers) .mapToObj(index -> Schedulers.newBoundedElastic(DEFAULT_BOUNDED_ELASTIC_SIZE, DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "receiver-" + index)) .collect(Collectors.toList()); this.schedulers = Collections.unmodifiableList(schedulerList); this.availableSchedulers.addAll(this.schedulers); this.processor = EmitterProcessor.create(numberOfSchedulers, false); this.sessionReceiveSink = processor.sink(); this.receiveLink = receiveLink; } ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, String identifier) { this(entityPath, entityType, connectionProcessor, messageSerializer, receiverOptions, null, identifier); } /** * Gets the link name with the matching {@code sessionId}. * * @param sessionId Session id to get link name for. * * @return The name of the link, or {@code null} if there is no open link with that {@code sessionId}. */ String getLinkName(String sessionId) { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); return receiver != null ? receiver.getLinkName() : null; } /** * Gets the identifier of the instance of {@link ServiceBusSessionManager}. * * @return The identifier that can identify the instance of {@link ServiceBusSessionManager}. */ public String getIdentifier() { return this.identifier; } /** * Gets the state of a session given its identifier. * * @param sessionId Identifier of session to get. * * @return The session state or an empty Mono if there is no state set for the session. * @throws IllegalStateException if the receiver is a non-session receiver. 
*/ Mono<byte[]> getSessionState(String sessionId) { return validateParameter(sessionId, "sessionId", "getSessionState").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.getSessionState(sessionId, associatedLinkName); })); } /** * Gets a stream of messages from different sessions. * * @return A Flux of messages merged from different sessions. */ Flux<ServiceBusMessageContext> receive() { if (!isStarted.getAndSet(true)) { this.sessionReceiveSink.onRequest(this::onSessionRequest); if (!receiverOptions.isRollingSessionReceiver()) { receiveFlux = getSession(schedulers.get(0), false); } else { receiveFlux = Flux.merge(processor, receiverOptions.getMaxConcurrentSessions()); } } return receiveFlux; } /** * Renews the session lock. * * @param sessionId Identifier of session to get. * * @return The next expiration time for the session lock. * @throws IllegalStateException if the receiver is a non-session receiver. */ Mono<OffsetDateTime> renewSessionLock(String sessionId) { return validateParameter(sessionId, "sessionId", "renewSessionLock").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.renewSessionLock(sessionId, associatedLinkName).handle((offsetDateTime, sink) -> { if (receiver != null) { receiver.setSessionLockedUntil(offsetDateTime); } sink.next(offsetDateTime); }); })); } /** * Tries to update the message disposition on a session aware receive link. * * @return {@code true} if the {@code lockToken} was updated on receive link. {@code false} otherwise. This means * there isn't an open link with that {@code sessionId}. 
*/ Mono<Boolean> updateDisposition(String lockToken, String sessionId, DispositionStatus dispositionStatus, Map<String, Object> propertiesToModify, String deadLetterReason, String deadLetterDescription, ServiceBusTransactionContext transactionContext) { final String operation = "updateDisposition"; return Mono.when( validateParameter(lockToken, "lockToken", operation), validateParameter(lockToken, "lockToken", operation), validateParameter(sessionId, "'sessionId'", operation)).then( Mono.defer(() -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); if (receiver == null || !receiver.containsLockToken(lockToken)) { return Mono.just(false); } final DeliveryState deliveryState = MessageUtils.getDeliveryState(dispositionStatus, deadLetterReason, deadLetterDescription, propertiesToModify, transactionContext); return receiver.updateDisposition(lockToken, deliveryState).thenReturn(true); })); } @Override public void close() { if (isDisposed.getAndSet(true)) { return; } final List<Mono<Void>> closeables = sessionReceivers.values().stream() .map(receiver -> receiver.closeAsync()) .collect(Collectors.toList()); Mono.when(closeables).block(operationTimeout); sessionReceiveSink.complete(); for (Scheduler scheduler : schedulers) { scheduler.dispose(); } } private AmqpErrorContext getErrorContext() { return new SessionErrorContext(connectionProcessor.getFullyQualifiedNamespace(), entityPath); } /** * Creates an session receive link. * * @return A Mono that completes with an session receive link. */ private Mono<ServiceBusReceiveLink> createSessionReceiveLink() { final String sessionId = receiverOptions.getSessionId(); final String linkName = (sessionId != null) ? sessionId : StringUtil.getRandomString("session-"); return connectionProcessor .flatMap(connection -> { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType, identifier, sessionId); }); } /** * Gets an active unnamed session link. 
* * @return A Mono that completes when an unnamed session becomes available. * @throws AmqpException if the session manager is already disposed. */ /** * Gets the next available unnamed session with the given receive options and publishes its contents on the given * {@code scheduler}. * * @param scheduler Scheduler to coordinate received methods on. * @param disposeOnIdle true to dispose receiver when it idles; false otherwise. * @return A Mono that completes with an unnamed session receiver. */ private Flux<ServiceBusMessageContext> getSession(Scheduler scheduler, boolean disposeOnIdle) { return getActiveLink().flatMap(link -> link.getSessionId() .map(sessionId -> sessionReceivers.compute(sessionId, (key, existing) -> { if (existing != null) { return existing; } return new ServiceBusSessionReceiver(link, messageSerializer, connectionProcessor.getRetryOptions(), receiverOptions.getPrefetchCount(), disposeOnIdle, scheduler, this::renewSessionLock, maxSessionLockRenewDuration); }))) .flatMapMany(sessionReceiver -> sessionReceiver.receive().doFinally(signalType -> { LOGGER.atVerbose() .addKeyValue(SESSION_ID_KEY, sessionReceiver.getSessionId()) .log("Closing session receiver."); availableSchedulers.push(scheduler); sessionReceivers.remove(sessionReceiver.getSessionId()); sessionReceiver.closeAsync().subscribe(); if (receiverOptions.isRollingSessionReceiver()) { onSessionRequest(1L); } })); } private Mono<ServiceBusManagementNode> getManagementNode() { return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType)); } /** * Emits a new unnamed active session when it becomes available. * * @param request Number of unnamed active sessions to emit. */ private void onSessionRequest(long request) { if (isDisposed.get()) { LOGGER.info("Session manager is disposed. 
Not emitting more unnamed sessions."); return; } LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("Requested unnamed sessions."); for (int i = 0; i < request; i++) { final Scheduler scheduler = availableSchedulers.poll(); if (scheduler == null) { if (request != Long.MAX_VALUE) { LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("There are no available schedulers to fetch."); } return; } Flux<ServiceBusMessageContext> session = getSession(scheduler, true); sessionReceiveSink.next(session); } } private <T> Mono<Void> validateParameter(T parameter, String parameterName, String operation) { if (isDisposed.get()) { return monoError(LOGGER, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, operation))); } else if (parameter == null) { return monoError(LOGGER, new NullPointerException(String.format("'%s' cannot be null.", parameterName))); } else if ((parameter instanceof String) && (((String) parameter).isEmpty())) { return monoError(LOGGER, new IllegalArgumentException(String.format("'%s' cannot be an empty string.", parameterName))); } else { return Mono.empty(); } } }
class ServiceBusSessionManager implements AutoCloseable { private static final Duration SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION = Duration.ofMinutes(1); private static final String TRACKING_ID_KEY = "trackingId"; private static final ClientLogger LOGGER = new ClientLogger(ServiceBusSessionManager.class); private final String entityPath; private final MessagingEntityType entityType; private final ReceiverOptions receiverOptions; private final ServiceBusReceiveLink receiveLink; private final ServiceBusConnectionProcessor connectionProcessor; private final Duration operationTimeout; private final MessageSerializer messageSerializer; private final String identifier; private final AtomicBoolean isDisposed = new AtomicBoolean(); private final AtomicBoolean isStarted = new AtomicBoolean(); private final List<Scheduler> schedulers; private final Deque<Scheduler> availableSchedulers = new ConcurrentLinkedDeque<>(); private final Duration maxSessionLockRenewDuration; /** * SessionId to receiver mapping. 
*/ private final ConcurrentHashMap<String, ServiceBusSessionReceiver> sessionReceivers = new ConcurrentHashMap<>(); private final EmitterProcessor<Flux<ServiceBusMessageContext>> processor; private final FluxSink<Flux<ServiceBusMessageContext>> sessionReceiveSink; private volatile Flux<ServiceBusMessageContext> receiveFlux; ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, ServiceBusReceiveLink receiveLink, String identifier) { this.entityPath = entityPath; this.entityType = entityType; this.receiverOptions = receiverOptions; this.connectionProcessor = connectionProcessor; this.operationTimeout = connectionProcessor.getRetryOptions().getTryTimeout(); this.messageSerializer = messageSerializer; this.maxSessionLockRenewDuration = receiverOptions.getMaxLockRenewDuration(); this.identifier = identifier; final int numberOfSchedulers = receiverOptions.isRollingSessionReceiver() ? receiverOptions.getMaxConcurrentSessions() : 1; final List<Scheduler> schedulerList = IntStream.range(0, numberOfSchedulers) .mapToObj(index -> Schedulers.newBoundedElastic(DEFAULT_BOUNDED_ELASTIC_SIZE, DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "receiver-" + index)) .collect(Collectors.toList()); this.schedulers = Collections.unmodifiableList(schedulerList); this.availableSchedulers.addAll(this.schedulers); this.processor = EmitterProcessor.create(numberOfSchedulers, false); this.sessionReceiveSink = processor.sink(); this.receiveLink = receiveLink; } ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, String identifier) { this(entityPath, entityType, connectionProcessor, messageSerializer, receiverOptions, null, identifier); } /** * Gets the link name with the matching {@code sessionId}. 
* * @param sessionId Session id to get link name for. * * @return The name of the link, or {@code null} if there is no open link with that {@code sessionId}. */ String getLinkName(String sessionId) { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); return receiver != null ? receiver.getLinkName() : null; } /** * Gets the identifier of the instance of {@link ServiceBusSessionManager}. * * @return The identifier that can identify the instance of {@link ServiceBusSessionManager}. */ public String getIdentifier() { return this.identifier; } /** * Gets the state of a session given its identifier. * * @param sessionId Identifier of session to get. * * @return The session state or an empty Mono if there is no state set for the session. * @throws IllegalStateException if the receiver is a non-session receiver. */ Mono<byte[]> getSessionState(String sessionId) { return validateParameter(sessionId, "sessionId", "getSessionState").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.getSessionState(sessionId, associatedLinkName); })); } /** * Gets a stream of messages from different sessions. * * @return A Flux of messages merged from different sessions. */ Flux<ServiceBusMessageContext> receive() { if (!isStarted.getAndSet(true)) { this.sessionReceiveSink.onRequest(this::onSessionRequest); if (!receiverOptions.isRollingSessionReceiver()) { receiveFlux = getSession(schedulers.get(0), false); } else { receiveFlux = Flux.merge(processor, receiverOptions.getMaxConcurrentSessions()); } } return receiveFlux; } /** * Renews the session lock. * * @param sessionId Identifier of session to get. * * @return The next expiration time for the session lock. * @throws IllegalStateException if the receiver is a non-session receiver. 
*/ Mono<OffsetDateTime> renewSessionLock(String sessionId) { return validateParameter(sessionId, "sessionId", "renewSessionLock").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.renewSessionLock(sessionId, associatedLinkName).handle((offsetDateTime, sink) -> { if (receiver != null) { receiver.setSessionLockedUntil(offsetDateTime); } sink.next(offsetDateTime); }); })); } /** * Tries to update the message disposition on a session aware receive link. * * @return {@code true} if the {@code lockToken} was updated on receive link. {@code false} otherwise. This means * there isn't an open link with that {@code sessionId}. */ Mono<Boolean> updateDisposition(String lockToken, String sessionId, DispositionStatus dispositionStatus, Map<String, Object> propertiesToModify, String deadLetterReason, String deadLetterDescription, ServiceBusTransactionContext transactionContext) { final String operation = "updateDisposition"; return Mono.when( validateParameter(lockToken, "lockToken", operation), validateParameter(lockToken, "lockToken", operation), validateParameter(sessionId, "'sessionId'", operation)).then( Mono.defer(() -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); if (receiver == null || !receiver.containsLockToken(lockToken)) { return Mono.just(false); } final DeliveryState deliveryState = MessageUtils.getDeliveryState(dispositionStatus, deadLetterReason, deadLetterDescription, propertiesToModify, transactionContext); return receiver.updateDisposition(lockToken, deliveryState).thenReturn(true); })); } @Override public void close() { if (isDisposed.getAndSet(true)) { return; } final List<Mono<Void>> closeables = sessionReceivers.values().stream() .map(receiver -> receiver.closeAsync()) .collect(Collectors.toList()); Mono.when(closeables).block(operationTimeout); 
sessionReceiveSink.complete(); for (Scheduler scheduler : schedulers) { scheduler.dispose(); } } private AmqpErrorContext getErrorContext() { return new SessionErrorContext(connectionProcessor.getFullyQualifiedNamespace(), entityPath); } /** * Creates an session receive link. * * @return A Mono that completes with an session receive link. */ private Mono<ServiceBusReceiveLink> createSessionReceiveLink() { final String sessionId = receiverOptions.getSessionId(); final String linkName = (sessionId != null) ? sessionId : StringUtil.getRandomString("session-"); return connectionProcessor .flatMap(connection -> { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType, identifier, sessionId); }); } /** * Gets an active unnamed session link. * * @return A Mono that completes when an unnamed session becomes available. * @throws AmqpException if the session manager is already disposed. */ /** * Gets the next available unnamed session with the given receive options and publishes its contents on the given * {@code scheduler}. * * @param scheduler Scheduler to coordinate received methods on. * @param disposeOnIdle true to dispose receiver when it idles; false otherwise. * @return A Mono that completes with an unnamed session receiver. 
*/ private Flux<ServiceBusMessageContext> getSession(Scheduler scheduler, boolean disposeOnIdle) { return getActiveLink().flatMap(link -> link.getSessionId() .map(sessionId -> sessionReceivers.compute(sessionId, (key, existing) -> { if (existing != null) { return existing; } return new ServiceBusSessionReceiver(link, messageSerializer, connectionProcessor.getRetryOptions(), receiverOptions.getPrefetchCount(), disposeOnIdle, scheduler, this::renewSessionLock, maxSessionLockRenewDuration); }))) .flatMapMany(sessionReceiver -> sessionReceiver.receive().doFinally(signalType -> { LOGGER.atVerbose() .addKeyValue(SESSION_ID_KEY, sessionReceiver.getSessionId()) .log("Closing session receiver."); availableSchedulers.push(scheduler); sessionReceivers.remove(sessionReceiver.getSessionId()); sessionReceiver.closeAsync().subscribe(); if (receiverOptions.isRollingSessionReceiver()) { onSessionRequest(1L); } })); } private Mono<ServiceBusManagementNode> getManagementNode() { return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType)); } /** * Emits a new unnamed active session when it becomes available. * * @param request Number of unnamed active sessions to emit. */ private void onSessionRequest(long request) { if (isDisposed.get()) { LOGGER.info("Session manager is disposed. 
Not emitting more unnamed sessions."); return; } LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("Requested unnamed sessions."); for (int i = 0; i < request; i++) { final Scheduler scheduler = availableSchedulers.poll(); if (scheduler == null) { if (request != Long.MAX_VALUE) { LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("There are no available schedulers to fetch."); } return; } Flux<ServiceBusMessageContext> session = getSession(scheduler, true); sessionReceiveSink.next(session); } } private <T> Mono<Void> validateParameter(T parameter, String parameterName, String operation) { if (isDisposed.get()) { return monoError(LOGGER, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, operation))); } else if (parameter == null) { return monoError(LOGGER, new NullPointerException(String.format("'%s' cannot be null.", parameterName))); } else if ((parameter instanceof String) && (((String) parameter).isEmpty())) { return monoError(LOGGER, new IllegalArgumentException(String.format("'%s' cannot be an empty string.", parameterName))); } else { return Mono.empty(); } } }
Yes, according to the design doc, it has to go as POST.
private Mono<RxDocumentServiceResponse> deleteByPartitionKey(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.POST); }
return this.performRequest(request, HttpMethod.POST);
private Mono<RxDocumentServiceResponse> deleteByPartitionKey(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.POST); }
class RxGatewayStoreModel implements RxStoreModel { private final static byte[] EMPTY_BYTE_ARRAY = {}; private final DiagnosticsClientContext clientContext; private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class); private final Map<String, String> defaultHeaders; private final HttpClient httpClient; private final QueryCompatibilityMode queryCompatibilityMode; private final GlobalEndpointManager globalEndpointManager; private ConsistencyLevel defaultConsistencyLevel; private ISessionContainer sessionContainer; private ThroughputControlStore throughputControlStore; public RxGatewayStoreModel( DiagnosticsClientContext clientContext, ISessionContainer sessionContainer, ConsistencyLevel defaultConsistencyLevel, QueryCompatibilityMode queryCompatibilityMode, UserAgentContainer userAgentContainer, GlobalEndpointManager globalEndpointManager, HttpClient httpClient) { this.clientContext = clientContext; this.defaultHeaders = new HashMap<>(); this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL, "no-cache"); this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION, HttpConstants.Versions.CURRENT_VERSION); if (userAgentContainer == null) { userAgentContainer = new UserAgentContainer(); } this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent()); if (defaultConsistencyLevel != null) { this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, defaultConsistencyLevel.toString()); } this.defaultConsistencyLevel = defaultConsistencyLevel; this.globalEndpointManager = globalEndpointManager; this.queryCompatibilityMode = queryCompatibilityMode; this.httpClient = httpClient; this.sessionContainer = sessionContainer; } private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.POST); } private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.PATCH); } private 
Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.POST); } private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.GET); } private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.PUT); } private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.DELETE); } private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.POST); } private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.GET); } private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) { if(request.getOperationType() != OperationType.QueryPlan) { request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true"); } switch (this.queryCompatibilityMode) { case SqlQuery: request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.SQL); break; case Default: case Query: default: request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.QUERY_JSON); break; } return this.performRequest(request, HttpMethod.POST); } public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) { try { if (request.requestContext.cosmosDiagnostics == null) { request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics(); } URI uri = getUri(request); request.requestContext.resourcePhysicalAddress = uri.toString(); if (this.throughputControlStore != null) { return this.throughputControlStore.processRequest(request, performRequestInternal(request, method, uri)); } return this.performRequestInternal(request, method, uri); } catch (Exception e) { return 
Mono.error(e); } } /** * Given the request it creates an flux which upon subscription issues HTTP call and emits one RxDocumentServiceResponse. * * @param request * @param method * @param requestUri * @return Flux<RxDocumentServiceResponse> */ public Mono<RxDocumentServiceResponse> performRequestInternal(RxDocumentServiceRequest request, HttpMethod method, URI requestUri) { try { HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders()); Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux(); HttpRequest httpRequest = new HttpRequest(method, requestUri, requestUri.getPort(), httpHeaders, contentAsByteArray); Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds()); if (OperationType.QueryPlan.equals(request.getOperationType())) { responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds()); } else if (request.isAddressRefresh()) { responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds()); } Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout); return toDocumentServiceResponse(httpResponseMono, request, httpRequest); } catch (Exception e) { return Mono.error(e); } } private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) { HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size()); for (Entry<String, String> entry : this.defaultHeaders.entrySet()) { if (!headers.containsKey(entry.getKey())) { httpHeaders.set(entry.getKey(), entry.getValue()); } } if (headers != null) { for (Entry<String, String> entry : headers.entrySet()) { if (entry.getValue() == null) { httpHeaders.set(entry.getKey(), ""); } else { httpHeaders.set(entry.getKey(), entry.getValue()); } } } return httpHeaders; } private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException { URI rootUri = request.getEndpointOverride(); if (rootUri == null) { if (request.getIsMedia()) { rootUri = 
this.globalEndpointManager.getWriteEndpoints().get(0); } else { rootUri = this.globalEndpointManager.resolveServiceEndpoint(request); } } String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed); if(request.getResourceType().equals(ResourceType.DatabaseAccount)) { path = StringUtils.EMPTY; } return new URI("https", null, rootUri.getHost(), rootUri.getPort(), ensureSlashPrefixed(path), null, null); } private String ensureSlashPrefixed(String path) { if (path == null) { return null; } if (path.startsWith("/")) { return path; } return "/" + path; } /** * Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable. * * * Once the customer code subscribes to the observable returned by the CRUD APIs, * the subscription goes up till it reaches the source reactor netty's observable, and at that point the HTTP invocation will be made. * * @param httpResponseMono * @param request * @return {@link Mono} */ private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono, RxDocumentServiceRequest request, HttpRequest httpRequest) { return httpResponseMono.flatMap(httpResponse -> { HttpHeaders httpResponseHeaders = httpResponse.headers(); int httpResponseStatus = httpResponse.statusCode(); Mono<byte[]> contentObservable = httpResponse .bodyAsByteArray() .switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY)); return contentObservable .map(content -> { ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord(); if (reactorNettyRequestRecord != null) { reactorNettyRequestRecord.setTimeCompleted(Instant.now()); BridgeInternal.setGatewayRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics, reactorNettyRequestRecord.takeTimelineSnapshot()); } validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content); StoreResponse rsp = new StoreResponse(httpResponseStatus, 
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()), content); DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot()); if (request.requestContext.cosmosDiagnostics != null) { BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null); DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics); } return rsp; }) .single(); }).map(rsp -> { if (httpRequest.reactorNettyRequestRecord() != null) { return new RxDocumentServiceResponse(this.clientContext, rsp, httpRequest.reactorNettyRequestRecord().takeTimelineSnapshot()); } else { return new RxDocumentServiceResponse(this.clientContext, rsp); } }).onErrorResume(throwable -> { Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable); if (!(unwrappedException instanceof Exception)) { logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException); return Mono.error(unwrappedException); } Exception exception = (Exception) unwrappedException; CosmosException dce; if (!(exception instanceof CosmosException)) { logger.error("Network failure", exception); dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception); BridgeInternal.setRequestHeaders(dce, request.getHeaders()); } else { dce = (CosmosException) exception; } if (WebExceptionUtility.isNetworkFailure(dce)) { if (WebExceptionUtility.isReadTimeoutException(dce)) { BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT); } else { BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE); } } if (request.requestContext.cosmosDiagnostics != null) { if (BridgeInternal.getClientSideRequestStatics(request.requestContext.cosmosDiagnostics).getGatewayRequestTimeline() == null && httpRequest.reactorNettyRequestRecord() != null) { 
BridgeInternal.setGatewayRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics, httpRequest.reactorNettyRequestRecord().takeTimelineSnapshot()); } BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce); BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics); } return Mono.error(dce); }); } private void validateOrThrow(RxDocumentServiceRequest request, HttpResponseStatus status, HttpHeaders headers, byte[] bodyAsBytes) { int statusCode = status.code(); if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) { String statusCodeString = status.reasonPhrase() != null ? status.reasonPhrase().replace(" ", "") : ""; String body = bodyAsBytes != null ? new String(bodyAsBytes) : null; CosmosError cosmosError; cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError(); cosmosError = new CosmosError(statusCodeString, String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString), cosmosError.getPartitionedQueryExecutionInfo()); CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap()); BridgeInternal.setRequestHeaders(dce, request.getHeaders()); throw dce; } } private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) { switch (request.getOperationType()) { case Create: case Batch: return this.create(request); case Patch: return this.patch(request); case Upsert: return this.upsert(request); case Delete: if (request.getResourceType() == ResourceType.PartitionKey) { return this.deleteByPartitionKey(request); } return this.delete(request); case ExecuteJavaScript: return this.execute(request); case Read: return this.read(request); case ReadFeed: return this.readFeed(request); case Replace: return this.replace(request); case SqlQuery: case Query: case QueryPlan: return this.query(request); default: 
throw new IllegalStateException("Unknown operation setType " + request.getOperationType()); } } private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) { Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single(); return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy(BridgeInternal.getRetryContext(request.requestContext.cosmosDiagnostics))); } @Override public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) { this.applySessionToken(request); Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request); return responseObs.onErrorResume( e -> { CosmosException dce = Utils.as(e, CosmosException.class); if (dce == null) { logger.error("unexpected failure {}", e.getMessage(), e); return Mono.error(e); } if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) && (dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED || dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT || ( dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND && !Exceptions.isSubStatusCode(dce, HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) { this.captureSessionToken(request, dce.getResponseHeaders()); } if (Exceptions.isThroughputControlRequestRateTooLargeException(dce)) { BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce); BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics); } return Mono.error(dce); } ).map(response -> { this.captureSessionToken(request, response.getResponseHeaders()); return response; } ); } @Override public void enableThroughputControl(ThroughputControlStore throughputControlStore) { } private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) { if (request.getResourceType() == ResourceType.DocumentCollection && request.getOperationType() == 
OperationType.Delete) { String resourceId; if (request.getIsNameBased()) { resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID); } else { resourceId = request.getResourceId(); } this.sessionContainer.clearTokenByResourceId(resourceId); } else { this.sessionContainer.setSessionToken(request, responseHeaders); } } private void applySessionToken(RxDocumentServiceRequest request) { Map<String, String> headers = request.getHeaders(); Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null"); String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL); boolean sessionTokenApplicable = Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) || (this.defaultConsistencyLevel == ConsistencyLevel.SESSION && (!request.isReadOnlyRequest() || request.getResourceType() != ResourceType.Document || !Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString()))); if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) { if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) { request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN); } return; } if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) { return; } String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request); if (!Strings.isNullOrEmpty(sessionToken)) { headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken); } } private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) { return ReplicatedResourceClientUtils.isMasterResource(resourceType) || isStoredProcedureMasterOperation(resourceType, operationType) || operationType == OperationType.QueryPlan; } private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) { return resourceType 
== ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript; } }
class RxGatewayStoreModel implements RxStoreModel { private final static byte[] EMPTY_BYTE_ARRAY = {}; private final DiagnosticsClientContext clientContext; private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class); private final Map<String, String> defaultHeaders; private final HttpClient httpClient; private final QueryCompatibilityMode queryCompatibilityMode; private final GlobalEndpointManager globalEndpointManager; private ConsistencyLevel defaultConsistencyLevel; private ISessionContainer sessionContainer; private ThroughputControlStore throughputControlStore; public RxGatewayStoreModel( DiagnosticsClientContext clientContext, ISessionContainer sessionContainer, ConsistencyLevel defaultConsistencyLevel, QueryCompatibilityMode queryCompatibilityMode, UserAgentContainer userAgentContainer, GlobalEndpointManager globalEndpointManager, HttpClient httpClient) { this.clientContext = clientContext; this.defaultHeaders = new HashMap<>(); this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL, "no-cache"); this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION, HttpConstants.Versions.CURRENT_VERSION); if (userAgentContainer == null) { userAgentContainer = new UserAgentContainer(); } this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent()); if (defaultConsistencyLevel != null) { this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, defaultConsistencyLevel.toString()); } this.defaultConsistencyLevel = defaultConsistencyLevel; this.globalEndpointManager = globalEndpointManager; this.queryCompatibilityMode = queryCompatibilityMode; this.httpClient = httpClient; this.sessionContainer = sessionContainer; } private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.POST); } private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.PATCH); } private 
Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.POST); } private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.GET); } private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.PUT); } private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.DELETE); } private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.POST); } private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) { return this.performRequest(request, HttpMethod.GET); } private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) { if(request.getOperationType() != OperationType.QueryPlan) { request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true"); } switch (this.queryCompatibilityMode) { case SqlQuery: request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.SQL); break; case Default: case Query: default: request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.QUERY_JSON); break; } return this.performRequest(request, HttpMethod.POST); } public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) { try { if (request.requestContext.cosmosDiagnostics == null) { request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics(); } URI uri = getUri(request); request.requestContext.resourcePhysicalAddress = uri.toString(); if (this.throughputControlStore != null) { return this.throughputControlStore.processRequest(request, performRequestInternal(request, method, uri)); } return this.performRequestInternal(request, method, uri); } catch (Exception e) { return 
Mono.error(e); } } /** * Given the request it creates an flux which upon subscription issues HTTP call and emits one RxDocumentServiceResponse. * * @param request * @param method * @param requestUri * @return Flux<RxDocumentServiceResponse> */ public Mono<RxDocumentServiceResponse> performRequestInternal(RxDocumentServiceRequest request, HttpMethod method, URI requestUri) { try { HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders()); Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux(); HttpRequest httpRequest = new HttpRequest(method, requestUri, requestUri.getPort(), httpHeaders, contentAsByteArray); Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds()); if (OperationType.QueryPlan.equals(request.getOperationType())) { responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds()); } else if (request.isAddressRefresh()) { responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds()); } Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout); return toDocumentServiceResponse(httpResponseMono, request, httpRequest); } catch (Exception e) { return Mono.error(e); } } private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) { HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size()); for (Entry<String, String> entry : this.defaultHeaders.entrySet()) { if (!headers.containsKey(entry.getKey())) { httpHeaders.set(entry.getKey(), entry.getValue()); } } if (headers != null) { for (Entry<String, String> entry : headers.entrySet()) { if (entry.getValue() == null) { httpHeaders.set(entry.getKey(), ""); } else { httpHeaders.set(entry.getKey(), entry.getValue()); } } } return httpHeaders; } private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException { URI rootUri = request.getEndpointOverride(); if (rootUri == null) { if (request.getIsMedia()) { rootUri = 
this.globalEndpointManager.getWriteEndpoints().get(0); } else { rootUri = this.globalEndpointManager.resolveServiceEndpoint(request); } } String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed); if(request.getResourceType().equals(ResourceType.DatabaseAccount)) { path = StringUtils.EMPTY; } return new URI("https", null, rootUri.getHost(), rootUri.getPort(), ensureSlashPrefixed(path), null, null); } private String ensureSlashPrefixed(String path) { if (path == null) { return null; } if (path.startsWith("/")) { return path; } return "/" + path; } /** * Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable. * * * Once the customer code subscribes to the observable returned by the CRUD APIs, * the subscription goes up till it reaches the source reactor netty's observable, and at that point the HTTP invocation will be made. * * @param httpResponseMono * @param request * @return {@link Mono} */ private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono, RxDocumentServiceRequest request, HttpRequest httpRequest) { return httpResponseMono.flatMap(httpResponse -> { HttpHeaders httpResponseHeaders = httpResponse.headers(); int httpResponseStatus = httpResponse.statusCode(); Mono<byte[]> contentObservable = httpResponse .bodyAsByteArray() .switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY)); return contentObservable .map(content -> { ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord(); if (reactorNettyRequestRecord != null) { reactorNettyRequestRecord.setTimeCompleted(Instant.now()); BridgeInternal.setGatewayRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics, reactorNettyRequestRecord.takeTimelineSnapshot()); } validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content); StoreResponse rsp = new StoreResponse(httpResponseStatus, 
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()), content); DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot()); if (request.requestContext.cosmosDiagnostics != null) { BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null); DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics); } return rsp; }) .single(); }).map(rsp -> { if (httpRequest.reactorNettyRequestRecord() != null) { return new RxDocumentServiceResponse(this.clientContext, rsp, httpRequest.reactorNettyRequestRecord().takeTimelineSnapshot()); } else { return new RxDocumentServiceResponse(this.clientContext, rsp); } }).onErrorResume(throwable -> { Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable); if (!(unwrappedException instanceof Exception)) { logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException); return Mono.error(unwrappedException); } Exception exception = (Exception) unwrappedException; CosmosException dce; if (!(exception instanceof CosmosException)) { logger.error("Network failure", exception); dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception); BridgeInternal.setRequestHeaders(dce, request.getHeaders()); } else { dce = (CosmosException) exception; } if (WebExceptionUtility.isNetworkFailure(dce)) { if (WebExceptionUtility.isReadTimeoutException(dce)) { BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT); } else { BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE); } } if (request.requestContext.cosmosDiagnostics != null) { if (BridgeInternal.getClientSideRequestStatics(request.requestContext.cosmosDiagnostics).getGatewayRequestTimeline() == null && httpRequest.reactorNettyRequestRecord() != null) { 
BridgeInternal.setGatewayRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics, httpRequest.reactorNettyRequestRecord().takeTimelineSnapshot()); } BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce); BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics); } return Mono.error(dce); }); } private void validateOrThrow(RxDocumentServiceRequest request, HttpResponseStatus status, HttpHeaders headers, byte[] bodyAsBytes) { int statusCode = status.code(); if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) { String statusCodeString = status.reasonPhrase() != null ? status.reasonPhrase().replace(" ", "") : ""; String body = bodyAsBytes != null ? new String(bodyAsBytes) : null; CosmosError cosmosError; cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError(); cosmosError = new CosmosError(statusCodeString, String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString), cosmosError.getPartitionedQueryExecutionInfo()); CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap()); BridgeInternal.setRequestHeaders(dce, request.getHeaders()); throw dce; } } private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) { switch (request.getOperationType()) { case Create: case Batch: return this.create(request); case Patch: return this.patch(request); case Upsert: return this.upsert(request); case Delete: if (request.getResourceType() == ResourceType.PartitionKey) { return this.deleteByPartitionKey(request); } return this.delete(request); case ExecuteJavaScript: return this.execute(request); case Read: return this.read(request); case ReadFeed: return this.readFeed(request); case Replace: return this.replace(request); case SqlQuery: case Query: case QueryPlan: return this.query(request); default: 
throw new IllegalStateException("Unknown operation setType " + request.getOperationType()); } } private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) { Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single(); return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy(BridgeInternal.getRetryContext(request.requestContext.cosmosDiagnostics))); } @Override public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) { this.applySessionToken(request); Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request); return responseObs.onErrorResume( e -> { CosmosException dce = Utils.as(e, CosmosException.class); if (dce == null) { logger.error("unexpected failure {}", e.getMessage(), e); return Mono.error(e); } if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) && (dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED || dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT || ( dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND && !Exceptions.isSubStatusCode(dce, HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) { this.captureSessionToken(request, dce.getResponseHeaders()); } if (Exceptions.isThroughputControlRequestRateTooLargeException(dce)) { BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce); BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics); } return Mono.error(dce); } ).map(response -> { this.captureSessionToken(request, response.getResponseHeaders()); return response; } ); } @Override public void enableThroughputControl(ThroughputControlStore throughputControlStore) { } private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) { if (request.getResourceType() == ResourceType.DocumentCollection && request.getOperationType() == 
OperationType.Delete) { String resourceId; if (request.getIsNameBased()) { resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID); } else { resourceId = request.getResourceId(); } this.sessionContainer.clearTokenByResourceId(resourceId); } else { this.sessionContainer.setSessionToken(request, responseHeaders); } } private void applySessionToken(RxDocumentServiceRequest request) { Map<String, String> headers = request.getHeaders(); Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null"); String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL); boolean sessionTokenApplicable = Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) || (this.defaultConsistencyLevel == ConsistencyLevel.SESSION && (!request.isReadOnlyRequest() || request.getResourceType() != ResourceType.Document || !Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString()))); if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) { if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) { request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN); } return; } if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) { return; } String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request); if (!Strings.isNullOrEmpty(sessionToken)) { headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken); } } private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) { return ReplicatedResourceClientUtils.isMasterResource(resourceType) || isStoredProcedureMasterOperation(resourceType, operationType) || operationType == OperationType.QueryPlan; } private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) { return resourceType 
== ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript; } }
Ah yes, I can't read :man_facepalming:
private List<Path> getMatchingFiles(Instant from, Instant to) { Map<Path, Instant> paths = new HashMap<>(); try { Files.walkFileTree(logDirectory, new SimpleFileVisitor<>() { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) { return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { if (logFilePattern.matcher(file.getFileName().toString()).matches()) paths.put(file, attrs.lastModifiedTime().toInstant()); return FileVisitResult.CONTINUE; } @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) { return FileVisitResult.CONTINUE; } }); } catch (IOException e) { throw new UncheckedIOException(e); } List<Path> sorted = new ArrayList<>(); for (var entries = paths.entrySet().stream().sorted(comparing(Map.Entry::getValue)).iterator(); entries.hasNext(); ) { var entry = entries.next(); if (entry.getValue().isAfter(from)) sorted.add(entry.getKey()); if (entry.getValue().isAfter(to)) break; } return sorted; }
if (entry.getValue().isAfter(to))
private List<Path> getMatchingFiles(Instant from, Instant to) { Map<Path, Instant> paths = new HashMap<>(); try { Files.walkFileTree(logDirectory, new SimpleFileVisitor<>() { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) { return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { if (logFilePattern.matcher(file.getFileName().toString()).matches()) paths.put(file, attrs.lastModifiedTime().toInstant()); return FileVisitResult.CONTINUE; } @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) { return FileVisitResult.CONTINUE; } }); } catch (IOException e) { throw new UncheckedIOException(e); } List<Path> sorted = new ArrayList<>(); for (var entries = paths.entrySet().stream().sorted(comparing(Map.Entry::getValue)).iterator(); entries.hasNext(); ) { var entry = entries.next(); if (entry.getValue().isAfter(from)) sorted.add(entry.getKey()); if (entry.getValue().isAfter(to)) break; } return sorted; }
class LogReader { private final Path logDirectory; private final Pattern logFilePattern; LogReader(String logDirectory, String logFilePattern) { this(Paths.get(Defaults.getDefaults().underVespaHome(logDirectory)), Pattern.compile(logFilePattern)); } LogReader(Path logDirectory, Pattern logFilePattern) { this.logDirectory = logDirectory; this.logFilePattern = logFilePattern; } void writeLogs(OutputStream outputStream, Instant from, Instant to) { try { List<Path> logs = getMatchingFiles(from, to); for (int i = 0; i < logs.size(); i++) { Path log = logs.get(i); boolean zipped = log.toString().endsWith(".gz"); try (InputStream in = Files.newInputStream(log)) { InputStream inProxy; if (i == 0 || i == logs.size() - 1) { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); try (BufferedReader reader = new BufferedReader(new InputStreamReader(zipped ? new GZIPInputStream(in) : in, UTF_8)); BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(zipped ? new GZIPOutputStream(buffer) : buffer, UTF_8))) { for (String line; (line = reader.readLine()) != null; ) { String[] parts = line.split("\t"); if (parts.length != 7) continue; Instant at = Instant.EPOCH.plus((long) (Double.parseDouble(parts[0]) * 1_000_000), ChronoUnit.MICROS); if (at.isAfter(from) && ! at.isAfter(to)) { writer.write(line); writer.newLine(); } } } inProxy = new ByteArrayInputStream(buffer.toByteArray()); } else inProxy = in; if ( ! zipped && ! (outputStream instanceof GZIPOutputStream)) outputStream = new GZIPOutputStream(outputStream); inProxy.transferTo(outputStream); } } } catch (IOException e) { throw new UncheckedIOException(e); } finally { try { outputStream.close(); } catch (IOException e) { throw new UncheckedIOException(e); } } } /** Returns log files which may have relevant entries, sorted by modification time — the first and last must be filtered. */ }
class LogReader { private final Path logDirectory; private final Pattern logFilePattern; LogReader(String logDirectory, String logFilePattern) { this(Paths.get(Defaults.getDefaults().underVespaHome(logDirectory)), Pattern.compile(logFilePattern)); } LogReader(Path logDirectory, Pattern logFilePattern) { this.logDirectory = logDirectory; this.logFilePattern = logFilePattern; } void writeLogs(OutputStream outputStream, Instant from, Instant to) { try { List<Path> logs = getMatchingFiles(from, to); for (int i = 0; i < logs.size(); i++) { Path log = logs.get(i); boolean zipped = log.toString().endsWith(".gz"); try (InputStream in = Files.newInputStream(log)) { InputStream inProxy; if (i == 0 || i == logs.size() - 1) { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); try (BufferedReader reader = new BufferedReader(new InputStreamReader(zipped ? new GZIPInputStream(in) : in, UTF_8)); BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(zipped ? new GZIPOutputStream(buffer) : buffer, UTF_8))) { for (String line; (line = reader.readLine()) != null; ) { String[] parts = line.split("\t"); if (parts.length != 7) continue; Instant at = Instant.EPOCH.plus((long) (Double.parseDouble(parts[0]) * 1_000_000), ChronoUnit.MICROS); if (at.isAfter(from) && ! at.isAfter(to)) { writer.write(line); writer.newLine(); } } } inProxy = new ByteArrayInputStream(buffer.toByteArray()); } else inProxy = in; if ( ! zipped && ! (outputStream instanceof GZIPOutputStream)) outputStream = new GZIPOutputStream(outputStream); inProxy.transferTo(outputStream); } } } catch (IOException e) { throw new UncheckedIOException(e); } finally { try { outputStream.close(); } catch (IOException e) { throw new UncheckedIOException(e); } } } /** Returns log files which may have relevant entries, sorted by modification time — the first and last must be filtered. */ }
maybe ABORTED is better, didn't want to overload a status that is uses on the backend. should we add a new enum value?
public static CompleteCommit forFailedWork(Commit commit) { return create(commit, CommitStatus.DEFAULT); }
return create(commit, CommitStatus.DEFAULT);
public static CompleteCommit forFailedWork(Commit commit) { return create(commit, CommitStatus.ABORTED); }
class CompleteCommit { public static CompleteCommit create(Commit commit, CommitStatus commitStatus) { return new AutoValue_CompleteCommit( commit.computationId(), ShardedKey.create(commit.request().getKey(), commit.request().getShardingKey()), WorkId.builder() .setWorkToken(commit.request().getWorkToken()) .setCacheToken(commit.request().getCacheToken()) .build(), commitStatus); } public static CompleteCommit create( String computationId, ShardedKey shardedKey, WorkId workId, CommitStatus status) { return new AutoValue_CompleteCommit(computationId, shardedKey, workId, status); } public abstract String computationId(); public abstract ShardedKey shardedKey(); public abstract WorkId workId(); public abstract CommitStatus status(); }
class CompleteCommit { public static CompleteCommit create(Commit commit, CommitStatus commitStatus) { return new AutoValue_CompleteCommit( commit.computationId(), ShardedKey.create(commit.request().getKey(), commit.request().getShardingKey()), WorkId.builder() .setWorkToken(commit.request().getWorkToken()) .setCacheToken(commit.request().getCacheToken()) .build(), commitStatus); } public static CompleteCommit create( String computationId, ShardedKey shardedKey, WorkId workId, CommitStatus status) { return new AutoValue_CompleteCommit(computationId, shardedKey, workId, status); } public abstract String computationId(); public abstract ShardedKey shardedKey(); public abstract WorkId workId(); public abstract CommitStatus status(); }
I have added a new failure reason named `JOB_FAILOVER_REGION` for this scene.
private void restart(long globalModVersionOfFailover) { try { if (transitionState(JobStatus.CREATED, JobStatus.RUNNING)) { if (executionGraph.getCheckpointCoordinator() != null) { executionGraph.getCheckpointCoordinator().abortPendingCheckpoints(new CheckpointException(CheckpointFailureReason.JOB_FAILURE)); executionGraph.getCheckpointCoordinator().restoreLatestCheckpointedState( tasks, false, true); } HashSet<AllocationID> previousAllocationsInRegion = new HashSet<>(connectedExecutionVertexes.size()); for (ExecutionVertex connectedExecutionVertex : connectedExecutionVertexes) { AllocationID latestPriorAllocation = connectedExecutionVertex.getLatestPriorAllocation(); if (latestPriorAllocation != null) { previousAllocationsInRegion.add(latestPriorAllocation); } } for (ExecutionVertex ev : connectedExecutionVertexes) { try { ev.scheduleForExecution( executionGraph.getSlotProvider(), executionGraph.isQueuedSchedulingAllowed(), LocationPreferenceConstraint.ANY, previousAllocationsInRegion); } catch (Throwable e) { failover(globalModVersionOfFailover); } } } else { LOG.info("FailoverRegion {} switched from CREATED to RUNNING fail, will fail this region again.", id); failover(globalModVersionOfFailover); } } catch (Exception e) { LOG.info("FailoverRegion {} restart failed, failover again.", id, e); failover(globalModVersionOfFailover); } }
executionGraph.getCheckpointCoordinator().abortPendingCheckpoints(new CheckpointException(CheckpointFailureReason.JOB_FAILURE));
private void restart(long globalModVersionOfFailover) { try { if (transitionState(JobStatus.CREATED, JobStatus.RUNNING)) { if (executionGraph.getCheckpointCoordinator() != null) { executionGraph.getCheckpointCoordinator().abortPendingCheckpoints( new CheckpointException(CheckpointFailureReason.JOB_FAILOVER_REGION)); executionGraph.getCheckpointCoordinator().restoreLatestCheckpointedState( tasks, false, true); } HashSet<AllocationID> previousAllocationsInRegion = new HashSet<>(connectedExecutionVertexes.size()); for (ExecutionVertex connectedExecutionVertex : connectedExecutionVertexes) { AllocationID latestPriorAllocation = connectedExecutionVertex.getLatestPriorAllocation(); if (latestPriorAllocation != null) { previousAllocationsInRegion.add(latestPriorAllocation); } } for (ExecutionVertex ev : connectedExecutionVertexes) { try { ev.scheduleForExecution( executionGraph.getSlotProvider(), executionGraph.isQueuedSchedulingAllowed(), LocationPreferenceConstraint.ANY, previousAllocationsInRegion); } catch (Throwable e) { failover(globalModVersionOfFailover); } } } else { LOG.info("FailoverRegion {} switched from CREATED to RUNNING fail, will fail this region again.", id); failover(globalModVersionOfFailover); } } catch (Exception e) { LOG.info("FailoverRegion {} restart failed, failover again.", id, e); failover(globalModVersionOfFailover); } }
class FailoverRegion { private static final AtomicReferenceFieldUpdater<FailoverRegion, JobStatus> STATE_UPDATER = AtomicReferenceFieldUpdater.newUpdater(FailoverRegion.class, JobStatus.class, "state"); /** The log object used for debugging. */ private static final Logger LOG = LoggerFactory.getLogger(FailoverRegion.class); /** a unique id for debugging */ private final AbstractID id = new AbstractID(); private final ExecutionGraph executionGraph; private final List<ExecutionVertex> connectedExecutionVertexes; private final Map<JobVertexID, ExecutionJobVertex> tasks; /** Current status of the job execution */ private volatile JobStatus state = JobStatus.RUNNING; public FailoverRegion( ExecutionGraph executionGraph, List<ExecutionVertex> connectedExecutions, Map<JobVertexID, ExecutionJobVertex> tasks) { this.executionGraph = checkNotNull(executionGraph); this.connectedExecutionVertexes = checkNotNull(connectedExecutions); this.tasks = checkNotNull(tasks); LOG.debug("Created failover region {} with vertices: {}", id, connectedExecutions); } public void onExecutionFail(Execution taskExecution, Throwable cause) { if (!executionGraph.getRestartStrategy().canRestart()) { executionGraph.failGlobal(cause); } else { cancel(taskExecution.getGlobalModVersion()); } } private void allVerticesInTerminalState(long globalModVersionOfFailover) { while (true) { JobStatus curStatus = this.state; if (curStatus.equals(JobStatus.CANCELLING)) { if (transitionState(curStatus, JobStatus.CANCELED)) { reset(globalModVersionOfFailover); break; } } else { LOG.info("FailoverRegion {} is {} when allVerticesInTerminalState.", id, state); break; } } } public JobStatus getState() { return state; } private void failover(long globalModVersionOfFailover) { if (!executionGraph.getRestartStrategy().canRestart()) { executionGraph.failGlobal(new FlinkException("RestartStrategy validate fail")); } else { JobStatus curStatus = this.state; if (curStatus.equals(JobStatus.RUNNING)) { 
cancel(globalModVersionOfFailover); } else if (curStatus.equals(JobStatus.CANCELED)) { reset(globalModVersionOfFailover); } else { LOG.info("FailoverRegion {} is {} when notified to failover.", id, state); } } } private void cancel(final long globalModVersionOfFailover) { executionGraph.getJobMasterMainThreadExecutor().assertRunningInMainThread(); while (true) { JobStatus curStatus = this.state; if (curStatus.equals(JobStatus.RUNNING)) { if (transitionState(curStatus, JobStatus.CANCELLING)) { createTerminationFutureOverAllConnectedVertexes() .thenAccept((nullptr) -> allVerticesInTerminalState(globalModVersionOfFailover)); break; } } else { LOG.info("FailoverRegion {} is {} when cancel.", id, state); break; } } } @VisibleForTesting protected CompletableFuture<Void> createTerminationFutureOverAllConnectedVertexes() { final ArrayList<CompletableFuture<?>> futures = new ArrayList<>(connectedExecutionVertexes.size()); for (ExecutionVertex vertex : connectedExecutionVertexes) { futures.add(vertex.cancel()); } return FutureUtils.waitForAll(futures); } private void reset(long globalModVersionOfFailover) { try { final Collection<CoLocationGroup> colGroups = new HashSet<>(); final long restartTimestamp = System.currentTimeMillis(); for (ExecutionVertex ev : connectedExecutionVertexes) { CoLocationGroup cgroup = ev.getJobVertex().getCoLocationGroup(); if (cgroup != null && !colGroups.contains(cgroup)){ cgroup.resetConstraints(); colGroups.add(cgroup); } ev.resetForNewExecution(restartTimestamp, globalModVersionOfFailover); } if (transitionState(JobStatus.CANCELED, JobStatus.CREATED)) { restart(globalModVersionOfFailover); } else { LOG.info("FailoverRegion {} switched from CANCELLING to CREATED fail, will fail this region again.", id); failover(globalModVersionOfFailover); } } catch (GlobalModVersionMismatch e) { state = JobStatus.RUNNING; } catch (Throwable e) { LOG.info("FailoverRegion {} reset fail, will failover again.", id); failover(globalModVersionOfFailover); } } 
private boolean transitionState(JobStatus current, JobStatus newState) { if (STATE_UPDATER.compareAndSet(this, current, newState)) { LOG.info("FailoverRegion {} switched from state {} to {}.", id, current, newState); return true; } else { return false; } } }
class FailoverRegion { private static final AtomicReferenceFieldUpdater<FailoverRegion, JobStatus> STATE_UPDATER = AtomicReferenceFieldUpdater.newUpdater(FailoverRegion.class, JobStatus.class, "state"); /** The log object used for debugging. */ private static final Logger LOG = LoggerFactory.getLogger(FailoverRegion.class); /** a unique id for debugging */ private final AbstractID id = new AbstractID(); private final ExecutionGraph executionGraph; private final List<ExecutionVertex> connectedExecutionVertexes; private final Map<JobVertexID, ExecutionJobVertex> tasks; /** Current status of the job execution */ private volatile JobStatus state = JobStatus.RUNNING; public FailoverRegion( ExecutionGraph executionGraph, List<ExecutionVertex> connectedExecutions, Map<JobVertexID, ExecutionJobVertex> tasks) { this.executionGraph = checkNotNull(executionGraph); this.connectedExecutionVertexes = checkNotNull(connectedExecutions); this.tasks = checkNotNull(tasks); LOG.debug("Created failover region {} with vertices: {}", id, connectedExecutions); } public void onExecutionFail(Execution taskExecution, Throwable cause) { if (!executionGraph.getRestartStrategy().canRestart()) { executionGraph.failGlobal(cause); } else { cancel(taskExecution.getGlobalModVersion()); } } private void allVerticesInTerminalState(long globalModVersionOfFailover) { while (true) { JobStatus curStatus = this.state; if (curStatus.equals(JobStatus.CANCELLING)) { if (transitionState(curStatus, JobStatus.CANCELED)) { reset(globalModVersionOfFailover); break; } } else { LOG.info("FailoverRegion {} is {} when allVerticesInTerminalState.", id, state); break; } } } public JobStatus getState() { return state; } private void failover(long globalModVersionOfFailover) { if (!executionGraph.getRestartStrategy().canRestart()) { executionGraph.failGlobal(new FlinkException("RestartStrategy validate fail")); } else { JobStatus curStatus = this.state; if (curStatus.equals(JobStatus.RUNNING)) { 
cancel(globalModVersionOfFailover); } else if (curStatus.equals(JobStatus.CANCELED)) { reset(globalModVersionOfFailover); } else { LOG.info("FailoverRegion {} is {} when notified to failover.", id, state); } } } private void cancel(final long globalModVersionOfFailover) { executionGraph.getJobMasterMainThreadExecutor().assertRunningInMainThread(); while (true) { JobStatus curStatus = this.state; if (curStatus.equals(JobStatus.RUNNING)) { if (transitionState(curStatus, JobStatus.CANCELLING)) { createTerminationFutureOverAllConnectedVertexes() .thenAccept((nullptr) -> allVerticesInTerminalState(globalModVersionOfFailover)); break; } } else { LOG.info("FailoverRegion {} is {} when cancel.", id, state); break; } } } @VisibleForTesting protected CompletableFuture<Void> createTerminationFutureOverAllConnectedVertexes() { final ArrayList<CompletableFuture<?>> futures = new ArrayList<>(connectedExecutionVertexes.size()); for (ExecutionVertex vertex : connectedExecutionVertexes) { futures.add(vertex.cancel()); } return FutureUtils.waitForAll(futures); } private void reset(long globalModVersionOfFailover) { try { final Collection<CoLocationGroup> colGroups = new HashSet<>(); final long restartTimestamp = System.currentTimeMillis(); for (ExecutionVertex ev : connectedExecutionVertexes) { CoLocationGroup cgroup = ev.getJobVertex().getCoLocationGroup(); if (cgroup != null && !colGroups.contains(cgroup)){ cgroup.resetConstraints(); colGroups.add(cgroup); } ev.resetForNewExecution(restartTimestamp, globalModVersionOfFailover); } if (transitionState(JobStatus.CANCELED, JobStatus.CREATED)) { restart(globalModVersionOfFailover); } else { LOG.info("FailoverRegion {} switched from CANCELLING to CREATED fail, will fail this region again.", id); failover(globalModVersionOfFailover); } } catch (GlobalModVersionMismatch e) { state = JobStatus.RUNNING; } catch (Throwable e) { LOG.info("FailoverRegion {} reset fail, will failover again.", id); failover(globalModVersionOfFailover); } } 
private boolean transitionState(JobStatus current, JobStatus newState) { if (STATE_UPDATER.compareAndSet(this, current, newState)) { LOG.info("FailoverRegion {} switched from state {} to {}.", id, current, newState); return true; } else { return false; } } }
I don't think we can use `==` here
public static boolean decimalSubtypeContains(SubtypeData d, EnumerableDecimal f) { if (d instanceof AllOrNothingSubtype) { return ((AllOrNothingSubtype) d).isAllSubtype(); } DecimalSubtype v = (DecimalSubtype) d; for (EnumerableType val : v.values) { if (val == f) { return v.allowed; } } return !v.allowed; }
if (val == f) {
public static boolean decimalSubtypeContains(SubtypeData d, EnumerableDecimal f) { if (d instanceof AllOrNothingSubtype) { return ((AllOrNothingSubtype) d).isAllSubtype(); } DecimalSubtype v = (DecimalSubtype) d; for (EnumerableType val : v.values) { if (val == f) { return v.allowed; } } return !v.allowed; }
class DecimalSubtype extends EnumerableSubtype implements ProperSubtypeData { public boolean allowed; public EnumerableDecimal[] values; private DecimalSubtype(boolean allowed, EnumerableDecimal value) { this(allowed, new EnumerableDecimal[]{value}); } private DecimalSubtype(boolean allowed, EnumerableDecimal[] values) { this.allowed = allowed; this.values = values; } public static SemType decimalConst(BigDecimal value) { return PredefinedType.uniformSubtype(UniformTypeCode.UT_DECIMAL, new DecimalSubtype(true, EnumerableDecimal.from(value))); } public static Optional<BigDecimal> decimalSubtypeSingleValue(SubtypeData d) { if (d instanceof AllOrNothingSubtype) { return Optional.empty(); } DecimalSubtype f = (DecimalSubtype) d; if (f.allowed) { return Optional.empty(); } EnumerableDecimal[] values = f.values; if (values.length != 1) { return Optional.empty(); } return Optional.of(values[0].value); } public static SubtypeData createDecimalSubtype(boolean allowed, EnumerableDecimal[] values) { if (values.length == 0) { return new AllOrNothingSubtype(!allowed); } return new DecimalSubtype(allowed, values); } @Override public boolean allowed() { return allowed; } @Override public EnumerableType[] values() { return values; } @Override public String toString() { StringJoiner j = new StringJoiner(", ", "DecimalSubtype:" + (allowed ? "allowed[" : "disallowed["), "]"); for (EnumerableDecimal value : values) { j.add(String.valueOf(value.value)); } return j.toString(); } }
class DecimalSubtype extends EnumerableSubtype implements ProperSubtypeData { public boolean allowed; public EnumerableDecimal[] values; private DecimalSubtype(boolean allowed, EnumerableDecimal value) { this(allowed, new EnumerableDecimal[]{value}); } private DecimalSubtype(boolean allowed, EnumerableDecimal[] values) { this.allowed = allowed; this.values = values; } public static SemType decimalConst(BigDecimal value) { return PredefinedType.uniformSubtype(UniformTypeCode.UT_DECIMAL, new DecimalSubtype(true, EnumerableDecimal.from(value))); } public static Optional<BigDecimal> decimalSubtypeSingleValue(SubtypeData d) { if (d instanceof AllOrNothingSubtype) { return Optional.empty(); } DecimalSubtype f = (DecimalSubtype) d; if (!f.allowed) { return Optional.empty(); } EnumerableDecimal[] values = f.values; if (values.length != 1) { return Optional.empty(); } return Optional.of(values[0].value); } public static SubtypeData createDecimalSubtype(boolean allowed, EnumerableDecimal[] values) { if (values.length == 0) { return new AllOrNothingSubtype(!allowed); } return new DecimalSubtype(allowed, values); } @Override public boolean allowed() { return allowed; } @Override public EnumerableType[] values() { return values; } @Override public String toString() { StringJoiner j = new StringJoiner(", ", "DecimalSubtype:" + (allowed ? "allowed[" : "disallowed["), "]"); for (EnumerableDecimal value : values) { j.add(String.valueOf(value.value)); } return j.toString(); } }
Ok, I will check if the return value of `getCatalog` is null
public void run() { for (Map.Entry<Long, Integer[]> entry : refreshMap.entrySet()) { Long catalogId = entry.getKey(); Integer[] timeGroup = entry.getValue(); Integer original = timeGroup[0]; Integer current = timeGroup[1]; if (current - REFRESH_TIME > 0) { timeGroup[1] = current - REFRESH_TIME; refreshMap.put(catalogId, timeGroup); } else { String catalogName = Env.getCurrentEnv().getCatalogMgr().getCatalog(catalogId).getName(); RefreshCatalogStmt refreshCatalogStmt = new RefreshCatalogStmt(catalogName, null); try { DdlExecutor.execute(Env.getCurrentEnv(), refreshCatalogStmt); } catch (Exception e) { LOG.warn("failed to refresh catalog {}", catalogName, e); } timeGroup[1] = original; refreshMap.put(catalogId, timeGroup); } } }
String catalogName = Env.getCurrentEnv().getCatalogMgr().getCatalog(catalogId).getName();
public void run() { for (Map.Entry<Long, Integer[]> entry : refreshMap.entrySet()) { Long catalogId = entry.getKey(); Integer[] timeGroup = entry.getValue(); Integer original = timeGroup[0]; Integer current = timeGroup[1]; if (current - REFRESH_TIME_SEC > 0) { timeGroup[1] = current - REFRESH_TIME_SEC; refreshMap.put(catalogId, timeGroup); } else { CatalogIf catalog = Env.getCurrentEnv().getCatalogMgr().getCatalog(catalogId); if (catalog != null) { String catalogName = catalog.getName(); RefreshCatalogStmt refreshCatalogStmt = new RefreshCatalogStmt(catalogName, null); try { DdlExecutor.execute(Env.getCurrentEnv(), refreshCatalogStmt); } catch (Exception e) { LOG.warn("failed to refresh catalog {}", catalogName, e); } timeGroup[1] = original; refreshMap.put(catalogId, timeGroup); } } } }
class TaskRefresh implements Runnable { @Override }
class RefreshTask implements Runnable { @Override }
I saw you have tests with finish reasons `stop` and `length`. How about cases for `content_filter` or `null`? I actually don't know how useful those would be, I only found them when searching for `finish_reason` [values](https://platform.openai.com/docs/guides/chat/response-format) (which I could only find with an advanced Bing search query, does the OpenAI documentation really not have a "search" feature?)
static void assertCompletions(int choicesPerPrompt, Completions actual) { assertCompletions(choicesPerPrompt, "stop", actual); }
assertCompletions(choicesPerPrompt, "stop", actual);
static void assertCompletions(int choicesPerPrompt, Completions actual) { assertCompletions(choicesPerPrompt, "stop", actual); }
class OpenAIClientTestBase extends TestProxyTestBase { OpenAIClientBuilder getOpenAIClientBuilder(HttpClient httpClient, OpenAIServiceVersion serviceVersion) { OpenAIClientBuilder builder = new OpenAIClientBuilder() .httpClient(httpClient) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(serviceVersion); if (getTestMode() == TestMode.PLAYBACK) { builder .endpoint("https: .credential(new AzureKeyCredential(FAKE_API_KEY)); } else if (getTestMode() == TestMode.RECORD) { builder .addPolicy(interceptorManager.getRecordPolicy()) .endpoint(Configuration.getGlobalConfiguration().get("AZURE_OPENAI_ENDPOINT")) .credential(new AzureKeyCredential(Configuration.getGlobalConfiguration().get("AZURE_OPENAI_KEY"))); } else { builder .endpoint(Configuration.getGlobalConfiguration().get("AZURE_OPENAI_ENDPOINT")) .credential(new AzureKeyCredential(Configuration.getGlobalConfiguration().get("AZURE_OPENAI_KEY"))); } return builder; } OpenAIClientBuilder getNonAzureOpenAIClientBuilder(HttpClient httpClient) { OpenAIClientBuilder builder = new OpenAIClientBuilder() .httpClient(httpClient) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (getTestMode() == TestMode.PLAYBACK) { builder .credential(new NonAzureOpenAIKeyCredential(FAKE_API_KEY)); } else if (getTestMode() == TestMode.RECORD) { builder .addPolicy(interceptorManager.getRecordPolicy()) .credential(new NonAzureOpenAIKeyCredential(Configuration.getGlobalConfiguration().get("NON_AZURE_OPEN_AI_KEY"))); } else { builder .credential(new NonAzureOpenAIKeyCredential(Configuration.getGlobalConfiguration().get("NON_AZURE_OPEN_AI_KEY"))); } return builder; } @Test public abstract void getCompletions(HttpClient httpClient, OpenAIServiceVersion serviceVersion); @Test public abstract void getCompletionsWithResponse(HttpClient httpClient, OpenAIServiceVersion serviceVersion); @Test public abstract void getChatCompletions(HttpClient httpClient, 
OpenAIServiceVersion serviceVersion); @Test public abstract void getChatCompletionsWithResponse(HttpClient httpClient, OpenAIServiceVersion serviceVersion); @Test public abstract void getEmbeddings(HttpClient httpClient, OpenAIServiceVersion serviceVersion); @Test public abstract void getEmbeddingsWithResponse(HttpClient httpClient, OpenAIServiceVersion serviceVersion); void getCompletionsRunner(BiConsumer<String, List<String>> testRunner) { String deploymentId = "text-davinci-003"; List<String> prompt = new ArrayList<>(); prompt.add("Say this is a test"); testRunner.accept(deploymentId, prompt); } void getCompletionsFromSinglePromptRunner(BiConsumer<String, String> testRunner) { String deploymentId = "text-davinci-003"; String prompt = "Say this is a test"; testRunner.accept(deploymentId, prompt); } void getChatCompletionsRunner(BiConsumer<String, List<ChatMessage>> testRunner) { testRunner.accept("gpt-35-turbo", getChatMessages()); } void getChatCompletionsForNonAzureRunner(BiConsumer<String, List<ChatMessage>> testRunner) { testRunner.accept("gpt-3.5-turbo", getChatMessages()); } void getEmbeddingRunner(BiConsumer<String, EmbeddingsOptions> testRunner) { testRunner.accept("embedding", new EmbeddingsOptions(Arrays.asList("Your text string goes here"))); } void getEmbeddingNonAzureRunner(BiConsumer<String, EmbeddingsOptions> testRunner) { testRunner.accept("text-embedding-ada-002", new EmbeddingsOptions(Arrays.asList("Your text string goes here"))); } private List<ChatMessage> getChatMessages() { List<ChatMessage> chatMessages = new ArrayList<>(); chatMessages.add(new ChatMessage(ChatRole.SYSTEM).setContent("You are a helpful assistant. You will talk like a pirate.")); chatMessages.add(new ChatMessage(ChatRole.USER).setContent("Can you help me?")); chatMessages.add(new ChatMessage(ChatRole.ASSISTANT).setContent("Of course, me hearty! 
What can I do for ye?")); chatMessages.add(new ChatMessage(ChatRole.USER).setContent("What's the best way to train a parrot?")); return chatMessages; } static void assertCompletions(int choicesPerPrompt, String expectedFinishReason, Completions actual) { assertNotNull(actual); assertInstanceOf(Completions.class, actual); assertChoices(choicesPerPrompt, expectedFinishReason, actual.getChoices()); assertNotNull(actual.getUsage()); } static <T> T assertAndGetValueFromResponse(Response<BinaryData> actualResponse, Class<T> clazz, int expectedCode) { assertNotNull(actualResponse); assertEquals(expectedCode, actualResponse.getStatusCode()); assertInstanceOf(Response.class, actualResponse); BinaryData binaryData = actualResponse.getValue(); assertNotNull(binaryData); T object = binaryData.toObject(clazz); assertNotNull(object); assertInstanceOf(clazz, object); return object; } static void assertChoices(int choicesPerPrompt, String expectedFinishReason, List<Choice> actual) { assertEquals(choicesPerPrompt, actual.size()); for (int i = 0; i < actual.size(); i++) { assertChoice(i, expectedFinishReason, actual.get(i)); } } static void assertChoice(int index, String expectedFinishReason, Choice actual) { assertNotNull(actual.getText()); assertEquals(index, actual.getIndex()); assertEquals(expectedFinishReason, actual.getFinishReason().toString()); } static void assertChatCompletions(int choiceCount, ChatCompletions actual) { List<ChatChoice> choices = actual.getChoices(); assertNotNull(choices); assertTrue(choices.size() > 0); assertChatChoices(choiceCount, "stop", ChatRole.ASSISTANT, choices); assertNotNull(actual.getUsage()); } static void assertChatCompletionsStream(ChatCompletions chatCompletions) { assertNotNull(chatCompletions.getId()); assertNotNull(chatCompletions.getChoices()); assertFalse(chatCompletions.getChoices().isEmpty()); assertNotNull(chatCompletions.getChoices().get(0).getDelta()); } static void assertCompletionsStream(Completions completions) { 
assertNotNull(completions.getId()); assertNotNull(completions.getChoices()); assertFalse(completions.getChoices().isEmpty()); assertNotNull(completions.getChoices().get(0).getText()); } static void assertChatCompletions(int choiceCount, String expectedFinishReason, ChatRole chatRole, ChatCompletions actual) { List<ChatChoice> choices = actual.getChoices(); assertNotNull(choices); assertTrue(choices.size() > 0); assertChatChoices(choiceCount, expectedFinishReason, chatRole, choices); assertNotNull(actual.getUsage()); } static void assertChatChoices(int choiceCount, String expectedFinishReason, ChatRole chatRole, List<ChatChoice> actual) { assertEquals(choiceCount, actual.size()); for (int i = 0; i < actual.size(); i++) { assertChatChoice(i, expectedFinishReason, chatRole, actual.get(i)); } } static void assertChatChoice(int index, String expectedFinishReason, ChatRole chatRole, ChatChoice actual) { assertEquals(index, actual.getIndex()); assertEquals(chatRole, actual.getMessage().getRole()); assertNotNull(actual.getMessage().getContent()); assertEquals(expectedFinishReason, actual.getFinishReason().toString()); } static void assertEmbeddings(Embeddings actual) { List<EmbeddingItem> data = actual.getData(); assertNotNull(data); assertTrue(data.size() > 0); for (EmbeddingItem item : data) { List<Double> embedding = item.getEmbedding(); assertNotNull(embedding); assertTrue(embedding.size() > 0); } assertNotNull(actual.getUsage()); } }
class OpenAIClientTestBase extends TestProxyTestBase { OpenAIClientBuilder getOpenAIClientBuilder(HttpClient httpClient, OpenAIServiceVersion serviceVersion) { OpenAIClientBuilder builder = new OpenAIClientBuilder() .httpClient(httpClient) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(serviceVersion); if (getTestMode() == TestMode.PLAYBACK) { builder .endpoint("https: .credential(new AzureKeyCredential(FAKE_API_KEY)); } else if (getTestMode() == TestMode.RECORD) { builder .addPolicy(interceptorManager.getRecordPolicy()) .endpoint(Configuration.getGlobalConfiguration().get("AZURE_OPENAI_ENDPOINT")) .credential(new AzureKeyCredential(Configuration.getGlobalConfiguration().get("AZURE_OPENAI_KEY"))); } else { builder .endpoint(Configuration.getGlobalConfiguration().get("AZURE_OPENAI_ENDPOINT")) .credential(new AzureKeyCredential(Configuration.getGlobalConfiguration().get("AZURE_OPENAI_KEY"))); } return builder; } OpenAIClientBuilder getNonAzureOpenAIClientBuilder(HttpClient httpClient) { OpenAIClientBuilder builder = new OpenAIClientBuilder() .httpClient(httpClient) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (getTestMode() == TestMode.PLAYBACK) { builder .credential(new NonAzureOpenAIKeyCredential(FAKE_API_KEY)); } else if (getTestMode() == TestMode.RECORD) { builder .addPolicy(interceptorManager.getRecordPolicy()) .credential(new NonAzureOpenAIKeyCredential(Configuration.getGlobalConfiguration().get("NON_AZURE_OPEN_AI_KEY"))); } else { builder .credential(new NonAzureOpenAIKeyCredential(Configuration.getGlobalConfiguration().get("NON_AZURE_OPEN_AI_KEY"))); } return builder; } @Test public abstract void testGetCompletions(HttpClient httpClient, OpenAIServiceVersion serviceVersion); @Test public abstract void testGetCompletionsWithResponse(HttpClient httpClient, OpenAIServiceVersion serviceVersion); @Test public abstract void testGetChatCompletions(HttpClient 
httpClient, OpenAIServiceVersion serviceVersion); @Test public abstract void testGetChatCompletionsWithResponse(HttpClient httpClient, OpenAIServiceVersion serviceVersion); @Test public abstract void testGetEmbeddings(HttpClient httpClient, OpenAIServiceVersion serviceVersion); @Test public abstract void testGetEmbeddingsWithResponse(HttpClient httpClient, OpenAIServiceVersion serviceVersion); void getCompletionsRunner(BiConsumer<String, List<String>> testRunner) { String deploymentId = "text-davinci-003"; List<String> prompt = new ArrayList<>(); prompt.add("Say this is a test"); testRunner.accept(deploymentId, prompt); } void getCompletionsFromSinglePromptRunner(BiConsumer<String, String> testRunner) { String deploymentId = "text-davinci-003"; String prompt = "Say this is a test"; testRunner.accept(deploymentId, prompt); } void getChatCompletionsRunner(BiConsumer<String, List<ChatMessage>> testRunner) { testRunner.accept("gpt-35-turbo", getChatMessages()); } void getChatCompletionsForNonAzureRunner(BiConsumer<String, List<ChatMessage>> testRunner) { testRunner.accept("gpt-3.5-turbo", getChatMessages()); } void getEmbeddingRunner(BiConsumer<String, EmbeddingsOptions> testRunner) { testRunner.accept("embedding", new EmbeddingsOptions(Arrays.asList("Your text string goes here"))); } void getEmbeddingNonAzureRunner(BiConsumer<String, EmbeddingsOptions> testRunner) { testRunner.accept("text-embedding-ada-002", new EmbeddingsOptions(Arrays.asList("Your text string goes here"))); } private List<ChatMessage> getChatMessages() { List<ChatMessage> chatMessages = new ArrayList<>(); chatMessages.add(new ChatMessage(ChatRole.SYSTEM).setContent("You are a helpful assistant. You will talk like a pirate.")); chatMessages.add(new ChatMessage(ChatRole.USER).setContent("Can you help me?")); chatMessages.add(new ChatMessage(ChatRole.ASSISTANT).setContent("Of course, me hearty! 
What can I do for ye?")); chatMessages.add(new ChatMessage(ChatRole.USER).setContent("What's the best way to train a parrot?")); return chatMessages; } static void assertCompletions(int choicesPerPrompt, String expectedFinishReason, Completions actual) { assertNotNull(actual); assertInstanceOf(Completions.class, actual); assertChoices(choicesPerPrompt, expectedFinishReason, actual.getChoices()); assertNotNull(actual.getUsage()); } static <T> T assertAndGetValueFromResponse(Response<BinaryData> actualResponse, Class<T> clazz, int expectedCode) { assertNotNull(actualResponse); assertEquals(expectedCode, actualResponse.getStatusCode()); assertInstanceOf(Response.class, actualResponse); BinaryData binaryData = actualResponse.getValue(); assertNotNull(binaryData); T object = binaryData.toObject(clazz); assertNotNull(object); assertInstanceOf(clazz, object); return object; } static void assertChoices(int choicesPerPrompt, String expectedFinishReason, List<Choice> actual) { assertEquals(choicesPerPrompt, actual.size()); for (int i = 0; i < actual.size(); i++) { assertChoice(i, expectedFinishReason, actual.get(i)); } } static void assertChoice(int index, String expectedFinishReason, Choice actual) { assertNotNull(actual.getText()); assertEquals(index, actual.getIndex()); assertEquals(expectedFinishReason, actual.getFinishReason().toString()); } static void assertChatCompletions(int choiceCount, ChatCompletions actual) { List<ChatChoice> choices = actual.getChoices(); assertNotNull(choices); assertTrue(choices.size() > 0); assertChatChoices(choiceCount, "stop", ChatRole.ASSISTANT, choices); assertNotNull(actual.getUsage()); } static void assertChatCompletionsStream(ChatCompletions chatCompletions) { assertNotNull(chatCompletions.getId()); assertNotNull(chatCompletions.getChoices()); assertFalse(chatCompletions.getChoices().isEmpty()); assertNotNull(chatCompletions.getChoices().get(0).getDelta()); } static void assertCompletionsStream(Completions completions) { 
assertNotNull(completions.getId()); assertNotNull(completions.getChoices()); assertFalse(completions.getChoices().isEmpty()); assertNotNull(completions.getChoices().get(0).getText()); } static void assertChatCompletions(int choiceCount, String expectedFinishReason, ChatRole chatRole, ChatCompletions actual) { List<ChatChoice> choices = actual.getChoices(); assertNotNull(choices); assertTrue(choices.size() > 0); assertChatChoices(choiceCount, expectedFinishReason, chatRole, choices); assertNotNull(actual.getUsage()); } static void assertChatChoices(int choiceCount, String expectedFinishReason, ChatRole chatRole, List<ChatChoice> actual) { assertEquals(choiceCount, actual.size()); for (int i = 0; i < actual.size(); i++) { assertChatChoice(i, expectedFinishReason, chatRole, actual.get(i)); } } static void assertChatChoice(int index, String expectedFinishReason, ChatRole chatRole, ChatChoice actual) { assertEquals(index, actual.getIndex()); assertEquals(chatRole, actual.getMessage().getRole()); assertNotNull(actual.getMessage().getContent()); assertEquals(expectedFinishReason, actual.getFinishReason().toString()); } static void assertEmbeddings(Embeddings actual) { List<EmbeddingItem> data = actual.getData(); assertNotNull(data); assertTrue(data.size() > 0); for (EmbeddingItem item : data) { List<Double> embedding = item.getEmbedding(); assertNotNull(embedding); assertTrue(embedding.size() > 0); } assertNotNull(actual.getUsage()); } }
Please use the scheduled executor service from ExecutorOptions once it is merged from https://github.com/apache/beam/pull/23234
private void doClose() { try { closeAutoscaler(); closeConsumer(); ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); executorService.schedule( () -> { LOG.debug( "Closing session and connection after delay {}", source.spec.getCloseTimeout()); checkpointMark.discard(); closeSession(); closeConnection(); }, source.spec.getCloseTimeout(), TimeUnit.MILLISECONDS); } catch (Exception e) { LOG.error("Error closing reader", e); } }
ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
private void doClose() { try { closeAutoscaler(); closeConsumer(); ScheduledExecutorService executorService = options.as(ExecutorOptions.class).getScheduledExecutorService(); executorService.schedule( () -> { LOG.debug( "Closing session and connection after delay {}", source.spec.getCloseTimeout()); checkpointMark.discard(); closeSession(); closeConnection(); }, source.spec.getCloseTimeout().getMillis(), TimeUnit.MILLISECONDS); } catch (Exception e) { LOG.error("Error closing reader", e); } }
class UnboundedJmsReader<T> extends UnboundedReader<T> { private UnboundedJmsSource<T> source; private JmsCheckpointMark checkpointMark; private Connection connection; private Session session; private MessageConsumer consumer; private AutoScaler autoScaler; private T currentMessage; private Instant currentTimestamp; public UnboundedJmsReader(UnboundedJmsSource<T> source) { this.source = source; this.checkpointMark = new JmsCheckpointMark(); this.currentMessage = null; } @Override public boolean start() throws IOException { Read<T> spec = source.spec; ConnectionFactory connectionFactory = spec.getConnectionFactory(); try { Connection connection; if (spec.getUsername() != null) { connection = connectionFactory.createConnection(spec.getUsername(), spec.getPassword()); } else { connection = connectionFactory.createConnection(); } connection.start(); this.connection = connection; if (spec.getAutoScaler() == null) { this.autoScaler = new DefaultAutoscaler(); } else { this.autoScaler = spec.getAutoScaler(); } this.autoScaler.start(); } catch (Exception e) { throw new IOException("Error connecting to JMS", e); } try { this.session = this.connection.createSession(false, Session.CLIENT_ACKNOWLEDGE); } catch (Exception e) { throw new IOException("Error creating JMS session", e); } try { if (spec.getTopic() != null) { this.consumer = this.session.createConsumer(this.session.createTopic(spec.getTopic())); } else { this.consumer = this.session.createConsumer(this.session.createQueue(spec.getQueue())); } } catch (Exception e) { throw new IOException("Error creating JMS consumer", e); } return advance(); } @Override public boolean advance() throws IOException { try { Message message = this.consumer.receiveNoWait(); if (message == null) { currentMessage = null; return false; } checkpointMark.add(message); currentMessage = this.source.spec.getMessageMapper().mapMessage(message); currentTimestamp = new Instant(message.getJMSTimestamp()); return true; } catch (Exception e) { throw new 
IOException(e); } } @Override public T getCurrent() throws NoSuchElementException { if (currentMessage == null) { throw new NoSuchElementException(); } return currentMessage; } @Override public Instant getWatermark() { return checkpointMark.getOldestMessageTimestamp(); } @Override public Instant getCurrentTimestamp() { if (currentMessage == null) { throw new NoSuchElementException(); } return currentTimestamp; } @Override public CheckpointMark getCheckpointMark() { return checkpointMark; } @Override public long getTotalBacklogBytes() { return this.autoScaler.getTotalBacklogBytes(); } @Override public UnboundedSource<T, ?> getCurrentSource() { return source; } @Override public void close() { doClose(); } @SuppressWarnings("FutureReturnValueIgnored") private void closeConnection() { try { if (connection != null) { connection.stop(); connection.close(); connection = null; } } catch (Exception e) { LOG.error("Error closing connection", e); } } private void closeSession() { try { if (session != null) { session.close(); session = null; } } catch (Exception e) { LOG.error("Error closing session" + e.getMessage(), e); } } private void closeConsumer() { try { if (consumer != null) { consumer.close(); consumer = null; } } catch (Exception e) { LOG.error("Error closing consumer", e); } } private void closeAutoscaler() { try { if (autoScaler != null) { autoScaler.stop(); autoScaler = null; } } catch (Exception e) { LOG.error("Error closing autoscaler", e); } } @Override protected void finalize() { doClose(); } }
class UnboundedJmsReader<T> extends UnboundedReader<T> { private UnboundedJmsSource<T> source; private JmsCheckpointMark checkpointMark; private Connection connection; private Session session; private MessageConsumer consumer; private AutoScaler autoScaler; private T currentMessage; private Instant currentTimestamp; private PipelineOptions options; public UnboundedJmsReader(UnboundedJmsSource<T> source, PipelineOptions options) { this.source = source; this.checkpointMark = new JmsCheckpointMark(); this.currentMessage = null; this.options = options; } @Override public boolean start() throws IOException { Read<T> spec = source.spec; ConnectionFactory connectionFactory = spec.getConnectionFactory(); try { Connection connection; if (spec.getUsername() != null) { connection = connectionFactory.createConnection(spec.getUsername(), spec.getPassword()); } else { connection = connectionFactory.createConnection(); } connection.start(); this.connection = connection; if (spec.getAutoScaler() == null) { this.autoScaler = new DefaultAutoscaler(); } else { this.autoScaler = spec.getAutoScaler(); } this.autoScaler.start(); } catch (Exception e) { throw new IOException("Error connecting to JMS", e); } try { this.session = this.connection.createSession(false, Session.CLIENT_ACKNOWLEDGE); } catch (Exception e) { throw new IOException("Error creating JMS session", e); } try { if (spec.getTopic() != null) { this.consumer = this.session.createConsumer(this.session.createTopic(spec.getTopic())); } else { this.consumer = this.session.createConsumer(this.session.createQueue(spec.getQueue())); } } catch (Exception e) { throw new IOException("Error creating JMS consumer", e); } return advance(); } @Override public boolean advance() throws IOException { try { Message message = this.consumer.receiveNoWait(); if (message == null) { currentMessage = null; return false; } checkpointMark.add(message); currentMessage = this.source.spec.getMessageMapper().mapMessage(message); currentTimestamp = new 
Instant(message.getJMSTimestamp()); return true; } catch (Exception e) { throw new IOException(e); } } @Override public T getCurrent() throws NoSuchElementException { if (currentMessage == null) { throw new NoSuchElementException(); } return currentMessage; } @Override public Instant getWatermark() { return checkpointMark.getOldestMessageTimestamp(); } @Override public Instant getCurrentTimestamp() { if (currentMessage == null) { throw new NoSuchElementException(); } return currentTimestamp; } @Override public CheckpointMark getCheckpointMark() { return checkpointMark; } @Override public long getTotalBacklogBytes() { return this.autoScaler.getTotalBacklogBytes(); } @Override public UnboundedSource<T, ?> getCurrentSource() { return source; } @Override public void close() { doClose(); } @SuppressWarnings("FutureReturnValueIgnored") private void closeConnection() { try { if (connection != null) { connection.stop(); connection.close(); connection = null; } } catch (Exception e) { LOG.error("Error closing connection", e); } } private void closeSession() { try { if (session != null) { session.close(); session = null; } } catch (Exception e) { LOG.error("Error closing session" + e.getMessage(), e); } } private void closeConsumer() { try { if (consumer != null) { consumer.close(); consumer = null; } } catch (Exception e) { LOG.error("Error closing consumer", e); } } private void closeAutoscaler() { try { if (autoScaler != null) { autoScaler.stop(); autoScaler = null; } } catch (Exception e) { LOG.error("Error closing autoscaler", e); } } @Override protected void finalize() { doClose(); } }
## Accessing files should not lead to filesystem oracle attacks <!--SONAR_ISSUE_KEY:AYtAxwZ6agaDI_wZ9Hvt-->Change this code to not construct the path from user-controlled data. <p>See more on <a href="https://sonarcloud.io/project/issues?id=metersphere_metersphere&issues=AYtAxwZ6agaDI_wZ9Hvt&open=AYtAxwZ6agaDI_wZ9Hvt&pullRequest=27130">SonarCloud</a></p> [Show more details](https://github.com/metersphere/metersphere/security/code-scanning/109)
public String loadPlugin(String fileName) { String filePath = MsFileUtils.PLUGIN_DIR + "/" + fileName; File file = new File(filePath); if (!file.exists()) { downloadPluginFromRepository(fileName); } return msPluginManager.loadPlugin(Paths.get(filePath)); }
public String loadPlugin(String fileName) { MsFileUtils.validateFileName(fileName); String filePath = MsFileUtils.PLUGIN_DIR + "/" + fileName; File file = new File(filePath); if (!file.exists()) { downloadPluginFromRepository(fileName); } return msPluginManager.loadPlugin(Paths.get(filePath)); }
class PluginLoadService { @Resource private PluginMapper pluginMapper; @Resource private PluginScriptMapper pluginScriptMapper; private MsPluginManager msPluginManager = new MsPluginManager(); /** * 从文件系统中加载jar * * @param fileName * @return */ /** * 从默认的对象存储下载插件到本地,再加载 * @param fileName * @return * @throws Exception */ public void loadPluginFromRepository(String fileName) { String filePath = MsFileUtils.PLUGIN_DIR + "/" + fileName; File file = new File(filePath); try { if (!file.exists()) { InputStream fileAsStream = FileCenter.getDefaultRepository().getFileAsStream(getFileRequest(fileName)); FileUtils.copyInputStreamToFile(fileAsStream, file); } msPluginManager.loadPlugin(Paths.get(filePath)); } catch (Exception e) { LogUtils.error("从对象存储加载插件异常", e); } } /** * 将插件上传到本地文件系统中 * * @param file * @return */ public String uploadPlugin2Local(MultipartFile file) { try { return FileCenter.getRepository(StorageType.LOCAL).saveFile(file, getFileRequest(file.getOriginalFilename())); } catch (Exception e) { LogUtils.error(e); throw new MSException("文件上传异常", e); } } /** * 将文件上传到默认的对象存储中 * * @param file */ public void uploadPlugin2Repository(MultipartFile file) { try { FileCenter.getDefaultRepository().saveFile(file, getFileRequest(file.getOriginalFilename())); } catch (Exception e) { LogUtils.error(e); throw new MSException("文件上传异常", e); } } /** * 从对象存储中下载插件 * * @param fileName */ public void downloadPluginFromRepository(String fileName) { try { InputStream inputStream = FileCenter.getDefaultRepository().getFileAsStream(getFileRequest(fileName)); FileCenter.getRepository(StorageType.LOCAL).saveFile(inputStream, getFileRequest(fileName)); } catch (Exception e) { LogUtils.error(e); throw new MSException("下载插件异常", e); } } private FileRequest getFileRequest(String name) { FileRequest request = new FileRequest(); request.setProjectId(MsFileUtils.PLUGIN_DIR_NAME); request.setFileName(name); return request; } /** * @return 返回前端渲染需要的数据 * 默认会返回 resources下的 script 下的 json 文件 */ public 
List<String> getFrontendScripts(String pluginId) { MsPlugin msPluginInstance = (MsPlugin) msPluginManager.getPlugin(pluginId).getPlugin(); String scriptDir = msPluginInstance.getScriptDir(); List<String> scripts = new ArrayList<>(10); String jarPath = msPluginManager.getPlugin(pluginId).getPluginPath().toString(); try (JarFile jarFile = new JarFile(jarPath)) { Enumeration<JarEntry> entries = jarFile.entries(); while (entries.hasMoreElements()) { JarEntry jarEntry = entries.nextElement(); String innerPath = jarEntry.getName(); if (innerPath.startsWith(scriptDir) && !jarEntry.isDirectory()) { try (InputStream inputStream = msPluginManager.getPluginClassLoader(pluginId).getResourceAsStream(innerPath)) { if (inputStream != null) { scripts.add(IOUtil.toString(inputStream)); } } } } return scripts; } catch (Exception e) { LogUtils.error(e); throw new MSException("获取脚本异常", e); } } /** * 项目启动时加载插件 */ public synchronized void loadPlugins() { List<Plugin> plugins = pluginMapper.selectByExample(new PluginExample()); plugins.forEach(plugin -> { String fileName = plugin.getFileName(); try { loadPlugin(fileName); msPluginManager.startPlugin(plugin.getId()); } catch (Throwable e) { LogUtils.error("初始化插件异常" + plugin.getFileName(), e); } }); } /** * 卸载插件 */ public void unloadPlugin(String pluginId) { if (msPluginManager.getPlugin(pluginId) != null) { msPluginManager.deletePlugin(pluginId); } } public boolean hasPlugin(String pluginId) { return msPluginManager.getPlugin(pluginId) != null; } /** * 删除插件 */ public void deletePluginFile(String fileName) { FileRequest fileRequest = getFileRequest(fileName); try { FileCenter.getRepository(StorageType.LOCAL).delete(fileRequest); FileCenter.getDefaultRepository().delete(fileRequest); } catch (Exception e) { LogUtils.error(e); } } /** * 删除本地插件 * @param fileName */ public void deleteLocalPluginFile(String fileName) { FileRequest fileRequest = getFileRequest(fileName); try { FileCenter.getRepository(StorageType.LOCAL).delete(fileRequest); } 
catch (Exception e) { LogUtils.error(e); } } public InputStream getResourceAsStream(String pluginId, String name) { return msPluginManager.getPluginClassLoader(pluginId).getResourceAsStream(name); } public Map getPluginScriptConfig(String pluginId, String scriptId) { PluginScript pluginScript = pluginScriptMapper.selectByPrimaryKey(pluginId, scriptId); return JSON.parseMap(new String(pluginScript.getScript())); } public Object getPluginScriptContent(String pluginId, String scriptId) { return getPluginScriptConfig(pluginId, scriptId).get("script"); } public PluginWrapper getPluginWrapper(String id) { return msPluginManager.getPlugin(id); } /** * 获取插件中的是实现类列表 * @param clazz * @return * @param <T> */ public <T> List<T> getExtensions(Class<T> clazz) { return msPluginManager.getExtensions(clazz); } /** * 获取插件中的是实现类 * @param clazz * @param pluginId * @return * @param <T> */ public <T> Class<? extends T> getExtensionsClass(Class<T> clazz, String pluginId) { List<Class<? extends T>> classes = msPluginManager.getExtensionClasses(clazz, pluginId); return CollectionUtils.isEmpty(classes) ? null : classes.get(0); } public MsPluginManager getMsPluginManager() { return msPluginManager; } public <T> T getImplInstance(Class<T> extensionClazz, String pluginId, Object param) { try { Class<? 
extends T> clazz = getExtensionsClass(extensionClazz, pluginId); if (clazz == null) { throw new MSException(CommonResultCode.PLUGIN_GET_INSTANCE); } if (param == null) { return clazz.getConstructor().newInstance(); } else { return clazz.getConstructor(param.getClass()).newInstance(param); } } catch (InvocationTargetException e) { LogUtils.error(e.getTargetException()); throw new MSException(CommonResultCode.PLUGIN_GET_INSTANCE, e.getTargetException().getMessage()); } catch (Exception e) { LogUtils.error(e); throw new MSException(CommonResultCode.PLUGIN_GET_INSTANCE, e.getMessage()); } } public void handlePluginAddNotified(String pluginId, String fileName) { if (!hasPlugin(pluginId)) { loadPluginFromRepository(fileName); } } public void handlePluginDeleteNotified(String pluginId, String fileName) { if (hasPlugin(pluginId)) { unloadPlugin(pluginId); deleteLocalPluginFile(fileName); } } }
class PluginLoadService { @Resource private PluginMapper pluginMapper; @Resource private PluginScriptMapper pluginScriptMapper; private MsPluginManager msPluginManager = new MsPluginManager(); /** * 从文件系统中加载jar * * @param fileName * @return */ /** * 从默认的对象存储下载插件到本地,再加载 * @param fileName * @return * @throws Exception */ public void loadPluginFromRepository(String fileName) { MsFileUtils.validateFileName(fileName); String filePath = MsFileUtils.PLUGIN_DIR + "/" + fileName; File file = new File(filePath); try { if (!file.exists()) { InputStream fileAsStream = FileCenter.getDefaultRepository().getFileAsStream(getFileRequest(fileName)); FileUtils.copyInputStreamToFile(fileAsStream, file); } msPluginManager.loadPlugin(Paths.get(filePath)); } catch (Exception e) { LogUtils.error("从对象存储加载插件异常", e); } } /** * 将插件上传到本地文件系统中 * * @param file * @return */ public String uploadPlugin2Local(MultipartFile file) { try { return FileCenter.getRepository(StorageType.LOCAL).saveFile(file, getFileRequest(file.getOriginalFilename())); } catch (Exception e) { LogUtils.error(e); throw new MSException("文件上传异常", e); } } /** * 将文件上传到默认的对象存储中 * * @param file */ public void uploadPlugin2Repository(MultipartFile file) { try { FileCenter.getDefaultRepository().saveFile(file, getFileRequest(file.getOriginalFilename())); } catch (Exception e) { LogUtils.error(e); throw new MSException("文件上传异常", e); } } /** * 从对象存储中下载插件 * * @param fileName */ public void downloadPluginFromRepository(String fileName) { try { InputStream inputStream = FileCenter.getDefaultRepository().getFileAsStream(getFileRequest(fileName)); FileCenter.getRepository(StorageType.LOCAL).saveFile(inputStream, getFileRequest(fileName)); } catch (Exception e) { LogUtils.error(e); throw new MSException("下载插件异常", e); } } private FileRequest getFileRequest(String name) { FileRequest request = new FileRequest(); request.setProjectId(MsFileUtils.PLUGIN_DIR_NAME); request.setFileName(name); return request; } /** * @return 返回前端渲染需要的数据 * 默认会返回 
resources下的 script 下的 json 文件 */ public List<String> getFrontendScripts(String pluginId) { MsPlugin msPluginInstance = (MsPlugin) msPluginManager.getPlugin(pluginId).getPlugin(); String scriptDir = msPluginInstance.getScriptDir(); List<String> scripts = new ArrayList<>(10); String jarPath = msPluginManager.getPlugin(pluginId).getPluginPath().toString(); try (JarFile jarFile = new JarFile(jarPath)) { Enumeration<JarEntry> entries = jarFile.entries(); while (entries.hasMoreElements()) { JarEntry jarEntry = entries.nextElement(); String innerPath = jarEntry.getName(); if (innerPath.startsWith(scriptDir) && !jarEntry.isDirectory()) { try (InputStream inputStream = msPluginManager.getPluginClassLoader(pluginId).getResourceAsStream(innerPath)) { if (inputStream != null) { scripts.add(IOUtil.toString(inputStream)); } } } } return scripts; } catch (Exception e) { LogUtils.error(e); throw new MSException("获取脚本异常", e); } } /** * 项目启动时加载插件 */ public synchronized void loadPlugins() { List<Plugin> plugins = pluginMapper.selectByExample(new PluginExample()); plugins.forEach(plugin -> { String fileName = plugin.getFileName(); try { loadPlugin(fileName); msPluginManager.startPlugin(plugin.getId()); } catch (Throwable e) { LogUtils.error("初始化插件异常" + plugin.getFileName(), e); } }); } /** * 卸载插件 */ public void unloadPlugin(String pluginId) { if (msPluginManager.getPlugin(pluginId) != null) { msPluginManager.deletePlugin(pluginId); } } public boolean hasPlugin(String pluginId) { return msPluginManager.getPlugin(pluginId) != null; } /** * 删除插件 */ public void deletePluginFile(String fileName) { FileRequest fileRequest = getFileRequest(fileName); try { FileCenter.getRepository(StorageType.LOCAL).delete(fileRequest); FileCenter.getDefaultRepository().delete(fileRequest); } catch (Exception e) { LogUtils.error(e); } } /** * 删除本地插件 * @param fileName */ public void deleteLocalPluginFile(String fileName) { FileRequest fileRequest = getFileRequest(fileName); try { 
FileCenter.getRepository(StorageType.LOCAL).delete(fileRequest); } catch (Exception e) { LogUtils.error(e); } } public InputStream getResourceAsStream(String pluginId, String name) { return msPluginManager.getPluginClassLoader(pluginId).getResourceAsStream(name); } public Map getPluginScriptConfig(String pluginId, String scriptId) { PluginScript pluginScript = pluginScriptMapper.selectByPrimaryKey(pluginId, scriptId); return JSON.parseMap(new String(pluginScript.getScript())); } public Object getPluginScriptContent(String pluginId, String scriptId) { return getPluginScriptConfig(pluginId, scriptId).get("script"); } public PluginWrapper getPluginWrapper(String id) { return msPluginManager.getPlugin(id); } /** * 获取插件中的是实现类列表 * @param clazz * @return * @param <T> */ public <T> List<T> getExtensions(Class<T> clazz) { return msPluginManager.getExtensions(clazz); } /** * 获取插件中的是实现类 * @param clazz * @param pluginId * @return * @param <T> */ public <T> Class<? extends T> getExtensionsClass(Class<T> clazz, String pluginId) { List<Class<? extends T>> classes = msPluginManager.getExtensionClasses(clazz, pluginId); return CollectionUtils.isEmpty(classes) ? null : classes.get(0); } public MsPluginManager getMsPluginManager() { return msPluginManager; } public <T> T getImplInstance(Class<T> extensionClazz, String pluginId, Object param) { try { Class<? 
extends T> clazz = getExtensionsClass(extensionClazz, pluginId); if (clazz == null) { throw new MSException(CommonResultCode.PLUGIN_GET_INSTANCE); } if (param == null) { return clazz.getConstructor().newInstance(); } else { return clazz.getConstructor(param.getClass()).newInstance(param); } } catch (InvocationTargetException e) { LogUtils.error(e.getTargetException()); throw new MSException(CommonResultCode.PLUGIN_GET_INSTANCE, e.getTargetException().getMessage()); } catch (Exception e) { LogUtils.error(e); throw new MSException(CommonResultCode.PLUGIN_GET_INSTANCE, e.getMessage()); } } public void handlePluginAddNotified(String pluginId, String fileName) { if (!hasPlugin(pluginId)) { loadPluginFromRepository(fileName); } } public void handlePluginDeleteNotified(String pluginId, String fileName) { if (hasPlugin(pluginId)) { unloadPlugin(pluginId); deleteLocalPluginFile(fileName); } } }
no idea why this particular zone requires 4G RAM, sounds wrong - should not be needed - you can leave be and we can remove once we know why we need this - @hmusum ?
private NodeResources defaultNodeResources(ClusterSpec.Type clusterType) { if (zone.system() == SystemName.PublicCd && clusterType == ClusterSpec.Type.admin && zone.environment() != Environment.prod) return new NodeResources(1, 3, 50); if (zone.system() == SystemName.cd && zone.environment() == Environment.dev && zone.region().value().equals("cd-us-west-1")) return new NodeResources(1, 4, 50); if (zone.system() == SystemName.cd && zone.environment() == Environment.test || zone.environment() == Environment.staging) return clusterType == ClusterSpec.Type.admin ? new NodeResources(1, 3, 50) : new NodeResources(4, 4, 50); return new NodeResources(2, 8, 50); }
return new NodeResources(1, 4, 50);
private NodeResources defaultNodeResources(ClusterSpec.Type clusterType) { if (clusterType == ClusterSpec.Type.admin) return new NodeResources(0.5, 3, 50); if (zone.system() == SystemName.cd && zone.environment().isTest()) new NodeResources(4, 4, 50); return new NodeResources(2, 8, 50); }
class CapacityPolicies { private final Zone zone; private final NodeFlavors flavors; public CapacityPolicies(Zone zone, NodeFlavors flavors) { this.zone = zone; this.flavors = flavors; } public int decideSize(Capacity requestedCapacity, ClusterSpec.Type clusterType) { int requestedNodes = ensureRedundancy(requestedCapacity.nodeCount(), clusterType, requestedCapacity.canFail()); if (requestedCapacity.isRequired()) return requestedNodes; switch(zone.environment()) { case dev : case test : return 1; case perf : return Math.min(requestedCapacity.nodeCount(), 3); case staging: return requestedNodes <= 1 ? requestedNodes : Math.max(2, requestedNodes / 10); case prod : return requestedNodes; default : throw new IllegalArgumentException("Unsupported environment " + zone.environment()); } } public NodeResources decideNodeResources(Optional<NodeResources> requestedResources, ClusterSpec cluster) { NodeResources resources = specifiedOrDefaultNodeResources(requestedResources, cluster); if (resources.allocateByLegacyName()) return resources; if (zone.system() == SystemName.cd || zone.environment() == Environment.dev || zone.environment() == Environment.test) resources = resources.withDiskSpeed(NodeResources.DiskSpeed.any); if (zone.environment() == Environment.dev) resources = resources.withVcpu(0.1); return resources; } private NodeResources specifiedOrDefaultNodeResources(Optional<NodeResources> requestedResources, ClusterSpec cluster) { if (requestedResources.isPresent() && ! requestedResources.get().allocateByLegacyName()) return requestedResources.get(); if (requestedResources.isEmpty()) return defaultNodeResources(cluster.type()); if (zone.system() == SystemName.cd) return flavors.exists(requestedResources.get().legacyName().get()) ? 
requestedResources.get() : defaultNodeResources(cluster.type()); else { switch (zone.environment()) { case dev: case test: case staging: return defaultNodeResources(cluster.type()); default: flavors.getFlavorOrThrow(requestedResources.get().legacyName().get()); return requestedResources.get(); } } } /** * Whether or not the nodes requested can share physical host with other applications. * A security feature which only makes sense for prod. */ public boolean decideExclusivity(boolean requestedExclusivity) { return requestedExclusivity && zone.environment() == Environment.prod; } /** * Throw if the node count is 1 for container and content clusters and we're in a production zone * * @return the argument node count * @throws IllegalArgumentException if only one node is requested and we can fail */ private int ensureRedundancy(int nodeCount, ClusterSpec.Type clusterType, boolean canFail) { if (canFail && nodeCount == 1 && Arrays.asList(ClusterSpec.Type.container, ClusterSpec.Type.content).contains(clusterType) && zone.environment().isProduction()) throw new IllegalArgumentException("Deployments to prod require at least 2 nodes per cluster for redundancy"); return nodeCount; } }
class CapacityPolicies { private final Zone zone; private final NodeFlavors flavors; public CapacityPolicies(Zone zone, NodeFlavors flavors) { this.zone = zone; this.flavors = flavors; } public int decideSize(Capacity requestedCapacity, ClusterSpec.Type clusterType) { int requestedNodes = ensureRedundancy(requestedCapacity.nodeCount(), clusterType, requestedCapacity.canFail()); if (requestedCapacity.isRequired()) return requestedNodes; switch(zone.environment()) { case dev : case test : return 1; case perf : return Math.min(requestedCapacity.nodeCount(), 3); case staging: return requestedNodes <= 1 ? requestedNodes : Math.max(2, requestedNodes / 10); case prod : return requestedNodes; default : throw new IllegalArgumentException("Unsupported environment " + zone.environment()); } } public NodeResources decideNodeResources(Optional<NodeResources> requestedResources, ClusterSpec cluster) { NodeResources resources = specifiedOrDefaultNodeResources(requestedResources, cluster); if (resources.allocateByLegacyName()) return resources; if (zone.system() == SystemName.cd || zone.environment() == Environment.dev || zone.environment() == Environment.test) resources = resources.withDiskSpeed(NodeResources.DiskSpeed.any); if (zone.environment() == Environment.dev) resources = resources.withVcpu(0.1); return resources; } private NodeResources specifiedOrDefaultNodeResources(Optional<NodeResources> requestedResources, ClusterSpec cluster) { if (requestedResources.isPresent() && ! requestedResources.get().allocateByLegacyName()) return requestedResources.get(); if (requestedResources.isEmpty()) return defaultNodeResources(cluster.type()); if (zone.system() == SystemName.cd) return flavors.exists(requestedResources.get().legacyName().get()) ? 
requestedResources.get() : defaultNodeResources(cluster.type()); else { switch (zone.environment()) { case dev: case test: case staging: return defaultNodeResources(cluster.type()); default: flavors.getFlavorOrThrow(requestedResources.get().legacyName().get()); return requestedResources.get(); } } } /** * Whether or not the nodes requested can share physical host with other applications. * A security feature which only makes sense for prod. */ public boolean decideExclusivity(boolean requestedExclusivity) { return requestedExclusivity && zone.environment() == Environment.prod; } /** * Throw if the node count is 1 for container and content clusters and we're in a production zone * * @return the argument node count * @throws IllegalArgumentException if only one node is requested and we can fail */ private int ensureRedundancy(int nodeCount, ClusterSpec.Type clusterType, boolean canFail) { if (canFail && nodeCount == 1 && Arrays.asList(ClusterSpec.Type.container, ClusterSpec.Type.content).contains(clusterType) && zone.environment().isProduction()) throw new IllegalArgumentException("Deployments to prod require at least 2 nodes per cluster for redundancy"); return nodeCount; } }
`restArgs.get(restArgs.size() - 1)` I think we can extract this or related logic to a variable. This is used multiple times. This will simplify the code increasing readability. L5083, L5088, L5098, L5115 (condition)
public void visit(BLangFunction funcNode) { SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env); if (!funcNode.interfaceFunction) { addReturnIfNotPresent(funcNode); } funcNode.originalFuncSymbol = funcNode.symbol; funcNode.symbol = ASTBuilderUtil.duplicateInvokableSymbol(funcNode.symbol); funcNode.requiredParams = rewrite(funcNode.requiredParams, funcEnv); funcNode.restParam = rewrite(funcNode.restParam, funcEnv); funcNode.workers = rewrite(funcNode.workers, funcEnv); if (funcNode.returnTypeNode != null && funcNode.returnTypeNode.getKind() != null) { funcNode.returnTypeNode = rewrite(funcNode.returnTypeNode, funcEnv); } List<BLangAnnotationAttachment> participantAnnotation = funcNode.annAttachments.stream() .filter(a -> Transactions.isTransactionsAnnotation(a.pkgAlias.value, a.annotationName.value)) .collect(Collectors.toList()); funcNode.body = rewrite(funcNode.body, funcEnv); funcNode.annAttachments.forEach(attachment -> rewrite(attachment, env)); if (funcNode.returnTypeNode != null) { funcNode.returnTypeAnnAttachments.forEach(attachment -> rewrite(attachment, env)); } if (participantAnnotation.isEmpty()) { result = funcNode; return; } result = desugarParticipantFunction(funcNode, participantAnnotation); } private BLangFunction desugarParticipantFunction(BLangFunction funcNode, List<BLangAnnotationAttachment> participantAnnotation) { BLangAnnotationAttachment annotation = participantAnnotation.get(0); BLangBlockFunctionBody onCommitBody = null; BLangBlockFunctionBody onAbortBody = null; funcNode.requiredParams.forEach(bLangSimpleVariable -> bLangSimpleVariable.symbol.closure = true); if (funcNode.receiver != null) { funcNode.receiver.symbol.closure = true; } BType trxReturnType = BUnionType.create(null, symTable.errorType, symTable.anyType); BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType); BLangLambdaFunction commitFunc = createLambdaFunction(funcNode.pos, "$anonOnCommitFunc$", 
ASTBuilderUtil.createTypeNode(symTable.nilType)); BLangLambdaFunction abortFunc = createLambdaFunction(funcNode.pos, "$anonOnAbortFunc$", ASTBuilderUtil.createTypeNode(symTable.nilType)); BLangSimpleVariable onCommitTrxVar = ASTBuilderUtil .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null, new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID, symTable.stringType, commitFunc.function.symbol)); BLangSimpleVariable onAbortTrxVar = ASTBuilderUtil .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null, new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID, symTable.stringType, abortFunc.function.symbol)); BLangSimpleVarRef trxIdOnCommitRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onCommitTrxVar.symbol); BLangSimpleVarRef trxIdOnAbortRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onAbortTrxVar.symbol); for (Map.Entry<String, BLangExpression> entry : getKeyValuePairs((BLangStatementExpression) annotation.expr).entrySet()) { switch (entry.getKey()) { case Transactions.TRX_ONCOMMIT_FUNC: BInvokableSymbol commitSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol; BLangInvocation onCommit = ASTBuilderUtil .createInvocationExprMethod(funcNode.pos, commitSym, Lists.of(trxIdOnCommitRef), Collections.emptyList(), symResolver); BLangStatement onCommitStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommit); onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onCommitStmt)); break; case Transactions.TRX_ONABORT_FUNC: BInvokableSymbol abortSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol; BLangInvocation onAbort = ASTBuilderUtil .createInvocationExprMethod(funcNode.pos, abortSym, Lists.of(trxIdOnAbortRef), Collections.emptyList(), symResolver); BLangStatement onAbortStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbort); onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onAbortStmt)); break; } 
} if (onCommitBody == null) { onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos); BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommitBody); returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE); } if (onAbortBody == null) { onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos); BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbortBody); returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE); } commitFunc.function.body = onCommitBody; commitFunc.function.requiredParams.add(onCommitTrxVar); commitFunc.type = new BInvokableType(Lists.of(onCommitTrxVar.symbol.type), commitFunc.function.symbol.type.getReturnType(), null); commitFunc.function.symbol.type = commitFunc.type; commitFunc.function.symbol.params = Lists.of(onCommitTrxVar.symbol); abortFunc.function.body = onAbortBody; abortFunc.function.requiredParams.add(onAbortTrxVar); abortFunc.type = new BInvokableType(Lists.of(onAbortTrxVar.symbol.type), abortFunc.function.symbol.type.getReturnType(), null); abortFunc.function.symbol.type = abortFunc.type; abortFunc.function.symbol.params = Lists.of(onAbortTrxVar.symbol); BSymbol trxModSym = env.enclPkg.imports .stream() .filter(importPackage -> importPackage.symbol. 
pkgID.toString().equals(Names.TRANSACTION_ORG.value + Names.ORG_NAME_SEPARATOR.value + Names.TRANSACTION_PACKAGE.value)) .findAny().get().symbol; BInvokableSymbol invokableSymbol = (BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(trxModSym), getParticipantFunctionName(funcNode)); BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.stringType, getTransactionBlockId()); BLangLambdaFunction trxMainWrapperFunc = createLambdaFunction(funcNode.pos, "$anonTrxWrapperFunc$", Collections.emptyList(), funcNode.returnTypeNode, funcNode.body); for (BLangSimpleVariable var : funcNode.requiredParams) { trxMainWrapperFunc.function.closureVarSymbols.add(new ClosureVarSymbol(var.symbol, var.pos)); } BLangBlockFunctionBody trxMainBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos); BLangLambdaFunction trxMainFunc = createLambdaFunction(funcNode.pos, "$anonTrxParticipantFunc$", Collections.emptyList(), trxReturnNode, trxMainBody); trxMainWrapperFunc.capturedClosureEnv = trxMainFunc.function.clonedEnv; commitFunc.capturedClosureEnv = env.createClone(); abortFunc.capturedClosureEnv = env.createClone(); BVarSymbol wrapperSym = new BVarSymbol(0, names.fromString("$wrapper$1"), this.env.scope.owner.pkgID, trxMainWrapperFunc.type, trxMainFunc.function.symbol); BLangSimpleVariable wrapperFuncVar = ASTBuilderUtil.createVariable(funcNode.pos, "$wrapper$1", trxMainWrapperFunc.type, trxMainWrapperFunc, wrapperSym); BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(funcNode.pos, trxMainBody); variableDef.var = wrapperFuncVar; BLangSimpleVarRef wrapperVarRef = rewrite(ASTBuilderUtil.createVariableRef(variableDef.pos, wrapperFuncVar.symbol), env); BLangInvocation wrapperInvocation = new BFunctionPointerInvocation(trxMainWrapperFunc.pos, wrapperVarRef, wrapperFuncVar.symbol, trxMainWrapperFunc.function.symbol.retType); BLangReturn wrapperReturn = ASTBuilderUtil.createReturnStmt(funcNode.pos, 
addConversionExprIfRequired (wrapperInvocation, trxReturnNode.type)); trxMainWrapperFunc.function.receiver = funcNode.receiver; trxMainFunc.function.receiver = funcNode.receiver; trxMainBody.stmts.add(wrapperReturn); rewrite(trxMainFunc.function, env); List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, trxMainFunc, commitFunc, abortFunc); BLangInvocation participantInvocation = ASTBuilderUtil.createInvocationExprMethod(funcNode.pos, invokableSymbol, requiredArgs, Collections.emptyList(), symResolver); participantInvocation.type = ((BInvokableType) invokableSymbol.type).retType; BLangStatement stmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired (participantInvocation, funcNode.symbol.retType)); funcNode.body = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(rewrite(stmt, env))); return funcNode; } private Name getParticipantFunctionName(BLangFunction function) { if (Symbols.isFlagOn((function).symbol.flags, Flags.RESOURCE)) { return TRX_REMOTE_PARTICIPANT_BEGIN_FUNCTION; } return TRX_LOCAL_PARTICIPANT_BEGIN_FUNCTION; } @Override public void visit(BLangResource resourceNode) { } public void visit(BLangAnnotation annotationNode) { annotationNode.annAttachments.forEach(attachment -> rewrite(attachment, env)); } public void visit(BLangAnnotationAttachment annAttachmentNode) { annAttachmentNode.expr = rewrite(annAttachmentNode.expr, env); result = annAttachmentNode; } @Override public void visit(BLangSimpleVariable varNode) { if (((varNode.symbol.owner.tag & SymTag.INVOKABLE) != SymTag.INVOKABLE) && (varNode.symbol.owner.tag & SymTag.LET) != SymTag.LET) { varNode.expr = null; result = varNode; return; } if (varNode.typeNode != null && varNode.typeNode.getKind() != null) { varNode.typeNode = rewrite(varNode.typeNode, env); } BLangExpression bLangExpression = rewriteExpr(varNode.expr); if (bLangExpression != null) { bLangExpression = addConversionExprIfRequired(bLangExpression, varNode.type); } varNode.expr = 
bLangExpression; varNode.annAttachments.forEach(attachment -> rewrite(attachment, env)); result = varNode; }

// Desugars a let expression: every let-var declaration is flattened into a block
// statement, the let body expression is bound to a temp variable, and the whole
// thing is wrapped in a statement expression yielding that temp variable.
@Override
public void visit(BLangLetExpression letExpression) {
    SymbolEnv prevEnv = this.env;
    this.env = letExpression.env;
    BLangExpression expr = letExpression.expr;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(letExpression.pos);
    for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
        // A let-var declaration may itself desugar to a whole block (e.g. a
        // destructuring binding pattern) or to a single simple variable def.
        BLangNode node = rewrite((BLangNode) letVariable.definitionNode, env);
        if (node.getKind() == NodeKind.BLOCK) {
            blockStmt.stmts.addAll(((BLangBlockStmt) node).stmts);
        } else {
            blockStmt.addStatement((BLangSimpleVariableDef) node);
        }
    }
    BLangSimpleVariableDef tempVarDef = createVarDef(String.format("$let_var_%d_$", letCount++),
            expr.type, expr, expr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.type = expr.type;
    result = rewrite(stmtExpr, env);
    this.env = prevEnv;
}

// Tuple/record/error binding-pattern variables are desugared at their definition
// site (see the corresponding *VariableDef visits), so the nodes pass through as-is.
@Override
public void visit(BLangTupleVariable varNode) {
    result = varNode;
}

@Override
public void visit(BLangRecordVariable varNode) {
    result = varNode;
}

@Override
public void visit(BLangErrorVariable varNode) {
    result = varNode;
}

@Override
public void visit(BLangBlockStmt block) {
    SymbolEnv blockEnv = SymbolEnv.createBlockEnv(block, env);
    block.stmts = rewriteStmt(block.stmts, blockEnv);
    result = block;
}

@Override
public void visit(BLangSimpleVariableDef varDefNode) {
    varDefNode.var = rewrite(varDefNode.var, env);
    result = varDefNode;
}

// Desugars a tuple binding-pattern definition: binds the RHS to a synthetic any[]
// variable named "tuple", then emits one definition statement per member plus the
// statements populating the rest binding pattern, all inside a block statement.
@Override
public void visit(BLangTupleVariableDef varDefNode) {
    varDefNode.var = rewrite(varDefNode.var, env);
    BLangTupleVariable tupleVariable = varDefNode.var;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BType runTimeType = new BArrayType(symTable.anyType);
    String name = "tuple";
    final BLangSimpleVariable tuple
            = ASTBuilderUtil.createVariable(varDefNode.pos, name, runTimeType, null,
            new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
                    this.env.scope.owner));
    tuple.expr = tupleVariable.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = tuple;
    createVarDefStmts(tupleVariable, blockStmt, tuple.symbol, null);
    createRestFieldVarDefStmts(tupleVariable, blockStmt, tuple.symbol);
    result = rewrite(blockStmt, env);
}

// Generates the statements that populate the rest binding pattern (`...rest`) of a
// tuple variable: defines the rest array, then copies the trailing members of the
// source tuple into it with a synthesized foreach over an int range.
private void createRestFieldVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt blockStmt,
                                        BVarSymbol tupleVarSymbol) {
    final BLangSimpleVariable arrayVar = (BLangSimpleVariable) parentTupleVariable.restVariable;
    boolean isTupleType = parentTupleVariable.type.tag == TypeTags.TUPLE;
    DiagnosticPos pos = blockStmt.pos;
    if (arrayVar != null) {
        BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
        arrayExpr.type = arrayVar.type;
        arrayVar.expr = arrayExpr;
        BLangSimpleVariableDef arrayVarDef = ASTBuilderUtil.createVariableDefStmt(arrayVar.pos, blockStmt);
        arrayVarDef.var = arrayVar;
        BLangExpression tupleExpr = parentTupleVariable.expr;
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, arrayVar.symbol);
        BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        // The rest elements start right after the explicitly bound members.
        startIndexLiteral.value = (long) (isTupleType
                ? ((BTupleType) parentTupleVariable.type).tupleTypes.size()
                : parentTupleVariable.memberVariables.size());
        startIndexLiteral.type = symTable.intType;
        BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
                getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        // Appends via `rest[rest.length()] = tuple[i]` inside the foreach body.
        BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(arrayVarRef,
                createLengthInvocation(pos, arrayVarRef));
        indexAccessExpr.type = (isTupleType ? ((BTupleType) parentTupleVariable.type).restType
                : symTable.anyType);
        createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
        foreach.body = foreachBody;
        blockStmt.addStatement(foreach);
    }
}

// Desugars a record binding-pattern definition: binds the RHS to a synthetic
// map<any> variable "$map$0" and emits one definition statement per field binding.
@Override
public void visit(BLangRecordVariableDef varDefNode) {
    BLangRecordVariable varNode = varDefNode.var;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null);
    final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(varDefNode.pos, "$map$0", runTimeType,
            null, new BVarSymbol(0, names.fromString("$map$0"), this.env.scope.owner.pkgID,
                    runTimeType, this.env.scope.owner));
    mapVariable.expr = varDefNode.var.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = mapVariable;
    createVarDefStmts(varNode, blockStmt, mapVariable.symbol, null);
    result = rewrite(blockStmt, env);
}

// Desugars an error binding-pattern definition: binds the RHS to a synthetic
// "$error$" variable and emits definitions for the reason/detail bindings.
@Override
public void visit(BLangErrorVariableDef varDefNode) {
    BLangErrorVariable errorVariable = varDefNode.errorVariable;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString("$error$"), this.env.scope.owner.pkgID,
            symTable.errorType, this.env.scope.owner);
    final BLangSimpleVariable error = ASTBuilderUtil.createVariable(varDefNode.pos, errorVarSymbol.name.value,
            symTable.errorType, null, errorVarSymbol);
    error.expr = errorVariable.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = error;
    createVarDefStmts(errorVariable, blockStmt, error.symbol, null);
    result = rewrite(blockStmt, env);
}

/**
 * This method iterate through each member of the tupleVar and create the relevant var def statements. This method
 * does the check for node kind of each member and call the related var def creation method.
 *
 * Example:
 * ((string, float) int)) ((a, b), c)) = (tuple)
 *
 * (a, b) is again a tuple, so it is a recursive var def creation.
 *
 * c is a simple var, so a simple var def will be created.
 *
 */
private void createVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    final List<BLangVariable> memberVars = parentTupleVariable.memberVariables;
    for (int index = 0; index < memberVars.size(); index++) {
        BLangVariable variable = memberVars.get(index);
        // Each member is accessed as tuple[index] on the synthetic backing array.
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.intType, (long) index);
        if (NodeKind.VARIABLE == variable.getKind()) {
            createSimpleVarDefStmt((BLangSimpleVariable) variable, parentBlockStmt, indexExpr, tupleVarSymbol,
                    parentIndexAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.TUPLE_VARIABLE) {
            // Nested tuple pattern: recurse with tuple[index] as the new parent access.
            BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                    new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangTupleVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.RECORD_VARIABLE) {
            // Nested record pattern: recurse with a map-typed index access.
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangRecordVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
            // For error members, the accessed element type comes from the backing
            // array's element type when the symbol is array-typed.
            BType accessedElemType = symTable.errorType;
            if (tupleVarSymbol.type.tag == TypeTags.ARRAY) {
                BArrayType arrayType = (BArrayType) tupleVarSymbol.type;
                accessedElemType = arrayType.eType;
            }
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, accessedElemType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
        }
    }
}

/**
 * Overloaded method to handle record variables.
 * This method iterate through each member of the recordVar and create the relevant var def statements. This method
 * does the check for node kind of each member and call the related var def creation method.
 *
 * Example:
 * type Foo record {
 *     string name;
 *     (int, string) age;
 *     Address address;
 * };
 *
 * Foo {name: a, age: (b, c), address: d} = {record literal}
 *
 * a is a simple var, so a simple var def will be created.
 *
 * (b, c) is a tuple, so it is a recursive var def creation.
 *
 * d is a record, so it is a recursive var def creation.
 *
 */
private void createVarDefStmts(BLangRecordVariable parentRecordVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    List<BLangRecordVariableKeyValue> variableList = parentRecordVariable.variableList;
    for (BLangRecordVariableKeyValue recordFieldKeyValue : variableList) {
        BLangVariable variable = recordFieldKeyValue.valueBindingPattern;
        // Each field is accessed as map["fieldName"] on the synthetic backing map.
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.stringType,
                recordFieldKeyValue.key.value);
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
            createSimpleVarDefStmt((BLangSimpleVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                    indexExpr, recordVarSymbol, parentIndexAccessExpr);
            continue;
        }
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) recordFieldKeyValue.valueBindingPattern;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                    new BArrayType(symTable.anyType), recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangTupleVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                    recordVarSymbol, arrayAccessExpr);
            continue;
        }
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, symTable.mapType, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangRecordVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                    recordVarSymbol, arrayAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, variable.type, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
        }
    }
    if (parentRecordVariable.restParam != null) {
        // Bind the rest binding pattern to a map holding every field that was not
        // explicitly matched by the binding patterns above.
        DiagnosticPos pos = parentBlockStmt.pos;
        BMapType restParamType = (BMapType) ((BLangVariable) parentRecordVariable.restParam).type;
        BLangSimpleVarRef variableReference;
        if (parentIndexAccessExpr != null) {
            // Nested record: materialize the accessed sub-map into "$map$1" first.
            BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1",
                    parentIndexAccessExpr.type, null, new BVarSymbol(0, names.fromString("$map$1"),
                            this.env.scope.owner.pkgID, parentIndexAccessExpr.type, this.env.scope.owner));
            mapVariable.expr = parentIndexAccessExpr;
            BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            variableDef.var = mapVariable;
            variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
        } else {
            variableReference = ASTBuilderUtil.createVariableRef(pos, ((BLangSimpleVariableDef)
parentBlockStmt.stmts.get(0)).var.symbol);
        }
        List<String> keysToRemove = parentRecordVariable.variableList.stream()
                .map(var -> var.getKey().getValue())
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos, keysToRemove,
                restParamType, parentBlockStmt);
        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
        BLangSimpleVariable restParam = (BLangSimpleVariable) parentRecordVariable.restParam;
        BLangSimpleVariableDef restParamVarDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
        restParamVarDef.var = restParam;
        restParamVarDef.var.type = restParamType;
        restParam.expr = varRef;
    }
}

/**
 * This method will create the relevant var def statements for reason and details of the error variable.
 * The var def statements are created by creating the reason() and detail() builtin methods.
 */
private void createVarDefStmts(BLangErrorVariable parentErrorVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol errorVariableSymbol, BLangIndexBasedAccess parentIndexBasedAccess) {
    BVarSymbol convertedErrorVarSymbol;
    if (parentIndexBasedAccess != null) {
        // Nested inside another pattern: cast the accessed value to error first.
        // The access's type is temporarily widened to any so the conversion applies.
        BType prevType = parentIndexBasedAccess.type;
        parentIndexBasedAccess.type = symTable.anyType;
        BLangSimpleVariableDef errorVarDef = createVarDef("$error$" + errorCount++, symTable.errorType,
                addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType), parentErrorVariable.pos);
        parentIndexBasedAccess.type = prevType;
        parentBlockStmt.addStatement(errorVarDef);
        convertedErrorVarSymbol = errorVarDef.var.symbol;
    } else {
        convertedErrorVarSymbol = errorVariableSymbol;
    }
    parentErrorVariable.reason.expr = generateErrorReasonBuiltinFunction(parentErrorVariable.reason.pos,
            parentErrorVariable.reason.type, convertedErrorVarSymbol, null);
    if (names.fromIdNode((parentErrorVariable.reason).name) == Names.IGNORE) {
        // `error(_, ...)` — no reason binding is emitted.
        parentErrorVariable.reason = null;
    } else {
        BLangSimpleVariableDef reasonVariableDef =
                ASTBuilderUtil.createVariableDefStmt(parentErrorVariable.reason.pos, parentBlockStmt);
        reasonVariableDef.var = parentErrorVariable.reason;
    }
    if ((parentErrorVariable.detail == null || parentErrorVariable.detail.isEmpty())
            && parentErrorVariable.restDetail == null) {
        return;
    }
    BType detailMapType;
    BType detailType = ((BErrorType) parentErrorVariable.type).detailType;
    if (detailType.tag == TypeTags.MAP) {
        detailMapType = detailType;
    } else {
        detailMapType = symTable.detailType;
    }
    // NOTE(review): detailMapType is computed above but not referenced in the rest
    // of this method — confirm whether the computation is still needed.
    parentErrorVariable.detailExpr = generateErrorDetailBuiltinFunction(
            parentErrorVariable.pos, convertedErrorVarSymbol, null);
    BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail",
            parentErrorVariable.detailExpr.type, parentErrorVariable.detailExpr, parentErrorVariable.pos);
    detailTempVarDef.type = parentErrorVariable.detailExpr.type;
    parentBlockStmt.addStatement(detailTempVarDef);
    this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : parentErrorVariable.detail) {
        BLangExpression detailEntryVar = createErrorDetailVar(detailEntry, detailTempVarDef.var.symbol);
        createAndAddBoundVariableDef(parentBlockStmt, detailEntry, detailEntryVar);
    }
    if (parentErrorVariable.restDetail != null && !parentErrorVariable.restDetail.name.value.equals(IGNORE.value)) {
        // Bind the rest detail to the detail map minus the explicitly bound keys.
        DiagnosticPos pos = parentErrorVariable.restDetail.pos;
        BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(
                pos, detailTempVarDef.var.symbol);
        List<String> keysToRemove = parentErrorVariable.detail.stream()
                .map(detail -> detail.key.getValue())
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVariable.pos, keysToRemove,
                parentErrorVariable.restDetail.type, parentBlockStmt);
        BLangSimpleVariableDef variableDefStmt = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
        variableDefStmt.var = ASTBuilderUtil.createVariable(pos,
                parentErrorVariable.restDetail.name.value, filteredDetail.type,
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol),
                parentErrorVariable.restDetail.symbol);
        BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos,
                ASTBuilderUtil.createVariableRef(pos, parentErrorVariable.restDetail.symbol),
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol));
        parentBlockStmt.addStatement(assignmentStmt);
    }
    rewrite(parentBlockStmt, env);
}

// Wraps the given error variable in a "$cast$temp$" variable of the target type,
// inserting a conversion expression unless the target is a record type.
private BLangSimpleVariableDef forceCastIfApplicable(BVarSymbol errorVarySymbol, DiagnosticPos pos,
                                                     BType targetType) {
    BVarSymbol errorVarSym = new BVarSymbol(Flags.PUBLIC, names.fromString("$cast$temp$"),
            this.env.enclPkg.packageID, targetType, this.env.scope.owner);
    BLangSimpleVarRef variableRef = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
    BLangExpression expr;
    if (targetType.tag == TypeTags.RECORD) {
        expr = variableRef;
    } else {
        expr = addConversionExprIfRequired(variableRef, targetType);
    }
    BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(pos, errorVarSym.name.value, targetType, expr,
            errorVarSym);
    return ASTBuilderUtil.createVariableDef(pos, errorVar);
}

// Produces a variable holding a copy of mapVarRef without the entries in keysToRemove,
// by chaining lang-lib calls: entries() -> filter(drop bound keys) -> map(entry value)
// -> constructFrom(targetType). All intermediate vars are defined in parentBlockStmt.
private BLangSimpleVariable generateRestFilter(BLangSimpleVarRef mapVarRef, DiagnosticPos pos,
                                               List<String> keysToRemove, BType targetType,
                                               BLangBlockStmt parentBlockStmt) {
    BLangExpression typeCastExpr = addConversionExprIfRequired(mapVarRef, targetType);
    int restNum = annonVarCount++;
    String name = "$map$ref$" + restNum;
    BLangSimpleVariable mapVariable = defVariable(pos, targetType, parentBlockStmt, typeCastExpr, name);
    BLangInvocation entriesInvocation = generateMapEntriesInvocation(
            ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol), typeCastExpr.type);
    String entriesVarName = "$map$ref$entries$" + restNum;
    BType entriesType = new BMapType(TypeTags.MAP,
            new BTupleType(Arrays.asList(symTable.stringType, ((BMapType) targetType).constraint)), null);
    BLangSimpleVariable entriesInvocationVar = defVariable(pos, entriesType,
parentBlockStmt, addConversionExprIfRequired(entriesInvocation, entriesType), entriesVarName);
    BLangLambdaFunction filter = createFuncToFilterOutRestParam(keysToRemove, pos);
    BLangInvocation filterInvocation = generateMapFilterInvocation(pos, entriesInvocationVar, filter);
    String filteredEntriesName = "$filtered$detail$entries" + restNum;
    BLangSimpleVariable filteredVar = defVariable(pos, entriesType, parentBlockStmt, filterInvocation,
            filteredEntriesName);
    String filteredVarName = "$detail$filtered" + restNum;
    BLangLambdaFunction backToMapLambda = generateEntriesToMapLambda(pos);
    BLangInvocation mapInvocation = generateMapMapInvocation(pos, filteredVar, backToMapLambda);
    BLangSimpleVariable filtered = defVariable(pos, targetType, parentBlockStmt, mapInvocation, filteredVarName);
    String filteredRestVarName = "$restVar$" + restNum;
    BLangInvocation constructed = generateConstructFromInvocation(pos, targetType, filtered.symbol);
    return defVariable(pos, targetType, parentBlockStmt,
            addConversionExprIfRequired(constructed, targetType), filteredRestVarName);
}

// Builds an invocation of the lang-lib `entries()` method on the given map expression.
private BLangInvocation generateMapEntriesInvocation(BLangExpression expr, BType type) {
    BLangInvocation invocationNode = createInvocationNode("entries", new ArrayList<>(), type);
    invocationNode.expr = expr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(type, names.fromString("entries"));
    invocationNode.requiredArgs = Lists.of(expr);
    invocationNode.type = invocationNode.symbol.type.getReturnType();
    invocationNode.langLibInvocation = true;
    return invocationNode;
}

// Builds an invocation of the lang-lib `map()` method on filteredVar with the given lambda.
private BLangInvocation generateMapMapInvocation(DiagnosticPos pos, BLangSimpleVariable filteredVar,
                                                 BLangLambdaFunction backToMapLambda) {
    BLangInvocation invocationNode = createInvocationNode("map", new ArrayList<>(), filteredVar.type);
    invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol);
    invocationNode.symbol = symResolver.lookupLangLibMethod(filteredVar.type, names.fromString("map"));
    invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol));
    invocationNode.type = invocationNode.symbol.type.getReturnType();
    invocationNode.requiredArgs.add(backToMapLambda);
    return invocationNode;
}

// Synthesizes `function ((string, any) entry) returns any { return entry[1]; }` —
// the lambda that maps key/value entry tuples back to their values. The function is
// desugared, registered on the enclosing package, and returned wrapped as a lambda.
private BLangLambdaFunction generateEntriesToMapLambda(DiagnosticPos pos) {
    String anonfuncName = "$anonGetValFunc$" + lambdaFunctionCount++;
    BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
    BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
            getStringAnyTupleType(), this.env.scope.owner);
    BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
            null, keyValSymbol);
    function.requiredParams.add(inputParameter);
    BLangValueType anyType = new BLangValueType();
    anyType.typeKind = TypeKind.ANY;
    anyType.type = symTable.anyType;
    function.returnTypeNode = anyType;
    BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
    function.body = functionBlock;
    // val = entry[1]; return val;
    BLangIndexBasedAccess indexBasesAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(pos,
            symTable.anyType, keyValSymbol, ASTBuilderUtil.createLiteral(pos, symTable.intType, (long) 1));
    BLangSimpleVariableDef tupSecondElem = createVarDef("val", indexBasesAccessExpr.type,
            indexBasesAccessExpr, pos);
    functionBlock.addStatement(tupSecondElem);
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
    returnStmt.expr = ASTBuilderUtil.createVariableRef(pos, tupSecondElem.var.symbol);
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
            new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true);
    functionSymbol.retType = function.returnTypeNode.type;
    functionSymbol.params = function.requiredParams.stream()
            .map(param -> param.symbol)
            .collect(Collectors.toList());
    functionSymbol.scope = env.scope;
    functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
            symTable.anyType, null);
    function.symbol = functionSymbol;
    rewrite(function, env);
    env.enclPkg.addFunction(function);
    return createLambdaFunction(function, functionSymbol);
}

// Builds an invocation of the lang-lib `filter()` method with the given predicate lambda.
private BLangInvocation generateMapFilterInvocation(DiagnosticPos pos, BLangSimpleVariable entriesInvocationVar,
                                                    BLangLambdaFunction filter) {
    BLangInvocation invocationNode = createInvocationNode("filter", new ArrayList<>(),
            entriesInvocationVar.type);
    invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol);
    invocationNode.symbol = symResolver.lookupLangLibMethod(entriesInvocationVar.type,
            names.fromString("filter"));
    invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol));
    invocationNode.type = invocationNode.symbol.type.getReturnType();
    invocationNode.requiredArgs.add(filter);
    return invocationNode;
}

// Defines a new variable with the given initializer in parentBlockStmt and registers
// its symbol in the current scope; returns the created variable node.
private BLangSimpleVariable defVariable(DiagnosticPos pos, BType varType, BLangBlockStmt parentBlockStmt,
                                        BLangExpression expression, String name) {
    Name varName = names.fromString(name);
    BLangSimpleVariable detailMap = ASTBuilderUtil.createVariable(pos, name, varType, expression,
            new BVarSymbol(Flags.PUBLIC, varName, env.enclPkg.packageID, varType, env.scope.owner));
    BLangSimpleVariableDef constructedMap = ASTBuilderUtil.createVariableDef(pos, detailMap);
    constructedMap.type = varType;
    parentBlockStmt.addStatement(constructedMap);
    env.scope.define(varName, detailMap.symbol);
    return detailMap;
}

// Adds the definition statement that binds one error-detail entry to its binding
// pattern — simple variable, record pattern, or tuple pattern.
private void createAndAddBoundVariableDef(BLangBlockStmt parentBlockStmt,
                                          BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                          BLangExpression detailEntryVar) {
    if (detailEntry.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
        BLangSimpleVariableDef errorDetailVar = createVarDef(
                ((BLangSimpleVariable) detailEntry.valueBindingPattern).name.value,
                detailEntry.valueBindingPattern.type, detailEntryVar, detailEntry.valueBindingPattern.pos);
parentBlockStmt.addStatement(errorDetailVar);
    } else if (detailEntry.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
        BLangRecordVariableDef recordVariableDef = ASTBuilderUtil.createRecordVariableDef(
                detailEntry.valueBindingPattern.pos, (BLangRecordVariable) detailEntry.valueBindingPattern);
        recordVariableDef.var.expr = detailEntryVar;
        recordVariableDef.type = symTable.recordType;
        parentBlockStmt.addStatement(recordVariableDef);
    } else if (detailEntry.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
        BLangTupleVariableDef tupleVariableDef = ASTBuilderUtil.createTupleVariableDef(
                detailEntry.valueBindingPattern.pos, (BLangTupleVariable) detailEntry.valueBindingPattern);
        parentBlockStmt.addStatement(tupleVariableDef);
    }
}

// Creates the index-based access `detail["key"]` for a destructured error-detail entry.
private BLangExpression createErrorDetailVar(BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                             BVarSymbol tempDetailVarSymbol) {
    BLangExpression detailEntryVar = createIndexBasedAccessExpr(
            detailEntry.valueBindingPattern.type,
            detailEntry.valueBindingPattern.pos,
            createStringLiteral(detailEntry.key.pos, detailEntry.key.value),
            tempDetailVarSymbol, null);
    if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
        bLangIndexBasedAccess.originalType = symTable.pureType;
    }
    return detailEntryVar;
}

// Folds the parts of a string template into a left-associated chain of `+` binary
// expressions, converting non-string/non-xml parts with toString() first.
private BLangExpression constructStringTemplateConcatExpression(List<BLangExpression> exprs) {
    BLangExpression concatExpr = null;
    BLangExpression currentExpr;
    for (BLangExpression expr : exprs) {
        currentExpr = expr;
        if (expr.type.tag != TypeTags.STRING && expr.type.tag != TypeTags.XML) {
            currentExpr = getToStringInvocationOnExpr(expr);
        }
        if (concatExpr == null) {
            concatExpr = currentExpr;
            continue;
        }
        // If either operand is xml, the concatenation result is xml; otherwise string.
        BType binaryExprType =
                TypeTags.isXMLTypeTag(concatExpr.type.tag) || TypeTags.isXMLTypeTag(currentExpr.type.tag)
                        ? symTable.xmlType : symTable.stringType;
        concatExpr = ASTBuilderUtil.createBinaryExpr(concatExpr.pos, concatExpr, currentExpr,
                binaryExprType, OperatorKind.ADD, null);
    }
    return concatExpr;
}

// Builds a `lang.value` toString(...) invocation on the given expression, converting
// the argument to the function's expected parameter type if required.
private BLangInvocation getToStringInvocationOnExpr(BLangExpression expression) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langValueModuleSymbol.scope
            .lookup(names.fromString(TO_STRING_FUNCTION_NAME)).symbol;
    List<BLangExpression> requiredArgs = new ArrayList<BLangExpression>() {{
        add(addConversionExprIfRequired(expression, symbol.params.get(0).type));
    }};
    return ASTBuilderUtil.createInvocationExprMethod(expression.pos, symbol, requiredArgs, new ArrayList<>(),
            symResolver);
}

// Builds the builtin detail() invocation on the given error value (either the parent
// index access or a direct reference to the error variable).
private BLangInvocation generateErrorDetailBuiltinFunction(DiagnosticPos pos, BVarSymbol errorVarySymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    BLangExpression onExpr = parentIndexBasedAccess != null
            ? parentIndexBasedAccess : ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
    return createLangLibInvocationNode(ERROR_DETAIL_FUNCTION_NAME, onExpr, new ArrayList<>(), null, pos);
}

// Builds the builtin reason() invocation on the given error value.
private BLangInvocation generateErrorReasonBuiltinFunction(DiagnosticPos pos, BType reasonType,
                                                           BVarSymbol errorVarSymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    BLangExpression onExpr = parentIndexBasedAccess != null
            ? parentIndexBasedAccess : ASTBuilderUtil.createVariableRef(pos, errorVarSymbol);
    return createLangLibInvocationNode(ERROR_REASON_FUNCTION_NAME, onExpr, new ArrayList<>(), reasonType, pos);
}

// Builds `targetType.constructFrom(source)` used to convert a filtered map into the
// target type; the result type is targetType|error.
private BLangInvocation generateConstructFromInvocation(DiagnosticPos pos, BType targetType,
                                                        BVarSymbol source) {
    BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
    BLangInvocation invocationNode = createInvocationNode(CONSTRUCT_FROM, new ArrayList<>(), typedescType);
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = targetType;
    typedescExpr.type = typedescType;
    invocationNode.expr = typedescExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(typedescType, names.fromString(CONSTRUCT_FROM));
    invocationNode.requiredArgs = Lists.of(typedescExpr, ASTBuilderUtil.createVariableRef(pos, source));
    invocationNode.type = BUnionType.create(null, targetType, symTable.errorType);
    return invocationNode;
}

// Synthesizes `function ((string, any) entry) returns boolean` that returns false
// for any key in toRemoveList and true otherwise — used to drop fields already
// bound by explicit binding patterns from the rest map.
private BLangLambdaFunction createFuncToFilterOutRestParam(List<String> toRemoveList, DiagnosticPos pos) {
    String anonfuncName = "$anonRestParamFilterFunc$" + lambdaFunctionCount++;
    BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
    BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
            getStringAnyTupleType(), this.env.scope.owner);
    BLangBlockFunctionBody functionBlock = createAnonymousFunctionBlock(pos, function, keyValSymbol);
    // key = entry[0];
    BLangIndexBasedAccess indexBasesAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(pos,
            symTable.anyType, keyValSymbol, ASTBuilderUtil.createLiteral(pos, symTable.intType, (long) 0));
    BLangSimpleVariableDef tupFirstElem = createVarDef("key", indexBasesAccessExpr.type,
            indexBasesAccessExpr, pos);
    functionBlock.addStatement(tupFirstElem);
    // One `if (key == <name>) { return false; }` per key to remove.
    for (String toRemoveItem : toRemoveList) {
        createIfStmt(pos, tupFirstElem.var.symbol, functionBlock, toRemoveItem);
    }
    BInvokableSymbol functionSymbol =
createReturnTrueStatement(pos, function, functionBlock);
    return createLambdaFunction(function, functionSymbol);
}

// Convenience overload: filters out the fields already bound by the record
// binding pattern's explicit field list.
private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVariable recordVariable,
                                                           DiagnosticPos pos) {
    List<String> fieldNamesToRemove = recordVariable.variableList.stream()
            .map(var -> var.getKey().getValue())
            .collect(Collectors.toList());
    return createFuncToFilterOutRestParam(fieldNamesToRemove, pos);
}

// Appends `if (<key expr> == "<key>") { return false; }` to the filter lambda body.
private void createIfStmt(DiagnosticPos pos, BVarSymbol inputParamSymbol, BLangBlockFunctionBody blockStmt,
                          String key) {
    BLangSimpleVarRef firstElemRef = ASTBuilderUtil.createVariableRef(pos, inputParamSymbol);
    BLangExpression converted = addConversionExprIfRequired(firstElemRef, symTable.stringType);
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, blockStmt);
    BLangBlockStmt ifBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, ifBlock);
    returnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, false);
    ifStmt.body = ifBlock;
    BLangGroupExpr groupExpr = new BLangGroupExpr();
    groupExpr.type = symTable.booleanType;
    BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, converted,
            ASTBuilderUtil.createLiteral(pos, symTable.stringType, key),
            symTable.booleanType, OperatorKind.EQUAL, null);
    binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            binaryExpr.opKind, binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type);
    groupExpr.expression = binaryExpr;
    ifStmt.expr = groupExpr;
}

// Wraps a desugared BLangFunction in a lambda-function expression node.
BLangLambdaFunction createLambdaFunction(BLangFunction function, BInvokableSymbol functionSymbol) {
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = function;
    lambdaFunction.type = functionSymbol.type;
    return lambdaFunction;
}

// Appends the terminal `return true;` to the filter lambda, finalizes and registers
// its function symbol, desugars the function, and adds it to the enclosing package.
private BInvokableSymbol createReturnTrueStatement(DiagnosticPos pos, BLangFunction function,
                                                  BLangBlockFunctionBody functionBlock) {
    BLangReturn trueReturnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
    trueReturnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true);
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
            new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true);
    functionSymbol.retType = function.returnTypeNode.type;
    functionSymbol.params = function.requiredParams.stream()
            .map(param -> param.symbol)
            .collect(Collectors.toList());
    functionSymbol.scope = env.scope;
    functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
            getRestType(functionSymbol), symTable.booleanType, null);
    function.symbol = functionSymbol;
    rewrite(function, env);
    env.enclPkg.addFunction(function);
    return functionSymbol;
}

// Sets up the `(string, any)` input parameter and boolean return type for an
// anonymous filter function, returning its (initially empty) body block.
private BLangBlockFunctionBody createAnonymousFunctionBlock(DiagnosticPos pos, BLangFunction function,
                                                            BVarSymbol keyValSymbol) {
    BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
            null, keyValSymbol);
    function.requiredParams.add(inputParameter);
    BLangValueType booleanTypeKind = new BLangValueType();
    booleanTypeKind.typeKind = TypeKind.BOOLEAN;
    booleanTypeKind.type = symTable.booleanType;
    function.returnTypeNode = booleanTypeKind;
    BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
    function.body = functionBlock;
    return functionBlock;
}

// The `(string, any)` tuple type used for map-entry lambda parameters.
private BTupleType getStringAnyTupleType() {
    ArrayList<BType> typeList = new ArrayList<BType>() {{
        add(symTable.stringType);
        add(symTable.anyType);
    }};
    return new BTupleType(typeList);
}

/**
 * This method creates a simple variable def and assigns and array expression based on the given indexExpr.
* * case 1: when there is no parent array access expression, but with the indexExpr : 1 * string s = x[1]; * * case 2: when there is a parent array expression : x[2] and indexExpr : 3 * string s = x[2][3]; * * case 3: when there is no parent array access expression, but with the indexExpr : name * string s = x[name]; * * case 4: when there is a parent map expression : x[name] and indexExpr : fName * string s = x[name][fName]; * * case 5: when there is a parent map expression : x[name] and indexExpr : 1 * string s = x[name][1]; */ private void createSimpleVarDefStmt(BLangSimpleVariable simpleVariable, BLangBlockStmt parentBlockStmt, BLangLiteral indexExpr, BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentArrayAccessExpr) { Name varName = names.fromIdNode(simpleVariable.name); if (varName == Names.IGNORE) { return; } final BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDefStmt(simpleVariable.pos, parentBlockStmt); simpleVariableDef.var = simpleVariable; simpleVariable.expr = createIndexBasedAccessExpr(simpleVariable.type, simpleVariable.pos, indexExpr, tupleVarSymbol, parentArrayAccessExpr); } @Override public void visit(BLangAssignment assignNode) { if (safeNavigateLHS(assignNode.varRef)) { BLangAccessExpression accessExpr = (BLangAccessExpression) assignNode.varRef; accessExpr.leafNode = true; result = rewriteSafeNavigationAssignment(accessExpr, assignNode.expr, assignNode.safeAssignment); result = rewrite(result, env); return; } assignNode.varRef = rewriteExpr(assignNode.varRef); assignNode.expr = rewriteExpr(assignNode.expr); assignNode.expr = addConversionExprIfRequired(rewriteExpr(assignNode.expr), assignNode.varRef.type); result = assignNode; } @Override public void visit(BLangTupleDestructure tupleDestructure) { final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(tupleDestructure.pos); BType runTimeType = new BArrayType(symTable.anyType); String name = "tuple"; final BLangSimpleVariable tuple = 
ASTBuilderUtil.createVariable(tupleDestructure.pos, name, runTimeType, null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType, this.env.scope.owner)); tuple.expr = tupleDestructure.expr; final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(tupleDestructure.pos, blockStmt); variableDef.var = tuple; createVarRefAssignmentStmts(tupleDestructure.varRef, blockStmt, tuple.symbol, null); createRestFieldAssignmentStmt(tupleDestructure, blockStmt, tuple.symbol); result = rewrite(blockStmt, env); } private void createRestFieldAssignmentStmt(BLangTupleDestructure tupleDestructure, BLangBlockStmt blockStmt, BVarSymbol tupleVarSymbol) { BLangTupleVarRef tupleVarRef = tupleDestructure.varRef; DiagnosticPos pos = blockStmt.pos; if (tupleVarRef.restParam != null) { BLangExpression tupleExpr = tupleDestructure.expr; BLangSimpleVarRef restParam = (BLangSimpleVarRef) tupleVarRef.restParam; BArrayType restParamType = (BArrayType) restParam.type; BLangArrayLiteral arrayExpr = createArrayLiteralExprNode(); arrayExpr.type = restParamType; BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt); restParamAssignment.varRef = restParam; restParamAssignment.varRef.type = restParamType; restParamAssignment.expr = arrayExpr; BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression(); startIndexLiteral.value = (long) tupleVarRef.expressions.size(); startIndexLiteral.type = symTable.intType; BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr); BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral, getModifiedIntRangeEndExpr(lengthInvocation)); BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode(); foreach.pos = pos; foreach.collection = intRangeInvocation; types.setForeachTypedBindingPatternType(foreach); final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i", foreach.varType); 
foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
        this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
foreach.isDeclaredWithVar = true;
BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
// Loop body: rest[length(rest)] = <source tuple>[$foreach$i] — appends the next member.
BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(restParam,
        createLengthInvocation(pos, restParam));
indexAccessExpr.type = restParamType.eType;
createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
foreach.body = foreachBody;
blockStmt.addStatement(foreach);
    }
}

/**
 * Builds an invocation of the lang-lib {@code length()} method on the given collection expression.
 *
 * @param pos        position to attach to the generated invocation
 * @param collection expression whose runtime length is queried
 * @return the typed {@code length()} invocation node
 */
private BLangInvocation createLengthInvocation(DiagnosticPos pos, BLangExpression collection) {
    BInvokableSymbol lengthInvokableSymbol = (BInvokableSymbol) symResolver
            .lookupLangLibMethod(collection.type, names.fromString(LENGTH_FUNCTION_NAME));
    BLangInvocation lengthInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos, lengthInvokableSymbol,
            Lists.of(collection), symResolver);
    lengthInvocation.argExprs = lengthInvocation.requiredArgs;
    lengthInvocation.type = lengthInvokableSymbol.type.getReturnType();
    return lengthInvocation;
}

/**
 * This method iterates through each member of the tupleVarRef and creates the relevant var-ref
 * assignment statements. It checks the node kind of each member and calls the related
 * var-ref creation method.
 *
 * Example:
 * ((a, b), c)) = (tuple)
 *
 * (a, b) is again a tuple, so it is a recursive var-ref creation.
 *
 * c is a simple var, so a simple var def will be created.
 *
 */
private void createVarRefAssignmentStmts(BLangTupleVarRef parentTupleVariable, BLangBlockStmt parentBlockStmt,
                                         BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    final List<BLangExpression> expressions = parentTupleVariable.expressions;
    for (int index = 0; index < expressions.size(); index++) {
        BLangExpression expression = expressions.get(index);
        // Leaf references (simple var / field access / index access / xml attribute access):
        // assign directly from <tuple>[index].
        if (NodeKind.SIMPLE_VARIABLE_REF == expression.getKind() ||
                NodeKind.FIELD_BASED_ACCESS_EXPR == expression.getKind() ||
                NodeKind.INDEX_BASED_ACCESS_EXPR == expression.getKind() ||
                NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == expression.getKind()) {
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(expression.pos, symTable.intType, (long) index);
            createSimpleVarRefAssignmentStmt((BLangVariableReference) expression, parentBlockStmt, indexExpr,
                    tupleVarSymbol, parentIndexAccessExpr);
            continue;
        }
        // Nested tuple ref: recurse with an index access expr chained onto the parent's access.
        if (expression.getKind() == NodeKind.TUPLE_VARIABLE_REF) {
            BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) expression;
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(tupleVarRef.pos, symTable.intType, (long) index);
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVarRef.pos,
                    new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts((BLangTupleVarRef) expression, parentBlockStmt, tupleVarSymbol,
                    arrayAccessExpr);
            continue;
        }
        // Nested record ref: recurse via the record-specific overload.
        if (expression.getKind() == NodeKind.RECORD_VARIABLE_REF) {
            BLangRecordVarRef recordVarRef = (BLangRecordVarRef) expression;
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(recordVarRef.pos, symTable.intType,
                    (long) index);
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts((BLangRecordVarRef) expression,
                    parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            // Register a type definition for the record type so it exists at the package level.
            TypeDefBuilderHelper.addTypeDefinition(recordVarRef.type, recordVarRef.type.tsymbol,
                    TypeDefBuilderHelper.createRecordTypeNode(
                            (BRecordType) recordVarRef.type, env.enclPkg.packageID, symTable, recordVarRef.pos),
                    env);
            continue;
        }
        // Nested error ref: recurse via the error-specific overload.
        if (expression.getKind() == NodeKind.ERROR_VARIABLE_REF) {
            BLangErrorVarRef errorVarRef = (BLangErrorVarRef) expression;
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(errorVarRef.pos, symTable.intType,
                    (long) index);
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, expression.type, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts((BLangErrorVarRef) expression, parentBlockStmt, tupleVarSymbol,
                    arrayAccessExpr);
        }
    }
}

/**
 * This method creates an assignment statement and assigns an array expression based on the given indexExpr.
 * References named {@code _} (ignore) are skipped; a numeric cast is added when required.
 */
private void createSimpleVarRefAssignmentStmt(BLangVariableReference simpleVarRef, BLangBlockStmt parentBlockStmt,
                                              BLangExpression indexExpr, BVarSymbol tupleVarSymbol,
                                              BLangIndexBasedAccess parentArrayAccessExpr) {
    if (simpleVarRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        Name varName = names.fromIdNode(((BLangSimpleVarRef) simpleVarRef).variableName);
        if (varName == Names.IGNORE) {
            // `_` binding: nothing to assign.
            return;
        }
    }
    BLangExpression assignmentExpr = createIndexBasedAccessExpr(simpleVarRef.type, simpleVarRef.pos, indexExpr,
            tupleVarSymbol, parentArrayAccessExpr);
    assignmentExpr = addConversionExprIfRequired(assignmentExpr, simpleVarRef.type);
    final BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(parentBlockStmt.pos,
            parentBlockStmt);
    assignmentStmt.varRef = simpleVarRef;
    assignmentStmt.expr = assignmentExpr;
}

// Builds <tupleVar>[indexExpr] (optionally chained under parentExpr) and wraps it in a
// conversion to varType when the target is a value type.
private BLangExpression createIndexBasedAccessExpr(BType varType, DiagnosticPos varPos, BLangExpression indexExpr,
                                                   BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentExpr) {
BLangIndexBasedAccess arrayAccess = ASTBuilderUtil.createIndexBasesAccessExpr(varPos, symTable.anyType,
        tupleVarSymbol, indexExpr);
arrayAccess.originalType = varType;
if (parentExpr != null) {
    arrayAccess.expr = parentExpr;
}
final BLangExpression assignmentExpr;
// Value types need an explicit conversion node from `any` down to the concrete type.
if (types.isValueType(varType)) {
    BLangTypeConversionExpr castExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    castExpr.expr = arrayAccess;
    castExpr.type = varType;
    assignmentExpr = castExpr;
} else {
    assignmentExpr = arrayAccess;
}
return assignmentExpr;
}

/**
 * Desugars a record destructuring statement: the source expression is first captured in a
 * temporary {@code map<any>} variable ({@code $map$0}), then each field reference is assigned
 * from that map by key.
 */
@Override
public void visit(BLangRecordDestructure recordDestructure) {
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(recordDestructure.pos);
    BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null);
    String name = "$map$0";
    final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(recordDestructure.pos, name, runTimeType,
            null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
                    this.env.scope.owner));
    mapVariable.expr = recordDestructure.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.
createVariableDefStmt(recordDestructure.pos, blockStmt);
variableDef.var = mapVariable;
createVarRefAssignmentStmts(recordDestructure.varRef, blockStmt, mapVariable.symbol, null);
result = rewrite(blockStmt, env);
}

/**
 * Desugars an error destructuring statement: the source error is captured in a temporary
 * {@code $error$} variable, then reason/detail references are assigned from it.
 */
@Override
public void visit(BLangErrorDestructure errorDestructure) {
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(errorDestructure.pos);
    String name = "$error$";
    final BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(errorDestructure.pos, name,
            symTable.errorType, null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                    symTable.errorType, this.env.scope.owner));
    errorVar.expr = errorDestructure.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(errorDestructure.pos,
            blockStmt);
    variableDef.var = errorVar;
    createVarRefAssignmentStmts(errorDestructure.varRef, blockStmt, errorVar.symbol, null);
    result = rewrite(blockStmt, env);
}

/**
 * Creates assignment statements for each field reference of a record var-ref, keyed by field
 * name into the temporary map variable; recurses for nested record/tuple/error references.
 */
private void createVarRefAssignmentStmts(BLangRecordVarRef parentRecordVarRef, BLangBlockStmt parentBlockStmt,
                                         BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    final List<BLangRecordVarRefKeyValue> variableRefList = parentRecordVarRef.recordRefFields;
    for (BLangRecordVarRefKeyValue varRefKeyValue : variableRefList) {
        BLangExpression variableReference = varRefKeyValue.variableReference;
        // Key into the map is the record field name (string), not a numeric index.
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variableReference.pos, symTable.stringType,
                varRefKeyValue.variableName.getValue());
        if (NodeKind.SIMPLE_VARIABLE_REF == variableReference.getKind() ||
                NodeKind.FIELD_BASED_ACCESS_EXPR == variableReference.getKind() ||
                NodeKind.INDEX_BASED_ACCESS_EXPR == variableReference.getKind() ||
                NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == variableReference.getKind()) {
            createSimpleVarRefAssignmentStmt((BLangVariableReference) variableReference, parentBlockStmt,
                    indexExpr, recordVarSymbol, parentIndexAccessExpr);
            continue;
        }
        if (NodeKind.RECORD_VARIABLE_REF == variableReference.getKind()) {
// Nested record ref: recurse with a map-typed index access chained onto the parent's access.
BLangRecordVarRef recordVariable = (BLangRecordVarRef) variableReference;
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
        parentRecordVarRef.pos, symTable.mapType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
    arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts(recordVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
continue;
            }
            if (NodeKind.TUPLE_VARIABLE_REF == variableReference.getKind()) {
                // Nested tuple ref inside a record field.
                BLangTupleVarRef tupleVariable = (BLangTupleVarRef) variableReference;
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                        symTable.tupleType, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarRefAssignmentStmts(tupleVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
                continue;
            }
            if (NodeKind.ERROR_VARIABLE_REF == variableReference.getKind()) {
                // Nested error ref inside a record field.
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        variableReference.pos, symTable.errorType, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarRefAssignmentStmts((BLangErrorVarRef) variableReference, parentBlockStmt, recordVarSymbol,
                        arrayAccessExpr);
            }
        }
        // Rest binding (`...rest`): bind the source map minus the explicitly-destructured keys.
        if (parentRecordVarRef.restParam != null) {
            DiagnosticPos pos = parentBlockStmt.pos;
            BMapType restParamType = (BMapType) ((BLangSimpleVarRef) parentRecordVarRef.restParam).type;
            BLangSimpleVarRef variableReference;
            if (parentIndexAccessExpr != null) {
                // Nested level: capture the sub-map in a fresh temp variable first.
                BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1", restParamType,
                        null, new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID,
                                restParamType, this.env.scope.owner));
                mapVariable.expr = parentIndexAccessExpr;
                BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
                variableDef.var = mapVariable;
                variableReference =
ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
            } else {
                // Top level: reuse the $map$0 temp defined as the first statement of the block.
                variableReference = ASTBuilderUtil.createVariableRef(pos,
                        ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
            }
            BLangSimpleVarRef restParam = (BLangSimpleVarRef) parentRecordVarRef.restParam;
            // Keys already bound explicitly must be excluded from the rest map.
            List<String> keysToRemove = parentRecordVarRef.recordRefFields.stream()
                    .map(field -> field.variableName.value)
                    .collect(Collectors.toList());
            BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos, keysToRemove,
                    restParamType, parentBlockStmt);
            BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
            BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, parentBlockStmt);
            restParamAssignment.varRef = restParam;
            restParamAssignment.varRef.type = restParamType;
            restParamAssignment.expr = varRef;
        }
    }

    /**
     * Creates assignment statements for an error var-ref: assigns the reason (unless ignored),
     * each named detail entry, and the rest binding filtered to the unextracted detail keys.
     */
    private void createVarRefAssignmentStmts(BLangErrorVarRef parentErrorVarRef, BLangBlockStmt parentBlockStmt,
                                             BVarSymbol errorVarySymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
        // Skip the reason assignment when the reason binding is `_`.
        if (parentErrorVarRef.reason.getKind() != NodeKind.SIMPLE_VARIABLE_REF ||
                names.fromIdNode(((BLangSimpleVarRef) parentErrorVarRef.reason).variableName) != Names.IGNORE) {
            BLangAssignment reasonAssignment = ASTBuilderUtil
                    .createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt);
            reasonAssignment.expr = generateErrorReasonBuiltinFunction(parentErrorVarRef.reason.pos,
                    symTable.stringType, errorVarySymbol, parentIndexAccessExpr);
            reasonAssignment.expr = addConversionExprIfRequired(reasonAssignment.expr,
                    parentErrorVarRef.reason.type);
            reasonAssignment.varRef = parentErrorVarRef.reason;
        }
        // Nothing else to do when there are no detail bindings and the rest binding is ignored.
        if (parentErrorVarRef.detail.isEmpty() && isIgnoredErrorRefRestVar(parentErrorVarRef)) {
            return;
        }
        BLangInvocation errorDetailBuiltinFunction = generateErrorDetailBuiltinFunction(parentErrorVarRef.pos,
                errorVarySymbol, parentIndexAccessExpr);
        BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail$" + errorCount++, symTable.detailType,
errorDetailBuiltinFunction, parentErrorVarRef.pos);
detailTempVarDef.type = symTable.detailType;
parentBlockStmt.addStatement(detailTempVarDef);
this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
// Assign each named detail entry and remember its key for the rest-filter below.
List<String> extractedKeys = new ArrayList<>();
for (BLangNamedArgsExpression detail : parentErrorVarRef.detail) {
    extractedKeys.add(detail.name.value);
    BLangVariableReference ref = (BLangVariableReference) detail.expr;
    BLangExpression detailEntryVar = createIndexBasedAccessExpr(ref.type, ref.pos,
            createStringLiteral(detail.name.pos, detail.name.value), detailTempVarDef.var.symbol, null);
    if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
        bLangIndexBasedAccess.originalType = symTable.pureType;
    }
    BLangAssignment detailAssignment = ASTBuilderUtil.createAssignmentStmt(ref.pos, parentBlockStmt);
    detailAssignment.varRef = ref;
    detailAssignment.expr = detailEntryVar;
}
// Rest binding: assign the detail map minus the keys already extracted above.
if (!isIgnoredErrorRefRestVar(parentErrorVarRef)) {
    BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
            detailTempVarDef.var.symbol);
    BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVarRef.restVar.pos,
            extractedKeys, parentErrorVarRef.restVar.type, parentBlockStmt);
    BLangAssignment restAssignment = ASTBuilderUtil.createAssignmentStmt(parentErrorVarRef.restVar.pos,
            parentBlockStmt);
    restAssignment.varRef = parentErrorVarRef.restVar;
    restAssignment.expr = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
            filteredDetail.symbol);
}
// Ensure record-typed detail types get an initializer function defined in their scope.
BErrorType errorType = (BErrorType) parentErrorVarRef.type;
if (errorType.detailType.getKind() == TypeKind.RECORD) {
    BRecordTypeSymbol tsymbol = (BRecordTypeSymbol) errorType.detailType.tsymbol;
    tsymbol.initializerFunc = createRecordInitFunc();
    tsymbol.scope.define(tsymbol.initializerFunc.funcName, tsymbol.initializerFunc.symbol);
}
}

private boolean
isIgnoredErrorRefRestVar(BLangErrorVarRef parentErrorVarRef) {
    // The rest binding is "ignored" when absent or explicitly bound to `_`.
    if (parentErrorVarRef.restVar == null) {
        return true;
    }
    if (parentErrorVarRef.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        return (((BLangSimpleVarRef) parentErrorVarRef.restVar).variableName.value.equals(IGNORE.value));
    }
    return false;
}

// Inside a desugared transaction function, abort is lowered to `return -1`.
@Override
public void visit(BLangAbort abortNode) {
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(abortNode.pos, symTable.intType, -1L);
    result = rewrite(returnStmt, env);
}

// Inside a desugared transaction function, retry is lowered to `return 1`.
@Override
public void visit(BLangRetry retryNode) {
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(retryNode.pos, symTable.intType, 1L);
    result = rewrite(returnStmt, env);
}

@Override
public void visit(BLangContinue nextNode) {
    result = nextNode;
}

@Override
public void visit(BLangBreak breakNode) {
    result = breakNode;
}

@Override
public void visit(BLangReturn returnNode) {
    if (returnNode.expr != null) {
        returnNode.expr = rewriteExpr(returnNode.expr);
    }
    result = returnNode;
}

@Override
public void visit(BLangPanic panicNode) {
    panicNode.expr = rewriteExpr(panicNode.expr);
    result = panicNode;
}

@Override
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
    xmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env);
    result = xmlnsStmtNode;
}

// XML namespace declarations become local or package-level nodes depending on the owner scope.
@Override
public void visit(BLangXMLNS xmlnsNode) {
    BLangXMLNS generatedXMLNSNode;
    xmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);
    BSymbol ownerSymbol = xmlnsNode.symbol.owner;
    // Local namespace declaration when the owner is a function or service; package-level otherwise.
    if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
            (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
        generatedXMLNSNode = new BLangLocalXMLNS();
    } else {
        generatedXMLNSNode = new BLangPackageXMLNS();
    }
    generatedXMLNSNode.namespaceURI = xmlnsNode.namespaceURI;
    generatedXMLNSNode.prefix = xmlnsNode.prefix;
    generatedXMLNSNode.symbol = xmlnsNode.symbol;
    result = generatedXMLNSNode;
}

// Desugars `lhs op= rhs` into a plain assignment, hoisting index expressions of an
// index-based LHS into temps so they are evaluated exactly once.
public void visit(BLangCompoundAssignment compoundAssignment) {
    BLangVariableReference varRef = compoundAssignment.varRef;
    if
(compoundAssignment.varRef.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        // Simple LHS: emit `lhs = <modified expr>` directly.
        if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            varRef = ASTBuilderUtil.createVariableRef(compoundAssignment.varRef.pos, varRef.symbol);
            varRef.lhsVar = true;
        }
        result = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, rewriteExpr(varRef),
                rewriteExpr(compoundAssignment.modifiedExpr));
        return;
    }
    // Index-based LHS (possibly nested, e.g. a[i][j]): hoist every index expression into a
    // temp var so each is evaluated once, then rebuild the access chain from the temps.
    List<BLangStatement> statements = new ArrayList<>();
    List<BLangSimpleVarRef> varRefs = new ArrayList<>();
    List<BType> types = new ArrayList<>();
    do {
        BLangSimpleVariableDef tempIndexVarDef = createVarDef("$temp" + ++indexExprCount + "$",
                ((BLangIndexBasedAccess) varRef).indexExpr.type, ((BLangIndexBasedAccess) varRef).indexExpr,
                compoundAssignment.pos);
        BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(tempIndexVarDef.pos,
                tempIndexVarDef.var.symbol);
        // Prepend: we walk the access chain outermost-in, but temps must be defined innermost-first.
        statements.add(0, tempIndexVarDef);
        varRefs.add(0, tempVarRef);
        types.add(0, varRef.type);
        varRef = (BLangVariableReference) ((BLangIndexBasedAccess) varRef).expr;
    } while (varRef.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR);
    // Rebuild the full access expression using the hoisted index temps.
    BLangVariableReference var = varRef;
    for (int ref = 0; ref < varRefs.size(); ref++) {
        var = ASTBuilderUtil.createIndexAccessExpr(var, varRefs.get(ref));
        var.type = types.get(ref);
    }
    var.type = compoundAssignment.varRef.type;
    // RHS: `<rebuilt lhs> op <rhs>`.
    BLangExpression rhsExpression = ASTBuilderUtil.createBinaryExpr(compoundAssignment.pos, var,
            compoundAssignment.expr, compoundAssignment.type, compoundAssignment.opKind, null);
    rhsExpression.type = compoundAssignment.modifiedExpr.type;
    BLangAssignment assignStmt = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, var,
            rhsExpression);
    statements.add(assignStmt);
    BLangBlockStmt bLangBlockStmt = ASTBuilderUtil.createBlockStmt(compoundAssignment.pos, statements);
    result = rewrite(bLangBlockStmt, env);
}

@Override
public void visit(BLangExpressionStmt exprStmtNode) {
    exprStmtNode.expr = rewriteExpr(exprStmtNode.expr);
    result = exprStmtNode;
}

@Override
public void
visit(BLangIf ifNode) {
    ifNode.expr = rewriteExpr(ifNode.expr);
    ifNode.body = rewrite(ifNode.body, env);
    ifNode.elseStmt = rewrite(ifNode.elseStmt, env);
    result = ifNode;
}

// Desugars a match statement into a block: capture the matched expression in a generated
// variable, then dispatch via an if/else chain produced by generateIfElseStmt.
@Override
public void visit(BLangMatch matchStmt) {
    BLangBlockStmt matchBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
    matchBlockStmt.pos = matchStmt.pos;
    String matchExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable matchExprVar = ASTBuilderUtil.createVariable(matchStmt.expr.pos, matchExprVarName,
            matchStmt.expr.type, matchStmt.expr, new BVarSymbol(0, names.fromString(matchExprVarName),
                    this.env.scope.owner.pkgID, matchStmt.expr.type, this.env.scope.owner));
    BLangSimpleVariableDef matchExprVarDef = ASTBuilderUtil.createVariableDef(matchBlockStmt.pos, matchExprVar);
    matchBlockStmt.stmts.add(matchExprVarDef);
    matchBlockStmt.stmts.add(generateIfElseStmt(matchStmt, matchExprVar));
    rewrite(matchBlockStmt, this.env);
    result = matchBlockStmt;
}

// Desugars foreach into an iterator-driven while loop. The collection is first captured in a
// `$data$` temp; built-in collections use the lang-lib iterator, objects use their own
// `__iterator__`-style attached function.
@Override
public void visit(BLangForeach foreach) {
    BLangBlockStmt blockNode;
    BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString("$data$"), this.env.scope.owner.pkgID,
            foreach.collection.type, this.env.scope.owner);
    BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(foreach.pos, "$data$",
            foreach.collection.type, foreach.collection, dataSymbol);
    BLangSimpleVariableDef dataVarDef = ASTBuilderUtil.createVariableDef(foreach.pos, dataVariable);
    BVarSymbol collectionSymbol = dataVariable.symbol;
    switch (foreach.collection.type.tag) {
        case TypeTags.STRING:
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
        case TypeTags.XML:
        case TypeTags.MAP:
        case TypeTags.STREAM:
        case TypeTags.RECORD:
            BInvokableSymbol iteratorSymbol = getLangLibIteratorInvokableSymbol(collectionSymbol);
            blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol,
                    iteratorSymbol, true);
            break;
        case TypeTags.OBJECT:
            iteratorSymbol = getIterableObjectIteratorInvokableSymbol(collectionSymbol);
            blockNode = desugarForeachWithIteratorDef(foreach,
dataVarDef, collectionSymbol, iteratorSymbol, false);
            break;
        default:
            // Non-iterable collection type: just emit the data var definition (semantic analysis
            // is expected to have reported the error already — NOTE(review): confirm).
            blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
            blockNode.stmts.add(0, dataVarDef);
            break;
    }
    rewrite(blockNode, this.env);
    result = blockNode;
}

// Builds the iterator variable definition, lowers the foreach body into a while loop, and
// prepends the `$data$` definition so the collection is evaluated once.
private BLangBlockStmt desugarForeachWithIteratorDef(BLangForeach foreach,
                                                     BLangSimpleVariableDef dataVariableDefinition,
                                                     BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVariableDef iteratorVarDef = getIteratorVariableDefinition(foreach.pos, collectionSymbol,
            iteratorInvokableSymbol, isIteratorFuncFromLangLib);
    BLangBlockStmt blockNode = desugarForeachToWhile(foreach, iteratorVarDef);
    blockNode.stmts.add(0, dataVariableDefinition);
    return blockNode;
}

// Finds the iterator function attached to an iterable object type.
// NOTE(review): assumes the type checker guarantees the function exists — `function` would be
// null (NPE on return) for an object without it; confirm that invariant.
public BInvokableSymbol getIterableObjectIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
    BObjectTypeSymbol typeSymbol = (BObjectTypeSymbol) collectionSymbol.type.tsymbol;
    BAttachedFunction iteratorFunc = null;
    for (BAttachedFunction func : typeSymbol.attachedFuncs) {
        if (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
            iteratorFunc = func;
            break;
        }
    }
    BAttachedFunction function = iteratorFunc;
    return function.symbol;
}

// Resolves the lang-lib iterator function for built-in iterable types.
BInvokableSymbol getLangLibIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
    return (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionSymbol.type,
            names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
}

// Lowers the foreach body into: `$result$ = iterator.next(); while ($result$ is record {...}) { ... }`.
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
    BVarSymbol iteratorSymbol = varDef.var.symbol;
    BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
            foreach.nillableResultType, this.env.scope.owner);
    BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
            foreach.nillableResultType, iteratorSymbol, resultSymbol);
    BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
// Loop condition: `$result$ is <result record type>` — false once next() returns nil.
BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
        .createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType);
BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
whileNode.pos = foreach.pos;
whileNode.expr = typeTestExpr;
whileNode.body = foreach.body;
BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol);
VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
// Bind the loop variable from `$result$.value` at the top of each iteration.
BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
valueAccessExpr.expr = addConversionExprIfRequired(valueAccessExpr.expr,
        types.getSafeType(valueAccessExpr.expr.type, true, false));
variableDefinitionNode.getVariable()
        .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
whileNode.body.stmts.add(1, resultAssignment);
BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.addStatement(varDef);
blockNode.addStatement(resultVariableDefinition);
blockNode.addStatement(whileNode);
return blockNode;
}

// Wraps an existing BType in an (anonymous) user-defined type node, for use in type-test exprs.
private BLangType getUserDefineTypeNode(BType type) {
    BLangUserDefinedType recordType = new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""),
            ASTBuilderUtil.createIdentifier(null, ""));
    recordType.type = type;
    return recordType;
}

@Override
public void visit(BLangWhile whileNode) {
    whileNode.expr = rewriteExpr(whileNode.expr);
    whileNode.body = rewrite(whileNode.body, env);
    result = whileNode;
}

// Desugars a lock block: acquire the lock, run the body under `trap` so the lock is always
// released, then re-panic any trapped error after unlocking.
@Override
public void visit(BLangLock lockNode) {
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
    blockStmt.addStatement(lockStmt);
    enclLocks.push(lockStmt);
    BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos,
symTable.nilType, Names.NIL_VALUE);
    BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
    // `$errorResult = trap <lock body as statement-expression>` — captures a panic as error|().
    BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
    statementExpression.type = symTable.nilType;
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.type = nillableError;
    trapExpr.expr = statementExpression;
    BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
            this.env.scope.owner.pkgID, nillableError, this.env.scope.owner);
    BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
            nillableError, trapExpr, nillableErrorVarSymbol);
    BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
    blockStmt.addStatement(simpleVariableDef);
    // Unlock happens unconditionally after the trapped body.
    BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
    blockStmt.addStatement(unLockStmt);
    // `if ($errorResult is error) { panic $errorResult; }` — re-raise after releasing the lock.
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = lockNode.pos;
    panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
    ifBody.addStatement(panicNode);
    BLangTypeTestExpr isErrorTest = ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
    isErrorTest.type = symTable.booleanType;
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
    blockStmt.addStatement(ifelse);
    result = rewrite(blockStmt, env);
    enclLocks.pop();
}

@Override
public void visit(BLangLockStmt lockStmt) {
    result = lockStmt;
}

@Override
public void visit(BLangUnLockStmt unLockStmt) {
    result = unLockStmt;
}

@Override
public void visit(BLangTransaction transactionNode) {
    DiagnosticPos pos = transactionNode.pos;
    BType trxReturnType = symTable.intType;
    BType otherReturnType =
symTable.nilType; BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType); BLangType otherReturnNode = ASTBuilderUtil.createTypeNode(otherReturnType); DiagnosticPos invPos = transactionNode.pos; /* transaction block code will be desugar to function which returns int. Return value determines the status of the transaction code. ex. 0 = successful 1 = retry -1 = abort Since transaction block code doesn't return anything, we need to add return statement at end of the block unless we have abort or retry statement. */ DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src, invPos.eLine, invPos.eLine, invPos.sCol, invPos.sCol); BLangStatement statement = null; if (!transactionNode.transactionBody.stmts.isEmpty()) { statement = transactionNode.transactionBody.stmts.get(transactionNode.transactionBody.stmts.size() - 1); } if (statement == null || !(statement.getKind() == NodeKind.ABORT) && !(statement.getKind() == NodeKind.ABORT)) { BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(returnStmtPos, trxReturnType, 0L); transactionNode.transactionBody.addStatement(returnStmt); } if (transactionNode.abortedBody == null) { transactionNode.abortedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos); } if (transactionNode.committedBody == null) { transactionNode.committedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos); } if (transactionNode.onRetryBody == null) { transactionNode.onRetryBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos); } if (transactionNode.retryCount == null) { transactionNode.retryCount = ASTBuilderUtil.createLiteral(pos, symTable.intType, 3L); } BLangLambdaFunction trxMainFunc = createLambdaFunction(pos, "$anonTrxMainFunc$", Collections.emptyList(), trxReturnNode, transactionNode.transactionBody.stmts, env, transactionNode.transactionBody.scope); BLangLambdaFunction trxOnRetryFunc = createLambdaFunction(pos, "$anonTrxOnRetryFunc$", Collections.emptyList(), otherReturnNode, transactionNode.onRetryBody.stmts, 
env, transactionNode.onRetryBody.scope);
    BLangLambdaFunction trxCommittedFunc = createLambdaFunction(pos, "$anonTrxCommittedFunc$",
            Collections.emptyList(), otherReturnNode, transactionNode.committedBody.stmts,
            env, transactionNode.committedBody.scope);
    BLangLambdaFunction trxAbortedFunc = createLambdaFunction(pos, "$anonTrxAbortedFunc$",
            Collections.emptyList(), otherReturnNode, transactionNode.abortedBody.stmts,
            env, transactionNode.abortedBody.scope);
    // The lambdas close over the enclosing environment; clone it so later mutation is isolated.
    trxMainFunc.capturedClosureEnv = env.createClone();
    trxOnRetryFunc.capturedClosureEnv = env.createClone();
    trxCommittedFunc.capturedClosureEnv = env.createClone();
    trxAbortedFunc.capturedClosureEnv = env.createClone();
    // Replace the whole transaction statement with a call to the transaction initiator function
    // in ballerina/transactions, passing the block id, retry count, and the four lambdas.
    PackageID packageID = new PackageID(Names.BALLERINA_ORG, Names.TRANSACTION_PACKAGE, Names.EMPTY);
    BPackageSymbol transactionPkgSymbol = new BPackageSymbol(packageID, null, 0);
    BInvokableSymbol invokableSymbol =
            (BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(transactionPkgSymbol),
                    TRX_INITIATOR_BEGIN_FUNCTION);
    BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(pos, symTable.stringType,
            getTransactionBlockId());
    List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, transactionNode.retryCount, trxMainFunc,
            trxOnRetryFunc, trxCommittedFunc, trxAbortedFunc);
    BLangInvocation trxInvocation = ASTBuilderUtil.createInvocationExprMethod(pos, invokableSymbol,
            requiredArgs, Collections.emptyList(), symResolver);
    BLangExpressionStmt stmt = ASTBuilderUtil.createExpressionStmt(pos, ASTBuilderUtil.createBlockStmt(pos));
    stmt.expr = trxInvocation;
    result = rewrite(stmt, env);
}

// Unique id per transaction block: "<org>$<pkg>$<running index>".
private String getTransactionBlockId() {
    return env.enclPkg.packageID.orgName + "$" + env.enclPkg.packageID.name + "$" + transactionIndex++;
}

/**
 * Creates a lambda (anonymous function) node with the given parameters, return type and body,
 * defines it in the enclosing package, and types it accordingly.
 */
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 List<BLangSimpleVariable> lambdaFunctionVariable,
                                                 TypeNode returnType, BLangFunctionBody lambdaBody) {
    BLangLambdaFunction lambdaFunction =
(BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
    lambdaFunction.function = func;
    func.requiredParams.addAll(lambdaFunctionVariable);
    func.setReturnTypeNode(returnType);
    func.desugaredReturnType = true;
    defineFunction(func, env.enclPkg);
    // defineFunction may rebuild the param list; re-read it so the symbols are the defined ones.
    lambdaFunctionVariable = func.requiredParams;
    func.body = lambdaBody;
    // Mark undesugared so the generated function is picked up by a later desugar pass.
    func.desugared = false;
    lambdaFunction.pos = pos;
    List<BType> paramTypes = new ArrayList<>();
    lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
    lambdaFunction.type = new BInvokableType(paramTypes, func.symbol.type.getReturnType(), null);
    return lambdaFunction;
}

/**
 * Overload that first rewrites the given statements in a fresh function-body environment
 * (using the supplied transaction scope), then delegates to the body-based overload.
 */
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 List<BLangSimpleVariable> lambdaFunctionVariable,
                                                 TypeNode returnType, List<BLangStatement> fnBodyStmts,
                                                 SymbolEnv env, Scope trxScope) {
    BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    body.scope = trxScope;
    SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
    body.stmts = rewriteStmt(fnBodyStmts, bodyEnv);
    return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body);
}

/**
 * Overload that creates a parameterless, bodiless lambda shell with only a return type;
 * the caller is expected to fill in the body.
 */
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 TypeNode returnType) {
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
    lambdaFunction.function = func;
    func.setReturnTypeNode(returnType);
    func.desugaredReturnType = true;
    defineFunction(func, env.enclPkg);
    func.desugared = false;
    lambdaFunction.pos = pos;
    return lambdaFunction;
}

// Defines a generated function in the target package's symbol table and registers it
// among the package's functions and top-level nodes.
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
    final BPackageSymbol packageSymbol = targetPkg.symbol;
    final SymbolEnv packageEnv =
this.symTable.pkgEnvMap.get(packageSymbol);
    // Define the generated function in the package scope and register it as a
    // top-level node so later phases (codegen etc.) pick it up.
    symbolEnter.defineNode(funcNode, packageEnv);
    packageEnv.enclPkg.functions.add(funcNode);
    packageEnv.enclPkg.topLevelNodes.add(funcNode);
}

@Override
public void visit(BLangForkJoin forkJoin) {
    // Nothing to desugar at this level; the node passes through unchanged.
    result = forkJoin;
}

@Override
public void visit(BLangLiteral literalExpr) {
    // A byte-array (blob) literal is desugared into an explicit array literal of
    // byte values; every other literal passes through unchanged.
    if (literalExpr.type.tag == TypeTags.ARRAY && ((BArrayType) literalExpr.type).eType.tag == TypeTags.BYTE) {
        result = rewriteBlobLiteral(literalExpr);
        return;
    }
    result = literalExpr;
}

/**
 * Desugars a blob literal (e.g. {@code base64 `...`} or base16 form) into a
 * {@link BLangArrayLiteral} whose elements are the decoded byte values.
 *
 * @param literalExpr blob literal whose string value holds the radix prefix and backtick-quoted content
 * @return an array literal node containing one byte literal per decoded byte
 */
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
    String[] result = getBlobTextValue((String) literalExpr.value);
    byte[] values;
    if (BASE_64.equals(result[0])) {
        values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8));
    } else {
        // Any prefix other than base64 is decoded as hexadecimal text.
        values = hexStringToByteArray(result[1]);
    }
    BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteralNode.type = literalExpr.type;
    arrayLiteralNode.pos = literalExpr.pos;
    arrayLiteralNode.exprs = new ArrayList<>();
    for (byte b : values) {
        arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));
    }
    return arrayLiteralNode;
}

/**
 * Splits the raw blob literal text into its radix prefix and its backtick-quoted payload.
 * All spaces are stripped first, so formatted literals are accepted.
 *
 * @param blobLiteralNodeText raw literal text, e.g. {@code base16 `AA BB`}
 * @return a two-element array: [0] the prefix before the first backtick, [1] the content between the backticks
 */
private String[] getBlobTextValue(String blobLiteralNodeText) {
    String nodeText = blobLiteralNodeText.replaceAll(" ", "");
    String[] result = new String[2];
    result[0] = nodeText.substring(0, nodeText.indexOf('`'));
    result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`'));
    return result;
}

/**
 * Decodes a hexadecimal string into its byte values. Assumes an even-length,
 * well-formed hex string (presumably validated earlier in the compile pipeline —
 * an odd length would index past the end).
 *
 * @param str hex text, two characters per byte
 * @return decoded bytes
 */
private static byte[] hexStringToByteArray(String str) {
    int len = str.length();
    byte[] data = new byte[len / 2];
    for (int i = 0; i < len; i += 2) {
        // High nibble from the first character, low nibble from the second.
        data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 4) + Character.digit(str.charAt(i + 1), 16));
    }
    return data;
}

@Override
public void visit(BLangListConstructorExpr listConstructor) {
    // Rewrite members first, then re-emit the constructor as the concrete literal
    // node that matches its contextually-determined type (continues below).
    listConstructor.exprs = rewriteExprs(listConstructor.exprs);
    BLangExpression expr;
    if (listConstructor.type.tag == TypeTags.TUPLE) {
        expr = new
BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type); result = rewriteExpr(expr); } else if (listConstructor.type.tag == TypeTags.JSON) { expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.type)); result = rewriteExpr(expr); } else if (getElementType(listConstructor.type).tag == TypeTags.JSON) { expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.type); result = rewriteExpr(expr); } else if (listConstructor.type.tag == TypeTags.TYPEDESC) { final BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = listConstructor.typedescType; typedescExpr.type = symTable.typeDesc; result = rewriteExpr(typedescExpr); } else { expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type); result = rewriteExpr(expr); } } @Override public void visit(BLangArrayLiteral arrayLiteral) { arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs); if (arrayLiteral.type.tag == TypeTags.JSON) { result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.type)); return; } else if (getElementType(arrayLiteral.type).tag == TypeTags.JSON) { result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.type); return; } result = arrayLiteral; } @Override public void visit(BLangTupleLiteral tupleLiteral) { if (tupleLiteral.isTypedescExpr) { final BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = tupleLiteral.typedescType; typedescExpr.type = symTable.typeDesc; result = rewriteExpr(typedescExpr); return; } tupleLiteral.exprs.forEach(expr -> { BType expType = expr.impConversionExpr == null ? 
expr.type : expr.impConversionExpr.type; types.setImplicitCastExpr(expr, expType, symTable.anyType); }); tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs); result = tupleLiteral; } @Override public void visit(BLangGroupExpr groupExpr) { if (groupExpr.isTypedescExpr) { final BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = groupExpr.typedescType; typedescExpr.type = symTable.typeDesc; result = rewriteExpr(typedescExpr); } else { result = rewriteExpr(groupExpr.expression); } } @Override public void visit(BLangRecordLiteral recordLiteral) { List<RecordLiteralNode.RecordField> fields = recordLiteral.fields; fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2))); result = rewriteExpr(rewriteMappingConstructor(recordLiteral)); } @Override public void visit(BLangSimpleVarRef varRefExpr) { BLangSimpleVarRef genVarRefExpr = varRefExpr; if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) { BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName); qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol; qnameExpr.localname = varRefExpr.variableName; qnameExpr.prefix = varRefExpr.pkgAlias; qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI; qnameExpr.isUsedInXML = false; qnameExpr.pos = varRefExpr.pos; qnameExpr.type = symTable.stringType; result = qnameExpr; return; } if (varRefExpr.symbol == null) { result = varRefExpr; return; } if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) { BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol; if (varSymbol.originalSymbol != null) { varRefExpr.symbol = varSymbol.originalSymbol; } } BSymbol ownerSymbol = varRefExpr.symbol.owner; if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION && varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) { genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol); } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE && !((varRefExpr.symbol.tag & 
SymTag.CONSTANT) == SymTag.CONSTANT)) { genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol); } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE || (ownerSymbol.tag & SymTag.LET) == SymTag.LET) { genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol); } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) { genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol); } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE || (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) { if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) { BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol; if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) { BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType, constSymbol.value.value); result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.type)); return; } } genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol); } genVarRefExpr.type = varRefExpr.type; genVarRefExpr.pos = varRefExpr.pos; if ((varRefExpr.lhsVar) || genVarRefExpr.symbol.name.equals(IGNORE)) { genVarRefExpr.lhsVar = varRefExpr.lhsVar; genVarRefExpr.type = varRefExpr.symbol.type; result = genVarRefExpr; return; } genVarRefExpr.lhsVar = varRefExpr.lhsVar; BType targetType = genVarRefExpr.type; genVarRefExpr.type = genVarRefExpr.symbol.type; BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType); result = expression.impConversionExpr != null ? 
expression.impConversionExpr : expression; } @Override public void visit(BLangFieldBasedAccess fieldAccessExpr) { if (safeNavigate(fieldAccessExpr)) { result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr)); return; } BLangAccessExpression targetVarRef = fieldAccessExpr; BType varRefType = fieldAccessExpr.expr.type; fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr); if (!types.isSameType(fieldAccessExpr.expr.type, varRefType)) { fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType); } BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.pos, fieldAccessExpr.field.value); int varRefTypeTag = varRefType.tag; if (varRefTypeTag == TypeTags.OBJECT || (varRefTypeTag == TypeTags.UNION && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) { if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) { targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol); } else { targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit, (BVarSymbol) fieldAccessExpr.symbol, false); } } else if (varRefTypeTag == TypeTags.RECORD || (varRefTypeTag == TypeTags.UNION && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) { if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) { targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol); } else { targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit, (BVarSymbol) fieldAccessExpr.symbol, false); } } else if (types.isLax(varRefType)) { if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) { if (varRefType.tag 
== TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) { result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr)); return; } fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType); targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit); } else { targetVarRef = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr); } } else if (varRefTypeTag == TypeTags.MAP) { targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit); } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) { targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit, fieldAccessExpr.fieldKind); } targetVarRef.lhsVar = fieldAccessExpr.lhsVar; targetVarRef.type = fieldAccessExpr.type; targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess; result = targetVarRef; } private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) { BLangStatementExpression statementExpression = new BLangStatementExpression(); BLangBlockStmt block = new BLangBlockStmt(); statementExpression.stmt = block; BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.type, symTable.errorType); DiagnosticPos pos = fieldAccessExpr.pos; BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos); block.addStatement(result); BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol); resultRef.type = fieldAccessType; statementExpression.type = fieldAccessType; BLangLiteral mapIndex = ASTBuilderUtil.createLiteral( fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value); BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex); BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.type, symTable.nilType); mapAccessExpr.type = xmlOrNil; BLangSimpleVariableDef mapResult = 
createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos); BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol); block.addStatement(mapResult); BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block); BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType); ifStmt.expr = isLikeNilExpr; BLangBlockStmt resultNilBody = new BLangBlockStmt(); ifStmt.body = resultNilBody; BLangBlockStmt resultHasValueBody = new BLangBlockStmt(); ifStmt.elseStmt = resultHasValueBody; BLangInvocation errorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode(); BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue("error"); errorInvocation.name = name; errorInvocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); errorInvocation.symbol = symTable.errorConstructor; errorInvocation.type = symTable.errorType; ArrayList<BLangExpression> errorCtorArgs = new ArrayList<>(); errorInvocation.requiredArgs = errorCtorArgs; errorCtorArgs.add(createStringLiteral(pos, "{" + BLangConstants.MAP_LANG_LIB + "}InvalidKey")); BLangNamedArgsExpression message = new BLangNamedArgsExpression(); message.name = ASTBuilderUtil.createIdentifier(pos, "key"); message.expr = createStringLiteral(pos, fieldAccessExpr.field.value); errorCtorArgs.add(message); BLangSimpleVariableDef errorDef = createVarDef("_$_invalid_key_error", symTable.errorType, errorInvocation, pos); resultNilBody.addStatement(errorDef); BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol); BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody); errorVarAssignment.varRef = resultRef; errorVarAssignment.expr = errorRef; BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt( pos, resultHasValueBody); mapResultAssignment.varRef = resultRef; mapResultAssignment.expr = mapResultRef; 
statementExpression.expr = resultRef;
    return statementExpression;
}

/**
 * Rewrites a field access on an XML value into a langlib invocation: the special
 * field {@code _} becomes an element-name lookup, every other field name becomes
 * an attribute lookup. For ns-prefixed access the name is first expanded to the
 * {@code {namespaceURI}localName} form.
 *
 * @param fieldAccessExpr the XML field access to rewrite
 * @return the equivalent langlib invocation node
 */
private BLangAccessExpression rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    ArrayList<BLangExpression> args = new ArrayList<>();
    String fieldName = fieldAccessExpr.field.value;
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);
    }
    // `x._` accesses the element name rather than an attribute.
    if (fieldName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }
    BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);
    args.add(attributeNameLiteral);
    // The second argument tells the langlib whether this was an optional (`?.`) access.
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr,
            args, new ArrayList<>());
}

// Wraps the given access expression's optional-access flag as a rewritten boolean literal.
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    return rewrite(
            createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess()), env);
}

// Builds an expanded XML qualified name of the form `{nsURI}localName`.
private String createExpandedQName(String nsURI, String localName) {
    return "{" + nsURI + "}" + localName;
}

@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    // Safe-navigation accesses (`?.`-style chains) get their own rewrite first.
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }
    BLangVariableReference targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
    BType varRefType = indexAccessExpr.expr.type;
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    // Restore the pre-rewrite static type if the rewrite changed it.
    if (!types.isSameType(indexAccessExpr.expr.type, varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }
    // Replace the generic index access with the container-specific access node
    // (continues below for list/string/xml containers).
    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef =
new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr); } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) { targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false); } else if (types.isSubTypeOfList(varRefType)) { targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr); } else if (types.isAssignable(varRefType, symTable.stringType)) { indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType); targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr); } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr); } targetVarRef.lhsVar = indexAccessExpr.lhsVar; targetVarRef.type = indexAccessExpr.type; result = targetVarRef; } @Override public void visit(BLangInvocation iExpr) { BLangInvocation genIExpr = iExpr; if (iExpr.symbol != null && iExpr.symbol.kind == SymbolKind.ERROR_CONSTRUCTOR) { result = rewriteErrorConstructor(iExpr); } reorderArguments(iExpr); iExpr.requiredArgs = rewriteExprs(iExpr.requiredArgs); fixNonRestArgTypeCastInTypeParamInvocation(iExpr); iExpr.restArgs = rewriteExprs(iExpr.restArgs); annotationDesugar.defineStatementAnnotations(iExpr.annAttachments, iExpr.pos, iExpr.symbol.pkgID, iExpr.symbol.owner, env); if (iExpr.functionPointerInvocation) { visitFunctionPointerInvocation(iExpr); return; } iExpr.expr = rewriteExpr(iExpr.expr); result = genIExpr; if (iExpr.expr == null) { fixTypeCastInTypeParamInvocation(iExpr, genIExpr); if (iExpr.exprSymbol == null) { return; } iExpr.expr = ASTBuilderUtil.createVariableRef(iExpr.pos, iExpr.exprSymbol); iExpr.expr = rewriteExpr(iExpr.expr); } switch (iExpr.expr.type.tag) { case TypeTags.OBJECT: case 
TypeTags.RECORD: if (!iExpr.langLibInvocation) { List<BLangExpression> argExprs = new ArrayList<>(iExpr.requiredArgs); argExprs.add(0, iExpr.expr); BLangAttachedFunctionInvocation attachedFunctionInvocation = new BLangAttachedFunctionInvocation(iExpr.pos, argExprs, iExpr.restArgs, iExpr.symbol, iExpr.type, iExpr.expr, iExpr.async); attachedFunctionInvocation.actionInvocation = iExpr.actionInvocation; attachedFunctionInvocation.name = iExpr.name; attachedFunctionInvocation.annAttachments = iExpr.annAttachments; result = genIExpr = attachedFunctionInvocation; } break; } fixTypeCastInTypeParamInvocation(iExpr, genIExpr); } private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) { if (!iExpr.langLibInvocation) { return; } List<BLangExpression> requiredArgs = iExpr.requiredArgs; List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params; for (int i = 1; i < requiredArgs.size(); i++) { requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), params.get(i).type)); } } private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) { if (iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(((BInvokableSymbol) iExpr.symbol).retType)) { BType originalInvType = genIExpr.type; genIExpr.type = ((BInvokableSymbol) genIExpr.symbol).retType; BLangExpression expr = addConversionExprIfRequired(genIExpr, originalInvType); if (expr.getKind() == NodeKind.TYPE_CONVERSION_EXPR) { this.result = expr; return; } BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); conversionExpr.expr = genIExpr; conversionExpr.targetType = originalInvType; conversionExpr.type = originalInvType; conversionExpr.pos = genIExpr.pos; this.result = conversionExpr; } } private BLangInvocation rewriteErrorConstructor(BLangInvocation iExpr) { BLangExpression reasonExpr = iExpr.requiredArgs.get(0); if (reasonExpr.impConversionExpr != null && 
reasonExpr.impConversionExpr.targetType.tag != TypeTags.STRING) { reasonExpr.impConversionExpr = null; } reasonExpr = addConversionExprIfRequired(reasonExpr, symTable.stringType); reasonExpr = rewriteExpr(reasonExpr); iExpr.requiredArgs.remove(0); iExpr.requiredArgs.add(reasonExpr); BLangExpression errorDetail; BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(iExpr.pos, ((BErrorType) iExpr.symbol.type).detailType); List<BLangExpression> namedArgs = iExpr.requiredArgs.stream() .filter(a -> a.getKind() == NodeKind.NAMED_ARGS_EXPR) .collect(Collectors.toList()); if (namedArgs.isEmpty()) { errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.type); } else { for (BLangExpression arg : namedArgs) { BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg; BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField(); member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos, symTable.stringType, namedArg.name.value)); if (recordLiteral.type.tag == TypeTags.RECORD) { member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType); } else { member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.type); } recordLiteral.fields.add(member); iExpr.requiredArgs.remove(arg); } errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), ((BErrorType) iExpr.symbol.type).detailType); } iExpr.requiredArgs.add(errorDetail); return iExpr; } public void visit(BLangTypeInit typeInitExpr) { if (typeInitExpr.type.tag == TypeTags.STREAM) { result = rewriteExpr(desugarStreamTypeInit(typeInitExpr)); } else { result = rewrite(desugarObjectTypeInit(typeInitExpr), env); } } private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) { typeInitExpr.desugared = true; BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos); BType objType = getObjectType(typeInitExpr.type); 
BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos); BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol); blockStmt.addStatement(objVarDef); typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol; typeInitExpr.initInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol; if (typeInitExpr.initInvocation.type.tag == TypeTags.NIL) { BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt); initInvExpr.expr = typeInitExpr.initInvocation; typeInitExpr.initInvocation.name.value = Names.GENERATED_INIT_SUFFIX.value; BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef); stmtExpr.type = objVarRef.symbol.type; return stmtExpr; } BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitExpr.initInvocation.type, typeInitExpr.initInvocation, typeInitExpr.pos); blockStmt.addStatement(initInvRetValVarDef); BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.type, null, typeInitExpr.pos); blockStmt.addStatement(resultVarDef); BLangSimpleVarRef initRetValVarRefInCondition = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol); BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos); BLangTypeTestExpr isErrorTest = ASTBuilderUtil.createTypeTestExpr(typeInitExpr.pos, initRetValVarRefInCondition, getErrorTypeNode()); isErrorTest.type = symTable.booleanType; BLangSimpleVarRef thenInitRetValVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol); BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol); BLangAssignment errAssignment = ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, thenResultVarRef, thenInitRetValVarRef); thenStmt.addStatement(errAssignment); BLangSimpleVarRef elseResultVarRef = 
ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol); BLangAssignment objAssignment = ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, elseResultVarRef, objVarRef); BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos); elseStmt.addStatement(objAssignment); BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(typeInitExpr.pos, isErrorTest, thenStmt, elseStmt); blockStmt.addStatement(ifelse); BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol); BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef); stmtExpr.type = resultVarRef.symbol.type; return stmtExpr; } private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) { BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope .lookup(Names.CONSTRUCT_STREAM).symbol; BType targetType = ((BStreamType) typeInitExpr.type).constraint; BType errorType = ((BStreamType) typeInitExpr.type).error; BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol); BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = targetType; typedescExpr.type = typedescType; BLangExpression iteratorObj = typeInitExpr.argsExpr.get(0); BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod( typeInitExpr.pos, symbol, new ArrayList<>(Lists.of(typedescExpr, iteratorObj)), symResolver); streamConstructInvocation.type = new BStreamType(TypeTags.STREAM, targetType, errorType, null); return streamConstructInvocation; } private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, DiagnosticPos pos) { BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name)); if (objSym == null || objSym == symTable.notFoundSymbol) { objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type, this.env.scope.owner); } BLangSimpleVariable 
objVar = ASTBuilderUtil.createVariable(pos, "$" + name + "$", type, expr, (BVarSymbol) objSym); BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(pos); objVarDef.var = objVar; objVarDef.type = objVar.type; return objVarDef; } private BType getObjectType(BType type) { if (type.tag == TypeTags.OBJECT) { return type; } else if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().stream() .filter(t -> t.tag == TypeTags.OBJECT) .findFirst() .orElse(symTable.noType); } throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context"); } BLangErrorType getErrorTypeNode() { BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode(); errorTypeNode.type = symTable.errorType; return errorTypeNode; } @Override public void visit(BLangTernaryExpr ternaryExpr) { /* * First desugar to if-else: * * T $result$; * if () { * $result$ = thenExpr; * } else { * $result$ = elseExpr; * } * */ BLangSimpleVariableDef resultVarDef = createVarDef("$ternary_result$", ternaryExpr.type, null, ternaryExpr.pos); BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos); BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos); BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol); BLangAssignment thenAssignment = ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr); thenBody.addStatement(thenAssignment); BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol); BLangAssignment elseAssignment = ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr); elseBody.addStatement(elseAssignment); BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol); BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, 
thenBody, elseBody);
    // Wrap the declaration and the if/else into a statement expression whose value
    // is the result variable.
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = ternaryExpr.type;
    result = rewriteExpr(stmtExpr);
}

@Override
public void visit(BLangWaitExpr waitExpr) {
    // `wait a | b | ...` parses as nested binary expressions; flatten them into a
    // single list of alternatives. A simple `wait f` becomes a one-element list.
    if (waitExpr.getExpression().getKind() == NodeKind.BINARY_EXPR) {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitExpr.getExpression(), new ArrayList<>());
    } else {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitExpr.getExpression()));
    }
    result = waitExpr;
}

// Flattens a binary wait-expression tree into `exprs` (left to right) and returns it.
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
    visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
    return exprs;
}

// Descends into nested binary nodes; leaf expressions are rewritten and collected.
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() == NodeKind.BINARY_EXPR) {
        collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
    } else {
        expr = rewriteExpr(expr);
        exprs.add(expr);
    }
}

@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    // Rewrite each key-value pair; a pair without an explicit value waits on the
    // key expression itself.
    waitExpr.keyValuePairs.forEach(keyValue -> {
        if (keyValue.valueExpr != null) {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        } else {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        }
    });
    BLangExpression expr = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.type);
    result = rewriteExpr(expr);
}

@Override
public void visit(BLangTrapExpr trapExpr) {
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    // A non-nil trapped result must be converted to the trap expression's type
    // (which includes the error alternative).
    if (trapExpr.expr.type.tag != TypeTags.NIL) {
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.type);
    }
    result = trapExpr;
}

@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Range operators are replaced with an IntRange construction; a half-open
    // range (`..<`) first adjusts its end expression (continues below).
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            binaryExpr.rhsExpr =
getModifiedIntRangeEndExpr(binaryExpr.rhsExpr); } result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr)); return; } if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) { visitBinaryLogicalExpr(binaryExpr); return; } OperatorKind binaryOpKind = binaryExpr.opKind; if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB || binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV || binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND || binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) { checkByteTypeIncompatibleOperations(binaryExpr); } binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr); binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr); result = binaryExpr; int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag; int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag; if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL || binaryExpr.opKind == OperatorKind.NOT_EQUAL || binaryExpr.opKind == OperatorKind.REF_EQUAL || binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) { if (lhsExprTypeTag == TypeTags.INT && rhsExprTypeTag == TypeTags.BYTE) { binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType); return; } if (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.INT) { binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType); return; } } if (lhsExprTypeTag == rhsExprTypeTag) { return; } if (TypeTags.isStringTypeTag(lhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) { if (TypeTags.isXMLTypeTag(rhsExprTypeTag)) { binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr, binaryExpr.lhsExpr.pos, symTable.xmlType); return; } binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type); return; } if (TypeTags.isStringTypeTag(rhsExprTypeTag) && binaryExpr.opKind == 
OperatorKind.ADD) { if (TypeTags.isXMLTypeTag(lhsExprTypeTag)) { binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr, binaryExpr.rhsExpr.pos, symTable.xmlType); return; } binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type); return; } if (lhsExprTypeTag == TypeTags.DECIMAL) { binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type); return; } if (rhsExprTypeTag == TypeTags.DECIMAL) { binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type); return; } if (lhsExprTypeTag == TypeTags.FLOAT) { binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type); return; } if (rhsExprTypeTag == TypeTags.FLOAT) { binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type); } } private BLangInvocation replaceWithIntRange(DiagnosticPos pos, BLangExpression lhsExpr, BLangExpression rhsExpr) { BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope .lookup(Names.CREATE_INT_RANGE).symbol; BLangInvocation createIntRangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos, symbol, new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver); createIntRangeInvocation.type = symTable.intRangeType; return createIntRangeInvocation; } private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) { if (binaryExpr.parent == null || binaryExpr.parent.type == null) { return; } int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag; int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag; if (rhsExprTypeTag != TypeTags.BYTE && lhsExprTypeTag != TypeTags.BYTE) { return; } int resultTypeTag = binaryExpr.type.tag; if (resultTypeTag == TypeTags.INT) { if (rhsExprTypeTag == TypeTags.BYTE) { binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType); } if (lhsExprTypeTag == TypeTags.BYTE) { binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, 
symTable.intType); } } } /** * This method checks whether given binary expression is related to shift operation. * If its true, then both lhs and rhs of the binary expression will be converted to 'int' type. * <p> * byte a = 12; * byte b = 34; * int i = 234; * int j = -4; * <p> * true: where binary expression's expected type is 'int' * int i1 = a >> b; * int i2 = a << b; * int i3 = a >> i; * int i4 = a << i; * int i5 = i >> j; * int i6 = i << j; */ private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) { return binaryExpr.opKind == OperatorKind.BITWISE_LEFT_SHIFT || binaryExpr.opKind == OperatorKind.BITWISE_RIGHT_SHIFT || binaryExpr.opKind == OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT; } public void visit(BLangElvisExpr elvisExpr) { BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr); matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos, rewriteExpr(elvisExpr.rhsExpr))); matchExpr.type = elvisExpr.type; matchExpr.pos = elvisExpr.pos; result = rewriteExpr(matchExpr); } @Override public void visit(BLangUnaryExpr unaryExpr) { if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) { rewriteBitwiseComplementOperator(unaryExpr); return; } unaryExpr.expr = rewriteExpr(unaryExpr.expr); result = unaryExpr; } /** * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary expression as below. 
* Example : ~a -> a ^ -1; * ~ 11110011 -> 00001100 * 11110011 ^ 11111111 -> 00001100 * * @param unaryExpr the bitwise complement expression */ private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) { final DiagnosticPos pos = unaryExpr.pos; final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode(); binaryExpr.pos = pos; binaryExpr.opKind = OperatorKind.BITWISE_XOR; binaryExpr.lhsExpr = unaryExpr.expr; if (TypeTags.BYTE == unaryExpr.type.tag) { binaryExpr.type = symTable.byteType; binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL); binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR, symTable.byteType, symTable.byteType); } else { binaryExpr.type = symTable.intType; binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L); binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR, symTable.intType, symTable.intType); } result = rewriteExpr(binaryExpr); } @Override public void visit(BLangTypeConversionExpr conversionExpr) { if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) { result = rewriteExpr(conversionExpr.expr); return; } conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env); conversionExpr.expr = rewriteExpr(conversionExpr.expr); result = conversionExpr; } @Override public void visit(BLangLambdaFunction bLangLambdaFunction) { env.enclPkg.lambdaFunctions.add(bLangLambdaFunction); result = bLangLambdaFunction; } @Override public void visit(BLangArrowFunction bLangArrowFunction) { BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode(); bLangFunction.setName(bLangArrowFunction.functionName); BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode(); lambdaFunction.pos = bLangArrowFunction.pos; bLangFunction.addFlag(Flag.LAMBDA); lambdaFunction.function = bLangFunction; 
BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode(); returnType.type = bLangArrowFunction.body.expr.type; bLangFunction.setReturnTypeNode(returnType); bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction)); bLangArrowFunction.params.forEach(bLangFunction::addParameter); lambdaFunction.parent = bLangArrowFunction.parent; lambdaFunction.type = bLangArrowFunction.funcType; BLangFunction funcNode = lambdaFunction.function; BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet), new Name(funcNode.name.value), env.enclPkg.symbol.pkgID, bLangArrowFunction.funcType, env.enclEnv.enclVarSym, true); SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env); defineInvokableSymbol(funcNode, funcSymbol, invokableEnv); List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> { Scope enclScope = invokableEnv.scope; varNode.symbol.kind = SymbolKind.FUNCTION; varNode.symbol.owner = invokableEnv.scope.owner; enclScope.define(varNode.symbol.name, varNode.symbol); }).map(varNode -> varNode.symbol).collect(Collectors.toList()); funcSymbol.params = paramSymbols; funcSymbol.restParam = getRestSymbol(funcNode); funcSymbol.retType = funcNode.returnTypeNode.type; List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList()); funcNode.type = new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.type, null); lambdaFunction.function.pos = bLangArrowFunction.pos; lambdaFunction.function.body.pos = bLangArrowFunction.pos; lambdaFunction.capturedClosureEnv = env; rewrite(lambdaFunction.function, env); env.enclPkg.addFunction(lambdaFunction.function); bLangArrowFunction.function = lambdaFunction.function; result = rewriteExpr(lambdaFunction); } private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol, SymbolEnv invokableEnv) { invokableNode.symbol = funcSymbol; 
// Tail of defineInvokableSymbol(): attach a fresh scope to the function symbol and
// make it the scope of the invokable's environment.
funcSymbol.scope = new Scope(funcSymbol);
invokableEnv.scope = funcSymbol.scope;
}

@Override
public void visit(BLangXMLQName xmlQName) {
    // XML qualified names need no desugaring; pass through unchanged.
    result = xmlQName;
}

@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Desugar both the attribute name and its value expression.
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}

@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    // Desugar the tag names, children and attributes of the element literal.
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Collect inline namespace declarations (xmlns attributes) into the literal's
    // inlineNamespaces list. A package-level or local XMLNS node is chosen based on
    // whether the enclosing scope owner is a package symbol.
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        BLangXMLNS xmlns;
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    result = xmlElementLiteral;
}

@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Fold the text fragments into a single string-concatenation expression.
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}

@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Fold the comment text fragments into a single concatenation expression.
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}

@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    // Desugar the processing-instruction target, then fold its data fragments.
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments)); result = xmlProcInsLiteral; } @Override public void visit(BLangXMLQuotedString xmlQuotedString) { xmlQuotedString.concatExpr = rewriteExpr( constructStringTemplateConcatExpression(xmlQuotedString.textFragments)); result = xmlQuotedString; } @Override public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs)); } @Override public void visit(BLangWorkerSend workerSendNode) { workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.type); if (workerSendNode.keyExpr != null) { workerSendNode.keyExpr = rewriteExpr(workerSendNode.keyExpr); } result = workerSendNode; } @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.type); result = syncSendExpr; } @Override public void visit(BLangWorkerReceive workerReceiveNode) { if (workerReceiveNode.keyExpr != null) { workerReceiveNode.keyExpr = rewriteExpr(workerReceiveNode.keyExpr); } result = workerReceiveNode; } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList()); result = workerFlushExpr; } @Override public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) { xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr); xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr); if (xmlAttributeAccessExpr.indexExpr != null && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) { ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true; } xmlAttributeAccessExpr.desugared = true; if (xmlAttributeAccessExpr.lhsVar || xmlAttributeAccessExpr.indexExpr != null) { 
// Tail of visit(BLangXMLAttributeAccess): lvalue or indexed accesses are kept as-is;
// otherwise the whole access expression is desugared.
result = xmlAttributeAccessExpr;
} else {
    result = rewriteExpr(xmlAttributeAccessExpr);
}
}

// The following visit methods handle nodes that are already in their final desugared
// form; each simply passes its node through unchanged.

@Override
public void visit(BLangLocalVarRef localVarRef) {
    result = localVarRef;
}

@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    result = fieldVarRef;
}

@Override
public void visit(BLangPackageVarRef packageVarRef) {
    result = packageVarRef;
}

@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    result = fieldAccessExpr;
}

@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    result = mapKeyAccessExpr;
}

@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangMapLiteral mapLiteral) {
    result = mapLiteral;
}

@Override
public void visit(BLangStructLiteral structLiteral) {
    result = structLiteral;
}

@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    result = waitLiteral;
}

@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // Desugar element access into a call to the internal xml getElements() helper,
    // passing the (namespace-expanded) element-name filters as rest args.
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
            xmlElementAccess.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
}

/**
 * Expands each XML element filter into a string-literal argument, qualifying the
 * element name with the namespace URI bound to the filter's prefix (or the default
 * namespace, when one is declared and the filter is not a wildcard).
 */
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    // Default namespace URI, or null when no default namespace is in scope.
    String defaultNS = defaultNSSymbol != null ?
defaultNSSymbol.namespaceURI : null; ArrayList<BLangExpression> args = new ArrayList<>(); for (BLangXMLElementFilter filter : filters) { BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace)); if (nsSymbol == symTable.notFoundSymbol) { if (defaultNS != null && !filter.name.equals("*")) { String expandedName = createExpandedQName(defaultNS, filter.name); args.add(createStringLiteral(filter.elemNamePos, expandedName)); } else { args.add(createStringLiteral(filter.elemNamePos, filter.name)); } } else { BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol; String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name); BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName); args.add(stringLiteral); } } return args; } private BLangInvocation createLanglibXMLInvocation(DiagnosticPos pos, String functionName, BLangExpression invokeOnExpr, ArrayList<BLangExpression> args, ArrayList<BLangExpression> restArgs) { invokeOnExpr = rewriteExpr(invokeOnExpr); BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode(); invocationNode.pos = pos; BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(functionName); name.pos = pos; invocationNode.name = name; invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); invocationNode.expr = invokeOnExpr; invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName)); ArrayList<BLangExpression> requiredArgs = new ArrayList<>(); requiredArgs.add(invokeOnExpr); requiredArgs.addAll(args); invocationNode.requiredArgs = requiredArgs; invocationNode.restArgs = rewriteExprs(restArgs); invocationNode.type = ((BInvokableType) invocationNode.symbol.type).getReturnType(); invocationNode.langLibInvocation = true; return invocationNode; } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { 
xmlNavigation.expr = rewriteExpr(xmlNavigation.expr); xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex); ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters); if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) { BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters); result = rewriteExpr(invocationNode); } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) { BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN, xmlNavigation.expr, new ArrayList<>(), new ArrayList<>()); result = rewriteExpr(invocationNode); } else { BLangExpression childIndexExpr; if (xmlNavigation.childIndex == null) { childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType); } else { childIndexExpr = xmlNavigation.childIndex; } ArrayList<BLangExpression> args = new ArrayList<>(); args.add(rewriteExpr(childIndexExpr)); BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters); result = rewriteExpr(invocationNode); } } @Override public void visit(BLangIsAssignableExpr assignableExpr) { assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr); result = assignableExpr; } @Override public void visit(BFunctionPointerInvocation fpInvocation) { result = fpInvocation; } @Override public void visit(BLangTypedescExpr typedescExpr) { typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env); result = typedescExpr; } @Override public void visit(BLangIntRangeExpression intRangeExpression) { if (!intRangeExpression.includeStart) { intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr); } if (!intRangeExpression.includeEnd) { intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr); } 
intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr); intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr); result = intRangeExpression; } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { result = rewriteExpr(bLangVarArgsExpression.expr); } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr); result = bLangNamedArgsExpression.expr; } @Override public void visit(BLangMatchExpression bLangMatchExpression) { addMatchExprDefaultCase(bLangMatchExpression); String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result"; BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos, matchTempResultVarName, bLangMatchExpression.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID, bLangMatchExpression.type, this.env.scope.owner)); BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar); tempResultVarDef.desugared = true; BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef)); List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>(); for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) { BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i); pattern.expr = rewriteExpr(pattern.expr); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol); pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.type); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt)); 
patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody)); } stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr, patternClauses)); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol); BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef); statementExpr.type = bLangMatchExpression.type; result = rewriteExpr(statementExpr); } @Override public void visit(BLangCheckedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr, false); } @Override public void visit(BLangCheckPanickedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr, true); } private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) { String checkedExprVarName = GEN_VAR_PREFIX.value; BLangSimpleVariable checkedExprVar = ASTBuilderUtil.createVariable(checkedExpr.pos, checkedExprVarName, checkedExpr.type, null, new BVarSymbol(0, names.fromString(checkedExprVarName), this.env.scope.owner.pkgID, checkedExpr.type, this.env.scope.owner)); BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar); checkedExprVarDef.desugared = true; BLangMatchTypedBindingPatternClause patternSuccessCase = getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true, checkedExprVar.symbol, null); BLangMatchTypedBindingPatternClause patternErrorCase = getSafeAssignErrorPattern(checkedExpr.pos, this.env.scope.owner, checkedExpr.equivalentErrorTypeList, isCheckPanic); BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr, new ArrayList<BLangMatchTypedBindingPatternClause>() {{ add(patternSuccessCase); add(patternErrorCase); }}); BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos, new ArrayList<BLangStatement>() {{ add(checkedExprVarDef); 
add(matchStmt); }}); BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef( checkedExpr.pos, checkedExprVar.symbol); BLangStatementExpression statementExpr = createStatementExpression( generatedStmtBlock, tempCheckedExprVarRef); statementExpr.type = checkedExpr.type; result = rewriteExpr(statementExpr); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos, serviceConstructorExpr.serviceNode.serviceTypeDefinition.symbol.type); serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env)); result = rewriteExpr(typeInit); } @Override public void visit(BLangTypeTestExpr typeTestExpr) { BLangExpression expr = typeTestExpr.expr; if (types.isValueType(expr.type)) { addConversionExprIfRequired(expr, symTable.anyType); } typeTestExpr.expr = rewriteExpr(expr); typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env); result = typeTestExpr; } @Override public void visit(BLangAnnotAccessExpr annotAccessExpr) { BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode(); binaryExpr.pos = annotAccessExpr.pos; binaryExpr.opKind = OperatorKind.ANNOT_ACCESS; binaryExpr.lhsExpr = annotAccessExpr.expr; binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType, annotAccessExpr.annotationSymbol.bvmAlias()); binaryExpr.type = annotAccessExpr.type; binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null, new BInvokableType(Lists.of(binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type), annotAccessExpr.type, null), null); result = rewriteExpr(binaryExpr); } @Override public void visit(BLangIsLikeExpr isLikeExpr) { isLikeExpr.expr = rewriteExpr(isLikeExpr.expr); result = isLikeExpr; } @Override public void visit(BLangStatementExpression bLangStatementExpression) { bLangStatementExpression.expr = 
rewriteExpr(bLangStatementExpression.expr); bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env); result = bLangStatementExpression; } @Override public void visit(BLangQueryExpr queryExpr) { BLangStatementExpression stmtExpr = queryDesugar.desugarQueryExpr(queryExpr, env); result = rewrite(stmtExpr, env); } @Override public void visit(BLangQueryAction queryAction) { BLangStatementExpression stmtExpr = queryDesugar.desugarQueryAction(queryAction, env); result = rewrite(stmtExpr, env); } @Override public void visit(BLangJSONArrayLiteral jsonArrayLiteral) { jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs); result = jsonArrayLiteral; } @Override public void visit(BLangConstant constant) { BConstantSymbol constSymbol = constant.symbol; if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) { if (constSymbol.literalType.tag != TypeTags.NIL && constSymbol.value.value == null) { throw new IllegalStateException(); } BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType, constSymbol.value.value); constant.expr = rewriteExpr(literal); } else { constant.expr = rewriteExpr(constant.expr); } constant.annAttachments.forEach(attachment -> rewrite(attachment, env)); result = constant; } @Override public void visit(BLangIgnoreExpr ignoreExpr) { result = ignoreExpr; } @Override public void visit(BLangConstRef constantRef) { result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.type, constantRef.value); } BLangSimpleVariableDef getIteratorVariableDefinition(DiagnosticPos pos, BVarSymbol collectionSymbol, BInvokableSymbol iteratorInvokableSymbol, boolean isIteratorFuncFromLangLib) { BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol); BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode(); iteratorInvocation.pos = pos; iteratorInvocation.expr = dataReference; 
// Tail of getIteratorVariableDefinition(): finish wiring the iterator() invocation
// and bind its result to a synthetic `$iterator$` variable definition.
iteratorInvocation.symbol = iteratorInvokableSymbol;
iteratorInvocation.type = iteratorInvokableSymbol.retType;
iteratorInvocation.argExprs = Lists.of(dataReference);
iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
        iteratorInvokableSymbol.retType, this.env.scope.owner);
BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
        iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}

/**
 * Creates a variable definition holding the iterator's next() invocation result,
 * i.e. `$result$ = $iterator$.next()` bound to a fresh `$result$` variable of the
 * given nillable result type.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(DiagnosticPos pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
            nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}

/**
 * Creates an assignment of the iterator's next() invocation result to an existing
 * result variable: `$result$ = $iterator$.next();`.
 */
BLangAssignment getIteratorNextAssignment(DiagnosticPos pos,
                                          BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    // Strip nil from the invoked expression's type so next() is dispatched on the
    // non-nil iterator value.
    nextInvocation.expr.type = types.getSafeType(nextInvocation.expr.type, true, false);
    return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}

// Builds the `$iterator$.next()` invocation node itself, resolving the attached
// next() function of the iterator object type.
BLangInvocation createIteratorNextInvocation(DiagnosticPos pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation
= (BLangInvocation) TreeBuilder.createInvocationNode(); nextInvocation.pos = pos; nextInvocation.name = nextIdentifier; nextInvocation.expr = iteratorReferenceInNext; nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol)); nextInvocation.argExprs = nextInvocation.requiredArgs; nextInvocation.symbol = nextFuncSymbol; nextInvocation.type = nextFuncSymbol.retType; return nextInvocation; } private BAttachedFunction getNextFunc(BObjectType iteratorType) { BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol; for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) { if (bAttachedFunction.funcName.value.equals("next")) { return bAttachedFunction; } } return null; } BLangFieldBasedAccess getValueAccessExpression(DiagnosticPos pos, BType varType, BVarSymbol resultSymbol) { BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol); BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, "value"); BLangFieldBasedAccess fieldBasedAccessExpression = ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier); fieldBasedAccessExpression.pos = pos; fieldBasedAccessExpression.type = varType; fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.type; return fieldBasedAccessExpression; } private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) { BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode(); BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode(); returnNode.pos = bLangArrowFunction.body.expr.pos; returnNode.setExpression(bLangArrowFunction.body.expr); blockNode.addStatement(returnNode); return blockNode; } private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) { BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode(); BLangIdentifier name = 
(BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(functionName); invocationNode.name = name; invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol; invocationNode.type = retType; invocationNode.requiredArgs = args; return invocationNode; } private BLangInvocation createLangLibInvocationNode(String functionName, BLangExpression onExpr, List<BLangExpression> args, BType retType, DiagnosticPos pos) { BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode(); invocationNode.pos = pos; BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(functionName); name.pos = pos; invocationNode.name = name; invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); invocationNode.expr = onExpr; invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.type, names.fromString(functionName)); ArrayList<BLangExpression> requiredArgs = new ArrayList<>(); requiredArgs.add(onExpr); requiredArgs.addAll(args); invocationNode.requiredArgs = requiredArgs; invocationNode.type = retType != null ? 
// Tail of createLangLibInvocationNode(): default the invocation type to the
// symbol's declared return type when no explicit retType was supplied.
retType : ((BInvokableSymbol) invocationNode.symbol).retType;
invocationNode.langLibInvocation = true;
return invocationNode;
}

// Creates an empty `any[]` array-literal node.
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    expr.exprs = new ArrayList<>();
    expr.type = new BArrayType(symTable.anyType);
    return expr;
}

// Desugars an invocation through a function pointer: rebuild the callee reference
// (plain var-ref, or field access when invoked on an expression), rewrite it, and
// wrap the invocation in a BFunctionPointerInvocation.
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangVariableReference expr;
    if (iExpr.expr == null) {
        expr = new BLangSimpleVarRef();
    } else {
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.type = iExpr.symbol.type;
    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}

// Wraps `expr` in a lang-lib clone() call, converted to the lhs type if required.
// Value-typed and error-typed expressions are returned unchanged.
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.type)) {
        return expr;
    }
    if (expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("clone", expr, new ArrayList<>(), expr.type,
            expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}

// Same as visitCloneInvocation but produces an immutable copy via cloneReadOnly().
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.type)) {
        return expr;
    }
    if (expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
            expr.type, expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}

/**
 * Rewrites an AST node under the given environment, marking the produced node as
 * desugared so it is not processed twice. The previous environment is restored
 * before returning.
 */
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    SymbolEnv previousEnv = this.env;
    this.env = env;
    node.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    this.env = previousEnv;
    return (E) resultNode;
}

@SuppressWarnings("unchecked")
<E
extends BLangExpression> E rewriteExpr(E node) {
    // Rewrites an expression node in the current environment. If an implicit
    // conversion wrapper has been attached, that wrapper is desugared instead of
    // the raw node (and the attachment is cleared to avoid re-application).
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}

/**
 * Rewrites a statement, threading a BLangStatementLink chain so each desugared
 * statement keeps a link to its enclosing statement context.
 */
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    BLangStatementLink link = new BLangStatementLink();
    link.parent = currentLink;
    currentLink = link;
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    link.statement = stmt;
    stmt.statementLink = link;
    currentLink = link.parent;
    return (E) stmt;
}

// In-place rewrite of a statement list.
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewrite(nodeList.get(i), env));
    }
    return nodeList;
}

// In-place rewrite of a generic node list.
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewrite(nodeList.get(i), env));
    }
    return nodeList;
}

// In-place rewrite of an expression list.
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewriteExpr(nodeList.get(i)));
    }
    return nodeList;
}

// Factory for a string literal node at the given position.
private BLangLiteral createStringLiteral(DiagnosticPos pos, String value) {
    BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
    stringLit.pos = pos;
    return stringLit;
}

// Factory for an int literal node (no position attached).
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = value;
    literal.type = symTable.intType;
    return literal;
}

// Factory for a byte literal node; the value is stored as its unsigned int form.
private BLangLiteral createByteLiteral(DiagnosticPos pos, Byte value) {
    BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    byteLiteral.pos = pos;
    return byteLiteral;
}

private BLangExpression
// NOTE(review): createTypeCastExpr wraps an expression in a BLangTypeConversionExpr to the
// target type. getElementType recursively unwraps array types to the leaf element type.
// addReturnIfNotPresent appends an implicit nil return to a block function body when the
// function's return type is nilable and the last statement is not already a return (skipped
// for native functions, non-block bodies, and functions with workers). reorderArguments
// normalizes an invocation's argument lists to the callee signature: when a rest-args (`...x`)
// expression must be split across required params, the vararg is first bound to a synthetic
// local ($vararg$N) so it is evaluated only once; remaining cases build an array literal for
// the rest param or combine fixed rest args with a pushed vararg via langlib slice/push.
createTypeCastExpr(BLangExpression expr, BType targetType) { BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); conversionExpr.pos = expr.pos; conversionExpr.expr = expr; conversionExpr.type = targetType; conversionExpr.targetType = targetType; return conversionExpr; } private BType getElementType(BType type) { if (type.tag != TypeTags.ARRAY) { return type; } return getElementType(((BArrayType) type).getElementType()); } private void addReturnIfNotPresent(BLangInvokableNode invokableNode) { if (Symbols.isNative(invokableNode.symbol) || (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) { return; } BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body; if (invokableNode.workers.size() == 0 && invokableNode.symbol.type.getReturnType().isNullable() && (funcBody.stmts.size() < 1 || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) { DiagnosticPos invPos = invokableNode.pos; DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src, invPos.eLine, invPos.eLine, invPos.sCol, invPos.sCol); BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType); funcBody.addStatement(returnStmt); } } /** * Reorder the invocation arguments to match the original function signature.
* * @param iExpr Function invocation expressions to reorder the arguments */ private void reorderArguments(BLangInvocation iExpr) { BSymbol symbol = iExpr.symbol; if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) { return; } BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol; List<BLangExpression> restArgs = iExpr.restArgs; int originalRequiredArgCount = iExpr.requiredArgs.size(); BLangExpression varargRef = null; BLangBlockStmt blockStmt = null; if (!iExpr.restArgs.isEmpty() && restArgs.get(restArgs.size() - 1).getKind() == NodeKind.REST_ARGS_EXPR && iExpr.requiredArgs.size() < invokableSymbol.params.size()) { BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgs.size() - 1)).expr; DiagnosticPos varargExpPos = expr.pos; BType varargVarType = expr.type; String varargVarName = DESUGARED_VARARG_KEY + this.varargCount++; BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID, varargVarType, this.env.scope.owner); varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol); BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, ((BLangRestArgsExpression) restArgs.get(restArgs.size() - 1)).expr, varargVarSymbol); BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos); varDef.var = var; varDef.type = varargVarType; blockStmt = createBlockStmt(varargExpPos); blockStmt.stmts.add(varDef); } if (!invokableSymbol.params.isEmpty()) { reorderNamedArgs(iExpr, invokableSymbol, varargRef); } if (restArgs.isEmpty() || restArgs.get(restArgs.size() - 1).getKind() != NodeKind.REST_ARGS_EXPR) { if (invokableSymbol.restParam == null) { return; } BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); List<BLangExpression> exprs = new ArrayList<>(); BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type; BType elemType = arrayType.eType; for (BLangExpression restArg :
restArgs) { exprs.add(addConversionExprIfRequired(restArg, elemType)); } arrayLiteral.exprs = exprs; arrayLiteral.type = arrayType; iExpr.restArgs = new ArrayList<>(); iExpr.restArgs.add(arrayLiteral); return; } if (restArgs.size() == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) { if (iExpr.requiredArgs.size() == originalRequiredArgCount) { return; } BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0); BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg); stmtExpression.type = firstNonRestArg.type; iExpr.requiredArgs.add(0, stmtExpression); if (invokableSymbol.restParam == null) { return; } BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount); BLangInvocation sliceInvocation = createLangLibInvocationNode(SLICE_LANGLIB_METHOD, varargRef, new ArrayList<BLangExpression>() {{ add(startIndex); }}, varargRef.type, varargRef.pos); restArgs.remove(0); restArgs.add(addConversionExprIfRequired(sliceInvocation, invokableSymbol.restParam.type)); return; } BArrayType type = (BArrayType) invokableSymbol.restParam.type; BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); arrayLiteral.type = type; BType elemType = type.eType; DiagnosticPos pos = restArgs.get(0).pos; List<BLangExpression> exprs = new ArrayList<>(); for (int i = 0; i < restArgs.size() - 1; i++) { exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType)); } arrayLiteral.exprs = exprs; BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode(); pushRestArgsExpr.pos = pos; pushRestArgsExpr.expr = restArgs.remove(restArgs.size() - 1); String name = DESUGARED_VARARG_KEY + this.varargCount++; BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type, this.env.scope.owner); BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
// NOTE(review): continuation of reorderArguments — builds a block that materializes the
// fixed rest args into a synthetic array var, appends the trailing `...expr` via langlib
// push, and substitutes the whole thing as a single statement-expression rest arg.
// reorderNamedArgs then fills the required-args list positionally: positional arg if
// present, else matching named arg, else (no vararg) a BLangIgnoreExpr placeholder, else an
// index access into the vararg (tuple member type or array element type, by varargIndex).
// getSafeAssignErrorPattern builds the `error` branch of a check/checkpanic match: it
// returns the error when every equivalent error type is assignable to the enclosing
// function's return type (and this is not checkpanic), otherwise it panics.
// getSafeAssignSuccessPattern builds the success branch assigning the matched value to
// either a new var or an existing lhs expression.
BLangSimpleVariable var = createVariable(pos, name, type, arrayLiteral, varSymbol); BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos); varDef.var = var; varDef.type = type; BLangBlockStmt pushBlockStmt = createBlockStmt(pos); pushBlockStmt.stmts.add(varDef); BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt); BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef, new ArrayList<BLangExpression>() {{ add(pushRestArgsExpr); }}, type, pos); pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1)); expressionStmt.expr = pushInvocation; BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef); stmtExpression.type = type; iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }}; } private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) { List<BLangExpression> args = new ArrayList<>(); Map<String, BLangExpression> namedArgs = new HashMap<>(); iExpr.requiredArgs.stream() .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR) .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr)); List<BVarSymbol> params = invokableSymbol.params; int varargIndex = 0; BType varargType = null; boolean tupleTypedVararg = false; if (varargRef != null) { varargType = varargRef.type; tupleTypedVararg = varargType.tag == TypeTags.TUPLE; } for (int i = 0; i < params.size(); i++) { BVarSymbol param = params.get(i); if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) { args.add(iExpr.requiredArgs.get(i)); } else if (namedArgs.containsKey(param.name.value)) { args.add(namedArgs.get(param.name.value)); } else if (varargRef == null) { BLangExpression expr = new BLangIgnoreExpr(); expr.type = param.type; args.add(expr); } else { BLangIndexBasedAccess memberAccessExpr = (BLangIndexBasedAccess)
TreeBuilder.createIndexBasedAccessNode(); memberAccessExpr.pos = varargRef.pos; memberAccessExpr.expr = varargRef; memberAccessExpr.indexExpr = rewriteExpr(createIntLiteral(varargIndex)); memberAccessExpr.type = tupleTypedVararg ? ((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType; varargIndex++; args.add(addConversionExprIfRequired(memberAccessExpr, param.type)); } } iExpr.requiredArgs = args; } private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern( DiagnosticPos pos, BSymbol invokableSymbol, List<BType> equivalentErrorTypes, boolean isCheckPanicExpr) { BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType; Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ? ((BUnionType) enclosingFuncReturnType).getMemberTypes() : new LinkedHashSet<BType>() {{ add(enclosingFuncReturnType); }}; boolean returnOnError = equivalentErrorTypes.stream() .allMatch(errorType -> returnTypeSet.stream() .anyMatch(retType -> types.isAssignable(errorType, retType))); String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure"; BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(pos, patternFailureCaseVarName, symTable.errorType, null, new BVarSymbol(0, names.fromString(patternFailureCaseVarName), this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner)); BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(pos, patternFailureCaseVar.symbol); BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode(); patternBlockFailureCase.pos = pos; if (!isCheckPanicExpr && returnOnError) { BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode(); returnStmt.pos = pos; returnStmt.expr = patternFailureCaseVarRef; patternBlockFailureCase.stmts.add(returnStmt); } else { BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode(); panicNode.pos = pos; panicNode.expr =
patternFailureCaseVarRef; patternBlockFailureCase.stmts.add(panicNode); } return ASTBuilderUtil.createMatchStatementPattern(pos, patternFailureCaseVar, patternBlockFailureCase); } private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(DiagnosticPos pos, BType lhsType, boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) { String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match"; BLangSimpleVariable patternSuccessCaseVar = ASTBuilderUtil.createVariable(pos, patternSuccessCaseVarName, lhsType, null, new BVarSymbol(0, names.fromString(patternSuccessCaseVarName), this.env.scope.owner.pkgID, lhsType, this.env.scope.owner)); BLangExpression varRefExpr; if (isVarDef) { varRefExpr = ASTBuilderUtil.createVariableRef(pos, varSymbol); } else { varRefExpr = lhsExpr; } BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(pos, patternSuccessCaseVar.symbol); BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(pos, varRefExpr, patternSuccessCaseVarRef, false); BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<BLangStatement>() {{ add(assignmentStmtSuccessCase); }}); return ASTBuilderUtil.createMatchStatementPattern(pos, patternSuccessCaseVar, patternBlockSuccessCase); } private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) { List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses; BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar); BLangIf currentIfNode = parentIfNode; for (int i = 1; i < patterns.size(); i++) { BLangMatchBindingPatternClause patternClause = patterns.get(i); if (i == patterns.size() - 1 && patternClause.isLastPattern) { currentIfNode.elseStmt = getMatchPatternElseBody(patternClause, matchExprVar); } else { currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar); currentIfNode = (BLangIf) currentIfNode.elseStmt; } } return
// NOTE(review): per-pattern half of the match desugar. The single-clause generateIfElseStmt
// turns one match clause into `if (<pattern test>) { <body> }`: typed patterns get a typed
// local bound to a cast of the match var; structured (tuple/record/error) patterns first
// force-cast the match var, bind the destructuring variable definition, and — when a type
// guard is present — AND the guard into the condition via a statement expression so the
// bindings are in scope for the guard. getMatchPatternElseBody does the same binding work
// for the final (last-pattern) clause, which needs no condition.
// addConversionExprIfRequired (spills into the next lines) inserts an implicit conversion
// from rhs to lhs type unless the types already match or one of the listed exemptions
// (json<-nil, nilable rhs to nil, tuple rhs to array lhs, existing impConversionExpr) holds.
parentIfNode; } /** * Generate an if-else statement from the given match statement. * * @param pattern match pattern statement node * @param matchExprVar variable node of the match expression * @return if else statement node */ private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) { BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol); if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) { BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar); return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null); } BType expectedType = matchExprVar.type; if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) { BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern; expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable); } if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) { BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) pattern; BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType); BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol); structuredPattern.bindingPatternVariable.expr = matchExprVarRef; BLangStatement varDefStmt; if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos, (BLangTupleVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos, (BLangRecordVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt =
ASTBuilderUtil.createErrorVariableDef(pattern.pos, (BLangErrorVariable) structuredPattern.bindingPatternVariable); } else { varDefStmt = ASTBuilderUtil .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable); } if (structuredPattern.typeGuardExpr != null) { BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos); blockStmt.addStatement(varDef); blockStmt.addStatement(varDefStmt); BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, structuredPattern.typeGuardExpr); stmtExpr.type = symTable.booleanType; ifCondition = ASTBuilderUtil .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND, (BOperatorSymbol) symResolver .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType, symTable.booleanType)); } else { structuredPattern.body.stmts.add(0, varDef); structuredPattern.body.stmts.add(1, varDefStmt); } } return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null); } private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) { BLangBlockStmt body; BLangMatchTypedBindingPatternClause patternClause = (BLangMatchTypedBindingPatternClause) pattern; if (patternClause.variable.name.value.equals(Names.IGNORE.value)) { return patternClause.body; } BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(patternClause.pos, matchExprVar.symbol); BLangExpression patternVarExpr = addConversionExprIfRequired(matchExprVarRef, patternClause.variable.type); BLangSimpleVariable patternVar = ASTBuilderUtil.createVariable(patternClause.pos, "", patternClause.variable.type, patternVarExpr, patternClause.variable.symbol); BLangSimpleVariableDef patternVarDef = ASTBuilderUtil.createVariableDef(patternVar.pos, patternVar); patternClause.body.stmts.add(0, patternVarDef); body = patternClause.body; return body; } private BLangBlockStmt
getMatchPatternElseBody(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) { BLangBlockStmt body = pattern.body; if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) { BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol); BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) pattern; structuredPattern.bindingPatternVariable.expr = matchExprVarRef; BLangStatement varDefStmt; if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos, (BLangTupleVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos, (BLangRecordVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos, (BLangErrorVariable) structuredPattern.bindingPatternVariable); } else { varDefStmt = ASTBuilderUtil .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable); } structuredPattern.body.stmts.add(0, varDefStmt); body = structuredPattern.body; } return body; } BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) { if (lhsType.tag == TypeTags.NONE) { return expr; } BType rhsType = expr.type; if (types.isSameType(rhsType, lhsType)) { return expr; } types.setImplicitCastExpr(expr, rhsType, lhsType); if (expr.impConversionExpr != null) { return expr; } if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) { return expr; } if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) { return expr; } if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) { return expr; } BLangTypeConversionExpr
// NOTE(review): tail of addConversionExprIfRequired builds an unchecked
// BLangTypeConversionExpr (checkTypes = false). createPatternIfCondition computes the
// runtime test for a match clause: the pattern's type (literal type, synthesized structured
// type, or declared type), expanded over union members and OR-joined pairwise.
// getStructuredBindingPatternType synthesizes a BType for a binding pattern: tuples recurse
// per member (rest member becomes the tuple restType); record patterns mint an anonymous
// record type+symbol ($anonRecordType$N) with one required field per binding, a generated
// init function, and a registered type definition; error patterns mint an anonymous error
// type ($anonErrorType$N) whose detail is either symTable.detailType (rest-only) or a
// synthesized detail record. createRecordTypeNode / createDetailType (continuing below)
// build the corresponding record type node and detail record type for error patterns.
conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); conversionExpr.expr = expr; conversionExpr.targetType = lhsType; conversionExpr.type = lhsType; conversionExpr.pos = expr.pos; conversionExpr.checkTypes = false; return conversionExpr; } private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause, BVarSymbol varSymbol) { BType patternType; switch (patternClause.getKind()) { case MATCH_STATIC_PATTERN_CLAUSE: BLangMatchStaticBindingPatternClause staticPattern = (BLangMatchStaticBindingPatternClause) patternClause; patternType = staticPattern.literal.type; break; case MATCH_STRUCTURED_PATTERN_CLAUSE: BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) patternClause; patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable); break; default: BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause; patternType = simplePattern.variable.type; break; } BLangExpression binaryExpr; BType[] memberTypes; if (patternType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) patternType; memberTypes = unionType.getMemberTypes().toArray(new BType[0]); } else { memberTypes = new BType[1]; memberTypes[0] = patternType; } if (memberTypes.length == 1) { binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]); } else { BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]); BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]); binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR, lhsExpr.type, rhsExpr.type)); for (int i = 2; i < memberTypes.length; i++) { lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
rhsExpr = binaryExpr; binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR, lhsExpr.type, rhsExpr.type)); } } return binaryExpr; } private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) { if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) { BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable; List<BType> memberTypes = new ArrayList<>(); for (int i = 0; i < tupleVariable.memberVariables.size(); i++) { memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i))); } BTupleType tupleType = new BTupleType(memberTypes); if (tupleVariable.restVariable != null) { BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable); tupleType.restType = restArrayType.eType; } return tupleType; } if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) { BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable; BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++), env.enclPkg.symbol.pkgID, null, env.scope.owner); recordSymbol.initializerFunc = createRecordInitFunc(); recordSymbol.scope = new Scope(recordSymbol); recordSymbol.scope.define( names.fromString(recordSymbol.name.value + "."
+ recordSymbol.initializerFunc.funcName.value), recordSymbol.initializerFunc.symbol); List<BField> fields = new ArrayList<>(); List<BLangSimpleVariable> typeDefFields = new ArrayList<>(); for (int i = 0; i < recordVariable.variableList.size(); i++) { String fieldNameStr = recordVariable.variableList.get(i).key.value; Name fieldName = names.fromString(fieldNameStr); BType fieldType = getStructuredBindingPatternType( recordVariable.variableList.get(i).valueBindingPattern); BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType, recordSymbol); fields.add(new BField(fieldName, bindingPatternVariable.pos, fieldSymbol)); typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordVarType = new BRecordType(recordSymbol); recordVarType.fields = fields; recordVarType.restFieldType = recordVariable.restParam != null ? ((BMapType) ((BLangSimpleVariable) recordVariable.restParam).type).constraint : symTable.anydataType; recordSymbol.type = recordVarType; recordVarType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields, recordVarType, bindingPatternVariable.pos); recordTypeNode.initFunction = rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable), env); TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env); return recordVarType; } if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) { BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable; BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol( SymTag.ERROR, Flags.PUBLIC, names.fromString("$anonErrorType$" + errorCount++), env.enclPkg.symbol.pkgID, null, null); BType detailType; if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
detailType = symTable.detailType; } else { detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++); BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType); TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env); } BErrorType errorType = new BErrorType(errorTypeSymbol, ((BErrorType) errorVariable.type).reasonType, detailType); errorTypeSymbol.type = errorType; TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env); return errorType; } return bindingPatternVariable.type; } private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) { List<BLangSimpleVariable> fieldList = new ArrayList<>(); for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) { BVarSymbol symbol = field.valueBindingPattern.symbol; if (symbol == null) { symbol = new BVarSymbol( Flags.PUBLIC, names.fromString(field.key.value + "$"), this.env.enclPkg.packageID, symTable.pureType, null); } BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable( field.valueBindingPattern.pos, symbol.name.value, field.valueBindingPattern.type, field.valueBindingPattern.expr, symbol); fieldList.add(fieldVar); } return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos); } private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail, BLangSimpleVariable restDetail, int errorNo) { BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol( SymTag.RECORD, Flags.PUBLIC, names.fromString("$anonErrorType$" + errorNo + "$detailType"), env.enclPkg.symbol.pkgID, null, null); detailRecordTypeSymbol.initializerFunc = createRecordInitFunc(); detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol); detailRecordTypeSymbol.scope.define( names.fromString(detailRecordTypeSymbol.name.value + "."
// NOTE(review): tail of createDetailType — the detail record is sealed only when there is
// no rest detail binding; each detail entry becomes a public field with its synthesized
// binding-pattern type. createRecordInitFunc builds a nil-returning init function symbol
// for synthesized records; createErrorTypeNode wraps a BErrorType in a type node.
// createPatternMatchBinaryExpr picks the runtime test per pattern kind: static patterns use
// value equality against the literal (createBinaryExpression, which also recurses through
// group/binary expressions OR-wise and treats `_` as an `is any` test, falling back to the
// type-set equality operator when no direct EQUAL operator resolves); structured patterns
// use an `is like` test; nil uses `== ()`; everything else an is-assignable check.
// createAssignmentStmt / createStructFieldUpdate build initializer assignments, the latter
// rewriting `self.<field> = <expr>` inside the type's init function env.
+ detailRecordTypeSymbol.initializerFunc.funcName.value), detailRecordTypeSymbol.initializerFunc.symbol); BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol); detailRecordType.restFieldType = symTable.anydataType; if (restDetail == null) { detailRecordType.sealed = true; } for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) { Name fieldName = names.fromIdNode(detailEntry.key); BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern); BVarSymbol fieldSym = new BVarSymbol( Flags.PUBLIC, fieldName, detailRecordTypeSymbol.pkgID, fieldType, detailRecordTypeSymbol); detailRecordType.fields.add(new BField(fieldName, detailEntry.key.pos, fieldSym)); detailRecordTypeSymbol.scope.define(fieldName, fieldSym); } return detailRecordType; } private BAttachedFunction createRecordInitFunc() { BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null); BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol( Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false); initFuncSymbol.retType = symTable.nilType; return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType); } BLangErrorType createErrorTypeNode(BErrorType errorType) { BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode(); errorTypeNode.type = errorType; return errorTypeNode; } private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause, BVarSymbol varSymbol, BType patternType) { DiagnosticPos pos = patternClause.pos; BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol); if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == patternClause.getKind()) { BLangMatchStaticBindingPatternClause pattern = (BLangMatchStaticBindingPatternClause) patternClause; return createBinaryExpression(pos, varRef, pattern.literal); } if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE ==
patternClause.getKind()) { return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType); } if (patternType == symTable.nilType) { BLangLiteral bLangLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null); return ASTBuilderUtil.createBinaryExpr(pos, varRef, bLangLiteral, symTable.booleanType, OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL, symTable.anyType, symTable.nilType)); } else { return createIsAssignableExpression(pos, varSymbol, patternType); } } private BLangExpression createBinaryExpression(DiagnosticPos pos, BLangSimpleVarRef varRef, BLangExpression expression) { BLangBinaryExpr binaryExpr; if (NodeKind.GROUP_EXPR == expression.getKind()) { return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression); } if (NodeKind.BINARY_EXPR == expression.getKind()) { binaryExpr = (BLangBinaryExpr) expression; BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr); BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr); binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR, (BOperatorSymbol) symResolver .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType)); } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) { BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode(); anyType.type = symTable.anyType; anyType.typeKind = TypeKind.ANY; return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType); } else { binaryExpr = ASTBuilderUtil .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null); BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.type, expression.type); if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver
.getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.type, binaryExpr); } binaryExpr.opSymbol = (BOperatorSymbol) opSymbol; } return binaryExpr; } private BLangIsAssignableExpr createIsAssignableExpression(DiagnosticPos pos, BVarSymbol varSymbol, BType patternType) { BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol); return ASTBuilderUtil.createIsAssignableExpr(pos, varRef, patternType, symTable.booleanType, names); } private BLangIsLikeExpr createIsLikeExpression(DiagnosticPos pos, BLangExpression expr, BType type) { return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType); } private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) { BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode(); varRef.pos = variable.pos; varRef.variableName = variable.name; varRef.symbol = variable.symbol; varRef.type = variable.type; BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode(); assignmentStmt.expr = variable.expr; assignmentStmt.pos = variable.pos; assignmentStmt.setVariable(varRef); return assignmentStmt; } private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable, BVarSymbol symbol) { BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(variable.pos, symbol); BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, variable.name); fieldAccess.symbol = variable.symbol; fieldAccess.type = variable.type; BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode(); assignmentStmt.expr = variable.expr; assignmentStmt.pos = variable.pos; assignmentStmt.setVariable(fieldAccess); SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env); return rewrite(assignmentStmt, initFuncEnv); } private void addMatchExprDefaultCase(BLangMatchExpression
// NOTE(review): addMatchExprDefaultCase computes which member types of the match
// expression's (union) type are not covered by any clause and appends an identity default
// clause for their union, so the desugared match expression stays exhaustive.
// safeNavigate reports whether an access expression (or any access in its lhs chain)
// uses ?. / error-safe navigation and is not an lvalue. rewriteSafeNavigationExpr wraps
// the chain into `{ <temp-result var def>; <outermost match stmt>; } -> temp-result`
// as a statement expression, then resets the navigation bookkeeping stacks.
// handleSafeNavigation recursively builds one nested match statement per safe-navigation
// step (nil pattern, error pattern as applicable, plus a success pattern that carries the
// next access), chaining each new match into the previous success pattern's body.
// getMatchErrorPattern (truncated at the end of this chunk) builds the error branch that
// assigns the matched error to the temp result var.
bLangMatchExpression) { List<BType> exprTypes; List<BType> unmatchedTypes = new ArrayList<>(); if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) bLangMatchExpression.expr.type; exprTypes = new ArrayList<>(unionType.getMemberTypes()); } else { exprTypes = Lists.of(bLangMatchExpression.type); } for (BType type : exprTypes) { boolean assignable = false; for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) { if (this.types.isAssignable(type, pattern.variable.type)) { assignable = true; break; } } if (!assignable) { unmatchedTypes.add(type); } } if (unmatchedTypes.isEmpty()) { return; } BType defaultPatternType; if (unmatchedTypes.size() == 1) { defaultPatternType = unmatchedTypes.get(0); } else { defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes)); } String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default"; BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos, patternCaseVarName, defaultPatternType, null, new BVarSymbol(0, names.fromString(patternCaseVarName), this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner)); BLangMatchExprPatternClause defaultPattern = (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern(); defaultPattern.variable = patternMatchCaseVar; defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol); defaultPattern.pos = bLangMatchExpression.pos; bLangMatchExpression.patternClauses.add(defaultPattern); } private boolean safeNavigate(BLangAccessExpression accessExpr) { if (accessExpr.lhsVar || accessExpr.expr == null) { return false; } if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) { return true; } NodeKind kind = accessExpr.expr.getKind(); if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) { return safeNavigate((BLangAccessExpression) accessExpr.expr);
} return false; } private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) { BType originalExprType = accessExpr.type; String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result"; BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID, accessExpr.type, this.env.scope.owner)); BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol); handleSafeNavigation(accessExpr, accessExpr.type, tempResultVar); BLangMatch matcEXpr = this.matchStmtStack.firstElement(); BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr)); BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef); stmtExpression.type = originalExprType; this.matchStmtStack = new Stack<>(); this.accessExprStack = new Stack<>(); this.successPattern = null; this.safeNavigationAssignment = null; return stmtExpression; } private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) { if (accessExpr.expr == null) { return; } NodeKind kind = accessExpr.expr.getKind(); if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR || kind == NodeKind.INVOCATION) { handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar); } if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) { BType originalType = accessExpr.originalType; if (TypeTags.isXMLTypeTag(originalType.tag)) { accessExpr.type = BUnionType.create(null, originalType, symTable.errorType); } else { accessExpr.type = originalType; } if (this.safeNavigationAssignment != null) {
this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.type); } return; } /* * If the field access is a safe navigation, create a match expression. * Then chain the current expression as the success-pattern of the parent * match expr, if available. * eg: * x but { <--- parent match expr * error e => e, * T t => t.y but { <--- current expr * error e => e, * R r => r.z * } * } */ BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>()); if (accessExpr.nilSafeNavigation) { matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar)); matchStmt.type = type; } if (accessExpr.errorSafeNavigation) { matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar)); matchStmt.type = type; matchStmt.pos = accessExpr.pos; } BLangMatchTypedBindingPatternClause successPattern = getSuccessPattern(accessExpr, tempResultVar, accessExpr.errorSafeNavigation); matchStmt.patternClauses.add(successPattern); this.matchStmtStack.push(matchStmt); if (this.successPattern != null) { this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt)); } this.successPattern = successPattern; } private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr, BLangSimpleVariable tempResultVar) { String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error"; BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(expr.pos, errorPatternVarName, symTable.errorType, null, new BVarSymbol(0, names.fromString(errorPatternVarName), this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner)); BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, errorPatternVar.symbol); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef,
assignmentRhsExpr, false); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt)); BLangMatchTypedBindingPatternClause errorPattern = ASTBuilderUtil .createMatchStatementPattern(expr.pos, errorPatternVar, patternBody); return errorPattern; } private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(DiagnosticPos pos, BLangExpression expr) { String nullPatternVarName = IGNORE.toString(); BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(pos, nullPatternVarName, symTable.nilType, null, new BVarSymbol(0, names.fromString(nullPatternVarName), this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner)); BLangMatchExprPatternClause nullPattern = (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern(); nullPattern.variable = errorPatternVar; nullPattern.expr = expr; nullPattern.pos = pos; return nullPattern; } private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr, BLangSimpleVariable tempResultVar) { String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null"; BLangSimpleVariable nullPatternVar = ASTBuilderUtil.createVariable(expr.pos, nullPatternVarName, symTable.nilType, null, new BVarSymbol(0, names.fromString(nullPatternVarName), this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner)); BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, nullPatternVar.symbol); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, assignmentRhsExpr, false); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt)); BLangMatchTypedBindingPatternClause nullPattern = ASTBuilderUtil .createMatchStatementPattern(expr.pos, nullPatternVar, patternBody); return nullPattern; } private BLangMatchTypedBindingPatternClause 
getSuccessPattern(BLangAccessExpression accessExpr, BLangSimpleVariable tempResultVar, boolean liftError) { BType type = types.getSafeType(accessExpr.expr.type, true, liftError); String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success"; BVarSymbol successPatternSymbol; if (type.tag == TypeTags.INVOKABLE) { successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName), this.env.scope.owner.pkgID, type, this.env.scope.owner); } else { successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName), this.env.scope.owner.pkgID, type, this.env.scope.owner); } BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName, type, null, successPatternSymbol); accessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol); accessExpr.errorSafeNavigation = false; accessExpr.nilSafeNavigation = false; if (TypeTags.isXMLTypeTag(accessExpr.expr.type.tag)) { accessExpr.type = BUnionType.create(null, accessExpr.originalType, symTable.errorType, symTable.nilType); } else { accessExpr.type = accessExpr.originalType; } BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol); BLangExpression assignmentRhsExpr = addConversionExprIfRequired(accessExpr, tempResultVarRef.type); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt)); BLangMatchTypedBindingPatternClause successPattern = ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody); this.safeNavigationAssignment = assignmentStmt; return successPattern; } private boolean safeNavigateLHS(BLangExpression expr) { if (expr.getKind() != NodeKind.FIELD_BASED_ACCESS_EXPR && expr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) { 
return false; } BLangExpression varRef = ((BLangAccessExpression) expr).expr; if (varRef.type.isNullable()) { return true; } return safeNavigateLHS(varRef); } private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr, boolean safeAssignment) { this.accessExprStack = new Stack<>(); List<BLangStatement> stmts = new ArrayList<>(); createLHSSafeNavigation(stmts, accessExpr.expr); BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, cloneExpression(accessExpr), rhsExpr); stmts.add(assignment); return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts); } private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) { NodeKind kind = expr.getKind(); boolean root = false; if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR || kind == NodeKind.INVOCATION) { BLangAccessExpression accessExpr = (BLangAccessExpression) expr; createLHSSafeNavigation(stmts, accessExpr.expr); accessExpr.expr = accessExprStack.pop(); } else { root = true; } if (expr.getKind() == NodeKind.INVOCATION) { BLangInvocation invocation = (BLangInvocation) expr; BVarSymbol interMediateSymbol = new BVarSymbol(0, names.fromString(GEN_VAR_PREFIX.value + "i_intermediate"), this.env.scope.owner.pkgID, invocation.type, this.env.scope.owner); BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos, interMediateSymbol.name.value, invocation.type, invocation, interMediateSymbol); BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos, intermediateVariable); stmts.add(intermediateVariableDefinition); expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol); } if (expr.type.isNullable()) { BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode()); isNillTest.type = symTable.booleanType; BLangBlockStmt thenStmt = 
ASTBuilderUtil.createBlockStmt(expr.pos); expr = cloneExpression(expr); expr.type = types.getSafeType(expr.type, true, false); if (isDefaultableMappingType(expr.type) && !root) { BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode(); jsonLiteral.type = expr.type; jsonLiteral.pos = expr.pos; BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos, expr, jsonLiteral); thenStmt.addStatement(assignment); } else { BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); literal.value = ERROR_REASON_NULL_REFERENCE_ERROR; literal.type = symTable.stringType; BLangInvocation errorCtorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode(); errorCtorInvocation.pos = expr.pos; errorCtorInvocation.argExprs.add(literal); errorCtorInvocation.requiredArgs.add(literal); errorCtorInvocation.type = symTable.errorType; errorCtorInvocation.symbol = symTable.errorConstructor; BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode(); panicNode.expr = errorCtorInvocation; panicNode.pos = expr.pos; thenStmt.addStatement(panicNode); } BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null); stmts.add(ifelse); } accessExprStack.push(expr); } BLangValueType getNillTypeNode() { BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); nillTypeNode.typeKind = TypeKind.NIL; nillTypeNode.type = symTable.nilType; return nillTypeNode; } private BLangVariableReference cloneExpression(BLangExpression expr) { switch (expr.getKind()) { case SIMPLE_VARIABLE_REF: return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol); case FIELD_BASED_ACCESS_EXPR: case INDEX_BASED_ACCESS_EXPR: case INVOCATION: return cloneAccessExpr((BLangAccessExpression) expr); default: throw new IllegalStateException(); } } private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) { if (originalAccessExpr.expr == null) { return 
originalAccessExpr; } BLangVariableReference varRef; NodeKind kind = originalAccessExpr.expr.getKind(); if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR || kind == NodeKind.INVOCATION) { varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr); } else { varRef = cloneExpression(originalAccessExpr.expr); } varRef.type = types.getSafeType(originalAccessExpr.expr.type, true, false); BLangAccessExpression accessExpr; switch (originalAccessExpr.getKind()) { case FIELD_BASED_ACCESS_EXPR: accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef, ((BLangFieldBasedAccess) originalAccessExpr).field); break; case INDEX_BASED_ACCESS_EXPR: accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef, ((BLangIndexBasedAccess) originalAccessExpr).indexExpr); break; case INVOCATION: accessExpr = null; break; default: throw new IllegalStateException(); } accessExpr.originalType = originalAccessExpr.originalType; accessExpr.pos = originalAccessExpr.pos; accessExpr.lhsVar = originalAccessExpr.lhsVar; accessExpr.symbol = originalAccessExpr.symbol; accessExpr.errorSafeNavigation = false; accessExpr.nilSafeNavigation = false; accessExpr.type = originalAccessExpr.originalType; return accessExpr; } private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) { BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L); return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.ADD, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD, symTable.intType, symTable.intType)); } private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) { BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L); return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.SUB, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB, symTable.intType, 
symTable.intType)); } private BLangLiteral getBooleanLiteral(boolean value) { BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); literal.value = value; literal.type = symTable.booleanType; return literal; } private boolean isDefaultableMappingType(BType type) { switch (types.getSafeType(type, true, false).tag) { case TypeTags.JSON: case TypeTags.MAP: case TypeTags.RECORD: return true; default: return false; } } private BLangFunction createInitFunctionForObjectType(BLangObjectTypeNode structureTypeNode, SymbolEnv env) { BLangFunction initFunction = TypeDefBuilderHelper.createInitFunctionForStructureType(structureTypeNode, env, Names.GENERATED_INIT_SUFFIX, names, symTable); BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) structureTypeNode.type.tsymbol); typeSymbol.generatedInitializerFunc = new BAttachedFunction(Names.GENERATED_INIT_SUFFIX, initFunction.symbol, (BInvokableType) initFunction.type); structureTypeNode.generatedInitFunction = initFunction; initFunction.returnTypeNode.type = symTable.nilType; return rewrite(initFunction, env); } private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) { /* * Desugar (lhsExpr && rhsExpr) to following if-else: * * logical AND: * ------------- * T $result$; * if (lhsExpr) { * $result$ = rhsExpr; * } else { * $result$ = false; * } * * logical OR: * ------------- * T $result$; * if (lhsExpr) { * $result$ = true; * } else { * $result$ = rhsExpr; * } * */ BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.type, null, binaryExpr.pos); BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos); BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos); BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol); BLangExpression thenResult; if (binaryExpr.opKind == OperatorKind.AND) { thenResult = binaryExpr.rhsExpr; } else { thenResult = getBooleanLiteral(true); } BLangAssignment 
thenAssignment = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult); thenBody.addStatement(thenAssignment); BLangExpression elseResult; BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol); if (binaryExpr.opKind == OperatorKind.AND) { elseResult = getBooleanLiteral(false); } else { elseResult = binaryExpr.rhsExpr; } BLangAssignment elseAssignment = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult); elseBody.addStatement(elseAssignment); BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol); BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody); BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse)); BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef); stmtExpr.type = binaryExpr.type; result = rewriteExpr(stmtExpr); } /** * Split packahe init function into several smaller functions. 
* * @param packageNode package node * @param env symbol environment * @return initial init function but trimmed in size */ private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) { int methodSize = INIT_METHOD_SPLIT_SIZE; BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body; if (funcBody.stmts.size() < methodSize || !isJvmTarget) { return packageNode.initFunction; } BLangFunction initFunction = packageNode.initFunction; List<BLangFunction> generatedFunctions = new ArrayList<>(); List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts); funcBody.stmts.clear(); BLangFunction newFunc = initFunction; BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body; int varDefIndex = 0; for (int i = 0; i < stmts.size(); i++) { if (stmts.get(i).getKind() == NodeKind.VARIABLE_DEF) { break; } varDefIndex++; if (i > 0 && i % methodSize == 0) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.add(stmts.get(i)); } List<BLangStatement> chunkStmts = new ArrayList<>(); for (int i = varDefIndex; i < stmts.size(); i++) { BLangStatement stmt = stmts.get(i); chunkStmts.add(stmt); varDefIndex++; if ((stmt.getKind() == NodeKind.ASSIGNMENT) && (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) && (newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) { if (newFuncBody.stmts.size() + chunkStmts.size() > methodSize) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.addAll(chunkStmts); chunkStmts.clear(); } else if ((stmt.getKind() == NodeKind.ASSIGNMENT) && (((BLangAssignment) stmt).varRef instanceof 
BLangPackageVarRef) && Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags, Flags.LISTENER) ) { break; } } newFuncBody.stmts.addAll(chunkStmts); for (int i = varDefIndex; i < stmts.size(); i++) { if (i > 0 && i % methodSize == 0) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.add(stmts.get(i)); } generatedFunctions.add(newFunc); for (int j = 0; j < generatedFunctions.size() - 1; j++) { BLangFunction thisFunction = generatedFunctions.get(j); BLangCheckedExpr checkedExpr = ASTBuilderUtil.createCheckExpr(initFunction.pos, createInvocationNode(generatedFunctions.get(j + 1).name.value, new ArrayList<>(), symTable.errorOrNilType), symTable.nilType); checkedExpr.equivalentErrorTypeList.add(symTable.errorType); BLangExpressionStmt expressionStmt = ASTBuilderUtil .createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body); expressionStmt.expr = checkedExpr; expressionStmt.expr.pos = initFunction.pos; if (j > 0) { thisFunction = rewrite(thisFunction, env); packageNode.functions.add(thisFunction); packageNode.topLevelNodes.add(thisFunction); } } if (generatedFunctions.size() > 1) { BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1); lastFunc = rewrite(lastFunc, env); packageNode.functions.add(lastFunc); packageNode.topLevelNodes.add(lastFunc); } return generatedFunctions.get(0); } /** * Create an intermediate package init function. 
* * @param pkgNode package node * @param env symbol environment of package */ private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) { String alias = pkgNode.symbol.pkgID.toString(); BLangFunction initFunction = ASTBuilderUtil .createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias, new Name(Names.INIT_FUNCTION_SUFFIX.value + this.initFuncIndex++), symTable); createInvokableSymbol(initFunction, env); return initFunction; } private BType getRestType(BInvokableSymbol invokableSymbol) { if (invokableSymbol != null && invokableSymbol.restParam != null) { return invokableSymbol.restParam.type; } return null; } private BType getRestType(BLangFunction function) { if (function != null && function.restParam != null) { return function.restParam.type; } return null; } private BVarSymbol getRestSymbol(BLangFunction function) { if (function != null && function.restParam != null) { return function.restParam.symbol; } return null; } private boolean isComputedKey(RecordLiteralNode.RecordField field) { if (!field.isKeyValueField()) { return false; } return ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey; } private BLangStatementExpression rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) { List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields; BType type = mappingConstructorExpr.type; DiagnosticPos pos = mappingConstructorExpr.pos; BLangRecordLiteral recordLiteral = type.tag == TypeTags.RECORD ? 
new BLangStructLiteral(pos, type) : new BLangMapLiteral(pos, type); String name = DESUGARED_MAPPING_CONSTR_KEY + this.annonVarCount++; BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type, this.env.scope.owner); BLangSimpleVariable var = createVariable(pos, name, type, recordLiteral, varSymbol); BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos); varDef.var = var; varDef.type = type; BLangBlockStmt blockStmt = createBlockStmt(pos); blockStmt.stmts.add(varDef); BLangSimpleVarRef mappingVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol); for (RecordLiteralNode.RecordField field : fields) { if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValueField = (BLangRecordLiteral.BLangRecordKeyValueField) field; BLangRecordLiteral.BLangRecordKey key = keyValueField.key; BLangExpression keyExpr = key.expr; BLangExpression indexExpr = key.computedKey ? keyExpr : keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? 
createStringLiteral(pos, ((BLangSimpleVarRef) keyExpr).variableName.value) : ((BLangLiteral) keyExpr);; addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef, indexExpr, keyValueField.valueExpr); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field; addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef, createStringLiteral(pos, varRefField.variableName.value), varRefField); } else { BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField = (BLangRecordLiteral.BLangRecordSpreadOperatorField) field; BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode(); foreach.pos = pos; foreach.collection = generateMapEntriesInvocation(spreadOpField.expr, spreadOpField.expr.type); types.setForeachTypedBindingPatternType(foreach); BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i", foreach.varType); foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name), this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner); BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol); foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable); foreach.isDeclaredWithVar = true; BLangBlockStmt foreachBodyBlock = ASTBuilderUtil.createBlockStmt(pos); BTupleType foreachVarRefType = (BTupleType) foreachVarRef.type; BLangIndexBasedAccess indexExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode(); indexExpr.pos = pos; indexExpr.expr = foreachVarRef; indexExpr.indexExpr = rewriteExpr(createIntLiteral(0)); indexExpr.type = foreachVarRefType.tupleTypes.get(0); BLangIndexBasedAccess valueExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode(); valueExpr.pos = pos; valueExpr.expr = foreachVarRef; valueExpr.indexExpr = rewriteExpr(createIntLiteral(1)); valueExpr.type = foreachVarRefType.tupleTypes.get(1); 
addMemberStoreForKeyValuePair(pos, foreachBodyBlock, mappingVarRef, indexExpr, valueExpr); foreach.body = foreachBodyBlock; blockStmt.addStatement(foreach); } } BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, mappingVarRef); stmtExpression.type = type; return stmtExpression; } private void addMemberStoreForKeyValuePair(DiagnosticPos pos, BLangBlockStmt blockStmt, BLangExpression mappingVarRef, BLangExpression indexExpr, BLangExpression value) { BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt); assignmentStmt.expr = rewriteExpr(value); BLangIndexBasedAccess indexAccessNode = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode(); indexAccessNode.pos = pos; indexAccessNode.expr = mappingVarRef; indexAccessNode.indexExpr = rewriteExpr(indexExpr); indexAccessNode.type = value.type; assignmentStmt.varRef = indexAccessNode; } private Map<String, BLangExpression> getKeyValuePairs(BLangStatementExpression desugaredMappingConst) { List<BLangStatement> stmts = ((BLangBlockStmt) desugaredMappingConst.stmt).stmts; Map<String, BLangExpression> keyValuePairs = new HashMap<>(); for (int i = 1; i < stmts.size(); i++) { BLangAssignment assignmentStmt = (BLangAssignment) stmts.get(i); BLangExpression indexExpr = ((BLangIndexBasedAccess) assignmentStmt.varRef).indexExpr; if (indexExpr.getKind() != NodeKind.LITERAL) { continue; } keyValuePairs.put((String) ((BLangLiteral) indexExpr).value, assignmentStmt.expr); } return keyValuePairs; } }
restArgs.get(restArgs.size() - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
public void visit(BLangFunction funcNode) { SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env); if (!funcNode.interfaceFunction) { addReturnIfNotPresent(funcNode); } funcNode.originalFuncSymbol = funcNode.symbol; funcNode.symbol = ASTBuilderUtil.duplicateInvokableSymbol(funcNode.symbol); funcNode.requiredParams = rewrite(funcNode.requiredParams, funcEnv); funcNode.restParam = rewrite(funcNode.restParam, funcEnv); funcNode.workers = rewrite(funcNode.workers, funcEnv); if (funcNode.returnTypeNode != null && funcNode.returnTypeNode.getKind() != null) { funcNode.returnTypeNode = rewrite(funcNode.returnTypeNode, funcEnv); } List<BLangAnnotationAttachment> participantAnnotation = funcNode.annAttachments.stream() .filter(a -> Transactions.isTransactionsAnnotation(a.pkgAlias.value, a.annotationName.value)) .collect(Collectors.toList()); funcNode.body = rewrite(funcNode.body, funcEnv); funcNode.annAttachments.forEach(attachment -> rewrite(attachment, env)); if (funcNode.returnTypeNode != null) { funcNode.returnTypeAnnAttachments.forEach(attachment -> rewrite(attachment, env)); } if (participantAnnotation.isEmpty()) { result = funcNode; return; } result = desugarParticipantFunction(funcNode, participantAnnotation); } private BLangFunction desugarParticipantFunction(BLangFunction funcNode, List<BLangAnnotationAttachment> participantAnnotation) { BLangAnnotationAttachment annotation = participantAnnotation.get(0); BLangBlockFunctionBody onCommitBody = null; BLangBlockFunctionBody onAbortBody = null; funcNode.requiredParams.forEach(bLangSimpleVariable -> bLangSimpleVariable.symbol.closure = true); if (funcNode.receiver != null) { funcNode.receiver.symbol.closure = true; } BType trxReturnType = BUnionType.create(null, symTable.errorType, symTable.anyType); BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType); BLangLambdaFunction commitFunc = createLambdaFunction(funcNode.pos, "$anonOnCommitFunc$", 
ASTBuilderUtil.createTypeNode(symTable.nilType)); BLangLambdaFunction abortFunc = createLambdaFunction(funcNode.pos, "$anonOnAbortFunc$", ASTBuilderUtil.createTypeNode(symTable.nilType)); BLangSimpleVariable onCommitTrxVar = ASTBuilderUtil .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null, new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID, symTable.stringType, commitFunc.function.symbol)); BLangSimpleVariable onAbortTrxVar = ASTBuilderUtil .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null, new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID, symTable.stringType, abortFunc.function.symbol)); BLangSimpleVarRef trxIdOnCommitRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onCommitTrxVar.symbol); BLangSimpleVarRef trxIdOnAbortRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onAbortTrxVar.symbol); for (Map.Entry<String, BLangExpression> entry : getKeyValuePairs((BLangStatementExpression) annotation.expr).entrySet()) { switch (entry.getKey()) { case Transactions.TRX_ONCOMMIT_FUNC: BInvokableSymbol commitSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol; BLangInvocation onCommit = ASTBuilderUtil .createInvocationExprMethod(funcNode.pos, commitSym, Lists.of(trxIdOnCommitRef), Collections.emptyList(), symResolver); BLangStatement onCommitStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommit); onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onCommitStmt)); break; case Transactions.TRX_ONABORT_FUNC: BInvokableSymbol abortSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol; BLangInvocation onAbort = ASTBuilderUtil .createInvocationExprMethod(funcNode.pos, abortSym, Lists.of(trxIdOnAbortRef), Collections.emptyList(), symResolver); BLangStatement onAbortStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbort); onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onAbortStmt)); break; } 
} if (onCommitBody == null) { onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos); BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommitBody); returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE); } if (onAbortBody == null) { onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos); BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbortBody); returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE); } commitFunc.function.body = onCommitBody; commitFunc.function.requiredParams.add(onCommitTrxVar); commitFunc.type = new BInvokableType(Lists.of(onCommitTrxVar.symbol.type), commitFunc.function.symbol.type.getReturnType(), null); commitFunc.function.symbol.type = commitFunc.type; commitFunc.function.symbol.params = Lists.of(onCommitTrxVar.symbol); abortFunc.function.body = onAbortBody; abortFunc.function.requiredParams.add(onAbortTrxVar); abortFunc.type = new BInvokableType(Lists.of(onAbortTrxVar.symbol.type), abortFunc.function.symbol.type.getReturnType(), null); abortFunc.function.symbol.type = abortFunc.type; abortFunc.function.symbol.params = Lists.of(onAbortTrxVar.symbol); BSymbol trxModSym = env.enclPkg.imports .stream() .filter(importPackage -> importPackage.symbol. 
pkgID.toString().equals(Names.TRANSACTION_ORG.value + Names.ORG_NAME_SEPARATOR.value + Names.TRANSACTION_PACKAGE.value)) .findAny().get().symbol; BInvokableSymbol invokableSymbol = (BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(trxModSym), getParticipantFunctionName(funcNode)); BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.stringType, getTransactionBlockId()); BLangLambdaFunction trxMainWrapperFunc = createLambdaFunction(funcNode.pos, "$anonTrxWrapperFunc$", Collections.emptyList(), funcNode.returnTypeNode, funcNode.body); for (BLangSimpleVariable var : funcNode.requiredParams) { trxMainWrapperFunc.function.closureVarSymbols.add(new ClosureVarSymbol(var.symbol, var.pos)); } BLangBlockFunctionBody trxMainBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos); BLangLambdaFunction trxMainFunc = createLambdaFunction(funcNode.pos, "$anonTrxParticipantFunc$", Collections.emptyList(), trxReturnNode, trxMainBody); trxMainWrapperFunc.capturedClosureEnv = trxMainFunc.function.clonedEnv; commitFunc.capturedClosureEnv = env.createClone(); abortFunc.capturedClosureEnv = env.createClone(); BVarSymbol wrapperSym = new BVarSymbol(0, names.fromString("$wrapper$1"), this.env.scope.owner.pkgID, trxMainWrapperFunc.type, trxMainFunc.function.symbol); BLangSimpleVariable wrapperFuncVar = ASTBuilderUtil.createVariable(funcNode.pos, "$wrapper$1", trxMainWrapperFunc.type, trxMainWrapperFunc, wrapperSym); BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(funcNode.pos, trxMainBody); variableDef.var = wrapperFuncVar; BLangSimpleVarRef wrapperVarRef = rewrite(ASTBuilderUtil.createVariableRef(variableDef.pos, wrapperFuncVar.symbol), env); BLangInvocation wrapperInvocation = new BFunctionPointerInvocation(trxMainWrapperFunc.pos, wrapperVarRef, wrapperFuncVar.symbol, trxMainWrapperFunc.function.symbol.retType); BLangReturn wrapperReturn = ASTBuilderUtil.createReturnStmt(funcNode.pos, 
addConversionExprIfRequired (wrapperInvocation, trxReturnNode.type)); trxMainWrapperFunc.function.receiver = funcNode.receiver; trxMainFunc.function.receiver = funcNode.receiver; trxMainBody.stmts.add(wrapperReturn); rewrite(trxMainFunc.function, env); List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, trxMainFunc, commitFunc, abortFunc); BLangInvocation participantInvocation = ASTBuilderUtil.createInvocationExprMethod(funcNode.pos, invokableSymbol, requiredArgs, Collections.emptyList(), symResolver); participantInvocation.type = ((BInvokableType) invokableSymbol.type).retType; BLangStatement stmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired (participantInvocation, funcNode.symbol.retType)); funcNode.body = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(rewrite(stmt, env))); return funcNode; } private Name getParticipantFunctionName(BLangFunction function) { if (Symbols.isFlagOn((function).symbol.flags, Flags.RESOURCE)) { return TRX_REMOTE_PARTICIPANT_BEGIN_FUNCTION; } return TRX_LOCAL_PARTICIPANT_BEGIN_FUNCTION; } @Override public void visit(BLangResource resourceNode) { } public void visit(BLangAnnotation annotationNode) { annotationNode.annAttachments.forEach(attachment -> rewrite(attachment, env)); } public void visit(BLangAnnotationAttachment annAttachmentNode) { annAttachmentNode.expr = rewrite(annAttachmentNode.expr, env); result = annAttachmentNode; } @Override public void visit(BLangSimpleVariable varNode) { if (((varNode.symbol.owner.tag & SymTag.INVOKABLE) != SymTag.INVOKABLE) && (varNode.symbol.owner.tag & SymTag.LET) != SymTag.LET) { varNode.expr = null; result = varNode; return; } if (varNode.typeNode != null && varNode.typeNode.getKind() != null) { varNode.typeNode = rewrite(varNode.typeNode, env); } BLangExpression bLangExpression = rewriteExpr(varNode.expr); if (bLangExpression != null) { bLangExpression = addConversionExprIfRequired(bLangExpression, varNode.type); } varNode.expr = 
bLangExpression; varNode.annAttachments.forEach(attachment -> rewrite(attachment, env)); result = varNode; } @Override public void visit(BLangLetExpression letExpression) { SymbolEnv prevEnv = this.env; this.env = letExpression.env; BLangExpression expr = letExpression.expr; BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(letExpression.pos); for (BLangLetVariable letVariable : letExpression.letVarDeclarations) { BLangNode node = rewrite((BLangNode) letVariable.definitionNode, env); if (node.getKind() == NodeKind.BLOCK) { blockStmt.stmts.addAll(((BLangBlockStmt) node).stmts); } else { blockStmt.addStatement((BLangSimpleVariableDef) node); } } BLangSimpleVariableDef tempVarDef = createVarDef(String.format("$let_var_%d_$", letCount++), expr.type, expr, expr.pos); BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempVarDef.var.symbol); blockStmt.addStatement(tempVarDef); BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef); stmtExpr.type = expr.type; result = rewrite(stmtExpr, env); this.env = prevEnv; } @Override public void visit(BLangTupleVariable varNode) { result = varNode; } @Override public void visit(BLangRecordVariable varNode) { result = varNode; } @Override public void visit(BLangErrorVariable varNode) { result = varNode; } @Override public void visit(BLangBlockStmt block) { SymbolEnv blockEnv = SymbolEnv.createBlockEnv(block, env); block.stmts = rewriteStmt(block.stmts, blockEnv); result = block; } @Override public void visit(BLangSimpleVariableDef varDefNode) { varDefNode.var = rewrite(varDefNode.var, env); result = varDefNode; } @Override public void visit(BLangTupleVariableDef varDefNode) { varDefNode.var = rewrite(varDefNode.var, env); BLangTupleVariable tupleVariable = varDefNode.var; final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos); BType runTimeType = new BArrayType(symTable.anyType); String name = "tuple"; final BLangSimpleVariable tuple 
= ASTBuilderUtil.createVariable(varDefNode.pos, name, runTimeType, null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType, this.env.scope.owner)); tuple.expr = tupleVariable.expr; final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt); variableDef.var = tuple; createVarDefStmts(tupleVariable, blockStmt, tuple.symbol, null); createRestFieldVarDefStmts(tupleVariable, blockStmt, tuple.symbol); result = rewrite(blockStmt, env); } private void createRestFieldVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt blockStmt, BVarSymbol tupleVarSymbol) { final BLangSimpleVariable arrayVar = (BLangSimpleVariable) parentTupleVariable.restVariable; boolean isTupleType = parentTupleVariable.type.tag == TypeTags.TUPLE; DiagnosticPos pos = blockStmt.pos; if (arrayVar != null) { BLangArrayLiteral arrayExpr = createArrayLiteralExprNode(); arrayExpr.type = arrayVar.type; arrayVar.expr = arrayExpr; BLangSimpleVariableDef arrayVarDef = ASTBuilderUtil.createVariableDefStmt(arrayVar.pos, blockStmt); arrayVarDef.var = arrayVar; BLangExpression tupleExpr = parentTupleVariable.expr; BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, arrayVar.symbol); BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression(); startIndexLiteral.value = (long) (isTupleType ? 
((BTupleType) parentTupleVariable.type).tupleTypes.size() : parentTupleVariable.memberVariables.size()); startIndexLiteral.type = symTable.intType; BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr); BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral, getModifiedIntRangeEndExpr(lengthInvocation)); BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode(); foreach.pos = pos; foreach.collection = intRangeInvocation; types.setForeachTypedBindingPatternType(foreach); final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i", foreach.varType); foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name), this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner); BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol); foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable); foreach.isDeclaredWithVar = true; BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos); BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(arrayVarRef, createLengthInvocation(pos, arrayVarRef)); indexAccessExpr.type = (isTupleType ? 
((BTupleType) parentTupleVariable.type).restType : symTable.anyType); createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null); foreach.body = foreachBody; blockStmt.addStatement(foreach); } } @Override public void visit(BLangRecordVariableDef varDefNode) { BLangRecordVariable varNode = varDefNode.var; final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos); BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null); final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(varDefNode.pos, "$map$0", runTimeType, null, new BVarSymbol(0, names.fromString("$map$0"), this.env.scope.owner.pkgID, runTimeType, this.env.scope.owner)); mapVariable.expr = varDefNode.var.expr; final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt); variableDef.var = mapVariable; createVarDefStmts(varNode, blockStmt, mapVariable.symbol, null); result = rewrite(blockStmt, env); } @Override public void visit(BLangErrorVariableDef varDefNode) { BLangErrorVariable errorVariable = varDefNode.errorVariable; final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos); BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString("$error$"), this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner); final BLangSimpleVariable error = ASTBuilderUtil.createVariable(varDefNode.pos, errorVarSymbol.name.value, symTable.errorType, null, errorVarSymbol); error.expr = errorVariable.expr; final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt); variableDef.var = error; createVarDefStmts(errorVariable, blockStmt, error.symbol, null); result = rewrite(blockStmt, env); } /** * This method iterate through each member of the tupleVar and create the relevant var def statements. This method * does the check for node kind of each member and call the related var def creation method. 
* * Example: * ((string, float) int)) ((a, b), c)) = (tuple) * * (a, b) is again a tuple, so it is a recursive var def creation. * * c is a simple var, so a simple var def will be created. * */ private void createVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt parentBlockStmt, BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) { final List<BLangVariable> memberVars = parentTupleVariable.memberVariables; for (int index = 0; index < memberVars.size(); index++) { BLangVariable variable = memberVars.get(index); BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.intType, (long) index); if (NodeKind.VARIABLE == variable.getKind()) { createSimpleVarDefStmt((BLangSimpleVariable) variable, parentBlockStmt, indexExpr, tupleVarSymbol, parentIndexAccessExpr); continue; } if (variable.getKind() == NodeKind.TUPLE_VARIABLE) { BLangTupleVariable tupleVariable = (BLangTupleVariable) variable; BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos, new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarDefStmts((BLangTupleVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr); continue; } if (variable.getKind() == NodeKind.RECORD_VARIABLE) { BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr( parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarDefStmts((BLangRecordVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr); continue; } if (variable.getKind() == NodeKind.ERROR_VARIABLE) { BType accessedElemType = symTable.errorType; if (tupleVarSymbol.type.tag == TypeTags.ARRAY) { BArrayType arrayType = (BArrayType) tupleVarSymbol.type; accessedElemType = arrayType.eType; } BLangIndexBasedAccess 
arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr( parentTupleVariable.pos, accessedElemType, tupleVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr); } } } /** * Overloaded method to handle record variables. * This method iterate through each member of the recordVar and create the relevant var def statements. This method * does the check for node kind of each member and call the related var def creation method. * * Example: * type Foo record { * string name; * (int, string) age; * Address address; * }; * * Foo {name: a, age: (b, c), address: d} = {record literal} * * a is a simple var, so a simple var def will be created. * * (b, c) is a tuple, so it is a recursive var def creation. * * d is a record, so it is a recursive var def creation. * */ private void createVarDefStmts(BLangRecordVariable parentRecordVariable, BLangBlockStmt parentBlockStmt, BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) { List<BLangRecordVariableKeyValue> variableList = parentRecordVariable.variableList; for (BLangRecordVariableKeyValue recordFieldKeyValue : variableList) { BLangVariable variable = recordFieldKeyValue.valueBindingPattern; BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.stringType, recordFieldKeyValue.key.value); if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.VARIABLE) { createSimpleVarDefStmt((BLangSimpleVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt, indexExpr, recordVarSymbol, parentIndexAccessExpr); continue; } if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) { BLangTupleVariable tupleVariable = (BLangTupleVariable) recordFieldKeyValue.valueBindingPattern; BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos, new 
BArrayType(symTable.anyType), recordVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarDefStmts((BLangTupleVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt, recordVarSymbol, arrayAccessExpr); continue; } if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) { BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr( parentRecordVariable.pos, symTable.mapType, recordVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarDefStmts((BLangRecordVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt, recordVarSymbol, arrayAccessExpr); continue; } if (variable.getKind() == NodeKind.ERROR_VARIABLE) { BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr( parentRecordVariable.pos, variable.type, recordVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, recordVarSymbol, arrayAccessExpr); } } if (parentRecordVariable.restParam != null) { DiagnosticPos pos = parentBlockStmt.pos; BMapType restParamType = (BMapType) ((BLangVariable) parentRecordVariable.restParam).type; BLangSimpleVarRef variableReference; if (parentIndexAccessExpr != null) { BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1", parentIndexAccessExpr.type, null, new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID, parentIndexAccessExpr.type, this.env.scope.owner)); mapVariable.expr = parentIndexAccessExpr; BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt); variableDef.var = mapVariable; variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol); } else { variableReference = ASTBuilderUtil.createVariableRef(pos, ((BLangSimpleVariableDef) 
parentBlockStmt.stmts.get(0)).var.symbol); } List<String> keysToRemove = parentRecordVariable.variableList.stream() .map(var -> var.getKey().getValue()) .collect(Collectors.toList()); BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos, keysToRemove, restParamType, parentBlockStmt); BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol); BLangSimpleVariable restParam = (BLangSimpleVariable) parentRecordVariable.restParam; BLangSimpleVariableDef restParamVarDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt); restParamVarDef.var = restParam; restParamVarDef.var.type = restParamType; restParam.expr = varRef; } } /** * This method will create the relevant var def statements for reason and details of the error variable. * The var def statements are created by creating the reason() and detail() builtin methods. */ private void createVarDefStmts(BLangErrorVariable parentErrorVariable, BLangBlockStmt parentBlockStmt, BVarSymbol errorVariableSymbol, BLangIndexBasedAccess parentIndexBasedAccess) { BVarSymbol convertedErrorVarSymbol; if (parentIndexBasedAccess != null) { BType prevType = parentIndexBasedAccess.type; parentIndexBasedAccess.type = symTable.anyType; BLangSimpleVariableDef errorVarDef = createVarDef("$error$" + errorCount++, symTable.errorType, addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType), parentErrorVariable.pos); parentIndexBasedAccess.type = prevType; parentBlockStmt.addStatement(errorVarDef); convertedErrorVarSymbol = errorVarDef.var.symbol; } else { convertedErrorVarSymbol = errorVariableSymbol; } parentErrorVariable.reason.expr = generateErrorReasonBuiltinFunction(parentErrorVariable.reason.pos, parentErrorVariable.reason.type, convertedErrorVarSymbol, null); if (names.fromIdNode((parentErrorVariable.reason).name) == Names.IGNORE) { parentErrorVariable.reason = null; } else { BLangSimpleVariableDef reasonVariableDef = 
ASTBuilderUtil.createVariableDefStmt(parentErrorVariable.reason.pos, parentBlockStmt); reasonVariableDef.var = parentErrorVariable.reason; } if ((parentErrorVariable.detail == null || parentErrorVariable.detail.isEmpty()) && parentErrorVariable.restDetail == null) { return; } BType detailMapType; BType detailType = ((BErrorType) parentErrorVariable.type).detailType; if (detailType.tag == TypeTags.MAP) { detailMapType = detailType; } else { detailMapType = symTable.detailType; } parentErrorVariable.detailExpr = generateErrorDetailBuiltinFunction( parentErrorVariable.pos, convertedErrorVarSymbol, null); BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail", parentErrorVariable.detailExpr.type, parentErrorVariable.detailExpr, parentErrorVariable.pos); detailTempVarDef.type = parentErrorVariable.detailExpr.type; parentBlockStmt.addStatement(detailTempVarDef); this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol); for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : parentErrorVariable.detail) { BLangExpression detailEntryVar = createErrorDetailVar(detailEntry, detailTempVarDef.var.symbol); createAndAddBoundVariableDef(parentBlockStmt, detailEntry, detailEntryVar); } if (parentErrorVariable.restDetail != null && !parentErrorVariable.restDetail.name.value.equals(IGNORE.value)) { DiagnosticPos pos = parentErrorVariable.restDetail.pos; BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef( pos, detailTempVarDef.var.symbol); List<String> keysToRemove = parentErrorVariable.detail.stream() .map(detail -> detail.key.getValue()) .collect(Collectors.toList()); BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVariable.pos, keysToRemove, parentErrorVariable.restDetail.type, parentBlockStmt); BLangSimpleVariableDef variableDefStmt = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt); variableDefStmt.var = ASTBuilderUtil.createVariable(pos, 
parentErrorVariable.restDetail.name.value, filteredDetail.type, ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol), parentErrorVariable.restDetail.symbol); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos, ASTBuilderUtil.createVariableRef(pos, parentErrorVariable.restDetail.symbol), ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol)); parentBlockStmt.addStatement(assignmentStmt); } rewrite(parentBlockStmt, env); } private BLangSimpleVariableDef forceCastIfApplicable(BVarSymbol errorVarySymbol, DiagnosticPos pos, BType targetType) { BVarSymbol errorVarSym = new BVarSymbol(Flags.PUBLIC, names.fromString("$cast$temp$"), this.env.enclPkg.packageID, targetType, this.env.scope.owner); BLangSimpleVarRef variableRef = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol); BLangExpression expr; if (targetType.tag == TypeTags.RECORD) { expr = variableRef; } else { expr = addConversionExprIfRequired(variableRef, targetType); } BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(pos, errorVarSym.name.value, targetType, expr, errorVarSym); return ASTBuilderUtil.createVariableDef(pos, errorVar); } private BLangSimpleVariable generateRestFilter(BLangSimpleVarRef mapVarRef, DiagnosticPos pos, List<String> keysToRemove, BType targetType, BLangBlockStmt parentBlockStmt) { BLangExpression typeCastExpr = addConversionExprIfRequired(mapVarRef, targetType); int restNum = annonVarCount++; String name = "$map$ref$" + restNum; BLangSimpleVariable mapVariable = defVariable(pos, targetType, parentBlockStmt, typeCastExpr, name); BLangInvocation entriesInvocation = generateMapEntriesInvocation( ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol), typeCastExpr.type); String entriesVarName = "$map$ref$entries$" + restNum; BType entriesType = new BMapType(TypeTags.MAP, new BTupleType(Arrays.asList(symTable.stringType, ((BMapType) targetType).constraint)), null); BLangSimpleVariable entriesInvocationVar = defVariable(pos, entriesType, 
parentBlockStmt, addConversionExprIfRequired(entriesInvocation, entriesType), entriesVarName); BLangLambdaFunction filter = createFuncToFilterOutRestParam(keysToRemove, pos); BLangInvocation filterInvocation = generateMapFilterInvocation(pos, entriesInvocationVar, filter); String filteredEntriesName = "$filtered$detail$entries" + restNum; BLangSimpleVariable filteredVar = defVariable(pos, entriesType, parentBlockStmt, filterInvocation, filteredEntriesName); String filteredVarName = "$detail$filtered" + restNum; BLangLambdaFunction backToMapLambda = generateEntriesToMapLambda(pos); BLangInvocation mapInvocation = generateMapMapInvocation(pos, filteredVar, backToMapLambda); BLangSimpleVariable filtered = defVariable(pos, targetType, parentBlockStmt, mapInvocation, filteredVarName); String filteredRestVarName = "$restVar$" + restNum; BLangInvocation constructed = generateConstructFromInvocation(pos, targetType, filtered.symbol); return defVariable(pos, targetType, parentBlockStmt, addConversionExprIfRequired(constructed, targetType), filteredRestVarName); } private BLangInvocation generateMapEntriesInvocation(BLangExpression expr, BType type) { BLangInvocation invocationNode = createInvocationNode("entries", new ArrayList<>(), type); invocationNode.expr = expr; invocationNode.symbol = symResolver.lookupLangLibMethod(type, names.fromString("entries")); invocationNode.requiredArgs = Lists.of(expr); invocationNode.type = invocationNode.symbol.type.getReturnType(); invocationNode.langLibInvocation = true; return invocationNode; } private BLangInvocation generateMapMapInvocation(DiagnosticPos pos, BLangSimpleVariable filteredVar, BLangLambdaFunction backToMapLambda) { BLangInvocation invocationNode = createInvocationNode("map", new ArrayList<>(), filteredVar.type); invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol); invocationNode.symbol = symResolver.lookupLangLibMethod(filteredVar.type, names.fromString("map")); invocationNode.requiredArgs = 
Lists.of(ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol)); invocationNode.type = invocationNode.symbol.type.getReturnType(); invocationNode.requiredArgs.add(backToMapLambda); return invocationNode; } private BLangLambdaFunction generateEntriesToMapLambda(DiagnosticPos pos) { String anonfuncName = "$anonGetValFunc$" + lambdaFunctionCount++; BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName); BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID, getStringAnyTupleType(), this.env.scope.owner); BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(), null, keyValSymbol); function.requiredParams.add(inputParameter); BLangValueType anyType = new BLangValueType(); anyType.typeKind = TypeKind.ANY; anyType.type = symTable.anyType; function.returnTypeNode = anyType; BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>()); function.body = functionBlock; BLangIndexBasedAccess indexBasesAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol, ASTBuilderUtil .createLiteral(pos, symTable.intType, (long) 1)); BLangSimpleVariableDef tupSecondElem = createVarDef("val", indexBasesAccessExpr.type, indexBasesAccessExpr, pos); functionBlock.addStatement(tupSecondElem); BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock); returnStmt.expr = ASTBuilderUtil.createVariableRef(pos, tupSecondElem.var.symbol); BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet), new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true); functionSymbol.retType = function.returnTypeNode.type; functionSymbol.params = function.requiredParams.stream() .map(param -> param.symbol) .collect(Collectors.toList()); functionSymbol.scope = env.scope; functionSymbol.type = new 
BInvokableType(Collections.singletonList(getStringAnyTupleType()), symTable.anyType, null); function.symbol = functionSymbol; rewrite(function, env); env.enclPkg.addFunction(function); return createLambdaFunction(function, functionSymbol); } private BLangInvocation generateMapFilterInvocation(DiagnosticPos pos, BLangSimpleVariable entriesInvocationVar, BLangLambdaFunction filter) { BLangInvocation invocationNode = createInvocationNode("filter", new ArrayList<>(), entriesInvocationVar.type); invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol); invocationNode.symbol = symResolver.lookupLangLibMethod(entriesInvocationVar.type, names.fromString("filter")); invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol)); invocationNode.type = invocationNode.symbol.type.getReturnType(); invocationNode.requiredArgs.add(filter); return invocationNode; } private BLangSimpleVariable defVariable(DiagnosticPos pos, BType varType, BLangBlockStmt parentBlockStmt, BLangExpression expression, String name) { Name varName = names.fromString(name); BLangSimpleVariable detailMap = ASTBuilderUtil.createVariable(pos, name, varType, expression, new BVarSymbol(Flags.PUBLIC, varName, env.enclPkg.packageID, varType, env.scope.owner)); BLangSimpleVariableDef constructedMap = ASTBuilderUtil.createVariableDef(pos, detailMap); constructedMap.type = varType; parentBlockStmt.addStatement(constructedMap); env.scope.define(varName, detailMap.symbol); return detailMap; } private void createAndAddBoundVariableDef(BLangBlockStmt parentBlockStmt, BLangErrorVariable.BLangErrorDetailEntry detailEntry, BLangExpression detailEntryVar) { if (detailEntry.valueBindingPattern.getKind() == NodeKind.VARIABLE) { BLangSimpleVariableDef errorDetailVar = createVarDef( ((BLangSimpleVariable) detailEntry.valueBindingPattern).name.value, detailEntry.valueBindingPattern.type, detailEntryVar, detailEntry.valueBindingPattern.pos); 
parentBlockStmt.addStatement(errorDetailVar); } else if (detailEntry.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) { BLangRecordVariableDef recordVariableDef = ASTBuilderUtil.createRecordVariableDef( detailEntry.valueBindingPattern.pos, (BLangRecordVariable) detailEntry.valueBindingPattern); recordVariableDef.var.expr = detailEntryVar; recordVariableDef.type = symTable.recordType; parentBlockStmt.addStatement(recordVariableDef); } else if (detailEntry.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) { BLangTupleVariableDef tupleVariableDef = ASTBuilderUtil.createTupleVariableDef( detailEntry.valueBindingPattern.pos, (BLangTupleVariable) detailEntry.valueBindingPattern); parentBlockStmt.addStatement(tupleVariableDef); } } private BLangExpression createErrorDetailVar(BLangErrorVariable.BLangErrorDetailEntry detailEntry, BVarSymbol tempDetailVarSymbol) { BLangExpression detailEntryVar = createIndexBasedAccessExpr( detailEntry.valueBindingPattern.type, detailEntry.valueBindingPattern.pos, createStringLiteral(detailEntry.key.pos, detailEntry.key.value), tempDetailVarSymbol, null); if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar; bLangIndexBasedAccess.originalType = symTable.pureType; } return detailEntryVar; } private BLangExpression constructStringTemplateConcatExpression(List<BLangExpression> exprs) { BLangExpression concatExpr = null; BLangExpression currentExpr; for (BLangExpression expr : exprs) { currentExpr = expr; if (expr.type.tag != TypeTags.STRING && expr.type.tag != TypeTags.XML) { currentExpr = getToStringInvocationOnExpr(expr); } if (concatExpr == null) { concatExpr = currentExpr; continue; } BType binaryExprType = TypeTags.isXMLTypeTag(concatExpr.type.tag) || TypeTags.isXMLTypeTag(currentExpr.type.tag) ? 
symTable.xmlType : symTable.stringType; concatExpr = ASTBuilderUtil.createBinaryExpr(concatExpr.pos, concatExpr, currentExpr, binaryExprType, OperatorKind.ADD, null); } return concatExpr; } private BLangInvocation getToStringInvocationOnExpr(BLangExpression expression) { BInvokableSymbol symbol = (BInvokableSymbol) symTable.langValueModuleSymbol.scope .lookup(names.fromString(TO_STRING_FUNCTION_NAME)).symbol; List<BLangExpression> requiredArgs = new ArrayList<BLangExpression>() {{ add(addConversionExprIfRequired(expression, symbol.params.get(0).type)); }}; return ASTBuilderUtil.createInvocationExprMethod(expression.pos, symbol, requiredArgs, new ArrayList<>(), symResolver); } private BLangInvocation generateErrorDetailBuiltinFunction(DiagnosticPos pos, BVarSymbol errorVarySymbol, BLangIndexBasedAccess parentIndexBasedAccess) { BLangExpression onExpr = parentIndexBasedAccess != null ? parentIndexBasedAccess : ASTBuilderUtil.createVariableRef(pos, errorVarySymbol); return createLangLibInvocationNode(ERROR_DETAIL_FUNCTION_NAME, onExpr, new ArrayList<>(), null, pos); } private BLangInvocation generateErrorReasonBuiltinFunction(DiagnosticPos pos, BType reasonType, BVarSymbol errorVarSymbol, BLangIndexBasedAccess parentIndexBasedAccess) { BLangExpression onExpr = parentIndexBasedAccess != null ? 
parentIndexBasedAccess : ASTBuilderUtil.createVariableRef(pos, errorVarSymbol); return createLangLibInvocationNode(ERROR_REASON_FUNCTION_NAME, onExpr, new ArrayList<>(), reasonType, pos); } private BLangInvocation generateConstructFromInvocation(DiagnosticPos pos, BType targetType, BVarSymbol source) { BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol); BLangInvocation invocationNode = createInvocationNode(CONSTRUCT_FROM, new ArrayList<>(), typedescType); BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = targetType; typedescExpr.type = typedescType; invocationNode.expr = typedescExpr; invocationNode.symbol = symResolver.lookupLangLibMethod(typedescType, names.fromString(CONSTRUCT_FROM)); invocationNode.requiredArgs = Lists.of(typedescExpr, ASTBuilderUtil.createVariableRef(pos, source)); invocationNode.type = BUnionType.create(null, targetType, symTable.errorType); return invocationNode; } private BLangLambdaFunction createFuncToFilterOutRestParam(List<String> toRemoveList, DiagnosticPos pos) { String anonfuncName = "$anonRestParamFilterFunc$" + lambdaFunctionCount++; BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName); BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID, getStringAnyTupleType(), this.env.scope.owner); BLangBlockFunctionBody functionBlock = createAnonymousFunctionBlock(pos, function, keyValSymbol); BLangIndexBasedAccess indexBasesAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol, ASTBuilderUtil .createLiteral(pos, symTable.intType, (long) 0)); BLangSimpleVariableDef tupFirstElem = createVarDef("key", indexBasesAccessExpr.type, indexBasesAccessExpr, pos); functionBlock.addStatement(tupFirstElem); for (String toRemoveItem : toRemoveList) { createIfStmt(pos, tupFirstElem.var.symbol, functionBlock, toRemoveItem); } BInvokableSymbol functionSymbol = 
createReturnTrueStatement(pos, function, functionBlock); return createLambdaFunction(function, functionSymbol); } private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVariable recordVariable, DiagnosticPos pos) { List<String> fieldNamesToRemove = recordVariable.variableList.stream() .map(var -> var.getKey().getValue()) .collect(Collectors.toList()); return createFuncToFilterOutRestParam(fieldNamesToRemove, pos); } private void createIfStmt(DiagnosticPos pos, BVarSymbol inputParamSymbol, BLangBlockFunctionBody blockStmt, String key) { BLangSimpleVarRef firstElemRef = ASTBuilderUtil.createVariableRef(pos, inputParamSymbol); BLangExpression converted = addConversionExprIfRequired(firstElemRef, symTable.stringType); BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, blockStmt); BLangBlockStmt ifBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>()); BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, ifBlock); returnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, false); ifStmt.body = ifBlock; BLangGroupExpr groupExpr = new BLangGroupExpr(); groupExpr.type = symTable.booleanType; BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, converted, ASTBuilderUtil.createLiteral(pos, symTable.stringType, key), symTable.booleanType, OperatorKind.EQUAL, null); binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator( binaryExpr.opKind, binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type); groupExpr.expression = binaryExpr; ifStmt.expr = groupExpr; } BLangLambdaFunction createLambdaFunction(BLangFunction function, BInvokableSymbol functionSymbol) { BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode(); lambdaFunction.function = function; lambdaFunction.type = functionSymbol.type; return lambdaFunction; } private BInvokableSymbol createReturnTrueStatement(DiagnosticPos pos, BLangFunction function, BLangBlockFunctionBody functionBlock) { BLangReturn 
trueReturnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock); trueReturnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true); BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet), new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true); functionSymbol.retType = function.returnTypeNode.type; functionSymbol.params = function.requiredParams.stream() .map(param -> param.symbol) .collect(Collectors.toList()); functionSymbol.scope = env.scope; functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()), getRestType(functionSymbol), symTable.booleanType, null); function.symbol = functionSymbol; rewrite(function, env); env.enclPkg.addFunction(function); return functionSymbol; } private BLangBlockFunctionBody createAnonymousFunctionBlock(DiagnosticPos pos, BLangFunction function, BVarSymbol keyValSymbol) { BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(), null, keyValSymbol); function.requiredParams.add(inputParameter); BLangValueType booleanTypeKind = new BLangValueType(); booleanTypeKind.typeKind = TypeKind.BOOLEAN; booleanTypeKind.type = symTable.booleanType; function.returnTypeNode = booleanTypeKind; BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>()); function.body = functionBlock; return functionBlock; } private BTupleType getStringAnyTupleType() { ArrayList<BType> typeList = new ArrayList<BType>() {{ add(symTable.stringType); add(symTable.anyType); }}; return new BTupleType(typeList); } /** * This method creates a simple variable def and assigns and array expression based on the given indexExpr. 
* * case 1: when there is no parent array access expression, but with the indexExpr : 1 * string s = x[1]; * * case 2: when there is a parent array expression : x[2] and indexExpr : 3 * string s = x[2][3]; * * case 3: when there is no parent array access expression, but with the indexExpr : name * string s = x[name]; * * case 4: when there is a parent map expression : x[name] and indexExpr : fName * string s = x[name][fName]; * * case 5: when there is a parent map expression : x[name] and indexExpr : 1 * string s = x[name][1]; */ private void createSimpleVarDefStmt(BLangSimpleVariable simpleVariable, BLangBlockStmt parentBlockStmt, BLangLiteral indexExpr, BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentArrayAccessExpr) { Name varName = names.fromIdNode(simpleVariable.name); if (varName == Names.IGNORE) { return; } final BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDefStmt(simpleVariable.pos, parentBlockStmt); simpleVariableDef.var = simpleVariable; simpleVariable.expr = createIndexBasedAccessExpr(simpleVariable.type, simpleVariable.pos, indexExpr, tupleVarSymbol, parentArrayAccessExpr); } @Override public void visit(BLangAssignment assignNode) { if (safeNavigateLHS(assignNode.varRef)) { BLangAccessExpression accessExpr = (BLangAccessExpression) assignNode.varRef; accessExpr.leafNode = true; result = rewriteSafeNavigationAssignment(accessExpr, assignNode.expr, assignNode.safeAssignment); result = rewrite(result, env); return; } assignNode.varRef = rewriteExpr(assignNode.varRef); assignNode.expr = rewriteExpr(assignNode.expr); assignNode.expr = addConversionExprIfRequired(rewriteExpr(assignNode.expr), assignNode.varRef.type); result = assignNode; } @Override public void visit(BLangTupleDestructure tupleDestructure) { final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(tupleDestructure.pos); BType runTimeType = new BArrayType(symTable.anyType); String name = "tuple"; final BLangSimpleVariable tuple = 
ASTBuilderUtil.createVariable(tupleDestructure.pos, name, runTimeType, null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType, this.env.scope.owner)); tuple.expr = tupleDestructure.expr; final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(tupleDestructure.pos, blockStmt); variableDef.var = tuple; createVarRefAssignmentStmts(tupleDestructure.varRef, blockStmt, tuple.symbol, null); createRestFieldAssignmentStmt(tupleDestructure, blockStmt, tuple.symbol); result = rewrite(blockStmt, env); } private void createRestFieldAssignmentStmt(BLangTupleDestructure tupleDestructure, BLangBlockStmt blockStmt, BVarSymbol tupleVarSymbol) { BLangTupleVarRef tupleVarRef = tupleDestructure.varRef; DiagnosticPos pos = blockStmt.pos; if (tupleVarRef.restParam != null) { BLangExpression tupleExpr = tupleDestructure.expr; BLangSimpleVarRef restParam = (BLangSimpleVarRef) tupleVarRef.restParam; BArrayType restParamType = (BArrayType) restParam.type; BLangArrayLiteral arrayExpr = createArrayLiteralExprNode(); arrayExpr.type = restParamType; BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt); restParamAssignment.varRef = restParam; restParamAssignment.varRef.type = restParamType; restParamAssignment.expr = arrayExpr; BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression(); startIndexLiteral.value = (long) tupleVarRef.expressions.size(); startIndexLiteral.type = symTable.intType; BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr); BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral, getModifiedIntRangeEndExpr(lengthInvocation)); BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode(); foreach.pos = pos; foreach.collection = intRangeInvocation; types.setForeachTypedBindingPatternType(foreach); final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i", foreach.varType); 
foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name), this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner); BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol); foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable); foreach.isDeclaredWithVar = true; BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos); BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(restParam, createLengthInvocation(pos, restParam)); indexAccessExpr.type = restParamType.eType; createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null); foreach.body = foreachBody; blockStmt.addStatement(foreach); } } private BLangInvocation createLengthInvocation(DiagnosticPos pos, BLangExpression collection) { BInvokableSymbol lengthInvokableSymbol = (BInvokableSymbol) symResolver .lookupLangLibMethod(collection.type, names.fromString(LENGTH_FUNCTION_NAME)); BLangInvocation lengthInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos, lengthInvokableSymbol, Lists.of(collection), symResolver); lengthInvocation.argExprs = lengthInvocation.requiredArgs; lengthInvocation.type = lengthInvokableSymbol.type.getReturnType(); return lengthInvocation; } /** * This method iterate through each member of the tupleVarRef and create the relevant var ref assignment statements. * This method does the check for node kind of each member and call the related var ref creation method. * * Example: * ((a, b), c)) = (tuple) * * (a, b) is again a tuple, so it is a recursive var ref creation. * * c is a simple var, so a simple var def will be created. 
* */ private void createVarRefAssignmentStmts(BLangTupleVarRef parentTupleVariable, BLangBlockStmt parentBlockStmt, BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) { final List<BLangExpression> expressions = parentTupleVariable.expressions; for (int index = 0; index < expressions.size(); index++) { BLangExpression expression = expressions.get(index); if (NodeKind.SIMPLE_VARIABLE_REF == expression.getKind() || NodeKind.FIELD_BASED_ACCESS_EXPR == expression.getKind() || NodeKind.INDEX_BASED_ACCESS_EXPR == expression.getKind() || NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == expression.getKind()) { BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(expression.pos, symTable.intType, (long) index); createSimpleVarRefAssignmentStmt((BLangVariableReference) expression, parentBlockStmt, indexExpr, tupleVarSymbol, parentIndexAccessExpr); continue; } if (expression.getKind() == NodeKind.TUPLE_VARIABLE_REF) { BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) expression; BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(tupleVarRef.pos, symTable.intType, (long) index); BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVarRef.pos, new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts((BLangTupleVarRef) expression, parentBlockStmt, tupleVarSymbol, arrayAccessExpr); continue; } if (expression.getKind() == NodeKind.RECORD_VARIABLE_REF) { BLangRecordVarRef recordVarRef = (BLangRecordVarRef) expression; BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(recordVarRef.pos, symTable.intType, (long) index); BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr( parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts((BLangRecordVarRef) expression, 
parentBlockStmt, tupleVarSymbol, arrayAccessExpr); TypeDefBuilderHelper.addTypeDefinition(recordVarRef.type, recordVarRef.type.tsymbol, TypeDefBuilderHelper.createRecordTypeNode( (BRecordType) recordVarRef.type, env.enclPkg.packageID, symTable, recordVarRef.pos), env); continue; } if (expression.getKind() == NodeKind.ERROR_VARIABLE_REF) { BLangErrorVarRef errorVarRef = (BLangErrorVarRef) expression; BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(errorVarRef.pos, symTable.intType, (long) index); BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr( parentTupleVariable.pos, expression.type, tupleVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts((BLangErrorVarRef) expression, parentBlockStmt, tupleVarSymbol, arrayAccessExpr); } } } /** * This method creates a assignment statement and assigns and array expression based on the given indexExpr. * */ private void createSimpleVarRefAssignmentStmt(BLangVariableReference simpleVarRef, BLangBlockStmt parentBlockStmt, BLangExpression indexExpr, BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentArrayAccessExpr) { if (simpleVarRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { Name varName = names.fromIdNode(((BLangSimpleVarRef) simpleVarRef).variableName); if (varName == Names.IGNORE) { return; } } BLangExpression assignmentExpr = createIndexBasedAccessExpr(simpleVarRef.type, simpleVarRef.pos, indexExpr, tupleVarSymbol, parentArrayAccessExpr); assignmentExpr = addConversionExprIfRequired(assignmentExpr, simpleVarRef.type); final BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt); assignmentStmt.varRef = simpleVarRef; assignmentStmt.expr = assignmentExpr; } private BLangExpression createIndexBasedAccessExpr(BType varType, DiagnosticPos varPos, BLangExpression indexExpr, BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentExpr) { 
BLangIndexBasedAccess arrayAccess = ASTBuilderUtil.createIndexBasesAccessExpr(varPos, symTable.anyType, tupleVarSymbol, indexExpr); arrayAccess.originalType = varType; if (parentExpr != null) { arrayAccess.expr = parentExpr; } final BLangExpression assignmentExpr; if (types.isValueType(varType)) { BLangTypeConversionExpr castExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); castExpr.expr = arrayAccess; castExpr.type = varType; assignmentExpr = castExpr; } else { assignmentExpr = arrayAccess; } return assignmentExpr; } @Override public void visit(BLangRecordDestructure recordDestructure) { final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(recordDestructure.pos); BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null); String name = "$map$0"; final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(recordDestructure.pos, name, runTimeType, null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType, this.env.scope.owner)); mapVariable.expr = recordDestructure.expr; final BLangSimpleVariableDef variableDef = ASTBuilderUtil. 
createVariableDefStmt(recordDestructure.pos, blockStmt); variableDef.var = mapVariable; createVarRefAssignmentStmts(recordDestructure.varRef, blockStmt, mapVariable.symbol, null); result = rewrite(blockStmt, env); } @Override public void visit(BLangErrorDestructure errorDestructure) { final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(errorDestructure.pos); String name = "$error$"; final BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(errorDestructure.pos, name, symTable.errorType, null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner)); errorVar.expr = errorDestructure.expr; final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(errorDestructure.pos, blockStmt); variableDef.var = errorVar; createVarRefAssignmentStmts(errorDestructure.varRef, blockStmt, errorVar.symbol, null); result = rewrite(blockStmt, env); } private void createVarRefAssignmentStmts(BLangRecordVarRef parentRecordVarRef, BLangBlockStmt parentBlockStmt, BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) { final List<BLangRecordVarRefKeyValue> variableRefList = parentRecordVarRef.recordRefFields; for (BLangRecordVarRefKeyValue varRefKeyValue : variableRefList) { BLangExpression variableReference = varRefKeyValue.variableReference; BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variableReference.pos, symTable.stringType, varRefKeyValue.variableName.getValue()); if (NodeKind.SIMPLE_VARIABLE_REF == variableReference.getKind() || NodeKind.FIELD_BASED_ACCESS_EXPR == variableReference.getKind() || NodeKind.INDEX_BASED_ACCESS_EXPR == variableReference.getKind() || NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == variableReference.getKind()) { createSimpleVarRefAssignmentStmt((BLangVariableReference) variableReference, parentBlockStmt, indexExpr, recordVarSymbol, parentIndexAccessExpr); continue; } if (NodeKind.RECORD_VARIABLE_REF == variableReference.getKind()) { 
BLangRecordVarRef recordVariable = (BLangRecordVarRef) variableReference; BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr( parentRecordVarRef.pos, symTable.mapType, recordVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts(recordVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr); continue; } if (NodeKind.TUPLE_VARIABLE_REF == variableReference.getKind()) { BLangTupleVarRef tupleVariable = (BLangTupleVarRef) variableReference; BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos, symTable.tupleType, recordVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts(tupleVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr); continue; } if (NodeKind.ERROR_VARIABLE_REF == variableReference.getKind()) { BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(variableReference.pos, symTable.errorType, recordVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts((BLangErrorVarRef) variableReference, parentBlockStmt, recordVarSymbol, arrayAccessExpr); } } if (parentRecordVarRef.restParam != null) { DiagnosticPos pos = parentBlockStmt.pos; BMapType restParamType = (BMapType) ((BLangSimpleVarRef) parentRecordVarRef.restParam).type; BLangSimpleVarRef variableReference; if (parentIndexAccessExpr != null) { BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1", restParamType, null, new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID, restParamType, this.env.scope.owner)); mapVariable.expr = parentIndexAccessExpr; BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt); variableDef.var = mapVariable; variableReference = 
ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol); } else { variableReference = ASTBuilderUtil.createVariableRef(pos, ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol); } BLangSimpleVarRef restParam = (BLangSimpleVarRef) parentRecordVarRef.restParam; List<String> keysToRemove = parentRecordVarRef.recordRefFields.stream() .map(field -> field.variableName.value) .collect(Collectors.toList()); BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos, keysToRemove, restParamType, parentBlockStmt); BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol); BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, parentBlockStmt); restParamAssignment.varRef = restParam; restParamAssignment.varRef.type = restParamType; restParamAssignment.expr = varRef; } } private void createVarRefAssignmentStmts(BLangErrorVarRef parentErrorVarRef, BLangBlockStmt parentBlockStmt, BVarSymbol errorVarySymbol, BLangIndexBasedAccess parentIndexAccessExpr) { if (parentErrorVarRef.reason.getKind() != NodeKind.SIMPLE_VARIABLE_REF || names.fromIdNode(((BLangSimpleVarRef) parentErrorVarRef.reason).variableName) != Names.IGNORE) { BLangAssignment reasonAssignment = ASTBuilderUtil .createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt); reasonAssignment.expr = generateErrorReasonBuiltinFunction(parentErrorVarRef.reason.pos, symTable.stringType, errorVarySymbol, parentIndexAccessExpr); reasonAssignment.expr = addConversionExprIfRequired(reasonAssignment.expr, parentErrorVarRef.reason.type); reasonAssignment.varRef = parentErrorVarRef.reason; } if (parentErrorVarRef.detail.isEmpty() && isIgnoredErrorRefRestVar(parentErrorVarRef)) { return; } BLangInvocation errorDetailBuiltinFunction = generateErrorDetailBuiltinFunction(parentErrorVarRef.pos, errorVarySymbol, parentIndexAccessExpr); BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail$" + errorCount++, symTable.detailType, 
errorDetailBuiltinFunction, parentErrorVarRef.pos); detailTempVarDef.type = symTable.detailType; parentBlockStmt.addStatement(detailTempVarDef); this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol); List<String> extractedKeys = new ArrayList<>(); for (BLangNamedArgsExpression detail : parentErrorVarRef.detail) { extractedKeys.add(detail.name.value); BLangVariableReference ref = (BLangVariableReference) detail.expr; BLangExpression detailEntryVar = createIndexBasedAccessExpr(ref.type, ref.pos, createStringLiteral(detail.name.pos, detail.name.value), detailTempVarDef.var.symbol, null); if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar; bLangIndexBasedAccess.originalType = symTable.pureType; } BLangAssignment detailAssignment = ASTBuilderUtil.createAssignmentStmt(ref.pos, parentBlockStmt); detailAssignment.varRef = ref; detailAssignment.expr = detailEntryVar; } if (!isIgnoredErrorRefRestVar(parentErrorVarRef)) { BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos, detailTempVarDef.var.symbol); BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVarRef.restVar.pos, extractedKeys, parentErrorVarRef.restVar.type, parentBlockStmt); BLangAssignment restAssignment = ASTBuilderUtil.createAssignmentStmt(parentErrorVarRef.restVar.pos, parentBlockStmt); restAssignment.varRef = parentErrorVarRef.restVar; restAssignment.expr = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos, filteredDetail.symbol); } BErrorType errorType = (BErrorType) parentErrorVarRef.type; if (errorType.detailType.getKind() == TypeKind.RECORD) { BRecordTypeSymbol tsymbol = (BRecordTypeSymbol) errorType.detailType.tsymbol; tsymbol.initializerFunc = createRecordInitFunc(); tsymbol.scope.define(tsymbol.initializerFunc.funcName, tsymbol.initializerFunc.symbol); } } private boolean 
isIgnoredErrorRefRestVar(BLangErrorVarRef parentErrorVarRef) { if (parentErrorVarRef.restVar == null) { return true; } if (parentErrorVarRef.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return (((BLangSimpleVarRef) parentErrorVarRef.restVar).variableName.value.equals(IGNORE.value)); } return false; } @Override public void visit(BLangAbort abortNode) { BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(abortNode.pos, symTable.intType, -1L); result = rewrite(returnStmt, env); } @Override public void visit(BLangRetry retryNode) { BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(retryNode.pos, symTable.intType, 1L); result = rewrite(returnStmt, env); } @Override public void visit(BLangContinue nextNode) { result = nextNode; } @Override public void visit(BLangBreak breakNode) { result = breakNode; } @Override public void visit(BLangReturn returnNode) { if (returnNode.expr != null) { returnNode.expr = rewriteExpr(returnNode.expr); } result = returnNode; } @Override public void visit(BLangPanic panicNode) { panicNode.expr = rewriteExpr(panicNode.expr); result = panicNode; } @Override public void visit(BLangXMLNSStatement xmlnsStmtNode) { xmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env); result = xmlnsStmtNode; } @Override public void visit(BLangXMLNS xmlnsNode) { BLangXMLNS generatedXMLNSNode; xmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI); BSymbol ownerSymbol = xmlnsNode.symbol.owner; if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE || (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) { generatedXMLNSNode = new BLangLocalXMLNS(); } else { generatedXMLNSNode = new BLangPackageXMLNS(); } generatedXMLNSNode.namespaceURI = xmlnsNode.namespaceURI; generatedXMLNSNode.prefix = xmlnsNode.prefix; generatedXMLNSNode.symbol = xmlnsNode.symbol; result = generatedXMLNSNode; } public void visit(BLangCompoundAssignment compoundAssignment) { BLangVariableReference varRef = compoundAssignment.varRef; if 
(compoundAssignment.varRef.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) { if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { varRef = ASTBuilderUtil.createVariableRef(compoundAssignment.varRef.pos, varRef.symbol); varRef.lhsVar = true; } result = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, rewriteExpr(varRef), rewriteExpr(compoundAssignment.modifiedExpr)); return; } List<BLangStatement> statements = new ArrayList<>(); List<BLangSimpleVarRef> varRefs = new ArrayList<>(); List<BType> types = new ArrayList<>(); do { BLangSimpleVariableDef tempIndexVarDef = createVarDef("$temp" + ++indexExprCount + "$", ((BLangIndexBasedAccess) varRef).indexExpr.type, ((BLangIndexBasedAccess) varRef).indexExpr, compoundAssignment.pos); BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(tempIndexVarDef.pos, tempIndexVarDef.var.symbol); statements.add(0, tempIndexVarDef); varRefs.add(0, tempVarRef); types.add(0, varRef.type); varRef = (BLangVariableReference) ((BLangIndexBasedAccess) varRef).expr; } while (varRef.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR); BLangVariableReference var = varRef; for (int ref = 0; ref < varRefs.size(); ref++) { var = ASTBuilderUtil.createIndexAccessExpr(var, varRefs.get(ref)); var.type = types.get(ref); } var.type = compoundAssignment.varRef.type; BLangExpression rhsExpression = ASTBuilderUtil.createBinaryExpr(compoundAssignment.pos, var, compoundAssignment.expr, compoundAssignment.type, compoundAssignment.opKind, null); rhsExpression.type = compoundAssignment.modifiedExpr.type; BLangAssignment assignStmt = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, var, rhsExpression); statements.add(assignStmt); BLangBlockStmt bLangBlockStmt = ASTBuilderUtil.createBlockStmt(compoundAssignment.pos, statements); result = rewrite(bLangBlockStmt, env); } @Override public void visit(BLangExpressionStmt exprStmtNode) { exprStmtNode.expr = rewriteExpr(exprStmtNode.expr); result = exprStmtNode; } @Override public void 
visit(BLangIf ifNode) {
    // Desugar the condition, then-body and (possibly null) else branch in place.
    ifNode.expr = rewriteExpr(ifNode.expr);
    ifNode.body = rewrite(ifNode.body, env);
    ifNode.elseStmt = rewrite(ifNode.elseStmt, env);
    result = ifNode;
}

// Desugars a match statement into a block: the matched expression is captured
// once in a generated temp variable, and the patterns become an if-else chain.
@Override
public void visit(BLangMatch matchStmt) {
    BLangBlockStmt matchBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
    matchBlockStmt.pos = matchStmt.pos;
    String matchExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable matchExprVar = ASTBuilderUtil.createVariable(matchStmt.expr.pos, matchExprVarName,
            matchStmt.expr.type, matchStmt.expr, new BVarSymbol(0, names.fromString(matchExprVarName),
                    this.env.scope.owner.pkgID, matchStmt.expr.type, this.env.scope.owner));
    BLangSimpleVariableDef matchExprVarDef = ASTBuilderUtil.createVariableDef(matchBlockStmt.pos, matchExprVar);
    matchBlockStmt.stmts.add(matchExprVarDef);
    matchBlockStmt.stmts.add(generateIfElseStmt(matchStmt, matchExprVar));
    rewrite(matchBlockStmt, this.env);
    result = matchBlockStmt;
}

// Desugars foreach: the collection is captured once in "$data$", then the loop
// is rewritten as an iterator-driven while loop (for iterable types).
@Override
public void visit(BLangForeach foreach) {
    BLangBlockStmt blockNode;
    BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString("$data$"), this.env.scope.owner.pkgID,
            foreach.collection.type, this.env.scope.owner);
    BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(foreach.pos, "$data$",
            foreach.collection.type, foreach.collection, dataSymbol);
    BLangSimpleVariableDef dataVarDef = ASTBuilderUtil.createVariableDef(foreach.pos, dataVariable);
    BVarSymbol collectionSymbol = dataVariable.symbol;
    switch (foreach.collection.type.tag) {
        case TypeTags.STRING:
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
        case TypeTags.XML:
        case TypeTags.MAP:
        case TypeTags.STREAM:
        case TypeTags.RECORD:
            // Built-in iterable value: use the lang-lib iterator function.
            BInvokableSymbol iteratorSymbol = getLangLibIteratorInvokableSymbol(collectionSymbol);
            blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol,
                    true);
            break;
        case TypeTags.OBJECT:
            // Iterable object: use its attached iterator method instead.
            iteratorSymbol = getIterableObjectIteratorInvokableSymbol(collectionSymbol);
            blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol,
                    false);
            break;
        default:
            blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
            blockNode.stmts.add(0, dataVarDef);
            break;
    }
    rewrite(blockNode, this.env);
    result = blockNode;
}

// Builds the iterator-variable definition for the foreach collection and wraps
// the loop body in the iterator-driven while loop.
private BLangBlockStmt desugarForeachWithIteratorDef(BLangForeach foreach,
                                                     BLangSimpleVariableDef dataVariableDefinition,
                                                     BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVariableDef iteratorVarDef = getIteratorVariableDefinition(foreach.pos, collectionSymbol,
            iteratorInvokableSymbol, isIteratorFuncFromLangLib);
    BLangBlockStmt blockNode = desugarForeachToWhile(foreach, iteratorVarDef);
    blockNode.stmts.add(0, dataVariableDefinition);
    return blockNode;
}

// Looks up the attached iterator function on an iterable object type.
public BInvokableSymbol getIterableObjectIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
    BObjectTypeSymbol typeSymbol = (BObjectTypeSymbol) collectionSymbol.type.tsymbol;
    BAttachedFunction iteratorFunc = null;
    for (BAttachedFunction func : typeSymbol.attachedFuncs) {
        if (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
            iteratorFunc = func;
            break;
        }
    }
    // NOTE(review): if no iterator function is found this dereference NPEs;
    // presumably the type checker guarantees its presence — confirm.
    BAttachedFunction function = iteratorFunc;
    return function.symbol;
}

BInvokableSymbol getLangLibIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
    return (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionSymbol.type,
            names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
}

// Core of the foreach rewrite: `while (result is <resultType>) { var v = result.value; ... result = it.next(); }`
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
    BVarSymbol iteratorSymbol = varDef.var.symbol;
    BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
            foreach.nillableResultType, this.env.scope.owner);
    BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
            foreach.nillableResultType, iteratorSymbol, resultSymbol);
    BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
    BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
            .createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType);
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.pos = foreach.pos;
    whileNode.expr = typeTestExpr;
    whileNode.body = foreach.body;
    BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol);
    VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
    BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
    valueAccessExpr.expr = addConversionExprIfRequired(valueAccessExpr.expr,
            types.getSafeType(valueAccessExpr.expr.type, true, false));
    variableDefinitionNode.getVariable()
            .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
    // Loop variable binding first, then advance the iterator.
    whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
    whileNode.body.stmts.add(1, resultAssignment);
    BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
    blockNode.addStatement(varDef);
    blockNode.addStatement(resultVariableDefinition);
    blockNode.addStatement(whileNode);
    return blockNode;
}

// Wraps an already-resolved BType in an (unnamed) user-defined type node so it
// can be used in a type-test expression.
private BLangType getUserDefineTypeNode(BType type) {
    BLangUserDefinedType recordType = new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""),
            ASTBuilderUtil.createIdentifier(null, ""));
    recordType.type = type;
    return recordType;
}

@Override
public void visit(BLangWhile whileNode) {
    whileNode.expr = rewriteExpr(whileNode.expr);
    whileNode.body = rewrite(whileNode.body, env);
    result = whileNode;
}

// Desugars a lock statement: the body runs between lock/unlock markers wrapped
// in a trap, and any trapped error is re-panicked after the unlock so the lock
// is always released.
@Override
public void visit(BLangLock lockNode) {
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
    blockStmt.addStatement(lockStmt);
    enclLocks.push(lockStmt);
    BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos,
            symTable.nilType, Names.NIL_VALUE);
    BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
    BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
    statementExpression.type = symTable.nilType;
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.type = nillableError;
    trapExpr.expr = statementExpression;
    BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
            this.env.scope.owner.pkgID, nillableError, this.env.scope.owner);
    BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
            nillableError, trapExpr, nillableErrorVarSymbol);
    BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
    blockStmt.addStatement(simpleVariableDef);
    BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
    blockStmt.addStatement(unLockStmt);
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = lockNode.pos;
    panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
    ifBody.addStatement(panicNode);
    BLangTypeTestExpr isErrorTest = ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
    isErrorTest.type = symTable.booleanType;
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
    blockStmt.addStatement(ifelse);
    result = rewrite(blockStmt, env);
    enclLocks.pop();
}

@Override
public void visit(BLangLockStmt lockStmt) {
    result = lockStmt;
}

@Override
public void visit(BLangUnLockStmt unLockStmt) {
    result = unLockStmt;
}

@Override
public void visit(BLangTransaction transactionNode) {
    DiagnosticPos pos = transactionNode.pos;
    BType trxReturnType = symTable.intType;
    BType otherReturnType =
symTable.nilType;
    BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
    BLangType otherReturnNode = ASTBuilderUtil.createTypeNode(otherReturnType);
    DiagnosticPos invPos = transactionNode.pos;
    /* transaction block code will be desugar to function which returns int. Return value determines the status
       of the transaction code.
       ex.
           0 = successful
           1 = retry
          -1 = abort
       Since transaction block code doesn't return anything, we need to add return statement at end of the
       block unless we have abort or retry statement. */
    DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src, invPos.eLine, invPos.eLine, invPos.sCol,
            invPos.sCol);
    BLangStatement statement = null;
    if (!transactionNode.transactionBody.stmts.isEmpty()) {
        statement = transactionNode.transactionBody.stmts.get(transactionNode.transactionBody.stmts.size() - 1);
    }
    // NOTE(review): both operands of the && test NodeKind.ABORT — per the
    // comment above, the second operand looks like it was meant to check the
    // retry statement kind; verify against project history before changing.
    if (statement == null || !(statement.getKind() == NodeKind.ABORT)
            && !(statement.getKind() == NodeKind.ABORT)) {
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(returnStmtPos, trxReturnType, 0L);
        transactionNode.transactionBody.addStatement(returnStmt);
    }
    // Default the optional sub-blocks to empty blocks and retry count to 3.
    if (transactionNode.abortedBody == null) {
        transactionNode.abortedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.committedBody == null) {
        transactionNode.committedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.onRetryBody == null) {
        transactionNode.onRetryBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.retryCount == null) {
        transactionNode.retryCount = ASTBuilderUtil.createLiteral(pos, symTable.intType, 3L);
    }
    // Wrap each transaction sub-block into its own anonymous function.
    BLangLambdaFunction trxMainFunc = createLambdaFunction(pos, "$anonTrxMainFunc$", Collections.emptyList(),
            trxReturnNode, transactionNode.transactionBody.stmts, env, transactionNode.transactionBody.scope);
    BLangLambdaFunction trxOnRetryFunc = createLambdaFunction(pos, "$anonTrxOnRetryFunc$",
            Collections.emptyList(), otherReturnNode, transactionNode.onRetryBody.stmts,
            env, transactionNode.onRetryBody.scope);
    BLangLambdaFunction trxCommittedFunc = createLambdaFunction(pos, "$anonTrxCommittedFunc$",
            Collections.emptyList(), otherReturnNode, transactionNode.committedBody.stmts,
            env, transactionNode.committedBody.scope);
    BLangLambdaFunction trxAbortedFunc = createLambdaFunction(pos, "$anonTrxAbortedFunc$",
            Collections.emptyList(), otherReturnNode, transactionNode.abortedBody.stmts,
            env, transactionNode.abortedBody.scope);
    trxMainFunc.capturedClosureEnv = env.createClone();
    trxOnRetryFunc.capturedClosureEnv = env.createClone();
    trxCommittedFunc.capturedClosureEnv = env.createClone();
    trxAbortedFunc.capturedClosureEnv = env.createClone();
    // Replace the whole statement with a call to the transaction initiator in
    // the ballerina/transaction package, passing the sub-block lambdas.
    PackageID packageID = new PackageID(Names.BALLERINA_ORG, Names.TRANSACTION_PACKAGE, Names.EMPTY);
    BPackageSymbol transactionPkgSymbol = new BPackageSymbol(packageID, null, 0);
    BInvokableSymbol invokableSymbol =
            (BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(transactionPkgSymbol),
                    TRX_INITIATOR_BEGIN_FUNCTION);
    BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(pos, symTable.stringType,
            getTransactionBlockId());
    List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, transactionNode.retryCount, trxMainFunc,
            trxOnRetryFunc, trxCommittedFunc, trxAbortedFunc);
    BLangInvocation trxInvocation = ASTBuilderUtil.createInvocationExprMethod(pos, invokableSymbol,
            requiredArgs, Collections.emptyList(), symResolver);
    BLangExpressionStmt stmt = ASTBuilderUtil.createExpressionStmt(pos, ASTBuilderUtil.createBlockStmt(pos));
    stmt.expr = trxInvocation;
    result = rewrite(stmt, env);
}

// Unique id per transaction block within the enclosing package (org$pkg$index).
private String getTransactionBlockId() {
    return env.enclPkg.packageID.orgName + "$" + env.enclPkg.packageID.name + "$" + transactionIndex++;
}

// Builds a lambda around an already-constructed function body and defines the
// backing function at package level.
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 List<BLangSimpleVariable> lambdaFunctionVariable,
                                                 TypeNode returnType, BLangFunctionBody lambdaBody) {
    BLangLambdaFunction lambdaFunction =
            (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
    lambdaFunction.function = func;
    func.requiredParams.addAll(lambdaFunctionVariable);
    func.setReturnTypeNode(returnType);
    func.desugaredReturnType = true;
    defineFunction(func, env.enclPkg);
    lambdaFunctionVariable = func.requiredParams;
    func.body = lambdaBody;
    func.desugared = false;
    lambdaFunction.pos = pos;
    List<BType> paramTypes = new ArrayList<>();
    lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
    lambdaFunction.type = new BInvokableType(paramTypes, func.symbol.type.getReturnType(), null);
    return lambdaFunction;
}

// Overload: builds the function body from raw statements (rewritten in a fresh
// body env bound to the given scope) and delegates to the overload above.
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 List<BLangSimpleVariable> lambdaFunctionVariable,
                                                 TypeNode returnType, List<BLangStatement> fnBodyStmts,
                                                 SymbolEnv env, Scope trxScope) {
    BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    body.scope = trxScope;
    SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
    body.stmts = rewriteStmt(fnBodyStmts, bodyEnv);
    return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body);
}

// Overload: parameterless lambda shell without a body.
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 TypeNode returnType) {
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
    lambdaFunction.function = func;
    func.setReturnTypeNode(returnType);
    func.desugaredReturnType = true;
    defineFunction(func, env.enclPkg);
    func.desugared = false;
    lambdaFunction.pos = pos;
    return lambdaFunction;
}

// Registers a generated function with the target package's symbol table and AST.
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
    final BPackageSymbol packageSymbol = targetPkg.symbol;
    final SymbolEnv packageEnv =
            this.symTable.pkgEnvMap.get(packageSymbol);
    symbolEnter.defineNode(funcNode, packageEnv);
    packageEnv.enclPkg.functions.add(funcNode);
    packageEnv.enclPkg.topLevelNodes.add(funcNode);
}

@Override
public void visit(BLangForkJoin forkJoin) {
    result = forkJoin;
}

@Override
public void visit(BLangLiteral literalExpr) {
    // Only byte-array (blob) literals need rewriting; other literals pass through.
    if (literalExpr.type.tag == TypeTags.ARRAY && ((BArrayType) literalExpr.type).eType.tag == TypeTags.BYTE) {
        result = rewriteBlobLiteral(literalExpr);
        return;
    }
    result = literalExpr;
}

// Expands a base16/base64 blob literal into an explicit byte array literal.
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
    String[] result = getBlobTextValue((String) literalExpr.value);
    byte[] values;
    if (BASE_64.equals(result[0])) {
        values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8));
    } else {
        values = hexStringToByteArray(result[1]);
    }
    BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteralNode.type = literalExpr.type;
    arrayLiteralNode.pos = literalExpr.pos;
    arrayLiteralNode.exprs = new ArrayList<>();
    for (byte b : values) {
        arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));
    }
    return arrayLiteralNode;
}

// Splits blob literal text of the shape `prefix `payload`` into [prefix, payload].
private String[] getBlobTextValue(String blobLiteralNodeText) {
    String nodeText = blobLiteralNodeText.replaceAll(" ", "");
    String[] result = new String[2];
    result[0] = nodeText.substring(0, nodeText.indexOf('`'));
    result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`'));
    return result;
}

// Decodes a hex string (two chars per byte) into raw bytes.
private static byte[] hexStringToByteArray(String str) {
    int len = str.length();
    byte[] data = new byte[len / 2];
    for (int i = 0; i < len; i += 2) {
        data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 4) + Character.digit(str.charAt(i + 1), 16));
    }
    return data;
}

// Desugars a list constructor into the concrete literal node that matches its
// contextually-determined type (tuple / json array / typedesc / array).
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    listConstructor.exprs = rewriteExprs(listConstructor.exprs);
    BLangExpression expr;
    if (listConstructor.type.tag == TypeTags.TUPLE) {
        expr = new
BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type);
        result = rewriteExpr(expr);
    } else if (listConstructor.type.tag == TypeTags.JSON) {
        expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.type));
        result = rewriteExpr(expr);
    } else if (getElementType(listConstructor.type).tag == TypeTags.JSON) {
        expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.type);
        result = rewriteExpr(expr);
    } else if (listConstructor.type.tag == TypeTags.TYPEDESC) {
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = listConstructor.typedescType;
        typedescExpr.type = symTable.typeDesc;
        result = rewriteExpr(typedescExpr);
    } else {
        expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type);
        result = rewriteExpr(expr);
    }
}

@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    // JSON-typed arrays get a dedicated literal node.
    if (arrayLiteral.type.tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.type));
        return;
    } else if (getElementType(arrayLiteral.type).tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.type);
        return;
    }
    result = arrayLiteral;
}

@Override
public void visit(BLangTupleLiteral tupleLiteral) {
    if (tupleLiteral.isTypedescExpr) {
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = tupleLiteral.typedescType;
        typedescExpr.type = symTable.typeDesc;
        result = rewriteExpr(typedescExpr);
        return;
    }
    // Make every member's implicit widening to `any` explicit before rewriting.
    tupleLiteral.exprs.forEach(expr -> {
        BType expType = expr.impConversionExpr == null ?
                expr.type : expr.impConversionExpr.type;
        types.setImplicitCastExpr(expr, expType, symTable.anyType);
    });
    tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
    result = tupleLiteral;
}

@Override
public void visit(BLangGroupExpr groupExpr) {
    // A parenthesized typedesc stays a typedesc; otherwise the parens vanish.
    if (groupExpr.isTypedescExpr) {
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = groupExpr.typedescType;
        typedescExpr.type = symTable.typeDesc;
        result = rewriteExpr(typedescExpr);
    } else {
        result = rewriteExpr(groupExpr.expression);
    }
}

@Override
public void visit(BLangRecordLiteral recordLiteral) {
    List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
    // Order literal-key fields before computed-key fields (stable sort).
    fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2)));
    result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}

// Desugars a simple variable reference into the specialized ref node that the
// code generator expects (local/field/package/function ref, xmlns qname, or an
// inlined constant literal).
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    BLangSimpleVarRef genVarRefExpr = varRefExpr;
    // An xmlns-prefixed reference becomes an XML qualified-name expression.
    if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
        qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
        qnameExpr.localname = varRefExpr.variableName;
        qnameExpr.prefix = varRefExpr.pkgAlias;
        qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
        qnameExpr.isUsedInXML = false;
        qnameExpr.pos = varRefExpr.pos;
        qnameExpr.type = symTable.stringType;
        result = qnameExpr;
        return;
    }
    if (varRefExpr.symbol == null) {
        result = varRefExpr;
        return;
    }
    // Prefer the original symbol when this ref points at a clone.
    if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
        BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
        if (varSymbol.originalSymbol != null) {
            varRefExpr.symbol = varSymbol.originalSymbol;
        }
    }
    BSymbol ownerSymbol = varRefExpr.symbol.owner;
    if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION
            && varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) {
        genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE && !((varRefExpr.symbol.tag &
            SymTag.CONSTANT) == SymTag.CONSTANT)) {
        genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE
            || (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
        genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
        genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE
            || (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
        if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
            // Inline simple-typed constants (int..boolean, nil) as literals.
            if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
                BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                        constSymbol.value.value);
                result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.type));
                return;
            }
        }
        genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
    }
    genVarRefExpr.type = varRefExpr.type;
    genVarRefExpr.pos = varRefExpr.pos;
    // LHS refs (and `_`) keep the declared symbol type; no conversion wrapper.
    if ((varRefExpr.lhsVar) || genVarRefExpr.symbol.name.equals(IGNORE)) {
        genVarRefExpr.lhsVar = varRefExpr.lhsVar;
        genVarRefExpr.type = varRefExpr.symbol.type;
        result = genVarRefExpr;
        return;
    }
    genVarRefExpr.lhsVar = varRefExpr.lhsVar;
    BType targetType = genVarRefExpr.type;
    genVarRefExpr.type = genVarRefExpr.symbol.type;
    BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
    result = expression.impConversionExpr != null ?
            expression.impConversionExpr : expression;
}

// Desugars field-based access into the concrete access node per receiver type
// (object/record member, lax json/xml access, map access, xml access).
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    if (safeNavigate(fieldAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }
    BLangAccessExpression targetVarRef = fieldAccessExpr;
    BType varRefType = fieldAccessExpr.expr.type;
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.type, varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.pos, fieldAccessExpr.field.value);
    int varRefTypeTag = varRefType.tag;
    if (varRefTypeTag == TypeTags.OBJECT
            || (varRefTypeTag == TypeTags.UNION
            && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        // Attached method access vs plain field access on an object.
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr,
                    (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false);
        }
    } else if (varRefTypeTag == TypeTags.RECORD
            || (varRefTypeTag == TypeTags.UNION
            && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr,
                    (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false);
        }
    } else if (types.isLax(varRefType)) {
        if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) {
            if (varRefType.tag
== TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) {
                // Lax access on map<xml> needs the key-missing error rewrite.
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            targetVarRef = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }
    targetVarRef.lhsVar = fieldAccessExpr.lhsVar;
    targetVarRef.type = fieldAccessExpr.type;
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}

// Desugars a lax access on map<xml>: yields the value when the key exists,
// otherwise a {lang.map}InvalidKey error carrying the missing key name.
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.type, symTable.errorType);
    DiagnosticPos pos = fieldAccessExpr.pos;
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.type = fieldAccessType;
    statementExpression.type = fieldAccessType;
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.type, symTable.nilType);
    mapAccessExpr.type = xmlOrNil;
    BLangSimpleVariableDef mapResult =
            createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);
    // if ($mapAccess is ()) { result = error(...); } else { result = $mapAccess; }
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;
    BLangInvocation errorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue("error");
    errorInvocation.name = name;
    errorInvocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    errorInvocation.symbol = symTable.errorConstructor;
    errorInvocation.type = symTable.errorType;
    ArrayList<BLangExpression> errorCtorArgs = new ArrayList<>();
    errorInvocation.requiredArgs = errorCtorArgs;
    errorCtorArgs.add(createStringLiteral(pos, "{" + BLangConstants.MAP_LANG_LIB + "}InvalidKey"));
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    errorCtorArgs.add(message);
    BLangSimpleVariableDef errorDef = createVarDef("_$_invalid_key_error", symTable.errorType,
            errorInvocation, pos);
    resultNilBody.addStatement(errorDef);
    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;
    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;
    statementExpression.expr = resultRef;
    return statementExpression;
}

// Rewrites xml attribute / element-name ("_") field access into the matching
// lang.xml internal function invocation.
private BLangAccessExpression rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    ArrayList<BLangExpression> args = new ArrayList<>();
    String fieldName = fieldAccessExpr.field.value;
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);
    }
    // Accessing the field name "_" maps to getting the element's name.
    if (fieldName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }
    BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);
    args.add(attributeNameLiteral);
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr,
            args, new ArrayList<>());
}

// Boolean literal carrying whether this was an optional (`?.`) access.
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    return rewrite(
            createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess()),
            env);
}

// Expanded xml qualified name: "{namespaceURI}localName".
private String createExpandedQName(String nsURI, String localName) {
    return "{" + nsURI + "}" + localName;
}

// Desugars index-based access into the concrete access node per receiver type
// (map, mapping, list, string, xml).
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }
    BLangVariableReference targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
    BType varRefType = indexAccessExpr.expr.type;
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.type, varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }
    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef =
                new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }
    targetVarRef.lhsVar = indexAccessExpr.lhsVar;
    targetVarRef.type = indexAccessExpr.type;
    result = targetVarRef;
}

// Desugars a function/method invocation: argument reordering, type-param cast
// insertion, function-pointer dispatch, and attached-function rewriting.
@Override
public void visit(BLangInvocation iExpr) {
    BLangInvocation genIExpr = iExpr;
    // NOTE(review): no early return after this branch — `result` set here can
    // be overwritten further down; confirm whether that is intended.
    if (iExpr.symbol != null && iExpr.symbol.kind == SymbolKind.ERROR_CONSTRUCTOR) {
        result = rewriteErrorConstructor(iExpr);
    }
    reorderArguments(iExpr);
    iExpr.requiredArgs = rewriteExprs(iExpr.requiredArgs);
    fixNonRestArgTypeCastInTypeParamInvocation(iExpr);
    iExpr.restArgs = rewriteExprs(iExpr.restArgs);
    annotationDesugar.defineStatementAnnotations(iExpr.annAttachments, iExpr.pos, iExpr.symbol.pkgID,
            iExpr.symbol.owner, env);
    if (iExpr.functionPointerInvocation) {
        visitFunctionPointerInvocation(iExpr);
        return;
    }
    iExpr.expr = rewriteExpr(iExpr.expr);
    result = genIExpr;
    if (iExpr.expr == null) {
        fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
        if (iExpr.exprSymbol == null) {
            return;
        }
        // Implicit receiver: materialize it as a variable reference.
        iExpr.expr = ASTBuilderUtil.createVariableRef(iExpr.pos, iExpr.exprSymbol);
        iExpr.expr = rewriteExpr(iExpr.expr);
    }
    switch (iExpr.expr.type.tag) {
        case TypeTags.OBJECT:
        case
TypeTags.RECORD: if (!iExpr.langLibInvocation) { List<BLangExpression> argExprs = new ArrayList<>(iExpr.requiredArgs); argExprs.add(0, iExpr.expr); BLangAttachedFunctionInvocation attachedFunctionInvocation = new BLangAttachedFunctionInvocation(iExpr.pos, argExprs, iExpr.restArgs, iExpr.symbol, iExpr.type, iExpr.expr, iExpr.async); attachedFunctionInvocation.actionInvocation = iExpr.actionInvocation; attachedFunctionInvocation.name = iExpr.name; attachedFunctionInvocation.annAttachments = iExpr.annAttachments; result = genIExpr = attachedFunctionInvocation; } break; } fixTypeCastInTypeParamInvocation(iExpr, genIExpr); } private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) { if (!iExpr.langLibInvocation) { return; } List<BLangExpression> requiredArgs = iExpr.requiredArgs; List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params; for (int i = 1; i < requiredArgs.size(); i++) { requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), params.get(i).type)); } } private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) { if (iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(((BInvokableSymbol) iExpr.symbol).retType)) { BType originalInvType = genIExpr.type; genIExpr.type = ((BInvokableSymbol) genIExpr.symbol).retType; BLangExpression expr = addConversionExprIfRequired(genIExpr, originalInvType); if (expr.getKind() == NodeKind.TYPE_CONVERSION_EXPR) { this.result = expr; return; } BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); conversionExpr.expr = genIExpr; conversionExpr.targetType = originalInvType; conversionExpr.type = originalInvType; conversionExpr.pos = genIExpr.pos; this.result = conversionExpr; } } private BLangInvocation rewriteErrorConstructor(BLangInvocation iExpr) { BLangExpression reasonExpr = iExpr.requiredArgs.get(0); if (reasonExpr.impConversionExpr != null && 
reasonExpr.impConversionExpr.targetType.tag != TypeTags.STRING) { reasonExpr.impConversionExpr = null; } reasonExpr = addConversionExprIfRequired(reasonExpr, symTable.stringType); reasonExpr = rewriteExpr(reasonExpr); iExpr.requiredArgs.remove(0); iExpr.requiredArgs.add(reasonExpr); BLangExpression errorDetail; BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(iExpr.pos, ((BErrorType) iExpr.symbol.type).detailType); List<BLangExpression> namedArgs = iExpr.requiredArgs.stream() .filter(a -> a.getKind() == NodeKind.NAMED_ARGS_EXPR) .collect(Collectors.toList()); if (namedArgs.isEmpty()) { errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.type); } else { for (BLangExpression arg : namedArgs) { BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg; BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField(); member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos, symTable.stringType, namedArg.name.value)); if (recordLiteral.type.tag == TypeTags.RECORD) { member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType); } else { member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.type); } recordLiteral.fields.add(member); iExpr.requiredArgs.remove(arg); } errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), ((BErrorType) iExpr.symbol.type).detailType); } iExpr.requiredArgs.add(errorDetail); return iExpr; } public void visit(BLangTypeInit typeInitExpr) { if (typeInitExpr.type.tag == TypeTags.STREAM) { result = rewriteExpr(desugarStreamTypeInit(typeInitExpr)); } else { result = rewrite(desugarObjectTypeInit(typeInitExpr), env); } } private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) { typeInitExpr.desugared = true; BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos); BType objType = getObjectType(typeInitExpr.type); 
BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
        BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
        blockStmt.addStatement(objVarDef);
        // Route the init call through the compiler-generated initializer of the object.
        typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
        typeInitExpr.initInvocation.symbol =
                ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;

        // Initializer returns nil: no error branch is needed, yield the object directly.
        if (typeInitExpr.initInvocation.type.tag == TypeTags.NIL) {
            BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
            initInvExpr.expr = typeInitExpr.initInvocation;
            typeInitExpr.initInvocation.name.value = Names.GENERATED_INIT_SUFFIX.value;
            BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
            stmtExpr.type = objVarRef.symbol.type;
            return stmtExpr;
        }

        // Initializer may return an error: capture its return value in $temp$.
        BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$",
                typeInitExpr.initInvocation.type, typeInitExpr.initInvocation, typeInitExpr.pos);
        blockStmt.addStatement(initInvRetValVarDef);

        // $result$ holds either the error or the constructed object.
        BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.type, null, typeInitExpr.pos);
        blockStmt.addStatement(resultVarDef);

        // if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
        BLangSimpleVarRef initRetValVarRefInCondition =
                ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
        BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
        BLangTypeTestExpr isErrorTest =
                ASTBuilderUtil.createTypeTestExpr(typeInitExpr.pos, initRetValVarRefInCondition, getErrorTypeNode());
        isErrorTest.type = symTable.booleanType;

        BLangSimpleVarRef thenInitRetValVarRef =
                ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
        BLangSimpleVarRef thenResultVarRef =
                ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
        BLangAssignment errAssignment =
                ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, thenResultVarRef, thenInitRetValVarRef);
        thenStmt.addStatement(errAssignment);

        BLangSimpleVarRef elseResultVarRef =
                ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
        BLangAssignment objAssignment =
                ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, elseResultVarRef, objVarRef);
        BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
        elseStmt.addStatement(objAssignment);

        BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(typeInitExpr.pos, isErrorTest, thenStmt, elseStmt);
        blockStmt.addStatement(ifelse);

        // The whole block evaluates to $result$.
        BLangSimpleVarRef resultVarRef =
                ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
        stmtExpr.type = resultVarRef.symbol.type;
        return stmtExpr;
    }

    // Rewrites `new stream<T, E>(iterator)` into a call to the internal-module
    // constructStream(typedesc, iterator) function.
    private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
        BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
                .lookup(Names.CONSTRUCT_STREAM).symbol;

        BType targetType = ((BStreamType) typeInitExpr.type).constraint;
        BType errorType = ((BStreamType) typeInitExpr.type).error;
        BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);

        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = targetType;
        typedescExpr.type = typedescType;

        BLangExpression iteratorObj = typeInitExpr.argsExpr.get(0);

        BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
                typeInitExpr.pos, symbol, new ArrayList<>(Lists.of(typedescExpr, iteratorObj)), symResolver);
        streamConstructInvocation.type = new BStreamType(TypeTags.STREAM, targetType, errorType, null);
        return streamConstructInvocation;
    }

    // Defines (or reuses) a local variable named "$<name>$" of the given type, optionally
    // initialized with expr, and returns its variable-definition node.
    // (Body continues in the next chunk.)
    private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, DiagnosticPos pos) {
        BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
        // No existing symbol with this name: create a fresh one owned by the current scope.
        if (objSym == null || objSym == symTable.notFoundSymbol) {
            objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type, this.env.scope.owner);
        }
        BLangSimpleVariable
objVar = ASTBuilderUtil.createVariable(pos, "$" + name + "$", type, expr, (BVarSymbol) objSym);
        BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(pos);
        objVarDef.var = objVar;
        objVarDef.type = objVar.type;
        return objVarDef;
    }

    // Resolves the object component of a type-init target: the type itself when it is an
    // object, or the first object member of a union; throws for anything else.
    private BType getObjectType(BType type) {
        if (type.tag == TypeTags.OBJECT) {
            return type;
        } else if (type.tag == TypeTags.UNION) {
            return ((BUnionType) type).getMemberTypes().stream()
                    .filter(t -> t.tag == TypeTags.OBJECT)
                    .findFirst()
                    .orElse(symTable.noType);
        }

        throw new IllegalStateException("None object type '" + type.toString() +
                "' found in object init context");
    }

    // Creates a fresh AST node for the built-in error type (used in `is error` tests).
    BLangErrorType getErrorTypeNode() {
        BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
        errorTypeNode.type = symTable.errorType;
        return errorTypeNode;
    }

    @Override
    public void visit(BLangTernaryExpr ternaryExpr) {
        /*
         * First desugar to if-else:
         *
         * T $result$;
         * if () {
         *    $result$ = thenExpr;
         * } else {
         *    $result$ = elseExpr;
         * }
         *
         */
        BLangSimpleVariableDef resultVarDef = createVarDef("$ternary_result$", ternaryExpr.type, null, ternaryExpr.pos);
        BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
        BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);

        // Then body: $result$ = thenExpr;
        BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
        BLangAssignment thenAssignment =
                ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
        thenBody.addStatement(thenAssignment);

        // Else body: $result$ = elseExpr;
        BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
        BLangAssignment elseAssignment =
                ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
        elseBody.addStatement(elseAssignment);

        // The statement expression evaluates to $result$.
        BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
        BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr,
                thenBody, elseBody);
        BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
        stmtExpr.type = ternaryExpr.type;
        result = rewriteExpr(stmtExpr);
    }

    @Override
    public void visit(BLangWaitExpr waitExpr) {
        // wait e1 | e2 | ... : flatten the alternative-wait binary tree into a list.
        if (waitExpr.getExpression().getKind() == NodeKind.BINARY_EXPR) {
            waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitExpr.getExpression(), new ArrayList<>());
        } else { // Wait for one expression
            waitExpr.exprList = Collections.singletonList(rewriteExpr(waitExpr.getExpression()));
        }
        result = waitExpr;
    }

    // Recursively collects all leaf expressions of a wait alternative-expression tree.
    private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
        visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
        visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
        return exprs;
    }

    // Descends into nested binary wait expressions; rewrites and records leaves.
    private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
        if (expr.getKind() == NodeKind.BINARY_EXPR) {
            collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
        } else {
            expr = rewriteExpr(expr);
            exprs.add(expr);
        }
    }

    @Override
    public void visit(BLangWaitForAllExpr waitExpr) {
        // wait {k1: f1, k2: f2, ...}: rewrite each value (or the key used as value).
        waitExpr.keyValuePairs.forEach(keyValue -> {
            if (keyValue.valueExpr != null) {
                keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
            } else {
                keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
            }
        });
        BLangExpression expr = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.type);
        result = rewriteExpr(expr);
    }

    @Override
    public void visit(BLangTrapExpr trapExpr) {
        trapExpr.expr = rewriteExpr(trapExpr.expr);
        // Non-nil trapped expression: cast its value up to the trap expression's type.
        if (trapExpr.expr.type.tag != TypeTags.NIL) {
            trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.type);
        }
        result = trapExpr;
    }

    @Override
    public void visit(BLangBinaryExpr binaryExpr) {
        // Range operators (a ... b / a ..< b) become an internal int-range construction.
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
            if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
                binaryExpr.rhsExpr =
getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
            }
            result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
            return;
        }

        // Logical && / || get short-circuit desugaring elsewhere.
        if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
            visitBinaryLogicalExpr(binaryExpr);
            return;
        }

        OperatorKind binaryOpKind = binaryExpr.opKind;
        // Arithmetic/bitwise operators mixing byte operands may need widening first.
        if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
                binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
                binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
                binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
            checkByteTypeIncompatibleOperations(binaryExpr);
        }

        binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
        binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
        result = binaryExpr;

        int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag;
        int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag;

        // (Ref-)equality between int and byte: cast the byte side up to int.
        if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
                binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
                binaryExpr.opKind == OperatorKind.REF_EQUAL ||
                binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
            if (lhsExprTypeTag == TypeTags.INT && rhsExprTypeTag == TypeTags.BYTE) {
                binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
                return;
            }

            if (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.INT) {
                binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
                return;
            }
        }

        // Same operand types: nothing more to reconcile.
        if (lhsExprTypeTag == rhsExprTypeTag) {
            return;
        }

        // string + x : either wrap the string side as xml text (for xml concat) or cast
        // the non-string side to the string operand's type.
        if (TypeTags.isStringTypeTag(lhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
            if (TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
                binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                        binaryExpr.lhsExpr.pos, symTable.xmlType);
                return;
            }
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
            return;
        }

        // Mirror case: x + string.
        if (TypeTags.isStringTypeTag(rhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
            if (TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
                binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                        binaryExpr.rhsExpr.pos, symTable.xmlType);
                return;
            }
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
            return;
        }

        // Mixed numeric operands: widen toward decimal, then float.
        if (lhsExprTypeTag == TypeTags.DECIMAL) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
            return;
        }

        if (rhsExprTypeTag == TypeTags.DECIMAL) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
            return;
        }

        if (lhsExprTypeTag == TypeTags.FLOAT) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
            return;
        }

        if (rhsExprTypeTag == TypeTags.FLOAT) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
        }
    }

    // Replaces a range operator with a call to the internal createIntRange(lhs, rhs).
    private BLangInvocation replaceWithIntRange(DiagnosticPos pos, BLangExpression lhsExpr, BLangExpression rhsExpr) {
        BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
                .lookup(Names.CREATE_INT_RANGE).symbol;
        BLangInvocation createIntRangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos, symbol,
                new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);
        createIntRangeInvocation.type = symTable.intRangeType;
        return createIntRangeInvocation;
    }

    // When a byte operand participates in an int-result binary operation, widens the
    // byte side(s) to int so the operation is performed on ints.
    private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
        if (binaryExpr.expectedType == null) {
            return;
        }

        int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag;
        int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag;
        if (rhsExprTypeTag != TypeTags.BYTE && lhsExprTypeTag != TypeTags.BYTE) {
            return;
        }

        int resultTypeTag = binaryExpr.expectedType.tag;
        if (resultTypeTag == TypeTags.INT) {
            if (rhsExprTypeTag == TypeTags.BYTE) {
                binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
            }

            if (lhsExprTypeTag == TypeTags.BYTE) {
                binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
            }
        }
    }
/**
     * This method checks whether given binary expression is related to shift operation.
     * If its true, then both lhs and rhs of the binary expression will be converted to 'int' type.
     * <p>
     * byte a = 12;
     * byte b = 34;
     * int i = 234;
     * int j = -4;
     * <p>
     * true: where binary expression's expected type is 'int'
     * int i1 = a &gt;&gt; b;
     * int i2 = a &lt;&lt; b;
     * int i3 = a &gt;&gt; i;
     * int i4 = a &lt;&lt; i;
     * int i5 = i &gt;&gt; j;
     * int i6 = i &lt;&lt; j;
     */
    private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
        return binaryExpr.opKind == OperatorKind.BITWISE_LEFT_SHIFT ||
                binaryExpr.opKind == OperatorKind.BITWISE_RIGHT_SHIFT ||
                binaryExpr.opKind == OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT;
    }

    // lhs ?: rhs is desugared into a match expression with a nil-pattern clause that
    // yields the rhs; addMatchExprDefaultCase-style handling supplies the lhs value.
    public void visit(BLangElvisExpr elvisExpr) {
        BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
        matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos,
                rewriteExpr(elvisExpr.rhsExpr)));
        matchExpr.type = elvisExpr.type;
        matchExpr.pos = elvisExpr.pos;
        result = rewriteExpr(matchExpr);
    }

    @Override
    public void visit(BLangUnaryExpr unaryExpr) {
        // ~x gets its own lowering into a xor; everything else just rewrites the operand.
        if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) {
            rewriteBitwiseComplementOperator(unaryExpr);
            return;
        }
        unaryExpr.expr = rewriteExpr(unaryExpr.expr);
        result = unaryExpr;
    }

    /**
     * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary expression as below.
     * Example : ~a  -&gt; a ^ -1;
     * ~ 11110011 -&gt; 00001100
     * 11110011 ^ 11111111 -&gt; 00001100
     *
     * @param unaryExpr the bitwise complement expression
     */
    private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
        final DiagnosticPos pos = unaryExpr.pos;
        final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
        binaryExpr.pos = pos;
        binaryExpr.opKind = OperatorKind.BITWISE_XOR;
        binaryExpr.lhsExpr = unaryExpr.expr;
        if (TypeTags.BYTE == unaryExpr.type.tag) {
            // byte complement: xor with 0xff keeps the result within byte range.
            binaryExpr.type = symTable.byteType;
            binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
            binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                    symTable.byteType, symTable.byteType);
        } else {
            // int complement: xor with -1 (all bits set).
            binaryExpr.type = symTable.intType;
            binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
            binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                    symTable.intType, symTable.intType);
        }
        result = rewriteExpr(binaryExpr);
    }

    @Override
    public void visit(BLangTypeConversionExpr conversionExpr) {
        // An annotated conversion with no target type node is a no-op wrapper here;
        // unwrap and rewrite the inner expression only.
        if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
            result = rewriteExpr(conversionExpr.expr);
            return;
        }
        conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
        conversionExpr.expr = rewriteExpr(conversionExpr.expr);
        result = conversionExpr;
    }

    @Override
    public void visit(BLangLambdaFunction bLangLambdaFunction) {
        // Register the lambda at package level; nothing else to lower here.
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
        result = bLangLambdaFunction;
    }

    // Desugars an arrow function (x => expr) into a full lambda function node with a
    // synthesized function symbol, parameter scope and return type.
    // (Body continues in the next chunk.)
    @Override
    public void visit(BLangArrowFunction bLangArrowFunction) {
        BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
        bLangFunction.setName(bLangArrowFunction.functionName);
        BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
        lambdaFunction.pos = bLangArrowFunction.pos;
        bLangFunction.addFlag(Flag.LAMBDA);
        lambdaFunction.function = bLangFunction;
BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
        // The arrow body is a single expression; its type is the function's return type.
        returnType.type = bLangArrowFunction.body.expr.type;
        bLangFunction.setReturnTypeNode(returnType);
        bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));

        bLangArrowFunction.params.forEach(bLangFunction::addParameter);
        lambdaFunction.parent = bLangArrowFunction.parent;
        lambdaFunction.type = bLangArrowFunction.funcType;

        // Create function symbol before visiting desugar phase for the function
        BLangFunction funcNode = lambdaFunction.function;
        BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
                new Name(funcNode.name.value), env.enclPkg.symbol.pkgID, bLangArrowFunction.funcType,
                env.enclEnv.enclVarSym, true);
        SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
        defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);

        // Define each parameter symbol inside the new function's scope while collecting it.
        List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
            Scope enclScope = invokableEnv.scope;
            varNode.symbol.kind = SymbolKind.FUNCTION;
            varNode.symbol.owner = invokableEnv.scope.owner;
            enclScope.define(varNode.symbol.name, varNode.symbol);
        }).map(varNode -> varNode.symbol).collect(Collectors.toList());

        funcSymbol.params = paramSymbols;
        funcSymbol.restParam = getRestSymbol(funcNode);
        funcSymbol.retType = funcNode.returnTypeNode.type;

        // Create function type using params for the symbol
        List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
        funcNode.type = new BInvokableType(paramTypes, getRestType(funcSymbol),
                funcNode.returnTypeNode.type, null);

        lambdaFunction.function.pos = bLangArrowFunction.pos;
        lambdaFunction.function.body.pos = bLangArrowFunction.pos;
        // At this phase lambda function is semantically correct. Therefore simply env can be assigned.
        lambdaFunction.capturedClosureEnv = env;
        rewrite(lambdaFunction.function, env);
        env.enclPkg.addFunction(lambdaFunction.function);
        bLangArrowFunction.function = lambdaFunction.function;
        result = rewriteExpr(lambdaFunction);
    }

    // Attaches a fresh scope to the invokable's symbol and points the env at it.
    private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                       SymbolEnv invokableEnv) {
        invokableNode.symbol = funcSymbol;
        funcSymbol.scope = new Scope(funcSymbol);
        invokableEnv.scope = funcSymbol.scope;
    }

    @Override
    public void visit(BLangXMLQName xmlQName) {
        result = xmlQName;
    }

    @Override
    public void visit(BLangXMLAttribute xmlAttribute) {
        xmlAttribute.name = rewriteExpr(xmlAttribute.name);
        xmlAttribute.value = rewriteExpr(xmlAttribute.value);
        result = xmlAttribute;
    }

    @Override
    public void visit(BLangXMLElementLiteral xmlElementLiteral) {
        xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
        xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
        xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
        xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);

        // Collect the in-line namespace declarations (xmlns attributes) so they can be
        // registered as package-level or local namespace nodes.
        Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
        while (attributesItr.hasNext()) {
            BLangXMLAttribute attribute = attributesItr.next();
            if (!attribute.isNamespaceDeclr) {
                continue;
            }

            // Handle namespace declarations.
            BLangXMLNS xmlns;
            if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
                xmlns = new BLangPackageXMLNS();
            } else {
                xmlns = new BLangLocalXMLNS();
            }

            xmlns.namespaceURI = attribute.value.concatExpr;
            xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
            xmlns.symbol = attribute.symbol;

            xmlElementLiteral.inlineNamespaces.add(xmlns);
        }

        result = xmlElementLiteral;
    }

    @Override
    public void visit(BLangXMLTextLiteral xmlTextLiteral) {
        // Text fragments are folded into one string-concat expression.
        xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
        result = xmlTextLiteral;
    }

    @Override
    public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
        xmlCommentLiteral.concatExpr = rewriteExpr(
                constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
        result = xmlCommentLiteral;
    }

    @Override
    public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
        xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
        xmlProcInsLiteral.dataConcatExpr =
rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
        result = xmlProcInsLiteral;
    }

    @Override
    public void visit(BLangXMLQuotedString xmlQuotedString) {
        // Quoted attribute value: fold its text fragments into one concat expression.
        xmlQuotedString.concatExpr = rewriteExpr(
                constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
        result = xmlQuotedString;
    }

    @Override
    public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
        result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
    }

    @Override
    public void visit(BLangWorkerSend workerSendNode) {
        // The sent value is cloned so the receiving worker gets its own copy.
        workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.type);
        if (workerSendNode.keyExpr != null) {
            workerSendNode.keyExpr = rewriteExpr(workerSendNode.keyExpr);
        }
        result = workerSendNode;
    }

    @Override
    public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
        syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.type);
        result = syncSendExpr;
    }

    @Override
    public void visit(BLangWorkerReceive workerReceiveNode) {
        if (workerReceiveNode.keyExpr != null) {
            workerReceiveNode.keyExpr = rewriteExpr(workerReceiveNode.keyExpr);
        }
        result = workerReceiveNode;
    }

    @Override
    public void visit(BLangWorkerFlushExpr workerFlushExpr) {
        // Flush targets are the distinct workers of the cached send statements.
        workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
                .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
        result = workerFlushExpr;
    }

    @Override
    public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
        xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
        xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);

        if (xmlAttributeAccessExpr.indexExpr != null
                && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
            ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
        }

        xmlAttributeAccessExpr.desugared = true;

        // When accessing with a qname or on the lhs, keep the node as-is; otherwise
        // run it through the rewriter once more.
        if (xmlAttributeAccessExpr.lhsVar || xmlAttributeAccessExpr.indexExpr != null) {
            result = xmlAttributeAccessExpr;
        } else {
            result = rewriteExpr(xmlAttributeAccessExpr);
        }
    }

    // The following visits are for nodes that are already fully desugared.

    @Override
    public void visit(BLangLocalVarRef localVarRef) {
        result = localVarRef;
    }

    @Override
    public void visit(BLangFieldVarRef fieldVarRef) {
        result = fieldVarRef;
    }

    @Override
    public void visit(BLangPackageVarRef packageVarRef) {
        result = packageVarRef;
    }

    @Override
    public void visit(BLangFunctionVarRef functionVarRef) {
        result = functionVarRef;
    }

    @Override
    public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
        result = fieldAccessExpr;
    }

    @Override
    public void visit(BLangStructFunctionVarRef functionVarRef) {
        result = functionVarRef;
    }

    @Override
    public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
        result = mapKeyAccessExpr;
    }

    @Override
    public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
        result = arrayIndexAccessExpr;
    }

    @Override
    public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
        result = arrayIndexAccessExpr;
    }

    @Override
    public void visit(BLangMapLiteral mapLiteral) {
        result = mapLiteral;
    }

    @Override
    public void visit(BLangStructLiteral structLiteral) {
        result = structLiteral;
    }

    @Override
    public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
        result = waitLiteral;
    }

    @Override
    public void visit(BLangXMLElementAccess xmlElementAccess) {
        // x.<elem> access is lowered to the internal getElements langlib call with the
        // expanded filter names as rest arguments.
        xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
        ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
                xmlElementAccess.expr, new ArrayList<>(), filters);
        result = rewriteExpr(invocationNode);
    }

    // Expands element filters to fully-qualified name string literals, resolving each
    // filter's namespace prefix (falling back to the default namespace when present).
    // (Body continues in the next chunk.)
    private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
        Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
        BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
        String defaultNS = defaultNSSymbol != null ?
defaultNSSymbol.namespaceURI : null;

        ArrayList<BLangExpression> args = new ArrayList<>();
        for (BLangXMLElementFilter filter : filters) {
            BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
            if (nsSymbol == symTable.notFoundSymbol) {
                // No prefix binding: apply the default namespace except for the
                // wildcard filter "*", which stays unqualified.
                if (defaultNS != null && !filter.name.equals("*")) {
                    String expandedName = createExpandedQName(defaultNS, filter.name);
                    args.add(createStringLiteral(filter.elemNamePos, expandedName));
                } else {
                    args.add(createStringLiteral(filter.elemNamePos, filter.name));
                }
            } else {
                BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
                String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
                BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
                args.add(stringLiteral);
            }
        }
        return args;
    }

    // Builds a langlib-style invocation on an xml value: the receiver is rewritten and
    // also passed as the first required argument, per langlib calling convention.
    private BLangInvocation createLanglibXMLInvocation(DiagnosticPos pos, String functionName,
                                                      BLangExpression invokeOnExpr,
                                                      ArrayList<BLangExpression> args,
                                                      ArrayList<BLangExpression> restArgs) {
        invokeOnExpr = rewriteExpr(invokeOnExpr);

        BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
        invocationNode.pos = pos;
        BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        name.setLiteral(false);
        name.setValue(functionName);
        name.pos = pos;
        invocationNode.name = name;
        invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();

        invocationNode.expr = invokeOnExpr;

        invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName));

        ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
        requiredArgs.add(invokeOnExpr);
        requiredArgs.addAll(args);
        invocationNode.requiredArgs = requiredArgs;
        invocationNode.restArgs = rewriteExprs(restArgs);

        invocationNode.type = ((BInvokableType) invocationNode.symbol.type).getReturnType();
        invocationNode.langLibInvocation = true;
        return invocationNode;
    }

    @Override
    public void visit(BLangXMLNavigationAccess xmlNavigation) {
        xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
        xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
        ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);

        // Dispatch on the navigation kind: descendants (x/**/<e>), all children (x/*),
        // or filtered/indexed children.
        if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
            BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
            result = rewriteExpr(invocationNode);
        } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
            BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_CHILDREN, xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
            result = rewriteExpr(invocationNode);
        } else {
            BLangExpression childIndexExpr;
            // -1 is the sentinel for "no index given".
            if (xmlNavigation.childIndex == null) {
                childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
            } else {
                childIndexExpr = xmlNavigation.childIndex;
            }
            ArrayList<BLangExpression> args = new ArrayList<>();
            args.add(rewriteExpr(childIndexExpr));

            BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
            result = rewriteExpr(invocationNode);
        }
    }

    @Override
    public void visit(BLangIsAssignableExpr assignableExpr) {
        assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
        result = assignableExpr;
    }

    @Override
    public void visit(BFunctionPointerInvocation fpInvocation) {
        result = fpInvocation;
    }

    @Override
    public void visit(BLangTypedescExpr typedescExpr) {
        typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
        result = typedescExpr;
    }

    @Override
    public void visit(BLangIntRangeExpression intRangeExpression) {
        // Exclusive bounds are shifted by one before the range is materialized.
        if (!intRangeExpression.includeStart) {
            intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr);
        }
        if (!intRangeExpression.includeEnd) {
            intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr);
        }
intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr);
        intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr);
        result = intRangeExpression;
    }

    @Override
    public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
        result = rewriteExpr(bLangVarArgsExpression.expr);
    }

    @Override
    public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
        // A named argument reduces to its (rewritten) value expression.
        bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
        result = bLangNamedArgsExpression.expr;
    }

    // A match expression is lowered to: { T $temp_result$; match expr { <clauses each
    // assigning $temp_result$> } } yielding $temp_result$.
    @Override
    public void visit(BLangMatchExpression bLangMatchExpression) {
        // Ensure a default clause exists so the match is exhaustive.
        addMatchExprDefaultCase(bLangMatchExpression);

        // Temporary variable that carries the selected clause's value.
        String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
        BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
                matchTempResultVarName, bLangMatchExpression.type, null,
                new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID,
                        bLangMatchExpression.type, this.env.scope.owner));

        BLangSimpleVariableDef tempResultVarDef =
                ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar);
        tempResultVarDef.desugared = true;

        BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef));
        List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();

        for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) {
            BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
            pattern.expr = rewriteExpr(pattern.expr);

            // Each clause becomes: $temp_result$ = <clause expr (cast if needed)>;
            BLangVariableReference tempResultVarRef =
                    ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
            pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.type);
            BLangAssignment assignmentStmt =
                    ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr);
            BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt));
            patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos,
                    pattern.variable, patternBody));
        }

        stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos,
                bLangMatchExpression.expr, patternClauses));
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
        BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef);
        statementExpr.type = bLangMatchExpression.type;
        result = rewriteExpr(statementExpr);
    }

    @Override
    public void visit(BLangCheckedExpr checkedExpr) {
        visitCheckAndCheckPanicExpr(checkedExpr, false);
    }

    @Override
    public void visit(BLangCheckPanickedExpr checkedExpr) {
        visitCheckAndCheckPanicExpr(checkedExpr, true);
    }

    // Shared lowering for `check` / `checkpanic`: a match splits the evaluated value
    // into a success pattern (assigns the temp var) and an error pattern (returns or
    // panics, depending on isCheckPanic). (Body continues in the next chunk.)
    private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
        // Temporary variable that holds the non-error value of the checked expression.
        String checkedExprVarName = GEN_VAR_PREFIX.value;
        BLangSimpleVariable checkedExprVar = ASTBuilderUtil.createVariable(checkedExpr.pos,
                checkedExprVarName, checkedExpr.type, null, new BVarSymbol(0,
                        names.fromString(checkedExprVarName),
                        this.env.scope.owner.pkgID, checkedExpr.type, this.env.scope.owner));
        BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
        checkedExprVarDef.desugared = true;

        BLangMatchTypedBindingPatternClause patternSuccessCase =
                getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true,
                        checkedExprVar.symbol, null);
        BLangMatchTypedBindingPatternClause patternErrorCase = getSafeAssignErrorPattern(checkedExpr.pos,
                this.env.scope.owner, checkedExpr.equivalentErrorTypeList, isCheckPanic);

        BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr,
                new ArrayList<BLangMatchTypedBindingPatternClause>() {{
                    add(patternSuccessCase);
                    add(patternErrorCase);
                }});

        BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos,
                new ArrayList<BLangStatement>() {{
                    add(checkedExprVarDef);
add(matchStmt);
                }});

        // The statement expression evaluates to the temp var holding the checked value.
        BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef(
                checkedExpr.pos, checkedExprVar.symbol);

        BLangStatementExpression statementExpr = createStatementExpression(
                generatedStmtBlock, tempCheckedExprVarRef);
        statementExpr.type = checkedExpr.type;
        result = rewriteExpr(statementExpr);
    }

    @Override
    public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
        // A service constructor becomes a plain type-init of the service's type
        // definition; its annotation attachments are rewritten alongside.
        final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
                serviceConstructorExpr.serviceNode.serviceTypeDefinition.symbol.type);
        serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment ->  rewrite(attachment, env));
        result = rewriteExpr(typeInit);
    }

    @Override
    public void visit(BLangTypeTestExpr typeTestExpr) {
        BLangExpression expr = typeTestExpr.expr;
        if (types.isValueType(expr.type)) {
            // NOTE(review): the return value of addConversionExprIfRequired is discarded
            // here, so this call appears to have no effect — verify whether the intent
            // was `expr = addConversionExprIfRequired(expr, symTable.anyType);`.
            addConversionExprIfRequired(expr, symTable.anyType);
        }
        typeTestExpr.expr = rewriteExpr(expr);
        typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
        result = typeTestExpr;
    }

    @Override
    public void visit(BLangAnnotAccessExpr annotAccessExpr) {
        // a@annot is lowered to a synthetic ANNOT_ACCESS binary op whose rhs is the
        // annotation's string alias.
        BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
        binaryExpr.pos = annotAccessExpr.pos;
        binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
        binaryExpr.lhsExpr = annotAccessExpr.expr;
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
                annotAccessExpr.annotationSymbol.bvmAlias());
        binaryExpr.type = annotAccessExpr.type;
        binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
                new BInvokableType(Lists.of(binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type),
                        annotAccessExpr.type, null), null);
        result = rewriteExpr(binaryExpr);
    }

    @Override
    public void visit(BLangIsLikeExpr isLikeExpr) {
        isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
        result = isLikeExpr;
    }

    @Override
    public void visit(BLangStatementExpression bLangStatementExpression) {
        bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
        bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
        result = bLangStatementExpression;
    }

    @Override
    public void visit(BLangQueryExpr queryExpr) {
        // Query desugaring is delegated to the dedicated QueryDesugar pass.
        BLangStatementExpression stmtExpr = queryDesugar.desugarQueryExpr(queryExpr, env);
        result = rewrite(stmtExpr, env);
    }

    @Override
    public void visit(BLangQueryAction queryAction) {
        BLangStatementExpression stmtExpr = queryDesugar.desugarQueryAction(queryAction, env);
        result = rewrite(stmtExpr, env);
    }

    @Override
    public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
        jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
        result = jsonArrayLiteral;
    }

    @Override
    public void visit(BLangConstant constant) {
        BConstantSymbol constSymbol = constant.symbol;
        // Simple literal constants (numeric/boolean/nil) are replaced by a literal node
        // carrying the resolved constant value; other constants rewrite their expression.
        if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
            if (constSymbol.literalType.tag != TypeTags.NIL && constSymbol.value.value == null) {
                throw new IllegalStateException();
            }
            BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                    constSymbol.value.value);
            constant.expr = rewriteExpr(literal);
        } else {
            constant.expr = rewriteExpr(constant.expr);
        }
        constant.annAttachments.forEach(attachment ->  rewrite(attachment, env));
        result = constant;
    }

    @Override
    public void visit(BLangIgnoreExpr ignoreExpr) {
        result = ignoreExpr;
    }

    @Override
    public void visit(BLangConstRef constantRef) {
        // A constant reference is replaced by a literal of its resolved value.
        result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.type, constantRef.value);
    }

    // ------------------- foreach/iterator desugar helpers -------------------

    // Builds: `$iterator$ = <collection>.iterator()` as a variable definition.
    // (Body continues in the next chunk.)
    BLangSimpleVariableDef getIteratorVariableDefinition(DiagnosticPos pos, BVarSymbol collectionSymbol,
                                                         BInvokableSymbol iteratorInvokableSymbol,
                                                         boolean isIteratorFuncFromLangLib) {
        BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
        BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
        iteratorInvocation.pos = pos;
        iteratorInvocation.expr = dataReference;
// (continuation) Tail of getIteratorVariableDefinition(...): finishes wiring the
// iterator() invocation and binds its result to a synthesized `$iterator$` variable definition.
iteratorInvocation.symbol = iteratorInvokableSymbol;
iteratorInvocation.type = iteratorInvokableSymbol.retType;
iteratorInvocation.argExprs = Lists.of(dataReference);
iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
// Flag carried through to later phases: whether the iterator function is a lang-lib method.
iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
        iteratorInvokableSymbol.retType, this.env.scope.owner);
BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
        iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}

/**
 * Builds a variable definition of the form {@code $result$ = <iterator>.next()}.
 *
 * @param pos                source position used for all synthesized nodes
 * @param nillableResultType declared type of the {@code $result$} variable
 * @param iteratorSymbol     symbol of the previously defined {@code $iterator$} variable
 * @param resultSymbol       symbol to bind the {@code $result$} variable to
 * @return the synthesized {@code $result$} variable definition
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(DiagnosticPos pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$", nillableResultType,
            nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}

/**
 * Builds an assignment statement {@code $result$ = <iterator>.next()}, for use when the
 * result variable has already been defined elsewhere.
 */
BLangAssignment getIteratorNextAssignment(DiagnosticPos pos, BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    // NOTE(review): presumably lifts nil from the receiver type before invoking next() — confirm
    // the semantics of types.getSafeType(type, liftNil, liftError).
    nextInvocation.expr.type = types.getSafeType(nextInvocation.expr.type, true, false);
    return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}

/**
 * Builds the {@code <iterator>.next()} invocation node for the given iterator variable.
 * NOTE(review): assumes the iterator object type always declares a {@code next} function;
 * getNextFunc(...) returns null otherwise, which would NPE on the .symbol access below.
 */
BLangInvocation createIteratorNextInvocation(DiagnosticPos pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation
// (continuation) allocate the invocation node declared on the previous line
= (BLangInvocation) TreeBuilder.createInvocationNode();
nextInvocation.pos = pos;
nextInvocation.name = nextIdentifier;
nextInvocation.expr = iteratorReferenceInNext;
nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
nextInvocation.argExprs = nextInvocation.requiredArgs;
nextInvocation.symbol = nextFuncSymbol;
nextInvocation.type = nextFuncSymbol.retType;
return nextInvocation;
}

/**
 * Looks up the attached function named {@code next} on the given iterator object type.
 *
 * @return the matching attached function, or {@code null} if no function is named "next"
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
        if (bAttachedFunction.funcName.value.equals("next")) {
            return bAttachedFunction;
        }
    }
    return null;
}

/**
 * Builds a field access expression {@code <result>.value} with the given static type.
 */
BLangFieldBasedAccess getValueAccessExpression(DiagnosticPos pos, BType varType, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, "value");
    BLangFieldBasedAccess fieldBasedAccessExpression =
            ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
    fieldBasedAccessExpression.pos = pos;
    fieldBasedAccessExpression.type = varType;
    fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.type;
    return fieldBasedAccessExpression;
}

/**
 * Converts an expression-bodied arrow function into a block body containing a single
 * {@code return <expr>;} statement.
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = bLangArrowFunction.body.expr.pos;
    returnNode.setExpression(bLangArrowFunction.body.expr);
    blockNode.addStatement(returnNode);
    return blockNode;
}

/**
 * Builds an invocation of a function resolved by name from {@code symTable.rootScope}.
 * Only {@code requiredArgs} are populated; the caller supplies the return type.
 */
private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name =
(BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(functionName); invocationNode.name = name; invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol; invocationNode.type = retType; invocationNode.requiredArgs = args; return invocationNode; } private BLangInvocation createLangLibInvocationNode(String functionName, BLangExpression onExpr, List<BLangExpression> args, BType retType, DiagnosticPos pos) { BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode(); invocationNode.pos = pos; BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(functionName); name.pos = pos; invocationNode.name = name; invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); invocationNode.expr = onExpr; invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.type, names.fromString(functionName)); ArrayList<BLangExpression> requiredArgs = new ArrayList<>(); requiredArgs.add(onExpr); requiredArgs.addAll(args); invocationNode.requiredArgs = requiredArgs; invocationNode.type = retType != null ? 
retType : ((BInvokableSymbol) invocationNode.symbol).retType; invocationNode.langLibInvocation = true; return invocationNode; } private BLangArrayLiteral createArrayLiteralExprNode() { BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); expr.exprs = new ArrayList<>(); expr.type = new BArrayType(symTable.anyType); return expr; } private void visitFunctionPointerInvocation(BLangInvocation iExpr) { BLangVariableReference expr; if (iExpr.expr == null) { expr = new BLangSimpleVarRef(); } else { BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess(); fieldBasedAccess.expr = iExpr.expr; fieldBasedAccess.field = iExpr.name; expr = fieldBasedAccess; } expr.symbol = iExpr.symbol; expr.type = iExpr.symbol.type; BLangExpression rewritten = rewriteExpr(expr); result = new BFunctionPointerInvocation(iExpr, rewritten); } private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) { if (types.isValueType(expr.type)) { return expr; } if (expr.type.tag == TypeTags.ERROR) { return expr; } BLangInvocation cloneInvok = createLangLibInvocationNode("clone", expr, new ArrayList<>(), expr.type, expr.pos); return addConversionExprIfRequired(cloneInvok, lhsType); } private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) { if (types.isValueType(expr.type)) { return expr; } if (expr.type.tag == TypeTags.ERROR) { return expr; } BLangInvocation cloneInvok = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(), expr.type, expr.pos); return addConversionExprIfRequired(cloneInvok, lhsType); } @SuppressWarnings("unchecked") <E extends BLangNode> E rewrite(E node, SymbolEnv env) { if (node == null) { return null; } if (node.desugared) { return node; } SymbolEnv previousEnv = this.env; this.env = env; node.accept(this); BLangNode resultNode = this.result; this.result = null; resultNode.desugared = true; this.env = previousEnv; return (E) resultNode; } @SuppressWarnings("unchecked") <E 
// (continuation of the generic method header begun on the previous line)
extends BLangExpression> E rewriteExpr(E node) {
    // Desugars a single expression node; short-circuits null and already-desugared nodes.
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    // If an implicit conversion wrapper was attached, desugar the wrapper instead and
    // detach it so it is not applied twice.
    if (node.impConversionExpr != null) {
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}

/**
 * Desugars a single statement, maintaining the statement link chain ({@code currentLink})
 * so nested rewrites can locate their enclosing statement.
 */
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    BLangStatementLink link = new BLangStatementLink();
    link.parent = currentLink;
    currentLink = link;
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    link.statement = stmt;
    stmt.statementLink = link;
    currentLink = link.parent;
    return (E) stmt;
}

// Rewrites each statement of the list in place and returns the same list.
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewrite(nodeList.get(i), env));
    }
    return nodeList;
}

// Rewrites each node of the list in place and returns the same list.
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewrite(nodeList.get(i), env));
    }
    return nodeList;
}

// Rewrites each expression of the list in place and returns the same list.
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewriteExpr(nodeList.get(i)));
    }
    return nodeList;
}

// Creates a string literal node at the given position.
private BLangLiteral createStringLiteral(DiagnosticPos pos, String value) {
    BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
    stringLit.pos = pos;
    return stringLit;
}

// Creates an int literal node. Note: no source position is set by this helper.
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = value;
    literal.type = symTable.intType;
    return literal;
}

// Creates a byte literal node; the byte value is widened via Byte.toUnsignedInt.
private BLangLiteral createByteLiteral(DiagnosticPos pos, Byte value) {
    BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    byteLiteral.pos = pos;
    return byteLiteral;
}

// Wraps the given expression in a type conversion node targeting targetType
// (signature continues on the next line).
private BLangExpression
createTypeCastExpr(BLangExpression expr, BType targetType) { BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); conversionExpr.pos = expr.pos; conversionExpr.expr = expr; conversionExpr.type = targetType; conversionExpr.targetType = targetType; return conversionExpr; } private BType getElementType(BType type) { if (type.tag != TypeTags.ARRAY) { return type; } return getElementType(((BArrayType) type).getElementType()); } private void addReturnIfNotPresent(BLangInvokableNode invokableNode) { if (Symbols.isNative(invokableNode.symbol) || (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) { return; } BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body; if (invokableNode.workers.size() == 0 && invokableNode.symbol.type.getReturnType().isNullable() && (funcBody.stmts.size() < 1 || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) { DiagnosticPos invPos = invokableNode.pos; DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src, invPos.eLine, invPos.eLine, invPos.sCol, invPos.sCol); BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType); funcBody.addStatement(returnStmt); } } /** * Reorder the invocation arguments to match the original function signature. 
* * @param iExpr Function invocation expressions to reorder the arguments */ private void reorderArguments(BLangInvocation iExpr) { BSymbol symbol = iExpr.symbol; if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) { return; } BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol; List<BLangExpression> restArgs = iExpr.restArgs; int originalRequiredArgCount = iExpr.requiredArgs.size(); BLangExpression varargRef = null; BLangBlockStmt blockStmt = null; int restArgCount = restArgs.size(); if (restArgCount > 0 && restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR && originalRequiredArgCount < invokableSymbol.params.size()) { BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr; DiagnosticPos varargExpPos = expr.pos; BType varargVarType = expr.type; String varargVarName = DESUGARED_VARARG_KEY + this.varargCount++; BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID, varargVarType, this.env.scope.owner); varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol); BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol); BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos); varDef.var = var; varDef.type = varargVarType; blockStmt = createBlockStmt(varargExpPos); blockStmt.stmts.add(varDef); } if (!invokableSymbol.params.isEmpty()) { reorderNamedArgs(iExpr, invokableSymbol, varargRef); } if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) { if (invokableSymbol.restParam == null) { return; } BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); List<BLangExpression> exprs = new ArrayList<>(); BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type; BType elemType = arrayType.eType; for (BLangExpression restArg : restArgs) { 
exprs.add(addConversionExprIfRequired(restArg, elemType)); } arrayLiteral.exprs = exprs; arrayLiteral.type = arrayType; if (restArgCount != 0) { iExpr.restArgs = new ArrayList<>(); } iExpr.restArgs.add(arrayLiteral); return; } if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) { if (iExpr.requiredArgs.size() == originalRequiredArgCount) { return; } BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0); BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg); stmtExpression.type = firstNonRestArg.type; iExpr.requiredArgs.add(0, stmtExpression); if (invokableSymbol.restParam == null) { return; } BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount); BLangInvocation sliceInvocation = createLangLibInvocationNode(SLICE_LANGLIB_METHOD, varargRef, new ArrayList<BLangExpression>() {{ add(startIndex); }}, varargRef.type, varargRef.pos); restArgs.remove(0); restArgs.add(addConversionExprIfRequired(sliceInvocation, invokableSymbol.restParam.type)); return; } BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type; BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); arrayLiteral.type = restParamType; BType elemType = restParamType.eType; DiagnosticPos pos = restArgs.get(0).pos; List<BLangExpression> exprs = new ArrayList<>(); for (int i = 0; i < restArgCount - 1; i++) { exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType)); } arrayLiteral.exprs = exprs; BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode(); pushRestArgsExpr.pos = pos; pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1); String name = DESUGARED_VARARG_KEY + this.varargCount++; BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType, this.env.scope.owner); BLangSimpleVarRef arrayVarRef = 
ASTBuilderUtil.createVariableRef(pos, varSymbol); BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol); BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos); varDef.var = var; varDef.type = restParamType; BLangBlockStmt pushBlockStmt = createBlockStmt(pos); pushBlockStmt.stmts.add(varDef); BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt); BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef, new ArrayList<BLangExpression>() {{ add(pushRestArgsExpr); }}, restParamType, pos); pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1)); expressionStmt.expr = pushInvocation; BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef); stmtExpression.type = restParamType; iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }}; } private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) { List<BLangExpression> args = new ArrayList<>(); Map<String, BLangExpression> namedArgs = new HashMap<>(); iExpr.requiredArgs.stream() .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR) .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr)); List<BVarSymbol> params = invokableSymbol.params; int varargIndex = 0; BType varargType = null; boolean tupleTypedVararg = false; if (varargRef != null) { varargType = varargRef.type; tupleTypedVararg = varargType.tag == TypeTags.TUPLE; } for (int i = 0; i < params.size(); i++) { BVarSymbol param = params.get(i); if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) { args.add(iExpr.requiredArgs.get(i)); } else if (namedArgs.containsKey(param.name.value)) { args.add(namedArgs.get(param.name.value)); } else if (varargRef == null) { BLangExpression expr = new BLangIgnoreExpr(); expr.type = param.type; args.add(expr); } else { 
BLangIndexBasedAccess memberAccessExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode(); memberAccessExpr.pos = varargRef.pos; memberAccessExpr.expr = varargRef; memberAccessExpr.indexExpr = rewriteExpr(createIntLiteral(varargIndex)); memberAccessExpr.type = tupleTypedVararg ? ((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType; varargIndex++; args.add(addConversionExprIfRequired(memberAccessExpr, param.type)); } } iExpr.requiredArgs = args; } private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern( DiagnosticPos pos, BSymbol invokableSymbol, List<BType> equivalentErrorTypes, boolean isCheckPanicExpr) { BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType; Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ? ((BUnionType) enclosingFuncReturnType).getMemberTypes() : new LinkedHashSet<BType>() {{ add(enclosingFuncReturnType); }}; boolean returnOnError = equivalentErrorTypes.stream() .allMatch(errorType -> returnTypeSet.stream() .anyMatch(retType -> types.isAssignable(errorType, retType))); String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure"; BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(pos, patternFailureCaseVarName, symTable.errorType, null, new BVarSymbol(0, names.fromString(patternFailureCaseVarName), this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner)); BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(pos, patternFailureCaseVar.symbol); BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode(); patternBlockFailureCase.pos = pos; if (!isCheckPanicExpr && returnOnError) { BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode(); returnStmt.pos = pos; returnStmt.expr = patternFailureCaseVarRef; patternBlockFailureCase.stmts.add(returnStmt); } else { BLangPanic panicNode = (BLangPanic) 
TreeBuilder.createPanicNode(); panicNode.pos = pos; panicNode.expr = patternFailureCaseVarRef; patternBlockFailureCase.stmts.add(panicNode); } return ASTBuilderUtil.createMatchStatementPattern(pos, patternFailureCaseVar, patternBlockFailureCase); } private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(DiagnosticPos pos, BType lhsType, boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) { String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match"; BLangSimpleVariable patternSuccessCaseVar = ASTBuilderUtil.createVariable(pos, patternSuccessCaseVarName, lhsType, null, new BVarSymbol(0, names.fromString(patternSuccessCaseVarName), this.env.scope.owner.pkgID, lhsType, this.env.scope.owner)); BLangExpression varRefExpr; if (isVarDef) { varRefExpr = ASTBuilderUtil.createVariableRef(pos, varSymbol); } else { varRefExpr = lhsExpr; } BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(pos, patternSuccessCaseVar.symbol); BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(pos, varRefExpr, patternSuccessCaseVarRef, false); BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<BLangStatement>() {{ add(assignmentStmtSuccessCase); }}); return ASTBuilderUtil.createMatchStatementPattern(pos, patternSuccessCaseVar, patternBlockSuccessCase); } private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) { List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses; BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar); BLangIf currentIfNode = parentIfNode; for (int i = 1; i < patterns.size(); i++) { BLangMatchBindingPatternClause patternClause = patterns.get(i); if (i == patterns.size() - 1 && patternClause.isLastPattern) { currentIfNode.elseStmt = getMatchPatternElseBody(patternClause, matchExprVar); } else { currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar); 
currentIfNode = (BLangIf) currentIfNode.elseStmt; } } return parentIfNode; } /** * Generate an if-else statement from the given match statement. * * @param pattern match pattern statement node * @param matchExprVar variable node of the match expression * @return if else statement node */ private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) { BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol); if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) { BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar); return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null); } BType expectedType = matchExprVar.type; if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) { BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern; expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable); } if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) { BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) pattern; BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType); BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol); structuredPattern.bindingPatternVariable.expr = matchExprVarRef; BLangStatement varDefStmt; if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos, (BLangTupleVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos, (BLangRecordVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.ERROR_VARIABLE == 
structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos, (BLangErrorVariable) structuredPattern.bindingPatternVariable); } else { varDefStmt = ASTBuilderUtil .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable); } if (structuredPattern.typeGuardExpr != null) { BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos); blockStmt.addStatement(varDef); blockStmt.addStatement(varDefStmt); BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, structuredPattern.typeGuardExpr); stmtExpr.type = symTable.booleanType; ifCondition = ASTBuilderUtil .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND, (BOperatorSymbol) symResolver .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType, symTable.booleanType)); } else { structuredPattern.body.stmts.add(0, varDef); structuredPattern.body.stmts.add(1, varDefStmt); } } return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null); } private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) { BLangBlockStmt body; BLangMatchTypedBindingPatternClause patternClause = (BLangMatchTypedBindingPatternClause) pattern; if (patternClause.variable.name.value.equals(Names.IGNORE.value)) { return patternClause.body; } BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(patternClause.pos, matchExprVar.symbol); BLangExpression patternVarExpr = addConversionExprIfRequired(matchExprVarRef, patternClause.variable.type); BLangSimpleVariable patternVar = ASTBuilderUtil.createVariable(patternClause.pos, "", patternClause.variable.type, patternVarExpr, patternClause.variable.symbol); BLangSimpleVariableDef patternVarDef = ASTBuilderUtil.createVariableDef(patternVar.pos, patternVar); patternClause.body.stmts.add(0, patternVarDef); body = patternClause.body; return 
// (continuation) return the pattern body assembled by getMatchPatternBody on the previous line
body;
}

/**
 * Builds the else-branch body for the last (else-like) pattern of a desugared match.
 * For structured patterns, binds the match expression to the binding-pattern variable
 * and prepends the resulting variable definition to the pattern body.
 */
private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
                                               BLangSimpleVariable matchExprVar) {
    BLangBlockStmt body = pattern.body;
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);
        BLangMatchStructuredBindingPatternClause structuredPattern =
                (BLangMatchStructuredBindingPatternClause) pattern;
        structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
        BLangStatement varDefStmt;
        // Pick the variable-definition node kind matching the binding pattern's shape.
        if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredPattern.bindingPatternVariable);
        } else {
            varDefStmt = ASTBuilderUtil
                    .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
        }
        structuredPattern.body.stmts.add(0, varDefStmt);
        body = structuredPattern.body;
    }
    return body;
}

/**
 * Attaches an implicit conversion around {@code expr} when its type differs from
 * {@code lhsType}. Returns the expression unchanged for the exempt cases guarded below.
 */
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
    if (lhsType.tag == TypeTags.NONE) {
        return expr;
    }
    BType rhsType = expr.type;
    if (types.isSameType(rhsType, lhsType)) {
        return expr;
    }
    // Let the type checker attach an implicit cast if it knows how to.
    types.setImplicitCastExpr(expr, rhsType, lhsType);
    if (expr.impConversionExpr != null) {
        return expr;
    }
    // Pairs that require no explicit conversion node: nil into json …
    if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
        return expr;
    }
    // … nil target with a nullable source …
    if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
        return expr;
    }
    // … and tuple into array.
    if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
        return expr;
    }
BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); conversionExpr.expr = expr; conversionExpr.targetType = lhsType; conversionExpr.type = lhsType; conversionExpr.pos = expr.pos; conversionExpr.checkTypes = false; return conversionExpr; } private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause, BVarSymbol varSymbol) { BType patternType; switch (patternClause.getKind()) { case MATCH_STATIC_PATTERN_CLAUSE: BLangMatchStaticBindingPatternClause staticPattern = (BLangMatchStaticBindingPatternClause) patternClause; patternType = staticPattern.literal.type; break; case MATCH_STRUCTURED_PATTERN_CLAUSE: BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) patternClause; patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable); break; default: BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause; patternType = simplePattern.variable.type; break; } BLangExpression binaryExpr; BType[] memberTypes; if (patternType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) patternType; memberTypes = unionType.getMemberTypes().toArray(new BType[0]); } else { memberTypes = new BType[1]; memberTypes[0] = patternType; } if (memberTypes.length == 1) { binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]); } else { BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]); BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]); binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR, lhsExpr.type, rhsExpr.type)); for (int i = 2; i < memberTypes.length; i++) { lhsExpr = createPatternMatchBinaryExpr(patternClause, 
varSymbol, memberTypes[i]);
                // Fold each member test into a growing OR chain: prev || thisMemberTest.
                rhsExpr = binaryExpr;
                binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                        symTable.booleanType, OperatorKind.OR,
                        (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                                lhsExpr.type, rhsExpr.type));
            }
        }
        return binaryExpr;
    }

    /**
     * Computes the static type implied by a structured binding pattern.
     * <p>
     * Tuple patterns map to a {@code BTupleType} (with the rest variable's element type as the tuple rest
     * type), record patterns synthesize an anonymous {@code BRecordType} (registering its type definition and
     * init function in the current environment), and error patterns synthesize an anonymous
     * {@code BErrorType} with a generated detail record type. Any other pattern falls through to the
     * variable's own resolved type.
     *
     * @param bindingPatternVariable the binding-pattern variable to derive a type for
     * @return the derived (possibly freshly synthesized) type
     */
    private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
        if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
            List<BType> memberTypes = new ArrayList<>();
            for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
                memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
            }
            BTupleType tupleType = new BTupleType(memberTypes);
            if (tupleVariable.restVariable != null) {
                // Rest pattern binds to an array; the tuple's rest type is that array's element type.
                BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
                tupleType.restType = restArrayType.eType;
            }
            return tupleType;
        }
        if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
            BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
            // Synthesize a fresh anonymous record type symbol; recordCount keeps generated names unique.
            BRecordTypeSymbol recordSymbol =
                    Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++),
                                               env.enclPkg.symbol.pkgID, null, env.scope.owner);
            recordSymbol.initializerFunc = createRecordInitFunc();
            recordSymbol.scope = new Scope(recordSymbol);
            recordSymbol.scope.define(
                    names.fromString(recordSymbol.name.value + "."
                            + recordSymbol.initializerFunc.funcName.value),
                    recordSymbol.initializerFunc.symbol);

            List<BField> fields = new ArrayList<>();
            List<BLangSimpleVariable> typeDefFields = new ArrayList<>();

            // One required field per key in the record pattern; value types are derived recursively.
            for (int i = 0; i < recordVariable.variableList.size(); i++) {
                String fieldNameStr = recordVariable.variableList.get(i).key.value;
                Name fieldName = names.fromString(fieldNameStr);
                BType fieldType = getStructuredBindingPatternType(
                        recordVariable.variableList.get(i).valueBindingPattern);
                BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName,
                        env.enclPkg.symbol.pkgID, fieldType, recordSymbol);

                fields.add(new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
                typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
                recordSymbol.scope.define(fieldName, fieldSymbol);
            }

            BRecordType recordVarType = new BRecordType(recordSymbol);
            recordVarType.fields = fields;

            // A rest pattern constrains the open part; otherwise the record stays open over anydata.
            recordVarType.restFieldType = recordVariable.restParam != null ?
                        ((BMapType) ((BLangSimpleVariable) recordVariable.restParam).type).constraint :
                    symTable.anydataType;
            recordSymbol.type = recordVarType;
            recordVarType.tsymbol = recordSymbol;

            // Register the synthesized type definition (plus its init function) with the package.
            BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                    recordVarType, bindingPatternVariable.pos);
            recordTypeNode.initFunction = rewrite(
                    TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable), env);
            TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env);

            return recordVarType;
        }

        if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
            BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
            BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                    SymTag.ERROR, Flags.PUBLIC, names.fromString("$anonErrorType$" + errorCount++),
                    env.enclPkg.symbol.pkgID, null, null);
            BType detailType;
            if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
                // Only a rest detail binding: reuse the generic detail type, no synthesized record needed.
                detailType = symTable.detailType;
            } else {
                detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++);

                BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
                TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env);
            }
            BErrorType errorType = new BErrorType(errorTypeSymbol,
                    ((BErrorType) errorVariable.type).reasonType,
                    detailType);
            errorTypeSymbol.type = errorType;

            TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env);
            return errorType;
        }
        // Simple binding pattern: the type checker already resolved it.
        return bindingPatternVariable.type;
    }

    /**
     * Builds the record-type AST node describing an error pattern's detail fields. Fields with no bound
     * symbol get a synthesized public symbol typed as pureType.
     *
     * @param errorVariable the error binding pattern providing the detail entries
     * @param detailType    the already-created detail record type
     * @return a record type node covering the detail fields
     */
    private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
        List<BLangSimpleVariable> fieldList = new ArrayList<>();
        for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {
            BVarSymbol symbol = field.valueBindingPattern.symbol;
            if (symbol == null) {
                symbol = new BVarSymbol(
                        Flags.PUBLIC,
                        names.fromString(field.key.value + "$"),
                        this.env.enclPkg.packageID, symTable.pureType, null);
            }
            BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(
                    field.valueBindingPattern.pos,
                    symbol.name.value,
                    field.valueBindingPattern.type,
                    field.valueBindingPattern.expr,
                    symbol);
            fieldList.add(fieldVar);
        }
        return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
    }

    /**
     * Synthesizes the anonymous detail record type for an error binding pattern. The record is sealed only
     * when there is no rest detail binding; its rest field type is anydata.
     *
     * @param detail     explicit detail-key bindings of the pattern
     * @param restDetail rest binding, or null when absent
     * @param errorNo    unique suffix for the generated type name
     * @return the synthesized detail record type
     */
    private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                                   BLangSimpleVariable restDetail, int errorNo) {
        BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
                SymTag.RECORD,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + errorNo + "$detailType"),
                env.enclPkg.symbol.pkgID, null, null);

        detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
        detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
        detailRecordTypeSymbol.scope.define(
                names.fromString(detailRecordTypeSymbol.name.value + "."
+ detailRecordTypeSymbol.initializerFunc.funcName.value),
                detailRecordTypeSymbol.initializerFunc.symbol);

        BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
        detailRecordType.restFieldType = symTable.anydataType;

        // No rest binding means the pattern enumerates every detail key, so the record can be sealed.
        if (restDetail == null) {
            detailRecordType.sealed = true;
        }

        for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
            Name fieldName = names.fromIdNode(detailEntry.key);
            BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
            BVarSymbol fieldSym = new BVarSymbol(
                        Flags.PUBLIC, fieldName, detailRecordTypeSymbol.pkgID, fieldType, detailRecordTypeSymbol);
            detailRecordType.fields.add(new BField(fieldName, detailEntry.key.pos, fieldSym));
            detailRecordTypeSymbol.scope.define(fieldName, fieldSym);
        }

        return detailRecordType;
    }

    /**
     * Creates the attached no-arg, nil-returning init function descriptor used by synthesized record
     * type symbols.
     *
     * @return the attached init function
     */
    private BAttachedFunction createRecordInitFunc() {
        BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
        BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
                Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false);
        initFuncSymbol.retType = symTable.nilType;
        return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType);
    }

    /**
     * Wraps a resolved {@code BErrorType} in an AST type node.
     *
     * @param errorType the resolved error type
     * @return an error type node carrying {@code errorType}
     */
    BLangErrorType createErrorTypeNode(BErrorType errorType) {
        BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
        errorTypeNode.type = errorType;
        return errorTypeNode;
    }

    /**
     * Builds the boolean condition that decides whether a match clause fires for the matched value held in
     * {@code varSymbol}: a value-equality expression for static patterns, an is-like test for structured
     * patterns, a nil-equality test when the pattern type is nil, and an is-assignable test otherwise.
     *
     * @param patternClause the match clause being desugared
     * @param varSymbol     symbol of the temporary holding the matched value
     * @param patternType   static type implied by the clause's pattern
     * @return a boolean-typed condition expression
     */
    private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
                                                         BVarSymbol varSymbol, BType patternType) {
        DiagnosticPos pos = patternClause.pos;

        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

        if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == patternClause.getKind()) {
            BLangMatchStaticBindingPatternClause pattern = (BLangMatchStaticBindingPatternClause) patternClause;
            return createBinaryExpression(pos, varRef, pattern.literal);
        }

        if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == patternClause.getKind()) {
            return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
        }

        if (patternType == symTable.nilType) {
            BLangLiteral bLangLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
            return ASTBuilderUtil.createBinaryExpr(pos, varRef, bLangLiteral, symTable.booleanType,
                    OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
                            symTable.anyType, symTable.nilType));
        } else {
            return createIsAssignableExpression(pos, varSymbol, patternType);
        }
    }

    /**
     * Recursively desugars a static-pattern literal into a boolean expression over {@code varRef}: group
     * expressions are unwrapped, binary (OR) patterns become an OR of the two sub-conditions, the `_`
     * wildcard becomes an `is any` type test, and every other literal becomes an equality comparison
     * (falling back to type-set equality when no direct operator exists).
     *
     * @param pos        position for all generated nodes
     * @param varRef     reference to the matched value
     * @param expression the static pattern expression
     * @return a boolean-typed condition expression
     */
    private BLangExpression createBinaryExpression(DiagnosticPos pos, BLangSimpleVarRef varRef,
                                                   BLangExpression expression) {
        BLangBinaryExpr binaryExpr;
        if (NodeKind.GROUP_EXPR == expression.getKind()) {
            return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
        }
        if (NodeKind.BINARY_EXPR == expression.getKind()) {
            binaryExpr = (BLangBinaryExpr) expression;
            BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
            BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
            binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType,
                    OperatorKind.OR,
                    (BOperatorSymbol) symResolver
                            .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
        } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
            // The wildcard `_` matches anything: emit `varRef is any`.
            BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
            anyType.type = symTable.anyType;
            anyType.typeKind = TypeKind.ANY;
            return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
        } else {
            binaryExpr = ASTBuilderUtil
                    .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
            BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.type, expression.type);
            if (opSymbol == symTable.notFoundSymbol) {
                // No direct equality operator for the operand types: fall back to type-set equality.
                opSymbol = symResolver
                        .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.type,
                                binaryExpr);
            }
            binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
        }
        return binaryExpr;
    }

    /**
     * Creates a boolean `is-assignable` test of the matched value against {@code patternType}.
     */
    private BLangIsAssignableExpr createIsAssignableExpression(DiagnosticPos pos,
                                                               BVarSymbol varSymbol,
                                                               BType patternType) {
        //  _$$_ isassignable patternType
        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

        return ASTBuilderUtil.createIsAssignableExpr(pos, varRef, patternType, symTable.booleanType, names);
    }

    /**
     * Creates a boolean `is-like` (shape) test of {@code expr} against {@code type}.
     */
    private BLangIsLikeExpr createIsLikeExpression(DiagnosticPos pos, BLangExpression expr, BType type) {
        return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
    }

    /**
     * Converts a variable declaration into an assignment statement targeting a fresh var-ref of the same
     * symbol (used when hoisting declarations).
     */
    private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
        BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
        varRef.pos = variable.pos;
        varRef.variableName = variable.name;
        varRef.symbol = variable.symbol;
        varRef.type = variable.type;

        BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
        assignmentStmt.expr = variable.expr;
        assignmentStmt.pos = variable.pos;
        assignmentStmt.setVariable(varRef);
        return assignmentStmt;
    }

    /**
     * Builds (and rewrites within the init-function env) the `self.field = defaultValue` assignment used to
     * initialize an object field from its declaration.
     *
     * @param function the generated init function the assignment will live in
     * @param variable the field declaration supplying name, type and default expression
     * @param symbol   symbol of the receiver (`self`)
     * @return the rewritten assignment statement
     */
    private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                    BVarSymbol symbol) {
        BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(variable.pos, symbol);
        BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, variable.name);
        fieldAccess.symbol = variable.symbol;
        fieldAccess.type = variable.type;

        BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
        assignmentStmt.expr = variable.expr;
        assignmentStmt.pos = variable.pos;
        assignmentStmt.setVariable(fieldAccess);

        SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
        return rewrite(assignmentStmt, initFuncEnv);
    }

    /**
     * Appends a synthesized default (pass-through) pattern to a match expression covering every member type
     * of the matched expression that no existing pattern clause can accept.
     *
     * @param bLangMatchExpression the match expression to complete
     */
    private void addMatchExprDefaultCase(BLangMatchExpression
bLangMatchExpression) {
        List<BType> exprTypes;
        List<BType> unmatchedTypes = new ArrayList<>();

        // Expand a union matched-expression into its member types; otherwise use the single type.
        if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) bLangMatchExpression.expr.type;
            exprTypes = new ArrayList<>(unionType.getMemberTypes());
        } else {
            exprTypes = Lists.of(bLangMatchExpression.type);
        }

        // Collect the member types no existing pattern clause can accept.
        for (BType type : exprTypes) {
            boolean assignable = false;
            for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
                if (this.types.isAssignable(type, pattern.variable.type)) {
                    assignable = true;
                    break;
                }
            }

            if (!assignable) {
                unmatchedTypes.add(type);
            }
        }

        if (unmatchedTypes.isEmpty()) {
            return;
        }

        BType defaultPatternType;
        if (unmatchedTypes.size() == 1) {
            defaultPatternType = unmatchedTypes.get(0);
        } else {
            defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));
        }

        // Default clause simply binds the value and returns it unchanged.
        String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
        BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
                patternCaseVarName, defaultPatternType, null, new BVarSymbol(0, names.fromString(patternCaseVarName),
                        this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner));

        BLangMatchExprPatternClause defaultPattern =
                (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
        defaultPattern.variable = patternMatchCaseVar;
        defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol);
        defaultPattern.pos = bLangMatchExpression.pos;
        bLangMatchExpression.patternClauses.add(defaultPattern);
    }

    /**
     * Returns true when this access expression (or any access expression in its receiver chain) uses
     * safe navigation (`?.` nil-safe or error-safe). LHS accesses are handled elsewhere and report false.
     */
    private boolean safeNavigate(BLangAccessExpression accessExpr) {
        if (accessExpr.lhsVar || accessExpr.expr == null) {
            return false;
        }

        if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
            return true;
        }

        NodeKind kind = accessExpr.expr.getKind();
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            return safeNavigate((BLangAccessExpression) accessExpr.expr);
        }

        return false;
    }

    /**
     * Rewrites a safe-navigation access chain into a statement expression: a temp-result variable followed
     * by the nested match statements built by {@link #handleSafeNavigation}, evaluating to the temp result.
     * Resets the per-rewrite state (match stack, access stack, success pattern, pending assignment) before
     * returning.
     *
     * @param accessExpr the safely-navigated access expression
     * @return a statement expression with the original expression's type
     */
    private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
        BType originalExprType = accessExpr.type;
        // Create a temp variable to hold the intermediate result of the acces expression.
        String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
        BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName,
                accessExpr.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName),
                        this.env.scope.owner.pkgID, accessExpr.type, this.env.scope.owner));
        BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);

        // Create a chain of match statements
        handleSafeNavigation(accessExpr, accessExpr.type, tempResultVar);

        // The outermost match statement is the first one pushed while walking the chain bottom-up.
        BLangMatch matcEXpr = this.matchStmtStack.firstElement();
        BLangBlockStmt blockStmt =
                ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
        stmtExpression.type = originalExprType;

        // Reset the state maintained while desugaring this chain.
        this.matchStmtStack = new Stack<>();
        this.accessExprStack = new Stack<>();
        this.successPattern = null;
        this.safeNavigationAssignment = null;
        return stmtExpression;
    }

    /**
     * Recursively walks an access chain from the innermost receiver outwards, wrapping each safe-navigation
     * step in a match statement with nil/error handler patterns and a success pattern that carries the rest
     * of the chain. Non-safe steps only restore their original type (widening to include error for XML).
     *
     * @param accessExpr    current link of the access chain
     * @param type          result type of the overall chain (used for the generated match statements)
     * @param tempResultVar temp variable all branches assign their result into
     */
    private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type,
                                      BLangSimpleVariable tempResultVar) {
        if (accessExpr.expr == null) {
            return;
        }

        // If the parent of current expr is a safe navigation, then desugar the parent first.
        NodeKind kind = accessExpr.expr.getKind();
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR
                || kind == NodeKind.INVOCATION) {
            handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
        }

        // Plain (non-safe) access step: just restore its original type and patch any pending assignment.
        if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
            BType originalType = accessExpr.originalType;
            if (TypeTags.isXMLTypeTag(originalType.tag)) {
                // XML member access can still produce an error at runtime, so keep error in the type.
                accessExpr.type = BUnionType.create(null, originalType, symTable.errorType);
            } else {
                accessExpr.type = originalType;
            }
            if (this.safeNavigationAssignment != null) {
                this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.type);
            }
            return;
        }

        /*
         * If the field access is a safe navigation, create a match expression.
         * Then chain the current expression as the success-pattern of the parent
         * match expr, if available.
         * eg:
         * x but {                              <--- parent match expr
         *   error e => e,
         *   T t => t.y but {                   <--- current expr
         *            error e => e,
         *            R r => r.z
         *          }
         * }
         */
        BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr,
                new ArrayList<>());

        // Add pattern to lift nil
        if (accessExpr.nilSafeNavigation) {
            matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
            matchStmt.type = type;
        }

        // Add pattern to lift error, this is only needed in case of error lifting
        if (accessExpr.errorSafeNavigation) {
            matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
            matchStmt.type = type;
            matchStmt.pos = accessExpr.pos;
        }

        // Create the pattern for success scenario. i.e: not null and not error (if applicable).
        BLangMatchTypedBindingPatternClause successPattern =
                getSuccessPattern(accessExpr, tempResultVar, accessExpr.errorSafeNavigation);
        matchStmt.patternClauses.add(successPattern);
        this.matchStmtStack.push(matchStmt);
        if (this.successPattern != null) {
            // Nest this match statement inside the previous step's success branch.
            this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
        }
        this.successPattern = successPattern;
    }

    /**
     * Builds the `error e => tempResult = e` clause used by error-safe navigation.
     */
    private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
                                                                     BLangSimpleVariable tempResultVar) {
        String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error";
        BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(expr.pos, errorPatternVarName,
                symTable.errorType, null, new BVarSymbol(0, names.fromString(errorPatternVarName),
                        this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner));

        // match reason must be a const.
        BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, errorPatternVar.symbol);
        BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
        BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef,
assignmentRhsExpr, false);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt));
        BLangMatchTypedBindingPatternClause errorPattern = ASTBuilderUtil
                .createMatchStatementPattern(expr.pos, errorPatternVar, patternBody);
        return errorPattern;
    }

    /**
     * Builds a match-expression pattern that binds nil (var name `_`) and yields {@code expr}.
     */
    private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(DiagnosticPos pos,
                                                                           BLangExpression expr) {
        String nullPatternVarName = IGNORE.toString();
        BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(pos, nullPatternVarName,
                symTable.nilType, null, new BVarSymbol(0, names.fromString(nullPatternVarName),
                        this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner));

        BLangMatchExprPatternClause nullPattern =
                (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
        nullPattern.variable = errorPatternVar;
        nullPattern.expr = expr;
        nullPattern.pos = pos;
        return nullPattern;
    }

    /**
     * Builds the `() n => tempResult = n` clause used by nil-safe navigation.
     */
    private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
                                                                    BLangSimpleVariable tempResultVar) {
        String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null";
        BLangSimpleVariable nullPatternVar = ASTBuilderUtil.createVariable(expr.pos, nullPatternVarName,
                symTable.nilType, null, new BVarSymbol(0, names.fromString(nullPatternVarName),
                        this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner));

        BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, nullPatternVar.symbol);
        BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
        BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef,
                assignmentRhsExpr, false);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt));
        BLangMatchTypedBindingPatternClause nullPattern = ASTBuilderUtil
                .createMatchStatementPattern(expr.pos, nullPatternVar, patternBody);
        return nullPattern;
    }

    /**
     * Builds the success clause of a safe-navigation match: binds the receiver to a fresh variable of its
     * "safe" type (nil/error lifted out), retargets the access expression onto that variable, and assigns
     * the access result into the temp-result variable. Records the assignment so an enclosing non-safe step
     * can later patch its RHS.
     *
     * @param accessExpr    the safe-navigated access step being rewritten
     * @param tempResultVar temp variable receiving the result
     * @param liftError     whether the error type is also lifted from the receiver type
     * @return the success pattern clause
     */
    private BLangMatchTypedBindingPatternClause getSuccessPattern(BLangAccessExpression accessExpr,
                                                                  BLangSimpleVariable tempResultVar,
                                                                  boolean liftError) {
        BType type = types.getSafeType(accessExpr.expr.type, true, liftError);
        String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";

        BVarSymbol successPatternSymbol;
        if (type.tag == TypeTags.INVOKABLE) {
            successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0,
                    names.fromString(successPatternVarName), this.env.scope.owner.pkgID, type,
                    this.env.scope.owner);
        } else {
            successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
                    this.env.scope.owner.pkgID, type, this.env.scope.owner);
        }

        BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos,
                successPatternVarName, type, null, successPatternSymbol);

        // Retarget the access onto the freshly-bound safe value; this step is no longer safe-navigated.
        accessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
        accessExpr.errorSafeNavigation = false;
        accessExpr.nilSafeNavigation = false;

        // Type of the field access expression should be always taken from the child type.
        // Because the type assigned to expression contains the inherited error/nil types.
        if (TypeTags.isXMLTypeTag(accessExpr.expr.type.tag)) {
            accessExpr.type = BUnionType.create(null, accessExpr.originalType, symTable.errorType,
                    symTable.nilType);
        } else {
            accessExpr.type = accessExpr.originalType;
        }

        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);

        BLangExpression assignmentRhsExpr = addConversionExprIfRequired(accessExpr, tempResultVarRef.type);
        BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos,
                tempResultVarRef, assignmentRhsExpr, false);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));
        BLangMatchTypedBindingPatternClause successPattern =
                ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
        this.safeNavigationAssignment = assignmentStmt;
        return successPattern;
    }

    /**
     * Returns true when any receiver in an LHS access chain has a nullable type, i.e. the assignment
     * target needs LHS safe-navigation desugaring.
     */
    private boolean safeNavigateLHS(BLangExpression expr) {
        if (expr.getKind() != NodeKind.FIELD_BASED_ACCESS_EXPR && expr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
            return false;
        }

        BLangExpression varRef = ((BLangAccessExpression) expr).expr;
        if (varRef.type.isNullable()) {
            return true;
        }

        return safeNavigateLHS(varRef);
    }

    /**
     * Desugars an assignment through a nullable LHS access chain into a block: guard statements produced by
     * {@link #createLHSSafeNavigation} followed by the final assignment on a cloned (non-safe) access chain.
     *
     * @param accessExpr     the LHS access expression
     * @param rhsExpr        value being assigned
     * @param safeAssignment unused here; kept for the caller's signature
     * @return a block statement performing the guarded assignment
     */
    private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr,
                                                           BLangExpression rhsExpr,
                                                           boolean safeAssignment) {
        this.accessExprStack = new Stack<>();
        List<BLangStatement> stmts = new ArrayList<>();
        createLHSSafeNavigation(stmts, accessExpr.expr);
        BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos,
                cloneExpression(accessExpr), rhsExpr);
        stmts.add(assignment);
        return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts);
    }

    /**
     * Walks an LHS access chain bottom-up emitting, for every nullable link, an `if (link is ()) ...` guard
     * that either initializes a defaultable mapping in place or panics with a nil-reference error.
     * Invocation links are hoisted into intermediate variables first. The rewritten (non-nullable) link is
     * pushed on {@code accessExprStack} for the caller to splice back in.
     *
     * @param stmts statement list the guards are appended to
     * @param expr  current link of the LHS chain
     */
    private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) {
        NodeKind kind = expr.getKind();
        boolean root = false;
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR
                || kind == NodeKind.INVOCATION) {
            BLangAccessExpression accessExpr = (BLangAccessExpression) expr;
            createLHSSafeNavigation(stmts, accessExpr.expr);
            accessExpr.expr = accessExprStack.pop();
        } else {
            root = true;
        }

        // If the expression is an invocation, then create a temp var to store the invocation value, so that
        // invocation will happen only one time
        if (expr.getKind() == NodeKind.INVOCATION) {
            BLangInvocation invocation = (BLangInvocation) expr;
            BVarSymbol interMediateSymbol = new BVarSymbol(0,
                    names.fromString(GEN_VAR_PREFIX.value + "i_intermediate"), this.env.scope.owner.pkgID,
                    invocation.type, this.env.scope.owner);
            BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos,
                    interMediateSymbol.name.value, invocation.type, invocation, interMediateSymbol);
            BLangSimpleVariableDef intermediateVariableDefinition =
                    ASTBuilderUtil.createVariableDef(invocation.pos, intermediateVariable);
            stmts.add(intermediateVariableDefinition);

            expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol);
        }

        if (expr.type.isNullable()) {
            BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
            isNillTest.type = symTable.booleanType;

            BLangBlockStmt thenStmt =
ASTBuilderUtil.createBlockStmt(expr.pos);

            //Cloning the expression and set the nil lifted type.
            expr = cloneExpression(expr);
            expr.type = types.getSafeType(expr.type, true, false);

            if (isDefaultableMappingType(expr.type) && !root) { // TODO for records, type should be defaultable as well
                // This will properly get desugared later to a json literal
                BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
                jsonLiteral.type = expr.type;
                jsonLiteral.pos = expr.pos;
                BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
                        expr, jsonLiteral);
                thenStmt.addStatement(assignment);
            } else {
                // Non-defaultable (or root) link: panic with a nil-reference error instead.
                BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
                literal.value = ERROR_REASON_NULL_REFERENCE_ERROR;
                literal.type = symTable.stringType;

                BLangInvocation errorCtorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
                errorCtorInvocation.pos = expr.pos;
                errorCtorInvocation.argExprs.add(literal);
                errorCtorInvocation.requiredArgs.add(literal);
                errorCtorInvocation.type = symTable.errorType;
                errorCtorInvocation.symbol = symTable.errorConstructor;

                BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
                panicNode.expr = errorCtorInvocation;
                panicNode.pos = expr.pos;
                thenStmt.addStatement(panicNode);
            }

            BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null);
            stmts.add(ifelse);
        }

        accessExprStack.push(expr);
    }

    /**
     * Returns a fresh AST node for the nil (`()`) type.
     */
    BLangValueType getNillTypeNode() {
        BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        nillTypeNode.typeKind = TypeKind.NIL;
        nillTypeNode.type = symTable.nilType;
        return nillTypeNode;
    }

    /**
     * Shallow-clones a variable reference or access-expression chain; any other node kind is a programming
     * error here.
     */
    private BLangVariableReference cloneExpression(BLangExpression expr) {
        switch (expr.getKind()) {
            case SIMPLE_VARIABLE_REF:
                return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
            case FIELD_BASED_ACCESS_EXPR:
            case INDEX_BASED_ACCESS_EXPR:
            case INVOCATION:
                return cloneAccessExpr((BLangAccessExpression) expr);
            default:
                throw new IllegalStateException();
        }
    }

    /**
     * Clones an access-expression chain with safe-navigation flags cleared and receiver types nil/error
     * lifted, so the clone can be used as a plain (non-safe) access.
     * <p>
     * NOTE(review): the INVOCATION case assigns {@code accessExpr = null} and the field copies below would
     * then NPE — presumably INVOCATION never reaches this method in practice; confirm before relying on it.
     */
    private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
        if (originalAccessExpr.expr == null) {
            return originalAccessExpr;
        }

        BLangVariableReference varRef;
        NodeKind kind = originalAccessExpr.expr.getKind();
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR
                || kind == NodeKind.INVOCATION) {
            varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
        } else {
            varRef = cloneExpression(originalAccessExpr.expr);
        }
        varRef.type = types.getSafeType(originalAccessExpr.expr.type, true, false);

        BLangAccessExpression accessExpr;
        switch (originalAccessExpr.getKind()) {
            case FIELD_BASED_ACCESS_EXPR:
                accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
                        ((BLangFieldBasedAccess) originalAccessExpr).field);
                break;
            case INDEX_BASED_ACCESS_EXPR:
                accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
                        ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
                break;
            case INVOCATION:
                // TODO: Check how to handle this?
                accessExpr = null;
                break;
            default:
                throw new IllegalStateException();
        }

        accessExpr.originalType = originalAccessExpr.originalType;
        accessExpr.pos = originalAccessExpr.pos;
        accessExpr.lhsVar = originalAccessExpr.lhsVar;
        accessExpr.symbol = originalAccessExpr.symbol;
        accessExpr.errorSafeNavigation = false;
        accessExpr.nilSafeNavigation = false;

        // Type of the field access expression should be always taken from the child type.
        // Because the type assigned to expression contains the inherited error/nil types.
        accessExpr.type = originalAccessExpr.originalType;
        return accessExpr;
    }

    /**
     * Returns {@code expr + 1} as an int binary expression (used to shift an int-range start bound).
     */
    private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
        BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
        return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType,
                OperatorKind.ADD,
                (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
                        symTable.intType,
                        symTable.intType));
    }

    /**
     * Returns {@code expr - 1} as an int binary expression (used to shift an int-range end bound).
     */
    private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
        BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
        return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType,
                OperatorKind.SUB,
                (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
                        symTable.intType,
                        symTable.intType));
    }

    /**
     * Creates a boolean literal AST node with the given value.
     */
    private BLangLiteral getBooleanLiteral(boolean value) {
        BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
        literal.value = value;
        literal.type = symTable.booleanType;
        return literal;
    }

    /**
     * Returns true when the nil-lifted form of {@code type} is a mapping type (json/map/record) that can be
     * default-initialized with an empty literal.
     */
    private boolean isDefaultableMappingType(BType type) {
        switch (types.getSafeType(type, true, false).tag) {
            case TypeTags.JSON:
            case TypeTags.MAP:
            case TypeTags.RECORD:
                return true;
            default:
                return false;
        }
    }

    /**
     * Creates, attaches and rewrites the generated `$init$` function for an object type node; the function
     * returns nil.
     *
     * @param structureTypeNode the object type node
     * @param env               enclosing symbol environment
     * @return the rewritten generated init function
     */
    private BLangFunction createInitFunctionForObjectType(BLangObjectTypeNode structureTypeNode, SymbolEnv env) {
        BLangFunction initFunction =
                TypeDefBuilderHelper.createInitFunctionForStructureType(structureTypeNode, env,
                                                                        Names.GENERATED_INIT_SUFFIX,
                                                                        names, symTable);
        BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) structureTypeNode.type.tsymbol);
        typeSymbol.generatedInitializerFunc = new BAttachedFunction(Names.GENERATED_INIT_SUFFIX,
                                                                    initFunction.symbol,
                                                                    (BInvokableType) initFunction.type);
        structureTypeNode.generatedInitFunction = initFunction;
        initFunction.returnTypeNode.type = symTable.nilType;
        return rewrite(initFunction, env);
    }

    private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
        /*
         * Desugar (lhsExpr && rhsExpr) to following if-else:
         *
         * logical AND:
         * -------------
         * T $result$;
         * if (lhsExpr) {
         *    $result$ = rhsExpr;
         * } else {
         *    $result$ = false;
         * }
         *
         * logical OR:
         * -------------
         * T $result$;
         * if (lhsExpr) {
         *    $result$ = true;
         * } else {
         *    $result$ = rhsExpr;
         * }
         *
         */
        BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.type, null, binaryExpr.pos);
        BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
        BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);

        // Create then assignment: RHS for AND (must still evaluate), constant true for OR (short-circuit).
        BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
        BLangExpression thenResult;
        if (binaryExpr.opKind == OperatorKind.AND) {
            thenResult = binaryExpr.rhsExpr;
        } else {
            thenResult = getBooleanLiteral(true);
        }
        BLangAssignment
thenAssignment = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
        thenBody.addStatement(thenAssignment);

        // Create else assignment: constant false for AND (short-circuit), RHS for OR.
        BLangExpression elseResult;
        BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
        if (binaryExpr.opKind == OperatorKind.AND) {
            elseResult = getBooleanLiteral(false);
        } else {
            elseResult = binaryExpr.rhsExpr;
        }
        BLangAssignment elseAssignment =
                ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
        elseBody.addStatement(elseAssignment);

        // Wrap the if-else in a block and evaluate to $result$.
        BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
        BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
        BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
        stmtExpr.type = binaryExpr.type;

        result = rewriteExpr(stmtExpr);
    }

    /**
     * Split package init function into several smaller functions.
     *
     * @param packageNode package node
     * @param env symbol environment
     * @return initial init function but trimmed in size
     */
    private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) {
        int methodSize = INIT_METHOD_SPLIT_SIZE;
        BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body;
        // Splitting is only needed for the JVM backend and only when the body is large enough.
        if (funcBody.stmts.size() < methodSize || !isJvmTarget) {
            return packageNode.initFunction;
        }
        BLangFunction initFunction = packageNode.initFunction;

        List<BLangFunction> generatedFunctions = new ArrayList<>();
        List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts);
        funcBody.stmts.clear();
        BLangFunction newFunc = initFunction;
        BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body;

        // Phase 1: statements before the first variable definition, chunked every methodSize statements.
        int varDefIndex = 0;
        for (int i = 0; i < stmts.size(); i++) {
            if (stmts.get(i).getKind() == NodeKind.VARIABLE_DEF) {
                break;
            }
            varDefIndex++;
            if (i > 0 && i % methodSize == 0) {
                generatedFunctions.add(newFunc);
                newFunc = createIntermediateInitFunction(packageNode, env);
                newFuncBody = (BLangBlockFunctionBody) newFunc.body;
                symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
            }
            newFuncBody.stmts.add(stmts.get(i));
        }

        // Phase 2: buffer statements and flush a chunk whenever a service-constructor assignment pushes the
        // current function past methodSize. Listener assignments terminate this phase (they must stay with
        // the remaining statements in phase 3).
        List<BLangStatement> chunkStmts = new ArrayList<>();
        for (int i = varDefIndex; i < stmts.size(); i++) {
            BLangStatement stmt = stmts.get(i);
            chunkStmts.add(stmt);
            varDefIndex++;
            if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                    (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) &&
                    (newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) {
                // Note: the size condition is already part of the guard above, so the flush is unconditional
                // here (the original code re-tested the same condition in a redundant inner if).
                generatedFunctions.add(newFunc);
                newFunc = createIntermediateInitFunction(packageNode, env);
                newFuncBody = (BLangBlockFunctionBody) newFunc.body;
                symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
                newFuncBody.stmts.addAll(chunkStmts);
                chunkStmts.clear();
            } else if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                    (((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) &&
                    Symbols.isFlagOn(
                            ((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags,
                            Flags.LISTENER)
            ) {
                break;
            }
        }
        newFuncBody.stmts.addAll(chunkStmts);

        // Phase 3: whatever remains after the listener boundary, again chunked every methodSize statements.
        for (int i = varDefIndex; i < stmts.size(); i++) {
            if (i > 0 && i % methodSize == 0) {
                generatedFunctions.add(newFunc);
                newFunc = createIntermediateInitFunction(packageNode, env);
                newFuncBody = (BLangBlockFunctionBody) newFunc.body;
                symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
            }
            newFuncBody.stmts.add(stmts.get(i));
        }
        generatedFunctions.add(newFunc);

        // Chain the generated functions: each one ends with `check nextFunc();`.
        for (int j = 0; j < generatedFunctions.size() - 1; j++) {
            BLangFunction thisFunction = generatedFunctions.get(j);

            BLangCheckedExpr checkedExpr =
                    ASTBuilderUtil.createCheckExpr(initFunction.pos,
                                                   createInvocationNode(generatedFunctions.get(j + 1).name.value,
                                                                        new ArrayList<>(), symTable.errorOrNilType),
                                                   symTable.nilType);
            checkedExpr.equivalentErrorTypeList.add(symTable.errorType);

            BLangExpressionStmt expressionStmt = ASTBuilderUtil
                    .createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body);
            expressionStmt.expr = checkedExpr;
            expressionStmt.expr.pos = initFunction.pos;

            // The first function is the original init function and is rewritten by the caller.
            if (j > 0) {
                thisFunction = rewrite(thisFunction, env);
                packageNode.functions.add(thisFunction);
                packageNode.topLevelNodes.add(thisFunction);
            }
        }

        if (generatedFunctions.size() > 1) {
            // Rewrite the last function even though it has no successor call.
            BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1);
            lastFunc = rewrite(lastFunc, env);
            packageNode.functions.add(lastFunc);
            packageNode.topLevelNodes.add(lastFunc);
        }

        return generatedFunctions.get(0);
    }

    /**
     * Create an intermediate package init function.
     *
     * @param pkgNode package node
     * @param env symbol environment of package
     */
    private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) {
        String alias = pkgNode.symbol.pkgID.toString();
        // initFuncIndex keeps generated init-function names unique within the package.
        BLangFunction initFunction = ASTBuilderUtil
                .createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
                                                        new Name(Names.INIT_FUNCTION_SUFFIX.value
                                                                + this.initFuncIndex++), symTable);
        // Create invokable symbol for init function
        createInvokableSymbol(initFunction, env);
        return initFunction;
    }

    /**
     * Returns the rest parameter's type of an invokable symbol, or null when absent.
     */
    private BType getRestType(BInvokableSymbol invokableSymbol) {
        if (invokableSymbol != null && invokableSymbol.restParam != null) {
            return invokableSymbol.restParam.type;
        }
        return null;
    }

    /**
     * Returns the rest parameter's type of a function node, or null when absent.
     */
    private BType getRestType(BLangFunction function) {
        if (function != null && function.restParam != null) {
            return function.restParam.type;
        }
        return null;
    }

    /**
     * Returns the rest parameter's symbol of a function node, or null when absent.
     */
    private BVarSymbol getRestSymbol(BLangFunction function) {
        if (function != null && function.restParam != null) {
            return function.restParam.symbol;
        }
        return null;
    }

    /**
     * Returns true for a key-value mapping field whose key is a computed expression ({@code [expr]: value}).
     */
    private boolean isComputedKey(RecordLiteralNode.RecordField field) {
        if (!field.isKeyValueField()) {
            return false;
        }
        return ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
    }

    /**
     * Desugars a mapping constructor into a statement expression: declare an empty struct/map literal in a
     * temp variable, then populate it with one member store per key-value/var-ref field and a foreach over
     * the entries of each spread-operator field; the expression evaluates to the temp variable.
     *
     * @param mappingConstructorExpr the mapping constructor to rewrite
     * @return a statement expression producing the populated mapping
     */
    private BLangStatementExpression rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
        List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;

        BType type = mappingConstructorExpr.type;
        DiagnosticPos pos = mappingConstructorExpr.pos;

        // Records get a struct literal; every other mapping type gets a map literal.
        BLangRecordLiteral recordLiteral = type.tag == TypeTags.RECORD ? new BLangStructLiteral(pos, type) :
                new BLangMapLiteral(pos, type);

        String name = DESUGARED_MAPPING_CONSTR_KEY + this.annonVarCount++;
        BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                                              this.env.scope.owner);
        BLangSimpleVariable var = createVariable(pos, name, type, recordLiteral, varSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
        varDef.var = var;
        varDef.type = type;

        BLangBlockStmt blockStmt = createBlockStmt(pos);
        blockStmt.stmts.add(varDef);

        BLangSimpleVarRef mappingVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

        for (RecordLiteralNode.RecordField field : fields) {
            if (field.isKeyValueField()) {
                BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                        (BLangRecordLiteral.BLangRecordKeyValueField) field;
                BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
                BLangExpression keyExpr = key.expr;

                // Index expression: computed keys as-is, identifier keys as string literals, else the
                // literal key itself.
                BLangExpression indexExpr = key.computedKey ? keyExpr :
                        keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF ?
                                createStringLiteral(pos, ((BLangSimpleVarRef) keyExpr).variableName.value) :
                                ((BLangLiteral) keyExpr);; // NOTE(review): stray second ';' (empty statement)

                addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef, indexExpr, keyValueField.valueExpr);
            } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                // Shorthand `{x}` field: key is the variable name, value is the variable itself.
                BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
                addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef,
                                              createStringLiteral(pos, varRefField.variableName.value),
                                              varRefField);
            } else {
                // Spread-operator field: foreach over the source mapping's [key, value] entries.
                BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                        (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;

                BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
                foreach.pos = pos;

                foreach.collection = generateMapEntriesInvocation(spreadOpField.expr, spreadOpField.expr.type);
                types.setForeachTypedBindingPatternType(foreach);

                BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                                                                                    foreach.varType);
                foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                                                        this.env.scope.owner.pkgID, foreachVariable.type,
                                                        this.env.scope.owner);
                BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
                foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
                foreach.isDeclaredWithVar = true;
                BLangBlockStmt foreachBodyBlock = ASTBuilderUtil.createBlockStmt(pos);

                // The bound foreach variable is a [key, value] tuple; index 0 is the key, index 1 the value.
                BTupleType foreachVarRefType = (BTupleType) foreachVarRef.type;

                BLangIndexBasedAccess indexExpr =
                        (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
                indexExpr.pos = pos;
                indexExpr.expr = foreachVarRef;
                indexExpr.indexExpr = rewriteExpr(createIntLiteral(0));
                indexExpr.type = foreachVarRefType.tupleTypes.get(0);

                BLangIndexBasedAccess valueExpr =
                        (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
                valueExpr.pos = pos;
                valueExpr.expr = foreachVarRef;
                valueExpr.indexExpr = rewriteExpr(createIntLiteral(1));
                valueExpr.type = foreachVarRefType.tupleTypes.get(1);
addMemberStoreForKeyValuePair(pos, foreachBodyBlock, mappingVarRef, indexExpr, valueExpr); foreach.body = foreachBodyBlock; blockStmt.addStatement(foreach); } } BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, mappingVarRef); stmtExpression.type = type; return stmtExpression; } private void addMemberStoreForKeyValuePair(DiagnosticPos pos, BLangBlockStmt blockStmt, BLangExpression mappingVarRef, BLangExpression indexExpr, BLangExpression value) { BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt); assignmentStmt.expr = rewriteExpr(value); BLangIndexBasedAccess indexAccessNode = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode(); indexAccessNode.pos = pos; indexAccessNode.expr = mappingVarRef; indexAccessNode.indexExpr = rewriteExpr(indexExpr); indexAccessNode.type = value.type; assignmentStmt.varRef = indexAccessNode; } private Map<String, BLangExpression> getKeyValuePairs(BLangStatementExpression desugaredMappingConst) { List<BLangStatement> stmts = ((BLangBlockStmt) desugaredMappingConst.stmt).stmts; Map<String, BLangExpression> keyValuePairs = new HashMap<>(); for (int i = 1; i < stmts.size(); i++) { BLangAssignment assignmentStmt = (BLangAssignment) stmts.get(i); BLangExpression indexExpr = ((BLangIndexBasedAccess) assignmentStmt.varRef).indexExpr; if (indexExpr.getKind() != NodeKind.LITERAL) { continue; } keyValuePairs.put((String) ((BLangLiteral) indexExpr).value, assignmentStmt.expr); } return keyValuePairs; } }
class Desugar extends BLangNodeVisitor { private static final CompilerContext.Key<Desugar> DESUGAR_KEY = new CompilerContext.Key<>(); private static final String QUERY_TABLE_WITH_JOIN_CLAUSE = "queryTableWithJoinClause"; private static final String QUERY_TABLE_WITHOUT_JOIN_CLAUSE = "queryTableWithoutJoinClause"; private static final String BASE_64 = "base64"; private static final String ERROR_REASON_FUNCTION_NAME = "reason"; private static final String ERROR_DETAIL_FUNCTION_NAME = "detail"; private static final String TO_STRING_FUNCTION_NAME = "toString"; private static final String LENGTH_FUNCTION_NAME = "length"; private static final String ERROR_REASON_NULL_REFERENCE_ERROR = "NullReferenceException"; private static final String CONSTRUCT_FROM = "constructFrom"; private static final String SLICE_LANGLIB_METHOD = "slice"; private static final String PUSH_LANGLIB_METHOD = "push"; private static final String DESUGARED_VARARG_KEY = "$vararg$"; public static final String XML_INTERNAL_SELECT_DESCENDANTS = "selectDescendants"; public static final String XML_INTERNAL_CHILDREN = "children"; public static final String XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT = "getFilteredChildrenFlat"; public static final String XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING = "getElementNameNilLifting"; public static final String XML_INTERNAL_GET_ATTRIBUTE = "getAttribute"; public static final String XML_INTERNAL_GET_ELEMENTS = "getElements"; private SymbolTable symTable; private SymbolResolver symResolver; private final SymbolEnter symbolEnter; private ClosureDesugar closureDesugar; private QueryDesugar queryDesugar; private AnnotationDesugar annotationDesugar; private Types types; private Names names; private ServiceDesugar serviceDesugar; private BLangNode result; private NodeCloner nodeCloner; private SemanticAnalyzer semanticAnalyzer; private BLangStatementLink currentLink; public Stack<BLangLockStmt> enclLocks = new Stack<>(); private SymbolEnv env; private int lambdaFunctionCount = 0; 
private int transactionIndex = 0; private int recordCount = 0; private int errorCount = 0; private int annonVarCount = 0; private int initFuncIndex = 0; private int indexExprCount = 0; private int letCount = 0; private int varargCount = 0; private Stack<BLangMatch> matchStmtStack = new Stack<>(); Stack<BLangExpression> accessExprStack = new Stack<>(); private BLangMatchTypedBindingPatternClause successPattern; private BLangAssignment safeNavigationAssignment; static boolean isJvmTarget = false; public static Desugar getInstance(CompilerContext context) { Desugar desugar = context.get(DESUGAR_KEY); if (desugar == null) { desugar = new Desugar(context); } return desugar; } private Desugar(CompilerContext context) { isJvmTarget = true; context.put(DESUGAR_KEY, this); this.symTable = SymbolTable.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.symbolEnter = SymbolEnter.getInstance(context); this.closureDesugar = ClosureDesugar.getInstance(context); this.queryDesugar = QueryDesugar.getInstance(context); this.annotationDesugar = AnnotationDesugar.getInstance(context); this.types = Types.getInstance(context); this.names = Names.getInstance(context); this.names = Names.getInstance(context); this.serviceDesugar = ServiceDesugar.getInstance(context); this.nodeCloner = NodeCloner.getInstance(context); this.semanticAnalyzer = SemanticAnalyzer.getInstance(context); } public BLangPackage perform(BLangPackage pkgNode) { annotationDesugar.initializeAnnotationMap(pkgNode); SymbolEnv env = this.symTable.pkgEnvMap.get(pkgNode.symbol); return rewrite(pkgNode, env); } private void addAttachedFunctionsToPackageLevel(BLangPackage pkgNode, SymbolEnv env) { for (BLangTypeDefinition typeDef : pkgNode.typeDefinitions) { if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) { continue; } if (typeDef.symbol.tag == SymTag.OBJECT) { BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDef.typeNode; objectTypeNode.functions.forEach(f -> { if 
(!pkgNode.objAttachedFunctions.contains(f.symbol)) { pkgNode.functions.add(f); pkgNode.topLevelNodes.add(f); } }); if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) { continue; } BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(objectTypeNode, env); tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction, tempGeneratedInitFunction.symbol.scope, env); this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env); objectTypeNode.generatedInitFunction = tempGeneratedInitFunction; pkgNode.functions.add(objectTypeNode.generatedInitFunction); pkgNode.topLevelNodes.add(objectTypeNode.generatedInitFunction); if (objectTypeNode.initFunction != null) { pkgNode.functions.add(objectTypeNode.initFunction); pkgNode.topLevelNodes.add(objectTypeNode.initFunction); } } else if (typeDef.symbol.tag == SymTag.RECORD) { BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDef.typeNode; recordTypeNode.initFunction = rewrite( TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable), env); pkgNode.functions.add(recordTypeNode.initFunction); pkgNode.topLevelNodes.add(recordTypeNode.initFunction); } } } private BLangFunction createGeneratedInitializerFunction(BLangObjectTypeNode objectTypeNode, SymbolEnv env) { BLangFunction generatedInitFunc = createInitFunctionForObjectType(objectTypeNode, env); if (objectTypeNode.initFunction == null) { return generatedInitFunc; } BAttachedFunction initializerFunc = ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc; BAttachedFunction generatedInitializerFunc = ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc; addRequiredParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc, generatedInitializerFunc); addRestParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc, generatedInitializerFunc); generatedInitFunc.returnTypeNode = objectTypeNode.initFunction.returnTypeNode; 
generatedInitializerFunc.symbol.retType = generatedInitFunc.returnTypeNode.type; ((BInvokableType) generatedInitFunc.symbol.type).paramTypes = initializerFunc.type.paramTypes; ((BInvokableType) generatedInitFunc.symbol.type).retType = initializerFunc.type.retType; ((BInvokableType) generatedInitFunc.symbol.type).restType = initializerFunc.type.restType; generatedInitializerFunc.type = initializerFunc.type; generatedInitFunc.desugared = false; return generatedInitFunc; } private void addRequiredParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc, BAttachedFunction generatedInitializerFunc) { if (initFunction.requiredParams.isEmpty()) { return; } for (BLangSimpleVariable requiredParameter : initFunction.requiredParams) { BLangSimpleVariable var = ASTBuilderUtil.createVariable(initFunction.pos, requiredParameter.name.getValue(), requiredParameter.type, createRequiredParamExpr(requiredParameter.expr), new BVarSymbol(0, names.fromString(requiredParameter.name.getValue()), requiredParameter.symbol.pkgID, requiredParameter.type, requiredParameter.symbol.owner)); generatedInitFunc.requiredParams.add(var); generatedInitializerFunc.symbol.params.add(var.symbol); } } private BLangExpression createRequiredParamExpr(BLangExpression expr) { if (expr == null) { return null; } if (expr.getKind() == NodeKind.LAMBDA) { BLangFunction func = ((BLangLambdaFunction) expr).function; return createLambdaFunction(func.pos, func.name.value, func.requiredParams, func.returnTypeNode, func.body); } BLangExpression expression = this.nodeCloner.clone(expr); if (expression.getKind() == NodeKind.ARROW_EXPR) { BLangIdentifier func = (BLangIdentifier) ((BLangArrowFunction) expression).functionName; ((BLangArrowFunction) expression).functionName = ASTBuilderUtil.createIdentifier(func.pos, "$" + func.getValue() + "$"); } return expression; } private void addRestParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc, 
BAttachedFunction generatedInitializerFunc) { if (initFunction.restParam == null) { return; } BLangSimpleVariable restParam = initFunction.restParam; generatedInitFunc.restParam = ASTBuilderUtil.createVariable(initFunction.pos, restParam.name.getValue(), restParam.type, null, new BVarSymbol(0, names.fromString(restParam.name.getValue()), restParam.symbol.pkgID, restParam.type, restParam.symbol.owner)); generatedInitializerFunc.symbol.restParam = generatedInitFunc.restParam.symbol; } /** * Create package init functions. * * @param pkgNode package node * @param env symbol environment of package */ private void createPackageInitFunctions(BLangPackage pkgNode, SymbolEnv env) { String alias = pkgNode.symbol.pkgID.toString(); pkgNode.initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias, Names.INIT_FUNCTION_SUFFIX, symTable); BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body; for (BLangXMLNS xmlns : pkgNode.xmlnsList) { initFnBody.addStatement(createNamespaceDeclrStatement(xmlns)); } pkgNode.startFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias, Names.START_FUNCTION_SUFFIX, symTable); pkgNode.stopFunction = ASTBuilderUtil.createInitFunctionWithNilReturn(pkgNode.pos, alias, Names.STOP_FUNCTION_SUFFIX); createInvokableSymbol(pkgNode.initFunction, env); createInvokableSymbol(pkgNode.startFunction, env); createInvokableSymbol(pkgNode.stopFunction, env); } private void addUserDefinedModuleInitInvocationAndReturn(BLangPackage pkgNode) { Optional<BLangFunction> userDefInitOptional = pkgNode.functions.stream() .filter(bLangFunction -> !bLangFunction.attachedFunction && bLangFunction.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value)) .findFirst(); BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body; if (!userDefInitOptional.isPresent()) { addNilReturnStatement(initFnBody); return; } BLangFunction userDefInit = 
userDefInitOptional.get(); BLangInvocation userDefInitInvocation = (BLangInvocation) TreeBuilder.createInvocationNode(); userDefInitInvocation.pos = pkgNode.initFunction.pos; BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(userDefInit.name.value); userDefInitInvocation.name = name; userDefInitInvocation.symbol = userDefInit.symbol; BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); pkgAlias.setLiteral(false); pkgAlias.setValue(pkgNode.packageID.name.value); userDefInitInvocation.pkgAlias = pkgAlias; userDefInitInvocation.type = userDefInit.returnTypeNode.type; userDefInitInvocation.requiredArgs = Collections.emptyList(); BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode(); returnStmt.pos = pkgNode.initFunction.pos; returnStmt.expr = userDefInitInvocation; initFnBody.stmts.add(returnStmt); } /** * Create invokable symbol for function. * * @param bLangFunction function node * @param env Symbol environment */ private void createInvokableSymbol(BLangFunction bLangFunction, SymbolEnv env) { BType returnType = bLangFunction.returnTypeNode.type == null ? symResolver.resolveTypeNode(bLangFunction.returnTypeNode, env) : bLangFunction.returnTypeNode.type; BInvokableType invokableType = new BInvokableType(new ArrayList<>(), getRestType(bLangFunction), returnType, null); BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(bLangFunction.flagSet), new Name(bLangFunction.name.value), env.enclPkg.packageID, invokableType, env.enclPkg.symbol, true); functionSymbol.retType = returnType; for (BLangVariable param : bLangFunction.requiredParams) { functionSymbol.params.add(param.symbol); } functionSymbol.scope = new Scope(functionSymbol); bLangFunction.symbol = functionSymbol; } /** * Add nil return statement. 
* * @param bLangBlockStmt block statement node */ private void addNilReturnStatement(BlockNode bLangBlockStmt) { BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(((BLangNode) bLangBlockStmt).pos, symTable.nilType); bLangBlockStmt.addStatement(returnStmt); } /** * Create namespace declaration statement for XMNLNS. * * @param xmlns XMLNS node * @return XMLNS statement */ private BLangXMLNSStatement createNamespaceDeclrStatement(BLangXMLNS xmlns) { BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode(); xmlnsStmt.xmlnsDecl = xmlns; xmlnsStmt.pos = xmlns.pos; return xmlnsStmt; } @Override public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) { result = pkgNode; return; } createPackageInitFunctions(pkgNode, env); addAttachedFunctionsToPackageLevel(pkgNode, env); pkgNode.constants.stream() .filter(constant -> constant.expr.getKind() == NodeKind.LITERAL || constant.expr.getKind() == NodeKind.NUMERIC_LITERAL) .forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition)); BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env); BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body; for (BLangConstant constant : pkgNode.constants) { if (constant.symbol.type.tag == TypeTags.MAP) { BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol); constant.expr = rewrite(constant.expr, SymbolEnv.createTypeEnv(constant.typeNode, pkgNode.initFunction.symbol.scope, env)); BLangInvocation frozenConstValExpr = createLangLibInvocationNode( "cloneReadOnly", constant.expr, new ArrayList<>(), constant.expr.type, constant.pos); BLangAssignment constInit = ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef, frozenConstValExpr); initFnBody.stmts.add(constInit); } } pkgNode.globalVars.forEach(globalVar -> { BLangAssignment assignment = 
createAssignmentStmt(globalVar); if (assignment.expr != null) { initFnBody.stmts.add(assignment); } }); pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env)); annotationDesugar.rewritePackageAnnotations(pkgNode, env); addUserDefinedModuleInitInvocationAndReturn(pkgNode); pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence)); pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env); pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env); pkgNode.constants = rewrite(pkgNode.constants, env); pkgNode.globalVars = rewrite(pkgNode.globalVars, env); pkgNode.functions = rewrite(pkgNode.functions, env); serviceDesugar.rewriteListeners(pkgNode.globalVars, env, pkgNode.startFunction, pkgNode.stopFunction); ASTBuilderUtil.appendStatements(serviceAttachments, (BLangBlockFunctionBody) pkgNode.initFunction.body); addNilReturnStatement((BLangBlockFunctionBody) pkgNode.startFunction.body); addNilReturnStatement((BLangBlockFunctionBody) pkgNode.stopFunction.body); pkgNode.initFunction = splitInitFunction(pkgNode, env); pkgNode.initFunction = rewrite(pkgNode.initFunction, env); pkgNode.startFunction = rewrite(pkgNode.startFunction, env); pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env); closureDesugar.visit(pkgNode); for (BLangTestablePackage testablePkg : pkgNode.getTestablePkgs()) { rewrite(testablePkg, this.symTable.pkgEnvMap.get(testablePkg.symbol)); } pkgNode.completedPhases.add(CompilerPhase.DESUGAR); initFuncIndex = 0; result = pkgNode; } @Override public void visit(BLangImportPackage importPkgNode) { BPackageSymbol pkgSymbol = importPkgNode.symbol; SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol); rewrite(pkgEnv.node, pkgEnv); result = importPkgNode; } @Override public void visit(BLangTypeDefinition typeDef) { if (typeDef.typeNode.getKind() == NodeKind.OBJECT_TYPE || typeDef.typeNode.getKind() == NodeKind.RECORD_TYPE) { typeDef.typeNode = rewrite(typeDef.typeNode, env); } 
typeDef.annAttachments.forEach(attachment -> rewrite(attachment, env)); result = typeDef; } @Override public void visit(BLangObjectTypeNode objectTypeNode) { objectTypeNode.fields.addAll(objectTypeNode.referencedFields); if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) { result = objectTypeNode; return; } for (BLangSimpleVariable bLangSimpleVariable : objectTypeNode.fields) { bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env); } Map<BSymbol, BLangStatement> initFuncStmts = objectTypeNode.generatedInitFunction.initFunctionStmts; for (BLangSimpleVariable field : objectTypeNode.fields) { if (!initFuncStmts.containsKey(field.symbol) && field.expr != null) { initFuncStmts.put(field.symbol, createStructFieldUpdate(objectTypeNode.generatedInitFunction, field, objectTypeNode.generatedInitFunction.receiver.symbol)); } } BLangStatement[] initStmts = initFuncStmts.values().toArray(new BLangStatement[0]); BLangBlockFunctionBody generatedInitFnBody = (BLangBlockFunctionBody) objectTypeNode.generatedInitFunction.body; int i; for (i = 0; i < initStmts.length; i++) { generatedInitFnBody.stmts.add(i, initStmts[i]); } if (objectTypeNode.initFunction != null) { ((BLangReturn) generatedInitFnBody.stmts.get(i)).expr = createUserDefinedInitInvocation(objectTypeNode); } for (BLangFunction fn : objectTypeNode.functions) { rewrite(fn, this.env); } rewrite(objectTypeNode.generatedInitFunction, this.env); rewrite(objectTypeNode.initFunction, this.env); result = objectTypeNode; } private BLangInvocation createUserDefinedInitInvocation(BLangObjectTypeNode objectTypeNode) { ArrayList<BLangExpression> paramRefs = new ArrayList<>(); for (BLangSimpleVariable var : objectTypeNode.generatedInitFunction.requiredParams) { paramRefs.add(ASTBuilderUtil.createVariableRef(objectTypeNode.pos, var.symbol)); } BLangInvocation invocation = ASTBuilderUtil.createInvocationExprMethod(objectTypeNode.pos, ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc.symbol, paramRefs, 
Collections.emptyList(), symResolver); if (objectTypeNode.generatedInitFunction.restParam != null) { BLangSimpleVarRef restVarRef = ASTBuilderUtil.createVariableRef(objectTypeNode.pos, objectTypeNode.generatedInitFunction.restParam.symbol); BLangRestArgsExpression bLangRestArgsExpression = new BLangRestArgsExpression(); bLangRestArgsExpression.expr = restVarRef; bLangRestArgsExpression.pos = objectTypeNode.generatedInitFunction.pos; bLangRestArgsExpression.type = objectTypeNode.generatedInitFunction.restParam.type; bLangRestArgsExpression.expectedType = bLangRestArgsExpression.type; invocation.restArgs.add(bLangRestArgsExpression); } invocation.exprSymbol = ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc.symbol.receiverSymbol; return rewriteExpr(invocation); } @Override public void visit(BLangRecordTypeNode recordTypeNode) { recordTypeNode.fields.addAll(recordTypeNode.referencedFields); for (BLangSimpleVariable bLangSimpleVariable : recordTypeNode.fields) { bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env); } if (recordTypeNode.initFunction == null) { recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); env.enclPkg.addFunction(recordTypeNode.initFunction); env.enclPkg.topLevelNodes.add(recordTypeNode.initFunction); } for (BLangSimpleVariable field : recordTypeNode.fields) { if (!recordTypeNode.initFunction.initFunctionStmts.containsKey(field.symbol) && !Symbols.isOptional(field.symbol) && field.expr != null) { recordTypeNode.initFunction.initFunctionStmts .put(field.symbol, createStructFieldUpdate(recordTypeNode.initFunction, field, recordTypeNode.initFunction.receiver.symbol)); } } BLangStatement[] initStmts = recordTypeNode.initFunction.initFunctionStmts .values().toArray(new BLangStatement[0]); BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) recordTypeNode.initFunction.body; for (int i = 0; i < 
recordTypeNode.initFunction.initFunctionStmts.size(); i++) { initFnBody.stmts.add(i, initStmts[i]); } if (recordTypeNode.isAnonymous && recordTypeNode.isLocal) { BLangUserDefinedType userDefinedType = desugarLocalAnonRecordTypeNode(recordTypeNode); TypeDefBuilderHelper.addTypeDefinition(recordTypeNode.type, recordTypeNode.type.tsymbol, recordTypeNode, env); recordTypeNode.desugared = true; result = userDefinedType; return; } result = recordTypeNode; } private BLangUserDefinedType desugarLocalAnonRecordTypeNode(BLangRecordTypeNode recordTypeNode) { return ASTBuilderUtil.createUserDefineTypeNode(recordTypeNode.symbol.name.value, recordTypeNode.type, recordTypeNode.pos); } @Override public void visit(BLangArrayType arrayType) { arrayType.elemtype = rewrite(arrayType.elemtype, env); result = arrayType; } @Override public void visit(BLangConstrainedType constrainedType) { constrainedType.constraint = rewrite(constrainedType.constraint, env); result = constrainedType; } @Override public void visit(BLangStreamType streamType) { streamType.constraint = rewrite(streamType.constraint, env); streamType.error = rewrite(streamType.error, env); result = streamType; } @Override public void visit(BLangValueType valueType) { result = valueType; } @Override public void visit(BLangUserDefinedType userDefinedType) { result = userDefinedType; } @Override public void visit(BLangUnionTypeNode unionTypeNode) { List<BLangType> rewrittenMembers = new ArrayList<>(); unionTypeNode.memberTypeNodes.forEach(typeNode -> rewrittenMembers.add(rewrite(typeNode, env))); unionTypeNode.memberTypeNodes = rewrittenMembers; result = unionTypeNode; } @Override public void visit(BLangErrorType errorType) { errorType.detailType = rewrite(errorType.detailType, env); result = errorType; } @Override public void visit(BLangFunctionTypeNode functionTypeNode) { functionTypeNode.params.forEach(param -> rewrite(param.typeNode, env)); functionTypeNode.returnTypeNode = rewrite(functionTypeNode.returnTypeNode, env); 
result = functionTypeNode; } @Override public void visit(BLangBuiltInRefTypeNode refTypeNode) { result = refTypeNode; } @Override public void visit(BLangTupleTypeNode tupleTypeNode) { List<BLangType> rewrittenMembers = new ArrayList<>(); tupleTypeNode.memberTypeNodes.forEach(member -> rewrittenMembers.add(rewrite(member, env))); tupleTypeNode.memberTypeNodes = rewrittenMembers; tupleTypeNode.restParamType = rewrite(tupleTypeNode.restParamType, env); result = tupleTypeNode; } @Override public void visit(BLangBlockFunctionBody body) { SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); body.stmts = rewriteStmt(body.stmts, bodyEnv); result = body; } @Override public void visit(BLangExprFunctionBody exprBody) { BLangBlockFunctionBody body = ASTBuilderUtil.createBlockFunctionBody(exprBody.pos, new ArrayList<>()); BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(exprBody.pos, body); returnStmt.expr = rewriteExpr(exprBody.expr); result = body; } @Override public void visit(BLangExternalFunctionBody body) { for (BLangAnnotationAttachment attachment : body.annAttachments) { rewrite(attachment, env); } result = body; } @Override
class Desugar extends BLangNodeVisitor { private static final CompilerContext.Key<Desugar> DESUGAR_KEY = new CompilerContext.Key<>(); private static final String QUERY_TABLE_WITH_JOIN_CLAUSE = "queryTableWithJoinClause"; private static final String QUERY_TABLE_WITHOUT_JOIN_CLAUSE = "queryTableWithoutJoinClause"; private static final String BASE_64 = "base64"; private static final String ERROR_REASON_FUNCTION_NAME = "reason"; private static final String ERROR_DETAIL_FUNCTION_NAME = "detail"; private static final String TO_STRING_FUNCTION_NAME = "toString"; private static final String LENGTH_FUNCTION_NAME = "length"; private static final String ERROR_REASON_NULL_REFERENCE_ERROR = "NullReferenceException"; private static final String CONSTRUCT_FROM = "constructFrom"; private static final String SLICE_LANGLIB_METHOD = "slice"; private static final String PUSH_LANGLIB_METHOD = "push"; private static final String DESUGARED_VARARG_KEY = "$vararg$"; public static final String XML_INTERNAL_SELECT_DESCENDANTS = "selectDescendants"; public static final String XML_INTERNAL_CHILDREN = "children"; public static final String XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT = "getFilteredChildrenFlat"; public static final String XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING = "getElementNameNilLifting"; public static final String XML_INTERNAL_GET_ATTRIBUTE = "getAttribute"; public static final String XML_INTERNAL_GET_ELEMENTS = "getElements"; private SymbolTable symTable; private SymbolResolver symResolver; private final SymbolEnter symbolEnter; private ClosureDesugar closureDesugar; private QueryDesugar queryDesugar; private AnnotationDesugar annotationDesugar; private Types types; private Names names; private ServiceDesugar serviceDesugar; private BLangNode result; private NodeCloner nodeCloner; private SemanticAnalyzer semanticAnalyzer; private BLangStatementLink currentLink; public Stack<BLangLockStmt> enclLocks = new Stack<>(); private SymbolEnv env; private int lambdaFunctionCount = 0; 
private int transactionIndex = 0; private int recordCount = 0; private int errorCount = 0; private int annonVarCount = 0; private int initFuncIndex = 0; private int indexExprCount = 0; private int letCount = 0; private int varargCount = 0; private Stack<BLangMatch> matchStmtStack = new Stack<>(); Stack<BLangExpression> accessExprStack = new Stack<>(); private BLangMatchTypedBindingPatternClause successPattern; private BLangAssignment safeNavigationAssignment; static boolean isJvmTarget = false; public static Desugar getInstance(CompilerContext context) { Desugar desugar = context.get(DESUGAR_KEY); if (desugar == null) { desugar = new Desugar(context); } return desugar; } private Desugar(CompilerContext context) { isJvmTarget = true; context.put(DESUGAR_KEY, this); this.symTable = SymbolTable.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.symbolEnter = SymbolEnter.getInstance(context); this.closureDesugar = ClosureDesugar.getInstance(context); this.queryDesugar = QueryDesugar.getInstance(context); this.annotationDesugar = AnnotationDesugar.getInstance(context); this.types = Types.getInstance(context); this.names = Names.getInstance(context); this.names = Names.getInstance(context); this.serviceDesugar = ServiceDesugar.getInstance(context); this.nodeCloner = NodeCloner.getInstance(context); this.semanticAnalyzer = SemanticAnalyzer.getInstance(context); } public BLangPackage perform(BLangPackage pkgNode) { annotationDesugar.initializeAnnotationMap(pkgNode); SymbolEnv env = this.symTable.pkgEnvMap.get(pkgNode.symbol); return rewrite(pkgNode, env); } private void addAttachedFunctionsToPackageLevel(BLangPackage pkgNode, SymbolEnv env) { for (BLangTypeDefinition typeDef : pkgNode.typeDefinitions) { if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) { continue; } if (typeDef.symbol.tag == SymTag.OBJECT) { BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDef.typeNode; objectTypeNode.functions.forEach(f -> { if 
(!pkgNode.objAttachedFunctions.contains(f.symbol)) { pkgNode.functions.add(f); pkgNode.topLevelNodes.add(f); } }); if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) { continue; } BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(objectTypeNode, env); tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction, tempGeneratedInitFunction.symbol.scope, env); this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env); objectTypeNode.generatedInitFunction = tempGeneratedInitFunction; pkgNode.functions.add(objectTypeNode.generatedInitFunction); pkgNode.topLevelNodes.add(objectTypeNode.generatedInitFunction); if (objectTypeNode.initFunction != null) { pkgNode.functions.add(objectTypeNode.initFunction); pkgNode.topLevelNodes.add(objectTypeNode.initFunction); } } else if (typeDef.symbol.tag == SymTag.RECORD) { BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDef.typeNode; recordTypeNode.initFunction = rewrite( TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable), env); pkgNode.functions.add(recordTypeNode.initFunction); pkgNode.topLevelNodes.add(recordTypeNode.initFunction); } } } private BLangFunction createGeneratedInitializerFunction(BLangObjectTypeNode objectTypeNode, SymbolEnv env) { BLangFunction generatedInitFunc = createInitFunctionForObjectType(objectTypeNode, env); if (objectTypeNode.initFunction == null) { return generatedInitFunc; } BAttachedFunction initializerFunc = ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc; BAttachedFunction generatedInitializerFunc = ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc; addRequiredParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc, generatedInitializerFunc); addRestParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc, generatedInitializerFunc); generatedInitFunc.returnTypeNode = objectTypeNode.initFunction.returnTypeNode; 
generatedInitializerFunc.symbol.retType = generatedInitFunc.returnTypeNode.type; ((BInvokableType) generatedInitFunc.symbol.type).paramTypes = initializerFunc.type.paramTypes; ((BInvokableType) generatedInitFunc.symbol.type).retType = initializerFunc.type.retType; ((BInvokableType) generatedInitFunc.symbol.type).restType = initializerFunc.type.restType; generatedInitializerFunc.type = initializerFunc.type; generatedInitFunc.desugared = false; return generatedInitFunc; } private void addRequiredParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc, BAttachedFunction generatedInitializerFunc) { if (initFunction.requiredParams.isEmpty()) { return; } for (BLangSimpleVariable requiredParameter : initFunction.requiredParams) { BLangSimpleVariable var = ASTBuilderUtil.createVariable(initFunction.pos, requiredParameter.name.getValue(), requiredParameter.type, createRequiredParamExpr(requiredParameter.expr), new BVarSymbol(0, names.fromString(requiredParameter.name.getValue()), requiredParameter.symbol.pkgID, requiredParameter.type, requiredParameter.symbol.owner)); generatedInitFunc.requiredParams.add(var); generatedInitializerFunc.symbol.params.add(var.symbol); } } private BLangExpression createRequiredParamExpr(BLangExpression expr) { if (expr == null) { return null; } if (expr.getKind() == NodeKind.LAMBDA) { BLangFunction func = ((BLangLambdaFunction) expr).function; return createLambdaFunction(func.pos, func.name.value, func.requiredParams, func.returnTypeNode, func.body); } BLangExpression expression = this.nodeCloner.clone(expr); if (expression.getKind() == NodeKind.ARROW_EXPR) { BLangIdentifier func = (BLangIdentifier) ((BLangArrowFunction) expression).functionName; ((BLangArrowFunction) expression).functionName = ASTBuilderUtil.createIdentifier(func.pos, "$" + func.getValue() + "$"); } return expression; } private void addRestParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc, 
BAttachedFunction generatedInitializerFunc) { if (initFunction.restParam == null) { return; } BLangSimpleVariable restParam = initFunction.restParam; generatedInitFunc.restParam = ASTBuilderUtil.createVariable(initFunction.pos, restParam.name.getValue(), restParam.type, null, new BVarSymbol(0, names.fromString(restParam.name.getValue()), restParam.symbol.pkgID, restParam.type, restParam.symbol.owner)); generatedInitializerFunc.symbol.restParam = generatedInitFunc.restParam.symbol; } /** * Create package init functions. * * @param pkgNode package node * @param env symbol environment of package */ private void createPackageInitFunctions(BLangPackage pkgNode, SymbolEnv env) { String alias = pkgNode.symbol.pkgID.toString(); pkgNode.initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias, Names.INIT_FUNCTION_SUFFIX, symTable); BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body; for (BLangXMLNS xmlns : pkgNode.xmlnsList) { initFnBody.addStatement(createNamespaceDeclrStatement(xmlns)); } pkgNode.startFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias, Names.START_FUNCTION_SUFFIX, symTable); pkgNode.stopFunction = ASTBuilderUtil.createInitFunctionWithNilReturn(pkgNode.pos, alias, Names.STOP_FUNCTION_SUFFIX); createInvokableSymbol(pkgNode.initFunction, env); createInvokableSymbol(pkgNode.startFunction, env); createInvokableSymbol(pkgNode.stopFunction, env); } private void addUserDefinedModuleInitInvocationAndReturn(BLangPackage pkgNode) { Optional<BLangFunction> userDefInitOptional = pkgNode.functions.stream() .filter(bLangFunction -> !bLangFunction.attachedFunction && bLangFunction.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value)) .findFirst(); BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body; if (!userDefInitOptional.isPresent()) { addNilReturnStatement(initFnBody); return; } BLangFunction userDefInit = 
userDefInitOptional.get(); BLangInvocation userDefInitInvocation = (BLangInvocation) TreeBuilder.createInvocationNode(); userDefInitInvocation.pos = pkgNode.initFunction.pos; BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(userDefInit.name.value); userDefInitInvocation.name = name; userDefInitInvocation.symbol = userDefInit.symbol; BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); pkgAlias.setLiteral(false); pkgAlias.setValue(pkgNode.packageID.name.value); userDefInitInvocation.pkgAlias = pkgAlias; userDefInitInvocation.type = userDefInit.returnTypeNode.type; userDefInitInvocation.requiredArgs = Collections.emptyList(); BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode(); returnStmt.pos = pkgNode.initFunction.pos; returnStmt.expr = userDefInitInvocation; initFnBody.stmts.add(returnStmt); } /** * Create invokable symbol for function. * * @param bLangFunction function node * @param env Symbol environment */ private void createInvokableSymbol(BLangFunction bLangFunction, SymbolEnv env) { BType returnType = bLangFunction.returnTypeNode.type == null ? symResolver.resolveTypeNode(bLangFunction.returnTypeNode, env) : bLangFunction.returnTypeNode.type; BInvokableType invokableType = new BInvokableType(new ArrayList<>(), getRestType(bLangFunction), returnType, null); BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(bLangFunction.flagSet), new Name(bLangFunction.name.value), env.enclPkg.packageID, invokableType, env.enclPkg.symbol, true); functionSymbol.retType = returnType; for (BLangVariable param : bLangFunction.requiredParams) { functionSymbol.params.add(param.symbol); } functionSymbol.scope = new Scope(functionSymbol); bLangFunction.symbol = functionSymbol; } /** * Add nil return statement. 
* * @param bLangBlockStmt block statement node */ private void addNilReturnStatement(BlockNode bLangBlockStmt) { BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(((BLangNode) bLangBlockStmt).pos, symTable.nilType); bLangBlockStmt.addStatement(returnStmt); } /** * Create namespace declaration statement for XMNLNS. * * @param xmlns XMLNS node * @return XMLNS statement */ private BLangXMLNSStatement createNamespaceDeclrStatement(BLangXMLNS xmlns) { BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode(); xmlnsStmt.xmlnsDecl = xmlns; xmlnsStmt.pos = xmlns.pos; return xmlnsStmt; } @Override public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) { result = pkgNode; return; } createPackageInitFunctions(pkgNode, env); addAttachedFunctionsToPackageLevel(pkgNode, env); pkgNode.constants.stream() .filter(constant -> constant.expr.getKind() == NodeKind.LITERAL || constant.expr.getKind() == NodeKind.NUMERIC_LITERAL) .forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition)); BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env); BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body; for (BLangConstant constant : pkgNode.constants) { if (constant.symbol.type.tag == TypeTags.MAP) { BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol); constant.expr = rewrite(constant.expr, SymbolEnv.createTypeEnv(constant.typeNode, pkgNode.initFunction.symbol.scope, env)); BLangInvocation frozenConstValExpr = createLangLibInvocationNode( "cloneReadOnly", constant.expr, new ArrayList<>(), constant.expr.type, constant.pos); BLangAssignment constInit = ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef, frozenConstValExpr); initFnBody.stmts.add(constInit); } } pkgNode.globalVars.forEach(globalVar -> { BLangAssignment assignment = 
createAssignmentStmt(globalVar); if (assignment.expr != null) { initFnBody.stmts.add(assignment); } }); pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env)); annotationDesugar.rewritePackageAnnotations(pkgNode, env); addUserDefinedModuleInitInvocationAndReturn(pkgNode); pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence)); pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env); pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env); pkgNode.constants = rewrite(pkgNode.constants, env); pkgNode.globalVars = rewrite(pkgNode.globalVars, env); pkgNode.functions = rewrite(pkgNode.functions, env); serviceDesugar.rewriteListeners(pkgNode.globalVars, env, pkgNode.startFunction, pkgNode.stopFunction); ASTBuilderUtil.appendStatements(serviceAttachments, (BLangBlockFunctionBody) pkgNode.initFunction.body); addNilReturnStatement((BLangBlockFunctionBody) pkgNode.startFunction.body); addNilReturnStatement((BLangBlockFunctionBody) pkgNode.stopFunction.body); pkgNode.initFunction = splitInitFunction(pkgNode, env); pkgNode.initFunction = rewrite(pkgNode.initFunction, env); pkgNode.startFunction = rewrite(pkgNode.startFunction, env); pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env); closureDesugar.visit(pkgNode); for (BLangTestablePackage testablePkg : pkgNode.getTestablePkgs()) { rewrite(testablePkg, this.symTable.pkgEnvMap.get(testablePkg.symbol)); } pkgNode.completedPhases.add(CompilerPhase.DESUGAR); initFuncIndex = 0; result = pkgNode; } @Override public void visit(BLangImportPackage importPkgNode) { BPackageSymbol pkgSymbol = importPkgNode.symbol; SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol); rewrite(pkgEnv.node, pkgEnv); result = importPkgNode; } @Override public void visit(BLangTypeDefinition typeDef) { if (typeDef.typeNode.getKind() == NodeKind.OBJECT_TYPE || typeDef.typeNode.getKind() == NodeKind.RECORD_TYPE) { typeDef.typeNode = rewrite(typeDef.typeNode, env); } 
typeDef.annAttachments.forEach(attachment -> rewrite(attachment, env)); result = typeDef; } @Override public void visit(BLangObjectTypeNode objectTypeNode) { objectTypeNode.fields.addAll(objectTypeNode.referencedFields); if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) { result = objectTypeNode; return; } for (BLangSimpleVariable bLangSimpleVariable : objectTypeNode.fields) { bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env); } Map<BSymbol, BLangStatement> initFuncStmts = objectTypeNode.generatedInitFunction.initFunctionStmts; for (BLangSimpleVariable field : objectTypeNode.fields) { if (!initFuncStmts.containsKey(field.symbol) && field.expr != null) { initFuncStmts.put(field.symbol, createStructFieldUpdate(objectTypeNode.generatedInitFunction, field, objectTypeNode.generatedInitFunction.receiver.symbol)); } } BLangStatement[] initStmts = initFuncStmts.values().toArray(new BLangStatement[0]); BLangBlockFunctionBody generatedInitFnBody = (BLangBlockFunctionBody) objectTypeNode.generatedInitFunction.body; int i; for (i = 0; i < initStmts.length; i++) { generatedInitFnBody.stmts.add(i, initStmts[i]); } if (objectTypeNode.initFunction != null) { ((BLangReturn) generatedInitFnBody.stmts.get(i)).expr = createUserDefinedInitInvocation(objectTypeNode); } for (BLangFunction fn : objectTypeNode.functions) { rewrite(fn, this.env); } rewrite(objectTypeNode.generatedInitFunction, this.env); rewrite(objectTypeNode.initFunction, this.env); result = objectTypeNode; } private BLangInvocation createUserDefinedInitInvocation(BLangObjectTypeNode objectTypeNode) { ArrayList<BLangExpression> paramRefs = new ArrayList<>(); for (BLangSimpleVariable var : objectTypeNode.generatedInitFunction.requiredParams) { paramRefs.add(ASTBuilderUtil.createVariableRef(objectTypeNode.pos, var.symbol)); } BLangInvocation invocation = ASTBuilderUtil.createInvocationExprMethod(objectTypeNode.pos, ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc.symbol, paramRefs, 
Collections.emptyList(), symResolver); if (objectTypeNode.generatedInitFunction.restParam != null) { BLangSimpleVarRef restVarRef = ASTBuilderUtil.createVariableRef(objectTypeNode.pos, objectTypeNode.generatedInitFunction.restParam.symbol); BLangRestArgsExpression bLangRestArgsExpression = new BLangRestArgsExpression(); bLangRestArgsExpression.expr = restVarRef; bLangRestArgsExpression.pos = objectTypeNode.generatedInitFunction.pos; bLangRestArgsExpression.type = objectTypeNode.generatedInitFunction.restParam.type; bLangRestArgsExpression.expectedType = bLangRestArgsExpression.type; invocation.restArgs.add(bLangRestArgsExpression); } invocation.exprSymbol = ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc.symbol.receiverSymbol; return rewriteExpr(invocation); } @Override public void visit(BLangRecordTypeNode recordTypeNode) { recordTypeNode.fields.addAll(recordTypeNode.referencedFields); for (BLangSimpleVariable bLangSimpleVariable : recordTypeNode.fields) { bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env); } if (recordTypeNode.initFunction == null) { recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); env.enclPkg.addFunction(recordTypeNode.initFunction); env.enclPkg.topLevelNodes.add(recordTypeNode.initFunction); } for (BLangSimpleVariable field : recordTypeNode.fields) { if (!recordTypeNode.initFunction.initFunctionStmts.containsKey(field.symbol) && !Symbols.isOptional(field.symbol) && field.expr != null) { recordTypeNode.initFunction.initFunctionStmts .put(field.symbol, createStructFieldUpdate(recordTypeNode.initFunction, field, recordTypeNode.initFunction.receiver.symbol)); } } BLangStatement[] initStmts = recordTypeNode.initFunction.initFunctionStmts .values().toArray(new BLangStatement[0]); BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) recordTypeNode.initFunction.body; for (int i = 0; i < 
recordTypeNode.initFunction.initFunctionStmts.size(); i++) { initFnBody.stmts.add(i, initStmts[i]); } if (recordTypeNode.isAnonymous && recordTypeNode.isLocal) { BLangUserDefinedType userDefinedType = desugarLocalAnonRecordTypeNode(recordTypeNode); TypeDefBuilderHelper.addTypeDefinition(recordTypeNode.type, recordTypeNode.type.tsymbol, recordTypeNode, env); recordTypeNode.desugared = true; result = userDefinedType; return; } result = recordTypeNode; } private BLangUserDefinedType desugarLocalAnonRecordTypeNode(BLangRecordTypeNode recordTypeNode) { return ASTBuilderUtil.createUserDefineTypeNode(recordTypeNode.symbol.name.value, recordTypeNode.type, recordTypeNode.pos); } @Override public void visit(BLangArrayType arrayType) { arrayType.elemtype = rewrite(arrayType.elemtype, env); result = arrayType; } @Override public void visit(BLangConstrainedType constrainedType) { constrainedType.constraint = rewrite(constrainedType.constraint, env); result = constrainedType; } @Override public void visit(BLangStreamType streamType) { streamType.constraint = rewrite(streamType.constraint, env); streamType.error = rewrite(streamType.error, env); result = streamType; } @Override public void visit(BLangValueType valueType) { result = valueType; } @Override public void visit(BLangUserDefinedType userDefinedType) { result = userDefinedType; } @Override public void visit(BLangUnionTypeNode unionTypeNode) { List<BLangType> rewrittenMembers = new ArrayList<>(); unionTypeNode.memberTypeNodes.forEach(typeNode -> rewrittenMembers.add(rewrite(typeNode, env))); unionTypeNode.memberTypeNodes = rewrittenMembers; result = unionTypeNode; } @Override public void visit(BLangErrorType errorType) { errorType.detailType = rewrite(errorType.detailType, env); result = errorType; } @Override public void visit(BLangFunctionTypeNode functionTypeNode) { functionTypeNode.params.forEach(param -> rewrite(param.typeNode, env)); functionTypeNode.returnTypeNode = rewrite(functionTypeNode.returnTypeNode, env); 
result = functionTypeNode; } @Override public void visit(BLangBuiltInRefTypeNode refTypeNode) { result = refTypeNode; } @Override public void visit(BLangTupleTypeNode tupleTypeNode) { List<BLangType> rewrittenMembers = new ArrayList<>(); tupleTypeNode.memberTypeNodes.forEach(member -> rewrittenMembers.add(rewrite(member, env))); tupleTypeNode.memberTypeNodes = rewrittenMembers; tupleTypeNode.restParamType = rewrite(tupleTypeNode.restParamType, env); result = tupleTypeNode; } @Override public void visit(BLangBlockFunctionBody body) { SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); body.stmts = rewriteStmt(body.stmts, bodyEnv); result = body; } @Override public void visit(BLangExprFunctionBody exprBody) { BLangBlockFunctionBody body = ASTBuilderUtil.createBlockFunctionBody(exprBody.pos, new ArrayList<>()); BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(exprBody.pos, body); returnStmt.expr = rewriteExpr(exprBody.expr); result = body; } @Override public void visit(BLangExternalFunctionBody body) { for (BLangAnnotationAttachment attachment : body.annAttachments) { rewrite(attachment, env); } result = body; } @Override
Can you add tests for some of the new utilities ? For example, PendingJobManager and PendingJob.
public void testWriteUnknown() throws Exception { p.apply( Create.of( new TableRow().set("name", "a").set("number", 1), new TableRow().set("name", "b").set("number", 2), new TableRow().set("name", "c").set("number", 3)) .withCoder(TableRowJsonCoder.of())) .apply( BigQueryIO.writeTableRows() .to("project-id:dataset-id.table-id") .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_NEVER) .withTestServices(fakeBqServices) .withoutValidation()); thrown.expect(RuntimeException.class); thrown.expectMessage("Failed to create job"); p.run(); }
thrown.expectMessage("Failed to create job");
public void testWriteUnknown() throws Exception { p.apply( Create.of( new TableRow().set("name", "a").set("number", 1), new TableRow().set("name", "b").set("number", 2), new TableRow().set("name", "c").set("number", 3)) .withCoder(TableRowJsonCoder.of())) .apply( BigQueryIO.writeTableRows() .to("project-id:dataset-id.table-id") .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_NEVER) .withTestServices(fakeBqServices) .withoutValidation()); thrown.expect(RuntimeException.class); thrown.expectMessage("Failed to create job"); p.run(); }
class PartitionedGlobalWindowCoder extends AtomicCoder<PartitionedGlobalWindow> { @Override public void encode(PartitionedGlobalWindow window, OutputStream outStream) throws IOException { encode(window, outStream, Context.NESTED); } @Override public void encode(PartitionedGlobalWindow window, OutputStream outStream, Context context) throws IOException { StringUtf8Coder.of().encode(window.value, outStream, context); } @Override public PartitionedGlobalWindow decode(InputStream inStream) throws IOException { return decode(inStream, Context.NESTED); } @Override public PartitionedGlobalWindow decode(InputStream inStream, Context context) throws IOException { return new PartitionedGlobalWindow(StringUtf8Coder.of().decode(inStream, context)); } @Override public void verifyDeterministic() {} }
class PartitionedGlobalWindowCoder extends AtomicCoder<PartitionedGlobalWindow> { @Override public void encode(PartitionedGlobalWindow window, OutputStream outStream) throws IOException { encode(window, outStream, Context.NESTED); } @Override public void encode(PartitionedGlobalWindow window, OutputStream outStream, Context context) throws IOException { StringUtf8Coder.of().encode(window.value, outStream, context); } @Override public PartitionedGlobalWindow decode(InputStream inStream) throws IOException { return decode(inStream, Context.NESTED); } @Override public PartitionedGlobalWindow decode(InputStream inStream, Context context) throws IOException { return new PartitionedGlobalWindow(StringUtf8Coder.of().decode(inStream, context)); } @Override public void verifyDeterministic() {} }
You want `attributes[1].key`. SQL arrays are 1-indexed.
public void testSQLSelectsArrayAttributes() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "attributes ARRAY<ROW<key VARCHAR, `value` VARCHAR>>, \n" + "payload ROW< \n" + " id INTEGER, \n" + " name VARCHAR \n" + " > \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES '{ " + "%s" + "\"timestampAttributeKey\" : \"ts\" }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam()); String queryString = "SELECT message.payload.id, attributes[0].key AS name FROM message"; List<PubsubMessage> messages = ImmutableList.of( objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz")); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); PCollection<Row> queryOutput = query(sqlEnv, pipeline, queryString); queryOutput.apply( "waitForSuccess", resultSignal.signalSuccessWhen( SchemaCoder.of(PAYLOAD_SCHEMA), observedRows -> observedRows.equals( ImmutableSet.of( row(PAYLOAD_SCHEMA, 3, "foo"), row(PAYLOAD_SCHEMA, 5, "bar"), row(PAYLOAD_SCHEMA, 7, "baz"))))); pipeline.run(); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); eventsTopic.publish(messages); resultSignal.waitForSuccess(Duration.standardMinutes(5)); }
String queryString = "SELECT message.payload.id, attributes[0].key AS name FROM message";
public void testSQLSelectsArrayAttributes() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "attributes ARRAY<ROW<key VARCHAR, `value` VARCHAR>>, \n" + "payload ROW< \n" + " id INTEGER, \n" + " name VARCHAR \n" + " > \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES '{ " + "%s" + "\"timestampAttributeKey\" : \"ts\" }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam()); String queryString = "SELECT message.payload.id, attributes[1].key AS a1, attributes[2].key AS a2 FROM message"; List<PubsubMessage> messages = ImmutableList.of( objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz")); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); PCollection<Row> queryOutput = query(sqlEnv, pipeline, queryString); queryOutput.apply( "waitForSuccess", resultSignal.signalSuccessWhen( SchemaCoder.of(PAYLOAD_SCHEMA), observedRows -> { Map<Integer, String> entries = new HashMap<>(); for (Row row : observedRows) { if ("ts".equals(row.getString("a1"))) { entries.put(row.getInt32("id"), row.getString("a2")); } else { entries.put(row.getInt32("id"), row.getString("a1")); } } return entries.equals(ImmutableMap.of(3, "foo", 5, "bar", 7, "baz")); })); pipeline.run(); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); eventsTopic.publish(messages); resultSignal.waitForSuccess(Duration.standardMinutes(1)); }
class PubsubTableProviderIT implements Serializable { private static final Schema PAYLOAD_SCHEMA = Schema.builder() .addNullableField("id", Schema.FieldType.INT32) .addNullableField("name", Schema.FieldType.STRING) .build(); @Rule public transient TestPubsub eventsTopic = TestPubsub.create(); @Rule public transient TestPubsub filteredEventsTopic = TestPubsub.create(); @Rule public transient TestPubsub dlqTopic = TestPubsub.create(); @Rule public transient TestPubsubSignal resultSignal = TestPubsubSignal.create(); @Rule public transient TestPipeline pipeline = TestPipeline.create(); @Rule public transient TestPipeline filterPipeline = TestPipeline.create(); private final SchemaIOTableProviderWrapper tableProvider = new PubsubTableProvider(); @Parameters public static Collection<Object[]> data() { return Arrays.asList( new Object[][] {{new PubsubJsonObjectProvider()}, {new PubsubAvroObjectProvider()}}); } @Parameter public PubsubObjectProvider objectsProvider; /** * HACK: we need an objectmapper to turn pipelineoptions back into a map. We need to use * ReflectHelpers to get the extra PipelineOptions. 
*/ private static final ObjectMapper MAPPER = new ObjectMapper() .registerModules(ObjectMapper.findModules(ReflectHelpers.findClassLoader())); @Test public void testSQLSelectsPayloadContent() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "attributes MAP<VARCHAR, VARCHAR>, \n" + "payload ROW< \n" + " id INTEGER, \n" + " name VARCHAR \n" + " > \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES '{ " + "%s" + "\"timestampAttributeKey\" : \"ts\" }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam()); String queryString = "SELECT message.payload.id, message.payload.name from message"; List<PubsubMessage> messages = ImmutableList.of( objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz")); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); PCollection<Row> queryOutput = query(sqlEnv, pipeline, queryString); queryOutput.apply( "waitForSuccess", resultSignal.signalSuccessWhen( SchemaCoder.of(PAYLOAD_SCHEMA), observedRows -> observedRows.equals( ImmutableSet.of( row(PAYLOAD_SCHEMA, 3, "foo"), row(PAYLOAD_SCHEMA, 5, "bar"), row(PAYLOAD_SCHEMA, 7, "baz"))))); pipeline.run(); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); eventsTopic.publish(messages); resultSignal.waitForSuccess(Duration.standardMinutes(5)); } @Test @Test public void testSQLWithBytePayload() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "attributes MAP<VARCHAR, VARCHAR>, \n" + "payload VARBINARY \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES '{ " + "\"timestampAttributeKey\" : \"ts\" }'", tableProvider.getTableType(), eventsTopic.topicPath()); 
String queryString = "SELECT message.payload AS some_bytes FROM message"; List<PubsubMessage> messages = ImmutableList.of( objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz")); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); PCollection<Row> queryOutput = query(sqlEnv, pipeline, queryString); Schema justBytesSchema = Schema.builder().addByteArrayField("some_bytes").build(); queryOutput.apply( "waitForSuccess", resultSignal.signalSuccessWhen( SchemaCoder.of(justBytesSchema), observedRows -> observedRows.equals( ImmutableSet.of( row(justBytesSchema, (Object) messages.get(0).getPayload()), row(justBytesSchema, (Object) messages.get(1).getPayload()), row(justBytesSchema, (Object) messages.get(2).getPayload()))))); pipeline.run(); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); eventsTopic.publish(messages); resultSignal.waitForSuccess(Duration.standardMinutes(5)); } @Test @SuppressWarnings("unchecked") public void testUsesDlq() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "attributes MAP<VARCHAR, VARCHAR>, \n" + "payload ROW< \n" + " id INTEGER, \n" + " name VARCHAR \n" + " > \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES " + " '{ " + " %s" + " \"timestampAttributeKey\" : \"ts\", " + " \"deadLetterQueue\" : \"%s\"" + " }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam(), dlqTopic.topicPath()); String queryString = "SELECT message.payload.id, message.payload.name from message"; List<PubsubMessage> messages = ImmutableList.of( objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz"), messagePayload(ts(4), "{ - }", 
ImmutableMap.of()), messagePayload(ts(5), "{ + }", ImmutableMap.of())); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); PCollection<Row> queryOutput = query(sqlEnv, pipeline, queryString); queryOutput.apply( "waitForSuccess", resultSignal.signalSuccessWhen( SchemaCoder.of(PAYLOAD_SCHEMA), observedRows -> observedRows.equals( ImmutableSet.of( row(PAYLOAD_SCHEMA, 3, "foo"), row(PAYLOAD_SCHEMA, 5, "bar"), row(PAYLOAD_SCHEMA, 7, "baz"))))); pipeline.run(); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); eventsTopic.publish(messages); resultSignal.waitForSuccess(Duration.standardMinutes(4)); dlqTopic .assertThatTopicEventuallyReceives( matcherPayload(ts(4), "{ - }"), matcherPayload(ts(5), "{ + }")) .waitForUpTo(Duration.standardSeconds(40)); } @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void testSQLLimit() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "attributes MAP<VARCHAR, VARCHAR>, \n" + "payload ROW< \n" + " id INTEGER, \n" + " name VARCHAR \n" + " > \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES " + " '{ " + " %s" + " \"timestampAttributeKey\" : \"ts\", " + " \"deadLetterQueue\" : \"%s\"" + " }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam(), dlqTopic.topicPath()); List<PubsubMessage> messages = ImmutableList.of( objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz"), objectsProvider.messageIdName(ts(4), 9, "ba2"), objectsProvider.messageIdName(ts(5), 10, "ba3"), objectsProvider.messageIdName(ts(6), 13, "ba4"), objectsProvider.messageIdName(ts(7), 15, "ba5")); CalciteConnection connection = connect(pipeline.getOptions(), new PubsubTableProvider()); Statement statement = 
connection.createStatement(); statement.execute(createTableString); ExecutorService pool = Executors.newFixedThreadPool(1); Future<List<String>> queryResult = pool.submit( (Callable) () -> { ResultSet resultSet = statement.executeQuery("SELECT message.payload.id FROM message LIMIT 3"); ImmutableList.Builder<String> result = ImmutableList.builder(); while (resultSet.next()) { result.add(resultSet.getString(1)); } return result.build(); }); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); eventsTopic.publish(messages); assertThat(queryResult.get(2, TimeUnit.MINUTES).size(), equalTo(3)); pool.shutdown(); } @Test public void testSQLSelectsPayloadContentFlat() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "id INTEGER, \n" + "name VARCHAR \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES " + " '{ " + " %s" + " \"timestampAttributeKey\" : \"ts\" " + " }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam()); String queryString = "SELECT message.id, message.name from message"; List<PubsubMessage> messages = ImmutableList.of( objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz")); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); PCollection<Row> queryOutput = query(sqlEnv, pipeline, queryString); queryOutput.apply( "waitForSuccess", resultSignal.signalSuccessWhen( SchemaCoder.of(PAYLOAD_SCHEMA), observedRows -> observedRows.equals( ImmutableSet.of( row(PAYLOAD_SCHEMA, 3, "foo"), row(PAYLOAD_SCHEMA, 5, "bar"), row(PAYLOAD_SCHEMA, 7, "baz"))))); pipeline.run(); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); 
eventsTopic.publish(messages); resultSignal.waitForSuccess(Duration.standardMinutes(5)); } @Test @SuppressWarnings("unchecked") public void testSQLInsertRowsToPubsubFlat() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "name VARCHAR, \n" + "height INTEGER, \n" + "knowsJavascript BOOLEAN \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES " + " '{ " + " %s" + " \"deadLetterQueue\" : \"%s\"" + " }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam(), dlqTopic.topicPath()); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); String queryString = "INSERT INTO message (name, height, knowsJavascript) \n" + "VALUES \n" + "('person1', 80, TRUE), \n" + "('person2', 70, FALSE)"; query(sqlEnv, pipeline, queryString); pipeline.run().waitUntilFinish(Duration.standardMinutes(5)); eventsTopic .assertThatTopicEventuallyReceives( objectsProvider.matcherNameHeightKnowsJS("person1", 80, true), objectsProvider.matcherNameHeightKnowsJS("person2", 70, false)) .waitForUpTo(Duration.standardSeconds(40)); } @Test @SuppressWarnings("unchecked") public void testSQLInsertRowsToPubsubWithTimestampAttributeFlat() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + " event_timestamp TIMESTAMP, \n" + " name VARCHAR, \n" + " height INTEGER, \n" + " knowsJavascript BOOLEAN \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES " + " '{ " + " %s " + " \"deadLetterQueue\" : \"%s\"," + " \"timestampAttributeKey\" : \"ts\"" + " }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam(), dlqTopic.topicPath()); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); String queryString = "INSERT INTO message " + "VALUES " + "(TIMESTAMP '1970-01-01 00:00:00.001', 'person1', 80, TRUE), " + 
"(TIMESTAMP '1970-01-01 00:00:00.002', 'person2', 70, FALSE)"; query(sqlEnv, pipeline, queryString); pipeline.run().waitUntilFinish(Duration.standardMinutes(5)); eventsTopic .assertThatTopicEventuallyReceives( matcherTsNameHeightKnowsJS(ts(1), "person1", 80, true), matcherTsNameHeightKnowsJS(ts(2), "person2", 70, false)) .waitForUpTo(Duration.standardSeconds(40)); } @Test @SuppressWarnings("unchecked") public void testSQLReadAndWriteWithSameFlatTableDefinition() throws Exception { String tblProperties = objectsProvider.getPayloadFormat() == null ? "" : String.format( "TBLPROPERTIES '{\"format\": \"%s\"}'", objectsProvider.getPayloadFormat()); String createTableString = String.format( "CREATE EXTERNAL TABLE people (\n" + "event_timestamp TIMESTAMP, \n" + "name VARCHAR, \n" + "height INTEGER, \n" + "knowsJavascript BOOLEAN \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "%s", tableProvider.getTableType(), eventsTopic.topicPath(), tblProperties); String createFilteredTableString = String.format( "CREATE EXTERNAL TABLE javascript_people (\n" + "event_timestamp TIMESTAMP, \n" + "name VARCHAR, \n" + "height INTEGER \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "%s", tableProvider.getTableType(), filteredEventsTopic.topicPath(), tblProperties); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); sqlEnv.executeDdl(createFilteredTableString); String filterQueryString = "INSERT INTO javascript_people (name, height) (\n" + " SELECT \n" + " name, \n" + " height \n" + " FROM people \n" + " WHERE knowsJavascript \n" + ")"; String injectQueryString = "INSERT INTO people (name, height, knowsJavascript) VALUES \n" + "('person1', 80, TRUE), \n" + "('person2', 70, FALSE), \n" + "('person3', 60, TRUE), \n" + "('person4', 50, FALSE), \n" + "('person5', 40, TRUE)"; query(sqlEnv, filterPipeline, filterQueryString); query(sqlEnv, pipeline, injectQueryString); filterPipeline.run(); 
eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); pipeline.run().waitUntilFinish(Duration.standardMinutes(5)); filteredEventsTopic .assertThatTopicEventuallyReceives( objectsProvider.matcherNameHeight("person1", 80), objectsProvider.matcherNameHeight("person3", 60), objectsProvider.matcherNameHeight("person5", 40)) .waitForUpTo(Duration.standardMinutes(5)); } @SuppressWarnings("unchecked") private CalciteConnection connect(PipelineOptions options, TableProvider... tableProviders) { Map<String, String> argsMap = ((Map<String, Object>) MAPPER.convertValue(pipeline.getOptions(), Map.class).get("options")) .entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, entry -> toArg(entry.getValue()))); InMemoryMetaStore inMemoryMetaStore = new InMemoryMetaStore(); for (TableProvider tableProvider : tableProviders) { inMemoryMetaStore.registerProvider(tableProvider); } JdbcConnection connection = JdbcDriver.connect(inMemoryMetaStore, options); connection.setPipelineOptionsMap(argsMap); return connection; } private static String toArg(Object o) { try { String jsonRepr = MAPPER.writeValueAsString(o); if (jsonRepr.startsWith("\"") && jsonRepr.endsWith("\"")) { return jsonRepr.substring(1, jsonRepr.length() - 1); } else { return jsonRepr; } } catch (JsonProcessingException e) { throw new RuntimeException(e); } } private String payloadFormatParam() { return objectsProvider.getPayloadFormat() == null ? "" : String.format("\"format\" : \"%s\", ", objectsProvider.getPayloadFormat()); } private PCollection<Row> query(BeamSqlEnv sqlEnv, TestPipeline pipeline, String queryString) { return BeamSqlRelUtils.toPCollection(pipeline, sqlEnv.parseQuery(queryString)); } private Row row(Schema schema, Object... 
values) { return Row.withSchema(schema).addValues(values).build(); } private static PubsubMessage message( Instant timestamp, byte[] payload, Map<String, String> attributes) { return new PubsubMessage( payload, ImmutableMap.<String, String>builder() .putAll(attributes) .put("ts", String.valueOf(timestamp.getMillis())) .build()); } private Matcher<PubsubMessage> matcherTsNameHeightKnowsJS( Instant ts, String name, int height, boolean knowsJS) throws Exception { return allOf( objectsProvider.matcherNameHeightKnowsJS(name, height, knowsJS), hasProperty("attributeMap", hasEntry("ts", String.valueOf(ts.getMillis())))); } private Matcher<PubsubMessage> matcherPayload(Instant timestamp, String payload) { return allOf( hasProperty("payload", equalTo(payload.getBytes(StandardCharsets.US_ASCII))), hasProperty("attributeMap", hasEntry("ts", String.valueOf(timestamp.getMillis())))); } private Instant ts(long millis) { return Instant.ofEpochMilli(millis); } private PubsubMessage messagePayload( Instant timestamp, String payload, Map<String, String> attributes) { return message(timestamp, payload.getBytes(StandardCharsets.US_ASCII), attributes); } private abstract static class PubsubObjectProvider implements Serializable { protected abstract String getPayloadFormat(); protected abstract PubsubMessage messageIdName(Instant timestamp, int id, String name) throws Exception; protected abstract Matcher<PubsubMessage> matcherNames(String name) throws Exception; protected abstract Matcher<PubsubMessage> matcherNameHeightKnowsJS( String name, int height, boolean knowsJS) throws Exception; protected abstract Matcher<PubsubMessage> matcherNameHeight(String name, int height) throws Exception; } private static class PubsubJsonObjectProvider extends PubsubObjectProvider { @Override protected String getPayloadFormat() { return null; } @Override protected PubsubMessage messageIdName(Instant timestamp, int id, String name) { String jsonString = "{ \"id\" : " + id + ", \"name\" : \"" + name + 
"\" }"; return message(timestamp, jsonString, ImmutableMap.of(name, Integer.toString(id))); } @Override protected Matcher<PubsubMessage> matcherNames(String name) throws IOException { return hasProperty("payload", toJsonByteLike(String.format("{\"name\":\"%s\"}", name))); } @Override protected Matcher<PubsubMessage> matcherNameHeightKnowsJS( String name, int height, boolean knowsJS) throws IOException { String jsonString = String.format( "{\"name\":\"%s\", \"height\": %s, \"knowsJavascript\": %s}", name, height, knowsJS); return hasProperty("payload", toJsonByteLike(jsonString)); } @Override protected Matcher<PubsubMessage> matcherNameHeight(String name, int height) throws IOException { String jsonString = String.format("{\"name\":\"%s\", \"height\": %s}", name, height); return hasProperty("payload", toJsonByteLike(jsonString)); } private PubsubMessage message( Instant timestamp, String jsonPayload, Map<String, String> attributes) { return PubsubTableProviderIT.message(timestamp, jsonPayload.getBytes(UTF_8), attributes); } private Matcher<byte[]> toJsonByteLike(String jsonString) throws IOException { return jsonBytesLike(jsonString); } } private static class PubsubAvroObjectProvider extends PubsubObjectProvider { private static final Schema NAME_HEIGHT_KNOWS_JS_SCHEMA = Schema.builder() .addNullableField("name", Schema.FieldType.STRING) .addNullableField("height", Schema.FieldType.INT32) .addNullableField("knowsJavascript", Schema.FieldType.BOOLEAN) .build(); private static final Schema NAME_HEIGHT_SCHEMA = Schema.builder() .addNullableField("name", Schema.FieldType.STRING) .addNullableField("height", Schema.FieldType.INT32) .build(); @Override protected String getPayloadFormat() { return "avro"; } @Override protected PubsubMessage messageIdName(Instant timestamp, int id, String name) throws IOException { byte[] encodedRecord = createEncodedGenericRecord( PAYLOAD_SCHEMA, org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList.of( id, name)); 
return message(timestamp, encodedRecord, ImmutableMap.of(name, Integer.toString(id))); } @Override protected Matcher<PubsubMessage> matcherNames(String name) throws IOException { Schema schema = Schema.builder().addStringField("name").build(); byte[] encodedRecord = createEncodedGenericRecord( schema, org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList.of( name)); return hasProperty("payload", equalTo(encodedRecord)); } @Override protected Matcher<PubsubMessage> matcherNameHeight(String name, int height) throws IOException { byte[] encodedRecord = createEncodedGenericRecord( NAME_HEIGHT_SCHEMA, org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList.of( name, height)); return hasProperty("payload", equalTo(encodedRecord)); } @Override protected Matcher<PubsubMessage> matcherNameHeightKnowsJS( String name, int height, boolean knowsJS) throws IOException { byte[] encodedRecord = createEncodedGenericRecord( NAME_HEIGHT_KNOWS_JS_SCHEMA, org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList.of( name, height, knowsJS)); return hasProperty("payload", equalTo(encodedRecord)); } private byte[] createEncodedGenericRecord(Schema beamSchema, List<Object> values) throws IOException { org.apache.avro.Schema avroSchema = AvroUtils.toAvroSchema(beamSchema); GenericRecordBuilder builder = new GenericRecordBuilder(avroSchema); List<org.apache.avro.Schema.Field> fields = avroSchema.getFields(); for (int i = 0; i < fields.size(); ++i) { builder.set(fields.get(i), values.get(i)); } AvroCoder<GenericRecord> coder = AvroCoder.of(avroSchema); ByteArrayOutputStream out = new ByteArrayOutputStream(); coder.encode(builder.build(), out); return out.toByteArray(); } } }
class PubsubTableProviderIT implements Serializable { private static final Schema PAYLOAD_SCHEMA = Schema.builder() .addNullableField("id", Schema.FieldType.INT32) .addNullableField("name", Schema.FieldType.STRING) .build(); @Rule public transient TestPubsub eventsTopic = TestPubsub.create(); @Rule public transient TestPubsub filteredEventsTopic = TestPubsub.create(); @Rule public transient TestPubsub dlqTopic = TestPubsub.create(); @Rule public transient TestPubsubSignal resultSignal = TestPubsubSignal.create(); @Rule public transient TestPipeline pipeline = TestPipeline.create(); @Rule public transient TestPipeline filterPipeline = TestPipeline.create(); private final SchemaIOTableProviderWrapper tableProvider = new PubsubTableProvider(); @Parameters public static Collection<Object[]> data() { return Arrays.asList( new Object[][] {{new PubsubJsonObjectProvider()}, {new PubsubAvroObjectProvider()}}); } @Parameter public PubsubObjectProvider objectsProvider; /** * HACK: we need an objectmapper to turn pipelineoptions back into a map. We need to use * ReflectHelpers to get the extra PipelineOptions. 
*/ private static final ObjectMapper MAPPER = new ObjectMapper() .registerModules(ObjectMapper.findModules(ReflectHelpers.findClassLoader())); @Test public void testSQLSelectsPayloadContent() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "attributes MAP<VARCHAR, VARCHAR>, \n" + "payload ROW< \n" + " id INTEGER, \n" + " name VARCHAR \n" + " > \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES '{ " + "%s" + "\"timestampAttributeKey\" : \"ts\" }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam()); String queryString = "SELECT message.payload.id, message.payload.name from message"; List<PubsubMessage> messages = ImmutableList.of( objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz")); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); PCollection<Row> queryOutput = query(sqlEnv, pipeline, queryString); queryOutput.apply( "waitForSuccess", resultSignal.signalSuccessWhen( SchemaCoder.of(PAYLOAD_SCHEMA), observedRows -> observedRows.equals( ImmutableSet.of( row(PAYLOAD_SCHEMA, 3, "foo"), row(PAYLOAD_SCHEMA, 5, "bar"), row(PAYLOAD_SCHEMA, 7, "baz"))))); pipeline.run(); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); eventsTopic.publish(messages); resultSignal.waitForSuccess(Duration.standardMinutes(5)); } @Test @Test public void testSQLWithBytePayload() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "attributes MAP<VARCHAR, VARCHAR>, \n" + "payload VARBINARY \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES '{ " + "\"timestampAttributeKey\" : \"ts\" }'", tableProvider.getTableType(), eventsTopic.topicPath()); 
String queryString = "SELECT message.payload AS some_bytes FROM message"; List<PubsubMessage> messages = ImmutableList.of( objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz")); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); PCollection<Row> queryOutput = query(sqlEnv, pipeline, queryString); Schema justBytesSchema = Schema.builder().addField("some_bytes", FieldType.BYTES.withNullable(true)).build(); Row expectedRow0 = row(justBytesSchema, (Object) messages.get(0).getPayload()); Row expectedRow1 = row(justBytesSchema, (Object) messages.get(1).getPayload()); Row expectedRow2 = row(justBytesSchema, (Object) messages.get(2).getPayload()); Set<Row> expected = ImmutableSet.of(expectedRow0, expectedRow1, expectedRow2); queryOutput.apply( "waitForSuccess", resultSignal.signalSuccessWhen( SchemaCoder.of(justBytesSchema), observedRows -> observedRows.equals(expected))); pipeline.run(); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); eventsTopic.publish(messages); resultSignal.waitForSuccess(Duration.standardMinutes(5)); } @Test @SuppressWarnings("unchecked") public void testUsesDlq() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "attributes MAP<VARCHAR, VARCHAR>, \n" + "payload ROW< \n" + " id INTEGER, \n" + " name VARCHAR \n" + " > \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES " + " '{ " + " %s" + " \"timestampAttributeKey\" : \"ts\", " + " \"deadLetterQueue\" : \"%s\"" + " }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam(), dlqTopic.topicPath()); String queryString = "SELECT message.payload.id, message.payload.name from message"; List<PubsubMessage> messages = ImmutableList.of( 
objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz"), messagePayload(ts(4), "{ - }", ImmutableMap.of()), messagePayload(ts(5), "{ + }", ImmutableMap.of())); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); PCollection<Row> queryOutput = query(sqlEnv, pipeline, queryString); queryOutput.apply( "waitForSuccess", resultSignal.signalSuccessWhen( SchemaCoder.of(PAYLOAD_SCHEMA), observedRows -> observedRows.equals( ImmutableSet.of( row(PAYLOAD_SCHEMA, 3, "foo"), row(PAYLOAD_SCHEMA, 5, "bar"), row(PAYLOAD_SCHEMA, 7, "baz"))))); pipeline.run(); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); eventsTopic.publish(messages); resultSignal.waitForSuccess(Duration.standardMinutes(4)); dlqTopic .assertThatTopicEventuallyReceives( matcherPayload(ts(4), "{ - }"), matcherPayload(ts(5), "{ + }")) .waitForUpTo(Duration.standardSeconds(40)); } @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void testSQLLimit() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "attributes MAP<VARCHAR, VARCHAR>, \n" + "payload ROW< \n" + " id INTEGER, \n" + " name VARCHAR \n" + " > \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES " + " '{ " + " %s" + " \"timestampAttributeKey\" : \"ts\", " + " \"deadLetterQueue\" : \"%s\"" + " }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam(), dlqTopic.topicPath()); List<PubsubMessage> messages = ImmutableList.of( objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz"), objectsProvider.messageIdName(ts(4), 9, "ba2"), objectsProvider.messageIdName(ts(5), 10, "ba3"), objectsProvider.messageIdName(ts(6), 13, "ba4"), 
objectsProvider.messageIdName(ts(7), 15, "ba5")); CalciteConnection connection = connect(pipeline.getOptions(), new PubsubTableProvider()); Statement statement = connection.createStatement(); statement.execute(createTableString); ExecutorService pool = Executors.newFixedThreadPool(1); Future<List<String>> queryResult = pool.submit( (Callable) () -> { ResultSet resultSet = statement.executeQuery("SELECT message.payload.id FROM message LIMIT 3"); ImmutableList.Builder<String> result = ImmutableList.builder(); while (resultSet.next()) { result.add(resultSet.getString(1)); } return result.build(); }); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); eventsTopic.publish(messages); assertThat(queryResult.get(2, TimeUnit.MINUTES).size(), equalTo(3)); pool.shutdown(); } @Test public void testSQLSelectsPayloadContentFlat() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "id INTEGER, \n" + "name VARCHAR \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES " + " '{ " + " %s" + " \"timestampAttributeKey\" : \"ts\" " + " }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam()); String queryString = "SELECT message.id, message.name from message"; List<PubsubMessage> messages = ImmutableList.of( objectsProvider.messageIdName(ts(1), 3, "foo"), objectsProvider.messageIdName(ts(2), 5, "bar"), objectsProvider.messageIdName(ts(3), 7, "baz")); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); PCollection<Row> queryOutput = query(sqlEnv, pipeline, queryString); queryOutput.apply( "waitForSuccess", resultSignal.signalSuccessWhen( SchemaCoder.of(PAYLOAD_SCHEMA), observedRows -> observedRows.equals( ImmutableSet.of( row(PAYLOAD_SCHEMA, 3, "foo"), row(PAYLOAD_SCHEMA, 5, "bar"), row(PAYLOAD_SCHEMA, 7, "baz"))))); pipeline.run(); 
eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); eventsTopic.publish(messages); resultSignal.waitForSuccess(Duration.standardMinutes(5)); } @Test @SuppressWarnings("unchecked") public void testSQLInsertRowsToPubsubFlat() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + "event_timestamp TIMESTAMP, \n" + "name VARCHAR, \n" + "height INTEGER, \n" + "knowsJavascript BOOLEAN \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES " + " '{ " + " %s" + " \"deadLetterQueue\" : \"%s\"" + " }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam(), dlqTopic.topicPath()); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); String queryString = "INSERT INTO message (name, height, knowsJavascript) \n" + "VALUES \n" + "('person1', 80, TRUE), \n" + "('person2', 70, FALSE)"; query(sqlEnv, pipeline, queryString); pipeline.run().waitUntilFinish(Duration.standardMinutes(5)); eventsTopic .assertThatTopicEventuallyReceives( objectsProvider.matcherNameHeightKnowsJS("person1", 80, true), objectsProvider.matcherNameHeightKnowsJS("person2", 70, false)) .waitForUpTo(Duration.standardSeconds(40)); } @Test @SuppressWarnings("unchecked") public void testSQLInsertRowsToPubsubWithTimestampAttributeFlat() throws Exception { String createTableString = String.format( "CREATE EXTERNAL TABLE message (\n" + " event_timestamp TIMESTAMP, \n" + " name VARCHAR, \n" + " height INTEGER, \n" + " knowsJavascript BOOLEAN \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "TBLPROPERTIES " + " '{ " + " %s " + " \"deadLetterQueue\" : \"%s\"," + " \"timestampAttributeKey\" : \"ts\"" + " }'", tableProvider.getTableType(), eventsTopic.topicPath(), payloadFormatParam(), dlqTopic.topicPath()); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); 
sqlEnv.executeDdl(createTableString); String queryString = "INSERT INTO message " + "VALUES " + "(TIMESTAMP '1970-01-01 00:00:00.001', 'person1', 80, TRUE), " + "(TIMESTAMP '1970-01-01 00:00:00.002', 'person2', 70, FALSE)"; query(sqlEnv, pipeline, queryString); pipeline.run().waitUntilFinish(Duration.standardMinutes(5)); eventsTopic .assertThatTopicEventuallyReceives( matcherTsNameHeightKnowsJS(ts(1), "person1", 80, true), matcherTsNameHeightKnowsJS(ts(2), "person2", 70, false)) .waitForUpTo(Duration.standardSeconds(40)); } @Test @SuppressWarnings("unchecked") public void testSQLReadAndWriteWithSameFlatTableDefinition() throws Exception { String tblProperties = objectsProvider.getPayloadFormat() == null ? "" : String.format( "TBLPROPERTIES '{\"format\": \"%s\"}'", objectsProvider.getPayloadFormat()); String createTableString = String.format( "CREATE EXTERNAL TABLE people (\n" + "event_timestamp TIMESTAMP, \n" + "name VARCHAR, \n" + "height INTEGER, \n" + "knowsJavascript BOOLEAN \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "%s", tableProvider.getTableType(), eventsTopic.topicPath(), tblProperties); String createFilteredTableString = String.format( "CREATE EXTERNAL TABLE javascript_people (\n" + "event_timestamp TIMESTAMP, \n" + "name VARCHAR, \n" + "height INTEGER \n" + ") \n" + "TYPE '%s' \n" + "LOCATION '%s' \n" + "%s", tableProvider.getTableType(), filteredEventsTopic.topicPath(), tblProperties); BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider()); sqlEnv.executeDdl(createTableString); sqlEnv.executeDdl(createFilteredTableString); String filterQueryString = "INSERT INTO javascript_people (name, height) (\n" + " SELECT \n" + " name, \n" + " height \n" + " FROM people \n" + " WHERE knowsJavascript \n" + ")"; String injectQueryString = "INSERT INTO people (name, height, knowsJavascript) VALUES \n" + "('person1', 80, TRUE), \n" + "('person2', 70, FALSE), \n" + "('person3', 60, TRUE), \n" + "('person4', 50, FALSE), \n" + "('person5', 40, 
TRUE)"; query(sqlEnv, filterPipeline, filterQueryString); query(sqlEnv, pipeline, injectQueryString); filterPipeline.run(); eventsTopic.assertSubscriptionEventuallyCreated( pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5)); pipeline.run().waitUntilFinish(Duration.standardMinutes(5)); filteredEventsTopic .assertThatTopicEventuallyReceives( objectsProvider.matcherNameHeight("person1", 80), objectsProvider.matcherNameHeight("person3", 60), objectsProvider.matcherNameHeight("person5", 40)) .waitForUpTo(Duration.standardMinutes(5)); } @SuppressWarnings("unchecked") private CalciteConnection connect(PipelineOptions options, TableProvider... tableProviders) { Map<String, String> argsMap = ((Map<String, Object>) MAPPER.convertValue(pipeline.getOptions(), Map.class).get("options")) .entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, entry -> toArg(entry.getValue()))); InMemoryMetaStore inMemoryMetaStore = new InMemoryMetaStore(); for (TableProvider tableProvider : tableProviders) { inMemoryMetaStore.registerProvider(tableProvider); } JdbcConnection connection = JdbcDriver.connect(inMemoryMetaStore, options); connection.setPipelineOptionsMap(argsMap); return connection; } private static String toArg(Object o) { try { String jsonRepr = MAPPER.writeValueAsString(o); if (jsonRepr.startsWith("\"") && jsonRepr.endsWith("\"")) { return jsonRepr.substring(1, jsonRepr.length() - 1); } else { return jsonRepr; } } catch (JsonProcessingException e) { throw new RuntimeException(e); } } private String payloadFormatParam() { return objectsProvider.getPayloadFormat() == null ? "" : String.format("\"format\" : \"%s\", ", objectsProvider.getPayloadFormat()); } private PCollection<Row> query(BeamSqlEnv sqlEnv, TestPipeline pipeline, String queryString) { return BeamSqlRelUtils.toPCollection(pipeline, sqlEnv.parseQuery(queryString)); } private Row row(Schema schema, Object... 
values) { return Row.withSchema(schema).addValues(values).build(); } private static PubsubMessage message( Instant timestamp, byte[] payload, Map<String, String> attributes) { return new PubsubMessage( payload, ImmutableMap.<String, String>builder() .putAll(attributes) .put("ts", String.valueOf(timestamp.getMillis())) .build()); } private Matcher<PubsubMessage> matcherTsNameHeightKnowsJS( Instant ts, String name, int height, boolean knowsJS) throws Exception { return allOf( objectsProvider.matcherNameHeightKnowsJS(name, height, knowsJS), hasProperty("attributeMap", hasEntry("ts", String.valueOf(ts.getMillis())))); } private Matcher<PubsubMessage> matcherPayload(Instant timestamp, String payload) { return allOf( hasProperty("payload", equalTo(payload.getBytes(StandardCharsets.US_ASCII))), hasProperty("attributeMap", hasEntry("ts", String.valueOf(timestamp.getMillis())))); } private Instant ts(long millis) { return Instant.ofEpochMilli(millis); } private PubsubMessage messagePayload( Instant timestamp, String payload, Map<String, String> attributes) { return message(timestamp, payload.getBytes(StandardCharsets.US_ASCII), attributes); } private abstract static class PubsubObjectProvider implements Serializable { protected abstract String getPayloadFormat(); protected abstract PubsubMessage messageIdName(Instant timestamp, int id, String name) throws Exception; protected abstract Matcher<PubsubMessage> matcherNames(String name) throws Exception; protected abstract Matcher<PubsubMessage> matcherNameHeightKnowsJS( String name, int height, boolean knowsJS) throws Exception; protected abstract Matcher<PubsubMessage> matcherNameHeight(String name, int height) throws Exception; } private static class PubsubJsonObjectProvider extends PubsubObjectProvider { @Override protected String getPayloadFormat() { return null; } @Override protected PubsubMessage messageIdName(Instant timestamp, int id, String name) { String jsonString = "{ \"id\" : " + id + ", \"name\" : \"" + name + 
"\" }"; return message(timestamp, jsonString, ImmutableMap.of(name, Integer.toString(id))); } @Override protected Matcher<PubsubMessage> matcherNames(String name) throws IOException { return hasProperty("payload", toJsonByteLike(String.format("{\"name\":\"%s\"}", name))); } @Override protected Matcher<PubsubMessage> matcherNameHeightKnowsJS( String name, int height, boolean knowsJS) throws IOException { String jsonString = String.format( "{\"name\":\"%s\", \"height\": %s, \"knowsJavascript\": %s}", name, height, knowsJS); return hasProperty("payload", toJsonByteLike(jsonString)); } @Override protected Matcher<PubsubMessage> matcherNameHeight(String name, int height) throws IOException { String jsonString = String.format("{\"name\":\"%s\", \"height\": %s}", name, height); return hasProperty("payload", toJsonByteLike(jsonString)); } private PubsubMessage message( Instant timestamp, String jsonPayload, Map<String, String> attributes) { return PubsubTableProviderIT.message(timestamp, jsonPayload.getBytes(UTF_8), attributes); } private Matcher<byte[]> toJsonByteLike(String jsonString) throws IOException { return jsonBytesLike(jsonString); } } private static class PubsubAvroObjectProvider extends PubsubObjectProvider { private static final Schema NAME_HEIGHT_KNOWS_JS_SCHEMA = Schema.builder() .addNullableField("name", Schema.FieldType.STRING) .addNullableField("height", Schema.FieldType.INT32) .addNullableField("knowsJavascript", Schema.FieldType.BOOLEAN) .build(); private static final Schema NAME_HEIGHT_SCHEMA = Schema.builder() .addNullableField("name", Schema.FieldType.STRING) .addNullableField("height", Schema.FieldType.INT32) .build(); @Override protected String getPayloadFormat() { return "avro"; } @Override protected PubsubMessage messageIdName(Instant timestamp, int id, String name) throws IOException { byte[] encodedRecord = createEncodedGenericRecord( PAYLOAD_SCHEMA, org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList.of( id, name)); 
return message(timestamp, encodedRecord, ImmutableMap.of(name, Integer.toString(id))); } @Override protected Matcher<PubsubMessage> matcherNames(String name) throws IOException { Schema schema = Schema.builder().addStringField("name").build(); byte[] encodedRecord = createEncodedGenericRecord( schema, org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList.of( name)); return hasProperty("payload", equalTo(encodedRecord)); } @Override protected Matcher<PubsubMessage> matcherNameHeight(String name, int height) throws IOException { byte[] encodedRecord = createEncodedGenericRecord( NAME_HEIGHT_SCHEMA, org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList.of( name, height)); return hasProperty("payload", equalTo(encodedRecord)); } @Override protected Matcher<PubsubMessage> matcherNameHeightKnowsJS( String name, int height, boolean knowsJS) throws IOException { byte[] encodedRecord = createEncodedGenericRecord( NAME_HEIGHT_KNOWS_JS_SCHEMA, org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList.of( name, height, knowsJS)); return hasProperty("payload", equalTo(encodedRecord)); } private byte[] createEncodedGenericRecord(Schema beamSchema, List<Object> values) throws IOException { org.apache.avro.Schema avroSchema = AvroUtils.toAvroSchema(beamSchema); GenericRecordBuilder builder = new GenericRecordBuilder(avroSchema); List<org.apache.avro.Schema.Field> fields = avroSchema.getFields(); for (int i = 0; i < fields.size(); ++i) { builder.set(fields.get(i), values.get(i)); } AvroCoder<GenericRecord> coder = AvroCoder.of(avroSchema); ByteArrayOutputStream out = new ByteArrayOutputStream(); coder.encode(builder.build(), out); return out.toByteArray(); } } }
#37039 is resolved and add relevant changes in `2.0-stage` branch. So eventually it will merge into `2.x` branch and it will merge into the `master` branch as well.
public List<String> skipList() { return Arrays.asList("function_typedesc17.json"); }
public List<String> skipList() { return Arrays.asList("function_typedesc17.json"); }
class TypeDescContextTest extends CompletionTest { @Test(dataProvider = "completion-data-provider") @Override public void test(String config, String configPath) throws WorkspaceDocumentException, IOException { super.test(config, configPath); } @DataProvider(name = "completion-data-provider") @Override public Object[][] dataProvider() { return this.getConfigsList(); } @Override public String getTestResourceDir() { return "typedesc_context"; } @Override }
class TypeDescContextTest extends CompletionTest { @Test(dataProvider = "completion-data-provider") @Override public void test(String config, String configPath) throws WorkspaceDocumentException, IOException { super.test(config, configPath); } @DataProvider(name = "completion-data-provider") @Override public Object[][] dataProvider() { return this.getConfigsList(); } @Override public String getTestResourceDir() { return "typedesc_context"; } @Override }
We can say `isDuplicate` then. It's more intuitive.
private STNode createFuncDefNodeList(List<STNode> qualifierList) { List<STNode> validatedList = new ArrayList<>(); for (int i = 0; i < qualifierList.size(); i++) { STNode qualifier = qualifierList.get(i); int nextIndex = i + 1; if (isNodeWithSyntaxKindInList(validatedList, qualifier.kind)) { updateLastNodeInListWithInvalidNode(validatedList, qualifier, DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text()); } else if (isValidFuncDefQualifier(qualifier.kind)) { validatedList.add(qualifier); } else if (qualifierList.size() != nextIndex) { updateANodeInListWithInvalidNode(qualifierList, nextIndex, qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text()); } } return STNodeFactory.createNodeList(validatedList); }
if (isNodeWithSyntaxKindInList(validatedList, qualifier.kind)) {
private STNode createFuncDefNodeList(List<STNode> qualifierList) { List<STNode> validatedList = new ArrayList<>(); for (int i = 0; i < qualifierList.size(); i++) { STNode qualifier = qualifierList.get(i); int nextIndex = i + 1; if (isDuplicate(validatedList, qualifier.kind)) { updateLastNodeInListWithInvalidNode(validatedList, qualifier, DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text()); } else if (isValidFuncDefQualifier(qualifier.kind)) { validatedList.add(qualifier); } else if (qualifierList.size() != nextIndex) { updateANodeInListWithInvalidNode(qualifierList, nextIndex, qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text()); } } return STNodeFactory.createNodeList(validatedList); }
class definition. switch (nextNextToken.kind) { case CLIENT_KEYWORD: case READONLY_KEYWORD: case ISOLATED_KEYWORD: case CLASS_KEYWORD: return true; default: return false; }
class definition. switch (nextNextToken.kind) { case CLIENT_KEYWORD: case READONLY_KEYWORD: case ISOLATED_KEYWORD: case CLASS_KEYWORD: return true; default: return false; }
@IMS94 I didn't get any issues when testing this in a project, anyway will have a look. `endsWith()` is a string matching and has some more weird behaviors such as matching with other module's file with the same name(diag location.filePath() contains the relative path only).
public static List<CodeAction> getAvailableCodeActions(CodeActionContext ctx) { LSClientLogger clientLogger = LSClientLogger.getInstance(ctx.languageServercontext()); List<CodeAction> codeActions = new ArrayList<>(); CodeActionProvidersHolder codeActionProvidersHolder = CodeActionProvidersHolder.getInstance(ctx.languageServercontext()); SyntaxTree syntaxTree = ctx.workspace().syntaxTree(ctx.filePath()).orElseThrow(); Optional<NonTerminalNode> matchedNode = CodeActionUtil.getTopLevelNode(ctx.cursorPosition(), syntaxTree); CodeActionNodeType matchedNodeType = CodeActionUtil.codeActionNodeType(matchedNode.orElse(null)); SemanticModel semanticModel = ctx.workspace().semanticModel(ctx.filePath()).orElseThrow(); if (matchedNode.isPresent() && matchedNodeType != CodeActionNodeType.NONE) { Range range = CommonUtil.toRange(matchedNode.get().lineRange()); Node expressionNode = CodeActionUtil.largestExpressionNode(matchedNode.get(), range); TypeSymbol matchedTypeSymbol = semanticModel.type(expressionNode.lineRange()).orElse(null); PositionDetails posDetails = CodeActionPositionDetails.from(matchedNode.get(), null, matchedTypeSymbol); ctx.setPositionDetails(posDetails); codeActionProvidersHolder.getActiveNodeBasedProviders(matchedNodeType, ctx).forEach(provider -> { try { List<CodeAction> codeActionsOut = provider.getNodeBasedCodeActions(ctx); if (codeActionsOut != null) { codeActions.addAll(codeActionsOut); } } catch (Exception e) { String msg = "CodeAction '" + provider.getClass().getSimpleName() + "' failed!"; clientLogger.logError(msg, e, null, (Position) null); } }); } Project project = ctx.workspace().project(ctx.filePath()).orElseThrow(); Path projectRoot = (project.kind() == ProjectKind.SINGLE_FILE_PROJECT) ? project.sourceRoot().getParent() : project.sourceRoot(); ctx.allDiagnostics().stream(). 
filter(diag -> projectRoot.resolve(diag.location().lineRange().filePath()).equals(ctx.filePath()) && CommonUtil.isWithinRange(ctx.cursorPosition(), CommonUtil.toRange(diag.location().lineRange())) ) .forEach(diagnostic -> { Range range = CommonUtil.toRange(diagnostic.location().lineRange()); PositionDetails positionDetails = computePositionDetails(range, syntaxTree, ctx); ctx.setPositionDetails(positionDetails); codeActionProvidersHolder.getActiveDiagnosticsBasedProviders(ctx).forEach(provider -> { try { List<CodeAction> codeActionsOut = provider.getDiagBasedCodeActions(diagnostic, ctx); if (codeActionsOut != null) { codeActions.addAll(codeActionsOut); } } catch (Exception e) { String msg = "CodeAction '" + provider.getClass().getSimpleName() + "' failed!"; clientLogger.logError(msg, e, null, (Position) null); } }); }); return codeActions; }
CommonUtil.isWithinRange(ctx.cursorPosition(), CommonUtil.toRange(diag.location().lineRange()))
public static List<CodeAction> getAvailableCodeActions(CodeActionContext ctx) { LSClientLogger clientLogger = LSClientLogger.getInstance(ctx.languageServercontext()); List<CodeAction> codeActions = new ArrayList<>(); CodeActionProvidersHolder codeActionProvidersHolder = CodeActionProvidersHolder.getInstance(ctx.languageServercontext()); SyntaxTree syntaxTree = ctx.workspace().syntaxTree(ctx.filePath()).orElseThrow(); Optional<NonTerminalNode> matchedNode = CodeActionUtil.getTopLevelNode(ctx.cursorPosition(), syntaxTree); CodeActionNodeType matchedNodeType = CodeActionUtil.codeActionNodeType(matchedNode.orElse(null)); SemanticModel semanticModel = ctx.workspace().semanticModel(ctx.filePath()).orElseThrow(); if (matchedNode.isPresent() && matchedNodeType != CodeActionNodeType.NONE) { Range range = CommonUtil.toRange(matchedNode.get().lineRange()); Node expressionNode = CodeActionUtil.largestExpressionNode(matchedNode.get(), range); TypeSymbol matchedTypeSymbol = semanticModel.type(expressionNode.lineRange()).orElse(null); PositionDetails posDetails = CodeActionPositionDetails.from(matchedNode.get(), null, matchedTypeSymbol); ctx.setPositionDetails(posDetails); codeActionProvidersHolder.getActiveNodeBasedProviders(matchedNodeType, ctx).forEach(provider -> { try { List<CodeAction> codeActionsOut = provider.getNodeBasedCodeActions(ctx); if (codeActionsOut != null) { codeActions.addAll(codeActionsOut); } } catch (Exception e) { String msg = "CodeAction '" + provider.getClass().getSimpleName() + "' failed!"; clientLogger.logError(msg, e, null, (Position) null); } }); } Project project = ctx.workspace().project(ctx.filePath()).orElseThrow(); Path projectRoot = (project.kind() == ProjectKind.SINGLE_FILE_PROJECT) ? project.sourceRoot().getParent() : project.sourceRoot(); ctx.allDiagnostics().stream(). 
filter(diag -> projectRoot.resolve(diag.location().lineRange().filePath()).equals(ctx.filePath()) && CommonUtil.isWithinRange(ctx.cursorPosition(), CommonUtil.toRange(diag.location().lineRange())) ) .forEach(diagnostic -> { Range range = CommonUtil.toRange(diagnostic.location().lineRange()); PositionDetails positionDetails = computePositionDetails(range, syntaxTree, ctx); ctx.setPositionDetails(positionDetails); codeActionProvidersHolder.getActiveDiagnosticsBasedProviders(ctx).forEach(provider -> { try { List<CodeAction> codeActionsOut = provider.getDiagBasedCodeActions(diagnostic, ctx); if (codeActionsOut != null) { codeActions.addAll(codeActionsOut); } } catch (Exception e) { String msg = "CodeAction '" + provider.getClass().getSimpleName() + "' failed!"; clientLogger.logError(msg, e, null, (Position) null); } }); }); return codeActions; }
class CodeActionRouter { /** * Returns a list of supported code actions. * * @param ctx {@link CodeActionContext} * @return list of code actions */ }
class CodeActionRouter { /** * Returns a list of supported code actions. * * @param ctx {@link CodeActionContext} * @return list of code actions */ }
I'd prefer it to log and throw - it's an invalid Content-Range header and there is no recovery from it
public static long extractSizeFromContentRange(String contentRange) { Objects.requireNonNull(contentRange, "Cannot extract length from null 'contentRange'."); int index = contentRange.indexOf('/'); if (index == -1) { return -2; } String sizeString = contentRange.substring(index + 1).trim(); if ("*".equals(sizeString)) { return -1; } return Long.parseLong(sizeString); }
return -2;
public static long extractSizeFromContentRange(String contentRange) { Objects.requireNonNull(contentRange, "Cannot extract length from null 'contentRange'."); int index = contentRange.indexOf('/'); if (index == -1) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("The Content-Range header wasn't properly " + "formatted and didn't contain a '/size' segment. The 'contentRange' was: " + contentRange)); } String sizeString = contentRange.substring(index + 1).trim(); if ("*".equals(sizeString)) { return -1; } return Long.parseLong(sizeString); }
class from an array of Objects. * * @param args Array of objects to search through to find the first instance of the given `clazz` type. * @param clazz The type trying to be found. * @param <T> Generic type * @return The first object of the desired type, otherwise null. */ public static <T> T findFirstOfType(Object[] args, Class<T> clazz) { if (isNullOrEmpty(args)) { return null; } for (Object arg : args) { if (clazz.isInstance(arg)) { return clazz.cast(arg); } } return null; }
class from an array of Objects. * * @param args Array of objects to search through to find the first instance of the given `clazz` type. * @param clazz The type trying to be found. * @param <T> Generic type * @return The first object of the desired type, otherwise null. */ public static <T> T findFirstOfType(Object[] args, Class<T> clazz) { if (isNullOrEmpty(args)) { return null; } for (Object arg : args) { if (clazz.isInstance(arg)) { return clazz.cast(arg); } } return null; }
Why modify this? I think `org.assertj.core.api.Assertions.assertThat` is suggested test API.
void testCreateAndCloseSessions() throws Exception { List<SessionHandle> sessionHandles = new ArrayList<>(); Set<String> sessionHandleIds = new HashSet<>(); for (int num = 0; num < SESSION_NUMBER; ++num) { CompletableFuture<OpenSessionResponseBody> response = sendRequest(openSessionHeaders, emptyParameters, openSessionRequestBody); String sessionHandleId = response.get().getSessionHandle(); assertNotNull(sessionHandleId); sessionHandleIds.add(sessionHandleId); SessionHandle sessionHandle = new SessionHandle(UUID.fromString(sessionHandleId)); assertNotNull( SQL_GATEWAY_SERVICE_EXTENSION.getSessionManager().getSession(sessionHandle)); sessionHandles.add(sessionHandle); } assertThat(sessionHandleIds).hasSize(SESSION_NUMBER); for (int num = 0; num < SESSION_NUMBER; ++num) { SessionHandle sessionHandle = sessionHandles.get(num); SessionMessageParameters sessionMessageParameters = new SessionMessageParameters(sessionHandle); CompletableFuture<CloseSessionResponseBody> response = sendRequest(closeSessionHeaders, sessionMessageParameters, emptyRequestBody); String status = response.get().getStatus(); assertThat(status).isEqualTo(CLOSE_MESSAGE); assertThatThrownBy( () -> SQL_GATEWAY_SERVICE_EXTENSION .getSessionManager() .getSession(sessionHandle)) .isInstanceOf(SqlGatewayException.class); CompletableFuture<CloseSessionResponseBody> response2 = sendRequest(closeSessionHeaders, sessionMessageParameters, emptyRequestBody); assertThatThrownBy(response2::get).isInstanceOf(ExecutionException.class); } }
assertNotNull(sessionHandleId);
void testCreateAndCloseSessions() throws Exception { List<SessionHandle> sessionHandles = new ArrayList<>(); Set<String> sessionHandleIds = new HashSet<>(); for (int num = 0; num < SESSION_NUMBER; ++num) { CompletableFuture<OpenSessionResponseBody> response = sendRequest(openSessionHeaders, emptyParameters, openSessionRequestBody); String sessionHandleId = response.get().getSessionHandle(); assertThat(sessionHandleId).isNotNull(); sessionHandleIds.add(sessionHandleId); SessionHandle sessionHandle = new SessionHandle(UUID.fromString(sessionHandleId)); assertThat(SQL_GATEWAY_SERVICE_EXTENSION.getSessionManager().getSession(sessionHandle)) .isNotNull(); sessionHandles.add(sessionHandle); } assertThat(sessionHandleIds).hasSize(SESSION_NUMBER); for (int num = 0; num < SESSION_NUMBER; ++num) { SessionHandle sessionHandle = sessionHandles.get(num); SessionMessageParameters sessionMessageParameters = new SessionMessageParameters(sessionHandle); CompletableFuture<CloseSessionResponseBody> response = sendRequest(closeSessionHeaders, sessionMessageParameters, emptyRequestBody); String status = response.get().getStatus(); assertThat(status).isEqualTo(CLOSE_MESSAGE); assertThatThrownBy( () -> SQL_GATEWAY_SERVICE_EXTENSION .getSessionManager() .getSession(sessionHandle)) .isInstanceOf(SqlGatewayException.class); CompletableFuture<CloseSessionResponseBody> response2 = sendRequest(closeSessionHeaders, sessionMessageParameters, emptyRequestBody); assertThatThrownBy(response2::get).isInstanceOf(ExecutionException.class); } }
class SessionRelatedITCase extends RestAPIITCaseBase { private static final String SESSION_NAME = "test"; private static final Map<String, String> properties = new HashMap<>(); private static final int SESSION_NUMBER = 10; static { properties.put("k1", "v1"); properties.put("k2", "v2"); } private static final OpenSessionHeaders openSessionHeaders = OpenSessionHeaders.getInstance(); private static final OpenSessionRequestBody openSessionRequestBody = new OpenSessionRequestBody(SESSION_NAME, properties); private static final EmptyMessageParameters emptyParameters = EmptyMessageParameters.getInstance(); private static final CloseSessionHeaders closeSessionHeaders = CloseSessionHeaders.getInstance(); private static final EmptyRequestBody emptyRequestBody = EmptyRequestBody.getInstance(); private SessionHandle sessionHandle; private SessionMessageParameters sessionMessageParameters; @BeforeEach public void setUp() throws Exception { CompletableFuture<OpenSessionResponseBody> response = sendRequest(openSessionHeaders, emptyParameters, openSessionRequestBody); String sessionHandleId = response.get().getSessionHandle(); assertNotNull(sessionHandleId); sessionHandle = new SessionHandle(UUID.fromString(sessionHandleId)); assertNotNull(SQL_GATEWAY_SERVICE_EXTENSION.getSessionManager().getSession(sessionHandle)); sessionMessageParameters = new SessionMessageParameters(sessionHandle); } @AfterEach public void cleanUp() throws Exception { CompletableFuture<CloseSessionResponseBody> response = sendRequest(closeSessionHeaders, sessionMessageParameters, emptyRequestBody); String status = response.get().getStatus(); assertThat(status).isEqualTo(CLOSE_MESSAGE); } @Test @Test void testGetSessionConfiguration() throws Exception { CompletableFuture<GetSessionConfigResponseBody> future = sendRequest( GetSessionConfigHeaders.getInstance(), sessionMessageParameters, emptyRequestBody); Map<String, String> getProperties = future.get().getProperties(); for (String key : properties.keySet()) { 
assertThat(properties.get(key)).isEqualTo(getProperties.get(key)); } } @Test void testTouchSession() throws Exception { Session session = SQL_GATEWAY_SERVICE_EXTENSION.getSessionManager().getSession(sessionHandle); assertNotNull(session); long lastAccessTime = session.getLastAccessTime(); CompletableFuture<EmptyResponseBody> future = sendRequest( TriggerSessionHeartbeatHeaders.getInstance(), sessionMessageParameters, emptyRequestBody); future.get(); assertThat(session.getLastAccessTime() > lastAccessTime).isTrue(); } @Test void testConfigureSession() throws Exception { ConfigureSessionRequestBody configureSessionRequestBody = new ConfigureSessionRequestBody("set 'test' = 'configure';", -1L); CompletableFuture<EmptyResponseBody> response = sendRequest( ConfigureSessionHeaders.getINSTANCE(), sessionMessageParameters, configureSessionRequestBody); response.get(); assertThat( SQL_GATEWAY_SERVICE_EXTENSION .getSessionManager() .getSession(sessionHandle) .getSessionConfig()) .containsEntry("test", "configure"); } }
class SessionRelatedITCase extends RestAPIITCaseBase { private static final String SESSION_NAME = "test"; private static final Map<String, String> properties = new HashMap<>(); private static final int SESSION_NUMBER = 10; static { properties.put("k1", "v1"); properties.put("k2", "v2"); } private static final OpenSessionHeaders openSessionHeaders = OpenSessionHeaders.getInstance(); private static final OpenSessionRequestBody openSessionRequestBody = new OpenSessionRequestBody(SESSION_NAME, properties); private static final EmptyMessageParameters emptyParameters = EmptyMessageParameters.getInstance(); private static final CloseSessionHeaders closeSessionHeaders = CloseSessionHeaders.getInstance(); private static final EmptyRequestBody emptyRequestBody = EmptyRequestBody.getInstance(); private SessionHandle sessionHandle; private SessionMessageParameters sessionMessageParameters; @BeforeEach public void setUp() throws Exception { CompletableFuture<OpenSessionResponseBody> response = sendRequest(openSessionHeaders, emptyParameters, openSessionRequestBody); String sessionHandleId = response.get().getSessionHandle(); assertThat(sessionHandleId).isNotNull(); sessionHandle = new SessionHandle(UUID.fromString(sessionHandleId)); assertThat(SQL_GATEWAY_SERVICE_EXTENSION.getSessionManager().getSession(sessionHandle)) .isNotNull(); sessionMessageParameters = new SessionMessageParameters(sessionHandle); } @AfterEach public void cleanUp() throws Exception { CompletableFuture<CloseSessionResponseBody> response = sendRequest(closeSessionHeaders, sessionMessageParameters, emptyRequestBody); String status = response.get().getStatus(); assertThat(status).isEqualTo(CLOSE_MESSAGE); } @Test @Test void testGetSessionConfiguration() throws Exception { CompletableFuture<GetSessionConfigResponseBody> future = sendRequest( GetSessionConfigHeaders.getInstance(), sessionMessageParameters, emptyRequestBody); Map<String, String> getProperties = future.get().getProperties(); for (String key : 
properties.keySet()) { assertThat(properties.get(key)).isEqualTo(getProperties.get(key)); } } @Test void testTouchSession() throws Exception { Session session = SQL_GATEWAY_SERVICE_EXTENSION.getSessionManager().getSession(sessionHandle); assertThat(session).isNotNull(); long lastAccessTime = session.getLastAccessTime(); CompletableFuture<EmptyResponseBody> future = sendRequest( TriggerSessionHeartbeatHeaders.getInstance(), sessionMessageParameters, emptyRequestBody); future.get(); assertThat(session.getLastAccessTime() > lastAccessTime).isTrue(); } @Test void testConfigureSession() throws Exception { ConfigureSessionRequestBody configureSessionRequestBody = new ConfigureSessionRequestBody("set 'test' = 'configure';", -1L); CompletableFuture<EmptyResponseBody> response = sendRequest( ConfigureSessionHeaders.getINSTANCE(), sessionMessageParameters, configureSessionRequestBody); response.get(); assertThat( SQL_GATEWAY_SERVICE_EXTENSION .getSessionManager() .getSession(sessionHandle) .getSessionConfig()) .containsEntry("test", "configure"); } }
what will happen if not set startTimemMs?
public void analyze(Analyzer analyzer) throws UserException { super.analyze(analyzer); checkAuth(); labelName.analyze(analyzer); String dbName = labelName.getDbName(); Env.getCurrentInternalCatalog().getDbOrAnalysisException(dbName); analyzerSqlStmt(); JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration(); jobExecutionConfiguration.setExecuteType(executeType); TimerDefinition timerDefinition = new TimerDefinition(); if (null != onceJobStartTimestamp) { if (onceJobStartTimestamp.equalsIgnoreCase(CURRENT_TIMESTAMP_STRING)) { jobExecutionConfiguration.setImmediate(true); timerDefinition.setStartTimeMs(System.currentTimeMillis() - 100L); } else { timerDefinition.setStartTimeMs(TimeUtils.timeStringToLong(onceJobStartTimestamp)); } } if (null != interval) { timerDefinition.setInterval(interval); } if (null != intervalTimeUnit) { IntervalUnit intervalUnit = IntervalUnit.fromString(intervalTimeUnit.toUpperCase()); if (null == intervalUnit) { throw new AnalysisException("interval time unit can not be " + intervalTimeUnit); } if (intervalUnit.equals(IntervalUnit.SECOND) && !Config.enable_job_schedule_second_for_test) { throw new AnalysisException("interval time unit can not be second"); } timerDefinition.setIntervalUnit(intervalUnit); } if (null != startsTimeStamp) { if (startsTimeStamp.equalsIgnoreCase(CURRENT_TIMESTAMP_STRING)) { jobExecutionConfiguration.setImmediate(true); timerDefinition.setStartTimeMs(System.currentTimeMillis() - 100L); } else { timerDefinition.setStartTimeMs(TimeUtils.timeStringToLong(startsTimeStamp)); } } if (null != endsTimeStamp) { timerDefinition.setEndTimeMs(TimeUtils.timeStringToLong(endsTimeStamp)); } checkJobName(labelName.getLabelName()); this.jobName = labelName.getLabelName(); jobExecutionConfiguration.setTimerDefinition(timerDefinition); String originStmt = getOrigStmt().originStmt; String executeSql = parseExecuteSql(originStmt, jobName, comment); InsertJob job = new InsertJob(jobName, JobStatus.RUNNING, 
labelName.getDbName(), comment, ConnectContext.get().getCurrentUserIdentity(), jobExecutionConfiguration, System.currentTimeMillis(), executeSql); jobInstance = job; }
timerDefinition.setStartTimeMs(System.currentTimeMillis() - 100L);
public void analyze(Analyzer analyzer) throws UserException { super.analyze(analyzer); checkAuth(); labelName.analyze(analyzer); String dbName = labelName.getDbName(); Env.getCurrentInternalCatalog().getDbOrAnalysisException(dbName); analyzerSqlStmt(); JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration(); jobExecutionConfiguration.setExecuteType(executeType); TimerDefinition timerDefinition = new TimerDefinition(); if (null != onceJobStartTimestamp) { if (onceJobStartTimestamp.equalsIgnoreCase(CURRENT_TIMESTAMP_STRING)) { jobExecutionConfiguration.setImmediate(true); timerDefinition.setStartTimeMs(System.currentTimeMillis()); } else { timerDefinition.setStartTimeMs(TimeUtils.timeStringToLong(onceJobStartTimestamp)); } } if (null != interval) { timerDefinition.setInterval(interval); } if (null != intervalTimeUnit) { IntervalUnit intervalUnit = IntervalUnit.fromString(intervalTimeUnit.toUpperCase()); if (null == intervalUnit) { throw new AnalysisException("interval time unit can not be " + intervalTimeUnit); } if (intervalUnit.equals(IntervalUnit.SECOND) && !Config.enable_job_schedule_second_for_test) { throw new AnalysisException("interval time unit can not be second"); } timerDefinition.setIntervalUnit(intervalUnit); } if (null != startsTimeStamp) { if (startsTimeStamp.equalsIgnoreCase(CURRENT_TIMESTAMP_STRING)) { jobExecutionConfiguration.setImmediate(true); timerDefinition.setStartTimeMs(System.currentTimeMillis()); } else { timerDefinition.setStartTimeMs(TimeUtils.timeStringToLong(startsTimeStamp)); } } if (null != endsTimeStamp) { timerDefinition.setEndTimeMs(TimeUtils.timeStringToLong(endsTimeStamp)); } checkJobName(labelName.getLabelName()); this.jobName = labelName.getLabelName(); jobExecutionConfiguration.setTimerDefinition(timerDefinition); String originStmt = getOrigStmt().originStmt; String executeSql = parseExecuteSql(originStmt, jobName, comment); InsertJob job = new InsertJob(jobName, JobStatus.RUNNING, 
labelName.getDbName(), comment, ConnectContext.get().getCurrentUserIdentity(), jobExecutionConfiguration, System.currentTimeMillis(), executeSql); jobInstance = job; }
class CreateJobStmt extends DdlStmt { @Getter private StatementBase doStmt; @Getter private AbstractJob jobInstance; private final LabelName labelName; private final String onceJobStartTimestamp; private final Long interval; private final String intervalTimeUnit; private final String startsTimeStamp; private final String endsTimeStamp; private final String comment; private String jobName; public static final String CURRENT_TIMESTAMP_STRING = "current_timestamp"; private JobExecuteType executeType; private static final String excludeJobNamePrefix = "inner_"; private static final ImmutableSet<Class<? extends DdlStmt>> supportStmtSuperClass = new ImmutableSet.Builder<Class<? extends DdlStmt>>().add(InsertStmt.class) .build(); private static final HashSet<String> supportStmtClassNamesCache = new HashSet<>(16); public CreateJobStmt(LabelName labelName, JobExecuteType executeType, String onceJobStartTimestamp, Long interval, String intervalTimeUnit, String startsTimeStamp, String endsTimeStamp, String comment, StatementBase doStmt) { this.labelName = labelName; this.onceJobStartTimestamp = onceJobStartTimestamp; this.interval = interval; this.intervalTimeUnit = intervalTimeUnit; this.startsTimeStamp = startsTimeStamp; this.endsTimeStamp = endsTimeStamp; this.comment = comment; this.doStmt = doStmt; this.executeType = executeType; } @Override private void checkJobName(String jobName) throws AnalysisException { if (StringUtils.isBlank(jobName)) { throw new AnalysisException("job name can not be null"); } if (jobName.startsWith(excludeJobNamePrefix)) { throw new AnalysisException("job name can not start with " + excludeJobNamePrefix); } } protected static void checkAuth() throws AnalysisException { if (!Env.getCurrentEnv().getAccessManager().checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN)) { ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "ADMIN"); } } private void checkStmtSupport() throws AnalysisException { if 
(supportStmtClassNamesCache.contains(doStmt.getClass().getSimpleName())) { return; } for (Class<? extends DdlStmt> clazz : supportStmtSuperClass) { if (clazz.isAssignableFrom(doStmt.getClass())) { supportStmtClassNamesCache.add(doStmt.getClass().getSimpleName()); return; } } throw new AnalysisException("Not support " + doStmt.getClass().getSimpleName() + " type in job"); } private void analyzerSqlStmt() throws UserException { checkStmtSupport(); doStmt.analyze(analyzer); } /** * parse execute sql from create job stmt * Some stmt not implement toSql method,so we need to parse sql from originStmt */ private static String parseExecuteSql(String sql, String jobName, String comment) throws AnalysisException { String lowerCaseSql = sql.toLowerCase(); String lowerCaseJobName = jobName.toLowerCase(); int jobNameEndIndex = lowerCaseSql.indexOf(lowerCaseJobName) + lowerCaseJobName.length(); String subSqlStmt = lowerCaseSql.substring(jobNameEndIndex); String originSubSqlStmt = sql.substring(jobNameEndIndex); if (StringUtils.isNotBlank(comment)) { String lowerCaseComment = comment.toLowerCase(); int splitDoIndex = subSqlStmt.indexOf(lowerCaseComment) + lowerCaseComment.length(); subSqlStmt = subSqlStmt.substring(splitDoIndex); originSubSqlStmt = originSubSqlStmt.substring(splitDoIndex); } int executeSqlIndex = subSqlStmt.indexOf("do"); String executeSql = originSubSqlStmt.substring(executeSqlIndex + 2).trim(); if (StringUtils.isBlank(executeSql)) { throw new AnalysisException("execute sql has invalid format"); } return executeSql; } protected static boolean isInnerJob(String jobName) { return jobName.startsWith(excludeJobNamePrefix); } }
class CreateJobStmt extends DdlStmt { @Getter private StatementBase doStmt; @Getter private AbstractJob jobInstance; private final LabelName labelName; private final String onceJobStartTimestamp; private final Long interval; private final String intervalTimeUnit; private final String startsTimeStamp; private final String endsTimeStamp; private final String comment; private String jobName; public static final String CURRENT_TIMESTAMP_STRING = "current_timestamp"; private JobExecuteType executeType; private static final String excludeJobNamePrefix = "inner_"; private static final ImmutableSet<Class<? extends DdlStmt>> supportStmtSuperClass = new ImmutableSet.Builder<Class<? extends DdlStmt>>().add(InsertStmt.class) .build(); private static final HashSet<String> supportStmtClassNamesCache = new HashSet<>(16); public CreateJobStmt(LabelName labelName, JobExecuteType executeType, String onceJobStartTimestamp, Long interval, String intervalTimeUnit, String startsTimeStamp, String endsTimeStamp, String comment, StatementBase doStmt) { this.labelName = labelName; this.onceJobStartTimestamp = onceJobStartTimestamp; this.interval = interval; this.intervalTimeUnit = intervalTimeUnit; this.startsTimeStamp = startsTimeStamp; this.endsTimeStamp = endsTimeStamp; this.comment = comment; this.doStmt = doStmt; this.executeType = executeType; } @Override private void checkJobName(String jobName) throws AnalysisException { if (StringUtils.isBlank(jobName)) { throw new AnalysisException("job name can not be null"); } if (jobName.startsWith(excludeJobNamePrefix)) { throw new AnalysisException("job name can not start with " + excludeJobNamePrefix); } } protected static void checkAuth() throws AnalysisException { if (!Env.getCurrentEnv().getAccessManager().checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN)) { ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "ADMIN"); } } private void checkStmtSupport() throws AnalysisException { if 
(supportStmtClassNamesCache.contains(doStmt.getClass().getSimpleName())) { return; } for (Class<? extends DdlStmt> clazz : supportStmtSuperClass) { if (clazz.isAssignableFrom(doStmt.getClass())) { supportStmtClassNamesCache.add(doStmt.getClass().getSimpleName()); return; } } throw new AnalysisException("Not support " + doStmt.getClass().getSimpleName() + " type in job"); } private void analyzerSqlStmt() throws UserException { checkStmtSupport(); doStmt.analyze(analyzer); } /** * parse execute sql from create job stmt * Some stmt not implement toSql method,so we need to parse sql from originStmt */ private static String parseExecuteSql(String sql, String jobName, String comment) throws AnalysisException { String lowerCaseSql = sql.toLowerCase(); String lowerCaseJobName = jobName.toLowerCase(); int jobNameEndIndex = lowerCaseSql.indexOf(lowerCaseJobName) + lowerCaseJobName.length(); String subSqlStmt = lowerCaseSql.substring(jobNameEndIndex); String originSubSqlStmt = sql.substring(jobNameEndIndex); if (StringUtils.isNotBlank(comment)) { String lowerCaseComment = comment.toLowerCase(); int splitDoIndex = subSqlStmt.indexOf(lowerCaseComment) + lowerCaseComment.length(); subSqlStmt = subSqlStmt.substring(splitDoIndex); originSubSqlStmt = originSubSqlStmt.substring(splitDoIndex); } int executeSqlIndex = subSqlStmt.indexOf("do"); String executeSql = originSubSqlStmt.substring(executeSqlIndex + 2).trim(); if (StringUtils.isBlank(executeSql)) { throw new AnalysisException("execute sql has invalid format"); } return executeSql; } protected static boolean isInnerJob(String jobName) { return jobName.startsWith(excludeJobNamePrefix); } }
@franz1981 could pls update the comment as well? `private final Lock 1l = new ReentrantLock();` => `private volatile Lock 1l`. I know it's nitpicking but... ;-)
Collection<Resource> generate(DotName scope) { List<BeanInfo> beans = new BeanStream(beanDeployment.getBeans()).withScope(scope).collect(); ResourceClassOutput classOutput = new ResourceClassOutput(true, generateSources); String generatedName = scopeToGeneratedName.get(scope); reflectionRegistration.registerMethod(generatedName, MethodDescriptor.INIT); ClassCreator contextInstances = ClassCreator.builder().classOutput(classOutput).className(generatedName) .interfaces(ContextInstances.class).build(); Map<String, InstanceAndLock> idToFields = new HashMap<>(); int fieldIndex = 0; for (BeanInfo bean : beans) { String beanIdx = "" + fieldIndex++; FieldCreator handleField = contextInstances.getFieldCreator(beanIdx, ContextInstanceHandle.class) .setModifiers(ACC_PRIVATE | ACC_VOLATILE); FieldCreator lockField = contextInstances.getFieldCreator(beanIdx + "l", Lock.class) .setModifiers(ACC_PRIVATE | ACC_VOLATILE); FieldCreator atomicLockField = contextInstances.getFieldCreator("LAZY_" + beanIdx + "L_UPDATER", AtomicReferenceFieldUpdater.class) .setModifiers(ACC_PRIVATE | ACC_FINAL | ACC_STATIC); idToFields.put(bean.getIdentifier(), new InstanceAndLock(handleField.getFieldDescriptor(), lockField.getFieldDescriptor(), atomicLockField.getFieldDescriptor())); } implementStaticConstructor(contextInstances, idToFields); Map<String, MethodDescriptor> lazyLocks = implementLazyLocks(contextInstances, idToFields); MethodCreator constructor = contextInstances.getMethodCreator(MethodDescriptor.INIT, "V"); constructor.invokeSpecialMethod(MethodDescriptors.OBJECT_CONSTRUCTOR, constructor.getThis()); constructor.returnVoid(); implementComputeIfAbsent(contextInstances, beans, idToFields, lazyLocks); implementGetIfPresent(contextInstances, beans, idToFields); List<MethodDescriptor> remove = implementRemove(contextInstances, beans, idToFields, lazyLocks); implementGetAllPresent(contextInstances, idToFields); implementRemoveEach(contextInstances, remove); contextInstances.close(); return 
classOutput.getResources(); }
Collection<Resource> generate(DotName scope) { List<BeanInfo> beans = new BeanStream(beanDeployment.getBeans()).withScope(scope).collect(); ResourceClassOutput classOutput = new ResourceClassOutput(true, generateSources); String generatedName = scopeToGeneratedName.get(scope); reflectionRegistration.registerMethod(generatedName, MethodDescriptor.INIT); ClassCreator contextInstances = ClassCreator.builder().classOutput(classOutput).className(generatedName) .interfaces(ContextInstances.class).build(); Map<String, InstanceAndLock> idToFields = new HashMap<>(); int fieldIndex = 0; for (BeanInfo bean : beans) { String beanIdx = "" + fieldIndex++; FieldCreator handleField = contextInstances.getFieldCreator(beanIdx, ContextInstanceHandle.class) .setModifiers(ACC_PRIVATE | ACC_VOLATILE); FieldCreator lockField = contextInstances.getFieldCreator(beanIdx + "l", Lock.class) .setModifiers(ACC_PRIVATE | ACC_VOLATILE); FieldCreator atomicLockField = contextInstances.getFieldCreator("LAZY_" + beanIdx + "L_UPDATER", AtomicReferenceFieldUpdater.class) .setModifiers(ACC_PRIVATE | ACC_FINAL | ACC_STATIC); idToFields.put(bean.getIdentifier(), new InstanceAndLock(handleField.getFieldDescriptor(), lockField.getFieldDescriptor(), atomicLockField.getFieldDescriptor())); } implementStaticConstructor(contextInstances, idToFields); Map<String, MethodDescriptor> lazyLocks = implementLazyLocks(contextInstances, idToFields); MethodCreator constructor = contextInstances.getMethodCreator(MethodDescriptor.INIT, "V"); constructor.invokeSpecialMethod(MethodDescriptors.OBJECT_CONSTRUCTOR, constructor.getThis()); constructor.returnVoid(); implementComputeIfAbsent(contextInstances, beans, idToFields, lazyLocks); implementGetIfPresent(contextInstances, beans, idToFields); List<MethodDescriptor> remove = implementRemove(contextInstances, beans, idToFields, lazyLocks); implementGetAllPresent(contextInstances, idToFields); implementRemoveEach(contextInstances, remove); contextInstances.close(); return 
classOutput.getResources(); }
class ContextInstancesGenerator extends AbstractGenerator { static final String CONTEXT_INSTANCES_SUFFIX = "_ContextInstances"; private final BeanDeployment beanDeployment; private final Map<DotName, String> scopeToGeneratedName; public ContextInstancesGenerator(boolean generateSources, ReflectionRegistration reflectionRegistration, BeanDeployment beanDeployment, Map<DotName, String> scopeToGeneratedName) { super(generateSources, reflectionRegistration); this.beanDeployment = beanDeployment; this.scopeToGeneratedName = scopeToGeneratedName; } void precomputeGeneratedName(DotName scope) { String generatedName = DEFAULT_PACKAGE + "." + beanDeployment.name + UNDERSCORE + scope.toString().replace(".", UNDERSCORE) + CONTEXT_INSTANCES_SUFFIX; scopeToGeneratedName.put(scope, generatedName); } private static void implementStaticConstructor(ClassCreator contextInstances, Map<String, InstanceAndLock> idToFields) { MethodCreator staticConstructor = contextInstances.getMethodCreator(MethodDescriptor.CLINIT, void.class) .setModifiers(ACC_STATIC); MethodDescriptor newLockUpdater = MethodDescriptor.ofMethod(AtomicReferenceFieldUpdater.class, "newUpdater", AtomicReferenceFieldUpdater.class, Class.class, Class.class, String.class); for (InstanceAndLock fields : idToFields.values()) { ResultHandle updater = staticConstructor.invokeStaticMethod(newLockUpdater, staticConstructor.loadClass(contextInstances.getClassName()), staticConstructor.loadClass(Lock.class), staticConstructor.load(fields.lock.getName())); staticConstructor.writeStaticField(fields.lockUpdater, updater); } staticConstructor.returnVoid(); } private static Map<String, MethodDescriptor> implementLazyLocks(ClassCreator contextInstances, Map<String, InstanceAndLock> idToFields) { MethodDescriptor atomicReferenceFieldUpdaterCompareAndSet = MethodDescriptor.ofMethod( AtomicReferenceFieldUpdater.class, "compareAndSet", boolean.class, Object.class, Object.class, Object.class); Map<String, MethodDescriptor> lazyLockMethods = 
new HashMap<>(idToFields.size()); for (var namedFields : idToFields.entrySet()) { var fields = namedFields.getValue(); MethodCreator lazyLockMethod = contextInstances.getMethodCreator("lazy" + fields.lock.getName(), Lock.class) .setModifiers(ACC_PRIVATE); ResultHandle lock = lazyLockMethod.readInstanceField(fields.lock, lazyLockMethod.getThis()); lazyLockMethod .ifNotNull(lock).trueBranch() .returnValue(lock); ResultHandle newLock = lazyLockMethod.newInstance(MethodDescriptor.ofConstructor(ReentrantLock.class)); ResultHandle updated = lazyLockMethod.invokeVirtualMethod(atomicReferenceFieldUpdaterCompareAndSet, lazyLockMethod.readStaticField(fields.lockUpdater), lazyLockMethod.getThis(), lazyLockMethod.loadNull(), newLock); lazyLockMethod .ifTrue(updated).trueBranch() .returnValue(newLock); lazyLockMethod.returnValue(lazyLockMethod.readInstanceField(fields.lock, lazyLockMethod.getThis())); lazyLockMethods.put(namedFields.getKey(), lazyLockMethod.getMethodDescriptor()); } return lazyLockMethods; } private void implementGetAllPresent(ClassCreator contextInstances, Map<String, InstanceAndLock> idToFields) { MethodCreator getAllPresent = contextInstances.getMethodCreator("getAllPresent", Set.class) .setModifiers(ACC_PUBLIC); List<ResultHandle> results = new ArrayList<>(idToFields.size()); for (InstanceAndLock fields : idToFields.values()) { results.add(getAllPresent.readInstanceField(fields.instance, getAllPresent.getThis())); } ResultHandle ret = getAllPresent.newInstance(MethodDescriptor.ofConstructor(HashSet.class)); for (ResultHandle result : results) { getAllPresent.ifNotNull(result).trueBranch().invokeInterfaceMethod(MethodDescriptors.SET_ADD, ret, result); } getAllPresent.returnValue(ret); } private void implementRemoveEach(ClassCreator contextInstances, List<MethodDescriptor> removeInstances) { MethodCreator removeEach = contextInstances.getMethodCreator("removeEach", void.class, Consumer.class) .setModifiers(ACC_PUBLIC); List<ResultHandle> results = new 
ArrayList<>(removeInstances.size()); for (MethodDescriptor removeInstance : removeInstances) { results.add(removeEach.invokeVirtualMethod(removeInstance, removeEach.getThis())); } BytecodeCreator actionIsNotNull = removeEach.ifNotNull(removeEach.getMethodParam(0)).trueBranch(); for (ResultHandle result : results) { BytecodeCreator isNotNull = actionIsNotNull.ifNotNull(result).trueBranch(); isNotNull.invokeInterfaceMethod(MethodDescriptors.CONSUMER_ACCEPT, removeEach.getMethodParam(0), result); } removeEach.returnVoid(); }
class ContextInstancesGenerator extends AbstractGenerator { static final String CONTEXT_INSTANCES_SUFFIX = "_ContextInstances"; private final BeanDeployment beanDeployment; private final Map<DotName, String> scopeToGeneratedName; public ContextInstancesGenerator(boolean generateSources, ReflectionRegistration reflectionRegistration, BeanDeployment beanDeployment, Map<DotName, String> scopeToGeneratedName) { super(generateSources, reflectionRegistration); this.beanDeployment = beanDeployment; this.scopeToGeneratedName = scopeToGeneratedName; } void precomputeGeneratedName(DotName scope) { String generatedName = DEFAULT_PACKAGE + "." + beanDeployment.name + UNDERSCORE + scope.toString().replace(".", UNDERSCORE) + CONTEXT_INSTANCES_SUFFIX; scopeToGeneratedName.put(scope, generatedName); } private static void implementStaticConstructor(ClassCreator contextInstances, Map<String, InstanceAndLock> idToFields) { MethodCreator staticConstructor = contextInstances.getMethodCreator(MethodDescriptor.CLINIT, void.class) .setModifiers(ACC_STATIC); MethodDescriptor newLockUpdater = MethodDescriptor.ofMethod(AtomicReferenceFieldUpdater.class, "newUpdater", AtomicReferenceFieldUpdater.class, Class.class, Class.class, String.class); for (InstanceAndLock fields : idToFields.values()) { ResultHandle updater = staticConstructor.invokeStaticMethod(newLockUpdater, staticConstructor.loadClass(contextInstances.getClassName()), staticConstructor.loadClass(Lock.class), staticConstructor.load(fields.lock.getName())); staticConstructor.writeStaticField(fields.lockUpdater, updater); } staticConstructor.returnVoid(); } private static Map<String, MethodDescriptor> implementLazyLocks(ClassCreator contextInstances, Map<String, InstanceAndLock> idToFields) { MethodDescriptor atomicReferenceFieldUpdaterCompareAndSet = MethodDescriptor.ofMethod( AtomicReferenceFieldUpdater.class, "compareAndSet", boolean.class, Object.class, Object.class, Object.class); Map<String, MethodDescriptor> lazyLockMethods = 
new HashMap<>(idToFields.size()); for (var namedFields : idToFields.entrySet()) { var fields = namedFields.getValue(); MethodCreator lazyLockMethod = contextInstances.getMethodCreator("lazy" + fields.lock.getName(), Lock.class) .setModifiers(ACC_PRIVATE); ResultHandle lock = lazyLockMethod.readInstanceField(fields.lock, lazyLockMethod.getThis()); lazyLockMethod .ifNotNull(lock).trueBranch() .returnValue(lock); ResultHandle newLock = lazyLockMethod.newInstance(MethodDescriptor.ofConstructor(ReentrantLock.class)); ResultHandle updated = lazyLockMethod.invokeVirtualMethod(atomicReferenceFieldUpdaterCompareAndSet, lazyLockMethod.readStaticField(fields.lockUpdater), lazyLockMethod.getThis(), lazyLockMethod.loadNull(), newLock); lazyLockMethod .ifTrue(updated).trueBranch() .returnValue(newLock); lazyLockMethod.returnValue(lazyLockMethod.readInstanceField(fields.lock, lazyLockMethod.getThis())); lazyLockMethods.put(namedFields.getKey(), lazyLockMethod.getMethodDescriptor()); } return lazyLockMethods; } private void implementGetAllPresent(ClassCreator contextInstances, Map<String, InstanceAndLock> idToFields) { MethodCreator getAllPresent = contextInstances.getMethodCreator("getAllPresent", Set.class) .setModifiers(ACC_PUBLIC); List<ResultHandle> results = new ArrayList<>(idToFields.size()); for (InstanceAndLock fields : idToFields.values()) { results.add(getAllPresent.readInstanceField(fields.instance, getAllPresent.getThis())); } ResultHandle ret = getAllPresent.newInstance(MethodDescriptor.ofConstructor(HashSet.class)); for (ResultHandle result : results) { getAllPresent.ifNotNull(result).trueBranch().invokeInterfaceMethod(MethodDescriptors.SET_ADD, ret, result); } getAllPresent.returnValue(ret); } private void implementRemoveEach(ClassCreator contextInstances, List<MethodDescriptor> removeInstances) { MethodCreator removeEach = contextInstances.getMethodCreator("removeEach", void.class, Consumer.class) .setModifiers(ACC_PUBLIC); List<ResultHandle> results = new 
ArrayList<>(removeInstances.size()); for (MethodDescriptor removeInstance : removeInstances) { results.add(removeEach.invokeVirtualMethod(removeInstance, removeEach.getThis())); } BytecodeCreator actionIsNotNull = removeEach.ifNotNull(removeEach.getMethodParam(0)).trueBranch(); for (ResultHandle result : results) { BytecodeCreator isNotNull = actionIsNotNull.ifNotNull(result).trueBranch(); isNotNull.invokeInterfaceMethod(MethodDescriptors.CONSUMER_ACCEPT, removeEach.getMethodParam(0), result); } removeEach.returnVoid(); }
Not sure. So I made a new check here.
public void clearSparkLauncherLog() { String logPath = sparkLoadAppHandle.getLogPath(); if (!Strings.isNullOrEmpty(logPath)) { File file = new File(logPath); if (file.exists()) { file.delete(); } } }
String logPath = sparkLoadAppHandle.getLogPath();
public void clearSparkLauncherLog() { if (sparkLoadAppHandle != null) { String logPath = sparkLoadAppHandle.getLogPath(); if (!Strings.isNullOrEmpty(logPath)) { File file = new File(logPath); if (file.exists()) { file.delete(); } } } }
class SparkLoadJob extends BulkLoadJob { private static final Logger LOG = LogManager.getLogger(SparkLoadJob.class); private SparkResource sparkResource; private long etlStartTimestamp = -1; private String appId = ""; private String etlOutputPath = ""; private Map<String, Pair<String, Long>> tabletMetaToFileInfo = Maps.newHashMap(); private ResourceDesc resourceDesc; private SparkLoadAppHandle sparkLoadAppHandle; private long quorumFinishTimestamp = -1; private Map<Long, Set<Long>> tableToLoadPartitions = Maps.newHashMap(); private Map<Long, PushBrokerReaderParams> indexToPushBrokerReaderParams = Maps.newHashMap(); private Map<Long, Integer> indexToSchemaHash = Maps.newHashMap(); private Map<Long, Map<Long, PushTask>> tabletToSentReplicaPushTask = Maps.newHashMap(); private Set<Long> finishedReplicas = Sets.newHashSet(); private Set<Long> quorumTablets = Sets.newHashSet(); private Set<Long> fullTablets = Sets.newHashSet(); public SparkLoadJob() { super(); jobType = EtlJobType.SPARK; } public SparkLoadJob(long dbId, String label, ResourceDesc resourceDesc, OriginStatement originStmt) throws MetaNotFoundException { super(dbId, label, originStmt); this.resourceDesc = resourceDesc; timeoutSecond = Config.spark_load_default_timeout_second; jobType = EtlJobType.SPARK; } @Override protected void setJobProperties(Map<String, String> properties) throws DdlException { super.setJobProperties(properties); setResourceInfo(); } /** * merge system conf with load stmt * @throws DdlException */ private void setResourceInfo() throws DdlException { String resourceName = resourceDesc.getName(); Resource oriResource = Catalog.getCurrentCatalog().getResourceMgr().getResource(resourceName); if (oriResource == null) { throw new DdlException("Resource does not exist. 
name: " + resourceName); } sparkResource = ((SparkResource) oriResource).getCopiedResource(); sparkResource.update(resourceDesc); Map<String, String> brokerProperties = sparkResource.getBrokerPropertiesWithoutPrefix(); brokerDesc = new BrokerDesc(sparkResource.getBroker(), brokerProperties); } @Override public void beginTxn() throws LabelAlreadyUsedException, BeginTransactionException, AnalysisException, DuplicatedRequestException { transactionId = Catalog.getCurrentGlobalTransactionMgr() .beginTransaction(dbId, Lists.newArrayList(fileGroupAggInfo.getAllTableIds()), label, null, new TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()), LoadJobSourceType.FRONTEND, id, timeoutSecond); } @Override protected void unprotectedExecuteJob() throws LoadException { LoadTask task = new SparkLoadPendingTask(this, fileGroupAggInfo.getAggKeyToFileGroups(), sparkResource, brokerDesc); task.init(); idToTasks.put(task.getSignature(), task); Catalog.getCurrentCatalog().getLoadTaskScheduler().submit(task); } @Override public void onTaskFinished(TaskAttachment attachment) { if (attachment instanceof SparkPendingTaskAttachment) { onPendingTaskFinished((SparkPendingTaskAttachment) attachment); } } private void onPendingTaskFinished(SparkPendingTaskAttachment attachment) { writeLock(); try { if (isTxnDone()) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("state", state) .add("error_msg", "this task will be ignored when job is: " + state) .build()); return; } if (finishedTaskIds.contains(attachment.getTaskId())) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("task_id", attachment.getTaskId()) .add("error_msg", "this is a duplicated callback of pending task " + "when broker already has loading task") .build()); return; } finishedTaskIds.add(attachment.getTaskId()); sparkLoadAppHandle = attachment.getHandle(); appId = attachment.getAppId(); etlOutputPath = attachment.getOutputPath(); executeEtl(); unprotectedLogUpdateStateInfo(); } finally { writeUnlock(); } } 
/** * update etl start time and state in spark load job */ private void executeEtl() { etlStartTimestamp = System.currentTimeMillis(); state = JobState.ETL; LOG.info("update to {} state success. job id: {}", state, id); } private boolean checkState(JobState expectState) { readLock(); try { if (state == expectState) { return true; } return false; } finally { readUnlock(); } } /** * Check the status of etl job regularly * 1. RUNNING, update etl job progress * 2. CANCELLED, cancel load job * 3. FINISHED, get the etl output file paths, update job state to LOADING and log job update info * * Send push tasks if job state changed to LOADING */ public void updateEtlStatus() throws Exception { if (!checkState(JobState.ETL)) { return; } SparkEtlJobHandler handler = new SparkEtlJobHandler(); EtlStatus status = handler.getEtlJobStatus(sparkLoadAppHandle, appId, id, etlOutputPath, sparkResource, brokerDesc); writeLock(); try { switch (status.getState()) { case RUNNING: unprotectedUpdateEtlStatusInternal(status); break; case FINISHED: unprotectedProcessEtlFinish(status, handler); break; case CANCELLED: throw new LoadException("spark etl job failed. 
msg: " + status.getFailMsg()); default: LOG.warn("unknown etl state: {}", status.getState().name()); break; } } finally { writeUnlock(); } if (checkState(JobState.LOADING)) { submitPushTasks(); } } private void unprotectedUpdateEtlStatusInternal(EtlStatus etlStatus) { loadingStatus = etlStatus; progress = etlStatus.getProgress(); if (!sparkResource.isYarnMaster()) { loadingStatus.setTrackingUrl(appId); } DppResult dppResult = etlStatus.getDppResult(); if (dppResult != null) { loadStatistic.fileNum = (int) dppResult.fileNumber; loadStatistic.totalFileSizeB = dppResult.fileSize; TUniqueId dummyId = new TUniqueId(0, 0); long dummyBackendId = -1L; loadStatistic.initLoad(dummyId, Sets.newHashSet(dummyId), Lists.newArrayList(dummyBackendId)); loadStatistic.updateLoadProgress(dummyBackendId, dummyId, dummyId, dppResult.scannedRows, true); Map<String, String> counters = loadingStatus.getCounters(); counters.put(DPP_NORMAL_ALL, String.valueOf(dppResult.normalRows)); counters.put(DPP_ABNORMAL_ALL, String.valueOf(dppResult.abnormalRows)); counters.put(UNSELECTED_ROWS, String.valueOf(dppResult.unselectRows)); } } private void unprotectedProcessEtlFinish(EtlStatus etlStatus, SparkEtlJobHandler handler) throws Exception { unprotectedUpdateEtlStatusInternal(etlStatus); if (!checkDataQuality()) { throw new DataQualityException(DataQualityException.QUALITY_FAIL_MSG); } unprotectedUpdateToLoadingState(etlStatus, handler.getEtlFilePaths(etlOutputPath, brokerDesc)); unprotectedLogUpdateStateInfo(); unprotectedPrepareLoadingInfos(); } private void unprotectedUpdateToLoadingState(EtlStatus etlStatus, Map<String, Long> filePathToSize) throws LoadException { try { for (Map.Entry<String, Long> entry : filePathToSize.entrySet()) { String filePath = entry.getKey(); if (!filePath.endsWith(EtlJobConfig.ETL_OUTPUT_FILE_FORMAT)) { continue; } String tabletMetaStr = EtlJobConfig.getTabletMetaStr(filePath); tabletMetaToFileInfo.put(tabletMetaStr, Pair.create(filePath, entry.getValue())); } 
            // Tail of unprotectedUpdateToLoadingState(): all etl output file infos have been
            // collected above, so flip the job into LOADING state and reset progress.
            loadingStatus = etlStatus;
            progress = 0;
            unprotectedUpdateState(JobState.LOADING);
            LOG.info("update to {} state success. job id: {}", state, id);
        } catch (Exception e) {
            LOG.warn("update to {} state failed. job id: {}", state, id, e);
            throw new LoadException(e.getMessage(), e);
        }
    }

    /**
     * Parses every tablet meta key recorded by the etl phase and rebuilds the in-memory
     * loading structures (tableToLoadPartitions, indexToSchemaHash).
     * Meta key format: "tableId.partitionId.indexId.bucket.schemaHash" (5 dot-separated parts).
     * Caller must hold the write lock.
     */
    private void unprotectedPrepareLoadingInfos() {
        for (String tabletMetaStr : tabletMetaToFileInfo.keySet()) {
            String[] fileNameArr = tabletMetaStr.split("\\.");
            // tableId.partitionId.indexId.bucket.schemaHash
            Preconditions.checkState(fileNameArr.length == 5);
            long tableId = Long.parseLong(fileNameArr[0]);
            long partitionId = Long.parseLong(fileNameArr[1]);
            long indexId = Long.parseLong(fileNameArr[2]);
            int schemaHash = Integer.parseInt(fileNameArr[4]);

            if (!tableToLoadPartitions.containsKey(tableId)) {
                tableToLoadPartitions.put(tableId, Sets.newHashSet());
            }
            tableToLoadPartitions.get(tableId).add(partitionId);

            indexToSchemaHash.put(indexId, schemaHash);
        }
    }

    /**
     * Lazily builds and caches the broker reader params (scan range template + descriptor
     * table) for one materialized index; subsequent calls for the same index reuse the cache.
     */
    private PushBrokerReaderParams getPushBrokerReaderParams(OlapTable table, long indexId) throws UserException {
        if (!indexToPushBrokerReaderParams.containsKey(indexId)) {
            PushBrokerReaderParams pushBrokerReaderParams = new PushBrokerReaderParams();
            pushBrokerReaderParams.init(table.getSchemaByIndexId(indexId), brokerDesc);
            indexToPushBrokerReaderParams.put(indexId, pushBrokerReaderParams);
        }
        return indexToPushBrokerReaderParams.get(indexId);
    }

    /**
     * Creates and sends one PushTask per (tablet, replica) that has not been sent yet,
     * and collects quorum/full-finished tablets along the way.
     *
     * @return the ids of ALL tablets involved in this load (may be empty when every
     *         table/partition has been dropped meanwhile)
     * @throws MetaNotFoundException if the database has been dropped
     * @throws LoadException if no partition has any load data
     */
    private Set<Long> submitPushTasks() throws UserException {
        // check db exist
        Database db = null;
        try {
            db = getDb();
        } catch (MetaNotFoundException e) {
            String errMsg = new LogBuilder(LogKey.LOAD_JOB, id)
                    .add("database_id", dbId)
                    .add("label", label)
                    .add("error_msg", "db has been deleted when job is loading")
                    .build();
            throw new MetaNotFoundException(errMsg);
        }

        AgentBatchTask batchTask = new AgentBatchTask();
        boolean hasLoadPartitions = false;
        Set<Long> totalTablets = Sets.newHashSet();
        // lock order: db read lock first, then job write lock
        db.readLock();
        try {
            writeLock();
            try {
                for (Map.Entry<Long, Set<Long>> entry : tableToLoadPartitions.entrySet()) {
                    long tableId = entry.getKey();
                    OlapTable table = (OlapTable) db.getTable(tableId);
                    if (table == null) {
                        // table dropped while loading: skip it
                        LOG.warn("table does not exist. id: {}", tableId);
                        continue;
                    }

                    Set<Long> partitionIds = entry.getValue();
                    for (long partitionId : partitionIds) {
                        Partition partition = table.getPartition(partitionId);
                        if (partition == null) {
                            // partition dropped while loading: skip it
                            LOG.warn("partition does not exist. id: {}", partitionId);
                            continue;
                        }

                        hasLoadPartitions = true;
                        // majority of replicas must finish before a tablet counts as quorum-done
                        int quorumReplicaNum = table.getPartitionInfo().getReplicationNum(partitionId) / 2 + 1;

                        List<MaterializedIndex> indexes = partition.getMaterializedIndices(IndexExtState.ALL);
                        for (MaterializedIndex index : indexes) {
                            long indexId = index.getId();
                            int schemaHash = indexToSchemaHash.get(indexId);

                            int bucket = 0;
                            for (Tablet tablet : index.getTablets()) {
                                long tabletId = tablet.getId();
                                totalTablets.add(tabletId);
                                // key into tabletMetaToFileInfo; bucket index follows tablet order
                                String tabletMetaStr = String.format("%d.%d.%d.%d.%d", tableId, partitionId,
                                        indexId, bucket++, schemaHash);

                                Set<Long> tabletAllReplicas = Sets.newHashSet();
                                Set<Long> tabletFinishedReplicas = Sets.newHashSet();
                                for (Replica replica : tablet.getReplicas()) {
                                    long replicaId = replica.getId();
                                    tabletAllReplicas.add(replicaId);
                                    // only send a push task once per (tablet, replica)
                                    if (!tabletToSentReplicaPushTask.containsKey(tabletId)
                                            || !tabletToSentReplicaPushTask.get(tabletId).containsKey(replicaId)) {
                                        long backendId = replica.getBackendId();
                                        long taskSignature = Catalog.getCurrentGlobalTransactionMgr()
                                                .getTransactionIDGenerator().getNextTransactionId();

                                        PushBrokerReaderParams params = getPushBrokerReaderParams(table, indexId);
                                        // copy the cached scan range template: path/size/broker
                                        // differ per tablet and replica
                                        TBrokerScanRange tBrokerScanRange =
                                                new TBrokerScanRange(params.tBrokerScanRange);
                                        // set file path and size; empty path / -1 size means the
                                        // etl phase produced no file for this tablet
                                        TBrokerRangeDesc tBrokerRangeDesc = tBrokerScanRange.getRanges().get(0);
                                        tBrokerRangeDesc.setPath("");
                                        tBrokerRangeDesc.setFile_size(-1);
                                        if (tabletMetaToFileInfo.containsKey(tabletMetaStr)) {
                                            Pair<String, Long> fileInfo = tabletMetaToFileInfo.get(tabletMetaStr);
                                            tBrokerRangeDesc.setPath(fileInfo.first);
                                            tBrokerRangeDesc.setFile_size(fileInfo.second);
                                        }

                                        // pick a broker co-located with the replica's backend host
                                        Backend backend = Catalog.getCurrentCatalog().getCurrentSystemInfo()
                                                .getBackend(backendId);
                                        FsBroker fsBroker = Catalog.getCurrentCatalog().getBrokerMgr().getBroker(
                                                brokerDesc.getName(), backend.getHost());
                                        tBrokerScanRange.getBroker_addresses().add(
                                                new TNetworkAddress(fsBroker.ip, fsBroker.port));
                                        LOG.debug("push task for replica {}, broker {}:{}, backendId {}, filePath {}, fileSize {}"
                                                , replicaId, fsBroker.ip, fsBroker.port, backendId,
                                                tBrokerRangeDesc.path, tBrokerRangeDesc.file_size);

                                        PushTask pushTask = new PushTask(backendId, dbId, tableId, partitionId,
                                                indexId, tabletId, replicaId, schemaHash, 0, id, TPushType.LOAD_V2,
                                                TPriority.NORMAL, transactionId, taskSignature,
                                                tBrokerScanRange, params.tDescriptorTable);
                                        if (AgentTaskQueue.addTask(pushTask)) {
                                            batchTask.addTask(pushTask);
                                            if (!tabletToSentReplicaPushTask.containsKey(tabletId)) {
                                                tabletToSentReplicaPushTask.put(tabletId, Maps.newHashMap());
                                            }
                                            tabletToSentReplicaPushTask.get(tabletId).put(replicaId, pushTask);
                                        }
                                    }

                                    // a replica counts as finished only if its push task reported
                                    // back AND it has no failed version
                                    if (finishedReplicas.contains(replicaId) && replica.getLastFailedVersion() < 0) {
                                        tabletFinishedReplicas.add(replicaId);
                                    }
                                }

                                if (tabletAllReplicas.size() == 0) {
                                    LOG.error("invalid situation. tablet is empty. id: {}", tabletId);
                                }

                                // tablet-level quorum / full bookkeeping
                                if (tabletFinishedReplicas.size() >= quorumReplicaNum) {
                                    quorumTablets.add(tabletId);
                                    if (tabletFinishedReplicas.size() == tabletAllReplicas.size()) {
                                        fullTablets.add(tabletId);
                                    }
                                }
                            }
                        }
                    }
                }

                if (batchTask.getTaskNum() > 0) {
                    AgentTaskExecutor.submit(batchTask);
                }

                if (!hasLoadPartitions) {
                    String errMsg = new LogBuilder(LogKey.LOAD_JOB, id)
                            .add("database_id", dbId)
                            .add("label", label)
                            .add("error_msg", "all partitions have no load data")
                            .build();
                    throw new LoadException(errMsg);
                }

                return totalTablets;
            } finally {
                writeUnlock();
            }
        } finally {
            db.readUnlock();
        }
    }

    /**
     * Callback from a finished push task: records the replica as finished and remembers
     * its (tablet, backend) commit info for the later transaction commit.
     */
    public void addFinishedReplica(long replicaId, long tabletId, long backendId) {
        writeLock();
        try {
            if (finishedReplicas.add(replicaId)) {
                commitInfos.add(new TabletCommitInfo(tabletId, backendId));
                // null out the sent task entry so clearJob() skips it but the
                // "already sent" marker is kept
                Map<Long, PushTask> sentReplicaPushTask = tabletToSentReplicaPushTask.get(tabletId);
                if (sentReplicaPushTask != null) {
                    if (sentReplicaPushTask.containsKey(replicaId)) {
                        sentReplicaPushTask.put(replicaId, null);
                    }
                }
            }
        } finally {
            writeUnlock();
        }
    }

    /**
     * 1. Sends push tasks to Be
     * 2. 
Commit transaction after all push tasks execute successfully */ public void updateLoadingStatus() throws UserException { if (!checkState(JobState.LOADING)) { return; } Set<Long> totalTablets = submitPushTasks(); boolean canCommitJob = false; writeLock(); try { progress = fullTablets.size() * 100 / totalTablets.size(); if (progress == 100) { progress = 99; } if (quorumFinishTimestamp < 0 && quorumTablets.containsAll(totalTablets)) { quorumFinishTimestamp = System.currentTimeMillis(); } long stragglerTimeout = Config.load_straggler_wait_second * 1000; if ((quorumFinishTimestamp > 0 && System.currentTimeMillis() - quorumFinishTimestamp > stragglerTimeout) || fullTablets.containsAll(totalTablets)) { canCommitJob = true; } } finally { writeUnlock(); } if (canCommitJob) { tryCommitJob(); } } private void tryCommitJob() throws UserException { LOG.info(new LogBuilder(LogKey.LOAD_JOB, id) .add("txn_id", transactionId) .add("msg", "Load job try to commit txn") .build()); Database db = getDb(); db.writeLock(); try { Catalog.getCurrentGlobalTransactionMgr().commitTransaction( dbId, transactionId, commitInfos, new LoadJobFinalOperation(id, loadingStatus, progress, loadStartTimestamp, finishTimestamp, state, failMsg)); } catch (TabletQuorumFailedException e) { } finally { db.writeUnlock(); } } /** * load job already cancelled or finished, clear job below: * 1. kill etl job and delete etl files * 2. clear push tasks and infos that not persist */ private void clearJob() { Preconditions.checkState(state == JobState.FINISHED || state == JobState.CANCELLED); LOG.debug("kill etl job and delete etl files. id: {}, state: {}", id, state); SparkEtlJobHandler handler = new SparkEtlJobHandler(); if (state == JobState.CANCELLED) { if ((!Strings.isNullOrEmpty(appId) && sparkResource.isYarnMaster()) || sparkLoadAppHandle != null) { try { handler.killEtlJob(sparkLoadAppHandle, appId, id, sparkResource); } catch (Exception e) { LOG.warn("kill etl job failed. 
id: {}, state: {}", id, state, e); } } } if (!Strings.isNullOrEmpty(etlOutputPath)) { try { String outputPath = etlOutputPath.substring(0, etlOutputPath.lastIndexOf("/")); handler.deleteEtlOutputPath(outputPath, brokerDesc); } catch (Exception e) { LOG.warn("delete etl files failed. id: {}, state: {}", id, state, e); } } LOG.debug("clear push tasks and infos that not persist. id: {}, state: {}", id, state); writeLock(); try { for (Map<Long, PushTask> sentReplicaPushTask : tabletToSentReplicaPushTask.values()) { for (PushTask pushTask : sentReplicaPushTask.values()) { if (pushTask == null) { continue; } AgentTaskQueue.removeTask(pushTask.getBackendId(), pushTask.getTaskType(), pushTask.getSignature()); } } sparkLoadAppHandle = null; resourceDesc = null; tableToLoadPartitions.clear(); indexToPushBrokerReaderParams.clear(); indexToSchemaHash.clear(); tabletToSentReplicaPushTask.clear(); finishedReplicas.clear(); quorumTablets.clear(); fullTablets.clear(); } finally { writeUnlock(); } } @Override public void afterVisible(TransactionState txnState, boolean txnOperated) { super.afterVisible(txnState, txnOperated); clearJob(); } @Override public void cancelJobWithoutCheck(FailMsg failMsg, boolean abortTxn, boolean needLog) { super.cancelJobWithoutCheck(failMsg, abortTxn, needLog); clearJob(); } @Override public void cancelJob(FailMsg failMsg) throws DdlException { super.cancelJob(failMsg); clearJob(); } @Override protected String getResourceName() { return sparkResource.getName(); } @Override protected long getEtlStartTimestamp() { return etlStartTimestamp; } public SparkLoadAppHandle getHandle() { return sparkLoadAppHandle; } @Override public void write(DataOutput out) throws IOException { super.write(out); sparkResource.write(out); out.writeLong(etlStartTimestamp); Text.writeString(out, appId); Text.writeString(out, etlOutputPath); out.writeInt(tabletMetaToFileInfo.size()); for (Map.Entry<String, Pair<String, Long>> entry : tabletMetaToFileInfo.entrySet()) { 
            // one (tabletMeta -> (filePath, fileSize)) entry per iteration; order must
            // match readFields()
            Text.writeString(out, entry.getKey());
            Text.writeString(out, entry.getValue().first);
            out.writeLong(entry.getValue().second);
        }
    }

    /** Deserializes the spark-specific state written by write(); order must match. */
    public void readFields(DataInput in) throws IOException {
        super.readFields(in);
        sparkResource = (SparkResource) Resource.read(in);
        etlStartTimestamp = in.readLong();
        appId = Text.readString(in);
        etlOutputPath = Text.readString(in);
        int size = in.readInt();
        for (int i = 0; i < size; i++) {
            String tabletMetaStr = Text.readString(in);
            Pair<String, Long> fileInfo = Pair.create(Text.readString(in), in.readLong());
            tabletMetaToFileInfo.put(tabletMetaStr, fileInfo);
        }
    }

    /**
     * log load job update info when job state changed to etl or loading
     */
    private void unprotectedLogUpdateStateInfo() {
        SparkLoadJobStateUpdateInfo info = new SparkLoadJobStateUpdateInfo(
                id, state, transactionId, etlStartTimestamp, appId, etlOutputPath,
                loadStartTimestamp, tabletMetaToFileInfo);
        Catalog.getCurrentCatalog().getEditLog().logUpdateLoadJob(info);
    }

    /** Replays a journaled state change (ETL or LOADING) on a follower / at startup. */
    @Override
    public void replayUpdateStateInfo(LoadJobStateUpdateInfo info) {
        super.replayUpdateStateInfo(info);
        SparkLoadJobStateUpdateInfo sparkJobStateInfo = (SparkLoadJobStateUpdateInfo) info;
        etlStartTimestamp = sparkJobStateInfo.getEtlStartTimestamp();
        appId = sparkJobStateInfo.getAppId();
        etlOutputPath = sparkJobStateInfo.getEtlOutputPath();
        tabletMetaToFileInfo = sparkJobStateInfo.getTabletMetaToFileInfo();

        switch (state) {
            case ETL:
                // nothing else to restore for ETL state
                break;
            case LOADING:
                // rebuild the transient loading structures from tabletMetaToFileInfo
                unprotectedPrepareLoadingInfos();
                break;
            default:
                LOG.warn("replay update load job state info failed. error: wrong state. job id: {}, state: {}",
                        id, state);
                break;
        }
    }

    /**
     * Used for spark load job journal log when job state changed to ETL or LOADING
     */
    public static class SparkLoadJobStateUpdateInfo extends LoadJobStateUpdateInfo {
        @SerializedName(value = "etlStartTimestamp")
        private long etlStartTimestamp;
        @SerializedName(value = "appId")
        private String appId;
        @SerializedName(value = "etlOutputPath")
        private String etlOutputPath;
        @SerializedName(value = "tabletMetaToFileInfo")
        private Map<String, Pair<String, Long>> tabletMetaToFileInfo;

        public SparkLoadJobStateUpdateInfo(long jobId, JobState state, long transactionId,
                long etlStartTimestamp, String appId, String etlOutputPath, long loadStartTimestamp,
                Map<String, Pair<String, Long>> tabletMetaToFileInfo) {
            super(jobId, state, transactionId, loadStartTimestamp);
            this.etlStartTimestamp = etlStartTimestamp;
            this.appId = appId;
            this.etlOutputPath = etlOutputPath;
            this.tabletMetaToFileInfo = tabletMetaToFileInfo;
        }

        public long getEtlStartTimestamp() {
            return etlStartTimestamp;
        }

        public String getAppId() {
            return appId;
        }

        public String getEtlOutputPath() {
            return etlOutputPath;
        }

        public Map<String, Pair<String, Long>> getTabletMetaToFileInfo() {
            return tabletMetaToFileInfo;
        }
    }

    /**
     * Params for be push broker reader
     * 1. TBrokerScanRange: file path and size, broker address, transform expr
     * 2. TDescriptorTable: src and dest SlotDescriptors, src and dest tupleDescriptors
     *
     * These params are sent to Be through push task
     */
    private static class PushBrokerReaderParams {
        TBrokerScanRange tBrokerScanRange;
        TDescriptorTable tDescriptorTable;

        public PushBrokerReaderParams() {
            this.tBrokerScanRange = new TBrokerScanRange();
            this.tDescriptorTable = null;
        }

        /** Builds the scan range template and descriptor table for one index schema. */
        public void init(List<Column> columns, BrokerDesc brokerDesc) throws UserException {
            // destination tuple mirrors the index schema
            DescriptorTable descTable = new DescriptorTable();
            TupleDescriptor destTupleDesc = descTable.createTupleDescriptor();
            for (Column column : columns) {
                SlotDescriptor destSlotDesc = descTable.addSlotDescriptor(destTupleDesc);
                destSlotDesc.setIsMaterialized(true);
                destSlotDesc.setColumn(column);
                if (column.isAllowNull()) {
                    destSlotDesc.setIsNullable(true);
                } else {
                    destSlotDesc.setIsNullable(false);
                }
            }

            initTBrokerScanRange(descTable, destTupleDesc, columns, brokerDesc);
            initTDescriptorTable(descTable);
        }

        private void initTBrokerScanRange(DescriptorTable descTable, TupleDescriptor destTupleDesc,
                List<Column> columns, BrokerDesc brokerDesc) throws AnalysisException {
            TBrokerScanRangeParams params = new TBrokerScanRangeParams();
            params.setStrict_mode(false);
            params.setProperties(brokerDesc.getProperties());
            // source tuple: every slot is VARCHAR and nullable; values are cast to the
            // destination column types via the expressions below
            TupleDescriptor srcTupleDesc = descTable.createTupleDescriptor();
            Map<String, SlotDescriptor> srcSlotDescByName = Maps.newHashMap();
            for (Column column : columns) {
                SlotDescriptor srcSlotDesc = descTable.addSlotDescriptor(srcTupleDesc);
                srcSlotDesc.setType(ScalarType.createType(PrimitiveType.VARCHAR));
                srcSlotDesc.setIsMaterialized(true);
                srcSlotDesc.setIsNullable(true);
                srcSlotDesc.setColumn(new Column(column.getName(), PrimitiveType.VARCHAR));
                params.addToSrc_slot_ids(srcSlotDesc.getId().asInt());
                srcSlotDescByName.put(column.getName(), srcSlotDesc);
            }

            // map each dest slot to its src slot and attach the cast expression
            Map<Integer, Integer> destSidToSrcSidWithoutTrans = Maps.newHashMap();
            for (SlotDescriptor destSlotDesc : destTupleDesc.getSlots()) {
                if (!destSlotDesc.isMaterialized()) {
                    continue;
                }
                SlotDescriptor srcSlotDesc = srcSlotDescByName.get(destSlotDesc.getColumn().getName());
                destSidToSrcSidWithoutTrans.put(destSlotDesc.getId().asInt(), srcSlotDesc.getId().asInt());
                Expr expr = new SlotRef(srcSlotDesc);
                expr = castToSlot(destSlotDesc, expr);
                params.putToExpr_of_dest_slot(destSlotDesc.getId().asInt(), expr.treeToThrift());
            }
            params.setDest_sid_to_src_sid_without_trans(destSidToSrcSidWithoutTrans);
            params.setSrc_tuple_id(srcTupleDesc.getId().asInt());
            params.setDest_tuple_id(destTupleDesc.getId().asInt());
            tBrokerScanRange.setParams(params);

            // broker address list is filled in per replica in submitPushTasks()
            tBrokerScanRange.setBroker_addresses(Lists.newArrayList());

            // range desc template; path and file_size are also set per replica
            TBrokerRangeDesc tBrokerRangeDesc = new TBrokerRangeDesc();
            tBrokerRangeDesc.setFile_type(TFileType.FILE_BROKER);
            tBrokerRangeDesc.setFormat_type(TFileFormatType.FORMAT_PARQUET);
            tBrokerRangeDesc.setSplittable(false);
            tBrokerRangeDesc.setStart_offset(0);
            tBrokerRangeDesc.setSize(-1);
            tBrokerScanRange.setRanges(Lists.newArrayList(tBrokerRangeDesc));
        }

        private Expr castToSlot(SlotDescriptor slotDesc, Expr expr) throws AnalysisException {
            PrimitiveType dstType = slotDesc.getType().getPrimitiveType();
            PrimitiveType srcType = expr.getType().getPrimitiveType();
            if (dstType == PrimitiveType.BOOLEAN && srcType == PrimitiveType.VARCHAR) {
                // VARCHAR -> BOOLEAN is done through TINYINT (presumably no direct
                // VARCHAR-to-BOOLEAN cast exists -- confirm against cast function registry)
                return new CastExpr(Type.BOOLEAN, new CastExpr(Type.TINYINT, expr));
            }
            if (dstType != srcType) {
                return expr.castTo(slotDesc.getType());
            }
            return expr;
        }

        private void initTDescriptorTable(DescriptorTable descTable) {
            descTable.computeMemLayout();
            tDescriptorTable = descTable.toThrift();
        }
    }
}
class SparkLoadJob extends BulkLoadJob { private static final Logger LOG = LogManager.getLogger(SparkLoadJob.class); private SparkResource sparkResource; private long etlStartTimestamp = -1; private String appId = ""; private String etlOutputPath = ""; private Map<String, Pair<String, Long>> tabletMetaToFileInfo = Maps.newHashMap(); private ResourceDesc resourceDesc; private SparkLoadAppHandle sparkLoadAppHandle; private long quorumFinishTimestamp = -1; private Map<Long, Set<Long>> tableToLoadPartitions = Maps.newHashMap(); private Map<Long, PushBrokerReaderParams> indexToPushBrokerReaderParams = Maps.newHashMap(); private Map<Long, Integer> indexToSchemaHash = Maps.newHashMap(); private Map<Long, Map<Long, PushTask>> tabletToSentReplicaPushTask = Maps.newHashMap(); private Set<Long> finishedReplicas = Sets.newHashSet(); private Set<Long> quorumTablets = Sets.newHashSet(); private Set<Long> fullTablets = Sets.newHashSet(); public SparkLoadJob() { super(); jobType = EtlJobType.SPARK; } public SparkLoadJob(long dbId, String label, ResourceDesc resourceDesc, OriginStatement originStmt) throws MetaNotFoundException { super(dbId, label, originStmt); this.resourceDesc = resourceDesc; timeoutSecond = Config.spark_load_default_timeout_second; jobType = EtlJobType.SPARK; } @Override protected void setJobProperties(Map<String, String> properties) throws DdlException { super.setJobProperties(properties); setResourceInfo(); } /** * merge system conf with load stmt * @throws DdlException */ private void setResourceInfo() throws DdlException { String resourceName = resourceDesc.getName(); Resource oriResource = Catalog.getCurrentCatalog().getResourceMgr().getResource(resourceName); if (oriResource == null) { throw new DdlException("Resource does not exist. 
name: " + resourceName); } sparkResource = ((SparkResource) oriResource).getCopiedResource(); sparkResource.update(resourceDesc); Map<String, String> brokerProperties = sparkResource.getBrokerPropertiesWithoutPrefix(); brokerDesc = new BrokerDesc(sparkResource.getBroker(), brokerProperties); } @Override public void beginTxn() throws LabelAlreadyUsedException, BeginTransactionException, AnalysisException, DuplicatedRequestException { transactionId = Catalog.getCurrentGlobalTransactionMgr() .beginTransaction(dbId, Lists.newArrayList(fileGroupAggInfo.getAllTableIds()), label, null, new TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()), LoadJobSourceType.FRONTEND, id, timeoutSecond); } @Override protected void unprotectedExecuteJob() throws LoadException { LoadTask task = new SparkLoadPendingTask(this, fileGroupAggInfo.getAggKeyToFileGroups(), sparkResource, brokerDesc); task.init(); idToTasks.put(task.getSignature(), task); Catalog.getCurrentCatalog().getLoadTaskScheduler().submit(task); } @Override public void onTaskFinished(TaskAttachment attachment) { if (attachment instanceof SparkPendingTaskAttachment) { onPendingTaskFinished((SparkPendingTaskAttachment) attachment); } } private void onPendingTaskFinished(SparkPendingTaskAttachment attachment) { writeLock(); try { if (isTxnDone()) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("state", state) .add("error_msg", "this task will be ignored when job is: " + state) .build()); return; } if (finishedTaskIds.contains(attachment.getTaskId())) { LOG.warn(new LogBuilder(LogKey.LOAD_JOB, id) .add("task_id", attachment.getTaskId()) .add("error_msg", "this is a duplicated callback of pending task " + "when broker already has loading task") .build()); return; } finishedTaskIds.add(attachment.getTaskId()); sparkLoadAppHandle = attachment.getHandle(); appId = attachment.getAppId(); etlOutputPath = attachment.getOutputPath(); executeEtl(); unprotectedLogUpdateStateInfo(); } finally { writeUnlock(); } } 
/** * update etl start time and state in spark load job */ private void executeEtl() { etlStartTimestamp = System.currentTimeMillis(); state = JobState.ETL; LOG.info("update to {} state success. job id: {}", state, id); } private boolean checkState(JobState expectState) { readLock(); try { if (state == expectState) { return true; } return false; } finally { readUnlock(); } } /** * Check the status of etl job regularly * 1. RUNNING, update etl job progress * 2. CANCELLED, cancel load job * 3. FINISHED, get the etl output file paths, update job state to LOADING and log job update info * * Send push tasks if job state changed to LOADING */ public void updateEtlStatus() throws Exception { if (!checkState(JobState.ETL)) { return; } SparkEtlJobHandler handler = new SparkEtlJobHandler(); EtlStatus status = handler.getEtlJobStatus(sparkLoadAppHandle, appId, id, etlOutputPath, sparkResource, brokerDesc); writeLock(); try { switch (status.getState()) { case RUNNING: unprotectedUpdateEtlStatusInternal(status); break; case FINISHED: unprotectedProcessEtlFinish(status, handler); break; case CANCELLED: throw new LoadException("spark etl job failed. 
msg: " + status.getFailMsg()); default: LOG.warn("unknown etl state: {}", status.getState().name()); break; } } finally { writeUnlock(); } if (checkState(JobState.LOADING)) { submitPushTasks(); } } private void unprotectedUpdateEtlStatusInternal(EtlStatus etlStatus) { loadingStatus = etlStatus; progress = etlStatus.getProgress(); if (!sparkResource.isYarnMaster()) { loadingStatus.setTrackingUrl(appId); } DppResult dppResult = etlStatus.getDppResult(); if (dppResult != null) { loadStatistic.fileNum = (int) dppResult.fileNumber; loadStatistic.totalFileSizeB = dppResult.fileSize; TUniqueId dummyId = new TUniqueId(0, 0); long dummyBackendId = -1L; loadStatistic.initLoad(dummyId, Sets.newHashSet(dummyId), Lists.newArrayList(dummyBackendId)); loadStatistic.updateLoadProgress(dummyBackendId, dummyId, dummyId, dppResult.scannedRows, true); Map<String, String> counters = loadingStatus.getCounters(); counters.put(DPP_NORMAL_ALL, String.valueOf(dppResult.normalRows)); counters.put(DPP_ABNORMAL_ALL, String.valueOf(dppResult.abnormalRows)); counters.put(UNSELECTED_ROWS, String.valueOf(dppResult.unselectRows)); } } private void unprotectedProcessEtlFinish(EtlStatus etlStatus, SparkEtlJobHandler handler) throws Exception { unprotectedUpdateEtlStatusInternal(etlStatus); if (!checkDataQuality()) { throw new DataQualityException(DataQualityException.QUALITY_FAIL_MSG); } unprotectedUpdateToLoadingState(etlStatus, handler.getEtlFilePaths(etlOutputPath, brokerDesc)); unprotectedLogUpdateStateInfo(); unprotectedPrepareLoadingInfos(); } private void unprotectedUpdateToLoadingState(EtlStatus etlStatus, Map<String, Long> filePathToSize) throws LoadException { try { for (Map.Entry<String, Long> entry : filePathToSize.entrySet()) { String filePath = entry.getKey(); if (!filePath.endsWith(EtlJobConfig.ETL_OUTPUT_FILE_FORMAT)) { continue; } String tabletMetaStr = EtlJobConfig.getTabletMetaStr(filePath); tabletMetaToFileInfo.put(tabletMetaStr, Pair.create(filePath, entry.getValue())); } 
loadingStatus = etlStatus; progress = 0; unprotectedUpdateState(JobState.LOADING); LOG.info("update to {} state success. job id: {}", state, id); } catch (Exception e) { LOG.warn("update to {} state failed. job id: {}", state, id, e); throw new LoadException(e.getMessage(), e); } } private void unprotectedPrepareLoadingInfos() { for (String tabletMetaStr : tabletMetaToFileInfo.keySet()) { String[] fileNameArr = tabletMetaStr.split("\\."); Preconditions.checkState(fileNameArr.length == 5); long tableId = Long.parseLong(fileNameArr[0]); long partitionId = Long.parseLong(fileNameArr[1]); long indexId = Long.parseLong(fileNameArr[2]); int schemaHash = Integer.parseInt(fileNameArr[4]); if (!tableToLoadPartitions.containsKey(tableId)) { tableToLoadPartitions.put(tableId, Sets.newHashSet()); } tableToLoadPartitions.get(tableId).add(partitionId); indexToSchemaHash.put(indexId, schemaHash); } } private PushBrokerReaderParams getPushBrokerReaderParams(OlapTable table, long indexId) throws UserException { if (!indexToPushBrokerReaderParams.containsKey(indexId)) { PushBrokerReaderParams pushBrokerReaderParams = new PushBrokerReaderParams(); pushBrokerReaderParams.init(table.getSchemaByIndexId(indexId), brokerDesc); indexToPushBrokerReaderParams.put(indexId, pushBrokerReaderParams); } return indexToPushBrokerReaderParams.get(indexId); } private Set<Long> submitPushTasks() throws UserException { Database db = null; try { db = getDb(); } catch (MetaNotFoundException e) { String errMsg = new LogBuilder(LogKey.LOAD_JOB, id) .add("database_id", dbId) .add("label", label) .add("error_msg", "db has been deleted when job is loading") .build(); throw new MetaNotFoundException(errMsg); } AgentBatchTask batchTask = new AgentBatchTask(); boolean hasLoadPartitions = false; Set<Long> totalTablets = Sets.newHashSet(); db.readLock(); try { writeLock(); try { if (state != JobState.LOADING) { LOG.warn("job state is not loading. 
job id: {}, state: {}", id, state); return totalTablets; } for (Map.Entry<Long, Set<Long>> entry : tableToLoadPartitions.entrySet()) { long tableId = entry.getKey(); OlapTable table = (OlapTable) db.getTable(tableId); if (table == null) { LOG.warn("table does not exist. id: {}", tableId); continue; } Set<Long> partitionIds = entry.getValue(); for (long partitionId : partitionIds) { Partition partition = table.getPartition(partitionId); if (partition == null) { LOG.warn("partition does not exist. id: {}", partitionId); continue; } hasLoadPartitions = true; int quorumReplicaNum = table.getPartitionInfo().getReplicationNum(partitionId) / 2 + 1; List<MaterializedIndex> indexes = partition.getMaterializedIndices(IndexExtState.ALL); for (MaterializedIndex index : indexes) { long indexId = index.getId(); int schemaHash = indexToSchemaHash.get(indexId); int bucket = 0; for (Tablet tablet : index.getTablets()) { long tabletId = tablet.getId(); totalTablets.add(tabletId); String tabletMetaStr = String.format("%d.%d.%d.%d.%d", tableId, partitionId, indexId, bucket++, schemaHash); Set<Long> tabletAllReplicas = Sets.newHashSet(); Set<Long> tabletFinishedReplicas = Sets.newHashSet(); for (Replica replica : tablet.getReplicas()) { long replicaId = replica.getId(); tabletAllReplicas.add(replicaId); if (!tabletToSentReplicaPushTask.containsKey(tabletId) || !tabletToSentReplicaPushTask.get(tabletId).containsKey(replicaId)) { long backendId = replica.getBackendId(); long taskSignature = Catalog.getCurrentGlobalTransactionMgr() .getTransactionIDGenerator().getNextTransactionId(); PushBrokerReaderParams params = getPushBrokerReaderParams(table, indexId); TBrokerScanRange tBrokerScanRange = new TBrokerScanRange(params.tBrokerScanRange); TBrokerRangeDesc tBrokerRangeDesc = tBrokerScanRange.getRanges().get(0); tBrokerRangeDesc.setPath(""); tBrokerRangeDesc.setFile_size(-1); if (tabletMetaToFileInfo.containsKey(tabletMetaStr)) { Pair<String, Long> fileInfo = 
tabletMetaToFileInfo.get(tabletMetaStr); tBrokerRangeDesc.setPath(fileInfo.first); tBrokerRangeDesc.setFile_size(fileInfo.second); } Backend backend = Catalog.getCurrentCatalog().getCurrentSystemInfo() .getBackend(backendId); FsBroker fsBroker = Catalog.getCurrentCatalog().getBrokerMgr().getBroker( brokerDesc.getName(), backend.getHost()); tBrokerScanRange.getBroker_addresses().add( new TNetworkAddress(fsBroker.ip, fsBroker.port)); LOG.debug("push task for replica {}, broker {}:{}, backendId {}, filePath {}, fileSize {}" , replicaId, fsBroker.ip, fsBroker.port, backendId, tBrokerRangeDesc.path, tBrokerRangeDesc.file_size); PushTask pushTask = new PushTask(backendId, dbId, tableId, partitionId, indexId, tabletId, replicaId, schemaHash, 0, id, TPushType.LOAD_V2, TPriority.NORMAL, transactionId, taskSignature, tBrokerScanRange, params.tDescriptorTable); if (AgentTaskQueue.addTask(pushTask)) { batchTask.addTask(pushTask); if (!tabletToSentReplicaPushTask.containsKey(tabletId)) { tabletToSentReplicaPushTask.put(tabletId, Maps.newHashMap()); } tabletToSentReplicaPushTask.get(tabletId).put(replicaId, pushTask); } } if (finishedReplicas.contains(replicaId) && replica.getLastFailedVersion() < 0) { tabletFinishedReplicas.add(replicaId); } } if (tabletAllReplicas.size() == 0) { LOG.error("invalid situation. tablet is empty. 
id: {}", tabletId); } if (tabletFinishedReplicas.size() >= quorumReplicaNum) { quorumTablets.add(tabletId); if (tabletFinishedReplicas.size() == tabletAllReplicas.size()) { fullTablets.add(tabletId); } } } } } } if (batchTask.getTaskNum() > 0) { AgentTaskExecutor.submit(batchTask); } if (!hasLoadPartitions) { String errMsg = new LogBuilder(LogKey.LOAD_JOB, id) .add("database_id", dbId) .add("label", label) .add("error_msg", "all partitions have no load data") .build(); throw new LoadException(errMsg); } return totalTablets; } finally { writeUnlock(); } } finally { db.readUnlock(); } } public void addFinishedReplica(long replicaId, long tabletId, long backendId) { writeLock(); try { if (finishedReplicas.add(replicaId)) { commitInfos.add(new TabletCommitInfo(tabletId, backendId)); Map<Long, PushTask> sentReplicaPushTask = tabletToSentReplicaPushTask.get(tabletId); if (sentReplicaPushTask != null) { if (sentReplicaPushTask.containsKey(replicaId)) { sentReplicaPushTask.put(replicaId, null); } } } } finally { writeUnlock(); } } /** * 1. Sends push tasks to Be * 2. Commit transaction after all push tasks execute successfully */ public void updateLoadingStatus() throws UserException { if (!checkState(JobState.LOADING)) { return; } Set<Long> totalTablets = submitPushTasks(); if (totalTablets.isEmpty()) { LOG.warn("total tablets set is empty. 
job id: {}, state: {}", id, state); return; } boolean canCommitJob = false; writeLock(); try { progress = fullTablets.size() * 100 / totalTablets.size(); if (progress == 100) { progress = 99; } if (quorumFinishTimestamp < 0 && quorumTablets.containsAll(totalTablets)) { quorumFinishTimestamp = System.currentTimeMillis(); } long stragglerTimeout = Config.load_straggler_wait_second * 1000; if ((quorumFinishTimestamp > 0 && System.currentTimeMillis() - quorumFinishTimestamp > stragglerTimeout) || fullTablets.containsAll(totalTablets)) { canCommitJob = true; } } finally { writeUnlock(); } if (canCommitJob) { tryCommitJob(); } } private void tryCommitJob() throws UserException { LOG.info(new LogBuilder(LogKey.LOAD_JOB, id) .add("txn_id", transactionId) .add("msg", "Load job try to commit txn") .build()); Database db = getDb(); db.writeLock(); try { Catalog.getCurrentGlobalTransactionMgr().commitTransaction( dbId, transactionId, commitInfos, new LoadJobFinalOperation(id, loadingStatus, progress, loadStartTimestamp, finishTimestamp, state, failMsg)); } catch (TabletQuorumFailedException e) { } finally { db.writeUnlock(); } } /** * load job already cancelled or finished, clear job below: * 1. kill etl job and delete etl files * 2. clear push tasks and infos that not persist */ private void clearJob() { Preconditions.checkState(state == JobState.FINISHED || state == JobState.CANCELLED); LOG.debug("kill etl job and delete etl files. id: {}, state: {}", id, state); SparkEtlJobHandler handler = new SparkEtlJobHandler(); if (state == JobState.CANCELLED) { if ((!Strings.isNullOrEmpty(appId) && sparkResource.isYarnMaster()) || sparkLoadAppHandle != null) { try { handler.killEtlJob(sparkLoadAppHandle, appId, id, sparkResource); } catch (Exception e) { LOG.warn("kill etl job failed. 
id: {}, state: {}", id, state, e); } } } if (!Strings.isNullOrEmpty(etlOutputPath)) { try { String outputPath = etlOutputPath.substring(0, etlOutputPath.lastIndexOf("/")); handler.deleteEtlOutputPath(outputPath, brokerDesc); } catch (Exception e) { LOG.warn("delete etl files failed. id: {}, state: {}", id, state, e); } } LOG.debug("clear push tasks and infos that not persist. id: {}, state: {}", id, state); writeLock(); try { for (Map<Long, PushTask> sentReplicaPushTask : tabletToSentReplicaPushTask.values()) { for (PushTask pushTask : sentReplicaPushTask.values()) { if (pushTask == null) { continue; } AgentTaskQueue.removeTask(pushTask.getBackendId(), pushTask.getTaskType(), pushTask.getSignature()); } } sparkLoadAppHandle = null; resourceDesc = null; tableToLoadPartitions.clear(); indexToPushBrokerReaderParams.clear(); indexToSchemaHash.clear(); tabletToSentReplicaPushTask.clear(); finishedReplicas.clear(); quorumTablets.clear(); fullTablets.clear(); } finally { writeUnlock(); } } @Override public void afterVisible(TransactionState txnState, boolean txnOperated) { super.afterVisible(txnState, txnOperated); clearJob(); } @Override public void cancelJobWithoutCheck(FailMsg failMsg, boolean abortTxn, boolean needLog) { super.cancelJobWithoutCheck(failMsg, abortTxn, needLog); clearJob(); } @Override public void cancelJob(FailMsg failMsg) throws DdlException { super.cancelJob(failMsg); clearJob(); } @Override protected String getResourceName() { return sparkResource.getName(); } @Override protected long getEtlStartTimestamp() { return etlStartTimestamp; } @Override public void write(DataOutput out) throws IOException { super.write(out); sparkResource.write(out); out.writeLong(etlStartTimestamp); Text.writeString(out, appId); Text.writeString(out, etlOutputPath); out.writeInt(tabletMetaToFileInfo.size()); for (Map.Entry<String, Pair<String, Long>> entry : tabletMetaToFileInfo.entrySet()) { Text.writeString(out, entry.getKey()); Text.writeString(out, 
entry.getValue().first); out.writeLong(entry.getValue().second); } } public void readFields(DataInput in) throws IOException { super.readFields(in); sparkResource = (SparkResource) Resource.read(in); etlStartTimestamp = in.readLong(); appId = Text.readString(in); etlOutputPath = Text.readString(in); int size = in.readInt(); for (int i = 0; i < size; i++) { String tabletMetaStr = Text.readString(in); Pair<String, Long> fileInfo = Pair.create(Text.readString(in), in.readLong()); tabletMetaToFileInfo.put(tabletMetaStr, fileInfo); } } /** * log load job update info when job state changed to etl or loading */ private void unprotectedLogUpdateStateInfo() { SparkLoadJobStateUpdateInfo info = new SparkLoadJobStateUpdateInfo( id, state, transactionId, etlStartTimestamp, appId, etlOutputPath, loadStartTimestamp, tabletMetaToFileInfo); Catalog.getCurrentCatalog().getEditLog().logUpdateLoadJob(info); } @Override public void replayUpdateStateInfo(LoadJobStateUpdateInfo info) { super.replayUpdateStateInfo(info); SparkLoadJobStateUpdateInfo sparkJobStateInfo = (SparkLoadJobStateUpdateInfo) info; etlStartTimestamp = sparkJobStateInfo.getEtlStartTimestamp(); appId = sparkJobStateInfo.getAppId(); etlOutputPath = sparkJobStateInfo.getEtlOutputPath(); tabletMetaToFileInfo = sparkJobStateInfo.getTabletMetaToFileInfo(); switch (state) { case ETL: break; case LOADING: unprotectedPrepareLoadingInfos(); break; default: LOG.warn("replay update load job state info failed. error: wrong state. 
job id: {}, state: {}", id, state); break; } } /** * Used for spark load job journal log when job state changed to ETL or LOADING */ public static class SparkLoadJobStateUpdateInfo extends LoadJobStateUpdateInfo { @SerializedName(value = "etlStartTimestamp") private long etlStartTimestamp; @SerializedName(value = "appId") private String appId; @SerializedName(value = "etlOutputPath") private String etlOutputPath; @SerializedName(value = "tabletMetaToFileInfo") private Map<String, Pair<String, Long>> tabletMetaToFileInfo; public SparkLoadJobStateUpdateInfo(long jobId, JobState state, long transactionId, long etlStartTimestamp, String appId, String etlOutputPath, long loadStartTimestamp, Map<String, Pair<String, Long>> tabletMetaToFileInfo) { super(jobId, state, transactionId, loadStartTimestamp); this.etlStartTimestamp = etlStartTimestamp; this.appId = appId; this.etlOutputPath = etlOutputPath; this.tabletMetaToFileInfo = tabletMetaToFileInfo; } public long getEtlStartTimestamp() { return etlStartTimestamp; } public String getAppId() { return appId; } public String getEtlOutputPath() { return etlOutputPath; } public Map<String, Pair<String, Long>> getTabletMetaToFileInfo() { return tabletMetaToFileInfo; } } /** * Params for be push broker reader * 1. TBrokerScanRange: file path and size, broker address, tranform expr * 2. 
TDescriptorTable: src and dest SlotDescriptors, src and dest tupleDescriptors * * These params are sent to Be through push task */ private static class PushBrokerReaderParams { TBrokerScanRange tBrokerScanRange; TDescriptorTable tDescriptorTable; public PushBrokerReaderParams() { this.tBrokerScanRange = new TBrokerScanRange(); this.tDescriptorTable = null; } public void init(List<Column> columns, BrokerDesc brokerDesc) throws UserException { DescriptorTable descTable = new DescriptorTable(); TupleDescriptor destTupleDesc = descTable.createTupleDescriptor(); for (Column column : columns) { SlotDescriptor destSlotDesc = descTable.addSlotDescriptor(destTupleDesc); destSlotDesc.setIsMaterialized(true); destSlotDesc.setColumn(column); if (column.isAllowNull()) { destSlotDesc.setIsNullable(true); } else { destSlotDesc.setIsNullable(false); } } initTBrokerScanRange(descTable, destTupleDesc, columns, brokerDesc); initTDescriptorTable(descTable); } private void initTBrokerScanRange(DescriptorTable descTable, TupleDescriptor destTupleDesc, List<Column> columns, BrokerDesc brokerDesc) throws AnalysisException { TBrokerScanRangeParams params = new TBrokerScanRangeParams(); params.setStrict_mode(false); params.setProperties(brokerDesc.getProperties()); TupleDescriptor srcTupleDesc = descTable.createTupleDescriptor(); Map<String, SlotDescriptor> srcSlotDescByName = Maps.newHashMap(); for (Column column : columns) { SlotDescriptor srcSlotDesc = descTable.addSlotDescriptor(srcTupleDesc); srcSlotDesc.setType(ScalarType.createType(PrimitiveType.VARCHAR)); srcSlotDesc.setIsMaterialized(true); srcSlotDesc.setIsNullable(true); srcSlotDesc.setColumn(new Column(column.getName(), PrimitiveType.VARCHAR)); params.addToSrc_slot_ids(srcSlotDesc.getId().asInt()); srcSlotDescByName.put(column.getName(), srcSlotDesc); } Map<Integer, Integer> destSidToSrcSidWithoutTrans = Maps.newHashMap(); for (SlotDescriptor destSlotDesc : destTupleDesc.getSlots()) { if (!destSlotDesc.isMaterialized()) { 
continue; } SlotDescriptor srcSlotDesc = srcSlotDescByName.get(destSlotDesc.getColumn().getName()); destSidToSrcSidWithoutTrans.put(destSlotDesc.getId().asInt(), srcSlotDesc.getId().asInt()); Expr expr = new SlotRef(srcSlotDesc); expr = castToSlot(destSlotDesc, expr); params.putToExpr_of_dest_slot(destSlotDesc.getId().asInt(), expr.treeToThrift()); } params.setDest_sid_to_src_sid_without_trans(destSidToSrcSidWithoutTrans); params.setSrc_tuple_id(srcTupleDesc.getId().asInt()); params.setDest_tuple_id(destTupleDesc.getId().asInt()); tBrokerScanRange.setParams(params); tBrokerScanRange.setBroker_addresses(Lists.newArrayList()); TBrokerRangeDesc tBrokerRangeDesc = new TBrokerRangeDesc(); tBrokerRangeDesc.setFile_type(TFileType.FILE_BROKER); tBrokerRangeDesc.setFormat_type(TFileFormatType.FORMAT_PARQUET); tBrokerRangeDesc.setSplittable(false); tBrokerRangeDesc.setStart_offset(0); tBrokerRangeDesc.setSize(-1); tBrokerScanRange.setRanges(Lists.newArrayList(tBrokerRangeDesc)); } private Expr castToSlot(SlotDescriptor slotDesc, Expr expr) throws AnalysisException { PrimitiveType dstType = slotDesc.getType().getPrimitiveType(); PrimitiveType srcType = expr.getType().getPrimitiveType(); if (dstType == PrimitiveType.BOOLEAN && srcType == PrimitiveType.VARCHAR) { return new CastExpr(Type.BOOLEAN, new CastExpr(Type.TINYINT, expr)); } if (dstType != srcType) { return expr.castTo(slotDesc.getType()); } return expr; } private void initTDescriptorTable(DescriptorTable descTable) { descTable.computeMemLayout(); tDescriptorTable = descTable.toThrift(); } } }
```suggestion getProject().getLogger().warn("quarkus info is experimental, its options and output might change in future versions"); ``` In maven output this comes possibly many many lines away from what you just executed thus better to be explicit.
public void logInfo() { getProject().getLogger().warn("This task is experimental, its options and output might change in future versions"); final QuarkusProject quarkusProject = getQuarkusProject(false); final Map<String, Object> params = new HashMap<>(); params.put(UpdateCommandHandler.APP_MODEL, extension().getApplicationModel()); params.put(UpdateCommandHandler.LOG_STATE_PER_MODULE, perModule); final QuarkusCommandInvocation invocation = new QuarkusCommandInvocation(quarkusProject, params); final QuarkusCommandOutcome outcome; try { outcome = new InfoCommandHandler().execute(invocation); } catch (Exception e) { throw new GradleException("Failed to collect Quarkus project information", e); } if (outcome.getValue(InfoCommandHandler.RECOMMENDATIONS_AVAILABLE, false)) { this.getProject().getLogger().warn( "Non-recommended Quarkus platform BOM and/or extension versions were found. For more details, please, execute 'gradle quarkusUpdate --rectify'"); } }
getProject().getLogger().warn("This task is experimental, its options and output might change in future versions");
public void logInfo() { getProject().getLogger().warn(getName() + " is experimental, its options and output might change in future versions"); final QuarkusProject quarkusProject = getQuarkusProject(false); final Map<String, Object> params = new HashMap<>(); params.put(UpdateCommandHandler.APP_MODEL, extension().getApplicationModel()); params.put(UpdateCommandHandler.LOG_STATE_PER_MODULE, perModule); final QuarkusCommandInvocation invocation = new QuarkusCommandInvocation(quarkusProject, params); final QuarkusCommandOutcome outcome; try { outcome = new InfoCommandHandler().execute(invocation); } catch (Exception e) { throw new GradleException("Failed to collect Quarkus project information", e); } if (outcome.getValue(InfoCommandHandler.RECOMMENDATIONS_AVAILABLE, false)) { this.getProject().getLogger().warn( "Non-recommended Quarkus platform BOM and/or extension versions were found. For more details, please, execute 'gradle quarkusUpdate --rectify'"); } }
class QuarkusInfo extends QuarkusPlatformTask { private boolean perModule = false; @Input public boolean getPerModule() { return perModule; } @Option(description = "Log project's state per module.", option = "perModule") public void setPerModule(boolean perModule) { this.perModule = perModule; } public QuarkusInfo() { super("Log Quarkus-specific project information, such as imported Quarkus platform BOMs, Quarkus extensions found among the project dependencies, etc."); } @TaskAction }
class QuarkusInfo extends QuarkusPlatformTask { private boolean perModule = false; @Input public boolean getPerModule() { return perModule; } @Option(description = "Log project's state per module.", option = "perModule") public void setPerModule(boolean perModule) { this.perModule = perModule; } public QuarkusInfo() { super("Log Quarkus-specific project information, such as imported Quarkus platform BOMs, Quarkus extensions found among the project dependencies, etc."); } @TaskAction }
yeah agree, ideally. Just not confident enough and want to keep the change limited to what necessary to fix bug (though change is already not minor)
public WriteResult expandUntriggered(PCollection<KV<DestinationT, ElementT>> input) { Pipeline p = input.getPipeline(); final PCollectionView<String> loadJobIdPrefixView = createJobIdPrefixView(p, JobType.LOAD); final PCollectionView<String> tempLoadJobIdPrefixView = createJobIdPrefixView(p, JobType.TEMP_TABLE_LOAD); final PCollectionView<String> zeroLoadJobIdPrefixView = createJobIdPrefixView(p, JobType.ZERO_LOAD); final PCollectionView<String> copyJobIdPrefixView = createJobIdPrefixView(p, JobType.COPY); final PCollectionView<String> tempFilePrefixView = createTempFilePrefixView(p, loadJobIdPrefixView); PCollection<KV<DestinationT, ElementT>> inputInGlobalWindow = input.apply( "rewindowIntoGlobal", Window.<KV<DestinationT, ElementT>>into(new GlobalWindows()) .triggering(DefaultTrigger.of()) .discardingFiredPanes()); PCollection<WriteBundlesToFiles.Result<DestinationT>> results = (numFileShards == 0) ? writeDynamicallyShardedFilesUntriggered(inputInGlobalWindow, tempFilePrefixView) : writeStaticallyShardedFiles(inputInGlobalWindow, tempFilePrefixView); TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>> multiPartitionsTag = new TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>>("multiPartitionsTag") {}; TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>> singlePartitionTag = new TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>>("singlePartitionTag") {}; PCollectionTuple partitions = results .apply("ReifyResults", new ReifyAsIterable<>()) .setCoder(IterableCoder.of(WriteBundlesToFiles.ResultCoder.of(destinationCoder))) .apply( "WritePartitionUntriggered", ParDo.of( new WritePartition<>( singletonTable, dynamicDestinations, tempFilePrefixView, maxFilesPerPartition, maxBytesPerPartition, multiPartitionsTag, singlePartitionTag, rowWriterFactory)) .withSideInputs(tempFilePrefixView) .withOutputTags(multiPartitionsTag, TupleTagList.of(singlePartitionTag))); PCollection<TableDestination> successfulSinglePartitionWrites = 
writeSinglePartition(partitions.get(singlePartitionTag), loadJobIdPrefixView); List<PCollectionView<?>> sideInputsForUpdateSchema = Lists.newArrayList(zeroLoadJobIdPrefixView); sideInputsForUpdateSchema.addAll(dynamicDestinations.getSideInputs()); PCollection<TableDestination> successfulMultiPartitionWrites = writeTempTables(partitions.get(multiPartitionsTag), tempLoadJobIdPrefixView) .apply("ReifyRenameInput", new ReifyAsIterable<>()) .apply( ParDo.of( new UpdateSchemaDestination<DestinationT>( bigQueryServices, zeroLoadJobIdPrefixView, loadJobProjectId, WriteDisposition.WRITE_APPEND, CreateDisposition.CREATE_NEVER, maxRetryJobs, kmsKey, schemaUpdateOptions, destinationsWithMatching)) .withSideInputs(sideInputsForUpdateSchema)) .apply( "WriteRenameUntriggered", ParDo.of( new WriteRename( bigQueryServices, copyJobIdPrefixView, writeDisposition, createDisposition, maxRetryJobs, kmsKey, loadJobProjectId)) .withSideInputs(copyJobIdPrefixView)) .setCoder(tableDestinationCoder); PCollectionList<TableDestination> allSuccessfulWrites = PCollectionList.of(successfulSinglePartitionWrites).and(successfulMultiPartitionWrites); return writeResult(p, allSuccessfulWrites.apply(Flatten.pCollections())); }
PCollection<TableDestination> successfulMultiPartitionWrites =
public WriteResult expandUntriggered(PCollection<KV<DestinationT, ElementT>> input) { Pipeline p = input.getPipeline(); final PCollectionView<String> loadJobIdPrefixView = createJobIdPrefixView(p, JobType.LOAD); final PCollectionView<String> tempLoadJobIdPrefixView = createJobIdPrefixView(p, JobType.TEMP_TABLE_LOAD); final PCollectionView<String> zeroLoadJobIdPrefixView = createJobIdPrefixView(p, JobType.SCHEMA_UPDATE); final PCollectionView<String> copyJobIdPrefixView = createJobIdPrefixView(p, JobType.COPY); final PCollectionView<String> tempFilePrefixView = createTempFilePrefixView(p, loadJobIdPrefixView); PCollection<KV<DestinationT, ElementT>> inputInGlobalWindow = input.apply( "rewindowIntoGlobal", Window.<KV<DestinationT, ElementT>>into(new GlobalWindows()) .triggering(DefaultTrigger.of()) .discardingFiredPanes()); PCollection<WriteBundlesToFiles.Result<DestinationT>> results = (numFileShards == 0) ? writeDynamicallyShardedFilesUntriggered(inputInGlobalWindow, tempFilePrefixView) : writeStaticallyShardedFiles(inputInGlobalWindow, tempFilePrefixView); TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>> multiPartitionsTag = new TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>>("multiPartitionsTag") {}; TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>> singlePartitionTag = new TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>>("singlePartitionTag") {}; PCollectionTuple partitions = results .apply("ReifyResults", new ReifyAsIterable<>()) .setCoder(IterableCoder.of(WriteBundlesToFiles.ResultCoder.of(destinationCoder))) .apply( "WritePartitionUntriggered", ParDo.of( new WritePartition<>( singletonTable, dynamicDestinations, tempFilePrefixView, maxFilesPerPartition, maxBytesPerPartition, multiPartitionsTag, singlePartitionTag, rowWriterFactory)) .withSideInputs(tempFilePrefixView) .withOutputTags(multiPartitionsTag, TupleTagList.of(singlePartitionTag))); PCollection<TableDestination> successfulSinglePartitionWrites = 
writeSinglePartition(partitions.get(singlePartitionTag), loadJobIdPrefixView); List<PCollectionView<?>> sideInputsForUpdateSchema = Lists.newArrayList(zeroLoadJobIdPrefixView); sideInputsForUpdateSchema.addAll(dynamicDestinations.getSideInputs()); PCollection<TableDestination> successfulMultiPartitionWrites = writeTempTables(partitions.get(multiPartitionsTag), tempLoadJobIdPrefixView) .apply("ReifyRenameInput", new ReifyAsIterable<>()) .apply( ParDo.of( new UpdateSchemaDestination<DestinationT>( bigQueryServices, zeroLoadJobIdPrefixView, loadJobProjectId, WriteDisposition.WRITE_APPEND, CreateDisposition.CREATE_NEVER, maxRetryJobs, kmsKey, schemaUpdateOptions, destinationsWithMatching)) .withSideInputs(sideInputsForUpdateSchema)) .apply( "WriteRenameUntriggered", ParDo.of( new WriteRename( bigQueryServices, copyJobIdPrefixView, writeDisposition, createDisposition, maxRetryJobs, kmsKey, loadJobProjectId)) .withSideInputs(copyJobIdPrefixView)) .setCoder(tableDestinationCoder); PCollectionList<TableDestination> allSuccessfulWrites = PCollectionList.of(successfulSinglePartitionWrites).and(successfulMultiPartitionWrites); return writeResult(p, allSuccessfulWrites.apply(Flatten.pCollections())); }
class BatchLoads<DestinationT, ElementT> extends PTransform<PCollection<KV<DestinationT, ElementT>>, WriteResult> { private static final Logger LOG = LoggerFactory.getLogger(BatchLoads.class); @VisibleForTesting static final int DEFAULT_MAX_NUM_WRITERS_PER_BUNDLE = 20; @VisibleForTesting static final int DEFAULT_MAX_FILES_PER_PARTITION = 10000; @VisibleForTesting static final long DEFAULT_MAX_BYTES_PER_PARTITION = 11 * (1L << 40); static final long DEFAULT_MAX_FILE_SIZE = 4 * (1L << 40); static final int DEFAULT_NUM_FILE_SHARDS = 0; static final int FILE_TRIGGERING_RECORD_COUNT = 500000; static final int DEFAULT_FILE_TRIGGERING_BYTE_COUNT = AsyncWriteChannelOptions.UPLOAD_CHUNK_SIZE_DEFAULT; static final Duration FILE_TRIGGERING_BATCHING_DURATION = Duration.standardSeconds(1); static final int LOAD_JOB_POLL_MAX_RETRIES = Integer.MAX_VALUE; static final int DEFAULT_MAX_RETRY_JOBS = 3; private BigQueryServices bigQueryServices; private final WriteDisposition writeDisposition; private final CreateDisposition createDisposition; private Set<SchemaUpdateOption> schemaUpdateOptions; private final boolean ignoreUnknownValues; private final boolean useAvroLogicalTypes; private final boolean singletonTable; private final DynamicDestinations<?, DestinationT> dynamicDestinations; /** * destinationsWithMatching wraps the dynamicDestinations redirects the schema, partitioning, etc * to the final destination tables, if the final destination table exists already (and we're * appending to it). It is used in writing to temp tables and updating final table schema. 
*/ private DynamicDestinations<?, DestinationT> destinationsWithMatching; private final Coder<DestinationT> destinationCoder; private int maxNumWritersPerBundle; private long maxFileSize; private int maxFilesPerPartition; private long maxBytesPerPartition; private int numFileShards; private @Nullable Duration triggeringFrequency; private ValueProvider<String> customGcsTempLocation; private @Nullable ValueProvider<String> loadJobProjectId; private final Coder<ElementT> elementCoder; private final RowWriterFactory<ElementT, DestinationT> rowWriterFactory; private final @Nullable String kmsKey; private final String tempDataset; private Coder<TableDestination> tableDestinationCoder; private int maxRetryJobs = DEFAULT_MAX_RETRY_JOBS; BatchLoads( WriteDisposition writeDisposition, CreateDisposition createDisposition, boolean singletonTable, DynamicDestinations<?, DestinationT> dynamicDestinations, Coder<DestinationT> destinationCoder, ValueProvider<String> customGcsTempLocation, @Nullable ValueProvider<String> loadJobProjectId, boolean ignoreUnknownValues, Coder<ElementT> elementCoder, RowWriterFactory<ElementT, DestinationT> rowWriterFactory, @Nullable String kmsKey, boolean clusteringEnabled, boolean useAvroLogicalTypes, String tempDataset) { bigQueryServices = new BigQueryServicesImpl(); this.writeDisposition = writeDisposition; this.createDisposition = createDisposition; this.singletonTable = singletonTable; this.dynamicDestinations = dynamicDestinations; this.destinationsWithMatching = DynamicDestinationsHelpers.matchTableDynamicDestinations( dynamicDestinations, bigQueryServices); this.destinationCoder = destinationCoder; this.maxNumWritersPerBundle = DEFAULT_MAX_NUM_WRITERS_PER_BUNDLE; this.maxFileSize = DEFAULT_MAX_FILE_SIZE; this.numFileShards = DEFAULT_NUM_FILE_SHARDS; this.maxFilesPerPartition = DEFAULT_MAX_FILES_PER_PARTITION; this.maxBytesPerPartition = DEFAULT_MAX_BYTES_PER_PARTITION; this.triggeringFrequency = null; this.customGcsTempLocation = 
customGcsTempLocation; this.loadJobProjectId = loadJobProjectId; this.ignoreUnknownValues = ignoreUnknownValues; this.useAvroLogicalTypes = useAvroLogicalTypes; this.elementCoder = elementCoder; this.kmsKey = kmsKey; this.rowWriterFactory = rowWriterFactory; schemaUpdateOptions = Collections.emptySet(); this.tempDataset = tempDataset; this.tableDestinationCoder = clusteringEnabled ? TableDestinationCoderV3.of() : TableDestinationCoderV2.of(); } void setSchemaUpdateOptions(Set<SchemaUpdateOption> schemaUpdateOptions) { this.schemaUpdateOptions = schemaUpdateOptions; if (schemaUpdateOptions != null && !schemaUpdateOptions.isEmpty()) { this.destinationsWithMatching = dynamicDestinations; } else { this.destinationsWithMatching = DynamicDestinationsHelpers.matchTableDynamicDestinations( dynamicDestinations, bigQueryServices); } } void setTestServices(BigQueryServices bigQueryServices) { this.bigQueryServices = bigQueryServices; } /** Get the maximum number of file writers that will be open simultaneously in a bundle. */ public int getMaxNumWritersPerBundle() { return maxNumWritersPerBundle; } /** Set the maximum number of file writers that will be open simultaneously in a bundle. 
*/ public void setMaxNumWritersPerBundle(int maxNumWritersPerBundle) { this.maxNumWritersPerBundle = maxNumWritersPerBundle; } public void setTriggeringFrequency(Duration triggeringFrequency) { this.triggeringFrequency = triggeringFrequency; } public int getMaxRetryJobs() { return maxRetryJobs; } public void setMaxRetryJobs(int maxRetryJobs) { this.maxRetryJobs = maxRetryJobs; } public void setNumFileShards(int numFileShards) { this.numFileShards = numFileShards; } @VisibleForTesting void setMaxFileSize(long maxFileSize) { this.maxFileSize = maxFileSize; } @VisibleForTesting void setMaxFilesPerPartition(int maxFilesPerPartition) { this.maxFilesPerPartition = maxFilesPerPartition; } @VisibleForTesting void setMaxBytesPerPartition(long maxBytesPerPartition) { this.maxBytesPerPartition = maxBytesPerPartition; } @Override public void validate(@Nullable PipelineOptions maybeOptions) { PipelineOptions options = Preconditions.checkArgumentNotNull(maybeOptions); String tempLocation; if (customGcsTempLocation == null) { tempLocation = options.getTempLocation(); } else { if (!customGcsTempLocation.isAccessible()) { return; } tempLocation = customGcsTempLocation.get(); } checkArgument( !Strings.isNullOrEmpty(tempLocation), "BigQueryIO.Write needs a GCS temp location to store temp files. 
" + "This can be set by withCustomGcsTempLocation() in the Builder " + "or through the fallback pipeline option --tempLocation."); if (bigQueryServices == null) { try { GcsPath.fromUri(tempLocation); } catch (IllegalArgumentException e) { throw new IllegalArgumentException( String.format( "BigQuery temp location expected a valid 'gs: tempLocation), e); } } } private WriteResult expandTriggered(PCollection<KV<DestinationT, ElementT>> input) { Duration triggeringFrequency = Preconditions.checkStateNotNull(this.triggeringFrequency); Pipeline p = input.getPipeline(); final PCollectionView<String> loadJobIdPrefixView = createJobIdPrefixView(p, JobType.LOAD); final PCollectionView<String> tempLoadJobIdPrefixView = createJobIdPrefixView(p, JobType.TEMP_TABLE_LOAD); final PCollectionView<String> zeroLoadJobIdPrefixView = createJobIdPrefixView(p, JobType.ZERO_LOAD); final PCollectionView<String> copyJobIdPrefixView = createJobIdPrefixView(p, JobType.COPY); final PCollectionView<String> tempFilePrefixView = createTempFilePrefixView(p, loadJobIdPrefixView); PCollection<WriteBundlesToFiles.Result<DestinationT>> results; if (numFileShards > 0) { PCollection<KV<DestinationT, ElementT>> inputInGlobalWindow = input.apply( "rewindowIntoGlobal", Window.<KV<DestinationT, ElementT>>into(new GlobalWindows()) .triggering( Repeatedly.forever( AfterFirst.of( AfterProcessingTime.pastFirstElementInPane() .plusDelayOf(triggeringFrequency), AfterPane.elementCountAtLeast(FILE_TRIGGERING_RECORD_COUNT)))) .discardingFiredPanes()); results = writeStaticallyShardedFiles(inputInGlobalWindow, tempFilePrefixView); } else { PCollection<KV<DestinationT, ElementT>> inputInGlobalWindow = input.apply( "rewindowIntoGlobal", Window.<KV<DestinationT, ElementT>>into(new GlobalWindows()) .triggering(DefaultTrigger.of()) .discardingFiredPanes()); results = writeDynamicallyShardedFilesTriggered(inputInGlobalWindow, tempFilePrefixView); } results = results.apply( "applyUserTrigger", 
Window.<WriteBundlesToFiles.Result<DestinationT>>into(new GlobalWindows()) .triggering( Repeatedly.forever( AfterProcessingTime.pastFirstElementInPane() .plusDelayOf(triggeringFrequency))) .discardingFiredPanes()); TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>> multiPartitionsTag = new TupleTag<>("multiPartitionsTag"); TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>> singlePartitionTag = new TupleTag<>("singlePartitionTag"); PCollectionTuple partitions = results .apply("AttachDestinationKey", WithKeys.of(result -> result.destination)) .setCoder( KvCoder.of(destinationCoder, WriteBundlesToFiles.ResultCoder.of(destinationCoder))) .apply("GroupFilesByDestination", GroupByKey.create()) .apply("ExtractResultValues", Values.create()) .apply( "WritePartitionTriggered", ParDo.of( new WritePartition<>( singletonTable, dynamicDestinations, tempFilePrefixView, maxFilesPerPartition, maxBytesPerPartition, multiPartitionsTag, singlePartitionTag, rowWriterFactory)) .withSideInputs(tempFilePrefixView) .withOutputTags(multiPartitionsTag, TupleTagList.of(singlePartitionTag))); PCollection<KV<DestinationT, WriteTables.Result>> tempTables = writeTempTables(partitions.get(multiPartitionsTag), tempLoadJobIdPrefixView); List<PCollectionView<?>> sideInputsForUpdateSchema = Lists.newArrayList(zeroLoadJobIdPrefixView); sideInputsForUpdateSchema.addAll(dynamicDestinations.getSideInputs()); PCollection<TableDestination> successfulMultiPartitionWrites = tempTables .apply( "Window Into Global Windows", Window.<KV<DestinationT, WriteTables.Result>>into(new GlobalWindows()) .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1)))) .apply("Add Void Key", WithKeys.of((Void) null)) .setCoder(KvCoder.of(VoidCoder.of(), tempTables.getCoder())) .apply("GroupByKey", GroupByKey.create()) .apply("Extract Values", Values.create()) .apply( ParDo.of( new UpdateSchemaDestination<DestinationT>( bigQueryServices, zeroLoadJobIdPrefixView, loadJobProjectId, 
WriteDisposition.WRITE_APPEND, CreateDisposition.CREATE_NEVER, maxRetryJobs, kmsKey, schemaUpdateOptions, destinationsWithMatching)) .withSideInputs(sideInputsForUpdateSchema)) .apply( "WriteRenameTriggered", ParDo.of( new WriteRename( bigQueryServices, copyJobIdPrefixView, writeDisposition, createDisposition, maxRetryJobs, kmsKey, loadJobProjectId)) .withSideInputs(copyJobIdPrefixView)); PCollection<TableDestination> successfulSinglePartitionWrites = writeSinglePartition(partitions.get(singlePartitionTag), loadJobIdPrefixView) .apply( "RewindowSinglePartitionResults", Window.<TableDestination>into(new GlobalWindows()) .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1)))); PCollectionList<TableDestination> allSuccessfulWrites = PCollectionList.of(successfulMultiPartitionWrites).and(successfulSinglePartitionWrites); return writeResult(p, allSuccessfulWrites.apply(Flatten.pCollections())); } private PCollectionView<String> createJobIdPrefixView(Pipeline p, final JobType type) { return p.apply("JobIdCreationRoot_" + type.toString(), Create.of((Void) null)) .apply( "CreateJobId_" + type.toString(), ParDo.of( new DoFn<Void, String>() { @ProcessElement public void process(ProcessContext c) { c.output( BigQueryResourceNaming.createJobIdPrefix( c.getPipelineOptions().getJobName(), BigQueryHelpers.randomUUIDString(), type)); } })) .apply("JobIdSideInput_" + type.toString(), View.asSingleton()); } private PCollectionView<String> createTempFilePrefixView( Pipeline p, final PCollectionView<String> jobIdView) { return p.apply("Create dummy value", Create.of("")) .apply( "GetTempFilePrefix", ParDo.of( new DoFn<String, String>() { @ProcessElement public void getTempFilePrefix(ProcessContext c) { String tempLocationRoot; if (customGcsTempLocation != null) { tempLocationRoot = customGcsTempLocation.get(); } else { tempLocationRoot = c.getPipelineOptions().getTempLocation(); } String tempLocation = resolveTempLocation( tempLocationRoot, "BigQueryWriteTemp", 
c.sideInput(jobIdView)); LOG.info( "Writing BigQuery temporary files to {} before loading them.", tempLocation); c.output(tempLocation); } }) .withSideInputs(jobIdView)) .apply("TempFilePrefixView", View.asSingleton()); } PCollection<WriteBundlesToFiles.Result<DestinationT>> writeDynamicallyShardedFilesUntriggered( PCollection<KV<DestinationT, ElementT>> input, PCollectionView<String> tempFilePrefix) { TupleTag<WriteBundlesToFiles.Result<DestinationT>> writtenFilesTag = new TupleTag<WriteBundlesToFiles.Result<DestinationT>>("writtenFiles") {}; TupleTag<KV<ShardedKey<DestinationT>, ElementT>> unwrittedRecordsTag = new TupleTag<KV<ShardedKey<DestinationT>, ElementT>>("unwrittenRecords") {}; PCollectionTuple writeBundlesTuple = input.apply( "WriteBundlesToFiles", ParDo.of( new WriteBundlesToFiles<>( tempFilePrefix, unwrittedRecordsTag, maxNumWritersPerBundle, maxFileSize, rowWriterFactory)) .withSideInputs(tempFilePrefix) .withOutputTags(writtenFilesTag, TupleTagList.of(unwrittedRecordsTag))); PCollection<WriteBundlesToFiles.Result<DestinationT>> writtenFiles = writeBundlesTuple .get(writtenFilesTag) .setCoder(WriteBundlesToFiles.ResultCoder.of(destinationCoder)); PCollection<KV<ShardedKey<DestinationT>, ElementT>> unwrittenRecords = writeBundlesTuple .get(unwrittedRecordsTag) .setCoder(KvCoder.of(ShardedKeyCoder.of(destinationCoder), elementCoder)); PCollection<WriteBundlesToFiles.Result<DestinationT>> writtenFilesGrouped = writeShardedRecords(unwrittenRecords, tempFilePrefix); return PCollectionList.of(writtenFiles) .and(writtenFilesGrouped) .apply("FlattenFiles", Flatten.pCollections()) .setCoder(WriteBundlesToFiles.ResultCoder.of(destinationCoder)); } PCollection<WriteBundlesToFiles.Result<DestinationT>> writeStaticallyShardedFiles( PCollection<KV<DestinationT, ElementT>> input, PCollectionView<String> tempFilePrefix) { checkState(numFileShards > 0); PCollection<KV<ShardedKey<DestinationT>, ElementT>> shardedRecords = input .apply( "AddShard", ParDo.of( new 
DoFn<KV<DestinationT, ElementT>, KV<ShardedKey<DestinationT>, ElementT>>() { int shardNumber; @Setup public void setup() { shardNumber = ThreadLocalRandom.current().nextInt(numFileShards); } @ProcessElement public void processElement( @Element KV<DestinationT, ElementT> element, OutputReceiver<KV<ShardedKey<DestinationT>, ElementT>> o) { DestinationT destination = element.getKey(); o.output( KV.of( ShardedKey.of(destination, ++shardNumber % numFileShards), element.getValue())); } })) .setCoder(KvCoder.of(ShardedKeyCoder.of(destinationCoder), elementCoder)); return writeShardedRecords(shardedRecords, tempFilePrefix); } PCollection<WriteBundlesToFiles.Result<DestinationT>> writeDynamicallyShardedFilesTriggered( PCollection<KV<DestinationT, ElementT>> input, PCollectionView<String> tempFilePrefix) { BigQueryOptions options = input.getPipeline().getOptions().as(BigQueryOptions.class); Duration maxBufferingDuration = options.getMaxBufferingDurationMilliSec() > 0 ? Duration.millis(options.getMaxBufferingDurationMilliSec()) : FILE_TRIGGERING_BATCHING_DURATION; GcsOptions gcsOptions = input.getPipeline().getOptions().as(GcsOptions.class); int byteSize = MoreObjects.firstNonNull( gcsOptions.getGcsUploadBufferSizeBytes(), DEFAULT_FILE_TRIGGERING_BYTE_COUNT); return input .apply( GroupIntoBatches.<DestinationT, ElementT>ofSize(FILE_TRIGGERING_RECORD_COUNT) .withByteSize(byteSize) .withMaxBufferingDuration(maxBufferingDuration) .withShardedKey()) .setCoder( KvCoder.of( org.apache.beam.sdk.util.ShardedKey.Coder.of(destinationCoder), IterableCoder.of(elementCoder))) .apply( "StripShardId", MapElements.via( new SimpleFunction< KV<org.apache.beam.sdk.util.ShardedKey<DestinationT>, Iterable<ElementT>>, KV<DestinationT, Iterable<ElementT>>>() { @Override public KV<DestinationT, Iterable<ElementT>> apply( KV<org.apache.beam.sdk.util.ShardedKey<DestinationT>, Iterable<ElementT>> input) { return KV.of(input.getKey().getKey(), input.getValue()); } })) 
.setCoder(KvCoder.of(destinationCoder, IterableCoder.of(elementCoder))) .apply( "WriteGroupedRecords", ParDo.of( new WriteGroupedRecordsToFiles<DestinationT, ElementT>( tempFilePrefix, maxFileSize, rowWriterFactory)) .withSideInputs(tempFilePrefix)) .setCoder(WriteBundlesToFiles.ResultCoder.of(destinationCoder)); } private PCollection<Result<DestinationT>> writeShardedRecords( PCollection<KV<ShardedKey<DestinationT>, ElementT>> shardedRecords, PCollectionView<String> tempFilePrefix) { return shardedRecords .apply("GroupByDestination", GroupByKey.create()) .apply( "StripShardId", MapElements.via( new SimpleFunction< KV<ShardedKey<DestinationT>, Iterable<ElementT>>, KV<DestinationT, Iterable<ElementT>>>() { @Override public KV<DestinationT, Iterable<ElementT>> apply( KV<ShardedKey<DestinationT>, Iterable<ElementT>> input) { return KV.of(input.getKey().getKey(), input.getValue()); } })) .setCoder(KvCoder.of(destinationCoder, IterableCoder.of(elementCoder))) .apply( "WriteGroupedRecords", ParDo.of( new WriteGroupedRecordsToFiles<>(tempFilePrefix, maxFileSize, rowWriterFactory)) .withSideInputs(tempFilePrefix)) .setCoder(WriteBundlesToFiles.ResultCoder.of(destinationCoder)); } private PCollection<KV<DestinationT, WriteTables.Result>> writeTempTables( PCollection<KV<ShardedKey<DestinationT>, WritePartition.Result>> input, PCollectionView<String> jobIdTokenView) { List<PCollectionView<?>> sideInputs = Lists.newArrayList(jobIdTokenView); sideInputs.addAll(dynamicDestinations.getSideInputs()); Coder<KV<ShardedKey<DestinationT>, WritePartition.Result>> partitionsCoder = KvCoder.of( ShardedKeyCoder.of(NullableCoder.of(destinationCoder)), WritePartition.ResultCoder.INSTANCE); return input .setCoder(partitionsCoder) .apply("MultiPartitionsReshuffle", Reshuffle.of()) .apply( "MultiPartitionsWriteTables", new WriteTables<>( true, bigQueryServices, jobIdTokenView, WriteDisposition.WRITE_EMPTY, CreateDisposition.CREATE_IF_NEEDED, sideInputs, destinationsWithMatching, 
loadJobProjectId, maxRetryJobs, ignoreUnknownValues, kmsKey, rowWriterFactory.getSourceFormat(), useAvroLogicalTypes, schemaUpdateOptions, tempDataset)) .setCoder(KvCoder.of(destinationCoder, WriteTables.ResultCoder.INSTANCE)); } PCollection<TableDestination> writeSinglePartition( PCollection<KV<ShardedKey<DestinationT>, WritePartition.Result>> input, PCollectionView<String> loadJobIdPrefixView) { List<PCollectionView<?>> sideInputs = Lists.newArrayList(loadJobIdPrefixView); sideInputs.addAll(dynamicDestinations.getSideInputs()); Coder<KV<ShardedKey<DestinationT>, WritePartition.Result>> partitionsCoder = KvCoder.of( ShardedKeyCoder.of(NullableCoder.of(destinationCoder)), WritePartition.ResultCoder.INSTANCE); PCollection<KV<DestinationT, WriteTables.Result>> successfulWrites = input .setCoder(partitionsCoder) .apply("SinglePartitionsReshuffle", Reshuffle.of()) .apply( "SinglePartitionWriteTables", new WriteTables<>( false, bigQueryServices, loadJobIdPrefixView, writeDisposition, createDisposition, sideInputs, dynamicDestinations, loadJobProjectId, maxRetryJobs, ignoreUnknownValues, kmsKey, rowWriterFactory.getSourceFormat(), useAvroLogicalTypes, schemaUpdateOptions, null)) .setCoder(KvCoder.of(destinationCoder, WriteTables.ResultCoder.INSTANCE)); BigQueryOptions options = input.getPipeline().getOptions().as(BigQueryOptions.class); String defaultProjectId = options.getBigQueryProject() == null ? 
options.getProject() : options.getBigQueryProject(); return successfulWrites .apply(Keys.create()) .apply( "Convert to TableDestinations", ParDo.of( new DoFn<DestinationT, TableDestination>() { @ProcessElement public void processElement(ProcessContext c) { dynamicDestinations.setSideInputAccessorFromProcessContext(c); TableDestination tableDestination = dynamicDestinations.getTable(c.element()); TableReference tableReference = tableDestination.getTableReference(); if (Strings.isNullOrEmpty(tableReference.getProjectId())) { tableReference.setProjectId(defaultProjectId); tableDestination = tableDestination.withTableReference(tableReference); } c.output(tableDestination); } }) .withSideInputs(sideInputs)) .setCoder(tableDestinationCoder); } private WriteResult writeResult(Pipeline p, PCollection<TableDestination> successfulWrites) { PCollection<TableRow> empty = p.apply("CreateEmptyFailedInserts", Create.empty(TypeDescriptor.of(TableRow.class))); return WriteResult.in( p, new TupleTag<>("failedInserts"), empty, null, new TupleTag<>("successfulInserts"), successfulWrites, null, null); } @Override public WriteResult expand(PCollection<KV<DestinationT, ElementT>> input) { return (triggeringFrequency != null) ? expandTriggered(input) : expandUntriggered(input); } }
class BatchLoads<DestinationT, ElementT> extends PTransform<PCollection<KV<DestinationT, ElementT>>, WriteResult> { private static final Logger LOG = LoggerFactory.getLogger(BatchLoads.class); @VisibleForTesting static final int DEFAULT_MAX_NUM_WRITERS_PER_BUNDLE = 20; @VisibleForTesting static final int DEFAULT_MAX_FILES_PER_PARTITION = 10000; @VisibleForTesting static final long DEFAULT_MAX_BYTES_PER_PARTITION = 11 * (1L << 40); static final long DEFAULT_MAX_FILE_SIZE = 4 * (1L << 40); static final int DEFAULT_NUM_FILE_SHARDS = 0; static final int FILE_TRIGGERING_RECORD_COUNT = 500000; static final int DEFAULT_FILE_TRIGGERING_BYTE_COUNT = AsyncWriteChannelOptions.UPLOAD_CHUNK_SIZE_DEFAULT; static final Duration FILE_TRIGGERING_BATCHING_DURATION = Duration.standardSeconds(1); static final int LOAD_JOB_POLL_MAX_RETRIES = Integer.MAX_VALUE; static final int DEFAULT_MAX_RETRY_JOBS = 3; private BigQueryServices bigQueryServices; private final WriteDisposition writeDisposition; private final CreateDisposition createDisposition; private Set<SchemaUpdateOption> schemaUpdateOptions; private final boolean ignoreUnknownValues; private final boolean useAvroLogicalTypes; private final boolean singletonTable; private final DynamicDestinations<?, DestinationT> dynamicDestinations; /** * destinationsWithMatching wraps the dynamicDestinations redirects the schema, partitioning, etc * to the final destination tables, if the final destination table exists already (and we're * appending to it). It is used in writing to temp tables and updating final table schema. 
*/ private DynamicDestinations<?, DestinationT> destinationsWithMatching; private final Coder<DestinationT> destinationCoder; private int maxNumWritersPerBundle; private long maxFileSize; private int maxFilesPerPartition; private long maxBytesPerPartition; private int numFileShards; private @Nullable Duration triggeringFrequency; private ValueProvider<String> customGcsTempLocation; private @Nullable ValueProvider<String> loadJobProjectId; private final Coder<ElementT> elementCoder; private final RowWriterFactory<ElementT, DestinationT> rowWriterFactory; private final @Nullable String kmsKey; private final String tempDataset; private Coder<TableDestination> tableDestinationCoder; private int maxRetryJobs = DEFAULT_MAX_RETRY_JOBS; BatchLoads( WriteDisposition writeDisposition, CreateDisposition createDisposition, boolean singletonTable, DynamicDestinations<?, DestinationT> dynamicDestinations, Coder<DestinationT> destinationCoder, ValueProvider<String> customGcsTempLocation, @Nullable ValueProvider<String> loadJobProjectId, boolean ignoreUnknownValues, Coder<ElementT> elementCoder, RowWriterFactory<ElementT, DestinationT> rowWriterFactory, @Nullable String kmsKey, boolean clusteringEnabled, boolean useAvroLogicalTypes, String tempDataset) { bigQueryServices = new BigQueryServicesImpl(); this.writeDisposition = writeDisposition; this.createDisposition = createDisposition; this.singletonTable = singletonTable; this.dynamicDestinations = dynamicDestinations; this.destinationsWithMatching = DynamicDestinationsHelpers.matchTableDynamicDestinations( dynamicDestinations, bigQueryServices); this.destinationCoder = destinationCoder; this.maxNumWritersPerBundle = DEFAULT_MAX_NUM_WRITERS_PER_BUNDLE; this.maxFileSize = DEFAULT_MAX_FILE_SIZE; this.numFileShards = DEFAULT_NUM_FILE_SHARDS; this.maxFilesPerPartition = DEFAULT_MAX_FILES_PER_PARTITION; this.maxBytesPerPartition = DEFAULT_MAX_BYTES_PER_PARTITION; this.triggeringFrequency = null; this.customGcsTempLocation = 
customGcsTempLocation; this.loadJobProjectId = loadJobProjectId; this.ignoreUnknownValues = ignoreUnknownValues; this.useAvroLogicalTypes = useAvroLogicalTypes; this.elementCoder = elementCoder; this.kmsKey = kmsKey; this.rowWriterFactory = rowWriterFactory; schemaUpdateOptions = Collections.emptySet(); this.tempDataset = tempDataset; this.tableDestinationCoder = clusteringEnabled ? TableDestinationCoderV3.of() : TableDestinationCoderV2.of(); } void setSchemaUpdateOptions(Set<SchemaUpdateOption> schemaUpdateOptions) { this.schemaUpdateOptions = schemaUpdateOptions; if (schemaUpdateOptions != null && !schemaUpdateOptions.isEmpty()) { this.destinationsWithMatching = dynamicDestinations; } else { this.destinationsWithMatching = DynamicDestinationsHelpers.matchTableDynamicDestinations( dynamicDestinations, bigQueryServices); } } void setTestServices(BigQueryServices bigQueryServices) { this.bigQueryServices = bigQueryServices; } /** Get the maximum number of file writers that will be open simultaneously in a bundle. */ public int getMaxNumWritersPerBundle() { return maxNumWritersPerBundle; } /** Set the maximum number of file writers that will be open simultaneously in a bundle. 
*/ public void setMaxNumWritersPerBundle(int maxNumWritersPerBundle) { this.maxNumWritersPerBundle = maxNumWritersPerBundle; } public void setTriggeringFrequency(Duration triggeringFrequency) { this.triggeringFrequency = triggeringFrequency; } public int getMaxRetryJobs() { return maxRetryJobs; } public void setMaxRetryJobs(int maxRetryJobs) { this.maxRetryJobs = maxRetryJobs; } public void setNumFileShards(int numFileShards) { this.numFileShards = numFileShards; } @VisibleForTesting void setMaxFileSize(long maxFileSize) { this.maxFileSize = maxFileSize; } @VisibleForTesting void setMaxFilesPerPartition(int maxFilesPerPartition) { this.maxFilesPerPartition = maxFilesPerPartition; } @VisibleForTesting void setMaxBytesPerPartition(long maxBytesPerPartition) { this.maxBytesPerPartition = maxBytesPerPartition; } @Override public void validate(@Nullable PipelineOptions maybeOptions) { PipelineOptions options = Preconditions.checkArgumentNotNull(maybeOptions); String tempLocation; if (customGcsTempLocation == null) { tempLocation = options.getTempLocation(); } else { if (!customGcsTempLocation.isAccessible()) { return; } tempLocation = customGcsTempLocation.get(); } checkArgument( !Strings.isNullOrEmpty(tempLocation), "BigQueryIO.Write needs a GCS temp location to store temp files. 
" + "This can be set by withCustomGcsTempLocation() in the Builder " + "or through the fallback pipeline option --tempLocation."); if (bigQueryServices == null) { try { GcsPath.fromUri(tempLocation); } catch (IllegalArgumentException e) { throw new IllegalArgumentException( String.format( "BigQuery temp location expected a valid 'gs: tempLocation), e); } } } private WriteResult expandTriggered(PCollection<KV<DestinationT, ElementT>> input) { Duration triggeringFrequency = Preconditions.checkStateNotNull(this.triggeringFrequency); Pipeline p = input.getPipeline(); final PCollectionView<String> loadJobIdPrefixView = createJobIdPrefixView(p, JobType.LOAD); final PCollectionView<String> tempLoadJobIdPrefixView = createJobIdPrefixView(p, JobType.TEMP_TABLE_LOAD); final PCollectionView<String> zeroLoadJobIdPrefixView = createJobIdPrefixView(p, JobType.SCHEMA_UPDATE); final PCollectionView<String> copyJobIdPrefixView = createJobIdPrefixView(p, JobType.COPY); final PCollectionView<String> tempFilePrefixView = createTempFilePrefixView(p, loadJobIdPrefixView); PCollection<WriteBundlesToFiles.Result<DestinationT>> results; if (numFileShards > 0) { PCollection<KV<DestinationT, ElementT>> inputInGlobalWindow = input.apply( "rewindowIntoGlobal", Window.<KV<DestinationT, ElementT>>into(new GlobalWindows()) .triggering( Repeatedly.forever( AfterFirst.of( AfterProcessingTime.pastFirstElementInPane() .plusDelayOf(triggeringFrequency), AfterPane.elementCountAtLeast(FILE_TRIGGERING_RECORD_COUNT)))) .discardingFiredPanes()); results = writeStaticallyShardedFiles(inputInGlobalWindow, tempFilePrefixView); } else { PCollection<KV<DestinationT, ElementT>> inputInGlobalWindow = input.apply( "rewindowIntoGlobal", Window.<KV<DestinationT, ElementT>>into(new GlobalWindows()) .triggering(DefaultTrigger.of()) .discardingFiredPanes()); results = writeDynamicallyShardedFilesTriggered(inputInGlobalWindow, tempFilePrefixView); } results = results.apply( "applyUserTrigger", 
Window.<WriteBundlesToFiles.Result<DestinationT>>into(new GlobalWindows()) .triggering( Repeatedly.forever( AfterProcessingTime.pastFirstElementInPane() .plusDelayOf(triggeringFrequency))) .discardingFiredPanes()); TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>> multiPartitionsTag = new TupleTag<>("multiPartitionsTag"); TupleTag<KV<ShardedKey<DestinationT>, WritePartition.Result>> singlePartitionTag = new TupleTag<>("singlePartitionTag"); PCollectionTuple partitions = results .apply("AttachDestinationKey", WithKeys.of(result -> result.destination)) .setCoder( KvCoder.of(destinationCoder, WriteBundlesToFiles.ResultCoder.of(destinationCoder))) .apply("GroupFilesByDestination", GroupByKey.create()) .apply("ExtractResultValues", Values.create()) .apply( "WritePartitionTriggered", ParDo.of( new WritePartition<>( singletonTable, dynamicDestinations, tempFilePrefixView, maxFilesPerPartition, maxBytesPerPartition, multiPartitionsTag, singlePartitionTag, rowWriterFactory)) .withSideInputs(tempFilePrefixView) .withOutputTags(multiPartitionsTag, TupleTagList.of(singlePartitionTag))); PCollection<KV<DestinationT, WriteTables.Result>> tempTables = writeTempTables(partitions.get(multiPartitionsTag), tempLoadJobIdPrefixView); List<PCollectionView<?>> sideInputsForUpdateSchema = Lists.newArrayList(zeroLoadJobIdPrefixView); sideInputsForUpdateSchema.addAll(dynamicDestinations.getSideInputs()); PCollection<TableDestination> successfulMultiPartitionWrites = tempTables .apply( "Window Into Global Windows", Window.<KV<DestinationT, WriteTables.Result>>into(new GlobalWindows()) .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1)))) .apply("Add Void Key", WithKeys.of((Void) null)) .setCoder(KvCoder.of(VoidCoder.of(), tempTables.getCoder())) .apply("GroupByKey", GroupByKey.create()) .apply("Extract Values", Values.create()) .apply( ParDo.of( new UpdateSchemaDestination<DestinationT>( bigQueryServices, zeroLoadJobIdPrefixView, loadJobProjectId, 
WriteDisposition.WRITE_APPEND, CreateDisposition.CREATE_NEVER, maxRetryJobs, kmsKey, schemaUpdateOptions, destinationsWithMatching)) .withSideInputs(sideInputsForUpdateSchema)) .apply( "WriteRenameTriggered", ParDo.of( new WriteRename( bigQueryServices, copyJobIdPrefixView, writeDisposition, createDisposition, maxRetryJobs, kmsKey, loadJobProjectId)) .withSideInputs(copyJobIdPrefixView)); PCollection<TableDestination> successfulSinglePartitionWrites = writeSinglePartition(partitions.get(singlePartitionTag), loadJobIdPrefixView) .apply( "RewindowSinglePartitionResults", Window.<TableDestination>into(new GlobalWindows()) .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1)))); PCollectionList<TableDestination> allSuccessfulWrites = PCollectionList.of(successfulMultiPartitionWrites).and(successfulSinglePartitionWrites); return writeResult(p, allSuccessfulWrites.apply(Flatten.pCollections())); } private PCollectionView<String> createJobIdPrefixView(Pipeline p, final JobType type) { return p.apply("JobIdCreationRoot_" + type.toString(), Create.of((Void) null)) .apply( "CreateJobId_" + type.toString(), ParDo.of( new DoFn<Void, String>() { @ProcessElement public void process(ProcessContext c) { c.output( BigQueryResourceNaming.createJobIdPrefix( c.getPipelineOptions().getJobName(), BigQueryHelpers.randomUUIDString(), type)); } })) .apply("JobIdSideInput_" + type.toString(), View.asSingleton()); } private PCollectionView<String> createTempFilePrefixView( Pipeline p, final PCollectionView<String> jobIdView) { return p.apply("Create dummy value", Create.of("")) .apply( "GetTempFilePrefix", ParDo.of( new DoFn<String, String>() { @ProcessElement public void getTempFilePrefix(ProcessContext c) { String tempLocationRoot; if (customGcsTempLocation != null) { tempLocationRoot = customGcsTempLocation.get(); } else { tempLocationRoot = c.getPipelineOptions().getTempLocation(); } String tempLocation = resolveTempLocation( tempLocationRoot, "BigQueryWriteTemp", 
c.sideInput(jobIdView)); LOG.info( "Writing BigQuery temporary files to {} before loading them.", tempLocation); c.output(tempLocation); } }) .withSideInputs(jobIdView)) .apply("TempFilePrefixView", View.asSingleton()); } PCollection<WriteBundlesToFiles.Result<DestinationT>> writeDynamicallyShardedFilesUntriggered( PCollection<KV<DestinationT, ElementT>> input, PCollectionView<String> tempFilePrefix) { TupleTag<WriteBundlesToFiles.Result<DestinationT>> writtenFilesTag = new TupleTag<WriteBundlesToFiles.Result<DestinationT>>("writtenFiles") {}; TupleTag<KV<ShardedKey<DestinationT>, ElementT>> unwrittedRecordsTag = new TupleTag<KV<ShardedKey<DestinationT>, ElementT>>("unwrittenRecords") {}; PCollectionTuple writeBundlesTuple = input.apply( "WriteBundlesToFiles", ParDo.of( new WriteBundlesToFiles<>( tempFilePrefix, unwrittedRecordsTag, maxNumWritersPerBundle, maxFileSize, rowWriterFactory)) .withSideInputs(tempFilePrefix) .withOutputTags(writtenFilesTag, TupleTagList.of(unwrittedRecordsTag))); PCollection<WriteBundlesToFiles.Result<DestinationT>> writtenFiles = writeBundlesTuple .get(writtenFilesTag) .setCoder(WriteBundlesToFiles.ResultCoder.of(destinationCoder)); PCollection<KV<ShardedKey<DestinationT>, ElementT>> unwrittenRecords = writeBundlesTuple .get(unwrittedRecordsTag) .setCoder(KvCoder.of(ShardedKeyCoder.of(destinationCoder), elementCoder)); PCollection<WriteBundlesToFiles.Result<DestinationT>> writtenFilesGrouped = writeShardedRecords(unwrittenRecords, tempFilePrefix); return PCollectionList.of(writtenFiles) .and(writtenFilesGrouped) .apply("FlattenFiles", Flatten.pCollections()) .setCoder(WriteBundlesToFiles.ResultCoder.of(destinationCoder)); } PCollection<WriteBundlesToFiles.Result<DestinationT>> writeStaticallyShardedFiles( PCollection<KV<DestinationT, ElementT>> input, PCollectionView<String> tempFilePrefix) { checkState(numFileShards > 0); PCollection<KV<ShardedKey<DestinationT>, ElementT>> shardedRecords = input .apply( "AddShard", ParDo.of( new 
DoFn<KV<DestinationT, ElementT>, KV<ShardedKey<DestinationT>, ElementT>>() { int shardNumber; @Setup public void setup() { shardNumber = ThreadLocalRandom.current().nextInt(numFileShards); } @ProcessElement public void processElement( @Element KV<DestinationT, ElementT> element, OutputReceiver<KV<ShardedKey<DestinationT>, ElementT>> o) { DestinationT destination = element.getKey(); o.output( KV.of( ShardedKey.of(destination, ++shardNumber % numFileShards), element.getValue())); } })) .setCoder(KvCoder.of(ShardedKeyCoder.of(destinationCoder), elementCoder)); return writeShardedRecords(shardedRecords, tempFilePrefix); } PCollection<WriteBundlesToFiles.Result<DestinationT>> writeDynamicallyShardedFilesTriggered( PCollection<KV<DestinationT, ElementT>> input, PCollectionView<String> tempFilePrefix) { BigQueryOptions options = input.getPipeline().getOptions().as(BigQueryOptions.class); Duration maxBufferingDuration = options.getMaxBufferingDurationMilliSec() > 0 ? Duration.millis(options.getMaxBufferingDurationMilliSec()) : FILE_TRIGGERING_BATCHING_DURATION; GcsOptions gcsOptions = input.getPipeline().getOptions().as(GcsOptions.class); int byteSize = MoreObjects.firstNonNull( gcsOptions.getGcsUploadBufferSizeBytes(), DEFAULT_FILE_TRIGGERING_BYTE_COUNT); return input .apply( GroupIntoBatches.<DestinationT, ElementT>ofSize(FILE_TRIGGERING_RECORD_COUNT) .withByteSize(byteSize) .withMaxBufferingDuration(maxBufferingDuration) .withShardedKey()) .setCoder( KvCoder.of( org.apache.beam.sdk.util.ShardedKey.Coder.of(destinationCoder), IterableCoder.of(elementCoder))) .apply( "StripShardId", MapElements.via( new SimpleFunction< KV<org.apache.beam.sdk.util.ShardedKey<DestinationT>, Iterable<ElementT>>, KV<DestinationT, Iterable<ElementT>>>() { @Override public KV<DestinationT, Iterable<ElementT>> apply( KV<org.apache.beam.sdk.util.ShardedKey<DestinationT>, Iterable<ElementT>> input) { return KV.of(input.getKey().getKey(), input.getValue()); } })) 
.setCoder(KvCoder.of(destinationCoder, IterableCoder.of(elementCoder))) .apply( "WriteGroupedRecords", ParDo.of( new WriteGroupedRecordsToFiles<DestinationT, ElementT>( tempFilePrefix, maxFileSize, rowWriterFactory)) .withSideInputs(tempFilePrefix)) .setCoder(WriteBundlesToFiles.ResultCoder.of(destinationCoder)); } private PCollection<Result<DestinationT>> writeShardedRecords( PCollection<KV<ShardedKey<DestinationT>, ElementT>> shardedRecords, PCollectionView<String> tempFilePrefix) { return shardedRecords .apply("GroupByDestination", GroupByKey.create()) .apply( "StripShardId", MapElements.via( new SimpleFunction< KV<ShardedKey<DestinationT>, Iterable<ElementT>>, KV<DestinationT, Iterable<ElementT>>>() { @Override public KV<DestinationT, Iterable<ElementT>> apply( KV<ShardedKey<DestinationT>, Iterable<ElementT>> input) { return KV.of(input.getKey().getKey(), input.getValue()); } })) .setCoder(KvCoder.of(destinationCoder, IterableCoder.of(elementCoder))) .apply( "WriteGroupedRecords", ParDo.of( new WriteGroupedRecordsToFiles<>(tempFilePrefix, maxFileSize, rowWriterFactory)) .withSideInputs(tempFilePrefix)) .setCoder(WriteBundlesToFiles.ResultCoder.of(destinationCoder)); } private PCollection<KV<DestinationT, WriteTables.Result>> writeTempTables( PCollection<KV<ShardedKey<DestinationT>, WritePartition.Result>> input, PCollectionView<String> jobIdTokenView) { List<PCollectionView<?>> sideInputs = Lists.newArrayList(jobIdTokenView); sideInputs.addAll(dynamicDestinations.getSideInputs()); Coder<KV<ShardedKey<DestinationT>, WritePartition.Result>> partitionsCoder = KvCoder.of( ShardedKeyCoder.of(NullableCoder.of(destinationCoder)), WritePartition.ResultCoder.INSTANCE); return input .setCoder(partitionsCoder) .apply("MultiPartitionsReshuffle", Reshuffle.of()) .apply( "MultiPartitionsWriteTables", new WriteTables<>( true, bigQueryServices, jobIdTokenView, WriteDisposition.WRITE_EMPTY, CreateDisposition.CREATE_IF_NEEDED, sideInputs, destinationsWithMatching, 
loadJobProjectId, maxRetryJobs, ignoreUnknownValues, kmsKey, rowWriterFactory.getSourceFormat(), useAvroLogicalTypes, schemaUpdateOptions, tempDataset)) .setCoder(KvCoder.of(destinationCoder, WriteTables.ResultCoder.INSTANCE)); } PCollection<TableDestination> writeSinglePartition( PCollection<KV<ShardedKey<DestinationT>, WritePartition.Result>> input, PCollectionView<String> loadJobIdPrefixView) { List<PCollectionView<?>> sideInputs = Lists.newArrayList(loadJobIdPrefixView); sideInputs.addAll(dynamicDestinations.getSideInputs()); Coder<KV<ShardedKey<DestinationT>, WritePartition.Result>> partitionsCoder = KvCoder.of( ShardedKeyCoder.of(NullableCoder.of(destinationCoder)), WritePartition.ResultCoder.INSTANCE); PCollection<KV<DestinationT, WriteTables.Result>> successfulWrites = input .setCoder(partitionsCoder) .apply("SinglePartitionsReshuffle", Reshuffle.of()) .apply( "SinglePartitionWriteTables", new WriteTables<>( false, bigQueryServices, loadJobIdPrefixView, writeDisposition, createDisposition, sideInputs, dynamicDestinations, loadJobProjectId, maxRetryJobs, ignoreUnknownValues, kmsKey, rowWriterFactory.getSourceFormat(), useAvroLogicalTypes, schemaUpdateOptions, null)) .setCoder(KvCoder.of(destinationCoder, WriteTables.ResultCoder.INSTANCE)); BigQueryOptions options = input.getPipeline().getOptions().as(BigQueryOptions.class); String defaultProjectId = options.getBigQueryProject() == null ? 
options.getProject() : options.getBigQueryProject(); return successfulWrites .apply(Keys.create()) .apply( "Convert to TableDestinations", ParDo.of( new DoFn<DestinationT, TableDestination>() { @ProcessElement public void processElement(ProcessContext c) { dynamicDestinations.setSideInputAccessorFromProcessContext(c); TableDestination tableDestination = dynamicDestinations.getTable(c.element()); TableReference tableReference = tableDestination.getTableReference(); if (Strings.isNullOrEmpty(tableReference.getProjectId())) { tableReference.setProjectId(defaultProjectId); tableDestination = tableDestination.withTableReference(tableReference); } c.output(tableDestination); } }) .withSideInputs(sideInputs)) .setCoder(tableDestinationCoder); } private WriteResult writeResult(Pipeline p, PCollection<TableDestination> successfulWrites) { PCollection<TableRow> empty = p.apply("CreateEmptyFailedInserts", Create.empty(TypeDescriptor.of(TableRow.class))); return WriteResult.in( p, new TupleTag<>("failedInserts"), empty, null, new TupleTag<>("successfulInserts"), successfulWrites, null, null); } @Override public WriteResult expand(PCollection<KV<DestinationT, ElementT>> input) { return (triggeringFrequency != null) ? expandTriggered(input) : expandUntriggered(input); } }
that might be done by Netty under the hood, what is the purpose of keeping the transfer-encoding untouched ? perhaps there is something wrong that should be fixed
public static void runTest(String endpoint, String acceptEncoding, String contentEncoding, String contentLength) { LOG.infof("Endpoint %s; Accept-Encoding: %s; Content-Encoding: %s; Content-Length: %s", endpoint, acceptEncoding, contentEncoding, contentLength); final WebClient client = WebClient.create(Vertx.vertx(), new WebClientOptions() .setLogActivity(true) .setFollowRedirects(true) .setDecompressionSupported(false)); final CompletableFuture<HttpResponse<Buffer>> future = new CompletableFuture<>(); client.requestAbs(HttpMethod.GET, endpoint) .putHeader(HttpHeaders.ACCEPT_ENCODING.toString(), acceptEncoding) .putHeader(HttpHeaders.ACCEPT.toString(), "*/*") .putHeader(HttpHeaders.USER_AGENT.toString(), "Tester") .send(ar -> { if (ar.succeeded()) { future.complete(ar.result()); } else { future.completeExceptionally(ar.cause()); } }); try { final HttpResponse<Buffer> response = future.get(); final String actualEncoding = response.headers().get("content-encoding"); assertEquals(OK.code(), response.statusCode(), "Http status must be OK."); assertEquals(contentEncoding, actualEncoding, "Unexpected compressor selected."); final int receivedLength = parseInt(response.headers().get("content-length")); final int expectedLength = parseInt(contentLength); assertTrue(receivedLength <= expectedLength, "Compression apparently failed: receivedLength: " + receivedLength + ", expectedLength: " + expectedLength); final String body; if (actualEncoding != null && !"identity".equalsIgnoreCase(actualEncoding)) { EmbeddedChannel channel = null; if ("gzip".equalsIgnoreCase(actualEncoding)) { channel = new EmbeddedChannel(newZlibDecoder(ZlibWrapper.GZIP)); } else if ("deflate".equalsIgnoreCase(actualEncoding)) { channel = new EmbeddedChannel(newZlibDecoder(ZlibWrapper.ZLIB)); } else if ("br".equalsIgnoreCase(actualEncoding)) { channel = new EmbeddedChannel(new BrotliDecoder()); } else { fail("Unexpected compression used by server: " + actualEncoding); } 
channel.writeInbound(Unpooled.copiedBuffer(response.body().getBytes())); channel.finish(); final ByteBuf decompressed = channel.readInbound(); body = decompressed.readCharSequence(decompressed.readableBytes(), StandardCharsets.UTF_8).toString(); } else { body = response.body().toString(StandardCharsets.UTF_8); } assertEquals(TEXT, body, "Unexpected body text."); } catch (InterruptedException | ExecutionException e) { fail(e); } }
client.requestAbs(HttpMethod.GET, endpoint)
public static void runTest(String endpoint, String acceptEncoding, String contentEncoding, String contentLength) { LOG.infof("Endpoint %s; Accept-Encoding: %s; Content-Encoding: %s; Content-Length: %s", endpoint, acceptEncoding, contentEncoding, contentLength); final WebClient client = WebClient.create(Vertx.vertx(), new WebClientOptions() .setLogActivity(true) .setFollowRedirects(true) .setDecompressionSupported(false)); final CompletableFuture<HttpResponse<Buffer>> future = new CompletableFuture<>(); client.requestAbs(HttpMethod.GET, endpoint) .putHeader(HttpHeaders.ACCEPT_ENCODING.toString(), acceptEncoding) .putHeader(HttpHeaders.ACCEPT.toString(), "*/*") .putHeader(HttpHeaders.USER_AGENT.toString(), "Tester") .send(ar -> { if (ar.succeeded()) { future.complete(ar.result()); } else { future.completeExceptionally(ar.cause()); } }); try { final HttpResponse<Buffer> response = future.get(); final String actualEncoding = response.headers().get("content-encoding"); assertEquals(OK.code(), response.statusCode(), "Http status must be OK."); assertEquals(contentEncoding, actualEncoding, "Unexpected compressor selected."); final int receivedLength = parseInt(response.headers().get("content-length")); final int expectedLength = parseInt(contentLength); if (contentEncoding == null) { assertEquals(expectedLength, receivedLength, "No compression was expected, so the content-length must match exactly."); } else { final int expectedLengthWithTolerance = expectedLength + (expectedLength / 100 * COMPRESSION_TOLERANCE_PERCENT); assertTrue(receivedLength <= expectedLengthWithTolerance, "Compression apparently failed: receivedLength: " + receivedLength + " was supposed to be less or equal to expectedLength: " + expectedLength + " plus " + COMPRESSION_TOLERANCE_PERCENT + "% tolerance, i.e. 
" + expectedLengthWithTolerance + "."); } final String body; if (actualEncoding != null && !"identity".equalsIgnoreCase(actualEncoding)) { EmbeddedChannel channel = null; if ("gzip".equalsIgnoreCase(actualEncoding)) { channel = new EmbeddedChannel(newZlibDecoder(ZlibWrapper.GZIP)); } else if ("deflate".equalsIgnoreCase(actualEncoding)) { channel = new EmbeddedChannel(newZlibDecoder(ZlibWrapper.ZLIB)); } else if ("br".equalsIgnoreCase(actualEncoding)) { channel = new EmbeddedChannel(new BrotliDecoder()); } else { fail("Unexpected compression used by server: " + actualEncoding); } channel.writeInbound(Unpooled.copiedBuffer(response.body().getBytes())); channel.finish(); final ByteBuf decompressed = channel.readInbound(); body = decompressed.readCharSequence(decompressed.readableBytes(), StandardCharsets.UTF_8).toString(); } else { body = response.body().toString(StandardCharsets.UTF_8); } assertEquals(TEXT, body, "Unexpected body text."); } catch (InterruptedException | ExecutionException e) { fail(e); } }
class Testflow { /** * This test logic is shared by both "all" module and "some" module. * See their RESTEndpointsTest classes. * * @param endpoint * @param acceptEncoding * @param contentEncoding * @param contentLength */ }
class Testflow { public static final int COMPRESSION_TOLERANCE_PERCENT = 2; /** * This test logic is shared by both "all" module and "some" module. * See their RESTEndpointsTest classes. * * @param endpoint * @param acceptEncoding * @param contentEncoding * @param contentLength */ }
do you think creating a github issue and putting it here will help to keep track of this?
static void validateMultipageReceiptData(List<RecognizedForm> recognizedReceipts) { assertEquals(3, recognizedReceipts.size()); RecognizedForm receiptPage1 = recognizedReceipts.get(0); RecognizedForm receiptPage2 = recognizedReceipts.get(1); RecognizedForm receiptPage3 = recognizedReceipts.get(2); assertEquals(1, receiptPage1.getPageRange().getFirstPageNumber()); assertEquals(1, receiptPage1.getPageRange().getLastPageNumber()); Map<String, FormField> receiptPage1Fields = receiptPage1.getFields(); assertEquals(EXPECTED_MULTIPAGE_ADDRESS_VALUE, receiptPage1Fields.get("MerchantAddress") .getValue().asString()); assertEquals("Bilbo Baggins", receiptPage1Fields.get("MerchantName") .getValue().asString()); assertEquals(EXPECTED_MULTIPAGE_PHONE_NUMBER_VALUE, receiptPage1Fields.get("MerchantPhoneNumber") .getValue().asPhoneNumber()); assertNotNull(receiptPage1.getPages()); assertEquals(ITEMIZED_RECEIPT_VALUE, receiptPage1Fields.get("ReceiptType").getValue().asString()); assertEquals(0, receiptPage2.getFields().size()); List<FormPage> receipt2Pages = receiptPage2.getPages(); assertEquals(1, receipt2Pages.size()); assertNull(receipt2Pages.stream().findFirst().get().getLines()); assertEquals(2, receiptPage2.getPageRange().getFirstPageNumber()); assertEquals(2, receiptPage2.getPageRange().getLastPageNumber()); assertEquals(3, receiptPage3.getPageRange().getFirstPageNumber()); assertEquals(3, receiptPage3.getPageRange().getLastPageNumber()); Map<String, FormField> receiptPage3Fields = receiptPage3.getFields(); assertEquals(EXPECTED_MULTIPAGE_ADDRESS_VALUE, receiptPage3Fields.get("MerchantAddress").getValue().asString()); assertEquals("Frodo Baggins", receiptPage3Fields.get("MerchantName").getValue().asString()); assertEquals(EXPECTED_MULTIPAGE_PHONE_NUMBER_VALUE, receiptPage3Fields.get("MerchantPhoneNumber").getValue().asPhoneNumber()); assertEquals(ITEMIZED_RECEIPT_VALUE, receiptPage3Fields.get("ReceiptType").getValue().asString()); }
static void validateMultipageReceiptData(List<RecognizedForm> recognizedReceipts) { assertEquals(3, recognizedReceipts.size()); RecognizedForm receiptPage1 = recognizedReceipts.get(0); RecognizedForm receiptPage2 = recognizedReceipts.get(1); RecognizedForm receiptPage3 = recognizedReceipts.get(2); assertEquals(1, receiptPage1.getPageRange().getFirstPageNumber()); assertEquals(1, receiptPage1.getPageRange().getLastPageNumber()); Map<String, FormField> receiptPage1Fields = receiptPage1.getFields(); assertEquals(EXPECTED_MULTIPAGE_ADDRESS_VALUE, receiptPage1Fields.get("MerchantAddress") .getValue().asString()); assertEquals("Bilbo Baggins", receiptPage1Fields.get("MerchantName") .getValue().asString()); assertEquals(EXPECTED_MULTIPAGE_PHONE_NUMBER_VALUE, receiptPage1Fields.get("MerchantPhoneNumber") .getValue().asPhoneNumber()); assertNotNull(receiptPage1.getPages()); assertEquals(ITEMIZED_RECEIPT_VALUE, receiptPage1Fields.get("ReceiptType").getValue().asString()); assertEquals(0, receiptPage2.getFields().size()); List<FormPage> receipt2Pages = receiptPage2.getPages(); assertEquals(1, receipt2Pages.size()); assertEquals(0, receipt2Pages.stream().findFirst().get().getLines().size()); assertEquals(2, receiptPage2.getPageRange().getFirstPageNumber()); assertEquals(2, receiptPage2.getPageRange().getLastPageNumber()); assertEquals(3, receiptPage3.getPageRange().getFirstPageNumber()); assertEquals(3, receiptPage3.getPageRange().getLastPageNumber()); Map<String, FormField> receiptPage3Fields = receiptPage3.getFields(); assertEquals(EXPECTED_MULTIPAGE_ADDRESS_VALUE, receiptPage3Fields.get("MerchantAddress").getValue().asString()); assertEquals("Frodo Baggins", receiptPage3Fields.get("MerchantName").getValue().asString()); assertEquals(EXPECTED_MULTIPAGE_PHONE_NUMBER_VALUE, receiptPage3Fields.get("MerchantPhoneNumber").getValue().asPhoneNumber()); assertEquals(ITEMIZED_RECEIPT_VALUE, receiptPage3Fields.get("ReceiptType").getValue().asString()); }
class FormRecognizerClientTestBase extends TestBase { private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+"); private static final String EXPECTED_MULTIPAGE_ADDRESS_VALUE = "123 Hobbit Lane 567 Main St. Redmond, WA Redmond," + " WA"; private static final String EXPECTED_MULTIPAGE_PHONE_NUMBER_VALUE = "+15555555555"; private static final String ITEMIZED_RECEIPT_VALUE = "Itemized"; static final String RECEIPT_CONTOSO_JPG = "contoso-allinone.jpg"; static final String RECEIPT_CONTOSO_PNG = "contoso-receipt.png"; static final String INVOICE_6_PDF = "Invoice_6.pdf"; static final String MULTIPAGE_INVOICE_PDF = "multipage_invoice1.pdf"; static final String BUSINESS_CARD_JPG = "businessCard.jpg"; static final String BUSINESS_CARD_PNG = "businessCard.png"; static final String MULTIPAGE_BUSINESS_CARD_PDF = "business-card-multipage.pdf"; static final String INVOICE_PDF = "Invoice_1.pdf"; static final String MULTIPAGE_VENDOR_INVOICE_PDF = "multipage_vendor_invoice.pdf"; static final String BAD_ARGUMENT_CODE = "BadArgument"; static final String INVALID_IMAGE_ERROR_CODE = "InvalidImage"; static final String INVALID_MODEL_ID_ERROR_CODE = "1001"; static final String MODEL_ID_NOT_FOUND_ERROR_CODE = "1022"; static final String URL_BADLY_FORMATTED_ERROR_CODE = "2001"; static final String UNABLE_TO_READ_FILE_ERROR_CODE = "2005"; static final String HTTPS_EXCEPTION_MESSAGE = "Max retries 3 times exceeded. 
Error Details: Key credentials require HTTPS to prevent leaking the key."; static final String INVALID_UUID_EXCEPTION_MESSAGE = "Invalid UUID string: "; static final String MODEL_ID_IS_REQUIRED_EXCEPTION_MESSAGE = "'modelId' is required and cannot be null."; static final String INVALID_ENDPOINT = "https: static final String LOCAL_FILE_PATH = "src/test/resources/sample_files/Test/"; static final String ENCODED_EMPTY_SPACE = "{\"source\":\"https: static final List<String> BUSINESS_CARD_FIELDS = Arrays.asList("ContactNames", "JobTitles", "Departments", "Emails", "Websites", "MobilePhones", "OtherPhones", "Faxes", "Addresses", "CompanyNames"); static final List<String> RECEIPT_FIELDS = Arrays.asList("MerchantName", "MerchantPhoneNumber", "MerchantAddress", "Total", "Subtotal", "Tax", "TransactionDate", "TransactionDate", "TransactionTime", "Items"); static final List<String> INVOICE_FIELDS = Arrays.asList("CustomerAddressRecipient", "InvoiceId", "VendorName", "VendorAddress", "CustomerAddress", "CustomerName", "InvoiceTotal", "DueDate", "InvoiceDate"); enum PrebuiltType { RECEIPT, BUSINESS_CARD, INVOICE } Duration durationTestMode; public static final String INVOICE_TEST_URL = "https: + "feature/formrecognizer_v2.1-preview2/sdk/formrecognizer/azure-ai-formrecognizer/src/test/resources/" + "sample_files/Test/Invoice_1.pdf"; /** * Use duration of nearly zero value for PLAYBACK test mode, otherwise, use default duration value for LIVE mode. */ @Override protected void beforeTest() { if (interceptorManager.isPlaybackMode()) { durationTestMode = ONE_NANO_DURATION; } else { durationTestMode = DEFAULT_POLL_INTERVAL; } } FormRecognizerClientBuilder getFormRecognizerClientBuilder(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { FormRecognizerClientBuilder builder = new FormRecognizerClientBuilder() .endpoint(getEndpoint()) .httpClient(httpClient == null ? 
interceptorManager.getPlaybackClient() : httpClient) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(serviceVersion) .addPolicy(interceptorManager.getRecordPolicy()); if (getTestMode() == TestMode.PLAYBACK) { builder.credential(new AzureKeyCredential(INVALID_KEY)); } else { builder.credential(new AzureKeyCredential(Configuration.getGlobalConfiguration().get("AZURE_FORM_RECOGNIZER_API_KEY"))); } return builder; } FormTrainingClientBuilder getFormTrainingClientBuilder(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { FormTrainingClientBuilder builder = new FormTrainingClientBuilder() .endpoint(getEndpoint()) .httpClient(httpClient == null ? interceptorManager.getPlaybackClient() : httpClient) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(serviceVersion) .addPolicy(interceptorManager.getRecordPolicy()); if (getTestMode() == TestMode.PLAYBACK) { builder.credential(new AzureKeyCredential(INVALID_KEY)); } else { builder.credential(new AzureKeyCredential(Configuration.getGlobalConfiguration().get(AZURE_FORM_RECOGNIZER_API_KEY))); } return builder; } private static void validateReferenceElementsData(List<String> expectedElements, List<FormElement> actualFormElementList, List<ReadResult> readResults) { if (expectedElements != null && actualFormElementList != null) { assertEquals(expectedElements.size(), actualFormElementList.size()); for (int i = 0; i < actualFormElementList.size(); i++) { String[] indices = NON_DIGIT_PATTERN.matcher(expectedElements.get(i)).replaceAll(" ").trim().split(" "); if (indices.length < 2) { return; } int readResultIndex = Integer.parseInt(indices[0]); int lineIndex = Integer.parseInt(indices[1]); if (indices.length == 3) { int wordIndex = Integer.parseInt(indices[2]); TextWord expectedTextWord = readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex); TextLine expectedTextLine = 
readResults.get(readResultIndex).getLines().get(lineIndex); if (actualFormElementList.get(i) instanceof FormLine) { FormLine actualFormLine = (FormLine) actualFormElementList.get(i); validateFormWordData(expectedTextLine.getWords(), actualFormLine.getWords()); } FormWord actualFormWord = (FormWord) actualFormElementList.get(i); assertEquals(expectedTextWord.getText(), actualFormWord.getText()); if (expectedTextWord.getConfidence() != null) { assertEquals(expectedTextWord.getConfidence(), actualFormWord.getConfidence()); } else { assertEquals(1.0f, actualFormWord.getConfidence()); } validateBoundingBoxData(expectedTextWord.getBoundingBox(), actualFormWord.getBoundingBox()); } } } } private static void validateFormTableData(List<DataTable> expectedFormTables, List<FormTable> actualFormTable, List<ReadResult> readResults, boolean includeFieldElements, int pageNumber) { assertEquals(expectedFormTables.size(), actualFormTable.size()); for (int i = 0; i < actualFormTable.size(); i++) { DataTable expectedTable = expectedFormTables.get(i); FormTable actualTable = actualFormTable.get(i); assertEquals(pageNumber, actualTable.getPageNumber()); assertEquals(expectedTable.getColumns(), actualTable.getColumnCount()); validateCellData(expectedTable.getCells(), actualTable.getCells(), readResults, includeFieldElements); assertEquals(expectedTable.getRows(), actualTable.getRowCount()); validateBoundingBoxData(expectedTable.getBoundingBox(), actualTable.getFieldBoundingBox()); } } private static void validateCellData(List<DataTableCell> expectedTableCells, List<FormTableCell> actualTableCellList, List<ReadResult> readResults, boolean includeFieldElements) { assertEquals(expectedTableCells.size(), actualTableCellList.size()); for (int i = 0; i < actualTableCellList.size(); i++) { DataTableCell expectedTableCell = expectedTableCells.get(i); FormTableCell actualTableCell = actualTableCellList.get(i); assertEquals(expectedTableCell.getColumnIndex(), actualTableCell.getColumnIndex()); if 
(expectedTableCell.getColumnSpan() != null) { assertEquals(expectedTableCell.getColumnSpan(), actualTableCell.getColumnSpan()); } assertNotNull(actualTableCell.getColumnSpan()); assertEquals(expectedTableCell.getRowIndex(), actualTableCell.getRowIndex()); if (expectedTableCell.getRowSpan() != null) { assertEquals(expectedTableCell.getRowSpan(), actualTableCell.getRowSpan()); } assertNotNull(actualTableCell.getRowSpan()); validateBoundingBoxData(expectedTableCell.getBoundingBox(), actualTableCell.getBoundingBox()); if (includeFieldElements) { validateReferenceElementsData(expectedTableCell.getElements(), actualTableCell.getFieldElements(), readResults); } } } private static void validateFormLineData(List<TextLine> expectedLines, List<FormLine> actualLineList) { assertEquals(expectedLines.size(), actualLineList.size()); for (int i = 0; i < actualLineList.size(); i++) { TextLine expectedLine = expectedLines.get(i); FormLine actualLine = actualLineList.get(i); assertEquals(expectedLine.getText(), actualLine.getText()); validateBoundingBoxData(expectedLine.getBoundingBox(), actualLine.getBoundingBox()); validateFormWordData(expectedLine.getWords(), actualLine.getWords()); } } private static void validateFormSelectionMarkData(List<SelectionMark> expectedMarks, List<FormSelectionMark> actualMarks, int pageNumber) { for (int i = 0; i < actualMarks.size(); i++) { final SelectionMark expectedMark = expectedMarks.get(i); final FormSelectionMark actualMark = actualMarks.get(i); assertEquals(expectedMark.getState().toString(), actualMark.getState().toString()); validateBoundingBoxData(expectedMark.getBoundingBox(), actualMark.getBoundingBox()); assertNull(actualMark.getText()); assertEquals(pageNumber, actualMark.getPageNumber()); } } private static void validateFormWordData(List<TextWord> expectedFormWords, List<FormWord> actualFormWordList) { assertEquals(expectedFormWords.size(), actualFormWordList.size()); for (int i = 0; i < actualFormWordList.size(); i++) { TextWord 
expectedWord = expectedFormWords.get(i); FormWord actualWord = actualFormWordList.get(i); assertEquals(expectedWord.getText(), actualWord.getText()); validateBoundingBoxData(expectedWord.getBoundingBox(), actualWord.getBoundingBox()); if (expectedWord.getConfidence() != null) { assertEquals(expectedWord.getConfidence(), actualWord.getConfidence()); } else { assertEquals(1.0f, actualWord.getConfidence()); } } } private static void validateBoundingBoxData(List<Float> expectedBoundingBox, FieldBoundingBox actualFieldBoundingBox) { if (actualFieldBoundingBox != null && actualFieldBoundingBox.getPoints() != null) { int i = 0; for (Point point : actualFieldBoundingBox.getPoints()) { assertEquals(expectedBoundingBox.get(i), point.getX()); assertEquals(expectedBoundingBox.get(++i), point.getY()); i++; } } } @SuppressWarnings("unchecked") private static void validateFieldValueTransforms(FieldValue expectedFieldValue, FormField actualFormField, List<ReadResult> readResults, boolean includeFieldElements) { if (expectedFieldValue != null) { if (expectedFieldValue.getBoundingBox() != null) { validateBoundingBoxData(expectedFieldValue.getBoundingBox(), actualFormField.getValueData().getBoundingBox()); } if (includeFieldElements && expectedFieldValue.getElements() != null) { validateReferenceElementsData(expectedFieldValue.getElements(), actualFormField.getValueData().getFieldElements(), readResults); } switch (expectedFieldValue.getType()) { case NUMBER: if (expectedFieldValue.getValueNumber() != null) { assertEquals(expectedFieldValue.getValueNumber(), actualFormField.getValue().asFloat()); } break; case DATE: assertEquals(expectedFieldValue.getValueDate(), actualFormField.getValue().asDate()); break; case TIME: assertEquals(LocalTime.parse(expectedFieldValue.getValueTime(), DateTimeFormatter.ofPattern("HH:mm:ss")), actualFormField.getValue().asTime()); break; case STRING: if (actualFormField.getName() != "ReceiptType") { assertEquals(expectedFieldValue.getValueString(), 
actualFormField.getValue().asString()); } break; case INTEGER: assertEquals(expectedFieldValue.getValueInteger(), actualFormField.getValue().asLong()); break; case PHONE_NUMBER: assertEquals(expectedFieldValue.getValuePhoneNumber(), actualFormField.getValue().asPhoneNumber()); break; case OBJECT: expectedFieldValue.getValueObject().forEach((key, formField) -> { FormField actualFormFieldValue = actualFormField.getValue().asMap().get(key); validateFieldValueTransforms(formField, actualFormFieldValue, readResults, includeFieldElements); }); break; case ARRAY: assertEquals(expectedFieldValue.getValueArray().size(), actualFormField.getValue().asList().size()); for (int i = 0; i < expectedFieldValue.getValueArray().size(); i++) { FieldValue expectedReceiptItem = expectedFieldValue.getValueArray().get(i); FormField actualReceiptItem = actualFormField.getValue().asList().get(i); validateFieldValueTransforms(expectedReceiptItem, actualReceiptItem, readResults, includeFieldElements); } break; default: assertFalse(false, "Field type not supported."); } } } private static void validatePageRangeData(int expectedPageInfo, FormPageRange actualPageInfo) { assertEquals(expectedPageInfo, actualPageInfo.getFirstPageNumber()); assertEquals(expectedPageInfo, actualPageInfo.getLastPageNumber()); } @Test abstract void recognizeReceiptData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptDataNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptDataWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptDataWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptDataWithBlankPdf(HttpClient httpClient, 
FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptFromDataMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptFromUrlIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptSourceUrlWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptFromUrlMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContent(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentResultWithNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentResultWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentResultWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentFromDataMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentWithSelectionMarks(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentFromUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentFromUrlWithPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentFromUrlMultiPage(HttpClient httpClient, FormRecognizerServiceVersion 
serviceVersion); @Test abstract void recognizeContentWithSelectionMarksFromUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithJpgContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithBlankPdfContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataExcludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithNullFormData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithNullModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithEmptyModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormInvalidStatus(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormMultiPageLabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithSelectionMark(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUnlabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUnlabeledDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormMultiPageUnlabeled(HttpClient httpClient, 
FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUnlabeledDataWithJpgContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUnlabeledDataWithBlankPdfContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlUnlabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlUnlabeledDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlMultiPageUnlabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormFromUrlLabeledDataWithNullModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormFromUrlLabeledDataWithEmptyModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlLabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlLabeledDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlMultiPageLabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlLabeledDataWithSelectionMark(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardDataNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardDataWithContentTypeAutoDetection(HttpClient 
httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardDataWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardDataWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardFromDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardFromUrlIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardSourceUrlWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); void validateContentResultData(List<FormPage> actualFormPageList, boolean includeFieldElements) { AnalyzeResult analyzeResult = getAnalyzeRawResponse().getAnalyzeResult(); final List<PageResult> pageResults = analyzeResult.getPageResults(); final List<ReadResult> readResults = analyzeResult.getReadResults(); for (int i = 0; i < actualFormPageList.size(); i++) { FormPage actualFormPage = actualFormPageList.get(i); ReadResult readResult = readResults.get(i); if (readResult.getAngle() > 180) { assertEquals(readResult.getAngle() - 360, actualFormPage.getTextAngle()); } else { assertEquals(readResult.getAngle(), actualFormPage.getTextAngle()); } assertEquals(readResult.getWidth(), actualFormPage.getWidth()); assertEquals(readResult.getHeight(), 
actualFormPage.getHeight()); assertEquals(readResult.getUnit().toString(), actualFormPage.getUnit().toString()); assertEquals(readResult.getPage(), actualFormPage.getPageNumber()); if (includeFieldElements) { validateFormLineData(readResult.getLines(), actualFormPage.getLines()); } validateFormSelectionMarkData(readResult.getSelectionMarks(), actualFormPage.getSelectionMarks(), readResult.getPage()); if (pageResults != null) { validateFormTableData(pageResults.get(i).getTables(), actualFormPage.getTables(), readResults, includeFieldElements, pageResults.get(i).getPage()); } } } void validateBlankPdfResultData(List<RecognizedForm> actualReceiptList) { assertEquals(1, actualReceiptList.size()); final RecognizedForm actualReceipt = actualReceiptList.get(0); assertTrue(actualReceipt.getFields().isEmpty()); } void validateRecognizedResult(List<RecognizedForm> actualFormList, boolean includeFieldElements, boolean isLabeled) { final AnalyzeResult rawResponse = getAnalyzeRawResponse().getAnalyzeResult(); List<ReadResult> readResults = rawResponse.getReadResults(); List<PageResult> pageResults = rawResponse.getPageResults(); List<DocumentResult> documentResults = rawResponse.getDocumentResults(); for (int i = 0; i < actualFormList.size(); i++) { validateContentResultData(actualFormList.get(i).getPages(), includeFieldElements); if (isLabeled) { validateLabeledData(actualFormList.get(i), includeFieldElements, readResults, documentResults.get(i)); } else { validateUnLabeledResult(actualFormList.get(i), includeFieldElements, readResults, pageResults.get(i)); } } } void validatePrebuiltResultData(List<RecognizedForm> actualPrebuiltRecognizedForms, boolean includeFieldElements, PrebuiltType prebuiltType) { final AnalyzeResult rawResponse = getAnalyzeRawResponse().getAnalyzeResult(); final List<ReadResult> rawReadResults = rawResponse.getReadResults(); for (int i = 0; i < actualPrebuiltRecognizedForms.size(); i++) { final RecognizedForm actualForm = 
actualPrebuiltRecognizedForms.get(i); final DocumentResult rawDocumentResult = rawResponse.getDocumentResults().get(i); validateLabeledData(actualForm, includeFieldElements, rawReadResults, rawDocumentResult); if (BUSINESS_CARD.equals(prebuiltType)) { assertEquals("prebuilt:businesscard", actualForm.getFormType()); BUSINESS_CARD_FIELDS.forEach(businessCardField -> validateFieldValueTransforms(rawDocumentResult.getFields().get(businessCardField), actualForm.getFields().get(businessCardField), rawReadResults, includeFieldElements)); } else if (RECEIPT.equals(prebuiltType)) { assertEquals("prebuilt:receipt", actualForm.getFormType()); RECEIPT_FIELDS.forEach(receiptField -> { final Map<String, FormField> actualRecognizedReceiptFields = actualForm.getFields(); Map<String, FieldValue> expectedReceiptFields = rawDocumentResult.getFields(); assertEquals(expectedReceiptFields.get("ReceiptType").getValueString(), actualRecognizedReceiptFields.get("ReceiptType").getValue().asString()); assertEquals(expectedReceiptFields.get("ReceiptType").getConfidence(), actualRecognizedReceiptFields.get("ReceiptType").getConfidence()); validateFieldValueTransforms(rawDocumentResult.getFields().get(receiptField), actualRecognizedReceiptFields.get(receiptField), rawReadResults, includeFieldElements); }); } else if (INVOICE.equals(prebuiltType)) { assertEquals("prebuilt:invoice", actualForm.getFormType()); INVOICE_FIELDS.forEach(invoiceField -> { final Map<String, FormField> actualRecognizedInvoiceFields = actualForm.getFields(); Map<String, FieldValue> expectedInvoiceFields = rawDocumentResult.getFields(); validateFieldValueTransforms(expectedInvoiceFields.get(invoiceField), actualRecognizedInvoiceFields.get(invoiceField), rawReadResults, includeFieldElements); }); } else { throw new RuntimeException("prebuilt type not supported"); } } } void invalidSourceUrlRunner(Consumer<String> testRunner) { testRunner.accept(TestUtils.INVALID_RECEIPT_URL); } void 
encodedBlankSpaceSourceUrlRunner(Consumer<String> testRunner) { testRunner.accept(FAKE_ENCODED_EMPTY_SPACE_URL); } void urlRunner(Consumer<String> testRunner, String fileName) { testRunner.accept(URL_TEST_FILE_FORMAT + fileName); } void testingContainerUrlRunner(Consumer<String> testRunner, String fileName) { testRunner.accept(getStorageTestingFileUrl(fileName)); } void dataRunner(BiConsumer<InputStream, Long> testRunner, String fileName) { final long fileLength = new File(LOCAL_FILE_PATH + fileName).length(); if (interceptorManager.isPlaybackMode()) { testRunner.accept(new ByteArrayInputStream(TEST_DATA_PNG.getBytes(StandardCharsets.UTF_8)), fileLength); } else { try { testRunner.accept(new FileInputStream(LOCAL_FILE_PATH + fileName), fileLength); } catch (FileNotFoundException e) { throw new RuntimeException("Local file not found.", e); } } } void localFilePathRunner(BiConsumer<String, Long> testRunner, String fileName) { final long fileLength = new File(LOCAL_FILE_PATH + fileName).length(); testRunner.accept(LOCAL_FILE_PATH + fileName, fileLength); } void damagedPdfDataRunner(BiConsumer<InputStream, Integer> testRunner) { testRunner.accept(new ByteArrayInputStream(new byte[]{0x25, 0x50, 0x44, 0x46, 0x55, 0x55, 0x55}), 7); } void beginTrainingUnlabeledRunner(BiConsumer<String, Boolean> testRunner) { testRunner.accept(getTrainingSasUri(), false); } void beginTrainingLabeledRunner(BiConsumer<String, Boolean> testRunner) { testRunner.accept(getTrainingSasUri(), true); } void beginSelectionMarkTrainingLabeledRunner(BiConsumer<String, Boolean> testRunner) { testRunner.accept(getSelectionMarkTrainingSasUri(), true); } void beginTrainingMultipageRunner(Consumer<String> testRunner) { testRunner.accept(getMultipageTrainingSasUri()); } private void validateUnLabeledResult(RecognizedForm actualForm, boolean includeFieldElements, List<ReadResult> readResults, PageResult expectedPage) { validatePageRangeData(expectedPage.getPage(), actualForm.getPageRange()); int i = 0; for 
(Map.Entry<String, FormField> entry : actualForm.getFields().entrySet()) { FormField actualFormField = entry.getValue(); final KeyValuePair expectedFormField = expectedPage.getKeyValuePairs().get(i++); assertEquals(expectedFormField.getConfidence(), actualFormField.getConfidence()); assertEquals(expectedFormField.getKey().getText(), actualFormField.getLabelData().getText()); validateBoundingBoxData(expectedFormField.getKey().getBoundingBox(), actualFormField.getLabelData().getBoundingBox()); if (includeFieldElements) { validateReferenceElementsData(expectedFormField.getKey().getElements(), actualFormField.getLabelData().getFieldElements(), readResults); validateReferenceElementsData(expectedFormField.getValue().getElements(), actualFormField.getValueData().getFieldElements(), readResults); } assertEquals(expectedFormField.getValue().getText(), actualFormField.getValueData().getText()); validateBoundingBoxData(expectedFormField.getValue().getBoundingBox(), actualFormField.getValueData().getBoundingBox()); } } private void validateLabeledData(RecognizedForm actualForm, boolean includeFieldElements, List<ReadResult> readResults, DocumentResult documentResult) { assertEquals(documentResult.getPageRange().get(0), actualForm.getPageRange().getFirstPageNumber()); assertEquals(documentResult.getPageRange().get(1), actualForm.getPageRange().getLastPageNumber()); assertEquals(documentResult.getFields().keySet(), actualForm.getFields().keySet()); documentResult.getFields().forEach((label, expectedFieldValue) -> { final FormField actualFormField = actualForm.getFields().get(label); assertEquals(label, actualFormField.getName()); if (expectedFieldValue != null) { if (expectedFieldValue.getConfidence() != null) { assertEquals(expectedFieldValue.getConfidence(), actualFormField.getConfidence()); } else { assertEquals(1.0f, actualFormField.getConfidence()); } validateFieldValueTransforms(expectedFieldValue, actualFormField, readResults, includeFieldElements); } }); } static void 
validateMultiPageDataLabeled(List<RecognizedForm> actualRecognizedFormsList) { actualRecognizedFormsList.forEach(recognizedForm -> { assertEquals(1, recognizedForm.getPageRange().getFirstPageNumber()); assertEquals(3, recognizedForm.getPageRange().getLastPageNumber()); assertEquals(3, recognizedForm.getPages().size()); recognizedForm.getFields().forEach((label, formField) -> { assertNotNull(formField.getName()); assertNotNull(formField.getValue()); assertNotNull(formField.getValueData().getText()); assertNull(formField.getLabelData()); }); }); } static void validateMultiPageDataUnlabeled(List<RecognizedForm> actualRecognizedFormsList) { actualRecognizedFormsList.forEach(recognizedForm -> { assertNotNull(recognizedForm.getFormType()); assertEquals(1, (long) recognizedForm.getPages().size()); recognizedForm.getFields().forEach((label, formField) -> { assertNotNull(formField.getName()); assertNotNull(formField.getValue()); assertNotNull(formField.getValueData().getText()); assertNotNull(formField.getLabelData().getText()); }); }); } static void validateMultipageBusinessData(List<RecognizedForm> recognizedBusinessCards) { assertEquals(2, recognizedBusinessCards.size()); RecognizedForm businessCard1 = recognizedBusinessCards.get(0); RecognizedForm businessCard2 = recognizedBusinessCards.get(1); assertEquals(1, businessCard1.getPageRange().getFirstPageNumber()); assertEquals(1, businessCard1.getPageRange().getLastPageNumber()); Map<String, FormField> businessCard1Fields = businessCard1.getFields(); List<FormField> emailList = businessCard1Fields.get("Emails").getValue().asList(); assertEquals("johnsinger@contoso.com", emailList.get(0).getValue().asString()); List<FormField> phoneNumberList = businessCard1Fields.get("OtherPhones").getValue().asList(); assertEquals("+14257793479", phoneNumberList.get(0).getValue().asPhoneNumber()); assertNotNull(businessCard1.getPages()); FormField contactNameField = businessCard1Fields.get("ContactNames").getValue().asList().get(0); 
assertEquals(contactNameField.getValueData().getPageNumber(), 1); assertEquals(contactNameField.getValueData().getText(), "JOHN SINGER"); assertEquals(2, businessCard2.getPageRange().getFirstPageNumber()); assertEquals(2, businessCard2.getPageRange().getLastPageNumber()); Map<String, FormField> businessCard2Fields = businessCard2.getFields(); List<FormField> email2List = businessCard2Fields.get("Emails").getValue().asList(); assertEquals("avery.smith@contoso.com", email2List.get(0).getValue().asString()); List<FormField> phoneNumber2List = businessCard2Fields.get("OtherPhones").getValue().asList(); assertEquals("+44 (0) 20 9876 5432", phoneNumber2List.get(0).getValueData().getText()); assertNotNull(businessCard2.getPages()); FormField contactName2Field = businessCard2Fields.get("ContactNames").getValue().asList().get(0); assertEquals(contactName2Field.getValueData().getPageNumber(), 2); assertEquals(contactName2Field.getValueData().getText(), "Dr. Avery Smith"); } static void validateMultipageInvoiceData(List<RecognizedForm> recognizedInvoices) { assertEquals(1, recognizedInvoices.size()); RecognizedForm recognizedForm = recognizedInvoices.get(0); assertEquals(1, recognizedForm.getPageRange().getFirstPageNumber()); assertEquals(2, recognizedForm.getPageRange().getLastPageNumber()); Map<String, FormField> recognizedInvoiceFields = recognizedForm.getFields(); final FormField remittanceAddressRecipient = recognizedInvoiceFields.get("RemittanceAddressRecipient"); assertEquals("Contoso Ltd.", remittanceAddressRecipient.getValue().asString()); assertEquals(1, remittanceAddressRecipient.getValueData().getPageNumber()); final FormField remittanceAddress = recognizedInvoiceFields.get("RemittanceAddress"); assertEquals("2345 Dogwood Lane Birch, Kansas 98123", remittanceAddress.getValue().asString()); assertEquals(1, remittanceAddress.getValueData().getPageNumber()); final FormField vendorName = recognizedInvoiceFields.get("VendorName"); assertEquals("Southridge Video", 
vendorName.getValue().asString()); assertEquals(2, vendorName.getValueData().getPageNumber()); assertEquals(2, recognizedForm.getPages().size()); } protected String getEndpoint() { return interceptorManager.isPlaybackMode() ? "https: : Configuration.getGlobalConfiguration().get(AZURE_FORM_RECOGNIZER_ENDPOINT); } /** * Get the training data set SAS Url value based on the test running mode. * * @return the training data set Url */ private String getTrainingSasUri() { if (interceptorManager.isPlaybackMode()) { return "https: } else { return Configuration.getGlobalConfiguration().get(FORM_RECOGNIZER_TRAINING_BLOB_CONTAINER_SAS_URL); } } private String getSelectionMarkTrainingSasUri() { if (interceptorManager.isPlaybackMode()) { return "https: } else { return Configuration.getGlobalConfiguration().get(FORM_RECOGNIZER_SELECTION_MARK_BLOB_CONTAINER_SAS_URL); } } /** * Get the training data set SAS Url value based on the test running mode. * * @return the training data set Url */ private String getMultipageTrainingSasUri() { if (interceptorManager.isPlaybackMode()) { return "https: } else { return Configuration.getGlobalConfiguration() .get(FORM_RECOGNIZER_MULTIPAGE_TRAINING_BLOB_CONTAINER_SAS_URL); } } /** * Get the testing data set SAS Url value based on the test running mode. * * @return the testing data set Url */ private String getTestingSasUri() { if (interceptorManager.isPlaybackMode()) { return "https: } else { return Configuration.getGlobalConfiguration().get("FORM_RECOGNIZER_TESTING_BLOB_CONTAINER_SAS_URL"); } } /** * Prepare the file url from the testing data set SAS Url value. * * @return the testing data specific file Url */ private String getStorageTestingFileUrl(String fileName) { if (interceptorManager.isPlaybackMode()) { return "https: } else { final String[] urlParts = getTestingSasUri().split("\\?"); return urlParts[0] + "/" + fileName + "?" + urlParts[1]; } } /** * Prepare the expected test data from service raw response. 
* * @return the {@code AnalyzeOperationResult} test data */ private AnalyzeOperationResult getAnalyzeRawResponse() { final SerializerAdapter serializerAdapter = getSerializerAdapter(); final NetworkCallRecord networkCallRecord = interceptorManager.getRecordedData().findFirstAndRemoveNetworkCall(record -> { AnalyzeOperationResult rawModelResponse = deserializeRawResponse(serializerAdapter, record, AnalyzeOperationResult.class); return rawModelResponse != null && rawModelResponse.getStatus() == OperationStatus.SUCCEEDED; }); interceptorManager.getRecordedData().addNetworkCall(networkCallRecord); return deserializeRawResponse(serializerAdapter, networkCallRecord, AnalyzeOperationResult.class); } void validateNetworkCallRecord(String requestParam, String value) { final RecordedData copyRecordedData = interceptorManager.getRecordedData(); final NetworkCallRecord networkCallRecord = copyRecordedData.findFirstAndRemoveNetworkCall(record -> true); copyRecordedData.addNetworkCall(networkCallRecord); URL url = null; try { url = new URL(networkCallRecord.getUri()); } catch (MalformedURLException e) { assertFalse(false, e.getMessage()); } Pattern.compile("&").splitAsStream(url.getQuery()) .map(s -> Arrays.copyOf(s.split("="), 2)) .map(o -> new AbstractMap.SimpleEntry<String, String>(o[0], o[1] == null ? "" : o[1])) .map(entry -> { if (entry.getKey().equals(requestParam)) { assertEquals(value, entry.getValue()); return true; } else { return false; } }); } }
class FormRecognizerClientTestBase extends TestBase { private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+"); private static final String EXPECTED_MULTIPAGE_ADDRESS_VALUE = "123 Hobbit Lane 567 Main St. Redmond, WA Redmond," + " WA"; private static final String EXPECTED_MULTIPAGE_PHONE_NUMBER_VALUE = "+15555555555"; private static final String ITEMIZED_RECEIPT_VALUE = "Itemized"; static final String RECEIPT_CONTOSO_JPG = "contoso-allinone.jpg"; static final String RECEIPT_CONTOSO_PNG = "contoso-receipt.png"; static final String INVOICE_6_PDF = "Invoice_6.pdf"; static final String MULTIPAGE_INVOICE_PDF = "multipage_invoice1.pdf"; static final String BUSINESS_CARD_JPG = "businessCard.jpg"; static final String BUSINESS_CARD_PNG = "businessCard.png"; static final String MULTIPAGE_BUSINESS_CARD_PDF = "business-card-multipage.pdf"; static final String INVOICE_PDF = "Invoice_1.pdf"; static final String MULTIPAGE_VENDOR_INVOICE_PDF = "multipage_vendor_invoice.pdf"; static final String BAD_ARGUMENT_CODE = "BadArgument"; static final String INVALID_IMAGE_ERROR_CODE = "InvalidImage"; static final String INVALID_MODEL_ID_ERROR_CODE = "1001"; static final String MODEL_ID_NOT_FOUND_ERROR_CODE = "1022"; static final String URL_BADLY_FORMATTED_ERROR_CODE = "2001"; static final String UNABLE_TO_READ_FILE_ERROR_CODE = "2005"; static final String HTTPS_EXCEPTION_MESSAGE = "Max retries 3 times exceeded. 
Error Details: Key credentials require HTTPS to prevent leaking the key."; static final String INVALID_UUID_EXCEPTION_MESSAGE = "Invalid UUID string: "; static final String MODEL_ID_IS_REQUIRED_EXCEPTION_MESSAGE = "'modelId' is required and cannot be null."; static final String INVALID_ENDPOINT = "https: static final String LOCAL_FILE_PATH = "src/test/resources/sample_files/Test/"; static final String ENCODED_EMPTY_SPACE = "{\"source\":\"https: static final List<String> BUSINESS_CARD_FIELDS = Arrays.asList("ContactNames", "JobTitles", "Departments", "Emails", "Websites", "MobilePhones", "OtherPhones", "Faxes", "Addresses", "CompanyNames"); static final List<String> RECEIPT_FIELDS = Arrays.asList("MerchantName", "MerchantPhoneNumber", "MerchantAddress", "Total", "Subtotal", "Tax", "TransactionDate", "TransactionDate", "TransactionTime", "Items"); static final List<String> INVOICE_FIELDS = Arrays.asList("CustomerAddressRecipient", "InvoiceId", "VendorName", "VendorAddress", "CustomerAddress", "CustomerName", "InvoiceTotal", "DueDate", "InvoiceDate"); enum PrebuiltType { RECEIPT, BUSINESS_CARD, INVOICE } Duration durationTestMode; public static final String INVOICE_TEST_URL = "https: + "feature/formrecognizer_v2.1-preview2/sdk/formrecognizer/azure-ai-formrecognizer/src/test/resources/" + "sample_files/Test/Invoice_1.pdf"; /** * Use duration of nearly zero value for PLAYBACK test mode, otherwise, use default duration value for LIVE mode. */ @Override protected void beforeTest() { if (interceptorManager.isPlaybackMode()) { durationTestMode = ONE_NANO_DURATION; } else { durationTestMode = DEFAULT_POLL_INTERVAL; } } FormRecognizerClientBuilder getFormRecognizerClientBuilder(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { FormRecognizerClientBuilder builder = new FormRecognizerClientBuilder() .endpoint(getEndpoint()) .httpClient(httpClient == null ? 
interceptorManager.getPlaybackClient() : httpClient) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(serviceVersion) .addPolicy(interceptorManager.getRecordPolicy()); if (getTestMode() == TestMode.PLAYBACK) { builder.credential(new AzureKeyCredential(INVALID_KEY)); } else { builder.credential(new AzureKeyCredential(Configuration.getGlobalConfiguration().get(AZURE_FORM_RECOGNIZER_API_KEY))); } return builder; } FormTrainingClientBuilder getFormTrainingClientBuilder(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { FormTrainingClientBuilder builder = new FormTrainingClientBuilder() .endpoint(getEndpoint()) .httpClient(httpClient == null ? interceptorManager.getPlaybackClient() : httpClient) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(serviceVersion) .addPolicy(interceptorManager.getRecordPolicy()); if (getTestMode() == TestMode.PLAYBACK) { builder.credential(new AzureKeyCredential(INVALID_KEY)); } else { builder.credential(new AzureKeyCredential(Configuration.getGlobalConfiguration().get(AZURE_FORM_RECOGNIZER_API_KEY))); } return builder; } private static void validateReferenceElementsData(List<String> expectedElements, List<FormElement> actualFormElementList, List<ReadResult> readResults) { if (expectedElements != null && actualFormElementList != null) { assertEquals(expectedElements.size(), actualFormElementList.size()); for (int i = 0; i < actualFormElementList.size(); i++) { String[] indices = NON_DIGIT_PATTERN.matcher(expectedElements.get(i)).replaceAll(" ").trim().split(" "); if (indices.length < 2) { return; } int readResultIndex = Integer.parseInt(indices[0]); int lineIndex = Integer.parseInt(indices[1]); if (indices.length == 3) { int wordIndex = Integer.parseInt(indices[2]); TextWord expectedTextWord = readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex); TextLine expectedTextLine = 
readResults.get(readResultIndex).getLines().get(lineIndex); if (actualFormElementList.get(i) instanceof FormLine) { FormLine actualFormLine = (FormLine) actualFormElementList.get(i); validateFormWordData(expectedTextLine.getWords(), actualFormLine.getWords()); } FormWord actualFormWord = (FormWord) actualFormElementList.get(i); assertEquals(expectedTextWord.getText(), actualFormWord.getText()); if (expectedTextWord.getConfidence() != null) { assertEquals(expectedTextWord.getConfidence(), actualFormWord.getConfidence()); } else { assertEquals(1.0f, actualFormWord.getConfidence()); } validateBoundingBoxData(expectedTextWord.getBoundingBox(), actualFormWord.getBoundingBox()); } } } } private static void validateFormTableData(List<DataTable> expectedFormTables, List<FormTable> actualFormTable, List<ReadResult> readResults, boolean includeFieldElements, int pageNumber) { assertEquals(expectedFormTables.size(), actualFormTable.size()); for (int i = 0; i < actualFormTable.size(); i++) { DataTable expectedTable = expectedFormTables.get(i); FormTable actualTable = actualFormTable.get(i); assertEquals(pageNumber, actualTable.getPageNumber()); assertEquals(expectedTable.getColumns(), actualTable.getColumnCount()); validateCellData(expectedTable.getCells(), actualTable.getCells(), readResults, includeFieldElements); assertEquals(expectedTable.getRows(), actualTable.getRowCount()); validateBoundingBoxData(expectedTable.getBoundingBox(), actualTable.getFieldBoundingBox()); } } private static void validateCellData(List<DataTableCell> expectedTableCells, List<FormTableCell> actualTableCellList, List<ReadResult> readResults, boolean includeFieldElements) { assertEquals(expectedTableCells.size(), actualTableCellList.size()); for (int i = 0; i < actualTableCellList.size(); i++) { DataTableCell expectedTableCell = expectedTableCells.get(i); FormTableCell actualTableCell = actualTableCellList.get(i); assertEquals(expectedTableCell.getColumnIndex(), actualTableCell.getColumnIndex()); if 
(expectedTableCell.getColumnSpan() != null) { assertEquals(expectedTableCell.getColumnSpan(), actualTableCell.getColumnSpan()); } assertNotNull(actualTableCell.getColumnSpan()); assertEquals(expectedTableCell.getRowIndex(), actualTableCell.getRowIndex()); if (expectedTableCell.getRowSpan() != null) { assertEquals(expectedTableCell.getRowSpan(), actualTableCell.getRowSpan()); } assertNotNull(actualTableCell.getRowSpan()); validateBoundingBoxData(expectedTableCell.getBoundingBox(), actualTableCell.getBoundingBox()); if (includeFieldElements) { validateReferenceElementsData(expectedTableCell.getElements(), actualTableCell.getFieldElements(), readResults); } } } private static void validateFormLineData(List<TextLine> expectedLines, List<FormLine> actualLineList) { assertEquals(expectedLines.size(), actualLineList.size()); for (int i = 0; i < actualLineList.size(); i++) { TextLine expectedLine = expectedLines.get(i); FormLine actualLine = actualLineList.get(i); assertEquals(expectedLine.getText(), actualLine.getText()); validateBoundingBoxData(expectedLine.getBoundingBox(), actualLine.getBoundingBox()); validateFormWordData(expectedLine.getWords(), actualLine.getWords()); } } private static void validateFormSelectionMarkData(List<SelectionMark> expectedMarks, List<FormSelectionMark> actualMarks, int pageNumber) { for (int i = 0; i < actualMarks.size(); i++) { final SelectionMark expectedMark = expectedMarks.get(i); final FormSelectionMark actualMark = actualMarks.get(i); assertEquals(expectedMark.getState().toString(), actualMark.getState().toString()); validateBoundingBoxData(expectedMark.getBoundingBox(), actualMark.getBoundingBox()); assertNull(actualMark.getText()); assertEquals(pageNumber, actualMark.getPageNumber()); } } private static void validateFormWordData(List<TextWord> expectedFormWords, List<FormWord> actualFormWordList) { assertEquals(expectedFormWords.size(), actualFormWordList.size()); for (int i = 0; i < actualFormWordList.size(); i++) { TextWord 
expectedWord = expectedFormWords.get(i); FormWord actualWord = actualFormWordList.get(i); assertEquals(expectedWord.getText(), actualWord.getText()); validateBoundingBoxData(expectedWord.getBoundingBox(), actualWord.getBoundingBox()); if (expectedWord.getConfidence() != null) { assertEquals(expectedWord.getConfidence(), actualWord.getConfidence()); } else { assertEquals(1.0f, actualWord.getConfidence()); } } } private static void validateBoundingBoxData(List<Float> expectedBoundingBox, FieldBoundingBox actualFieldBoundingBox) { if (actualFieldBoundingBox != null && actualFieldBoundingBox.getPoints() != null) { int i = 0; for (Point point : actualFieldBoundingBox.getPoints()) { assertEquals(expectedBoundingBox.get(i), point.getX()); assertEquals(expectedBoundingBox.get(++i), point.getY()); i++; } } } @SuppressWarnings("unchecked") private static void validateFieldValueTransforms(FieldValue expectedFieldValue, FormField actualFormField, List<ReadResult> readResults, boolean includeFieldElements) { if (expectedFieldValue != null) { if (expectedFieldValue.getBoundingBox() != null) { validateBoundingBoxData(expectedFieldValue.getBoundingBox(), actualFormField.getValueData().getBoundingBox()); } if (includeFieldElements && expectedFieldValue.getElements() != null) { validateReferenceElementsData(expectedFieldValue.getElements(), actualFormField.getValueData().getFieldElements(), readResults); } switch (expectedFieldValue.getType()) { case NUMBER: if (expectedFieldValue.getValueNumber() != null) { assertEquals(expectedFieldValue.getValueNumber(), actualFormField.getValue().asFloat()); } break; case DATE: assertEquals(expectedFieldValue.getValueDate(), actualFormField.getValue().asDate()); break; case TIME: assertEquals(LocalTime.parse(expectedFieldValue.getValueTime(), DateTimeFormatter.ofPattern("HH:mm:ss")), actualFormField.getValue().asTime()); break; case STRING: if (actualFormField.getName() != "ReceiptType") { assertEquals(expectedFieldValue.getValueString(), 
actualFormField.getValue().asString()); } break; case INTEGER: assertEquals(expectedFieldValue.getValueInteger(), actualFormField.getValue().asLong()); break; case PHONE_NUMBER: assertEquals(expectedFieldValue.getValuePhoneNumber(), actualFormField.getValue().asPhoneNumber()); break; case OBJECT: expectedFieldValue.getValueObject().forEach((key, formField) -> { FormField actualFormFieldValue = actualFormField.getValue().asMap().get(key); validateFieldValueTransforms(formField, actualFormFieldValue, readResults, includeFieldElements); }); break; case ARRAY: assertEquals(expectedFieldValue.getValueArray().size(), actualFormField.getValue().asList().size()); for (int i = 0; i < expectedFieldValue.getValueArray().size(); i++) { FieldValue expectedReceiptItem = expectedFieldValue.getValueArray().get(i); FormField actualReceiptItem = actualFormField.getValue().asList().get(i); validateFieldValueTransforms(expectedReceiptItem, actualReceiptItem, readResults, includeFieldElements); } break; default: assertFalse(false, "Field type not supported."); } } } private static void validatePageRangeData(int expectedPageInfo, FormPageRange actualPageInfo) { assertEquals(expectedPageInfo, actualPageInfo.getFirstPageNumber()); assertEquals(expectedPageInfo, actualPageInfo.getLastPageNumber()); } @Test abstract void recognizeReceiptData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptDataNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptDataWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptDataWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptDataWithBlankPdf(HttpClient httpClient, 
FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptFromDataMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptFromUrlIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptSourceUrlWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeReceiptFromUrlMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContent(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentResultWithNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentResultWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentResultWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentFromDataMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentWithSelectionMarks(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentFromUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentFromUrlWithPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeContentFromUrlMultiPage(HttpClient httpClient, FormRecognizerServiceVersion 
serviceVersion); @Test abstract void recognizeContentWithSelectionMarksFromUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithJpgContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithBlankPdfContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataExcludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithNullFormData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithNullModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithEmptyModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormInvalidStatus(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormMultiPageLabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormLabeledDataWithSelectionMark(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUnlabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUnlabeledDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormMultiPageUnlabeled(HttpClient httpClient, 
FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUnlabeledDataWithJpgContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUnlabeledDataWithBlankPdfContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlUnlabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlUnlabeledDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlMultiPageUnlabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormFromUrlLabeledDataWithNullModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormFromUrlLabeledDataWithEmptyModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlLabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlLabeledDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlMultiPageLabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeCustomFormUrlLabeledDataWithSelectionMark(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardDataNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardDataWithContentTypeAutoDetection(HttpClient 
httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardDataWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardDataWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardFromDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardFromUrlIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); @Test abstract void recognizeBusinessCardSourceUrlWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion); void validateContentResultData(List<FormPage> actualFormPageList, boolean includeFieldElements) { AnalyzeResult analyzeResult = getAnalyzeRawResponse().getAnalyzeResult(); final List<PageResult> pageResults = analyzeResult.getPageResults(); final List<ReadResult> readResults = analyzeResult.getReadResults(); for (int i = 0; i < actualFormPageList.size(); i++) { FormPage actualFormPage = actualFormPageList.get(i); ReadResult readResult = readResults.get(i); if (readResult.getAngle() > 180) { assertEquals(readResult.getAngle() - 360, actualFormPage.getTextAngle()); } else { assertEquals(readResult.getAngle(), actualFormPage.getTextAngle()); } assertEquals(readResult.getWidth(), actualFormPage.getWidth()); assertEquals(readResult.getHeight(), 
actualFormPage.getHeight()); assertEquals(readResult.getUnit().toString(), actualFormPage.getUnit().toString()); assertEquals(readResult.getPage(), actualFormPage.getPageNumber()); if (includeFieldElements) { validateFormLineData(readResult.getLines(), actualFormPage.getLines()); } validateFormSelectionMarkData(readResult.getSelectionMarks(), actualFormPage.getSelectionMarks(), readResult.getPage()); if (pageResults != null) { validateFormTableData(pageResults.get(i).getTables(), actualFormPage.getTables(), readResults, includeFieldElements, pageResults.get(i).getPage()); } } } void validateBlankPdfResultData(List<RecognizedForm> actualReceiptList) { assertEquals(1, actualReceiptList.size()); final RecognizedForm actualReceipt = actualReceiptList.get(0); assertTrue(actualReceipt.getFields().isEmpty()); } void validateRecognizedResult(List<RecognizedForm> actualFormList, boolean includeFieldElements, boolean isLabeled) { final AnalyzeResult rawResponse = getAnalyzeRawResponse().getAnalyzeResult(); List<ReadResult> readResults = rawResponse.getReadResults(); List<PageResult> pageResults = rawResponse.getPageResults(); List<DocumentResult> documentResults = rawResponse.getDocumentResults(); for (int i = 0; i < actualFormList.size(); i++) { validateContentResultData(actualFormList.get(i).getPages(), includeFieldElements); if (isLabeled) { validateLabeledData(actualFormList.get(i), includeFieldElements, readResults, documentResults.get(i)); } else { validateUnLabeledResult(actualFormList.get(i), includeFieldElements, readResults, pageResults.get(i)); } } } void validatePrebuiltResultData(List<RecognizedForm> actualPrebuiltRecognizedForms, boolean includeFieldElements, PrebuiltType prebuiltType) { final AnalyzeResult rawResponse = getAnalyzeRawResponse().getAnalyzeResult(); final List<ReadResult> rawReadResults = rawResponse.getReadResults(); for (int i = 0; i < actualPrebuiltRecognizedForms.size(); i++) { final RecognizedForm actualForm = 
actualPrebuiltRecognizedForms.get(i); final DocumentResult rawDocumentResult = rawResponse.getDocumentResults().get(i); validateLabeledData(actualForm, includeFieldElements, rawReadResults, rawDocumentResult); if (BUSINESS_CARD.equals(prebuiltType)) { assertEquals("prebuilt:businesscard", actualForm.getFormType()); BUSINESS_CARD_FIELDS.forEach(businessCardField -> validateFieldValueTransforms(rawDocumentResult.getFields().get(businessCardField), actualForm.getFields().get(businessCardField), rawReadResults, includeFieldElements)); } else if (RECEIPT.equals(prebuiltType)) { assertEquals("prebuilt:receipt", actualForm.getFormType()); RECEIPT_FIELDS.forEach(receiptField -> { final Map<String, FormField> actualRecognizedReceiptFields = actualForm.getFields(); Map<String, FieldValue> expectedReceiptFields = rawDocumentResult.getFields(); assertEquals(expectedReceiptFields.get("ReceiptType").getValueString(), actualRecognizedReceiptFields.get("ReceiptType").getValue().asString()); assertEquals(expectedReceiptFields.get("ReceiptType").getConfidence(), actualRecognizedReceiptFields.get("ReceiptType").getConfidence()); validateFieldValueTransforms(rawDocumentResult.getFields().get(receiptField), actualRecognizedReceiptFields.get(receiptField), rawReadResults, includeFieldElements); }); } else if (INVOICE.equals(prebuiltType)) { assertEquals("prebuilt:invoice", actualForm.getFormType()); INVOICE_FIELDS.forEach(invoiceField -> { final Map<String, FormField> actualRecognizedInvoiceFields = actualForm.getFields(); Map<String, FieldValue> expectedInvoiceFields = rawDocumentResult.getFields(); validateFieldValueTransforms(expectedInvoiceFields.get(invoiceField), actualRecognizedInvoiceFields.get(invoiceField), rawReadResults, includeFieldElements); }); } else { throw new RuntimeException("prebuilt type not supported"); } } } void invalidSourceUrlRunner(Consumer<String> testRunner) { testRunner.accept(TestUtils.INVALID_RECEIPT_URL); } void 
encodedBlankSpaceSourceUrlRunner(Consumer<String> testRunner) { testRunner.accept(FAKE_ENCODED_EMPTY_SPACE_URL); } void urlRunner(Consumer<String> testRunner, String fileName) { testRunner.accept(URL_TEST_FILE_FORMAT + fileName); } void testingContainerUrlRunner(Consumer<String> testRunner, String fileName) { testRunner.accept(getStorageTestingFileUrl(fileName)); } void dataRunner(BiConsumer<InputStream, Long> testRunner, String fileName) { final long fileLength = new File(LOCAL_FILE_PATH + fileName).length(); if (interceptorManager.isPlaybackMode()) { testRunner.accept(new ByteArrayInputStream(TEST_DATA_PNG.getBytes(StandardCharsets.UTF_8)), fileLength); } else { try { testRunner.accept(new FileInputStream(LOCAL_FILE_PATH + fileName), fileLength); } catch (FileNotFoundException e) { throw new RuntimeException("Local file not found.", e); } } } void localFilePathRunner(BiConsumer<String, Long> testRunner, String fileName) { final long fileLength = new File(LOCAL_FILE_PATH + fileName).length(); testRunner.accept(LOCAL_FILE_PATH + fileName, fileLength); } void damagedPdfDataRunner(BiConsumer<InputStream, Integer> testRunner) { testRunner.accept(new ByteArrayInputStream(new byte[]{0x25, 0x50, 0x44, 0x46, 0x55, 0x55, 0x55}), 7); } void beginTrainingUnlabeledRunner(BiConsumer<String, Boolean> testRunner) { testRunner.accept(getTrainingSasUri(), false); } void beginTrainingLabeledRunner(BiConsumer<String, Boolean> testRunner) { testRunner.accept(getTrainingSasUri(), true); } void beginSelectionMarkTrainingLabeledRunner(BiConsumer<String, Boolean> testRunner) { testRunner.accept(getSelectionMarkTrainingSasUri(), true); } void beginTrainingMultipageRunner(Consumer<String> testRunner) { testRunner.accept(getMultipageTrainingSasUri()); } private void validateUnLabeledResult(RecognizedForm actualForm, boolean includeFieldElements, List<ReadResult> readResults, PageResult expectedPage) { validatePageRangeData(expectedPage.getPage(), actualForm.getPageRange()); int i = 0; for 
(Map.Entry<String, FormField> entry : actualForm.getFields().entrySet()) { FormField actualFormField = entry.getValue(); final KeyValuePair expectedFormField = expectedPage.getKeyValuePairs().get(i++); assertEquals(expectedFormField.getConfidence(), actualFormField.getConfidence()); assertEquals(expectedFormField.getKey().getText(), actualFormField.getLabelData().getText()); validateBoundingBoxData(expectedFormField.getKey().getBoundingBox(), actualFormField.getLabelData().getBoundingBox()); if (includeFieldElements) { validateReferenceElementsData(expectedFormField.getKey().getElements(), actualFormField.getLabelData().getFieldElements(), readResults); validateReferenceElementsData(expectedFormField.getValue().getElements(), actualFormField.getValueData().getFieldElements(), readResults); } assertEquals(expectedFormField.getValue().getText(), actualFormField.getValueData().getText()); validateBoundingBoxData(expectedFormField.getValue().getBoundingBox(), actualFormField.getValueData().getBoundingBox()); } } private void validateLabeledData(RecognizedForm actualForm, boolean includeFieldElements, List<ReadResult> readResults, DocumentResult documentResult) { assertEquals(documentResult.getPageRange().get(0), actualForm.getPageRange().getFirstPageNumber()); assertEquals(documentResult.getPageRange().get(1), actualForm.getPageRange().getLastPageNumber()); assertEquals(documentResult.getFields().keySet(), actualForm.getFields().keySet()); documentResult.getFields().forEach((label, expectedFieldValue) -> { final FormField actualFormField = actualForm.getFields().get(label); assertEquals(label, actualFormField.getName()); if (expectedFieldValue != null) { if (expectedFieldValue.getConfidence() != null) { assertEquals(expectedFieldValue.getConfidence(), actualFormField.getConfidence()); } else { assertEquals(1.0f, actualFormField.getConfidence()); } validateFieldValueTransforms(expectedFieldValue, actualFormField, readResults, includeFieldElements); } }); } static void 
validateMultiPageDataLabeled(List<RecognizedForm> actualRecognizedFormsList) { actualRecognizedFormsList.forEach(recognizedForm -> { assertEquals(1, recognizedForm.getPageRange().getFirstPageNumber()); assertEquals(3, recognizedForm.getPageRange().getLastPageNumber()); assertEquals(3, recognizedForm.getPages().size()); recognizedForm.getFields().forEach((label, formField) -> { assertNotNull(formField.getName()); assertNotNull(formField.getValue()); assertNotNull(formField.getValueData().getText()); assertNull(formField.getLabelData()); }); }); } static void validateMultiPageDataUnlabeled(List<RecognizedForm> actualRecognizedFormsList) { actualRecognizedFormsList.forEach(recognizedForm -> { assertNotNull(recognizedForm.getFormType()); assertEquals(1, (long) recognizedForm.getPages().size()); recognizedForm.getFields().forEach((label, formField) -> { assertNotNull(formField.getName()); assertNotNull(formField.getValue()); assertNotNull(formField.getValueData().getText()); assertNotNull(formField.getLabelData().getText()); }); }); } static void validateMultipageBusinessData(List<RecognizedForm> recognizedBusinessCards) { assertEquals(2, recognizedBusinessCards.size()); RecognizedForm businessCard1 = recognizedBusinessCards.get(0); RecognizedForm businessCard2 = recognizedBusinessCards.get(1); assertEquals(1, businessCard1.getPageRange().getFirstPageNumber()); assertEquals(1, businessCard1.getPageRange().getLastPageNumber()); Map<String, FormField> businessCard1Fields = businessCard1.getFields(); List<FormField> emailList = businessCard1Fields.get("Emails").getValue().asList(); assertEquals("johnsinger@contoso.com", emailList.get(0).getValue().asString()); List<FormField> phoneNumberList = businessCard1Fields.get("OtherPhones").getValue().asList(); assertEquals("+14257793479", phoneNumberList.get(0).getValue().asPhoneNumber()); assertNotNull(businessCard1.getPages()); FormField contactNameField = businessCard1Fields.get("ContactNames").getValue().asList().get(0); 
assertEquals(contactNameField.getValueData().getPageNumber(), 1); assertEquals(contactNameField.getValueData().getText(), "JOHN SINGER"); assertEquals(2, businessCard2.getPageRange().getFirstPageNumber()); assertEquals(2, businessCard2.getPageRange().getLastPageNumber()); Map<String, FormField> businessCard2Fields = businessCard2.getFields(); List<FormField> email2List = businessCard2Fields.get("Emails").getValue().asList(); assertEquals("avery.smith@contoso.com", email2List.get(0).getValue().asString()); List<FormField> phoneNumber2List = businessCard2Fields.get("OtherPhones").getValue().asList(); assertEquals("+44 (0) 20 9876 5432", phoneNumber2List.get(0).getValueData().getText()); assertNotNull(businessCard2.getPages()); FormField contactName2Field = businessCard2Fields.get("ContactNames").getValue().asList().get(0); assertEquals(contactName2Field.getValueData().getPageNumber(), 2); assertEquals(contactName2Field.getValueData().getText(), "Dr. Avery Smith"); } static void validateMultipageInvoiceData(List<RecognizedForm> recognizedInvoices) { assertEquals(1, recognizedInvoices.size()); RecognizedForm recognizedForm = recognizedInvoices.get(0); assertEquals(1, recognizedForm.getPageRange().getFirstPageNumber()); assertEquals(2, recognizedForm.getPageRange().getLastPageNumber()); Map<String, FormField> recognizedInvoiceFields = recognizedForm.getFields(); final FormField remittanceAddressRecipient = recognizedInvoiceFields.get("RemittanceAddressRecipient"); assertEquals("Contoso Ltd.", remittanceAddressRecipient.getValue().asString()); assertEquals(1, remittanceAddressRecipient.getValueData().getPageNumber()); final FormField remittanceAddress = recognizedInvoiceFields.get("RemittanceAddress"); assertEquals("2345 Dogwood Lane Birch, Kansas 98123", remittanceAddress.getValue().asString()); assertEquals(1, remittanceAddress.getValueData().getPageNumber()); final FormField vendorName = recognizedInvoiceFields.get("VendorName"); assertEquals("Southridge Video", 
vendorName.getValue().asString()); assertEquals(2, vendorName.getValueData().getPageNumber()); assertEquals(2, recognizedForm.getPages().size()); } protected String getEndpoint() { return interceptorManager.isPlaybackMode() ? "https: : Configuration.getGlobalConfiguration().get(AZURE_FORM_RECOGNIZER_ENDPOINT); } /** * Get the training data set SAS Url value based on the test running mode. * * @return the training data set Url */ private String getTrainingSasUri() { if (interceptorManager.isPlaybackMode()) { return "https: } else { return Configuration.getGlobalConfiguration().get(FORM_RECOGNIZER_TRAINING_BLOB_CONTAINER_SAS_URL); } } private String getSelectionMarkTrainingSasUri() { if (interceptorManager.isPlaybackMode()) { return "https: } else { return Configuration.getGlobalConfiguration().get(FORM_RECOGNIZER_SELECTION_MARK_BLOB_CONTAINER_SAS_URL); } } /** * Get the training data set SAS Url value based on the test running mode. * * @return the training data set Url */ private String getMultipageTrainingSasUri() { if (interceptorManager.isPlaybackMode()) { return "https: } else { return Configuration.getGlobalConfiguration() .get(FORM_RECOGNIZER_MULTIPAGE_TRAINING_BLOB_CONTAINER_SAS_URL); } } /** * Get the testing data set SAS Url value based on the test running mode. * * @return the testing data set Url */ private String getTestingSasUri() { if (interceptorManager.isPlaybackMode()) { return "https: } else { return Configuration.getGlobalConfiguration().get("FORM_RECOGNIZER_TESTING_BLOB_CONTAINER_SAS_URL"); } } /** * Prepare the file url from the testing data set SAS Url value. * * @return the testing data specific file Url */ private String getStorageTestingFileUrl(String fileName) { if (interceptorManager.isPlaybackMode()) { return "https: } else { final String[] urlParts = getTestingSasUri().split("\\?"); return urlParts[0] + "/" + fileName + "?" + urlParts[1]; } } /** * Prepare the expected test data from service raw response. 
* * @return the {@code AnalyzeOperationResult} test data */ private AnalyzeOperationResult getAnalyzeRawResponse() { final SerializerAdapter serializerAdapter = getSerializerAdapter(); final NetworkCallRecord networkCallRecord = interceptorManager.getRecordedData().findFirstAndRemoveNetworkCall(record -> { AnalyzeOperationResult rawModelResponse = deserializeRawResponse(serializerAdapter, record, AnalyzeOperationResult.class); return rawModelResponse != null && rawModelResponse.getStatus() == OperationStatus.SUCCEEDED; }); interceptorManager.getRecordedData().addNetworkCall(networkCallRecord); return deserializeRawResponse(serializerAdapter, networkCallRecord, AnalyzeOperationResult.class); } void validateNetworkCallRecord(String requestParam, String value) { final NetworkCallRecord networkCallRecord1 = interceptorManager.getRecordedData().findFirstAndRemoveNetworkCall(networkCallRecord -> { URL url = null; try { url = new URL(networkCallRecord.getUri()); } catch (MalformedURLException e) { assertFalse(false, e.getMessage()); } if (url.getQuery() != null) { String[] params = url.getQuery().split("&"); for (String param : params) { String name = param.split("=")[0]; String queryValue = param.split("=")[1]; if (name.equals(requestParam) && value.equals(queryValue)) { return true; } } return false; } return false; }); assertNotNull(networkCallRecord1); interceptorManager.getRecordedData().addNetworkCall(networkCallRecord1); } }
This should be in a try with resources block, or better yet just use Files.write()
public static void writeFile(OutputTargetBuildItem target, String name, String output) throws IOException { FileOutputStream os = new FileOutputStream(target.getOutputDirectory().resolve(name).toFile()); os.write(output.getBytes(StandardCharsets.UTF_8)); os.close(); }
FileOutputStream os = new FileOutputStream(target.getOutputDirectory().resolve(name).toFile());
public static void writeFile(OutputTargetBuildItem target, String name, String output) throws IOException { Path artifact = target.getOutputDirectory().resolve(name); String targetUri = target.getOutputDirectory().resolve("function.zip").toUri().toString().replace("file:", "fileb:"); output = output.replace("${artifactId}", target.getBaseName()) .replace("${buildDir}", target.getOutputDirectory().toString()) .replace("${targetUri}", targetUri); Files.write(artifact, output.getBytes(StandardCharsets.UTF_8)); }
class LambdaUtil { /** * Strips period, dash, and numbers. Turns characters after to uppercase. i.e. * Also strips "-SNAPSHOT" from end of name. * * "foo.bar-1.0-SNAPSHOT" to "FooBar" * * @param basename * @return */ public static String artifactToLambda(String basename) { if (basename.endsWith("-SNAPSHOT")) basename = basename.substring(0, basename.length() - "-SNAPSHOT".length()); String name = convertToken(basename, "[.0-9-]"); return name.trim(); } protected static String convertToken(String basename, String token) { String[] splits = basename.split(token); if (splits == null || splits.length == 0) return basename; String name = ""; for (String split : splits) { split = split.trim(); if (split.isEmpty()) continue; name = name + split.substring(0, 1).toUpperCase() + split.substring(1).toLowerCase(); } return name; } public static String copyResource(String resource) throws Exception { InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(resource); ByteArrayOutputStream buffer = new ByteArrayOutputStream(); int nRead; byte[] data = new byte[1024]; while ((nRead = inputStream.read(data, 0, data.length)) != -1) { buffer.write(data, 0, nRead); } buffer.flush(); byte[] byteArray = buffer.toByteArray(); return new String(byteArray, StandardCharsets.UTF_8); } }
class LambdaUtil { /** * Strips period, dash, and numbers. Turns characters after to uppercase. i.e. * Also strips "-SNAPSHOT" from end of name. * * "foo.bar-1.0-SNAPSHOT" to "FooBar" * * @param basename * @return */ public static String artifactToLambda(String basename) { if (basename.endsWith("-SNAPSHOT")) basename = basename.substring(0, basename.length() - "-SNAPSHOT".length()); String name = convertToken(basename, "[^a-zA-Z]"); return name.trim(); } protected static String convertToken(String basename, String token) { String[] splits = basename.split(token); if (splits == null || splits.length == 0) return basename; String name = ""; for (String split : splits) { split = split.trim(); if (split.isEmpty()) continue; name = name + split.substring(0, 1).toUpperCase() + split.substring(1).toLowerCase(); } return name; } public static String copyResource(String resource) throws Exception { try (InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(resource)) { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); int nRead; byte[] data = new byte[1024]; while ((nRead = inputStream.read(data, 0, data.length)) != -1) { buffer.write(data, 0, nRead); } buffer.flush(); byte[] byteArray = buffer.toByteArray(); return new String(byteArray, StandardCharsets.UTF_8); } } }
The cache generation is skipped for the pulled packages because platform dependencies are found in them and not for the platform dependencies. So does this change properly give that meaning?
static boolean pullDependencyPackages(String orgName, String packageName, String version) { Path ballerinaUserHomeDirPath = ProjectUtils.createAndGetHomeReposPath(); Path centralRepositoryDirPath = ballerinaUserHomeDirPath.resolve(ProjectConstants.REPOSITORIES_DIR) .resolve(ProjectConstants.CENTRAL_REPOSITORY_CACHE_NAME); Path balaDirPath = centralRepositoryDirPath.resolve(ProjectConstants.BALA_DIR_NAME); Path balaPath = ProjectUtils.getPackagePath(balaDirPath, orgName, packageName, version); String ballerinaShortVersion = RepoUtils.getBallerinaShortVersion(); Path cacheDir = centralRepositoryDirPath.resolve( ProjectConstants.CACHES_DIR_NAME + "-" + ballerinaShortVersion); ProjectEnvironmentBuilder defaultBuilder = ProjectEnvironmentBuilder.getDefaultBuilder(); defaultBuilder.addCompilationCacheFactory(new FileSystemCache.FileSystemCacheFactory(cacheDir)); BalaProject balaProject = BalaProject.loadProject(defaultBuilder, balaPath); Path packageCacheDir = cacheDir.resolve(orgName).resolve(packageName).resolve(version); if (packageCacheDir.toFile().exists()) { deleteDirectory(packageCacheDir); } PackageCompilation packageCompilation = balaProject.currentPackage().getCompilation(); Collection<Diagnostic> resolutionDiagnostics = packageCompilation.getResolution() .diagnosticResult().diagnostics(); if (!resolutionDiagnostics.isEmpty()) { printDiagnostics(resolutionDiagnostics); } if (packageCompilation.getResolution().diagnosticResult().hasErrors()) { return true; } if (!hasProvidedPlatformDeps(packageCompilation)) { JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(packageCompilation, JvmTarget.JAVA_17); Collection<Diagnostic> backendDiagnostics = jBallerinaBackend.diagnosticResult().diagnostics(false); if (!backendDiagnostics.isEmpty()) { printDiagnostics(backendDiagnostics); } return jBallerinaBackend.diagnosticResult().hasErrors(); } errStream.println("Warning: Cache generation skipped due to platform dependencies with 'provided' scope"); return false; }
errStream.println("Warning: Cache generation skipped due to platform dependencies with 'provided' scope");
static boolean pullDependencyPackages(String orgName, String packageName, String version) { Path ballerinaUserHomeDirPath = ProjectUtils.createAndGetHomeReposPath(); Path centralRepositoryDirPath = ballerinaUserHomeDirPath.resolve(ProjectConstants.REPOSITORIES_DIR) .resolve(ProjectConstants.CENTRAL_REPOSITORY_CACHE_NAME); Path balaDirPath = centralRepositoryDirPath.resolve(ProjectConstants.BALA_DIR_NAME); Path balaPath = ProjectUtils.getPackagePath(balaDirPath, orgName, packageName, version); String ballerinaShortVersion = RepoUtils.getBallerinaShortVersion(); Path cacheDir = centralRepositoryDirPath.resolve( ProjectConstants.CACHES_DIR_NAME + "-" + ballerinaShortVersion); ProjectEnvironmentBuilder defaultBuilder = ProjectEnvironmentBuilder.getDefaultBuilder(); defaultBuilder.addCompilationCacheFactory(new FileSystemCache.FileSystemCacheFactory(cacheDir)); BalaProject balaProject = BalaProject.loadProject(defaultBuilder, balaPath); Path packageCacheDir = cacheDir.resolve(orgName).resolve(packageName).resolve(version); if (packageCacheDir.toFile().exists()) { deleteDirectory(packageCacheDir); } PackageCompilation packageCompilation = balaProject.currentPackage().getCompilation(); Collection<Diagnostic> resolutionDiagnostics = packageCompilation.getResolution() .diagnosticResult().diagnostics(); if (!resolutionDiagnostics.isEmpty()) { printDiagnostics(resolutionDiagnostics); } if (packageCompilation.getResolution().diagnosticResult().hasErrors()) { return true; } if (!hasProvidedPlatformDeps(packageCompilation)) { JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(packageCompilation, JvmTarget.JAVA_17); Collection<Diagnostic> backendDiagnostics = jBallerinaBackend.diagnosticResult().diagnostics(false); if (!backendDiagnostics.isEmpty()) { printDiagnostics(backendDiagnostics); } return jBallerinaBackend.diagnosticResult().hasErrors(); } errStream.println("Warning: Cache generation skipped due to platform dependencies with 'provided' scope"); return false; }
class CommandUtil { public static final String ORG_NAME = "ORG_NAME"; public static final String PKG_NAME = "PKG_NAME"; public static final String DIST_VERSION = "DIST_VERSION"; public static final String TOOL_ID = "TOOL_ID"; public static final String USER_HOME = "user.home"; public static final String GITIGNORE = "gitignore"; public static final String DEVCONTAINER = "devcontainer"; public static final String NEW_CMD_DEFAULTS = "new_cmd_defaults"; public static final String CREATE_CMD_TEMPLATES = "create_cmd_templates"; public static final String LIBS_DIR = "libs"; public static final String DEFAULT_TEMPLATE = "default"; public static final String MAIN_TEMPLATE = "main"; public static final String FILE_STRING_SEPARATOR = ", "; private static FileSystem jarFs; private static Map<String, String> env; private static PrintStream errStream; private static PrintStream outStream; private static Path homeCache; private static boolean exitWhenFinish; private static String platform; static void setPrintStream(PrintStream errStream) { CommandUtil.errStream = errStream; } public static void initJarFs() { URI uri = null; try { uri = CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES).toURI(); if (uri.toString().contains("!")) { final String[] array = uri.toString().split("!"); if (null == jarFs) { env = new HashMap<>(); jarFs = FileSystems.newFileSystem(URI.create(array[0]), env); } } } catch (URISyntaxException | IOException e) { throw new AssertionError(); } } /** * Print command errors with a standard format. 
* * @param stream error will be sent to this stream * @param error error message * @param usage usage if any * @param help if the help message should be printed */ public static void printError(PrintStream stream, String error, String usage, boolean help) { stream.println("ballerina: " + error); if (null != usage) { stream.println(); stream.println("USAGE:"); stream.println(" " + usage); } if (help) { stream.println(); stream.println("For more information try --help"); } } /** * Exit with error code 1. * * @param exit Whether to exit or not. */ public static void exitError(boolean exit) { if (exit) { Runtime.getRuntime().exit(1); } } static void applyTemplate(String orgName, String templatePkgName, String version, String packageName, Path projectPath, Path balaCache, List<Path> filesInDir) { Path balaPath = getPlatformSpecificBalaPath(orgName, templatePkgName, version, balaCache); if (!Files.exists(balaPath)) { CommandUtil.printError(errStream, "unable to find the bala: " + balaPath, null, false); CommandUtil.exitError(exitWhenFinish); } try { addModules(balaPath, projectPath, packageName); } catch (IOException e) { ProjectUtils.deleteSelectedFilesInDirectory(projectPath, filesInDir); CommandUtil.printError(errStream, "error occurred while creating the package: " + e.getMessage(), null, false); CommandUtil.exitError(exitWhenFinish); } } private static void addModules(Path balaPath, Path projectPath, String packageName) throws IOException { Gson gson = new Gson(); Path packageJsonPath = balaPath.resolve(PACKAGE_JSON); Path dependencyGraphJsonPath = balaPath.resolve(DEPENDENCY_GRAPH_JSON); Path balToolJsonPath = balaPath.resolve(TOOL_DIR).resolve(ProjectConstants.BAL_TOOL_JSON); PackageJson templatePackageJson = null; DependencyGraphJson templateDependencyGraphJson = null; BalToolJson templateBalToolJson = null; try (InputStream inputStream = new FileInputStream(String.valueOf(packageJsonPath))) { Reader fileReader = new InputStreamReader(inputStream, 
StandardCharsets.UTF_8); templatePackageJson = gson.fromJson(fileReader, PackageJson.class); } catch (IOException e) { printError(errStream, "Error while reading the package json file: " + e.getMessage(), null, false); getRuntime().exit(1); } if (dependencyGraphJsonPath.toFile().exists()) { try (InputStream inputStream = new FileInputStream(String.valueOf(dependencyGraphJsonPath))) { Reader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8); templateDependencyGraphJson = gson.fromJson(fileReader, DependencyGraphJson.class); } catch (IOException e) { printError(errStream, "Error while reading the dependency graph json file: " + e.getMessage(), null, false); getRuntime().exit(1); } } if (balToolJsonPath.toFile().exists()) { try (InputStream inputStream = new FileInputStream(String.valueOf(balToolJsonPath))) { Reader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8); templateBalToolJson = gson.fromJson(fileReader, BalToolJson.class); } catch (IOException e) { printError(errStream, "Error while reading the " + BAL_TOOL_JSON + " file: " + e.getMessage(), null, false); getRuntime().exit(1); } } if (!templatePackageJson.getTemplate()) { throw createLauncherException("unable to create the package: " + "specified package is not a template"); } Path ballerinaToml = projectPath.resolve(ProjectConstants.BALLERINA_TOML); Files.createDirectories(projectPath); Files.createFile(ballerinaToml); writeBallerinaToml(ballerinaToml, templatePackageJson, packageName, platform); if (dependencyGraphJsonPath.toFile().exists()) { Path dependenciesToml = projectPath.resolve(DEPENDENCIES_TOML); Files.createFile(dependenciesToml); writeDependenciesToml(projectPath, templateDependencyGraphJson, templatePackageJson); } if (balToolJsonPath.toFile().exists()) { Path balToolToml = projectPath.resolve(BAL_TOOL_TOML); Files.createFile(balToolToml); writeBalToolToml(balToolToml, templateBalToolJson, packageName); copyToolDependencies(projectPath, 
balaPath.resolve(TOOL_DIR).resolve(LIBS_DIR)); } Path packageMDFilePath = balaPath.resolve("docs") .resolve(ProjectConstants.PACKAGE_MD_FILE_NAME); Path toPackageMdPath = projectPath.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME); if (Files.exists(packageMDFilePath)) { Files.copy(packageMDFilePath, toPackageMdPath, StandardCopyOption.REPLACE_EXISTING); } createDefaultGitignore(projectPath); createDefaultDevContainer(projectPath); String templatePkgName = templatePackageJson.getName(); Path modulesRoot = balaPath.resolve(ProjectConstants.MODULES_ROOT); Path moduleMdDirRoot = balaPath.resolve("docs").resolve(ProjectConstants.MODULES_ROOT); List<Path> modulesList; try (Stream<Path> pathStream = Files.list(modulesRoot)) { modulesList = pathStream.collect(Collectors.toList()); } for (Path moduleRoot : modulesList) { Path moduleDir = Optional.of(moduleRoot.getFileName()).get(); Path destDir; if (moduleDir.toString().equals(templatePkgName)) { destDir = projectPath; } else { String moduleDirName = moduleDir.toString().split(templatePkgName + ProjectConstants.DOT, 2)[1]; destDir = projectPath.resolve(ProjectConstants.MODULES_ROOT).resolve(moduleDirName); Files.createDirectories(destDir); } Files.walkFileTree(moduleRoot, new FileUtils.Copy(moduleRoot, destDir, templatePkgName, packageName)); Path moduleMdSource = moduleMdDirRoot.resolve(moduleDir).resolve(ProjectConstants.MODULE_MD_FILE_NAME); if (Files.exists(moduleMdSource)) { Files.copy(moduleMdSource, destDir.resolve(ProjectConstants.MODULE_MD_FILE_NAME), StandardCopyOption.REPLACE_EXISTING); } } copyIcon(balaPath, projectPath); copyPlatformLibraries(balaPath, projectPath); copyIncludeFiles(balaPath, projectPath, templatePackageJson); } private static void copyIcon(Path balaPath, Path projectPath) { Path docsPath = balaPath.resolve(ProjectConstants.BALA_DOCS_DIR); try (Stream<Path> pathStream = Files.walk(docsPath, 1)) { List<Path> icon = pathStream 
.filter(FileSystems.getDefault().getPathMatcher("glob:**.png")::matches) .collect(Collectors.toList()); if (!icon.isEmpty()) { Path projectDocsDir = projectPath.resolve(ProjectConstants.BALA_DOCS_DIR); Files.createDirectory(projectDocsDir); Path projectIconPath = projectDocsDir.resolve(Optional.of(icon.get(0).getFileName()).get()); Files.copy(icon.get(0), projectIconPath, StandardCopyOption.REPLACE_EXISTING); } } catch (IOException e) { printError(errStream, "Error while retrieving the icon: " + e.getMessage(), null, false); getRuntime().exit(1); } } private static void copyPlatformLibraries(Path balaPath, Path projectPath) throws IOException { Path platformLibPath = balaPath.resolve("platform").resolve(platform); if (Files.exists(platformLibPath)) { Path libs = projectPath.resolve("libs"); Files.createDirectories(libs); Files.walkFileTree(platformLibPath, new FileUtils.Copy(platformLibPath, libs)); } } private static void copyIncludeFiles(Path balaPath, Path projectPath, PackageJson templatePackageJson) throws IOException { if (templatePackageJson.getInclude() != null) { String templatePkgName = templatePackageJson.getName(); List<Path> includePaths = ProjectUtils.getPathsMatchingIncludePatterns( templatePackageJson.getInclude(), balaPath); for (Path includePath : includePaths) { Path moduleNameUpdatedIncludePath = updateModuleDirectoryNaming(includePath, balaPath, templatePkgName); Path fromIncludeFilePath = balaPath.resolve(includePath); Path toIncludeFilePath = projectPath.resolve(moduleNameUpdatedIncludePath); if (Files.notExists(toIncludeFilePath)) { Files.createDirectories(toIncludeFilePath); Files.walkFileTree(fromIncludeFilePath, new FileUtils.Copy(fromIncludeFilePath, toIncludeFilePath)); } } } } private static Path updateModuleDirectoryNaming(Path includePath, Path balaPath, String templatePkgName) { Path modulesDirPath = balaPath.resolve(ProjectConstants.MODULES_ROOT); Path absoluteIncludePath = balaPath.resolve(includePath); if 
(absoluteIncludePath.startsWith(modulesDirPath)) { Path moduleRootPath = modulesDirPath.relativize(absoluteIncludePath).subpath(0, 1); String moduleDirName = Optional.of(moduleRootPath.getFileName()).get().toString(); String destinationDirName = moduleDirName.split(templatePkgName + ProjectConstants.DOT, 2)[1]; Path includePathRelativeToModuleRoot = modulesDirPath.resolve(moduleRootPath) .relativize(absoluteIncludePath); Path updatedIncludePath = Paths.get(ProjectConstants.MODULES_ROOT).resolve(destinationDirName) .resolve(includePathRelativeToModuleRoot); return updatedIncludePath; } return includePath; } /** * Find the bala path for a given template. * * @param template template name */ static Path findBalaTemplate(String template, Path balaCache) { String packageName = findPkgName(template); String orgName = findOrg(template); String version = findPkgVersion(template); if (version != null) { Path balaPath = getPlatformSpecificBalaPath(orgName, packageName, version, balaCache); if (Files.exists(balaPath)) { return balaPath; } else { return null; } } else { return null; } } public static void initPackageFromCentral(Path balaCache, Path projectPath, String packageName, String template, List<Path> filesInDir) { System.setProperty(CentralClientConstants.ENABLE_OUTPUT_STREAM, "true"); String templatePackageName = findPkgName(template); String orgName = findOrg(template); String version = findPkgVersion(template); Path pkgCacheParent = balaCache.resolve(orgName).resolve(templatePackageName); try { pullPackageFromRemote(orgName, templatePackageName, version, pkgCacheParent); } catch (PackageAlreadyExistsException e) { if (version == null) { List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent); PackageVersion latest = findLatest(packageVersions); if (latest == null) { throw createLauncherException("unable to find package in the filesystem cache." 
+ " This is an unexpected error : " + e.getMessage()); } version = latest.toString(); } } catch (CentralClientException e) { errStream.println("Warning: Unable to pull the package from Ballerina Central: " + e.getMessage()); if (findBalaTemplate(template, balaCache) == null) { List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent); PackageVersion latest = findLatest(packageVersions); if (latest == null) { throw createLauncherException("template not found in filesystem cache."); } version = latest.toString(); } } if (version == null) { List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent); PackageVersion latest = findLatest(packageVersions); version = Objects.requireNonNull(latest).toString(); } applyTemplate(orgName, templatePackageName, version, packageName, projectPath, balaCache, filesInDir); } private static void pullPackageFromRemote(String orgName, String packageName, String version, Path destination) throws CentralClientException { String supportedPlatform = Arrays.stream(JvmTarget.values()) .map(target -> target.code()) .collect(Collectors.joining(",")); Settings settings; try { settings = readSettings(); } catch (SettingsTomlException e) { settings = Settings.from(); } CentralAPIClient client = new CentralAPIClient(RepoUtils.getRemoteRepoURL(), initializeProxy(settings.getProxy()), settings.getProxy().username(), settings.getProxy().password(), getAccessTokenOfCLI(settings), settings.getCentral().getConnectTimeout(), settings.getCentral().getReadTimeout(), settings.getCentral().getWriteTimeout(), settings.getCentral().getCallTimeout()); try { client.pullPackage(orgName, packageName, version, destination, supportedPlatform, RepoUtils.getBallerinaVersion(), false); } catch (CentralClientException e) { throw e; } } public static void writeBallerinaToml(Path balTomlPath, PackageJson packageJson, String packageName, String platform) throws IOException { Files.writeString(balTomlPath, "[package]", 
StandardOpenOption.APPEND); Files.writeString(balTomlPath, "\norg = \"" + packageJson.getOrganization() + "\"", StandardOpenOption.APPEND); Files.writeString(balTomlPath, "\nname = \"" + packageName + "\"", StandardOpenOption.APPEND); Files.writeString(balTomlPath, "\nversion = \"" + packageJson.getVersion() + "\"", StandardOpenOption.APPEND); List<String> newModuleNames = packageJson.getExport().stream().map(module -> module.replaceFirst(packageJson.getName(), packageName)).collect(Collectors.toList()); StringJoiner stringJoiner = new StringJoiner(","); for (String newModuleName : newModuleNames) { stringJoiner.add("\"" + newModuleName + "\""); } Files.writeString(balTomlPath, "\nexport = [" + stringJoiner + "]" .replaceFirst(packageJson.getName(), packageName), StandardOpenOption.APPEND); Files.writeString(balTomlPath, "\ndistribution = \"" + packageJson.getBallerinaVersion() + "\"", StandardOpenOption.APPEND); writePackageAttributeArray(balTomlPath, packageJson.getLicenses(), "license"); writePackageAttributeArray(balTomlPath, packageJson.getAuthors(), "authors"); writePackageAttributeArray(balTomlPath, packageJson.getKeywords(), "keywords"); writePackageAttributeValue(balTomlPath, packageJson.getSourceRepository(), "repository"); writePackageAttributeValue(balTomlPath, packageJson.getVisibility(), "visibility"); writePackageAttributeValue(balTomlPath, packageJson.getIcon(), "icon"); Files.writeString(balTomlPath, "\n\n[build-options]", StandardOpenOption.APPEND); Files.writeString(balTomlPath, "\nobservabilityIncluded = true\n", StandardOpenOption.APPEND); JsonArray platformLibraries = packageJson.getPlatformDependencies(); if (platformLibraries == null) { return; } Files.writeString(balTomlPath, "\n[[platform." 
+ platform + ".dependency]]", StandardOpenOption.APPEND); for (Object dependencies : platformLibraries) { JsonObject dependenciesObj = (JsonObject) dependencies; if (null == dependenciesObj.get("scope")) { String libPath = dependenciesObj.get("path").getAsString(); Path libName = Optional.of(Paths.get(libPath).getFileName()).get(); Path libRelPath = Paths.get("libs", libName.toString()); Files.writeString(balTomlPath, "\npath = \"" + libRelPath + "\"", StandardOpenOption.APPEND); } if (dependenciesObj.get("artifactId") != null) { String artifactId = dependenciesObj.get("artifactId").getAsString(); Files.writeString(balTomlPath, "\nartifactId = \"" + artifactId + "\"", StandardOpenOption.APPEND); } if (dependenciesObj.get("groupId") != null) { String groupId = dependenciesObj.get("groupId").getAsString(); Files.writeString(balTomlPath, "\ngroupId = \"" + groupId + "\"", StandardOpenOption.APPEND); } if (dependenciesObj.get("version") != null) { String dependencyVersion = dependenciesObj.get("version").getAsString(); Files.writeString(balTomlPath, "\nversion = \"" + dependencyVersion + "\"\n", StandardOpenOption.APPEND); } if (null != dependenciesObj.get("scope") && dependenciesObj.get("scope").getAsString().equals("provided")) { String scope = dependenciesObj.get("scope").getAsString(); Files.writeString(balTomlPath, "scope = \"" + scope + "\"\n", StandardOpenOption.APPEND); String artifactId = dependenciesObj.get("artifactId").getAsString(); printError(errStream, "WARNING: path for the platform dependency " + artifactId + " with provided scope " + "should be specified in the Ballerina.toml", null, false); } } } public static void writeDependenciesToml(Path projectPath, DependencyGraphJson templateDependencyGraphJson, PackageJson templatePackageJson) throws IOException { Path depsTomlPath = projectPath.resolve(DEPENDENCIES_TOML); String autoGenCode = " "\n" + " " "\n"; Files.writeString(depsTomlPath, autoGenCode, StandardOpenOption.APPEND); String balTomlVersion = 
"[ballerina]\n" + "dependencies-toml-version = \"" + ProjectConstants.DEPENDENCIES_TOML_VERSION + "\"\n" + "\n"; Files.writeString(depsTomlPath, balTomlVersion, StandardOpenOption.APPEND); List<ModuleDependency> currentPkgModules = new ArrayList<>(); for (ModuleDependency module : templateDependencyGraphJson.getModuleDependencies()) { if (module.getOrg().equals(templatePackageJson.getOrganization()) && module.getPackageName().equals(templatePackageJson.getName())) { List<ModuleDependency> currentPkgModuleDeps = module.getDependencies(); currentPkgModules.addAll(currentPkgModuleDeps); } } StringBuilder pkgDesc = new StringBuilder(); for (Dependency packageDependency : templateDependencyGraphJson.getPackageDependencyGraph()) { if (templatePackageJson.getOrganization().equals(packageDependency.getOrg()) && templatePackageJson.getName().equals(packageDependency.getName())) { pkgDesc.append("[[package]]\n") .append("org = \"").append(packageDependency.getOrg()).append("\"\n") .append("name = \"").append(ProjectUtils.defaultName(projectPath)).append("\"\n") .append("version = \"").append(packageDependency.getVersion()).append("\"\n"); pkgDesc.append(getDependenciesArrayContent(packageDependency)); pkgDesc.append(getDependencyModulesArrayContent( templateDependencyGraphJson.getModuleDependencies(), true, projectPath)); } else { pkgDesc.append("[[package]]\n") .append("org = \"").append(packageDependency.getOrg()).append("\"\n") .append("name = \"").append(packageDependency.getName()).append("\"\n") .append("version = \"").append(packageDependency.getVersion()).append("\"\n"); pkgDesc.append(getDependenciesArrayContent(packageDependency)); List<ModuleDependency> packageDependencyModules = new ArrayList<>(); for (ModuleDependency module : currentPkgModules) { if (packageDependency.getOrg().equals(module.getOrg()) && packageDependency.getName().equals(module.getPackageName())) { packageDependencyModules.add(module); } } if (!packageDependencyModules.isEmpty()) { 
pkgDesc.append(getDependencyModulesArrayContent(packageDependencyModules, false, projectPath)); } } pkgDesc.append("\n"); } Files.writeString(depsTomlPath, pkgDesc.toString(), StandardOpenOption.APPEND); } public static Path getPlatformSpecificBalaPath(String orgName, String pkgName, String version, Path balaCache) { Path balaPath = balaCache.resolve( ProjectUtils.getRelativeBalaPath(orgName, pkgName, version, null)); platform = ANY_PLATFORM; if (!Files.exists(balaPath)) { for (JvmTarget supportedPlatform : JvmTarget.values()) { balaPath = balaCache.resolve( ProjectUtils.getRelativeBalaPath(orgName, pkgName, version, supportedPlatform.code())); if (Files.exists(balaPath)) { platform = supportedPlatform.code(); break; } } } return balaPath; } /** * Write to BalTool.toml file. * * @param balToolTomlPath path to BalTool.toml * @param balToolJson Bal-tool.json content */ public static void writeBalToolToml(Path balToolTomlPath, BalToolJson balToolJson, String packageName) throws IOException { Files.writeString(balToolTomlPath, "[tool]", StandardOpenOption.APPEND); Files.writeString(balToolTomlPath, "\nid = \"" + packageName + "\"\n", StandardOpenOption.APPEND); List<String> dependencyPaths = balToolJson.dependencyPaths(); StringBuilder dependenciesContent = new StringBuilder(); for (String dependencyPath: dependencyPaths) { dependenciesContent.append("\n[[dependency]]\n").append("path = \"").append(dependencyPath).append("\"\n"); } Files.writeString(balToolTomlPath, dependenciesContent.toString(), StandardOpenOption.APPEND); } /** * Copy dependency jars to new package from template package. 
* * @param projectPath path to new project * @param toolsLibPath Path to /tool/libs directory containing dependencies */ public static void copyToolDependencies(Path projectPath, Path toolsLibPath) throws IOException { Path toolDirectory = projectPath.resolve(TOOL_DIR); Files.createDirectory(toolDirectory); Files.createDirectory(toolDirectory.resolve(LIBS_DIR)); Files.walkFileTree(toolsLibPath, new FileUtils.Copy(toolsLibPath, toolDirectory.resolve(LIBS_DIR))); } /** * Get formatted dependencies array content for Dependencies.toml dependency. * * @param packageDependency package dependency * @return formatted dependencies array content */ private static String getDependenciesArrayContent(Dependency packageDependency) { StringBuilder dependenciesContent = new StringBuilder(); if (!packageDependency.getDependencies().isEmpty()) { for (Dependency dependency : packageDependency.getDependencies()) { dependenciesContent.append("\t{org = \"").append(dependency.getOrg()) .append("\", name = \"").append(dependency.getName()) .append("\"},\n"); } String dependenciesPart = dependenciesContent.toString(); dependenciesPart = removeLastCharacter(trimStartingWhitespaces(dependenciesPart)); return "dependencies = [\n" + dependenciesPart + "\n]\n"; } return ""; } /** * Get formatted modules array content for Dependencies.toml dependency. 
* <code> * modules = [ * {org = "ballerinax", packageName = "redis", moduleName = "redis"} * ] * </code> * * @param dependencyModules modules of the given dependency package * @param isCurrentPackage is modules array generating for current package * @param projectPath project path * @return formatted modules array content */ private static String getDependencyModulesArrayContent(List<ModuleDependency> dependencyModules, boolean isCurrentPackage, Path projectPath) { StringBuilder modulesContent = new StringBuilder(); if (isCurrentPackage) { for (ModuleDependency module : dependencyModules) { String currentPkgName = ProjectUtils.defaultName(projectPath).value(); String modulePkgPart = module.getModuleName().split("\\.")[0]; String currentPkgModuleName = module.getModuleName().replaceFirst(modulePkgPart, currentPkgName); modulesContent.append("\t{org = \"").append(module.getOrg()) .append("\", packageName = \"").append(currentPkgName) .append("\", moduleName = \"").append(currentPkgModuleName) .append("\"},\n"); } } else { for (ModuleDependency module : dependencyModules) { modulesContent.append("\t{org = \"").append(module.getOrg()) .append("\", packageName = \"").append(module.getPackageName()) .append("\", moduleName = \"").append(module.getModuleName()) .append("\"},\n"); } } String modulesPart = modulesContent.toString(); modulesPart = removeLastCharacter(trimStartingWhitespaces(modulesPart)); return "modules = [\n" + modulesPart + "\n]\n"; } /** * Write Ballerina.toml package attribute array from template package.json to new project Ballerina.toml. 
* * @param balTomlPath Ballerina.toml path of the new project * @param attributeArray package attribute values array * @param attributeName package attribute name * @throws IOException when error occurs writing to the Ballerina.toml */ private static void writePackageAttributeArray(Path balTomlPath, List<String> attributeArray, String attributeName) throws IOException { if (attributeArray != null && !attributeArray.isEmpty()) { StringJoiner stringJoiner = new StringJoiner(","); for (String attributeElement : attributeArray) { stringJoiner.add("\"" + attributeElement + "\""); } Files.writeString(balTomlPath, "\n" + attributeName + " = [" + stringJoiner + "]", StandardOpenOption.APPEND); } } /** * Write Ballerina.toml package attribute from template package.json to new project Ballerina.toml. * * @param balTomlPath Ballerina.toml path of the new project * @param attributeValue package attribute value * @param attributeName package attribute name * @throws IOException when error occurs writing to the Ballerina.toml */ private static void writePackageAttributeValue(Path balTomlPath, String attributeValue, String attributeName) throws IOException { if (attributeValue != null && !attributeValue.isEmpty()) { Files.writeString(balTomlPath, "\n" + attributeName + " = \"" + attributeValue + "\"", StandardOpenOption.APPEND); } } /** * Find the package name for a given template. * * @param template template name * @return packageName - package name of the module */ public static String findPkgName(String template) { String[] orgSplit = template.split("/"); String packageName = ""; String packagePart = (orgSplit.length > 1) ? orgSplit[1] : ""; String[] pkgSplit = packagePart.split(":"); packageName = pkgSplit[0].trim(); return packageName; } /** * Find the organization for a given template. 
* * @param template template name * @return orgName - org of the module */ public static String findOrg(String template) { String[] orgSplit = template.split("/"); return orgSplit[0].trim(); } /** * Find the package version for a given template. * * @param template template name * @return version - version of the module */ public static String findPkgVersion(String template) { String[] orgSplit = template.split("/"); String packagePart = (orgSplit.length > 1) ? orgSplit[1] : ""; String[] pkgSplit = packagePart.split(":"); if (pkgSplit.length > 1) { return pkgSplit[1].trim(); } else { return null; } } /** * Initialize a new ballerina project in the given path. * * @param path project path * @param packageName name of the package * @param template package template * @param balFilesExist if bal files exist in the project * @throws IOException If any IO exception occurred * @throws URISyntaxException If any URISyntaxException occurred */ public static void initPackageByTemplate(Path path, String packageName, String template, boolean balFilesExist) throws IOException, URISyntaxException { applyTemplate(path, template, balFilesExist); if (template.equalsIgnoreCase(LIB_DIR)) { initLibPackage(path, packageName); Path source = path.resolve("lib.bal"); Files.move(source, source.resolveSibling(guessPkgName(packageName, template) + ".bal"), StandardCopyOption.REPLACE_EXISTING); } else if (template.equalsIgnoreCase(TOOL_DIR)) { initToolPackage(path, packageName); } else { initPackage(path, packageName); } createDefaultGitignore(path); createDefaultDevContainer(path); } private static void createDefaultGitignore(Path path) throws IOException { Path gitignore = path.resolve(ProjectConstants.GITIGNORE_FILE_NAME); if (Files.notExists(gitignore)) { Files.createFile(gitignore); } if (Files.size(gitignore) == 0) { String defaultGitignore = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + GITIGNORE); Files.write(gitignore, defaultGitignore.getBytes(StandardCharsets.UTF_8)); } } 
private static void createDefaultDevContainer(Path path) throws IOException {
    // Create a default devcontainer config, filling it only when absent or empty.
    Path devContainer = path.resolve(ProjectConstants.DEVCONTAINER);
    if (Files.notExists(devContainer)) {
        Files.createFile(devContainer);
    }
    if (Files.size(devContainer) == 0) {
        String defaultDevContainer = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + DEVCONTAINER);
        // Pin the devcontainer image tag to the current Ballerina version instead of "latest".
        defaultDevContainer = defaultDevContainer.replace("latest", RepoUtils.getBallerinaVersion());
        Files.write(devContainer, defaultDevContainer.getBytes(StandardCharsets.UTF_8));
    }
}

/**
 * Get the list of built-in templates shipped with the distribution.
 *
 * @return list of template names; an empty list if the templates cannot be read
 */
public static List<String> getTemplates() {
    try {
        Path templateDir = getTemplatePath();
        // Files.walk returns a stream backed by an open directory handle; close it.
        try (Stream<Path> walk = Files.walk(templateDir, 1)) {
            List<String> templates = walk.filter(Files::isDirectory)
                    .filter(directory -> !templateDir.equals(directory))
                    .filter(directory -> directory.getFileName() != null)
                    .map(directory -> directory.getFileName())
                    .map(fileName -> fileName.toString())
                    .collect(Collectors.toList());
            if (null != jarFs) {
                // Templates read from inside the tool jar carry the jar FS separator; strip it.
                return templates.stream().map(t -> t.replace(jarFs.getSeparator(), ""))
                        .collect(Collectors.toList());
            }
            return templates;
        }
    } catch (IOException | URISyntaxException e) {
        // Best effort: no templates are better than a crash while listing them.
        return new ArrayList<>();
    }
}

/**
 * Get the path to the templates root directory.
 *
 * @return path of the templates root
 * @throws URISyntaxException if the resource URI is malformed
 */
private static Path getTemplatePath() throws URISyntaxException {
    URI uri = CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES).toURI();
    if (uri.toString().contains("!")) {
        // Resource lives inside a jar: resolve it through the jar file system.
        final String[] array = uri.toString().split("!");
        return jarFs.getPath(array[1]);
    }
    return Paths.get(uri);
}

/**
 * Apply the given template to the created module.
 *
 * @param modulePath    path to the module
 * @param template      template name
 * @param balFilesExist whether bal files already exist in the project
 * @throws IOException        if any IOException occurred
 * @throws URISyntaxException if any URISyntaxException occurred
 */
public static void applyTemplate(Path modulePath, String template, boolean balFilesExist)
        throws IOException, URISyntaxException {
    Path templateDir = getTemplatePath().resolve(template);
    if (template.equalsIgnoreCase(MAIN_TEMPLATE)) {
        // "main" is layered on top of "default": copy both template trees.
        templateDir = getTemplatePath().resolve(DEFAULT_TEMPLATE);
        Path tempDirTest = getTemplatePath().resolve(MAIN_TEMPLATE);
        Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
        Files.walkFileTree(tempDirTest, new FileUtils.Copy(tempDirTest, modulePath));
    } else if (template.equalsIgnoreCase(DEFAULT_TEMPLATE)) {
        // Do not overwrite an existing source layout with the default template.
        if (!balFilesExist) {
            Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
        }
    } else {
        Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
    }
}

/**
 * Initialize a new ballerina application package in the given path.
 *
 * @param path        project path
 * @param packageName name of the package
 * @throws IOException if any IO exception occurred
 */
public static void initPackage(Path path, String packageName) throws IOException {
    Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);
    Files.createFile(ballerinaToml);
    String defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-app.toml");
    defaultManifest = defaultManifest
            .replaceAll(ORG_NAME, ProjectUtils.guessOrgName())
            .replaceAll(PKG_NAME, guessPkgName(packageName, "app"))
            .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());
    Files.write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));
}

/**
 * Initialize a new ballerina library package in the given path, including a Package.md.
 *
 * @param path        project path
 * @param packageName name of the package
 * @throws IOException if any IO exception occurred
 */
private static void initLibPackage(Path path, String packageName) throws IOException {
    Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);
    Files.createFile(ballerinaToml);
    String defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-lib.toml");
    defaultManifest = defaultManifest.replaceAll(ORG_NAME, ProjectUtils.guessOrgName())
            .replaceAll(PKG_NAME, guessPkgName(packageName, "lib"))
            .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());
    write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));
    String packageMd = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/Package.md");
    write(path.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME), packageMd.getBytes(StandardCharsets.UTF_8));
}

/**
 * Initialize a new ballerina tool package in the given path, including a BalTool.toml.
 *
 * @param path        project path
 * @param packageName package name
 * @throws IOException if any IO exception occurred
 */
private static void initToolPackage(Path path, String packageName) throws IOException {
    Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);
    Files.createFile(ballerinaToml);
    String defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-app.toml");
    defaultManifest = defaultManifest
            .replaceAll(ORG_NAME, ProjectUtils.guessOrgName())
            .replaceAll(PKG_NAME, guessPkgName(packageName, TOOL_DIR))
            .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());
    Files.write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));

    Path balToolToml = path.resolve(ProjectConstants.BAL_TOOL_TOML);
    Files.createFile(balToolToml);
    String balToolManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-tool.toml");
    balToolManifest = balToolManifest.replaceAll(TOOL_ID, guessPkgName(packageName, TOOL_DIR));
    write(balToolToml, balToolManifest.getBytes(StandardCharsets.UTF_8));
}

/**
 * Find the latest version among the given package versions.
 *
 * @param packageVersions versions to inspect
 * @return the latest version, or null when the list is empty
 */
protected static PackageVersion findLatest(List<PackageVersion> packageVersions) {
    if (packageVersions.isEmpty()) {
        return null;
    }
    PackageVersion latestVersion = packageVersions.get(0);
    for (PackageVersion pkgVersion : packageVersions) {
        latestVersion = getLatest(latestVersion, pkgVersion);
    }
    return latestVersion;
}

/**
 * Pick the later of two package versions; a stable version always wins over a pre-release.
 *
 * @param v1 first version
 * @param v2 second version
 * @return the later of the two versions
 */
protected static PackageVersion getLatest(PackageVersion v1, PackageVersion v2) {
    SemanticVersion semVer1 = v1.value();
    SemanticVersion semVer2 = v2.value();
    boolean isV1PreReleaseVersion = semVer1.isPreReleaseVersion();
    boolean isV2PreReleaseVersion = semVer2.isPreReleaseVersion();
    // Exactly one is a pre-release: prefer the stable one.
    if (isV1PreReleaseVersion ^ isV2PreReleaseVersion) {
        return isV1PreReleaseVersion ? v2 : v1;
    }
    return semVer1.greaterThanOrEqualTo(semVer2) ? v1 : v2;
}

/**
 * List the versions available in the bala cache for a package.
 *
 * @param balaPackagePath package directory inside the bala cache
 * @return parsed package versions; empty when the directory does not exist
 */
public static List<PackageVersion> getPackageVersions(Path balaPackagePath) {
    List<Path> versions = new ArrayList<>();
    if (Files.exists(balaPackagePath)) {
        // Files.list must be closed to release the directory handle.
        try (Stream<Path> collectVersions = Files.list(balaPackagePath)) {
            versions.addAll(collectVersions.collect(Collectors.toList()));
        } catch (IOException e) {
            // Keep the cause so the underlying IO failure is diagnosable.
            throw new RuntimeException("Error while accessing Distribution cache: " + e.getMessage(), e);
        }
    }
    return pathToVersions(versions);
}

/**
 * Convert version directory paths to package versions, silently skipping
 * directory names that are not valid semantic versions.
 *
 * @param versions version directory paths
 * @return the parseable package versions
 */
protected static List<PackageVersion> pathToVersions(List<Path> versions) {
    List<PackageVersion> availableVersions = new ArrayList<>();
    versions.stream().map(path -> Optional.ofNullable(path)
            .map(Path::getFileName)
            .map(Path::toString)
            .orElse("0.0.0")).forEach(version -> {
        try {
            availableVersions.add(PackageVersion.from(version));
        } catch (ProjectException ignored) {
            // Deliberately skip directories whose names are not valid versions.
        }
    });
    return availableVersions;
}

/**
 * Remove trailing whitespace from a string.
 * NOTE(review): despite the name, the {@code $}-anchored pattern strips *trailing*
 * whitespace; the name is kept for compatibility with existing callers.
 *
 * @param str given string
 * @return string with trailing whitespace removed
 */
private static String trimStartingWhitespaces(String str) {
    return str.replaceFirst("\\s++$", "");
}

/**
 * Remove the last character of a string. The string must be non-empty.
 *
 * @param str given string
 * @return string with its last character removed
 */
private static String removeLastCharacter(String str) {
    return str.substring(0, str.length() - 1);
}

/**
 * Check whether non-source files of the given template already exist in the given path.
 *
 * @param template    template name
 * @param packagePath path to check
 * @return a ", "-separated list of conflicting file names (empty when none)
 * @throws URISyntaxException if the template path cannot be resolved
 * @throws IOException        if the template directory cannot be listed
 */
public static String checkTemplateFilesExists(String template, Path packagePath)
        throws URISyntaxException, IOException {
    Path templateDir = getTemplatePath().resolve(template);
    List<Path> templateFilePathList;
    // Close the directory stream returned by Files.list.
    try (Stream<Path> paths = Files.list(templateDir)) {
        templateFilePathList = paths.collect(Collectors.toList());
    }
    StringBuilder existingFiles = new StringBuilder();
    for (Path path : templateFilePathList) {
        Optional<String> fileNameOptional = Optional.ofNullable(path.getFileName()).map(path1 -> path1.toString());
        if (fileNameOptional.isPresent()) {
            String fileName = fileNameOptional.get();
            // .bal sources are handled separately; only flag other conflicting files.
            if (!fileName.endsWith(ProjectConstants.BLANG_SOURCE_EXT)
                    && Files.exists(packagePath.resolve(fileName))) {
                existingFiles.append(fileName).append(FILE_STRING_SEPARATOR);
            }
        }
    }
    return existingFiles.toString();
}

/**
 * Check whether common package files already exist in the given path.
 *
 * @param packagePath path to check
 * @return a ", "-separated list of existing file names (empty when none)
 */
public static String checkPackageFilesExists(Path packagePath) {
    String[] packageFiles = {DEPENDENCIES_TOML, BAL_TOOL_TOML, ProjectConstants.PACKAGE_MD_FILE_NAME,
            ProjectConstants.MODULE_MD_FILE_NAME, ProjectConstants.MODULES_ROOT, ProjectConstants.TEST_DIR_NAME};
    StringBuilder existingFiles = new StringBuilder();
    for (String file : packageFiles) {
        if (Files.exists(packagePath.resolve(file))) {
            existingFiles.append(file).append(FILE_STRING_SEPARATOR);
        }
    }
    return existingFiles.toString();
}

/**
 * Check whether any .bal files exist directly in the given path.
 *
 * @param packagePath path to check
 * @return true when at least one .bal file exists
 * @throws IOException if the directory cannot be listed
 */
public static boolean balFilesExists(Path packagePath) throws IOException {
    // Close the directory stream returned by Files.list.
    try (Stream<Path> files = Files.list(packagePath)) {
        return files.anyMatch(path -> path.toString().endsWith(ProjectConstants.BLANG_SOURCE_EXT));
    }
}

/**
 * Get the latest version from a given list of version strings.
 * The list must be non-empty; each element must be a valid semantic version.
 *
 * @param versions the list of version strings
 * @return the latest version
 */
static String getLatestVersion(List<String> versions) {
    String latestVersion = versions.get(0);
    for (String version : versions) {
        if (SemanticVersion.from(version).greaterThan(SemanticVersion.from(latestVersion))) {
            latestVersion = version;
        }
    }
    return latestVersion;
}

/**
 * Print the given diagnostics to the error stream.
 *
 * @param diagnostics diagnostics to print
 */
private static void printDiagnostics(Collection<Diagnostic> diagnostics) {
    for (Diagnostic diagnostic : diagnostics) {
        CommandUtil.printError(errStream, diagnostic.toString(), null, false);
    }
}

/**
 * Check whether the compiled package has any platform dependencies with "provided" scope.
 *
 * @param packageCompilation compilation to inspect
 * @return true when at least one provided-scope platform dependency exists
 */
private static boolean hasProvidedPlatformDeps(PackageCompilation packageCompilation) {
    Set<Object> providedDeps = new HashSet<>();
    packageCompilation.getResolution().allDependencies()
            .stream()
            .map(ResolvedPackageDependency::packageInstance)
            .map(Package::manifest)
            .flatMap(pkgManifest -> pkgManifest.platforms().values().stream())
            .filter(Objects::nonNull)
            .flatMap(pkgPlatform -> pkgPlatform.dependencies().stream())
            .filter(dependency -> "provided".equals(dependency.get("scope")))
            .forEach(providedDeps::add);
    return !providedDeps.isEmpty();
}
}
class CommandUtil { public static final String ORG_NAME = "ORG_NAME"; public static final String PKG_NAME = "PKG_NAME"; public static final String DIST_VERSION = "DIST_VERSION"; public static final String TOOL_ID = "TOOL_ID"; public static final String USER_HOME = "user.home"; public static final String GITIGNORE = "gitignore"; public static final String DEVCONTAINER = "devcontainer"; public static final String NEW_CMD_DEFAULTS = "new_cmd_defaults"; public static final String CREATE_CMD_TEMPLATES = "create_cmd_templates"; public static final String LIBS_DIR = "libs"; public static final String DEFAULT_TEMPLATE = "default"; public static final String MAIN_TEMPLATE = "main"; public static final String FILE_STRING_SEPARATOR = ", "; private static FileSystem jarFs; private static Map<String, String> env; private static PrintStream errStream; private static PrintStream outStream; private static Path homeCache; private static boolean exitWhenFinish; private static String platform; static void setPrintStream(PrintStream errStream) { CommandUtil.errStream = errStream; } public static void initJarFs() { URI uri = null; try { uri = CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES).toURI(); if (uri.toString().contains("!")) { final String[] array = uri.toString().split("!"); if (null == jarFs) { env = new HashMap<>(); jarFs = FileSystems.newFileSystem(URI.create(array[0]), env); } } } catch (URISyntaxException | IOException e) { throw new AssertionError(); } } /** * Print command errors with a standard format. 
* * @param stream error will be sent to this stream * @param error error message * @param usage usage if any * @param help if the help message should be printed */ public static void printError(PrintStream stream, String error, String usage, boolean help) { stream.println("ballerina: " + error); if (null != usage) { stream.println(); stream.println("USAGE:"); stream.println(" " + usage); } if (help) { stream.println(); stream.println("For more information try --help"); } } /** * Exit with error code 1. * * @param exit Whether to exit or not. */ public static void exitError(boolean exit) { if (exit) { Runtime.getRuntime().exit(1); } } static void applyTemplate(String orgName, String templatePkgName, String version, String packageName, Path projectPath, Path balaCache, List<Path> filesInDir) { Path balaPath = getPlatformSpecificBalaPath(orgName, templatePkgName, version, balaCache); if (!Files.exists(balaPath)) { CommandUtil.printError(errStream, "unable to find the bala: " + balaPath, null, false); CommandUtil.exitError(exitWhenFinish); } try { addModules(balaPath, projectPath, packageName); } catch (IOException e) { ProjectUtils.deleteSelectedFilesInDirectory(projectPath, filesInDir); CommandUtil.printError(errStream, "error occurred while creating the package: " + e.getMessage(), null, false); CommandUtil.exitError(exitWhenFinish); } } private static void addModules(Path balaPath, Path projectPath, String packageName) throws IOException { Gson gson = new Gson(); Path packageJsonPath = balaPath.resolve(PACKAGE_JSON); Path dependencyGraphJsonPath = balaPath.resolve(DEPENDENCY_GRAPH_JSON); Path balToolJsonPath = balaPath.resolve(TOOL_DIR).resolve(ProjectConstants.BAL_TOOL_JSON); PackageJson templatePackageJson = null; DependencyGraphJson templateDependencyGraphJson = null; BalToolJson templateBalToolJson = null; try (InputStream inputStream = new FileInputStream(String.valueOf(packageJsonPath))) { Reader fileReader = new InputStreamReader(inputStream, 
StandardCharsets.UTF_8); templatePackageJson = gson.fromJson(fileReader, PackageJson.class); } catch (IOException e) { printError(errStream, "Error while reading the package json file: " + e.getMessage(), null, false); getRuntime().exit(1); } if (dependencyGraphJsonPath.toFile().exists()) { try (InputStream inputStream = new FileInputStream(String.valueOf(dependencyGraphJsonPath))) { Reader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8); templateDependencyGraphJson = gson.fromJson(fileReader, DependencyGraphJson.class); } catch (IOException e) { printError(errStream, "Error while reading the dependency graph json file: " + e.getMessage(), null, false); getRuntime().exit(1); } } if (balToolJsonPath.toFile().exists()) { try (InputStream inputStream = new FileInputStream(String.valueOf(balToolJsonPath))) { Reader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8); templateBalToolJson = gson.fromJson(fileReader, BalToolJson.class); } catch (IOException e) { printError(errStream, "Error while reading the " + BAL_TOOL_JSON + " file: " + e.getMessage(), null, false); getRuntime().exit(1); } } if (!templatePackageJson.getTemplate()) { throw createLauncherException("unable to create the package: " + "specified package is not a template"); } Path ballerinaToml = projectPath.resolve(ProjectConstants.BALLERINA_TOML); Files.createDirectories(projectPath); Files.createFile(ballerinaToml); writeBallerinaToml(ballerinaToml, templatePackageJson, packageName, platform); if (dependencyGraphJsonPath.toFile().exists()) { Path dependenciesToml = projectPath.resolve(DEPENDENCIES_TOML); Files.createFile(dependenciesToml); writeDependenciesToml(projectPath, templateDependencyGraphJson, templatePackageJson); } if (balToolJsonPath.toFile().exists()) { Path balToolToml = projectPath.resolve(BAL_TOOL_TOML); Files.createFile(balToolToml); writeBalToolToml(balToolToml, templateBalToolJson, packageName); copyToolDependencies(projectPath, 
balaPath.resolve(TOOL_DIR).resolve(LIBS_DIR)); } Path packageMDFilePath = balaPath.resolve("docs") .resolve(ProjectConstants.PACKAGE_MD_FILE_NAME); Path toPackageMdPath = projectPath.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME); if (Files.exists(packageMDFilePath)) { Files.copy(packageMDFilePath, toPackageMdPath, StandardCopyOption.REPLACE_EXISTING); } createDefaultGitignore(projectPath); createDefaultDevContainer(projectPath); String templatePkgName = templatePackageJson.getName(); Path modulesRoot = balaPath.resolve(ProjectConstants.MODULES_ROOT); Path moduleMdDirRoot = balaPath.resolve("docs").resolve(ProjectConstants.MODULES_ROOT); List<Path> modulesList; try (Stream<Path> pathStream = Files.list(modulesRoot)) { modulesList = pathStream.collect(Collectors.toList()); } for (Path moduleRoot : modulesList) { Path moduleDir = Optional.of(moduleRoot.getFileName()).get(); Path destDir; if (moduleDir.toString().equals(templatePkgName)) { destDir = projectPath; } else { String moduleDirName = moduleDir.toString().split(templatePkgName + ProjectConstants.DOT, 2)[1]; destDir = projectPath.resolve(ProjectConstants.MODULES_ROOT).resolve(moduleDirName); Files.createDirectories(destDir); } Files.walkFileTree(moduleRoot, new FileUtils.Copy(moduleRoot, destDir, templatePkgName, packageName)); Path moduleMdSource = moduleMdDirRoot.resolve(moduleDir).resolve(ProjectConstants.MODULE_MD_FILE_NAME); if (Files.exists(moduleMdSource)) { Files.copy(moduleMdSource, destDir.resolve(ProjectConstants.MODULE_MD_FILE_NAME), StandardCopyOption.REPLACE_EXISTING); } } copyIcon(balaPath, projectPath); copyPlatformLibraries(balaPath, projectPath); copyIncludeFiles(balaPath, projectPath, templatePackageJson); } private static void copyIcon(Path balaPath, Path projectPath) { Path docsPath = balaPath.resolve(ProjectConstants.BALA_DOCS_DIR); try (Stream<Path> pathStream = Files.walk(docsPath, 1)) { List<Path> icon = pathStream 
.filter(FileSystems.getDefault().getPathMatcher("glob:**.png")::matches) .collect(Collectors.toList()); if (!icon.isEmpty()) { Path projectDocsDir = projectPath.resolve(ProjectConstants.BALA_DOCS_DIR); Files.createDirectory(projectDocsDir); Path projectIconPath = projectDocsDir.resolve(Optional.of(icon.get(0).getFileName()).get()); Files.copy(icon.get(0), projectIconPath, StandardCopyOption.REPLACE_EXISTING); } } catch (IOException e) { printError(errStream, "Error while retrieving the icon: " + e.getMessage(), null, false); getRuntime().exit(1); } } private static void copyPlatformLibraries(Path balaPath, Path projectPath) throws IOException { Path platformLibPath = balaPath.resolve("platform").resolve(platform); if (Files.exists(platformLibPath)) { Path libs = projectPath.resolve("libs"); Files.createDirectories(libs); Files.walkFileTree(platformLibPath, new FileUtils.Copy(platformLibPath, libs)); } } private static void copyIncludeFiles(Path balaPath, Path projectPath, PackageJson templatePackageJson) throws IOException { if (templatePackageJson.getInclude() != null) { String templatePkgName = templatePackageJson.getName(); List<Path> includePaths = ProjectUtils.getPathsMatchingIncludePatterns( templatePackageJson.getInclude(), balaPath); for (Path includePath : includePaths) { Path moduleNameUpdatedIncludePath = updateModuleDirectoryNaming(includePath, balaPath, templatePkgName); Path fromIncludeFilePath = balaPath.resolve(includePath); Path toIncludeFilePath = projectPath.resolve(moduleNameUpdatedIncludePath); if (Files.notExists(toIncludeFilePath)) { Files.createDirectories(toIncludeFilePath); Files.walkFileTree(fromIncludeFilePath, new FileUtils.Copy(fromIncludeFilePath, toIncludeFilePath)); } } } } private static Path updateModuleDirectoryNaming(Path includePath, Path balaPath, String templatePkgName) { Path modulesDirPath = balaPath.resolve(ProjectConstants.MODULES_ROOT); Path absoluteIncludePath = balaPath.resolve(includePath); if 
(absoluteIncludePath.startsWith(modulesDirPath)) { Path moduleRootPath = modulesDirPath.relativize(absoluteIncludePath).subpath(0, 1); String moduleDirName = Optional.of(moduleRootPath.getFileName()).get().toString(); String destinationDirName = moduleDirName.split(templatePkgName + ProjectConstants.DOT, 2)[1]; Path includePathRelativeToModuleRoot = modulesDirPath.resolve(moduleRootPath) .relativize(absoluteIncludePath); Path updatedIncludePath = Paths.get(ProjectConstants.MODULES_ROOT).resolve(destinationDirName) .resolve(includePathRelativeToModuleRoot); return updatedIncludePath; } return includePath; } /** * Find the bala path for a given template. * * @param template template name */ static Path findBalaTemplate(String template, Path balaCache) { String packageName = findPkgName(template); String orgName = findOrg(template); String version = findPkgVersion(template); if (version != null) { Path balaPath = getPlatformSpecificBalaPath(orgName, packageName, version, balaCache); if (Files.exists(balaPath)) { return balaPath; } else { return null; } } else { return null; } } public static void initPackageFromCentral(Path balaCache, Path projectPath, String packageName, String template, List<Path> filesInDir) { System.setProperty(CentralClientConstants.ENABLE_OUTPUT_STREAM, "true"); String templatePackageName = findPkgName(template); String orgName = findOrg(template); String version = findPkgVersion(template); Path pkgCacheParent = balaCache.resolve(orgName).resolve(templatePackageName); try { pullPackageFromRemote(orgName, templatePackageName, version, pkgCacheParent); } catch (PackageAlreadyExistsException e) { if (version == null) { List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent); PackageVersion latest = findLatest(packageVersions); if (latest == null) { throw createLauncherException("unable to find package in the filesystem cache." 
+ " This is an unexpected error : " + e.getMessage()); } version = latest.toString(); } } catch (CentralClientException e) { errStream.println("Warning: Unable to pull the package from Ballerina Central: " + e.getMessage()); if (findBalaTemplate(template, balaCache) == null) { List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent); PackageVersion latest = findLatest(packageVersions); if (latest == null) { throw createLauncherException("template not found in filesystem cache."); } version = latest.toString(); } } if (version == null) { List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent); PackageVersion latest = findLatest(packageVersions); version = Objects.requireNonNull(latest).toString(); } applyTemplate(orgName, templatePackageName, version, packageName, projectPath, balaCache, filesInDir); } private static void pullPackageFromRemote(String orgName, String packageName, String version, Path destination) throws CentralClientException { String supportedPlatform = Arrays.stream(JvmTarget.values()) .map(target -> target.code()) .collect(Collectors.joining(",")); Settings settings; try { settings = readSettings(); } catch (SettingsTomlException e) { settings = Settings.from(); } CentralAPIClient client = new CentralAPIClient(RepoUtils.getRemoteRepoURL(), initializeProxy(settings.getProxy()), settings.getProxy().username(), settings.getProxy().password(), getAccessTokenOfCLI(settings), settings.getCentral().getConnectTimeout(), settings.getCentral().getReadTimeout(), settings.getCentral().getWriteTimeout(), settings.getCentral().getCallTimeout()); try { client.pullPackage(orgName, packageName, version, destination, supportedPlatform, RepoUtils.getBallerinaVersion(), false); } catch (CentralClientException e) { throw e; } } public static void writeBallerinaToml(Path balTomlPath, PackageJson packageJson, String packageName, String platform) throws IOException { Files.writeString(balTomlPath, "[package]", 
StandardOpenOption.APPEND); Files.writeString(balTomlPath, "\norg = \"" + packageJson.getOrganization() + "\"", StandardOpenOption.APPEND); Files.writeString(balTomlPath, "\nname = \"" + packageName + "\"", StandardOpenOption.APPEND); Files.writeString(balTomlPath, "\nversion = \"" + packageJson.getVersion() + "\"", StandardOpenOption.APPEND); List<String> newModuleNames = packageJson.getExport().stream().map(module -> module.replaceFirst(packageJson.getName(), packageName)).collect(Collectors.toList()); StringJoiner stringJoiner = new StringJoiner(","); for (String newModuleName : newModuleNames) { stringJoiner.add("\"" + newModuleName + "\""); } Files.writeString(balTomlPath, "\nexport = [" + stringJoiner + "]" .replaceFirst(packageJson.getName(), packageName), StandardOpenOption.APPEND); Files.writeString(balTomlPath, "\ndistribution = \"" + packageJson.getBallerinaVersion() + "\"", StandardOpenOption.APPEND); writePackageAttributeArray(balTomlPath, packageJson.getLicenses(), "license"); writePackageAttributeArray(balTomlPath, packageJson.getAuthors(), "authors"); writePackageAttributeArray(balTomlPath, packageJson.getKeywords(), "keywords"); writePackageAttributeValue(balTomlPath, packageJson.getSourceRepository(), "repository"); writePackageAttributeValue(balTomlPath, packageJson.getVisibility(), "visibility"); writePackageAttributeValue(balTomlPath, packageJson.getIcon(), "icon"); Files.writeString(balTomlPath, "\n\n[build-options]", StandardOpenOption.APPEND); Files.writeString(balTomlPath, "\nobservabilityIncluded = true\n", StandardOpenOption.APPEND); JsonArray platformLibraries = packageJson.getPlatformDependencies(); if (platformLibraries == null) { return; } Files.writeString(balTomlPath, "\n[[platform." 
+ platform + ".dependency]]", StandardOpenOption.APPEND); for (Object dependencies : platformLibraries) { JsonObject dependenciesObj = (JsonObject) dependencies; if (null == dependenciesObj.get("scope")) { String libPath = dependenciesObj.get("path").getAsString(); Path libName = Optional.of(Paths.get(libPath).getFileName()).get(); Path libRelPath = Paths.get("libs", libName.toString()); Files.writeString(balTomlPath, "\npath = \"" + libRelPath + "\"", StandardOpenOption.APPEND); } if (dependenciesObj.get("artifactId") != null) { String artifactId = dependenciesObj.get("artifactId").getAsString(); Files.writeString(balTomlPath, "\nartifactId = \"" + artifactId + "\"", StandardOpenOption.APPEND); } if (dependenciesObj.get("groupId") != null) { String groupId = dependenciesObj.get("groupId").getAsString(); Files.writeString(balTomlPath, "\ngroupId = \"" + groupId + "\"", StandardOpenOption.APPEND); } if (dependenciesObj.get("version") != null) { String dependencyVersion = dependenciesObj.get("version").getAsString(); Files.writeString(balTomlPath, "\nversion = \"" + dependencyVersion + "\"\n", StandardOpenOption.APPEND); } if (null != dependenciesObj.get("scope") && dependenciesObj.get("scope").getAsString().equals("provided")) { String scope = dependenciesObj.get("scope").getAsString(); Files.writeString(balTomlPath, "scope = \"" + scope + "\"\n", StandardOpenOption.APPEND); String artifactId = dependenciesObj.get("artifactId").getAsString(); printError(errStream, "WARNING: path for the platform dependency " + artifactId + " with provided scope " + "should be specified in the Ballerina.toml", null, false); } } } public static void writeDependenciesToml(Path projectPath, DependencyGraphJson templateDependencyGraphJson, PackageJson templatePackageJson) throws IOException { Path depsTomlPath = projectPath.resolve(DEPENDENCIES_TOML); String autoGenCode = " "\n" + " " "\n"; Files.writeString(depsTomlPath, autoGenCode, StandardOpenOption.APPEND); String balTomlVersion = 
"[ballerina]\n" + "dependencies-toml-version = \"" + ProjectConstants.DEPENDENCIES_TOML_VERSION + "\"\n" + "\n"; Files.writeString(depsTomlPath, balTomlVersion, StandardOpenOption.APPEND); List<ModuleDependency> currentPkgModules = new ArrayList<>(); for (ModuleDependency module : templateDependencyGraphJson.getModuleDependencies()) { if (module.getOrg().equals(templatePackageJson.getOrganization()) && module.getPackageName().equals(templatePackageJson.getName())) { List<ModuleDependency> currentPkgModuleDeps = module.getDependencies(); currentPkgModules.addAll(currentPkgModuleDeps); } } StringBuilder pkgDesc = new StringBuilder(); for (Dependency packageDependency : templateDependencyGraphJson.getPackageDependencyGraph()) { if (templatePackageJson.getOrganization().equals(packageDependency.getOrg()) && templatePackageJson.getName().equals(packageDependency.getName())) { pkgDesc.append("[[package]]\n") .append("org = \"").append(packageDependency.getOrg()).append("\"\n") .append("name = \"").append(ProjectUtils.defaultName(projectPath)).append("\"\n") .append("version = \"").append(packageDependency.getVersion()).append("\"\n"); pkgDesc.append(getDependenciesArrayContent(packageDependency)); pkgDesc.append(getDependencyModulesArrayContent( templateDependencyGraphJson.getModuleDependencies(), true, projectPath)); } else { pkgDesc.append("[[package]]\n") .append("org = \"").append(packageDependency.getOrg()).append("\"\n") .append("name = \"").append(packageDependency.getName()).append("\"\n") .append("version = \"").append(packageDependency.getVersion()).append("\"\n"); pkgDesc.append(getDependenciesArrayContent(packageDependency)); List<ModuleDependency> packageDependencyModules = new ArrayList<>(); for (ModuleDependency module : currentPkgModules) { if (packageDependency.getOrg().equals(module.getOrg()) && packageDependency.getName().equals(module.getPackageName())) { packageDependencyModules.add(module); } } if (!packageDependencyModules.isEmpty()) { 
pkgDesc.append(getDependencyModulesArrayContent(packageDependencyModules, false, projectPath)); } } pkgDesc.append("\n"); } Files.writeString(depsTomlPath, pkgDesc.toString(), StandardOpenOption.APPEND); } public static Path getPlatformSpecificBalaPath(String orgName, String pkgName, String version, Path balaCache) { Path balaPath = balaCache.resolve( ProjectUtils.getRelativeBalaPath(orgName, pkgName, version, null)); platform = ANY_PLATFORM; if (!Files.exists(balaPath)) { for (JvmTarget supportedPlatform : JvmTarget.values()) { balaPath = balaCache.resolve( ProjectUtils.getRelativeBalaPath(orgName, pkgName, version, supportedPlatform.code())); if (Files.exists(balaPath)) { platform = supportedPlatform.code(); break; } } } return balaPath; } /** * Write to BalTool.toml file. * * @param balToolTomlPath path to BalTool.toml * @param balToolJson Bal-tool.json content */ public static void writeBalToolToml(Path balToolTomlPath, BalToolJson balToolJson, String packageName) throws IOException { Files.writeString(balToolTomlPath, "[tool]", StandardOpenOption.APPEND); Files.writeString(balToolTomlPath, "\nid = \"" + packageName + "\"\n", StandardOpenOption.APPEND); List<String> dependencyPaths = balToolJson.dependencyPaths(); StringBuilder dependenciesContent = new StringBuilder(); for (String dependencyPath: dependencyPaths) { dependenciesContent.append("\n[[dependency]]\n").append("path = \"").append(dependencyPath).append("\"\n"); } Files.writeString(balToolTomlPath, dependenciesContent.toString(), StandardOpenOption.APPEND); } /** * Copy dependency jars to new package from template package. 
* * @param projectPath path to new project * @param toolsLibPath Path to /tool/libs directory containing dependencies */ public static void copyToolDependencies(Path projectPath, Path toolsLibPath) throws IOException { Path toolDirectory = projectPath.resolve(TOOL_DIR); Files.createDirectory(toolDirectory); Files.createDirectory(toolDirectory.resolve(LIBS_DIR)); Files.walkFileTree(toolsLibPath, new FileUtils.Copy(toolsLibPath, toolDirectory.resolve(LIBS_DIR))); } /** * Get formatted dependencies array content for Dependencies.toml dependency. * * @param packageDependency package dependency * @return formatted dependencies array content */ private static String getDependenciesArrayContent(Dependency packageDependency) { StringBuilder dependenciesContent = new StringBuilder(); if (!packageDependency.getDependencies().isEmpty()) { for (Dependency dependency : packageDependency.getDependencies()) { dependenciesContent.append("\t{org = \"").append(dependency.getOrg()) .append("\", name = \"").append(dependency.getName()) .append("\"},\n"); } String dependenciesPart = dependenciesContent.toString(); dependenciesPart = removeLastCharacter(trimStartingWhitespaces(dependenciesPart)); return "dependencies = [\n" + dependenciesPart + "\n]\n"; } return ""; } /** * Get formatted modules array content for Dependencies.toml dependency. 
* <code> * modules = [ * {org = "ballerinax", packageName = "redis", moduleName = "redis"} * ] * </code> * * @param dependencyModules modules of the given dependency package * @param isCurrentPackage is modules array generating for current package * @param projectPath project path * @return formatted modules array content */ private static String getDependencyModulesArrayContent(List<ModuleDependency> dependencyModules, boolean isCurrentPackage, Path projectPath) { StringBuilder modulesContent = new StringBuilder(); if (isCurrentPackage) { for (ModuleDependency module : dependencyModules) { String currentPkgName = ProjectUtils.defaultName(projectPath).value(); String modulePkgPart = module.getModuleName().split("\\.")[0]; String currentPkgModuleName = module.getModuleName().replaceFirst(modulePkgPart, currentPkgName); modulesContent.append("\t{org = \"").append(module.getOrg()) .append("\", packageName = \"").append(currentPkgName) .append("\", moduleName = \"").append(currentPkgModuleName) .append("\"},\n"); } } else { for (ModuleDependency module : dependencyModules) { modulesContent.append("\t{org = \"").append(module.getOrg()) .append("\", packageName = \"").append(module.getPackageName()) .append("\", moduleName = \"").append(module.getModuleName()) .append("\"},\n"); } } String modulesPart = modulesContent.toString(); modulesPart = removeLastCharacter(trimStartingWhitespaces(modulesPart)); return "modules = [\n" + modulesPart + "\n]\n"; } /** * Write Ballerina.toml package attribute array from template package.json to new project Ballerina.toml. 
* * @param balTomlPath Ballerina.toml path of the new project * @param attributeArray package attribute values array * @param attributeName package attribute name * @throws IOException when error occurs writing to the Ballerina.toml */ private static void writePackageAttributeArray(Path balTomlPath, List<String> attributeArray, String attributeName) throws IOException { if (attributeArray != null && !attributeArray.isEmpty()) { StringJoiner stringJoiner = new StringJoiner(","); for (String attributeElement : attributeArray) { stringJoiner.add("\"" + attributeElement + "\""); } Files.writeString(balTomlPath, "\n" + attributeName + " = [" + stringJoiner + "]", StandardOpenOption.APPEND); } } /** * Write Ballerina.toml package attribute from template package.json to new project Ballerina.toml. * * @param balTomlPath Ballerina.toml path of the new project * @param attributeValue package attribute value * @param attributeName package attribute name * @throws IOException when error occurs writing to the Ballerina.toml */ private static void writePackageAttributeValue(Path balTomlPath, String attributeValue, String attributeName) throws IOException { if (attributeValue != null && !attributeValue.isEmpty()) { Files.writeString(balTomlPath, "\n" + attributeName + " = \"" + attributeValue + "\"", StandardOpenOption.APPEND); } } /** * Find the package name for a given template. * * @param template template name * @return packageName - package name of the module */ public static String findPkgName(String template) { String[] orgSplit = template.split("/"); String packageName = ""; String packagePart = (orgSplit.length > 1) ? orgSplit[1] : ""; String[] pkgSplit = packagePart.split(":"); packageName = pkgSplit[0].trim(); return packageName; } /** * Find the organization for a given template. 
* * @param template template name * @return orgName - org of the module */ public static String findOrg(String template) { String[] orgSplit = template.split("/"); return orgSplit[0].trim(); } /** * Find the package version for a given template. * * @param template template name * @return version - version of the module */ public static String findPkgVersion(String template) { String[] orgSplit = template.split("/"); String packagePart = (orgSplit.length > 1) ? orgSplit[1] : ""; String[] pkgSplit = packagePart.split(":"); if (pkgSplit.length > 1) { return pkgSplit[1].trim(); } else { return null; } } /** * Initialize a new ballerina project in the given path. * * @param path project path * @param packageName name of the package * @param template package template * @param balFilesExist if bal files exist in the project * @throws IOException If any IO exception occurred * @throws URISyntaxException If any URISyntaxException occurred */ public static void initPackageByTemplate(Path path, String packageName, String template, boolean balFilesExist) throws IOException, URISyntaxException { applyTemplate(path, template, balFilesExist); if (template.equalsIgnoreCase(LIB_DIR)) { initLibPackage(path, packageName); Path source = path.resolve("lib.bal"); Files.move(source, source.resolveSibling(guessPkgName(packageName, template) + ".bal"), StandardCopyOption.REPLACE_EXISTING); } else if (template.equalsIgnoreCase(TOOL_DIR)) { initToolPackage(path, packageName); } else { initPackage(path, packageName); } createDefaultGitignore(path); createDefaultDevContainer(path); } private static void createDefaultGitignore(Path path) throws IOException { Path gitignore = path.resolve(ProjectConstants.GITIGNORE_FILE_NAME); if (Files.notExists(gitignore)) { Files.createFile(gitignore); } if (Files.size(gitignore) == 0) { String defaultGitignore = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + GITIGNORE); Files.write(gitignore, defaultGitignore.getBytes(StandardCharsets.UTF_8)); } } 
private static void createDefaultDevContainer(Path path) throws IOException {
    Path devContainer = path.resolve(ProjectConstants.DEVCONTAINER);
    if (Files.notExists(devContainer)) {
        Files.createFile(devContainer);
    }
    // Only seed content when the file is empty so an existing devcontainer is never clobbered.
    if (Files.size(devContainer) == 0) {
        String defaultDevContainer = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + DEVCONTAINER);
        defaultDevContainer = defaultDevContainer.replace("latest", RepoUtils.getBallerinaVersion());
        Files.write(devContainer, defaultDevContainer.getBytes(StandardCharsets.UTF_8));
    }
}

/**
 * Get the list of templates.
 *
 * @return list of template directory names (empty on any lookup failure)
 */
public static List<String> getTemplates() {
    try {
        Path templateDir = getTemplatePath();
        List<String> templates;
        // Files.walk returns a lazily populated Stream backed by an open directory handle;
        // it must be closed, so collect inside try-with-resources (the original leaked it).
        try (Stream<Path> walk = Files.walk(templateDir, 1)) {
            templates = walk.filter(Files::isDirectory)
                    .filter(directory -> !templateDir.equals(directory))
                    .filter(directory -> directory.getFileName() != null)
                    .map(directory -> directory.getFileName())
                    .map(fileName -> fileName.toString())
                    .collect(Collectors.toList());
        }
        if (null != jarFs) {
            // Inside a jar filesystem the names carry the jar separator; strip it.
            return templates.stream().map(t -> t
                            .replace(jarFs.getSeparator(), ""))
                    .collect(Collectors.toList());
        } else {
            return templates;
        }
    } catch (IOException | URISyntaxException e) {
        // Best-effort: callers treat "no templates" as an empty list, not an error.
        return new ArrayList<String>();
    }
}

/**
 * Get the path to the given template.
 *
 * @return path of the given template
 * @throws URISyntaxException if any URISyntaxException occurred
 */
private static Path getTemplatePath() throws URISyntaxException {
    URI uri = CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES).toURI();
    if (uri.toString().contains("!")) {
        // Resource lives inside a jar: resolve against the jar filesystem.
        final String[] array = uri.toString().split("!");
        return jarFs.getPath(array[1]);
    } else {
        return Paths.get(uri);
    }
}

/**
 * Apply the template to the created module.
 *
 * @param modulePath path to the module
 * @param template template name
 * @param balFilesExist if bal files exist in the project
 * @throws IOException if any IOException occurred
 * @throws URISyntaxException if any URISyntaxException occurred
 */
public static void applyTemplate(Path modulePath, String template, boolean balFilesExist)
        throws IOException, URISyntaxException {
    Path templateDir = getTemplatePath().resolve(template);
    if (template.equalsIgnoreCase(MAIN_TEMPLATE)) {
        // "main" is the default template plus the main-specific files layered on top.
        templateDir = getTemplatePath().resolve(DEFAULT_TEMPLATE);
        Path tempDirTest = getTemplatePath().resolve(MAIN_TEMPLATE);
        Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
        Files.walkFileTree(tempDirTest, new FileUtils.Copy(tempDirTest, modulePath));
    } else if (template.equalsIgnoreCase(DEFAULT_TEMPLATE)) {
        // Don't overwrite user sources: only copy the default template into a bal-file-free dir.
        if (!balFilesExist) {
            Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
        }
    } else {
        Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
    }
}

/**
 * Initialize a new ballerina project in the given path.
* * @param path Project path * @throws IOException If any IO exception occurred */ public static void initPackage(Path path, String packageName) throws IOException { Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML); Files.createFile(ballerinaToml); String defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-app.toml"); defaultManifest = defaultManifest .replaceAll(ORG_NAME, ProjectUtils.guessOrgName()) .replaceAll(PKG_NAME, guessPkgName(packageName, "app")) .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion()); Files.write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8)); } private static void initLibPackage(Path path, String packageName) throws IOException { Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML); Files.createFile(ballerinaToml); String defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-lib.toml"); defaultManifest = defaultManifest.replaceAll(ORG_NAME, ProjectUtils.guessOrgName()) .replaceAll(PKG_NAME, guessPkgName(packageName, "lib")) .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion()); write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8)); String packageMd = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/Package.md"); write(path.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME), packageMd.getBytes(StandardCharsets.UTF_8)); } /** * Initialize a new ballerina tool package in the given path. 
* * @param path Project path * @param packageName package name * @throws IOException If any IO exception occurred */ private static void initToolPackage(Path path, String packageName) throws IOException { Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML); Files.createFile(ballerinaToml); String defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-app.toml"); defaultManifest = defaultManifest .replaceAll(ORG_NAME, ProjectUtils.guessOrgName()) .replaceAll(PKG_NAME, guessPkgName(packageName, TOOL_DIR)) .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion()); Files.write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8)); Path balToolToml = path.resolve(ProjectConstants.BAL_TOOL_TOML); Files.createFile(balToolToml); String balToolManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-tool.toml"); balToolManifest = balToolManifest.replaceAll(TOOL_ID, guessPkgName(packageName, TOOL_DIR)); write(balToolToml, balToolManifest.getBytes(StandardCharsets.UTF_8)); } protected static PackageVersion findLatest(List<PackageVersion> packageVersions) { if (packageVersions.isEmpty()) { return null; } PackageVersion latestVersion = packageVersions.get(0); for (PackageVersion pkgVersion : packageVersions) { latestVersion = getLatest(latestVersion, pkgVersion); } return latestVersion; } protected static PackageVersion getLatest(PackageVersion v1, PackageVersion v2) { SemanticVersion semVer1 = v1.value(); SemanticVersion semVer2 = v2.value(); boolean isV1PreReleaseVersion = semVer1.isPreReleaseVersion(); boolean isV2PreReleaseVersion = semVer2.isPreReleaseVersion(); if (isV1PreReleaseVersion ^ isV2PreReleaseVersion) { return isV1PreReleaseVersion ? v2 : v1; } else { return semVer1.greaterThanOrEqualTo(semVer2) ? 
v1 : v2; } } public static List<PackageVersion> getPackageVersions(Path balaPackagePath) { List<Path> versions = new ArrayList<>(); if (Files.exists(balaPackagePath)) { Stream<Path> collectVersions; try { collectVersions = Files.list(balaPackagePath); } catch (IOException e) { throw new RuntimeException("Error while accessing Distribution cache: " + e.getMessage()); } versions.addAll(collectVersions.collect(Collectors.toList())); } return pathToVersions(versions); } protected static List<PackageVersion> pathToVersions(List<Path> versions) { List<PackageVersion> availableVersions = new ArrayList<>(); versions.stream().map(path -> Optional.ofNullable(path) .map(Path::getFileName) .map(Path::toString) .orElse("0.0.0")).forEach(version -> { try { availableVersions.add(PackageVersion.from(version)); } catch (ProjectException ignored) { } }); return availableVersions; } /** * Remove starting whitespaces of a string. * * @param str given string * @return starting whitespaces removed string */ private static String trimStartingWhitespaces(String str) { return str.replaceFirst("\\s++$", ""); } /** * Remove last character of a string. * * @param str given string * @return last character removed string */ private static String removeLastCharacter(String str) { return str.substring(0, str.length() - 1); } /** * Check if files of the given template exist in a given path. 
* * @param template given string * @param packagePath given path * @throws URISyntaxException if URI syntax exception occurred * @throws IOException if IO exception occurred */ public static String checkTemplateFilesExists(String template, Path packagePath) throws URISyntaxException, IOException { Path templateDir = getTemplatePath().resolve(template); Stream<Path> paths = Files.list(templateDir); List<Path> templateFilePathList = paths.collect(Collectors.toList()); StringBuilder existingFiles = new StringBuilder(); for (Path path : templateFilePathList) { Optional<String> fileNameOptional = Optional.ofNullable(path.getFileName()).map(path1 -> path1.toString()); if (fileNameOptional.isPresent()) { String fileName = fileNameOptional.get(); if (!fileName.endsWith(ProjectConstants.BLANG_SOURCE_EXT) && Files.exists(packagePath.resolve(fileName))) { existingFiles.append(fileName).append(FILE_STRING_SEPARATOR); } } } return existingFiles.toString(); } /** * Check if common files of a package exist in a given path. * * @param packagePath given path */ public static String checkPackageFilesExists(Path packagePath) { String[] packageFiles = {DEPENDENCIES_TOML, BAL_TOOL_TOML, ProjectConstants.PACKAGE_MD_FILE_NAME, ProjectConstants.MODULE_MD_FILE_NAME, ProjectConstants.MODULES_ROOT, ProjectConstants.TEST_DIR_NAME}; StringBuilder existingFiles = new StringBuilder(); for (String file : packageFiles) { if (Files.exists(packagePath.resolve(file))) { existingFiles.append(file).append(FILE_STRING_SEPARATOR); } } return existingFiles.toString(); } /** * Check if .bal files exist in a given path. * * @param packagePath given path * @return error message if files exists */ public static boolean balFilesExists(Path packagePath) throws IOException { return Files.list(packagePath).anyMatch(path -> path.toString().endsWith(ProjectConstants.BLANG_SOURCE_EXT)); } /** * Get the latest version from a given list of versions. 
* * @param versions the list of strings * @return the latest version */ static String getLatestVersion(List<String> versions) { String latestVersion = versions.get(0); for (String version : versions) { if (SemanticVersion.from(version).greaterThan(SemanticVersion.from(latestVersion))) { latestVersion = version; } } return latestVersion; } /** * Pull the dependencies of a given package from central. * * @param orgName org name of the dependent package * @param packageName name of the dependent package * @param version version of the dependent package * @return true if the dependent package compilation has errors */ private static void printDiagnostics(Collection<Diagnostic> diagnostics) { for (Diagnostic diagnostic: diagnostics) { CommandUtil.printError(errStream, diagnostic.toString(), null, false); } } private static boolean hasProvidedPlatformDeps(PackageCompilation packageCompilation) { Set<Object> providedDeps = new HashSet<>(); packageCompilation.getResolution().allDependencies() .stream() .map(ResolvedPackageDependency::packageInstance) .map(Package::manifest) .flatMap(pkgManifest -> pkgManifest.platforms().values().stream()) .filter(Objects::nonNull) .flatMap(pkgPlatform -> pkgPlatform.dependencies().stream()) .filter(dependency -> "provided".equals(dependency.get("scope"))) .forEach(providedDeps::add); return !providedDeps.isEmpty(); } }
Made this change to fix the CCE (ClassCastException) reported for the `redeclared variable ANS7` case:
```
const NUM1 = -1;
const int NUM2 = -9223372036854775807 - 1;
const int ANS7 = NUM2 - 1;
const int ANS7 = NUM2 + NUM1;
```
Is this change acceptable?
/**
 * Verifies that a constant with the same name has not already been declared with a
 * different value, reporting a diagnostic on conflict.
 * <p>
 * Values are compared with {@code equals} rather than {@code toString}: the string
 * comparison NPE'd when the previously recorded value was {@code null} (the
 * {@code lastValue == null} branch was dead because {@code lastValue.toString()} ran
 * first) and mis-compared values whose textual form is not canonical (e.g. the same
 * {@code int} reached via different constant expressions).
 *
 * @param constant the constant declaration to validate and record
 */
private void checkUniqueness(BLangConstant constant) {
    if (constant.symbol.kind != SymbolKind.CONSTANT) {
        return;
    }
    String nameString = constant.name.value;
    BLangConstantValue value = constant.symbol.value;
    if (!constantMap.containsKey(nameString)) {
        // First declaration under this name: record it (value may be null if unresolved).
        constantMap.put(nameString, value);
        return;
    }
    if (value == null) {
        dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString);
        return;
    }
    BLangConstantValue lastValue = constantMap.get(nameString);
    if (!value.equals(lastValue)) {
        if (lastValue == null) {
            dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, nameString);
        } else {
            dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL_WITH_ANOTHER,
                    nameString, lastValue);
        }
    }
}
if (!value.toString().equals(lastValue.toString())) {
/**
 * Validates that the given constant does not redeclare an existing constant name with a
 * different value; emits a diagnostic when it does, otherwise records the value.
 *
 * @param constant the constant declaration under inspection
 */
private void checkUniqueness(BLangConstant constant) {
    if (constant.symbol.kind != SymbolKind.CONSTANT) {
        return;
    }
    String name = constant.name.value;
    BLangConstantValue newValue = constant.symbol.value;
    if (!constantMap.containsKey(name)) {
        // First time we see this name: remember its value for later comparisons.
        constantMap.put(name, newValue);
        return;
    }
    if (newValue == null) {
        dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, name);
        return;
    }
    BLangConstantValue previousValue = constantMap.get(name);
    if (newValue.equals(previousValue)) {
        // Identical redefinition: nothing to report.
        return;
    }
    if (previousValue == null) {
        dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL, name);
    } else {
        dlog.error(constant.name.pos, DiagnosticErrorCode.ALREADY_INITIALIZED_SYMBOL_WITH_ANOTHER,
                name, previousValue);
    }
}
class ConstantValueResolver extends BLangNodeVisitor { private static final CompilerContext.Key<ConstantValueResolver> CONSTANT_VALUE_RESOLVER_KEY = new CompilerContext.Key<>(); private BConstantSymbol currentConstSymbol; private BLangConstantValue result; private BLangDiagnosticLog dlog; private Location currentPos; private BLangAnonymousModelHelper anonymousModelHelper; private SymbolEnv symEnv; private Names names; private SymbolTable symTable; private Types types; private PackageID pkgID; private Map<BConstantSymbol, BLangConstant> unresolvedConstants = new HashMap<>(); private Map<String, BLangConstantValue> constantMap = new HashMap<>(); private ArrayList<BConstantSymbol> resolvingConstants = new ArrayList<>(); private HashSet<BConstantSymbol> unresolvableConstants = new HashSet<>(); private HashMap<BSymbol, BLangTypeDefinition> createdTypeDefinitions = new HashMap<>(); private Stack<String> anonTypeNameSuffixes = new Stack<>(); private ConstantValueResolver(CompilerContext context) { context.put(CONSTANT_VALUE_RESOLVER_KEY, this); this.dlog = BLangDiagnosticLog.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.names = Names.getInstance(context); this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context); this.types = Types.getInstance(context); } public static ConstantValueResolver getInstance(CompilerContext context) { ConstantValueResolver constantValueResolver = context.get(CONSTANT_VALUE_RESOLVER_KEY); if (constantValueResolver == null) { constantValueResolver = new ConstantValueResolver(context); } return constantValueResolver; } public void resolve(List<BLangConstant> constants, PackageID packageID, SymbolEnv symEnv) { this.dlog.setCurrentPackageId(packageID); this.pkgID = packageID; this.symEnv = symEnv; constants.forEach(constant -> this.unresolvedConstants.put(constant.symbol, constant)); constants.forEach(constant -> constant.accept(this)); constantMap.clear(); } @Override public void visit(BLangConstant 
constant) { if (!unresolvedConstants.containsKey(constant.symbol)) { return; } BConstantSymbol tempCurrentConstSymbol = this.currentConstSymbol; this.currentConstSymbol = constant.symbol; this.resolvingConstants.add(this.currentConstSymbol); this.currentConstSymbol.value = constructBLangConstantValue(constant.expr); this.resolvingConstants.remove(this.currentConstSymbol); this.anonTypeNameSuffixes.push(constant.name.value); updateConstantType(constant); this.anonTypeNameSuffixes.pop(); checkUniqueness(constant); unresolvedConstants.remove(this.currentConstSymbol); this.currentConstSymbol = tempCurrentConstSymbol; } @Override public void visit(BLangLiteral literal) { this.result = new BLangConstantValue(literal.value, literal.getBType()); } @Override public void visit(BLangNumericLiteral literal) { this.result = new BLangConstantValue(literal.value, literal.getBType()); } @Override public void visit(BLangConstRef constRef) { this.result = ((BConstantSymbol) constRef.symbol).value; } @Override public void visit(BLangSimpleVarRef varRef) { if (varRef.symbol == null || (varRef.symbol.tag & SymTag.CONSTANT) != SymTag.CONSTANT) { this.result = null; return; } BConstantSymbol constSymbol = (BConstantSymbol) varRef.symbol; BLangConstantValue constVal = constSymbol.value; if (constVal != null) { this.result = constVal; return; } if (this.currentConstSymbol == constSymbol) { dlog.error(varRef.pos, DiagnosticErrorCode.SELF_REFERENCE_CONSTANT, constSymbol.name); return; } if (!this.unresolvedConstants.containsKey(constSymbol)) { if (this.unresolvableConstants.contains(constSymbol)) { this.result = null; return; } this.unresolvableConstants.add(constSymbol); dlog.error(varRef.pos, DiagnosticErrorCode.CANNOT_RESOLVE_CONST, constSymbol.name.value); this.result = null; return; } if (this.resolvingConstants.contains(constSymbol)) { for (BConstantSymbol symbol : this.resolvingConstants) { this.unresolvableConstants.add(symbol); } dlog.error(varRef.pos, 
DiagnosticErrorCode.CONSTANT_CYCLIC_REFERENCE, this.resolvingConstants); this.result = null; return; } this.unresolvedConstants.get(constSymbol).accept(this); this.result = constSymbol.value; } @Override public void visit(BLangRecordLiteral recordLiteral) { Map<String, BLangConstantValue> mapConstVal = new LinkedHashMap<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { String key; BLangConstantValue value; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; NodeKind nodeKind = keyValuePair.key.expr.getKind(); if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) { key = (String) ((BLangLiteral) keyValuePair.key.expr).value; } else if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) { key = ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value; } else { continue; } value = constructBLangConstantValue(keyValuePair.valueExpr); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; key = varNameField.variableName.value; value = constructBLangConstantValue(varNameField); } else { BLangConstantValue spreadOpConstValue = constructBLangConstantValue(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr); if (spreadOpConstValue != null) { mapConstVal.putAll((Map<String, BLangConstantValue>) spreadOpConstValue.value); } continue; } mapConstVal.put(key, value); } this.result = new BLangConstantValue(mapConstVal, recordLiteral.getBType()); } @Override public void visit(BLangListConstructorExpr listConstructorExpr) { List<BLangExpression> memberExprs = listConstructorExpr.exprs; List<BLangConstantValue> listConstValue = new ArrayList<>(memberExprs.size()); for (BLangExpression memberExpr : memberExprs) { listConstValue.add(constructBLangConstantValue(memberExpr)); } this.result = new BLangConstantValue(listConstValue, 
listConstructorExpr.getBType()); } @Override public void visit(BLangListConstructorExpr.BLangArrayLiteral listConstructorExpr) { visit((BLangListConstructorExpr) listConstructorExpr); } @Override public void visit(BLangListConstructorExpr.BLangTupleLiteral listConstructorExpr) { visit((BLangListConstructorExpr) listConstructorExpr); } @Override public void visit(BLangBinaryExpr binaryExpr) { BLangConstantValue lhs = constructBLangConstantValue(binaryExpr.lhsExpr); BLangConstantValue rhs = constructBLangConstantValue(binaryExpr.rhsExpr); this.result = calculateConstValue(lhs, rhs, binaryExpr.opKind); } public void visit(BLangGroupExpr groupExpr) { this.result = constructBLangConstantValue(groupExpr.expression); } public void visit(BLangUnaryExpr unaryExpr) { BLangConstantValue value = constructBLangConstantValue(unaryExpr.expr); this.result = evaluateUnaryOperator(value, unaryExpr.operator); } private BLangConstantValue calculateConstValue(BLangConstantValue lhs, BLangConstantValue rhs, OperatorKind kind) { if (lhs == null || rhs == null || lhs.value == null || rhs.value == null) { return new BLangConstantValue(null, this.currentConstSymbol.type); } try { switch (kind) { case ADD: return calculateAddition(lhs, rhs); case SUB: return calculateSubtract(lhs, rhs); case MUL: return calculateMultiplication(lhs, rhs); case DIV: return calculateDivision(lhs, rhs); case MOD: return calculateMod(lhs, rhs); case BITWISE_AND: return calculateBitWiseOp(lhs, rhs, (a, b) -> a & b); case BITWISE_OR: return calculateBitWiseOp(lhs, rhs, (a, b) -> a | b); case BITWISE_LEFT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a << b); case BITWISE_RIGHT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a >> b); case BITWISE_UNSIGNED_RIGHT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a >>> b); case BITWISE_XOR: return calculateBitWiseOp(lhs, rhs, (a, b) -> a ^ b); default: dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED); } } catch 
(NumberFormatException nfe) { } catch (ArithmeticException ae) { dlog.error(currentPos, DiagnosticErrorCode.INVALID_CONST_EXPRESSION, ae.getMessage()); } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue evaluateUnaryOperator(BLangConstantValue value, OperatorKind kind) { if (value == null || value.value == null) { return new BLangConstantValue(null, this.currentConstSymbol.type); } try { switch (kind) { case ADD: return new BLangConstantValue(value.value, currentConstSymbol.type); case SUB: return calculateNegation(value); case BITWISE_COMPLEMENT: return calculateBitWiseComplement(value); case NOT: return calculateBooleanComplement(value); } } catch (ClassCastException ce) { } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue calculateBitWiseOp(BLangConstantValue lhs, BLangConstantValue rhs, BiFunction<Long, Long, Long> func) { switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: Long val = func.apply((Long) lhs.value, (Long) rhs.value); return new BLangConstantValue(val, this.currentConstSymbol.type); default: dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED); } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue calculateAddition(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: case TypeTags.BYTE: result = Math.addExact((Long) lhs.value, (Long) rhs.value); break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) + Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.add(rhsDecimal, 
MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; case TypeTags.STRING: result = String.valueOf(lhs.value) + String.valueOf(rhs.value); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateSubtract(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: case TypeTags.BYTE: result = Math.subtractExact((Long) lhs.value, (Long) rhs.value); break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) - Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.subtract(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateMultiplication(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: case TypeTags.BYTE: result = Math.multiplyExact((Long) lhs.value, (Long) rhs.value); break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) * Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.multiply(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateDivision(BLangConstantValue 
lhs, BLangConstantValue rhs) { Object result = null; switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: case TypeTags.BYTE: if ((Long) lhs.value == Long.MIN_VALUE && (Long) rhs.value == -1) { throw new ArithmeticException("long overflow"); } result = (Long) ((Long) lhs.value / (Long) rhs.value); break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) / Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.divide(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateMod(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) ((Long) lhs.value % (Long) rhs.value); break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) % Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.remainder(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private Object calculateNegationForInt(BLangConstantValue value) { if ((Long) (value.value) == Long.MIN_VALUE) { throw new ArithmeticException("long overflow"); } return -1 * ((Long) (value.value)); } private Object calculateNegationForFloat(BLangConstantValue value) { return 
String.valueOf(-1 * Double.parseDouble(String.valueOf(value.value))); } private Object calculateNegationForDecimal(BLangConstantValue value) { BigDecimal valDecimal = new BigDecimal(String.valueOf(value.value), MathContext.DECIMAL128); BigDecimal negDecimal = new BigDecimal(String.valueOf(-1), MathContext.DECIMAL128); BigDecimal resultDecimal = valDecimal.multiply(negDecimal, MathContext.DECIMAL128); return resultDecimal.toPlainString(); } private BLangConstantValue calculateNegation(BLangConstantValue value) { Object result = null; BType constSymbolValType = value.type; int constSymbolValTypeTag = constSymbolValType.tag; try { switch (constSymbolValTypeTag) { case TypeTags.INT: result = calculateNegationForInt(value); break; case TypeTags.FLOAT: result = calculateNegationForFloat(value); break; case TypeTags.DECIMAL: result = calculateNegationForDecimal(value); break; } } catch (NumberFormatException nfe) { } catch (ArithmeticException ae) { result = null; dlog.error(currentPos, DiagnosticErrorCode.INVALID_CONST_EXPRESSION, ae.getMessage()); } return new BLangConstantValue(result, constSymbolValType); } private BLangConstantValue calculateBitWiseComplement(BLangConstantValue value) { Object result = null; if (Types.getReferredType(this.currentConstSymbol.type).tag == TypeTags.INT) { result = ~((Long) (value.value)); } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateBooleanComplement(BLangConstantValue value) { Object result = null; if (Types.getReferredType(this.currentConstSymbol.type).tag == TypeTags.BOOLEAN) { result = !((Boolean) (value.value)); } return new BLangConstantValue(result, currentConstSymbol.type); } BLangConstantValue constructBLangConstantValueWithExactType(BLangExpression expression, BConstantSymbol constantSymbol, SymbolEnv env) { return constructBLangConstantValueWithExactType(expression, constantSymbol, env, new Stack<>()); } BLangConstantValue 
constructBLangConstantValueWithExactType(BLangExpression expression, BConstantSymbol constantSymbol, SymbolEnv env, Stack<String> anonTypeNameSuffixes) { BLangConstantValue value = constructBLangConstantValue(expression); constantSymbol.value = value; if (value == null) { return value; } this.anonTypeNameSuffixes = anonTypeNameSuffixes; updateConstantType(constantSymbol, expression, env); return value; } private BLangConstantValue constructBLangConstantValue(BLangExpression node) { if (!node.typeChecked) { return null; } switch (node.getKind()) { case LITERAL: case NUMERIC_LITERAL: case RECORD_LITERAL_EXPR: case LIST_CONSTRUCTOR_EXPR: case ARRAY_LITERAL_EXPR: case TUPLE_LITERAL_EXPR: case SIMPLE_VARIABLE_REF: case BINARY_EXPR: case GROUP_EXPR: case UNARY_EXPR: BLangConstantValue prevResult = this.result; Location prevPos = this.currentPos; this.currentPos = node.pos; this.result = null; node.accept(this); BLangConstantValue newResult = this.result; this.result = prevResult; this.currentPos = prevPos; return newResult; default: return null; } } private void updateConstantType(BLangConstant constant) { BConstantSymbol symbol = constant.symbol; updateConstantType(symbol, constant.expr, symEnv); BType resolvedType = symbol.type; if (resolvedType.tag == TypeTags.INTERSECTION) { BIntersectionType intersectionType = (BIntersectionType) resolvedType; if (intersectionType.effectiveType.tag == TypeTags.RECORD) { addAssociatedTypeDefinition(constant, intersectionType); } } } private void updateConstantType(BConstantSymbol symbol, BLangExpression expr, SymbolEnv env) { BType type = Types.getReferredType(symbol.type); if (type.getKind() == TypeKind.FINITE) { return; } if (symbol.value == null) { return; } BType resolvedType = checkType(expr, symbol, symbol.value.value, type, symbol.pos, env); if (resolvedType == null) { return; } if (resolvedType.getKind() == TypeKind.INTERSECTION && isListOrMapping(type.tag)) { expr.setBType(((BIntersectionType) resolvedType).effectiveType); 
symbol.type = resolvedType; symbol.literalType = resolvedType; symbol.value.type = resolvedType; return; } expr.setBType(resolvedType); symbol.type = resolvedType; } private boolean isListOrMapping(int tag) { switch (tag) { case TypeTags.RECORD: case TypeTags.MAP: case TypeTags.ARRAY: case TypeTags.TUPLE: return true; } return false; } private BFiniteType createFiniteType(BConstantSymbol constantSymbol, BLangExpression expr) { BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, constantSymbol.flags, Names.EMPTY, constantSymbol.pkgID, null, constantSymbol.owner, constantSymbol.pos, VIRTUAL); BFiniteType finiteType = new BFiniteType(finiteTypeSymbol); finiteType.addValue(expr); return finiteType; } private BType checkType(BLangExpression expr, BConstantSymbol constantSymbol, Object value, BType type, Location pos, SymbolEnv env) { if (expr != null && expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (((BLangSimpleVarRef) expr).symbol.type.getKind() == TypeKind.FINITE || ((BLangSimpleVarRef) expr).symbol.type.getKind() == TypeKind.INTERSECTION)) { return ((BLangSimpleVarRef) expr).symbol.type; } type = Types.getReferredType(type); switch (type.tag) { case TypeTags.INT: case TypeTags.FLOAT: case TypeTags.DECIMAL: BLangNumericLiteral numericLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression(); return createFiniteType(constantSymbol, updateLiteral(numericLiteral, value, type, pos)); case TypeTags.BYTE: BLangNumericLiteral byteLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression(); return createFiniteType(constantSymbol, updateLiteral(byteLiteral, value, symTable.intType, pos)); case TypeTags.STRING: case TypeTags.NIL: case TypeTags.BOOLEAN: BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); return createFiniteType(constantSymbol, updateLiteral(literal, value, type, pos)); case TypeTags.MAP: case TypeTags.RECORD: if (value != null) { return createRecordType(expr, 
constantSymbol, value, pos, env); } return null; case TypeTags.ARRAY: case TypeTags.TUPLE: if (value != null) { return createTupleType(expr, constantSymbol, pos, value, env); } return null; default: return null; } } private BLangLiteral updateLiteral(BLangLiteral literal, Object value, BType type, Location pos) { literal.value = value; literal.isConstant = true; literal.setBType(type); literal.pos = pos; return literal; } private BField createField(BVarSymbol symbol, BType symbolType, String fieldName, Location pos) { symbol.type = symbolType; if (symbolType.getKind() == TypeKind.INTERSECTION) { for (BType memberType : ((BIntersectionType) symbolType).getConstituentTypes()) { if (memberType.getKind() == TypeKind.RECORD) { symbol.type = memberType; } } } BField field = new BField(Names.fromString(fieldName), pos, symbol); field.symbol.flags |= Flags.REQUIRED; return field; } private void createTypeDefinition(BRecordType type, Location pos, SymbolEnv env) { BRecordTypeSymbol recordSymbol = (BRecordTypeSymbol) type.tsymbol; BTypeDefinitionSymbol typeDefinitionSymbol = Symbols.createTypeDefinitionSymbol(type.tsymbol.flags, type.tsymbol.name, pkgID, null, env.scope.owner, pos, VIRTUAL); typeDefinitionSymbol.scope = new Scope(typeDefinitionSymbol); typeDefinitionSymbol.scope.define(names.fromString(typeDefinitionSymbol.name.value), typeDefinitionSymbol); type.tsymbol.scope = new Scope(type.tsymbol); for (BField field : ((HashMap<String, BField>) type.fields).values()) { type.tsymbol.scope.define(field.name, field.symbol); field.symbol.owner = recordSymbol; } typeDefinitionSymbol.type = type; recordSymbol.type = type; recordSymbol.typeDefinitionSymbol = typeDefinitionSymbol; recordSymbol.markdownDocumentation = new MarkdownDocAttachment(0); BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(new ArrayList<>(), type, pos); TypeDefBuilderHelper.populateStructureFields(types, symTable, null, names, recordTypeNode, type, type, pos, env, pkgID, null, 
0, false); recordTypeNode.sealed = true; type.restFieldType = new BNoType(TypeTags.NONE); BLangTypeDefinition typeDefinition = TypeDefBuilderHelper.createTypeDefinitionForTSymbol(null, typeDefinitionSymbol, recordTypeNode, env); typeDefinition.symbol.scope = new Scope(typeDefinition.symbol); typeDefinition.symbol.type = type; typeDefinition.flagSet = new HashSet<>(); typeDefinition.flagSet.add(Flag.PUBLIC); typeDefinition.flagSet.add(Flag.ANONYMOUS); createdTypeDefinitions.put(type.tsymbol, typeDefinition); } private BLangTypeDefinition findTypeDefinition(List<BLangTypeDefinition> typeDefinitionArrayList, String name) { for (int i = typeDefinitionArrayList.size() - 1; i >= 0; i--) { BLangTypeDefinition typeDefinition = typeDefinitionArrayList.get(i); if (typeDefinition.name.value.equals(name)) { return typeDefinition; } } return null; } private void addAssociatedTypeDefinition(BLangConstant constant, BIntersectionType immutableType) { BLangTypeDefinition typeDefinition = findTypeDefinition(symEnv.enclPkg.typeDefinitions, immutableType.effectiveType.tsymbol.name.value); constant.associatedTypeDefinition = typeDefinition; } private BType createRecordType(BLangExpression expr, BConstantSymbol constantSymbol, Object value, Location pos, SymbolEnv env) { if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return expr.getBType(); } HashMap<String, BLangConstantValue> constValueMap = (HashMap<String, BLangConstantValue>) value; for (BLangConstantValue memberValue : constValueMap.values()) { if (memberValue == null) { return null; } } Name genName = Names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID, this.anonTypeNameSuffixes)); BRecordTypeSymbol recordTypeSymbol = new BRecordTypeSymbol(SymTag.RECORD, constantSymbol.flags | Flags.ANONYMOUS, genName, constantSymbol.pkgID, null, constantSymbol.owner, pos, VIRTUAL); recordTypeSymbol.scope = constantSymbol.scope; BRecordType recordType = new BRecordType(recordTypeSymbol); 
recordType.tsymbol.name = genName; recordType.sealed = true; recordType.restFieldType = new BNoType(TypeTags.NONE); recordTypeSymbol.type = recordType; if (constValueMap.size() != 0) { if (!populateRecordFields(expr, constantSymbol, pos, constValueMap, recordType, env)) { return null; } } createTypeDefinition(recordType, pos, env); BIntersectionType intersectionType = ImmutableTypeCloner.getImmutableIntersectionType(pos, types, recordType, env, symTable, anonymousModelHelper, names, new HashSet<>()); return intersectionType; } private boolean populateRecordFields(BLangExpression expr, BConstantSymbol constantSymbol, Location pos, HashMap<String, BLangConstantValue> constValueMap, BRecordType recordType, SymbolEnv env) { for (RecordLiteralNode.RecordField field : ((BLangRecordLiteral) expr).fields) { String key; BVarSymbol newSymbol; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; BLangExpression exprValueField = keyValuePair.valueExpr; if (exprValueField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { key = keyValuePair.key.toString(); newSymbol = new BVarSymbol(constantSymbol.flags, Names.fromString(key), constantSymbol.pkgID, null, constantSymbol.owner, pos, VIRTUAL); BLangSimpleVarRef simpleVarRefExpr = (BLangSimpleVarRef) exprValueField; if (simpleVarRefExpr.symbol.type.getKind() == TypeKind.FINITE || simpleVarRefExpr.symbol.type.getKind() == TypeKind.INTERSECTION) { BType resolvedType = simpleVarRefExpr.symbol.type; recordType.fields.put(key, createField(newSymbol, resolvedType, key, pos)); keyValuePair.setBType(resolvedType); if (resolvedType.getKind() != TypeKind.FINITE) { constValueMap.get(key).type = resolvedType; if (resolvedType.getKind() == TypeKind.INTERSECTION) { simpleVarRefExpr.setBType(((BIntersectionType) resolvedType).effectiveType); } } continue; } } key = keyValuePair.key.toString(); newSymbol = new BVarSymbol(constantSymbol.flags, Names.fromString(key), 
constantSymbol.pkgID, null, constantSymbol.owner, pos, VIRTUAL); this.anonTypeNameSuffixes.push(key); BType newType = checkType(exprValueField, constantSymbol, constValueMap.get(key).value, constValueMap.get(key).type, pos, env); this.anonTypeNameSuffixes.pop(); if (newType == null) { return false; } keyValuePair.setBType(newType); if (newType.getKind() != TypeKind.FINITE) { constValueMap.get(key).type = newType; if (newType.getKind() == TypeKind.INTERSECTION) { exprValueField.setBType(((BIntersectionType) newType).effectiveType); } } recordType.fields.put(key, createField(newSymbol, newType, key, pos)); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; key = varNameField.variableName.value; newSymbol = new BVarSymbol(constantSymbol.flags, Names.fromString(key), constantSymbol.pkgID, null, constantSymbol.owner, pos, VIRTUAL); BType resolvedType = varNameField.symbol.type; varNameField.setBType(resolvedType); recordType.fields.put(key, createField(newSymbol, resolvedType, key, pos)); continue; } else { BLangExpression exprSpreadField = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; if (exprSpreadField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef simpleVarRefExpr = (BLangSimpleVarRef) exprSpreadField; if (simpleVarRefExpr.symbol.type.getKind() == TypeKind.FINITE || simpleVarRefExpr.symbol.type.getKind() == TypeKind.INTERSECTION) { BRecordType resolvedType = (BRecordType) ((BIntersectionType) simpleVarRefExpr.symbol.type).effectiveType; exprSpreadField.setBType(resolvedType); for (String spreadFieldKeys : ((HashMap<String, BField>) resolvedType.fields).keySet()) { newSymbol = new BVarSymbol(constantSymbol.flags, Names.fromString(spreadFieldKeys), constantSymbol.pkgID, null, constantSymbol.owner, pos, VIRTUAL); BType spreadFieldType = resolvedType.fields.get(spreadFieldKeys).type; 
recordType.fields.put(spreadFieldKeys, createField(newSymbol, spreadFieldType, spreadFieldKeys, pos)); } } continue; } } } return true; } private BType createTupleType(BLangExpression expr, BConstantSymbol constantSymbol, Location pos, Object constValue, SymbolEnv env) { if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return expr.getBType(); } List<BLangConstantValue> constValueList = (List<BLangConstantValue>) constValue; for (BLangConstantValue memberValue : constValueList) { if (memberValue == null) { return null; } } List<BLangExpression> memberExprs = ((BLangListConstructorExpr) expr).exprs; List<BType> tupleTypes = new ArrayList<>(constValueList.size()); for (int i = 0; i < memberExprs.size(); i++) { BLangExpression memberExpr = memberExprs.get(i); BLangConstantValue memberConstValue = constValueList.get(i); if (memberExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BType type = ((BLangSimpleVarRef) memberExpr).symbol.type; int tag = type.tag; if (tag == TypeTags.FINITE) { tupleTypes.add(type); continue; } if (tag == TypeTags.INTERSECTION) { memberConstValue.type = type; tupleTypes.add(type); continue; } } BType newType = checkType(memberExpr, constantSymbol, memberConstValue.value, memberConstValue.type, pos, env); if (newType == null) { return null; } tupleTypes.add(newType); if (newType.tag != TypeTags.FINITE) { memberConstValue.type = newType; memberExpr.setBType(newType.tag == TypeTags.INTERSECTION ? ((BIntersectionType) newType).effectiveType : newType); } } BTypeSymbol tupleTypeSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.enclPkg.symbol.pkgID, null, env.scope.owner, pos, VIRTUAL); return ImmutableTypeCloner.getImmutableIntersectionType(pos, types, new BTupleType(tupleTypeSymbol, tupleTypes), env, symTable, anonymousModelHelper, names, new HashSet<>()); } }
class ConstantValueResolver extends BLangNodeVisitor { private static final CompilerContext.Key<ConstantValueResolver> CONSTANT_VALUE_RESOLVER_KEY = new CompilerContext.Key<>(); private BConstantSymbol currentConstSymbol; private BLangConstantValue result; private BLangDiagnosticLog dlog; private Location currentPos; private BLangAnonymousModelHelper anonymousModelHelper; private SymbolEnv symEnv; private Names names; private SymbolTable symTable; private Types types; private PackageID pkgID; private Map<BConstantSymbol, BLangConstant> unresolvedConstants = new HashMap<>(); private Map<String, BLangConstantValue> constantMap = new HashMap<>(); private ArrayList<BConstantSymbol> resolvingConstants = new ArrayList<>(); private HashSet<BConstantSymbol> unresolvableConstants = new HashSet<>(); private HashMap<BSymbol, BLangTypeDefinition> createdTypeDefinitions = new HashMap<>(); private Stack<String> anonTypeNameSuffixes = new Stack<>(); private ConstantValueResolver(CompilerContext context) { context.put(CONSTANT_VALUE_RESOLVER_KEY, this); this.dlog = BLangDiagnosticLog.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.names = Names.getInstance(context); this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context); this.types = Types.getInstance(context); } public static ConstantValueResolver getInstance(CompilerContext context) { ConstantValueResolver constantValueResolver = context.get(CONSTANT_VALUE_RESOLVER_KEY); if (constantValueResolver == null) { constantValueResolver = new ConstantValueResolver(context); } return constantValueResolver; } public void resolve(List<BLangConstant> constants, PackageID packageID, SymbolEnv symEnv) { this.dlog.setCurrentPackageId(packageID); this.pkgID = packageID; this.symEnv = symEnv; constants.forEach(constant -> this.unresolvedConstants.put(constant.symbol, constant)); constants.forEach(constant -> constant.accept(this)); constantMap.clear(); } @Override public void visit(BLangConstant 
constant) { if (!unresolvedConstants.containsKey(constant.symbol)) { return; } BConstantSymbol tempCurrentConstSymbol = this.currentConstSymbol; this.currentConstSymbol = constant.symbol; this.resolvingConstants.add(this.currentConstSymbol); this.currentConstSymbol.value = constructBLangConstantValue(constant.expr); this.resolvingConstants.remove(this.currentConstSymbol); this.anonTypeNameSuffixes.push(constant.name.value); updateConstantType(constant); this.anonTypeNameSuffixes.pop(); checkUniqueness(constant); unresolvedConstants.remove(this.currentConstSymbol); this.currentConstSymbol = tempCurrentConstSymbol; } @Override public void visit(BLangLiteral literal) { this.result = new BLangConstantValue(literal.value, literal.getBType()); } @Override public void visit(BLangNumericLiteral literal) { this.result = new BLangConstantValue(literal.value, literal.getBType()); } @Override public void visit(BLangConstRef constRef) { this.result = ((BConstantSymbol) constRef.symbol).value; } @Override public void visit(BLangSimpleVarRef varRef) { if (varRef.symbol == null || (varRef.symbol.tag & SymTag.CONSTANT) != SymTag.CONSTANT) { this.result = null; return; } BConstantSymbol constSymbol = (BConstantSymbol) varRef.symbol; BLangConstantValue constVal = constSymbol.value; if (constVal != null) { this.result = constVal; return; } if (this.currentConstSymbol == constSymbol) { dlog.error(varRef.pos, DiagnosticErrorCode.SELF_REFERENCE_CONSTANT, constSymbol.name); return; } if (!this.unresolvedConstants.containsKey(constSymbol)) { if (this.unresolvableConstants.contains(constSymbol)) { this.result = null; return; } this.unresolvableConstants.add(constSymbol); dlog.error(varRef.pos, DiagnosticErrorCode.CANNOT_RESOLVE_CONST, constSymbol.name.value); this.result = null; return; } if (this.resolvingConstants.contains(constSymbol)) { for (BConstantSymbol symbol : this.resolvingConstants) { this.unresolvableConstants.add(symbol); } dlog.error(varRef.pos, 
DiagnosticErrorCode.CONSTANT_CYCLIC_REFERENCE, this.resolvingConstants); this.result = null; return; } this.unresolvedConstants.get(constSymbol).accept(this); this.result = constSymbol.value; } @Override public void visit(BLangRecordLiteral recordLiteral) { Map<String, BLangConstantValue> mapConstVal = new LinkedHashMap<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { String key; BLangConstantValue value; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; NodeKind nodeKind = keyValuePair.key.expr.getKind(); if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) { key = (String) ((BLangLiteral) keyValuePair.key.expr).value; } else if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) { key = ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value; } else { continue; } value = constructBLangConstantValue(keyValuePair.valueExpr); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; key = varNameField.variableName.value; value = constructBLangConstantValue(varNameField); } else { BLangConstantValue spreadOpConstValue = constructBLangConstantValue(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr); if (spreadOpConstValue != null) { mapConstVal.putAll((Map<String, BLangConstantValue>) spreadOpConstValue.value); } continue; } mapConstVal.put(key, value); } this.result = new BLangConstantValue(mapConstVal, recordLiteral.getBType()); } @Override public void visit(BLangListConstructorExpr listConstructorExpr) { List<BLangExpression> memberExprs = listConstructorExpr.exprs; List<BLangConstantValue> listConstValue = new ArrayList<>(memberExprs.size()); for (BLangExpression memberExpr : memberExprs) { listConstValue.add(constructBLangConstantValue(memberExpr)); } this.result = new BLangConstantValue(listConstValue, 
listConstructorExpr.getBType()); } @Override public void visit(BLangListConstructorExpr.BLangArrayLiteral listConstructorExpr) { visit((BLangListConstructorExpr) listConstructorExpr); } @Override public void visit(BLangListConstructorExpr.BLangTupleLiteral listConstructorExpr) { visit((BLangListConstructorExpr) listConstructorExpr); } @Override public void visit(BLangBinaryExpr binaryExpr) { BLangConstantValue lhs = constructBLangConstantValue(binaryExpr.lhsExpr); BLangConstantValue rhs = constructBLangConstantValue(binaryExpr.rhsExpr); this.result = calculateConstValue(lhs, rhs, binaryExpr.opKind); } public void visit(BLangGroupExpr groupExpr) { this.result = constructBLangConstantValue(groupExpr.expression); } public void visit(BLangUnaryExpr unaryExpr) { BLangConstantValue value = constructBLangConstantValue(unaryExpr.expr); this.result = evaluateUnaryOperator(value, unaryExpr.operator); } private BLangConstantValue calculateConstValue(BLangConstantValue lhs, BLangConstantValue rhs, OperatorKind kind) { if (lhs == null || rhs == null || lhs.value == null || rhs.value == null) { return new BLangConstantValue(null, this.currentConstSymbol.type); } try { switch (kind) { case ADD: return calculateAddition(lhs, rhs); case SUB: return calculateSubtract(lhs, rhs); case MUL: return calculateMultiplication(lhs, rhs); case DIV: return calculateDivision(lhs, rhs); case MOD: return calculateMod(lhs, rhs); case BITWISE_AND: return calculateBitWiseOp(lhs, rhs, (a, b) -> a & b); case BITWISE_OR: return calculateBitWiseOp(lhs, rhs, (a, b) -> a | b); case BITWISE_LEFT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a << b); case BITWISE_RIGHT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a >> b); case BITWISE_UNSIGNED_RIGHT_SHIFT: return calculateBitWiseOp(lhs, rhs, (a, b) -> a >>> b); case BITWISE_XOR: return calculateBitWiseOp(lhs, rhs, (a, b) -> a ^ b); default: dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED); } } catch 
(NumberFormatException nfe) { } catch (ArithmeticException ae) { dlog.error(currentPos, DiagnosticErrorCode.INVALID_CONST_EXPRESSION, ae.getMessage()); } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue evaluateUnaryOperator(BLangConstantValue value, OperatorKind kind) { if (value == null || value.value == null) { return new BLangConstantValue(null, this.currentConstSymbol.type); } try { switch (kind) { case ADD: return new BLangConstantValue(value.value, currentConstSymbol.type); case SUB: return calculateNegation(value); case BITWISE_COMPLEMENT: return calculateBitWiseComplement(value); case NOT: return calculateBooleanComplement(value); } } catch (ClassCastException ce) { } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue calculateBitWiseOp(BLangConstantValue lhs, BLangConstantValue rhs, BiFunction<Long, Long, Long> func) { switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: Long val = func.apply((Long) lhs.value, (Long) rhs.value); return new BLangConstantValue(val, this.currentConstSymbol.type); default: dlog.error(currentPos, DiagnosticErrorCode.CONSTANT_EXPRESSION_NOT_SUPPORTED); } return new BLangConstantValue(null, this.currentConstSymbol.type); } private BLangConstantValue calculateAddition(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: case TypeTags.BYTE: try { result = Math.addExact((Long) lhs.value, (Long) rhs.value); } catch (ArithmeticException ae) { dlog.error(currentPos, DiagnosticErrorCode.INT_RANGE_OVERFLOW_ERROR); } break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) + Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new 
BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.add(rhsDecimal, MathContext.DECIMAL128); resultDecimal = types.getValidDecimalNumber(currentPos, resultDecimal); result = resultDecimal != null ? resultDecimal.toPlainString() : null; break; case TypeTags.STRING: result = String.valueOf(lhs.value) + String.valueOf(rhs.value); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateSubtract(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: case TypeTags.BYTE: try { result = Math.subtractExact((Long) lhs.value, (Long) rhs.value); } catch (ArithmeticException ae) { dlog.error(currentPos, DiagnosticErrorCode.INT_RANGE_OVERFLOW_ERROR); } break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) - Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.subtract(rhsDecimal, MathContext.DECIMAL128); resultDecimal = types.getValidDecimalNumber(currentPos, resultDecimal); result = resultDecimal != null ? 
resultDecimal.toPlainString() : null; break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateMultiplication(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: case TypeTags.BYTE: try { result = Math.multiplyExact((Long) lhs.value, (Long) rhs.value); } catch (ArithmeticException ae) { dlog.error(currentPos, DiagnosticErrorCode.INT_RANGE_OVERFLOW_ERROR); } break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) * Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.multiply(rhsDecimal, MathContext.DECIMAL128); resultDecimal = types.getValidDecimalNumber(currentPos, resultDecimal); result = resultDecimal != null ? 
resultDecimal.toPlainString() : null; break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateDivision(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: case TypeTags.BYTE: if ((Long) lhs.value == Long.MIN_VALUE && (Long) rhs.value == -1) { dlog.error(currentPos, DiagnosticErrorCode.INT_RANGE_OVERFLOW_ERROR); return new BLangConstantValue(null, this.currentConstSymbol.type); } result = (Long) ((Long) lhs.value / (Long) rhs.value); break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) / Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.divide(rhsDecimal, MathContext.DECIMAL128); resultDecimal = types.getValidDecimalNumber(currentPos, resultDecimal); result = resultDecimal != null ? 
resultDecimal.toPlainString() : null; break; } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateMod(BLangConstantValue lhs, BLangConstantValue rhs) { Object result = null; switch (Types.getReferredType(this.currentConstSymbol.type).tag) { case TypeTags.INT: case TypeTags.BYTE: result = (Long) ((Long) lhs.value % (Long) rhs.value); break; case TypeTags.FLOAT: result = String.valueOf(Double.parseDouble(String.valueOf(lhs.value)) % Double.parseDouble(String.valueOf(rhs.value))); break; case TypeTags.DECIMAL: BigDecimal lhsDecimal = new BigDecimal(String.valueOf(lhs.value), MathContext.DECIMAL128); BigDecimal rhsDecimal = new BigDecimal(String.valueOf(rhs.value), MathContext.DECIMAL128); BigDecimal resultDecimal = lhsDecimal.remainder(rhsDecimal, MathContext.DECIMAL128); result = resultDecimal.toPlainString(); break; } return new BLangConstantValue(result, currentConstSymbol.type); } private Object calculateNegationForInt(BLangConstantValue value) { if ((Long) (value.value) == Long.MIN_VALUE) { dlog.error(currentPos, DiagnosticErrorCode.INT_RANGE_OVERFLOW_ERROR); return new BLangConstantValue(null, this.currentConstSymbol.type); } return -1 * ((Long) (value.value)); } private Object calculateNegationForFloat(BLangConstantValue value) { return String.valueOf(-1 * Double.parseDouble(String.valueOf(value.value))); } private Object calculateNegationForDecimal(BLangConstantValue value) { BigDecimal valDecimal = new BigDecimal(String.valueOf(value.value), MathContext.DECIMAL128); BigDecimal negDecimal = new BigDecimal(String.valueOf(-1), MathContext.DECIMAL128); BigDecimal resultDecimal = valDecimal.multiply(negDecimal, MathContext.DECIMAL128); return resultDecimal.toPlainString(); } private BLangConstantValue calculateNegation(BLangConstantValue value) { Object result = null; BType constSymbolValType = value.type; int constSymbolValTypeTag = constSymbolValType.tag; switch (constSymbolValTypeTag) { case TypeTags.INT: 
result = calculateNegationForInt(value); break; case TypeTags.FLOAT: result = calculateNegationForFloat(value); break; case TypeTags.DECIMAL: result = calculateNegationForDecimal(value); break; } return new BLangConstantValue(result, constSymbolValType); } private BLangConstantValue calculateBitWiseComplement(BLangConstantValue value) { Object result = null; if (Types.getReferredType(this.currentConstSymbol.type).tag == TypeTags.INT) { result = ~((Long) (value.value)); } return new BLangConstantValue(result, currentConstSymbol.type); } private BLangConstantValue calculateBooleanComplement(BLangConstantValue value) { Object result = null; if (Types.getReferredType(this.currentConstSymbol.type).tag == TypeTags.BOOLEAN) { result = !((Boolean) (value.value)); } return new BLangConstantValue(result, currentConstSymbol.type); } BLangConstantValue constructBLangConstantValueWithExactType(BLangExpression expression, BConstantSymbol constantSymbol, SymbolEnv env) { return constructBLangConstantValueWithExactType(expression, constantSymbol, env, new Stack<>()); } BLangConstantValue constructBLangConstantValueWithExactType(BLangExpression expression, BConstantSymbol constantSymbol, SymbolEnv env, Stack<String> anonTypeNameSuffixes) { BLangConstantValue value = constructBLangConstantValue(expression); constantSymbol.value = value; if (value == null) { return value; } this.anonTypeNameSuffixes = anonTypeNameSuffixes; updateConstantType(constantSymbol, expression, env); return value; } private BLangConstantValue constructBLangConstantValue(BLangExpression node) { if (!node.typeChecked) { return null; } switch (node.getKind()) { case LITERAL: case NUMERIC_LITERAL: case RECORD_LITERAL_EXPR: case LIST_CONSTRUCTOR_EXPR: case ARRAY_LITERAL_EXPR: case TUPLE_LITERAL_EXPR: case SIMPLE_VARIABLE_REF: case BINARY_EXPR: case GROUP_EXPR: case UNARY_EXPR: BLangConstantValue prevResult = this.result; Location prevPos = this.currentPos; this.currentPos = node.pos; this.result = null; 
node.accept(this); BLangConstantValue newResult = this.result; this.result = prevResult; this.currentPos = prevPos; return newResult; default: return null; } } private void updateConstantType(BLangConstant constant) { BConstantSymbol symbol = constant.symbol; updateConstantType(symbol, constant.expr, symEnv); BType resolvedType = symbol.type; if (resolvedType.tag == TypeTags.INTERSECTION) { BIntersectionType intersectionType = (BIntersectionType) resolvedType; if (intersectionType.effectiveType.tag == TypeTags.RECORD) { addAssociatedTypeDefinition(constant, intersectionType); } } } private void updateConstantType(BConstantSymbol symbol, BLangExpression expr, SymbolEnv env) { BType type = Types.getReferredType(symbol.type); if (type.getKind() == TypeKind.FINITE) { return; } if (symbol.value == null) { return; } BType resolvedType = checkType(expr, symbol, symbol.value.value, type, symbol.pos, env); if (resolvedType == null) { return; } if (resolvedType.getKind() == TypeKind.INTERSECTION && isListOrMapping(type.tag)) { expr.setBType(((BIntersectionType) resolvedType).effectiveType); symbol.type = resolvedType; symbol.literalType = resolvedType; symbol.value.type = resolvedType; return; } expr.setBType(resolvedType); symbol.type = resolvedType; } private boolean isListOrMapping(int tag) { switch (tag) { case TypeTags.RECORD: case TypeTags.MAP: case TypeTags.ARRAY: case TypeTags.TUPLE: return true; } return false; } private BFiniteType createFiniteType(BConstantSymbol constantSymbol, BLangExpression expr) { BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, constantSymbol.flags, Names.EMPTY, constantSymbol.pkgID, null, constantSymbol.owner, constantSymbol.pos, VIRTUAL); BFiniteType finiteType = new BFiniteType(finiteTypeSymbol); finiteType.addValue(expr); return finiteType; } private BType checkType(BLangExpression expr, BConstantSymbol constantSymbol, Object value, BType type, Location pos, SymbolEnv env) { if (expr != null && expr.getKind() 
== NodeKind.SIMPLE_VARIABLE_REF && (((BLangSimpleVarRef) expr).symbol.type.getKind() == TypeKind.FINITE || ((BLangSimpleVarRef) expr).symbol.type.getKind() == TypeKind.INTERSECTION)) { return ((BLangSimpleVarRef) expr).symbol.type; } type = Types.getReferredType(type); switch (type.tag) { case TypeTags.INT: case TypeTags.FLOAT: case TypeTags.DECIMAL: BLangNumericLiteral numericLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression(); return createFiniteType(constantSymbol, updateLiteral(numericLiteral, value, type, pos)); case TypeTags.BYTE: BLangNumericLiteral byteLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression(); return createFiniteType(constantSymbol, updateLiteral(byteLiteral, value, symTable.intType, pos)); case TypeTags.STRING: case TypeTags.NIL: case TypeTags.BOOLEAN: BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); return createFiniteType(constantSymbol, updateLiteral(literal, value, type, pos)); case TypeTags.MAP: case TypeTags.RECORD: if (value != null) { return createRecordType(expr, constantSymbol, value, pos, env); } return null; case TypeTags.ARRAY: case TypeTags.TUPLE: if (value != null) { return createTupleType(expr, constantSymbol, pos, value, env); } return null; default: return null; } } private BLangLiteral updateLiteral(BLangLiteral literal, Object value, BType type, Location pos) { literal.value = value; literal.isConstant = true; literal.setBType(type); literal.pos = pos; return literal; } private BField createField(BVarSymbol symbol, BType symbolType, String fieldName, Location pos) { symbol.type = symbolType; if (symbolType.getKind() == TypeKind.INTERSECTION) { for (BType memberType : ((BIntersectionType) symbolType).getConstituentTypes()) { if (memberType.getKind() == TypeKind.RECORD) { symbol.type = memberType; } } } BField field = new BField(Names.fromString(fieldName), pos, symbol); field.symbol.flags |= Flags.REQUIRED; return field; } private void 
createTypeDefinition(BRecordType type, Location pos, SymbolEnv env) { BRecordTypeSymbol recordSymbol = (BRecordTypeSymbol) type.tsymbol; BTypeDefinitionSymbol typeDefinitionSymbol = Symbols.createTypeDefinitionSymbol(type.tsymbol.flags, type.tsymbol.name, pkgID, null, env.scope.owner, pos, VIRTUAL); typeDefinitionSymbol.scope = new Scope(typeDefinitionSymbol); typeDefinitionSymbol.scope.define(names.fromString(typeDefinitionSymbol.name.value), typeDefinitionSymbol); type.tsymbol.scope = new Scope(type.tsymbol); for (BField field : ((HashMap<String, BField>) type.fields).values()) { type.tsymbol.scope.define(field.name, field.symbol); field.symbol.owner = recordSymbol; } typeDefinitionSymbol.type = type; recordSymbol.type = type; recordSymbol.typeDefinitionSymbol = typeDefinitionSymbol; recordSymbol.markdownDocumentation = new MarkdownDocAttachment(0); BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(new ArrayList<>(), type, pos); TypeDefBuilderHelper.populateStructureFields(types, symTable, null, names, recordTypeNode, type, type, pos, env, pkgID, null, 0, false); recordTypeNode.sealed = true; type.restFieldType = new BNoType(TypeTags.NONE); BLangTypeDefinition typeDefinition = TypeDefBuilderHelper.createTypeDefinitionForTSymbol(null, typeDefinitionSymbol, recordTypeNode, env); typeDefinition.symbol.scope = new Scope(typeDefinition.symbol); typeDefinition.symbol.type = type; typeDefinition.flagSet = new HashSet<>(); typeDefinition.flagSet.add(Flag.PUBLIC); typeDefinition.flagSet.add(Flag.ANONYMOUS); createdTypeDefinitions.put(type.tsymbol, typeDefinition); } private BLangTypeDefinition findTypeDefinition(List<BLangTypeDefinition> typeDefinitionArrayList, String name) { for (int i = typeDefinitionArrayList.size() - 1; i >= 0; i--) { BLangTypeDefinition typeDefinition = typeDefinitionArrayList.get(i); if (typeDefinition.name.value.equals(name)) { return typeDefinition; } } return null; } private void 
addAssociatedTypeDefinition(BLangConstant constant, BIntersectionType immutableType) { BLangTypeDefinition typeDefinition = findTypeDefinition(symEnv.enclPkg.typeDefinitions, immutableType.effectiveType.tsymbol.name.value); constant.associatedTypeDefinition = typeDefinition; } private BType createRecordType(BLangExpression expr, BConstantSymbol constantSymbol, Object value, Location pos, SymbolEnv env) { if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return expr.getBType(); } HashMap<String, BLangConstantValue> constValueMap = (HashMap<String, BLangConstantValue>) value; for (BLangConstantValue memberValue : constValueMap.values()) { if (memberValue == null) { return null; } } Name genName = Names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID, this.anonTypeNameSuffixes)); BRecordTypeSymbol recordTypeSymbol = new BRecordTypeSymbol(SymTag.RECORD, constantSymbol.flags | Flags.ANONYMOUS, genName, constantSymbol.pkgID, null, constantSymbol.owner, pos, VIRTUAL); recordTypeSymbol.scope = constantSymbol.scope; BRecordType recordType = new BRecordType(recordTypeSymbol); recordType.tsymbol.name = genName; recordType.sealed = true; recordType.restFieldType = new BNoType(TypeTags.NONE); recordTypeSymbol.type = recordType; if (constValueMap.size() != 0) { if (!populateRecordFields(expr, constantSymbol, pos, constValueMap, recordType, env)) { return null; } } createTypeDefinition(recordType, pos, env); BIntersectionType intersectionType = ImmutableTypeCloner.getImmutableIntersectionType(pos, types, recordType, env, symTable, anonymousModelHelper, names, new HashSet<>()); return intersectionType; } private boolean populateRecordFields(BLangExpression expr, BConstantSymbol constantSymbol, Location pos, HashMap<String, BLangConstantValue> constValueMap, BRecordType recordType, SymbolEnv env) { for (RecordLiteralNode.RecordField field : ((BLangRecordLiteral) expr).fields) { String key; BVarSymbol newSymbol; if (field.isKeyValueField()) { 
BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; BLangExpression exprValueField = keyValuePair.valueExpr; if (exprValueField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { key = keyValuePair.key.toString(); newSymbol = new BVarSymbol(constantSymbol.flags, Names.fromString(key), constantSymbol.pkgID, null, constantSymbol.owner, pos, VIRTUAL); BLangSimpleVarRef simpleVarRefExpr = (BLangSimpleVarRef) exprValueField; if (simpleVarRefExpr.symbol.type.getKind() == TypeKind.FINITE || simpleVarRefExpr.symbol.type.getKind() == TypeKind.INTERSECTION) { BType resolvedType = simpleVarRefExpr.symbol.type; recordType.fields.put(key, createField(newSymbol, resolvedType, key, pos)); keyValuePair.setBType(resolvedType); if (resolvedType.getKind() != TypeKind.FINITE) { constValueMap.get(key).type = resolvedType; if (resolvedType.getKind() == TypeKind.INTERSECTION) { simpleVarRefExpr.setBType(((BIntersectionType) resolvedType).effectiveType); } } continue; } } key = keyValuePair.key.toString(); newSymbol = new BVarSymbol(constantSymbol.flags, Names.fromString(key), constantSymbol.pkgID, null, constantSymbol.owner, pos, VIRTUAL); this.anonTypeNameSuffixes.push(key); BType newType = checkType(exprValueField, constantSymbol, constValueMap.get(key).value, constValueMap.get(key).type, pos, env); this.anonTypeNameSuffixes.pop(); if (newType == null) { return false; } keyValuePair.setBType(newType); if (newType.getKind() != TypeKind.FINITE) { constValueMap.get(key).type = newType; if (newType.getKind() == TypeKind.INTERSECTION) { exprValueField.setBType(((BIntersectionType) newType).effectiveType); } } recordType.fields.put(key, createField(newSymbol, newType, key, pos)); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; key = varNameField.variableName.value; newSymbol = new BVarSymbol(constantSymbol.flags, 
Names.fromString(key), constantSymbol.pkgID, null, constantSymbol.owner, pos, VIRTUAL); BType resolvedType = varNameField.symbol.type; varNameField.setBType(resolvedType); recordType.fields.put(key, createField(newSymbol, resolvedType, key, pos)); continue; } else { BLangExpression exprSpreadField = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; if (exprSpreadField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef simpleVarRefExpr = (BLangSimpleVarRef) exprSpreadField; if (simpleVarRefExpr.symbol.type.getKind() == TypeKind.FINITE || simpleVarRefExpr.symbol.type.getKind() == TypeKind.INTERSECTION) { BRecordType resolvedType = (BRecordType) ((BIntersectionType) simpleVarRefExpr.symbol.type).effectiveType; exprSpreadField.setBType(resolvedType); for (String spreadFieldKeys : ((HashMap<String, BField>) resolvedType.fields).keySet()) { newSymbol = new BVarSymbol(constantSymbol.flags, Names.fromString(spreadFieldKeys), constantSymbol.pkgID, null, constantSymbol.owner, pos, VIRTUAL); BType spreadFieldType = resolvedType.fields.get(spreadFieldKeys).type; recordType.fields.put(spreadFieldKeys, createField(newSymbol, spreadFieldType, spreadFieldKeys, pos)); } } continue; } } } return true; } private BType createTupleType(BLangExpression expr, BConstantSymbol constantSymbol, Location pos, Object constValue, SymbolEnv env) { if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return expr.getBType(); } List<BLangConstantValue> constValueList = (List<BLangConstantValue>) constValue; for (BLangConstantValue memberValue : constValueList) { if (memberValue == null) { return null; } } List<BLangExpression> memberExprs = ((BLangListConstructorExpr) expr).exprs; List<BType> tupleTypes = new ArrayList<>(constValueList.size()); for (int i = 0; i < memberExprs.size(); i++) { BLangExpression memberExpr = memberExprs.get(i); BLangConstantValue memberConstValue = constValueList.get(i); if (memberExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BType type = 
((BLangSimpleVarRef) memberExpr).symbol.type; int tag = type.tag; if (tag == TypeTags.FINITE) { tupleTypes.add(type); continue; } if (tag == TypeTags.INTERSECTION) { memberConstValue.type = type; tupleTypes.add(type); continue; } } BType newType = checkType(memberExpr, constantSymbol, memberConstValue.value, memberConstValue.type, pos, env); if (newType == null) { return null; } tupleTypes.add(newType); if (newType.tag != TypeTags.FINITE) { memberConstValue.type = newType; memberExpr.setBType(newType.tag == TypeTags.INTERSECTION ? ((BIntersectionType) newType).effectiveType : newType); } } BTypeSymbol tupleTypeSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.enclPkg.symbol.pkgID, null, env.scope.owner, pos, VIRTUAL); return ImmutableTypeCloner.getImmutableIntersectionType(pos, types, new BTupleType(tupleTypeSymbol, tupleTypes), env, symTable, anonymousModelHelper, names, new HashSet<>()); } }
Add some code comments here ? ```java /* sum = */ ifThenElse(isNull(operands()[0]), sum, minus(sum, operands()[0])), ```
public Expression[] retractExpressions() { return new Expression[] { ifThenElse(isNull(operands()[0]), sum, minus(sum, operands()[0])), ifThenElse(isNull(operands()[0]), count, minus(count, literal(1L))), }; }
ifThenElse(isNull(operands()[0]), sum, minus(sum, operands()[0])),
public Expression[] retractExpressions() { return new Expression[] { /* sum = */ ifThenElse(isNull(operand(0)), sum, minus(sum, operand(0))), /* count = */ ifThenElse(isNull(operand(0)), count, minus(count, literal(1L))), }; }
class AvgAggFunction extends DeclarativeAggregateFunction { private UnresolvedAggBufferReference sum = new UnresolvedAggBufferReference("sum", getSumType()); private UnresolvedAggBufferReference count = new UnresolvedAggBufferReference("count", Types.LONG); public TypeInformation getSumType() { return Types.LONG; } @Override public int inputCount() { return 1; } @Override public UnresolvedAggBufferReference[] aggBufferAttributes() { return new UnresolvedAggBufferReference[] {sum, count}; } @Override public Expression[] initialValuesExpressions() { return new Expression[] {literal(0L, getSumType()), literal(0L)}; } @Override public Expression[] accumulateExpressions() { return new Expression[] { ifThenElse(isNull(operands()[0]), sum, plus(sum, operands()[0])), ifThenElse(isNull(operands()[0]), count, plus(count, literal(1L))), }; } @Override @Override public Expression[] mergeExpressions() { return new Expression[] { plus(sum, mergeInput(sum)), plus(count, mergeInput(count)) }; } /** * If all input are nulls, count will be 0 and we will get null after the division. */ @Override public Expression getValueExpression() { return ifThenElse(equalTo(count, literal(0L)), nullValue(getResultType()), div(sum, count)); } /** * Built-in Int Avg aggregate function for integral arguments, * including BYTE, SHORT, INT, LONG. * The result type is DOUBLE. */ public static class IntegralAvgAggFunction extends AvgAggFunction { @Override public TypeInformation getResultType() { return Types.DOUBLE; } } /** * Built-in Double Avg aggregate function. */ public static class DoubleAvgAggFunction extends AvgAggFunction { @Override public TypeInformation getResultType() { return Types.DOUBLE; } @Override public TypeInformation getSumType() { return Types.DOUBLE; } @Override public Expression[] initialValuesExpressions() { return new Expression[] {literal(0D), literal(0L)}; } } /** * Built-in Decimal Avg aggregate function. 
*/ public static class DecimalAvgAggFunction extends AvgAggFunction { private final DecimalTypeInfo type; public DecimalAvgAggFunction(DecimalTypeInfo type) { this.type = type; } @Override public TypeInformation getResultType() { return DecimalType.inferAggAvgType(type.scale()).toTypeInfo(); } @Override public TypeInformation getSumType() { return DecimalType.inferAggSumType(type.scale()).toTypeInfo(); } @Override public Expression[] initialValuesExpressions() { return new Expression[] {literal(new BigDecimal(0)), literal(0L)}; } } }
class AvgAggFunction extends DeclarativeAggregateFunction { private FieldReferenceExpression sum = new FieldReferenceExpression("sum", getSumType()); private FieldReferenceExpression count = new FieldReferenceExpression("count", Types.LONG); public TypeInformation getSumType() { return Types.LONG; } @Override public int operandCount() { return 1; } @Override public FieldReferenceExpression[] aggBufferAttributes() { return new FieldReferenceExpression[] { sum, count}; } @Override public Expression[] initialValuesExpressions() { return new Expression[] { /* sum = */ literal(0L, getSumType()), /* count = */ literal(0L)}; } @Override public Expression[] accumulateExpressions() { return new Expression[] { /* sum = */ ifThenElse(isNull(operand(0)), sum, plus(sum, operand(0))), /* count = */ ifThenElse(isNull(operand(0)), count, plus(count, literal(1L))), }; } @Override @Override public Expression[] mergeExpressions() { return new Expression[] { /* sum = */ plus(sum, mergeOperand(sum)), /* count = */ plus(count, mergeOperand(count)) }; } /** * If all input are nulls, count will be 0 and we will get null after the division. */ @Override public Expression getValueExpression() { return ifThenElse(equalTo(count, literal(0L)), nullOf(getResultType()), div(sum, count)); } /** * Built-in Int Avg aggregate function for integral arguments, * including BYTE, SHORT, INT, LONG. * The result type is DOUBLE. */ public static class IntegralAvgAggFunction extends AvgAggFunction { @Override public TypeInformation getResultType() { return Types.DOUBLE; } } /** * Built-in Double Avg aggregate function. */ public static class DoubleAvgAggFunction extends AvgAggFunction { @Override public TypeInformation getResultType() { return Types.DOUBLE; } @Override public TypeInformation getSumType() { return Types.DOUBLE; } @Override public Expression[] initialValuesExpressions() { return new Expression[] {literal(0D), literal(0L)}; } } /** * Built-in Decimal Avg aggregate function. 
*/ public static class DecimalAvgAggFunction extends AvgAggFunction { private final DecimalTypeInfo type; public DecimalAvgAggFunction(DecimalTypeInfo type) { this.type = type; } @Override public TypeInformation getResultType() { return DecimalType.inferAggAvgType(type.scale()).toTypeInfo(); } @Override public TypeInformation getSumType() { return DecimalType.inferAggSumType(type.scale()).toTypeInfo(); } @Override public Expression[] initialValuesExpressions() { return new Expression[] {literal(new BigDecimal(0)), literal(0L)}; } } }
```suggestion if (operation.getParameters() != null && operation.getParameters().size() > 0) { ```
private void setPaths(OpenAPI openAPI) throws BallerinaOpenApiException { if (openAPI.getPaths() == null) { return; } this.paths = new LinkedHashSet<>(); Paths pathList = openAPI.getPaths(); for (Map.Entry<String, PathItem> path : pathList.entrySet()) { setPathName(path.getKey()); BallerinaPath balPath = new BallerinaPath().buildContext(path.getValue(), openAPI); if (balPath.isNoOperationsForPath()) { balPath.setResourceName(escapeIdentifier(path.getKey())); } else { balPath.getOperations().forEach(operation -> { if (operation.getValue().getOperationId() == null) { String pathName = path.getKey().substring(1); String operationId = operation.getKey() + StringUtils.capitalize(pathName); operation.getValue().setOperationId(escapeIdentifier(CodegenUtils.normalizeForBIdentifier( operationId))); } else { String opId = operation.getValue().getOperationId(); operation.getValue().setOperationId(escapeIdentifier(opId)); } }); } String resourcePath; if (hasPathParams(path.getValue())) { resourcePath = "string `" + path.getKey().replace("{", "${") + "`"; } else { resourcePath = "\"" + path.getKey() + "\""; } paths.add(new AbstractMap.SimpleEntry<>(resourcePath, balPath)); } } /** * Checks if there are any path parameters in the operations. * @param path The path item in the OpenAPI spec. * @return True if there are path parameters, else false. */ private boolean hasPathParams(PathItem path) { if (null != path.getParameters() && path.getParameters().size() > 0) { return path.getParameters().stream() .anyMatch(parameter -> null != parameter.getIn() && parameter.getIn().equals("path")); } if (path.readOperations().size() > 0) { return path.readOperations().stream().anyMatch(operation -> { if (null != operation.getParameters() && operation.getParameters().size() > 0) { return operation.getParameters().stream() .anyMatch(parameter -> null != parameter.getIn() && parameter.getIn().equals("path")); } return false; }); } return false; } /** * Populate schemas into a "Set". 
* * @param openAPI <code>OpenAPI</code> definition object with schema definition */ private void setSchemas(OpenAPI openAPI) { this.schemas = new LinkedHashSet<>(); Map<String, Schema> schemaMap; if (openAPI.getComponents() == null || openAPI.getComponents().getSchemas() == null) { return; } schemaMap = openAPI.getComponents().getSchemas(); for (Map.Entry<String, Schema> entry : schemaMap.entrySet()) { try { BallerinaSchema schema = new BallerinaSchema().buildContext(entry.getValue(), openAPI); if (StringUtils.isEmpty(schema.getType())) { schema.setType(entry.getKey()); } schemas.add(new AbstractMap.SimpleEntry<>(entry.getKey(), schema)); } catch (BallerinaOpenApiException e) { } } } /** * Extract endpoint information from OpenAPI server list. * If no servers were found, default {@link BallerinaServer} will be set as the server * * @param openAPI <code>OpenAPI</code> definition object with server details * @throws BallerinaOpenApiException on failure to parse {@code Server} list */ private void setServers(OpenAPI openAPI) throws BallerinaOpenApiException { this.servers = new ArrayList<>(); List<Server> serverList = openAPI.getServers(); if (serverList == null) { BallerinaServer server = new BallerinaServer().getDefaultValue(); servers.add(server); return; } serverList.forEach(server -> { try { BallerinaServer balServer = new BallerinaServer().buildContext(server); servers.add(balServer); } catch (BallerinaOpenApiException e) { servers.add(new BallerinaServer().getDefaultValue()); } }); } /** * Extract security requirements as a set. 
* * @param openAPI <code>OpenAPI</code> definition object with security definition */ private void setSecurityRequirements(OpenAPI openAPI) { this.security = new LinkedHashSet<>(); List<SecurityRequirement> requirements = openAPI.getSecurity(); if (requirements == null || requirements.isEmpty()) { return; } requirements.forEach(r -> r.forEach((key, value) -> { Map.Entry entry = new AbstractMap.SimpleEntry<>(key, value); security.add(entry); })); } public BallerinaOpenApi srcPackage(String srcPackage) { if (srcPackage != null) { this.srcPackage = srcPackage.replaceFirst("\\.", "/"); } return this; } public BallerinaOpenApi modelPackage(String modelPackage) { if (modelPackage != null) { this.modelPackage = modelPackage.replaceFirst("\\.", "/"); } return this; } public String getSrcPackage() { return srcPackage; } public String getModelPackage() { return modelPackage; } public String getOpenapi() { return openapi; } public Info getInfo() { return info; } public ExternalDocumentation getExternalDocs() { return externalDocs; } public List<BallerinaServer> getServers() { return servers; } public Set<Map.Entry<String, String>> getSecurity() { return security; } public List<Tag> getTags() { return tags; } public Set<Map.Entry<String, BallerinaPath>> getPaths() { return paths; } public Components getComponents() { return components; } public Map<String, Object> getExtensions() { return extensions; } public Set<Map.Entry<String, BallerinaSchema>> getSchemas() { return schemas; } public void setDefinitionPath(String definitionPath) { this.definitionPath = definitionPath; } public String getDefinitionPath() { return definitionPath; } public static void setPathName(String pathName) { BallerinaOpenApi.pathName = pathName; } public static String getPathName() { return pathName; } }
if (null != operation.getParameters() && operation.getParameters().size() > 0) {
private void setPaths(OpenAPI openAPI) throws BallerinaOpenApiException { if (openAPI.getPaths() == null) { return; } this.paths = new LinkedHashSet<>(); Paths pathList = openAPI.getPaths(); for (Map.Entry<String, PathItem> path : pathList.entrySet()) { setPathName(path.getKey()); BallerinaPath balPath = new BallerinaPath().buildContext(path.getValue(), openAPI); if (balPath.isNoOperationsForPath()) { balPath.setResourceName(escapeIdentifier(path.getKey())); } else { balPath.getOperations().forEach(operation -> { if (operation.getValue().getOperationId() == null) { String pathName = path.getKey().substring(1); String operationId = operation.getKey() + StringUtils.capitalize(pathName); operation.getValue().setOperationId(escapeIdentifier(CodegenUtils.normalizeForBIdentifier( operationId))); } else { String opId = operation.getValue().getOperationId(); operation.getValue().setOperationId(escapeIdentifier(opId)); } }); } String resourcePath; if (hasPathParams(path.getValue())) { resourcePath = "string `" + path.getKey().replace("{", "${") + "`"; } else { resourcePath = "\"" + path.getKey() + "\""; } paths.add(new AbstractMap.SimpleEntry<>(resourcePath, balPath)); } } /** * Checks if there are any path parameters in the operations. * @param path The path item in the OpenAPI spec. * @return True if there are path parameters, else false. 
*/ private boolean hasPathParams(PathItem path) { if (path.getParameters() != null && !path.getParameters().isEmpty()) { return path.getParameters().stream().anyMatch(parameter -> parameter.getIn() != null && parameter.getIn() .equals(GeneratorConstants.PATH)); } if (path.readOperations().isEmpty()) { return false; } return path.readOperations().stream().anyMatch(operation -> { if (operation.getParameters() != null && !operation.getParameters().isEmpty()) { return operation.getParameters().stream().anyMatch(parameter -> parameter.getIn() != null && parameter.getIn().equals(GeneratorConstants.PATH)); } return false; }); } /** * Populate schemas into a "Set". * * @param openAPI <code>OpenAPI</code> definition object with schema definition */ private void setSchemas(OpenAPI openAPI) { this.schemas = new LinkedHashSet<>(); Map<String, Schema> schemaMap; if (openAPI.getComponents() == null || openAPI.getComponents().getSchemas() == null) { return; } schemaMap = openAPI.getComponents().getSchemas(); for (Map.Entry<String, Schema> entry : schemaMap.entrySet()) { try { BallerinaSchema schema = new BallerinaSchema().buildContext(entry.getValue(), openAPI); if (StringUtils.isEmpty(schema.getType())) { schema.setType(entry.getKey()); } schemas.add(new AbstractMap.SimpleEntry<>(entry.getKey(), schema)); } catch (BallerinaOpenApiException e) { } } } /** * Extract endpoint information from OpenAPI server list. 
* If no servers were found, default {@link BallerinaServer} will be set as the server * * @param openAPI <code>OpenAPI</code> definition object with server details * @throws BallerinaOpenApiException on failure to parse {@code Server} list */ private void setServers(OpenAPI openAPI) throws BallerinaOpenApiException { this.servers = new ArrayList<>(); List<Server> serverList = openAPI.getServers(); if (serverList == null) { BallerinaServer server = new BallerinaServer().getDefaultValue(); servers.add(server); return; } serverList.forEach(server -> { try { BallerinaServer balServer = new BallerinaServer().buildContext(server); servers.add(balServer); } catch (BallerinaOpenApiException e) { servers.add(new BallerinaServer().getDefaultValue()); } }); } /** * Extract security requirements as a set. * * @param openAPI <code>OpenAPI</code> definition object with security definition */ private void setSecurityRequirements(OpenAPI openAPI) { this.security = new LinkedHashSet<>(); List<SecurityRequirement> requirements = openAPI.getSecurity(); if (requirements == null || requirements.isEmpty()) { return; } requirements.forEach(r -> r.forEach((key, value) -> { Map.Entry entry = new AbstractMap.SimpleEntry<>(key, value); security.add(entry); })); } public BallerinaOpenApi srcPackage(String srcPackage) { if (srcPackage != null) { this.srcPackage = srcPackage.replaceFirst("\\.", "/"); } return this; } public BallerinaOpenApi modelPackage(String modelPackage) { if (modelPackage != null) { this.modelPackage = modelPackage.replaceFirst("\\.", "/"); } return this; } public String getSrcPackage() { return srcPackage; } public String getModelPackage() { return modelPackage; } public String getOpenapi() { return openapi; } public Info getInfo() { return info; } public ExternalDocumentation getExternalDocs() { return externalDocs; } public List<BallerinaServer> getServers() { return servers; } public Set<Map.Entry<String, String>> getSecurity() { return security; } public List<Tag> 
getTags() { return tags; } public Set<Map.Entry<String, BallerinaPath>> getPaths() { return paths; } public Components getComponents() { return components; } public Map<String, Object> getExtensions() { return extensions; } public Set<Map.Entry<String, BallerinaSchema>> getSchemas() { return schemas; } public void setDefinitionPath(String definitionPath) { this.definitionPath = definitionPath; } public String getDefinitionPath() { return definitionPath; } public static void setPathName(String pathName) { BallerinaOpenApi.pathName = pathName; } public static String getPathName() { return pathName; } }
class BallerinaOpenApi implements BallerinaOpenApiObject<BallerinaOpenApi, OpenAPI> { private String srcPackage; private String modelPackage; private String openapi = "3.0.0"; private String definitionPath = ""; private Info info = null; private ExternalDocumentation externalDocs = null; private List<BallerinaServer> servers = null; private Set<Map.Entry<String, String>> security = null; private List<Tag> tags = null; private Set<Map.Entry<String, BallerinaPath>> paths = null; private Set<Map.Entry<String, BallerinaSchema>> schemas = null; private Components components = null; private Map<String, Object> extensions = null; private static String pathName; /** * Build a {@link BallerinaOpenApi} object from a {@link OpenAPI} object. * All non iterable objects using handlebars library is converted into * supported iterable object types. * * @param openAPI {@link OpenAPI} type object to be converted * @return Converted {@link BallerinaOpenApi} object * @throws BallerinaOpenApiException when OpenAPI to BallerinaOpenApi parsing failed */ @Override public BallerinaOpenApi buildContext(OpenAPI openAPI) throws BallerinaOpenApiException { this.openapi = openAPI.getOpenapi(); this.info = openAPI.getInfo(); this.externalDocs = openAPI.getExternalDocs(); this.tags = openAPI.getTags(); this.components = openAPI.getComponents(); this.extensions = openAPI.getExtensions(); setPaths(openAPI); setSecurityRequirements(openAPI); setServers(openAPI); setSchemas(openAPI); return this; } @Override public BallerinaOpenApi buildContext(OpenAPI definition, OpenAPI openAPI) throws BallerinaOpenApiException { return buildContext(definition); } @Override public BallerinaOpenApi getDefaultValue() { return null; } /** * Populate path models into iterable structure. 
* This method will also add an operationId to each operation, * if operationId not provided in openApi definition * * @param openAPI {@code OpenAPI} definition object with schema definition * @throws BallerinaOpenApiException when context building fails */
class BallerinaOpenApi implements BallerinaOpenApiObject<BallerinaOpenApi, OpenAPI> { private String srcPackage; private String modelPackage; private String openapi = "3.0.0"; private String definitionPath = ""; private Info info = null; private ExternalDocumentation externalDocs = null; private List<BallerinaServer> servers = null; private Set<Map.Entry<String, String>> security = null; private List<Tag> tags = null; private Set<Map.Entry<String, BallerinaPath>> paths = null; private Set<Map.Entry<String, BallerinaSchema>> schemas = null; private Components components = null; private Map<String, Object> extensions = null; private static String pathName; /** * Build a {@link BallerinaOpenApi} object from a {@link OpenAPI} object. * All non iterable objects using handlebars library is converted into * supported iterable object types. * * @param openAPI {@link OpenAPI} type object to be converted * @return Converted {@link BallerinaOpenApi} object * @throws BallerinaOpenApiException when OpenAPI to BallerinaOpenApi parsing failed */ @Override public BallerinaOpenApi buildContext(OpenAPI openAPI) throws BallerinaOpenApiException { this.openapi = openAPI.getOpenapi(); this.info = openAPI.getInfo(); this.externalDocs = openAPI.getExternalDocs(); this.tags = openAPI.getTags(); this.components = openAPI.getComponents(); this.extensions = openAPI.getExtensions(); setPaths(openAPI); setSecurityRequirements(openAPI); setServers(openAPI); setSchemas(openAPI); return this; } @Override public BallerinaOpenApi buildContext(OpenAPI definition, OpenAPI openAPI) throws BallerinaOpenApiException { return buildContext(definition); } @Override public BallerinaOpenApi getDefaultValue() { return null; } /** * Populate path models into iterable structure. 
* This method will also add an operationId to each operation, * if operationId not provided in openApi definition * * @param openAPI {@code OpenAPI} definition object with schema definition * @throws BallerinaOpenApiException when context building fails */
Hm, nice catch. Shouldn't always trust the IDE's suggested changes it seems. Will fix.
private Optional<String> childAsString(Optional<ModelElement> element, String childTagName) { return element.map(modelElement -> modelElement.childAsString(childTagName)); }
return element.map(modelElement -> modelElement.childAsString(childTagName));
private Optional<String> childAsString(Optional<ModelElement> element, String childTagName) { if (element.isEmpty()) return Optional.empty(); return Optional.ofNullable(element.get().childAsString(childTagName)); }
class XmlNodeBuilder { private final ModelElement clusterElement; private final ModelElement element; private XmlNodeBuilder(ModelElement clusterElement, ModelElement element) { this.clusterElement = clusterElement; this.element = element; } public StorageNode build(DeployState deployState, ContentCluster parent, StorageGroup storageGroup) { StorageNode sNode = new StorageNode.Builder().build(deployState, parent.getStorageNodes(), element.getXml()); PersistenceEngine provider = parent.getPersistence().create(deployState, sNode, storageGroup, element); new Distributor.Builder(clusterElement, provider).build(deployState, parent.getDistributorNodes(), element.getXml()); return sNode; } }
class XmlNodeBuilder { private final ModelElement clusterElement; private final ModelElement element; private XmlNodeBuilder(ModelElement clusterElement, ModelElement element) { this.clusterElement = clusterElement; this.element = element; } public StorageNode build(DeployState deployState, ContentCluster parent, StorageGroup storageGroup) { StorageNode sNode = new StorageNode.Builder().build(deployState, parent.getStorageNodes(), element.getXml()); PersistenceEngine provider = parent.getPersistence().create(deployState, sNode, storageGroup, element); new Distributor.Builder(clusterElement, provider).build(deployState, parent.getDistributorNodes(), element.getXml()); return sNode; } }
Do we need to get the current catalog at this point? You handle that during the execution already, don't you?
public Operation convertSqlNode(SqlShowDatabases sqlShowDatabases, ConvertContext context) { if (sqlShowDatabases.getPreposition() == null) { return new ShowDatabasesOperation( sqlShowDatabases.getLikeType(), sqlShowDatabases.getLikeSqlPattern(), sqlShowDatabases.isNotLike()); } else { CatalogManager catalogManager = context.getCatalogManager(); String[] fullCatalogName = sqlShowDatabases.getCatalog(); String catalogName = fullCatalogName.length == 0 ? catalogManager.getCurrentCatalog() : fullCatalogName[0]; return new ShowDatabasesOperation( sqlShowDatabases.getPreposition(), catalogName, sqlShowDatabases.getLikeType(), sqlShowDatabases.getLikeSqlPattern(), sqlShowDatabases.isNotLike()); } }
: fullCatalogName[0];
public Operation convertSqlNode(SqlShowDatabases sqlShowDatabases, ConvertContext context) { if (sqlShowDatabases.getPreposition() == null) { return new ShowDatabasesOperation( sqlShowDatabases.getLikeType(), sqlShowDatabases.getLikeSqlPattern(), sqlShowDatabases.isNotLike()); } else { return new ShowDatabasesOperation( sqlShowDatabases.getCatalog()[0], sqlShowDatabases.getLikeType(), sqlShowDatabases.getLikeSqlPattern(), sqlShowDatabases.isNotLike()); } }
class SqlShowDatabasesConverter implements SqlNodeConverter<SqlShowDatabases> { @Override }
class SqlShowDatabasesConverter implements SqlNodeConverter<SqlShowDatabases> { @Override }
```suggestion final int cnt = 2 + userToAuthenticationInfo.size() * 2; ```
public void save(DataOutputStream dos) throws IOException { try { final int cnt = 1 + 1 + userToAuthenticationInfo.size() * 2; SRMetaBlockWriter writer = new SRMetaBlockWriter(dos, AuthenticationManager.class.getName(), cnt); writer.writeJson(this); writer.writeJson(userToAuthenticationInfo.size()); Iterator<Map.Entry<UserIdentity, UserAuthenticationInfo>> iterator = userToAuthenticationInfo.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<UserIdentity, UserAuthenticationInfo> entry = iterator.next(); writer.writeJson(entry.getKey()); writer.writeJson(entry.getValue()); } writer.close(); } catch (SRMetaBlockException e) { IOException exception = new IOException("failed to save AuthenticationManager!"); exception.initCause(e); throw exception; } }
final int cnt = 1 + 1 + userToAuthenticationInfo.size() * 2;
public void save(DataOutputStream dos) throws IOException { try { final int cnt = 1 + 1 + userToAuthenticationInfo.size() * 2; SRMetaBlockWriter writer = new SRMetaBlockWriter(dos, AuthenticationManager.class.getName(), cnt); writer.writeJson(this); writer.writeJson(userToAuthenticationInfo.size()); Iterator<Map.Entry<UserIdentity, UserAuthenticationInfo>> iterator = userToAuthenticationInfo.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<UserIdentity, UserAuthenticationInfo> entry = iterator.next(); writer.writeJson(entry.getKey()); writer.writeJson(entry.getValue()); } writer.close(); } catch (SRMetaBlockException e) { IOException exception = new IOException("failed to save AuthenticationManager!"); exception.initCause(e); throw exception; } }
class AuthenticationManager { private static final Logger LOG = LogManager.getLogger(AuthenticationManager.class); private static final String DEFAULT_PLUGIN = PlainPasswordAuthenticationProvider.PLUGIN_NAME; public static final String ROOT_USER = "root"; @Expose(serialize = false) private Map<UserIdentity, UserAuthenticationInfo> userToAuthenticationInfo = new HashMap<>(); @SerializedName(value = "m") private Map<String, UserProperty> userNameToProperty = new HashMap<>(); private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); private void readLock() { lock.readLock().lock(); } private void readUnlock() { lock.readLock().unlock(); } private void writeLock() { lock.writeLock().lock(); } private void writeUnlock() { lock.writeLock().unlock(); } public void init() throws AuthenticationException { AuthenticationProviderFactory.installPlugin( PlainPasswordAuthenticationProvider.PLUGIN_NAME, new PlainPasswordAuthenticationProvider()); UserIdentity rootUser = new UserIdentity(ROOT_USER, UserAuthenticationInfo.ANY_HOST); rootUser.setIsAnalyzed(); UserAuthenticationInfo info = new UserAuthenticationInfo(); info.setOrigUserHost(ROOT_USER, UserAuthenticationInfo.ANY_HOST); info.setAuthPlugin(PlainPasswordAuthenticationProvider.PLUGIN_NAME); info.setPassword(new byte[0]); userToAuthenticationInfo.put(rootUser, info); userNameToProperty.put(rootUser.getQualifiedUser(), new UserProperty()); } public boolean doesUserExist(UserIdentity userIdentity) { readLock(); try { return userToAuthenticationInfo.containsKey(userIdentity); } finally { readUnlock(); } } public long getMaxConn(String userName) { return userNameToProperty.get(userName).getMaxConn(); } public String getDefaultPlugin() { return DEFAULT_PLUGIN; } public UserIdentity checkPassword(String remoteUser, String remoteHost, byte[] remotePasswd, byte[] randomString) { Iterator<Map.Entry<UserIdentity, UserAuthenticationInfo>> it = userToAuthenticationInfo.entrySet().iterator(); while (it.hasNext()) { 
Map.Entry<UserIdentity, UserAuthenticationInfo> entry = it.next(); UserAuthenticationInfo info = entry.getValue(); if (info.match(remoteUser, remoteHost)) { try { AuthenticationProvider provider = AuthenticationProviderFactory.create(info.getAuthPlugin()); provider.authenticate(remoteUser, remoteHost, remotePasswd, randomString, info); return entry.getKey(); } catch (AuthenticationException e) { LOG.debug("failed to authentication, ", e); } return null; } } LOG.debug("cannot find user {}@{}", remoteUser, remoteHost); return null; } public void createUser(CreateUserStmt stmt) throws DdlException { try { String pluginName = stmt.getAuthPlugin(); AuthenticationProvider provider = AuthenticationProviderFactory.create(pluginName); UserIdentity userIdentity = stmt.getUserIdent(); UserAuthenticationInfo info = provider.validAuthenticationInfo( userIdentity, stmt.getOriginalPassword(), stmt.getAuthString()); info.setAuthPlugin(pluginName); info.setOrigUserHost(userIdentity.getQualifiedUser(), userIdentity.getHost()); writeLock(); try { updateUserNoLock(userIdentity, info, false); UserProperty userProperty = null; if (!userNameToProperty.containsKey(userIdentity.getQualifiedUser())) { userProperty = new UserProperty(); userNameToProperty.put(userIdentity.getQualifiedUser(), userProperty); } GlobalStateMgr.getCurrentState().getEditLog().logCreateUser(userIdentity, info, userProperty); } finally { writeUnlock(); } } catch (AuthenticationException e) { DdlException exception = new DdlException("failed to create user" + stmt.getUserIdent().toString()); exception.initCause(e); throw exception; } } public void replayCreateUser( UserIdentity userIdentity, UserAuthenticationInfo info, UserProperty userProperty) throws AuthenticationException { writeLock(); try { info.analyse(); updateUserNoLock(userIdentity, info, false); if (userProperty != null) { userNameToProperty.put(userIdentity.getQualifiedUser(), userProperty); } } finally { writeUnlock(); } } private void updateUserNoLock( 
UserIdentity userIdentity, UserAuthenticationInfo info, boolean shouldExists) throws AuthenticationException { if (userToAuthenticationInfo.containsKey(userIdentity)) { if (! shouldExists) { throw new AuthenticationException("failed to find user " + userIdentity.getQualifiedUser()); } } else { if (shouldExists) { throw new AuthenticationException("user " + userIdentity.getQualifiedUser() + " already exists"); } } userToAuthenticationInfo.put(userIdentity, info); } /** * Use new image format by SRMetaBlockWriter/SRMetaBlockReader * * +------------------+ * | header | * +------------------+ * | | * | Authentication | * | Manager | * | | * +------------------+ * | numUser | * +------------------+ * | User Identify 1 | * +------------------+ * | User | * | Authentication | * | Info 1 | * +------------------+ * | User Identify 2 | * +------------------+ * | User | * | Authentication | * | Info 2 | * +------------------+ * | ... | * +------------------+ * | footer | * +------------------+ */ public static AuthenticationManager load(DataInputStream dis) throws IOException, DdlException { try { SRMetaBlockReader reader = new SRMetaBlockReader(dis, AuthenticationManager.class.getName()); AuthenticationManager ret = null; try { ret = (AuthenticationManager) reader.readJson(AuthenticationManager.class); ret.userToAuthenticationInfo = new HashMap<>(); int numUser = (int) reader.readJson(int.class); LOG.info("loading {} users", numUser); for (int i = 0; i != numUser; ++i) { UserIdentity userIdentity = (UserIdentity) reader.readJson(UserIdentity.class); UserAuthenticationInfo userAuthenticationInfo = (UserAuthenticationInfo) reader.readJson(UserAuthenticationInfo.class); userAuthenticationInfo.analyse(); ret.userToAuthenticationInfo.put(userIdentity, userAuthenticationInfo); } } catch (SRMetaBlockEOFException eofException) { LOG.warn("got EOF exception, ignore, ", eofException); } finally { reader.close(); } assert ret != null; LOG.info("loaded {} users", 
ret.userToAuthenticationInfo.size()); return ret; } catch (SRMetaBlockException | AuthenticationException e) { DdlException exception = new DdlException("failed to save AuthenticationManager!"); exception.initCause(e); throw exception; } } }
/**
 * Manages user authentication: user creation, credential lookup, and
 * persistence of the user -> authentication-info map via SRMetaBlock images.
 * Reads/writes to the internal maps are guarded by {@link #lock}.
 */
class AuthenticationManager {
    private static final Logger LOG = LogManager.getLogger(AuthenticationManager.class);
    private static final String DEFAULT_PLUGIN = PlainPasswordAuthenticationProvider.PLUGIN_NAME;
    public static final String ROOT_USER = "root";

    // user identity -> authentication info; rebuilt from image on load()
    private Map<UserIdentity, UserAuthenticationInfo> userToAuthenticationInfo = new HashMap<>();

    // user name -> per-user properties (e.g. max connections); serialized with the manager
    @SerializedName(value = "m")
    private Map<String, UserProperty> userNameToProperty = new HashMap<>();

    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

    private void readLock() {
        lock.readLock().lock();
    }

    private void readUnlock() {
        lock.readLock().unlock();
    }

    private void writeLock() {
        lock.writeLock().lock();
    }

    private void writeUnlock() {
        lock.writeLock().unlock();
    }

    /**
     * Installs the built-in plain-password provider and registers the root user
     * (any host, empty password).
     *
     * @throws AuthenticationException if the root user's info cannot be built
     */
    public void init() throws AuthenticationException {
        AuthenticationProviderFactory.installPlugin(
                PlainPasswordAuthenticationProvider.PLUGIN_NAME, new PlainPasswordAuthenticationProvider());
        UserIdentity rootUser = new UserIdentity(ROOT_USER, UserAuthenticationInfo.ANY_HOST);
        rootUser.setIsAnalyzed();
        UserAuthenticationInfo info = new UserAuthenticationInfo();
        info.setOrigUserHost(ROOT_USER, UserAuthenticationInfo.ANY_HOST);
        info.setAuthPlugin(PlainPasswordAuthenticationProvider.PLUGIN_NAME);
        info.setPassword(new byte[0]);
        userToAuthenticationInfo.put(rootUser, info);
        userNameToProperty.put(rootUser.getQualifiedUser(), new UserProperty());
    }

    /** Returns true if the exact user identity is registered. */
    public boolean doesUserExist(UserIdentity userIdentity) {
        readLock();
        try {
            return userToAuthenticationInfo.containsKey(userIdentity);
        } finally {
            readUnlock();
        }
    }

    // NOTE(review): throws NPE if userName is unknown — callers are expected to
    // pass an existing user; confirm whether a default should be returned instead.
    public long getMaxConn(String userName) {
        return userNameToProperty.get(userName).getMaxConn();
    }

    public String getDefaultPlugin() {
        return DEFAULT_PLUGIN;
    }

    /**
     * Authenticates a remote client against the first registered identity whose
     * user/host pattern matches. Only the first match is tried; on its failure
     * null is returned without trying later entries.
     *
     * @return the matched user identity on success, or null on failure/no match
     */
    public UserIdentity checkPassword(String remoteUser, String remoteHost, byte[] remotePasswd, byte[] randomString) {
        // NOTE(review): iterates the shared map without readLock(), unlike
        // doesUserExist(); confirm whether concurrent createUser can race here.
        for (Map.Entry<UserIdentity, UserAuthenticationInfo> entry : userToAuthenticationInfo.entrySet()) {
            UserAuthenticationInfo info = entry.getValue();
            if (info.match(remoteUser, remoteHost)) {
                try {
                    AuthenticationProvider provider = AuthenticationProviderFactory.create(info.getAuthPlugin());
                    provider.authenticate(remoteUser, remoteHost, remotePasswd, randomString, info);
                    return entry.getKey();
                } catch (AuthenticationException e) {
                    LOG.debug("failed to authenticate, ", e);
                }
                return null;
            }
        }
        LOG.debug("cannot find user {}@{}", remoteUser, remoteHost);
        return null;
    }

    /**
     * Creates a new user from a CREATE USER statement, validates its credential
     * with the configured plugin, and writes an edit log entry.
     *
     * @throws DdlException if the user already exists or validation fails
     */
    public void createUser(CreateUserStmt stmt) throws DdlException {
        try {
            String pluginName = stmt.getAuthPlugin();
            AuthenticationProvider provider = AuthenticationProviderFactory.create(pluginName);
            UserIdentity userIdentity = stmt.getUserIdent();
            UserAuthenticationInfo info = provider.validAuthenticationInfo(
                    userIdentity, stmt.getOriginalPassword(), stmt.getAuthString());
            info.setAuthPlugin(pluginName);
            info.setOrigUserHost(userIdentity.getQualifiedUser(), userIdentity.getHost());
            writeLock();
            try {
                // shouldExists == false: creating a brand-new identity
                updateUserNoLock(userIdentity, info, false);
                // userProperty stays null (and is logged as null) when the user
                // name already has properties from another host identity
                UserProperty userProperty = null;
                if (!userNameToProperty.containsKey(userIdentity.getQualifiedUser())) {
                    userProperty = new UserProperty();
                    userNameToProperty.put(userIdentity.getQualifiedUser(), userProperty);
                }
                GlobalStateMgr.getCurrentState().getEditLog().logCreateUser(userIdentity, info, userProperty);
            } finally {
                writeUnlock();
            }
        } catch (AuthenticationException e) {
            // fixed: missing space before the user identity in the message
            throw new DdlException("failed to create user " + stmt.getUserIdent().toString(), e);
        }
    }

    /** Replays a create-user edit log entry on a follower/restart. */
    public void replayCreateUser(
            UserIdentity userIdentity, UserAuthenticationInfo info, UserProperty userProperty)
            throws AuthenticationException {
        writeLock();
        try {
            info.analyze();
            updateUserNoLock(userIdentity, info, false);
            if (userProperty != null) {
                userNameToProperty.put(userIdentity.getQualifiedUser(), userProperty);
            }
        } finally {
            writeUnlock();
        }
    }

    /**
     * Inserts or replaces a user's authentication info. Must be called with the
     * write lock held.
     *
     * @param shouldExists true when updating (the user must already exist),
     *                     false when creating (the user must not exist yet)
     * @throws AuthenticationException if the existence precondition is violated
     */
    private void updateUserNoLock(
            UserIdentity userIdentity, UserAuthenticationInfo info, boolean shouldExists)
            throws AuthenticationException {
        // fixed: the two error messages were swapped — an existing user on
        // create was reported as "failed to find", and vice versa
        if (userToAuthenticationInfo.containsKey(userIdentity)) {
            if (!shouldExists) {
                throw new AuthenticationException("user " + userIdentity.getQualifiedUser() + " already exists");
            }
        } else {
            if (shouldExists) {
                throw new AuthenticationException("failed to find user " + userIdentity.getQualifiedUser());
            }
        }
        userToAuthenticationInfo.put(userIdentity, info);
    }

    /**
     * Use new image format by SRMetaBlockWriter/SRMetaBlockReader
     *
     * +------------------+
     * |     header       |
     * +------------------+
     * |                  |
     * |  Authentication  |
     * |     Manager      |
     * |                  |
     * +------------------+
     * |     numUser      |
     * +------------------+
     * | User Identify 1  |
     * +------------------+
     * |      User        |
     * |  Authentication  |
     * |     Info 1       |
     * +------------------+
     * | User Identify 2  |
     * +------------------+
     * |      User        |
     * |  Authentication  |
     * |     Info 2       |
     * +------------------+
     * |       ...        |
     * +------------------+
     * |     footer       |
     * +------------------+
     */
    public static AuthenticationManager load(DataInputStream dis) throws IOException, DdlException {
        try {
            SRMetaBlockReader reader = new SRMetaBlockReader(dis, AuthenticationManager.class.getName());
            AuthenticationManager ret = null;
            try {
                // the manager json carries userNameToProperty; the user map is
                // serialized separately as (identity, info) pairs
                ret = (AuthenticationManager) reader.readJson(AuthenticationManager.class);
                ret.userToAuthenticationInfo = new HashMap<>();
                int numUser = (int) reader.readJson(int.class);
                LOG.info("loading {} users", numUser);
                for (int i = 0; i != numUser; ++i) {
                    UserIdentity userIdentity = (UserIdentity) reader.readJson(UserIdentity.class);
                    UserAuthenticationInfo userAuthenticationInfo =
                            (UserAuthenticationInfo) reader.readJson(UserAuthenticationInfo.class);
                    userAuthenticationInfo.analyze();
                    ret.userToAuthenticationInfo.put(userIdentity, userAuthenticationInfo);
                }
            } catch (SRMetaBlockEOFException eofException) {
                // older image may simply end early; treat as "no more users"
                LOG.warn("got EOF exception, ignore, ", eofException);
            } finally {
                reader.close();
            }
            assert ret != null;
            LOG.info("loaded {} users", ret.userToAuthenticationInfo.size());
            return ret;
        } catch (SRMetaBlockException | AuthenticationException e) {
            // fixed: this is the load path, not save
            throw new DdlException("failed to load AuthenticationManager!", e);
        }
    }
}
Will this log be printed very frequently when there is no more data to consume? If so, it should be lowered to debug level.
public boolean hasMoreDataToConsume(UUID taskId, Map<Integer, Long> partitionIdToOffset) throws UserException { for (Map.Entry<Integer, Long> entry : partitionIdToOffset.entrySet()) { if (cachedPartitionWithLatestOffsets.containsKey(entry.getKey()) && entry.getValue() < cachedPartitionWithLatestOffsets.get(entry.getKey())) { LOG.debug("has more data to consume. offsets to be consumed: {}, latest offsets: {}, task {}, job {}", partitionIdToOffset, cachedPartitionWithLatestOffsets, taskId, id); return true; } } try { List<Pair<Integer, Long>> tmp = KafkaUtil.getLatestOffsets(id, taskId, getBrokerList(), getTopic(), getConvertedCustomProperties(), Lists.newArrayList(partitionIdToOffset.keySet())); for (Pair<Integer, Long> pair : tmp) { cachedPartitionWithLatestOffsets.put(pair.first, pair.second); } } catch (Exception e) { LOG.warn("failed to get latest partition offset. {}", e.getMessage(), e); return false; } for (Map.Entry<Integer, Long> entry : partitionIdToOffset.entrySet()) { Integer partitionId = entry.getKey(); if (cachedPartitionWithLatestOffsets.containsKey(partitionId)) { long partitionLatestOffset = cachedPartitionWithLatestOffsets.get(partitionId); long recordPartitionOffset = entry.getValue(); if (recordPartitionOffset < partitionLatestOffset) { LOG.debug("has more data to consume. offsets to be consumed: {}," + " latest offsets: {}, task {}, job {}", partitionIdToOffset, cachedPartitionWithLatestOffsets, taskId, id); return true; } else if (recordPartitionOffset > partitionLatestOffset) { String msg = "offset set in job: " + recordPartitionOffset + " is greater than kafka latest offset: " + partitionLatestOffset + " partition id: " + partitionId; throw new UserException(msg); } } } LOG.info("no more data to consume. offsets to be consumed: {}, latest offsets: {}, task {}, job {}", partitionIdToOffset, cachedPartitionWithLatestOffsets, taskId, id); return false; }
LOG.info("no more data to consume. offsets to be consumed: {}, latest offsets: {}, task {}, job {}",
public boolean hasMoreDataToConsume(UUID taskId, Map<Integer, Long> partitionIdToOffset) throws UserException { for (Map.Entry<Integer, Long> entry : partitionIdToOffset.entrySet()) { if (cachedPartitionWithLatestOffsets.containsKey(entry.getKey()) && entry.getValue() < cachedPartitionWithLatestOffsets.get(entry.getKey())) { LOG.debug("has more data to consume. offsets to be consumed: {}, latest offsets: {}, task {}, job {}", partitionIdToOffset, cachedPartitionWithLatestOffsets, taskId, id); return true; } } try { List<Pair<Integer, Long>> tmp = KafkaUtil.getLatestOffsets(id, taskId, getBrokerList(), getTopic(), getConvertedCustomProperties(), Lists.newArrayList(partitionIdToOffset.keySet())); for (Pair<Integer, Long> pair : tmp) { cachedPartitionWithLatestOffsets.put(pair.first, pair.second); } } catch (Exception e) { LOG.warn("failed to get latest partition offset. {}", e.getMessage(), e); return false; } for (Map.Entry<Integer, Long> entry : partitionIdToOffset.entrySet()) { Integer partitionId = entry.getKey(); if (cachedPartitionWithLatestOffsets.containsKey(partitionId)) { long partitionLatestOffset = cachedPartitionWithLatestOffsets.get(partitionId); long recordPartitionOffset = entry.getValue(); if (recordPartitionOffset < partitionLatestOffset) { LOG.debug("has more data to consume. offsets to be consumed: {}," + " latest offsets: {}, task {}, job {}", partitionIdToOffset, cachedPartitionWithLatestOffsets, taskId, id); return true; } else if (recordPartitionOffset > partitionLatestOffset) { String msg = "offset set in job: " + recordPartitionOffset + " is greater than kafka latest offset: " + partitionLatestOffset + " partition id: " + partitionId; throw new UserException(msg); } } } LOG.debug("no more data to consume. offsets to be consumed: {}, latest offsets: {}, task {}, job {}", partitionIdToOffset, cachedPartitionWithLatestOffsets, taskId, id); return false; }
/**
 * Routine load job backed by Kafka: tracks partitions/offsets, divides work
 * into tasks, converts custom properties (including FILE: references and
 * default-offset settings), and supports persistence and ALTER/replay.
 */
class KafkaRoutineLoadJob extends RoutineLoadJob {
    private static final Logger LOG = LogManager.getLogger(KafkaRoutineLoadJob.class);
    public static final String KAFKA_FILE_CATALOG = "kafka";
    public static final String PROP_GROUP_ID = "group.id";

    private String brokerList;
    private String topic;
    // partitions explicitly listed by the user in CREATE ROUTINE LOAD; empty means "all"
    private List<Integer> customKafkaPartitions = Lists.newArrayList();
    // partitions this job is currently consuming
    private List<Integer> currentKafkaPartitions = Lists.newArrayList();
    // default-offset property value ("", timestamp string, OFFSET_BEGINNING or OFFSET_END)
    private String kafkaDefaultOffSet = "";
    // raw custom properties as given by the user
    private Map<String, String> customProperties = Maps.newHashMap();
    // custom properties after FILE:/default-offset conversion, sent to BE
    private Map<String, String> convertedCustomProperties = Maps.newHashMap();
    // partition id -> last known latest Kafka offset (concurrent: read by schedulers)
    private Map<Integer, Long> cachedPartitionWithLatestOffsets = Maps.newConcurrentMap();
    // partitions freshly fetched from Kafka, compared against currentKafkaPartitions
    private List<Integer> newCurrentKafkaPartition = Lists.newArrayList();

    public KafkaRoutineLoadJob() {
        super(-1, LoadDataSourceType.KAFKA);
    }

    /** Single-table job constructor. */
    public KafkaRoutineLoadJob(Long id, String name, long dbId, long tableId,
            String brokerList, String topic, UserIdentity userIdentity) {
        super(id, name, dbId, tableId, LoadDataSourceType.KAFKA, userIdentity);
        this.brokerList = brokerList;
        this.topic = topic;
        this.progress = new KafkaProgress();
    }

    /** Multi-table job constructor (no fixed table id). */
    public KafkaRoutineLoadJob(Long id, String name, long dbId,
            String brokerList, String topic, UserIdentity userIdentity, boolean isMultiTable) {
        super(id, name, dbId, LoadDataSourceType.KAFKA, userIdentity);
        this.brokerList = brokerList;
        this.topic = topic;
        this.progress = new KafkaProgress();
        setMultiTable(isMultiTable);
    }

    public String getTopic() {
        return topic;
    }

    public String getBrokerList() {
        return brokerList;
    }

    public Map<String, String> getConvertedCustomProperties() {
        return convertedCustomProperties;
    }

    // true when kafkaDefaultOffSet parses as a time string (timeStringToLong != -1)
    private boolean isOffsetForTimes() {
        long offset = TimeUtils.timeStringToLong(this.kafkaDefaultOffSet);
        return offset != -1;
    }

    // converts the time-string default offset to epoch millis in the job's timezone
    private long convertedDefaultOffsetToTimestamp() {
        TimeZone timeZone = TimeUtils.getOrSystemTimeZone(getTimezone());
        return TimeUtils.timeStringToLong(this.kafkaDefaultOffSet, timeZone);
    }

    // maps the default-offset property to a numeric begin offset;
    // empty and any unrecognized value both fall back to OFFSET_END_VAL
    private long convertedDefaultOffsetToLong() {
        if (this.kafkaDefaultOffSet.isEmpty()) {
            return KafkaProgress.OFFSET_END_VAL;
        } else {
            if (isOffsetForTimes()) {
                return convertedDefaultOffsetToTimestamp();
            } else if (this.kafkaDefaultOffSet.equalsIgnoreCase(KafkaProgress.OFFSET_BEGINNING)) {
                return KafkaProgress.OFFSET_BEGINNING_VAL;
            } else if (this.kafkaDefaultOffSet.equalsIgnoreCase(KafkaProgress.OFFSET_END)) {
                return KafkaProgress.OFFSET_END_VAL;
            } else {
                return KafkaProgress.OFFSET_END_VAL;
            }
        }
    }

    @Override
    public void prepare() throws UserException {
        super.prepare();
        convertCustomProperties(true);
    }

    /**
     * Rebuilds convertedCustomProperties from customProperties: resolves
     * "FILE:name" values to "FILE:id:md5" via SmallFileMgr, and extracts the
     * default-offset property into kafkaDefaultOffSet.
     *
     * @param rebuild when true, always reconvert; when false, skip if already converted
     */
    private void convertCustomProperties(boolean rebuild) throws DdlException {
        if (customProperties.isEmpty()) {
            return;
        }
        if (!rebuild && !convertedCustomProperties.isEmpty()) {
            return;
        }
        if (rebuild) {
            convertedCustomProperties.clear();
        }
        SmallFileMgr smallFileMgr = Env.getCurrentEnv().getSmallFileMgr();
        for (Map.Entry<String, String> entry : customProperties.entrySet()) {
            if (entry.getValue().startsWith("FILE:")) {
                // file name -> file id:md5 so BE can fetch the small file
                String file = entry.getValue().substring(entry.getValue().indexOf(":") + 1);
                SmallFile smallFile = smallFileMgr.getSmallFile(dbId, KAFKA_FILE_CATALOG, file, true);
                convertedCustomProperties.put(entry.getKey(), "FILE:" + smallFile.id + ":" + smallFile.md5);
            } else {
                convertedCustomProperties.put(entry.getKey(), entry.getValue());
            }
        }
        // the "origin" default-offsets key takes precedence when both are present
        if (convertedCustomProperties.containsKey(KafkaConfiguration.KAFKA_ORIGIN_DEFAULT_OFFSETS.getName())) {
            kafkaDefaultOffSet = convertedCustomProperties
                    .remove(KafkaConfiguration.KAFKA_ORIGIN_DEFAULT_OFFSETS.getName());
            return;
        }
        if (convertedCustomProperties.containsKey(KafkaConfiguration.KAFKA_DEFAULT_OFFSETS.getName())) {
            kafkaDefaultOffSet = convertedCustomProperties.remove(KafkaConfiguration.KAFKA_DEFAULT_OFFSETS.getName());
        }
    }

    /**
     * Splits the current partitions round-robin into currentConcurrentTaskNum
     * tasks and queues them; moves the job to RUNNING if any task was created.
     */
    @Override
    public void divideRoutineLoadJob(int currentConcurrentTaskNum) throws UserException {
        List<RoutineLoadTaskInfo> result = new ArrayList<>();
        writeLock();
        try {
            if (state == JobState.NEED_SCHEDULE) {
                for (int i = 0; i < currentConcurrentTaskNum; i++) {
                    // task i takes partitions i, i+N, i+2N, ... (N = task count)
                    Map<Integer, Long> taskKafkaProgress = Maps.newHashMap();
                    for (int j = i; j < currentKafkaPartitions.size(); j = j + currentConcurrentTaskNum) {
                        int kafkaPartition = currentKafkaPartitions.get(j);
                        taskKafkaProgress.put(kafkaPartition,
                                ((KafkaProgress) progress).getOffsetByPartition(kafkaPartition));
                    }
                    // timeout = 2x the max batch interval, in millis
                    KafkaTaskInfo kafkaTaskInfo = new KafkaTaskInfo(UUID.randomUUID(), id,
                            maxBatchIntervalS * 2 * 1000, taskKafkaProgress, isMultiTable());
                    routineLoadTaskInfoList.add(kafkaTaskInfo);
                    result.add(kafkaTaskInfo);
                }
                if (result.size() != 0) {
                    unprotectUpdateState(JobState.RUNNING, null, false);
                }
            } else {
                LOG.debug("Ignore to divide routine load job while job state {}", state);
            }
            Env.getCurrentEnv().getRoutineLoadTaskScheduler().addTasksInQueue(result);
        } finally {
            writeUnlock();
        }
    }

    /** Concurrency = min(partition count, desired concurrency, config limit). */
    @Override
    public int calculateCurrentConcurrentTaskNum() {
        int partitionNum = currentKafkaPartitions.size();
        if (desireTaskConcurrentNum == 0) {
            desireTaskConcurrentNum = Config.max_routine_load_task_concurrent_num;
        }
        LOG.debug("current concurrent task number is min"
                        + "(partition num: {}, desire task concurrent num: {} config: {})",
                partitionNum, desireTaskConcurrentNum, Config.max_routine_load_task_concurrent_num);
        currentTaskConcurrentNum = Math.min(partitionNum,
                Math.min(desireTaskConcurrentNum, Config.max_routine_load_task_concurrent_num));
        return currentTaskConcurrentNum;
    }

    // progress should only advance for COMMITTED transactions
    @Override
    protected boolean checkCommitInfo(RLTaskTxnCommitAttachment rlTaskTxnCommitAttachment,
            TransactionState txnState, TransactionState.TxnStatusChangeReason txnStatusChangeReason) {
        if (txnState.getTransactionStatus() == TransactionStatus.COMMITTED) {
            return true;
        }
        LOG.debug("no need to update the progress of kafka routine load. txn status: {}, "
                        + "txnStatusChangeReason: {}, task: {}, job: {}",
                txnState.getTransactionStatus(), txnStatusChangeReason,
                DebugUtil.printId(rlTaskTxnCommitAttachment.getTaskId()), id);
        return false;
    }

    @Override
    protected void updateProgress(RLTaskTxnCommitAttachment attachment) throws UserException {
        super.updateProgress(attachment);
        this.progress.update(attachment);
    }

    @Override
    protected void replayUpdateProgress(RLTaskTxnCommitAttachment attachment) {
        super.replayUpdateProgress(attachment);
        this.progress.update(attachment);
    }

    // replaces a finished/expired task with a fresh one over the same partitions
    @Override
    protected RoutineLoadTaskInfo unprotectRenewTask(RoutineLoadTaskInfo routineLoadTaskInfo) {
        KafkaTaskInfo oldKafkaTaskInfo = (KafkaTaskInfo) routineLoadTaskInfo;
        KafkaTaskInfo kafkaTaskInfo = new KafkaTaskInfo(oldKafkaTaskInfo,
                ((KafkaProgress) progress).getPartitionIdToOffset(oldKafkaTaskInfo.getPartitions()), isMultiTable());
        routineLoadTaskInfoList.remove(routineLoadTaskInfo);
        routineLoadTaskInfoList.add(kafkaTaskInfo);
        return kafkaTaskInfo;
    }

    @Override
    protected void unprotectUpdateProgress() throws UserException {
        updateNewPartitionProgress();
    }

    // refresh the partition list from Kafka unless the user pinned partitions explicitly
    @Override
    protected void preCheckNeedSchedule() throws UserException {
        if (this.state == JobState.RUNNING || this.state == JobState.NEED_SCHEDULE) {
            if (customKafkaPartitions != null && !customKafkaPartitions.isEmpty()) {
                return;
            }
            updateKafkaPartitions();
        }
    }

    // fetches all partitions; on failure pauses a NEED_SCHEDULE job with the error
    private void updateKafkaPartitions() throws UserException {
        try {
            this.newCurrentKafkaPartition = getAllKafkaPartitions();
        } catch (Exception e) {
            LOG.warn(new LogBuilder(LogKey.ROUTINE_LOAD_JOB, id)
                    .add("error_msg", "Job failed to fetch all current partition with error " + e.getMessage())
                    .build(), e);
            if (this.state == JobState.NEED_SCHEDULE) {
                unprotectUpdateState(JobState.PAUSED,
                        new ErrorReason(InternalErrorCode.PARTITIONS_ERR,
                                "Job failed to fetch all current partition with error " + e.getMessage()),
                        false /* not replay */);
            }
        }
    }

    /**
     * Decides whether the job must be rescheduled: true when the topic's
     * partition set changed (shrank or grew) or progress lacks a partition;
     * a PAUSED job defers to ScheduleRule auto-resume.
     */
    @Override
    protected boolean unprotectNeedReschedule() throws UserException {
        if (this.state == JobState.RUNNING || this.state == JobState.NEED_SCHEDULE) {
            if (CollectionUtils.isNotEmpty(customKafkaPartitions)) {
                // user-pinned partitions never trigger a reschedule
                currentKafkaPartitions = customKafkaPartitions;
                return false;
            }
            Preconditions.checkNotNull(this.newCurrentKafkaPartition);
            if (new HashSet<>(currentKafkaPartitions).containsAll(this.newCurrentKafkaPartition)) {
                if (currentKafkaPartitions.size() > this.newCurrentKafkaPartition.size()) {
                    // partitions disappeared from the topic
                    currentKafkaPartitions = this.newCurrentKafkaPartition;
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(new LogBuilder(LogKey.ROUTINE_LOAD_JOB, id)
                                .add("current_kafka_partitions", Joiner.on(",").join(currentKafkaPartitions))
                                .add("msg", "current kafka partitions has been change")
                                .build());
                    }
                    return true;
                } else {
                    // same set; reschedule only if progress is missing a partition
                    for (Integer kafkaPartition : currentKafkaPartitions) {
                        if (!((KafkaProgress) progress).containsPartition(kafkaPartition)) {
                            return true;
                        }
                    }
                    return false;
                }
            } else {
                // new partitions appeared on the topic
                currentKafkaPartitions = this.newCurrentKafkaPartition;
                if (LOG.isDebugEnabled()) {
                    LOG.debug(new LogBuilder(LogKey.ROUTINE_LOAD_JOB, id)
                            .add("current_kafka_partitions", Joiner.on(",").join(currentKafkaPartitions))
                            .add("msg", "current kafka partitions has been change")
                            .build());
                }
                return true;
            }
        }
        if (this.state == JobState.PAUSED) {
            return ScheduleRule.isNeedAutoSchedule(this);
        }
        return false;
    }

    @Override
    protected String getStatistic() {
        Map<String, Object> summary = this.jobStatistic.summary();
        Gson gson = new GsonBuilder().disableHtmlEscaping().create();
        return gson.toJson(summary);
    }

    private List<Integer> getAllKafkaPartitions() throws UserException {
        convertCustomProperties(false);
        return KafkaUtil.getAllKafkaPartitions(brokerList, topic, convertedCustomProperties);
    }

    /** Builds and validates a job from a CREATE ROUTINE LOAD statement. */
    public static KafkaRoutineLoadJob fromCreateStmt(CreateRoutineLoadStmt stmt) throws UserException {
        Database db = Env.getCurrentInternalCatalog().getDbOrDdlException(stmt.getDBName());
        long id = Env.getCurrentEnv().getNextId();
        KafkaDataSourceProperties kafkaProperties = (KafkaDataSourceProperties) stmt.getDataSourceProperties();
        KafkaRoutineLoadJob kafkaRoutineLoadJob;
        if (kafkaProperties.isMultiTable()) {
            kafkaRoutineLoadJob = new KafkaRoutineLoadJob(id, stmt.getName(), db.getId(),
                    kafkaProperties.getBrokerList(), kafkaProperties.getTopic(), stmt.getUserInfo(), true);
        } else {
            OlapTable olapTable = db.getOlapTableOrDdlException(stmt.getTableName());
            checkMeta(olapTable, stmt.getRoutineLoadDesc());
            long tableId = olapTable.getId();
            kafkaRoutineLoadJob = new KafkaRoutineLoadJob(id, stmt.getName(), db.getId(), tableId,
                    kafkaProperties.getBrokerList(), kafkaProperties.getTopic(), stmt.getUserInfo());
        }
        kafkaRoutineLoadJob.setOptional(stmt);
        kafkaRoutineLoadJob.checkCustomProperties();
        kafkaRoutineLoadJob.checkCustomPartition();
        return kafkaRoutineLoadJob;
    }

    // every user-pinned partition must actually exist on the topic
    private void checkCustomPartition() throws UserException {
        if (customKafkaPartitions.isEmpty()) {
            return;
        }
        List<Integer> allKafkaPartitions = getAllKafkaPartitions();
        for (Integer customPartition : customKafkaPartitions) {
            if (!allKafkaPartitions.contains(customPartition)) {
                throw new LoadException("there is a custom kafka partition " + customPartition
                        + " which is invalid for topic " + topic);
            }
        }
    }

    // every FILE: reference in custom properties must exist in SmallFileMgr
    private void checkCustomProperties() throws DdlException {
        SmallFileMgr smallFileMgr = Env.getCurrentEnv().getSmallFileMgr();
        for (Map.Entry<String, String> entry : customProperties.entrySet()) {
            if (entry.getValue().startsWith("FILE:")) {
                String file = entry.getValue().substring(entry.getValue().indexOf(":") + 1);
                if (!smallFileMgr.containsFile(dbId, KAFKA_FILE_CATALOG, file)) {
                    throw new DdlException("File " + file + " does not exist in db " + dbId
                            + " with catalog: " + KAFKA_FILE_CATALOG);
                }
            }
        }
    }

    /**
     * Adds begin offsets (derived from the default-offset setting) to progress
     * for any partition not yet tracked; pauses the job on failure and rethrows.
     */
    private void updateNewPartitionProgress() throws UserException {
        try {
            for (Integer kafkaPartition : currentKafkaPartitions) {
                if (!((KafkaProgress) progress).containsPartition(kafkaPartition)) {
                    List<Integer> newPartitions = Lists.newArrayList();
                    newPartitions.add(kafkaPartition);
                    List<Pair<Integer, Long>> newPartitionsOffsets =
                            getNewPartitionOffsetsFromDefaultOffset(newPartitions);
                    Preconditions.checkState(newPartitionsOffsets.size() == 1);
                    for (Pair<Integer, Long> partitionOffset : newPartitionsOffsets) {
                        ((KafkaProgress) progress).addPartitionOffset(partitionOffset);
                        if (LOG.isDebugEnabled()) {
                            // NOTE(review): no .build() here unlike sibling call
                            // sites — the LogBuilder object itself is logged;
                            // confirm whether that is intended
                            LOG.debug(new LogBuilder(LogKey.ROUTINE_LOAD_JOB, id)
                                    .add("kafka_partition_id", partitionOffset.first)
                                    .add("begin_offset", partitionOffset.second)
                                    .add("msg", "The new partition has been added in job"));
                        }
                    }
                }
            }
        } catch (UserException e) {
            unprotectUpdateState(JobState.PAUSED,
                    new ErrorReason(InternalErrorCode.PARTITIONS_ERR, e.getMessage()), false /* not replay */);
            throw e;
        }
    }

    // resolves the default offset for each new partition; time-based defaults
    // are converted to real offsets via Kafka's offsets-for-times lookup
    private List<Pair<Integer, Long>> getNewPartitionOffsetsFromDefaultOffset(List<Integer> newPartitions)
            throws UserException {
        List<Pair<Integer, Long>> partitionOffsets = Lists.newArrayList();
        long beginOffset = convertedDefaultOffsetToLong();
        for (Integer kafkaPartition : newPartitions) {
            partitionOffsets.add(Pair.of(kafkaPartition, beginOffset));
        }
        if (isOffsetForTimes()) {
            try {
                partitionOffsets = KafkaUtil.getOffsetsForTimes(this.brokerList, this.topic,
                        convertedCustomProperties, partitionOffsets);
            } catch (LoadException e) {
                LOG.warn(new LogBuilder(LogKey.ROUTINE_LOAD_JOB, id)
                        .add("partition:timestamp", Joiner.on(",").join(partitionOffsets))
                        .add("error_msg", "Job failed to fetch current offsets from times with error "
                                + e.getMessage())
                        .build(), e);
                throw new UserException(e);
            }
        }
        return partitionOffsets;
    }

    // applies data-source options from CREATE ROUTINE LOAD; assigns a random
    // consumer group id if the user did not set one
    @Override
    protected void setOptional(CreateRoutineLoadStmt stmt) throws UserException {
        super.setOptional(stmt);
        KafkaDataSourceProperties kafkaDataSourceProperties =
                (KafkaDataSourceProperties) stmt.getDataSourceProperties();
        if (CollectionUtils.isNotEmpty(kafkaDataSourceProperties.getKafkaPartitionOffsets())) {
            setCustomKafkaPartitions(kafkaDataSourceProperties);
        }
        if (MapUtils.isNotEmpty(kafkaDataSourceProperties.getCustomKafkaProperties())) {
            setCustomKafkaProperties(kafkaDataSourceProperties.getCustomKafkaProperties());
        }
        this.customProperties.putIfAbsent(PROP_GROUP_ID, name + "_" + UUID.randomUUID());
    }

    // records user-pinned partitions and seeds progress with their offsets
    // (timestamps are first translated to offsets)
    private void setCustomKafkaPartitions(KafkaDataSourceProperties kafkaDataSourceProperties) throws LoadException {
        List<Pair<Integer, Long>> kafkaPartitionOffsets = kafkaDataSourceProperties.getKafkaPartitionOffsets();
        boolean isForTimes = kafkaDataSourceProperties.isOffsetsForTimes();
        if (isForTimes) {
            kafkaPartitionOffsets = KafkaUtil.getOffsetsForTimes(kafkaDataSourceProperties.getBrokerList(),
                    kafkaDataSourceProperties.getTopic(), convertedCustomProperties,
                    kafkaDataSourceProperties.getKafkaPartitionOffsets());
        }
        for (Pair<Integer, Long> partitionOffset : kafkaPartitionOffsets) {
            this.customKafkaPartitions.add(partitionOffset.first);
            ((KafkaProgress) progress).addPartitionOffset(partitionOffset);
        }
    }

    private void setCustomKafkaProperties(Map<String, String> kafkaProperties) {
        this.customProperties = kafkaProperties;
    }

    @Override
    protected String dataSourcePropertiesJsonToString() {
        Map<String, String> dataSourceProperties = Maps.newHashMap();
        dataSourceProperties.put("brokerList", brokerList);
        dataSourceProperties.put("topic", topic);
        List<Integer> sortedPartitions = Lists.newArrayList(currentKafkaPartitions);
        Collections.sort(sortedPartitions);
        dataSourceProperties.put("currentKafkaPartitions", Joiner.on(",").join(sortedPartitions));
        Gson gson = new GsonBuilder().disableHtmlEscaping().create();
        return gson.toJson(dataSourceProperties);
    }

    @Override
    protected String customPropertiesJsonToString() {
        Gson gson = new GsonBuilder().disableHtmlEscaping().create();
        return gson.toJson(customProperties);
    }

    @Override
    protected Map<String, String> getDataSourceProperties() {
        Map<String, String> dataSourceProperties = Maps.newHashMap();
        dataSourceProperties.put("kafka_broker_list", brokerList);
        dataSourceProperties.put("kafka_topic", topic);
        return dataSourceProperties;
    }

    // exposes custom properties with the "property." prefix restored
    @Override
    protected Map<String, String> getCustomProperties() {
        Map<String, String> ret = new HashMap<>();
        customProperties.forEach((k, v) -> ret.put("property." + k, v));
        return ret;
    }

    /** Serializes broker list, topic, pinned partitions and custom properties. */
    @Override
    public void write(DataOutput out) throws IOException {
        super.write(out);
        Text.writeString(out, brokerList);
        Text.writeString(out, topic);
        out.writeInt(customKafkaPartitions.size());
        for (Integer partitionId : customKafkaPartitions) {
            out.writeInt(partitionId);
        }
        out.writeInt(customProperties.size());
        for (Map.Entry<String, String> property : customProperties.entrySet()) {
            Text.writeString(out, "property." + property.getKey());
            Text.writeString(out, property.getValue());
        }
    }

    /** Deserializes the fields written by {@link #write(DataOutput)}. */
    public void readFields(DataInput in) throws IOException {
        super.readFields(in);
        brokerList = Text.readString(in);
        topic = Text.readString(in);
        int size = in.readInt();
        for (int i = 0; i < size; i++) {
            customKafkaPartitions.add(in.readInt());
        }
        int count = in.readInt();
        for (int i = 0; i < count; i++) {
            String propertyKey = Text.readString(in);
            String propertyValue = Text.readString(in);
            if (propertyKey.startsWith("property.")) {
                // strip the "property." prefix written by write()
                this.customProperties.put(propertyKey.substring(propertyKey.indexOf(".") + 1), propertyValue);
            }
        }
    }

    /**
     * Applies ALTER ROUTINE LOAD; only PAUSED jobs may be modified. Changes
     * are logged to the edit log for replay.
     */
    @Override
    public void modifyProperties(AlterRoutineLoadStmt stmt) throws UserException {
        Map<String, String> jobProperties = stmt.getAnalyzedJobProperties();
        KafkaDataSourceProperties dataSourceProperties = (KafkaDataSourceProperties) stmt.getDataSourceProperties();
        if (null != dataSourceProperties && dataSourceProperties.isOffsetsForTimes()) {
            // timestamp-based offsets are resolved before logging so replay is deterministic
            convertTimestampToOffset(dataSourceProperties);
        }
        writeLock();
        try {
            if (getState() != JobState.PAUSED) {
                throw new DdlException("Only supports modification of PAUSED jobs");
            }
            modifyPropertiesInternal(jobProperties, dataSourceProperties);
            AlterRoutineLoadJobOperationLog log = new AlterRoutineLoadJobOperationLog(this.id,
                    jobProperties, dataSourceProperties);
            Env.getCurrentEnv().getEditLog().logAlterRoutineLoadJob(log);
        } finally {
            writeUnlock();
        }
    }

    private void convertTimestampToOffset(KafkaDataSourceProperties dataSourceProperties) throws UserException {
        List<Pair<Integer, Long>> partitionOffsets = dataSourceProperties.getKafkaPartitionOffsets();
        if (partitionOffsets.isEmpty()) {
            return;
        }
        List<Pair<Integer, Long>> newOffsets = KafkaUtil.getOffsetsForTimes(brokerList, topic,
                convertedCustomProperties, partitionOffsets);
        dataSourceProperties.setKafkaPartitionOffsets(newOffsets);
    }

    // shared by modifyProperties() and its edit-log replay path
    private void modifyPropertiesInternal(Map<String, String> jobProperties,
            KafkaDataSourceProperties dataSourceProperties) throws DdlException {
        if (null != dataSourceProperties) {
            List<Pair<Integer, Long>> kafkaPartitionOffsets = Lists.newArrayList();
            Map<String, String> customKafkaProperties = Maps.newHashMap();
            if (MapUtils.isNotEmpty(dataSourceProperties.getOriginalDataSourceProperties())) {
                kafkaPartitionOffsets = dataSourceProperties.getKafkaPartitionOffsets();
                customKafkaProperties = dataSourceProperties.getCustomKafkaProperties();
            }
            if (!kafkaPartitionOffsets.isEmpty()) {
                ((KafkaProgress) progress).modifyOffset(kafkaPartitionOffsets);
            }
            if (!customKafkaProperties.isEmpty()) {
                this.customProperties.putAll(customKafkaProperties);
                convertCustomProperties(true);
            }
            if (!Strings.isNullOrEmpty(dataSourceProperties.getBrokerList())) {
                this.brokerList = dataSourceProperties.getBrokerList();
            }
            if (!Strings.isNullOrEmpty(dataSourceProperties.getTopic())) {
                this.topic = dataSourceProperties.getTopic();
            }
        }
        if (!jobProperties.isEmpty()) {
            Map<String, String> copiedJobProperties = Maps.newHashMap(jobProperties);
            modifyCommonJobProperties(copiedJobProperties);
            this.jobProperties.putAll(copiedJobProperties);
            if (jobProperties.containsKey(CreateRoutineLoadStmt.PARTIAL_COLUMNS)) {
                this.isPartialUpdate = BooleanUtils.toBoolean(jobProperties.get(CreateRoutineLoadStmt.PARTIAL_COLUMNS));
            }
        }
        LOG.info("modify the properties of kafka routine load job: {}, jobProperties: {}, datasource properties: {}",
                this.id, jobProperties, dataSourceProperties);
    }

    @Override
    public void replayModifyProperties(AlterRoutineLoadJobOperationLog log) {
        try {
            modifyPropertiesInternal(log.getJobProperties(),
                    (KafkaDataSourceProperties) log.getDataSourceProperties());
        } catch (DdlException e) {
            LOG.error("failed to replay modify kafka routine load job: {}", id, e);
        }
    }

    // JSON of partition id -> lag relative to the cached latest offsets
    @Override
    protected String getLag() {
        Map<Integer, Long> partitionIdToOffsetLag = ((KafkaProgress) progress).getLag(cachedPartitionWithLatestOffsets);
        Gson gson = new Gson();
        return gson.toJson(partitionIdToOffsetLag);
    }

    @Override
    public TFileCompressType getCompressType() {
        return TFileCompressType.PLAIN;
    }

    @Override
    public double getMaxFilterRatio() {
        return maxFilterRatio;
    }
}
class KafkaRoutineLoadJob extends RoutineLoadJob { private static final Logger LOG = LogManager.getLogger(KafkaRoutineLoadJob.class); public static final String KAFKA_FILE_CATALOG = "kafka"; public static final String PROP_GROUP_ID = "group.id"; private String brokerList; private String topic; private List<Integer> customKafkaPartitions = Lists.newArrayList(); private List<Integer> currentKafkaPartitions = Lists.newArrayList(); private String kafkaDefaultOffSet = ""; private Map<String, String> customProperties = Maps.newHashMap(); private Map<String, String> convertedCustomProperties = Maps.newHashMap(); private Map<Integer, Long> cachedPartitionWithLatestOffsets = Maps.newConcurrentMap(); private List<Integer> newCurrentKafkaPartition = Lists.newArrayList(); public KafkaRoutineLoadJob() { super(-1, LoadDataSourceType.KAFKA); } public KafkaRoutineLoadJob(Long id, String name, long dbId, long tableId, String brokerList, String topic, UserIdentity userIdentity) { super(id, name, dbId, tableId, LoadDataSourceType.KAFKA, userIdentity); this.brokerList = brokerList; this.topic = topic; this.progress = new KafkaProgress(); } public KafkaRoutineLoadJob(Long id, String name, long dbId, String brokerList, String topic, UserIdentity userIdentity, boolean isMultiTable) { super(id, name, dbId, LoadDataSourceType.KAFKA, userIdentity); this.brokerList = brokerList; this.topic = topic; this.progress = new KafkaProgress(); setMultiTable(isMultiTable); } public String getTopic() { return topic; } public String getBrokerList() { return brokerList; } public Map<String, String> getConvertedCustomProperties() { return convertedCustomProperties; } private boolean isOffsetForTimes() { long offset = TimeUtils.timeStringToLong(this.kafkaDefaultOffSet); return offset != -1; } private long convertedDefaultOffsetToTimestamp() { TimeZone timeZone = TimeUtils.getOrSystemTimeZone(getTimezone()); return TimeUtils.timeStringToLong(this.kafkaDefaultOffSet, timeZone); } private long 
convertedDefaultOffsetToLong() { if (this.kafkaDefaultOffSet.isEmpty()) { return KafkaProgress.OFFSET_END_VAL; } else { if (isOffsetForTimes()) { return convertedDefaultOffsetToTimestamp(); } else if (this.kafkaDefaultOffSet.equalsIgnoreCase(KafkaProgress.OFFSET_BEGINNING)) { return KafkaProgress.OFFSET_BEGINNING_VAL; } else if (this.kafkaDefaultOffSet.equalsIgnoreCase(KafkaProgress.OFFSET_END)) { return KafkaProgress.OFFSET_END_VAL; } else { return KafkaProgress.OFFSET_END_VAL; } } } @Override public void prepare() throws UserException { super.prepare(); convertCustomProperties(true); } private void convertCustomProperties(boolean rebuild) throws DdlException { if (customProperties.isEmpty()) { return; } if (!rebuild && !convertedCustomProperties.isEmpty()) { return; } if (rebuild) { convertedCustomProperties.clear(); } SmallFileMgr smallFileMgr = Env.getCurrentEnv().getSmallFileMgr(); for (Map.Entry<String, String> entry : customProperties.entrySet()) { if (entry.getValue().startsWith("FILE:")) { String file = entry.getValue().substring(entry.getValue().indexOf(":") + 1); SmallFile smallFile = smallFileMgr.getSmallFile(dbId, KAFKA_FILE_CATALOG, file, true); convertedCustomProperties.put(entry.getKey(), "FILE:" + smallFile.id + ":" + smallFile.md5); } else { convertedCustomProperties.put(entry.getKey(), entry.getValue()); } } if (convertedCustomProperties.containsKey(KafkaConfiguration.KAFKA_ORIGIN_DEFAULT_OFFSETS.getName())) { kafkaDefaultOffSet = convertedCustomProperties .remove(KafkaConfiguration.KAFKA_ORIGIN_DEFAULT_OFFSETS.getName()); return; } if (convertedCustomProperties.containsKey(KafkaConfiguration.KAFKA_DEFAULT_OFFSETS.getName())) { kafkaDefaultOffSet = convertedCustomProperties.remove(KafkaConfiguration.KAFKA_DEFAULT_OFFSETS.getName()); } } @Override public void divideRoutineLoadJob(int currentConcurrentTaskNum) throws UserException { List<RoutineLoadTaskInfo> result = new ArrayList<>(); writeLock(); try { if (state == JobState.NEED_SCHEDULE) { for 
(int i = 0; i < currentConcurrentTaskNum; i++) { Map<Integer, Long> taskKafkaProgress = Maps.newHashMap(); for (int j = i; j < currentKafkaPartitions.size(); j = j + currentConcurrentTaskNum) { int kafkaPartition = currentKafkaPartitions.get(j); taskKafkaProgress.put(kafkaPartition, ((KafkaProgress) progress).getOffsetByPartition(kafkaPartition)); } KafkaTaskInfo kafkaTaskInfo = new KafkaTaskInfo(UUID.randomUUID(), id, maxBatchIntervalS * 2 * 1000, taskKafkaProgress, isMultiTable()); routineLoadTaskInfoList.add(kafkaTaskInfo); result.add(kafkaTaskInfo); } if (result.size() != 0) { unprotectUpdateState(JobState.RUNNING, null, false); } } else { LOG.debug("Ignore to divide routine load job while job state {}", state); } Env.getCurrentEnv().getRoutineLoadTaskScheduler().addTasksInQueue(result); } finally { writeUnlock(); } } @Override public int calculateCurrentConcurrentTaskNum() { int partitionNum = currentKafkaPartitions.size(); if (desireTaskConcurrentNum == 0) { desireTaskConcurrentNum = Config.max_routine_load_task_concurrent_num; } LOG.debug("current concurrent task number is min" + "(partition num: {}, desire task concurrent num: {} config: {})", partitionNum, desireTaskConcurrentNum, Config.max_routine_load_task_concurrent_num); currentTaskConcurrentNum = Math.min(partitionNum, Math.min(desireTaskConcurrentNum, Config.max_routine_load_task_concurrent_num)); return currentTaskConcurrentNum; } @Override protected boolean checkCommitInfo(RLTaskTxnCommitAttachment rlTaskTxnCommitAttachment, TransactionState txnState, TransactionState.TxnStatusChangeReason txnStatusChangeReason) { if (txnState.getTransactionStatus() == TransactionStatus.COMMITTED) { return true; } LOG.debug("no need to update the progress of kafka routine load. 
txn status: {}, " + "txnStatusChangeReason: {}, task: {}, job: {}", txnState.getTransactionStatus(), txnStatusChangeReason, DebugUtil.printId(rlTaskTxnCommitAttachment.getTaskId()), id); return false; } @Override protected void updateProgress(RLTaskTxnCommitAttachment attachment) throws UserException { super.updateProgress(attachment); this.progress.update(attachment); } @Override protected void replayUpdateProgress(RLTaskTxnCommitAttachment attachment) { super.replayUpdateProgress(attachment); this.progress.update(attachment); } @Override protected RoutineLoadTaskInfo unprotectRenewTask(RoutineLoadTaskInfo routineLoadTaskInfo) { KafkaTaskInfo oldKafkaTaskInfo = (KafkaTaskInfo) routineLoadTaskInfo; KafkaTaskInfo kafkaTaskInfo = new KafkaTaskInfo(oldKafkaTaskInfo, ((KafkaProgress) progress).getPartitionIdToOffset(oldKafkaTaskInfo.getPartitions()), isMultiTable()); routineLoadTaskInfoList.remove(routineLoadTaskInfo); routineLoadTaskInfoList.add(kafkaTaskInfo); return kafkaTaskInfo; } @Override protected void unprotectUpdateProgress() throws UserException { updateNewPartitionProgress(); } @Override protected void preCheckNeedSchedule() throws UserException { if (this.state == JobState.RUNNING || this.state == JobState.NEED_SCHEDULE) { if (customKafkaPartitions != null && !customKafkaPartitions.isEmpty()) { return; } updateKafkaPartitions(); } } private void updateKafkaPartitions() throws UserException { try { this.newCurrentKafkaPartition = getAllKafkaPartitions(); } catch (Exception e) { LOG.warn(new LogBuilder(LogKey.ROUTINE_LOAD_JOB, id) .add("error_msg", "Job failed to fetch all current partition with error " + e.getMessage()) .build(), e); if (this.state == JobState.NEED_SCHEDULE) { unprotectUpdateState(JobState.PAUSED, new ErrorReason(InternalErrorCode.PARTITIONS_ERR, "Job failed to fetch all current partition with error " + e.getMessage()), false /* not replay */); } } } @Override protected boolean unprotectNeedReschedule() throws UserException { if (this.state 
== JobState.RUNNING || this.state == JobState.NEED_SCHEDULE) { if (CollectionUtils.isNotEmpty(customKafkaPartitions)) { currentKafkaPartitions = customKafkaPartitions; return false; } Preconditions.checkNotNull(this.newCurrentKafkaPartition); if (new HashSet<>(currentKafkaPartitions).containsAll(this.newCurrentKafkaPartition)) { if (currentKafkaPartitions.size() > this.newCurrentKafkaPartition.size()) { currentKafkaPartitions = this.newCurrentKafkaPartition; if (LOG.isDebugEnabled()) { LOG.debug(new LogBuilder(LogKey.ROUTINE_LOAD_JOB, id) .add("current_kafka_partitions", Joiner.on(",").join(currentKafkaPartitions)) .add("msg", "current kafka partitions has been change") .build()); } return true; } else { for (Integer kafkaPartition : currentKafkaPartitions) { if (!((KafkaProgress) progress).containsPartition(kafkaPartition)) { return true; } } return false; } } else { currentKafkaPartitions = this.newCurrentKafkaPartition; if (LOG.isDebugEnabled()) { LOG.debug(new LogBuilder(LogKey.ROUTINE_LOAD_JOB, id) .add("current_kafka_partitions", Joiner.on(",").join(currentKafkaPartitions)) .add("msg", "current kafka partitions has been change") .build()); } return true; } } if (this.state == JobState.PAUSED) { return ScheduleRule.isNeedAutoSchedule(this); } return false; } @Override protected String getStatistic() { Map<String, Object> summary = this.jobStatistic.summary(); Gson gson = new GsonBuilder().disableHtmlEscaping().create(); return gson.toJson(summary); } private List<Integer> getAllKafkaPartitions() throws UserException { convertCustomProperties(false); return KafkaUtil.getAllKafkaPartitions(brokerList, topic, convertedCustomProperties); } public static KafkaRoutineLoadJob fromCreateStmt(CreateRoutineLoadStmt stmt) throws UserException { Database db = Env.getCurrentInternalCatalog().getDbOrDdlException(stmt.getDBName()); long id = Env.getCurrentEnv().getNextId(); KafkaDataSourceProperties kafkaProperties = (KafkaDataSourceProperties) 
stmt.getDataSourceProperties(); KafkaRoutineLoadJob kafkaRoutineLoadJob; if (kafkaProperties.isMultiTable()) { kafkaRoutineLoadJob = new KafkaRoutineLoadJob(id, stmt.getName(), db.getId(), kafkaProperties.getBrokerList(), kafkaProperties.getTopic(), stmt.getUserInfo(), true); } else { OlapTable olapTable = db.getOlapTableOrDdlException(stmt.getTableName()); checkMeta(olapTable, stmt.getRoutineLoadDesc()); long tableId = olapTable.getId(); kafkaRoutineLoadJob = new KafkaRoutineLoadJob(id, stmt.getName(), db.getId(), tableId, kafkaProperties.getBrokerList(), kafkaProperties.getTopic(), stmt.getUserInfo()); } kafkaRoutineLoadJob.setOptional(stmt); kafkaRoutineLoadJob.checkCustomProperties(); kafkaRoutineLoadJob.checkCustomPartition(); return kafkaRoutineLoadJob; } private void checkCustomPartition() throws UserException { if (customKafkaPartitions.isEmpty()) { return; } List<Integer> allKafkaPartitions = getAllKafkaPartitions(); for (Integer customPartition : customKafkaPartitions) { if (!allKafkaPartitions.contains(customPartition)) { throw new LoadException("there is a custom kafka partition " + customPartition + " which is invalid for topic " + topic); } } } private void checkCustomProperties() throws DdlException { SmallFileMgr smallFileMgr = Env.getCurrentEnv().getSmallFileMgr(); for (Map.Entry<String, String> entry : customProperties.entrySet()) { if (entry.getValue().startsWith("FILE:")) { String file = entry.getValue().substring(entry.getValue().indexOf(":") + 1); if (!smallFileMgr.containsFile(dbId, KAFKA_FILE_CATALOG, file)) { throw new DdlException("File " + file + " does not exist in db " + dbId + " with catalog: " + KAFKA_FILE_CATALOG); } } } } private void updateNewPartitionProgress() throws UserException { try { for (Integer kafkaPartition : currentKafkaPartitions) { if (!((KafkaProgress) progress).containsPartition(kafkaPartition)) { List<Integer> newPartitions = Lists.newArrayList(); newPartitions.add(kafkaPartition); List<Pair<Integer, Long>> 
newPartitionsOffsets = getNewPartitionOffsetsFromDefaultOffset(newPartitions); Preconditions.checkState(newPartitionsOffsets.size() == 1); for (Pair<Integer, Long> partitionOffset : newPartitionsOffsets) { ((KafkaProgress) progress).addPartitionOffset(partitionOffset); if (LOG.isDebugEnabled()) { LOG.debug(new LogBuilder(LogKey.ROUTINE_LOAD_JOB, id) .add("kafka_partition_id", partitionOffset.first) .add("begin_offset", partitionOffset.second) .add("msg", "The new partition has been added in job")); } } } } } catch (UserException e) { unprotectUpdateState(JobState.PAUSED, new ErrorReason(InternalErrorCode.PARTITIONS_ERR, e.getMessage()), false /* not replay */); throw e; } } private List<Pair<Integer, Long>> getNewPartitionOffsetsFromDefaultOffset(List<Integer> newPartitions) throws UserException { List<Pair<Integer, Long>> partitionOffsets = Lists.newArrayList(); long beginOffset = convertedDefaultOffsetToLong(); for (Integer kafkaPartition : newPartitions) { partitionOffsets.add(Pair.of(kafkaPartition, beginOffset)); } if (isOffsetForTimes()) { try { partitionOffsets = KafkaUtil.getOffsetsForTimes(this.brokerList, this.topic, convertedCustomProperties, partitionOffsets); } catch (LoadException e) { LOG.warn(new LogBuilder(LogKey.ROUTINE_LOAD_JOB, id) .add("partition:timestamp", Joiner.on(",").join(partitionOffsets)) .add("error_msg", "Job failed to fetch current offsets from times with error " + e.getMessage()) .build(), e); throw new UserException(e); } } return partitionOffsets; } @Override protected void setOptional(CreateRoutineLoadStmt stmt) throws UserException { super.setOptional(stmt); KafkaDataSourceProperties kafkaDataSourceProperties = (KafkaDataSourceProperties) stmt.getDataSourceProperties(); if (CollectionUtils.isNotEmpty(kafkaDataSourceProperties.getKafkaPartitionOffsets())) { setCustomKafkaPartitions(kafkaDataSourceProperties); } if (MapUtils.isNotEmpty(kafkaDataSourceProperties.getCustomKafkaProperties())) { 
setCustomKafkaProperties(kafkaDataSourceProperties.getCustomKafkaProperties()); } this.customProperties.putIfAbsent(PROP_GROUP_ID, name + "_" + UUID.randomUUID()); } private void setCustomKafkaPartitions(KafkaDataSourceProperties kafkaDataSourceProperties) throws LoadException { List<Pair<Integer, Long>> kafkaPartitionOffsets = kafkaDataSourceProperties.getKafkaPartitionOffsets(); boolean isForTimes = kafkaDataSourceProperties.isOffsetsForTimes(); if (isForTimes) { kafkaPartitionOffsets = KafkaUtil.getOffsetsForTimes(kafkaDataSourceProperties.getBrokerList(), kafkaDataSourceProperties.getTopic(), convertedCustomProperties, kafkaDataSourceProperties.getKafkaPartitionOffsets()); } for (Pair<Integer, Long> partitionOffset : kafkaPartitionOffsets) { this.customKafkaPartitions.add(partitionOffset.first); ((KafkaProgress) progress).addPartitionOffset(partitionOffset); } } private void setCustomKafkaProperties(Map<String, String> kafkaProperties) { this.customProperties = kafkaProperties; } @Override protected String dataSourcePropertiesJsonToString() { Map<String, String> dataSourceProperties = Maps.newHashMap(); dataSourceProperties.put("brokerList", brokerList); dataSourceProperties.put("topic", topic); List<Integer> sortedPartitions = Lists.newArrayList(currentKafkaPartitions); Collections.sort(sortedPartitions); dataSourceProperties.put("currentKafkaPartitions", Joiner.on(",").join(sortedPartitions)); Gson gson = new GsonBuilder().disableHtmlEscaping().create(); return gson.toJson(dataSourceProperties); } @Override protected String customPropertiesJsonToString() { Gson gson = new GsonBuilder().disableHtmlEscaping().create(); return gson.toJson(customProperties); } @Override protected Map<String, String> getDataSourceProperties() { Map<String, String> dataSourceProperties = Maps.newHashMap(); dataSourceProperties.put("kafka_broker_list", brokerList); dataSourceProperties.put("kafka_topic", topic); return dataSourceProperties; } @Override protected Map<String, String> 
getCustomProperties() { Map<String, String> ret = new HashMap<>(); customProperties.forEach((k, v) -> ret.put("property." + k, v)); return ret; } @Override public void write(DataOutput out) throws IOException { super.write(out); Text.writeString(out, brokerList); Text.writeString(out, topic); out.writeInt(customKafkaPartitions.size()); for (Integer partitionId : customKafkaPartitions) { out.writeInt(partitionId); } out.writeInt(customProperties.size()); for (Map.Entry<String, String> property : customProperties.entrySet()) { Text.writeString(out, "property." + property.getKey()); Text.writeString(out, property.getValue()); } } public void readFields(DataInput in) throws IOException { super.readFields(in); brokerList = Text.readString(in); topic = Text.readString(in); int size = in.readInt(); for (int i = 0; i < size; i++) { customKafkaPartitions.add(in.readInt()); } int count = in.readInt(); for (int i = 0; i < count; i++) { String propertyKey = Text.readString(in); String propertyValue = Text.readString(in); if (propertyKey.startsWith("property.")) { this.customProperties.put(propertyKey.substring(propertyKey.indexOf(".") + 1), propertyValue); } } } @Override public void modifyProperties(AlterRoutineLoadStmt stmt) throws UserException { Map<String, String> jobProperties = stmt.getAnalyzedJobProperties(); KafkaDataSourceProperties dataSourceProperties = (KafkaDataSourceProperties) stmt.getDataSourceProperties(); if (null != dataSourceProperties && dataSourceProperties.isOffsetsForTimes()) { convertTimestampToOffset(dataSourceProperties); } writeLock(); try { if (getState() != JobState.PAUSED) { throw new DdlException("Only supports modification of PAUSED jobs"); } modifyPropertiesInternal(jobProperties, dataSourceProperties); AlterRoutineLoadJobOperationLog log = new AlterRoutineLoadJobOperationLog(this.id, jobProperties, dataSourceProperties); Env.getCurrentEnv().getEditLog().logAlterRoutineLoadJob(log); } finally { writeUnlock(); } } private void 
convertTimestampToOffset(KafkaDataSourceProperties dataSourceProperties) throws UserException { List<Pair<Integer, Long>> partitionOffsets = dataSourceProperties.getKafkaPartitionOffsets(); if (partitionOffsets.isEmpty()) { return; } List<Pair<Integer, Long>> newOffsets = KafkaUtil.getOffsetsForTimes(brokerList, topic, convertedCustomProperties, partitionOffsets); dataSourceProperties.setKafkaPartitionOffsets(newOffsets); } private void modifyPropertiesInternal(Map<String, String> jobProperties, KafkaDataSourceProperties dataSourceProperties) throws DdlException { if (null != dataSourceProperties) { List<Pair<Integer, Long>> kafkaPartitionOffsets = Lists.newArrayList(); Map<String, String> customKafkaProperties = Maps.newHashMap(); if (MapUtils.isNotEmpty(dataSourceProperties.getOriginalDataSourceProperties())) { kafkaPartitionOffsets = dataSourceProperties.getKafkaPartitionOffsets(); customKafkaProperties = dataSourceProperties.getCustomKafkaProperties(); } if (!kafkaPartitionOffsets.isEmpty()) { ((KafkaProgress) progress).modifyOffset(kafkaPartitionOffsets); } if (!customKafkaProperties.isEmpty()) { this.customProperties.putAll(customKafkaProperties); convertCustomProperties(true); } if (!Strings.isNullOrEmpty(dataSourceProperties.getBrokerList())) { this.brokerList = dataSourceProperties.getBrokerList(); } if (!Strings.isNullOrEmpty(dataSourceProperties.getTopic())) { this.topic = dataSourceProperties.getTopic(); } } if (!jobProperties.isEmpty()) { Map<String, String> copiedJobProperties = Maps.newHashMap(jobProperties); modifyCommonJobProperties(copiedJobProperties); this.jobProperties.putAll(copiedJobProperties); if (jobProperties.containsKey(CreateRoutineLoadStmt.PARTIAL_COLUMNS)) { this.isPartialUpdate = BooleanUtils.toBoolean(jobProperties.get(CreateRoutineLoadStmt.PARTIAL_COLUMNS)); } } LOG.info("modify the properties of kafka routine load job: {}, jobProperties: {}, datasource properties: {}", this.id, jobProperties, dataSourceProperties); } @Override 
public void replayModifyProperties(AlterRoutineLoadJobOperationLog log) { try { modifyPropertiesInternal(log.getJobProperties(), (KafkaDataSourceProperties) log.getDataSourceProperties()); } catch (DdlException e) { LOG.error("failed to replay modify kafka routine load job: {}", id, e); } } @Override protected String getLag() { Map<Integer, Long> partitionIdToOffsetLag = ((KafkaProgress) progress).getLag(cachedPartitionWithLatestOffsets); Gson gson = new Gson(); return gson.toJson(partitionIdToOffsetLag); } @Override public TFileCompressType getCompressType() { return TFileCompressType.PLAIN; } @Override public double getMaxFilterRatio() { return maxFilterRatio; } }
Actually, `isEmpty` is not implemented :smile: There are simply too many ways of checking whether a collection is empty.
/**
 * Verifies the input/output wrapper and edge bookkeeping of {@code TableOperatorWrapper}:
 * after wiring two one-input wrappers into a two-input wrapper, each wrapper must report
 * the correct input/output wrappers and edges.
 */
public void testBasicInfo() {
    TestingOneInputStreamOperator inOperator1 = new TestingOneInputStreamOperator();
    TestingOneInputStreamOperator inOperator2 = new TestingOneInputStreamOperator();
    TestingTwoInputStreamOperator outOperator = new TestingTwoInputStreamOperator();
    TableOperatorWrapper<TestingOneInputStreamOperator> wrapper1 =
            createOneInputOperatorWrapper(inOperator1, "test1");
    TableOperatorWrapper<TestingOneInputStreamOperator> wrapper2 =
            createOneInputOperatorWrapper(inOperator2, "test2");
    TableOperatorWrapper<TestingTwoInputStreamOperator> wrapper3 =
            createTwoInputOperatorWrapper(outOperator, "test3");
    // wrapper1 feeds input 1 and wrapper2 feeds input 2 of wrapper3.
    wrapper3.addInput(wrapper1, 1);
    wrapper3.addInput(wrapper2, 2);

    // Prefer AssertJ's collection assertion `assertThat(c).isEmpty()` over
    // `assertThat(c.isEmpty()).isTrue()` — on failure it reports the offending
    // elements instead of just "expected true".
    assertThat(wrapper1.getInputEdges()).isEmpty();
    assertThat(wrapper1.getInputWrappers()).isEmpty();
    assertThat(wrapper1.getOutputWrappers()).containsExactly(wrapper3);
    assertThat(wrapper1.getOutputEdges()).containsExactly(new Edge(wrapper1, wrapper3, 1));

    assertThat(wrapper2.getInputEdges()).isEmpty();
    assertThat(wrapper2.getInputWrappers()).isEmpty();
    assertThat(wrapper2.getOutputWrappers()).containsExactly(wrapper3);
    assertThat(wrapper2.getOutputEdges()).containsExactly(new Edge(wrapper2, wrapper3, 2));

    // The sink wrapper has no outputs and sees both sources as inputs, in order.
    assertThat(wrapper3.getOutputEdges()).isEmpty();
    assertThat(wrapper3.getOutputWrappers()).isEmpty();
    assertThat(wrapper3.getInputWrappers()).isEqualTo(Arrays.asList(wrapper1, wrapper2));
    assertThat(wrapper3.getInputEdges())
            .isEqualTo(
                    Arrays.asList(
                            new Edge(wrapper1, wrapper3, 1), new Edge(wrapper2, wrapper3, 2)));
}
assertThat(wrapper2.getInputWrappers().isEmpty()).isTrue();
/**
 * Verifies the input/output wrapper and edge bookkeeping of {@code TableOperatorWrapper}:
 * after wiring two one-input wrappers into a two-input wrapper, each wrapper must report
 * the correct input/output wrappers and edges.
 */
public void testBasicInfo() {
    TestingOneInputStreamOperator inOperator1 = new TestingOneInputStreamOperator();
    TestingOneInputStreamOperator inOperator2 = new TestingOneInputStreamOperator();
    TestingTwoInputStreamOperator outOperator = new TestingTwoInputStreamOperator();
    TableOperatorWrapper<TestingOneInputStreamOperator> wrapper1 =
            createOneInputOperatorWrapper(inOperator1, "test1");
    TableOperatorWrapper<TestingOneInputStreamOperator> wrapper2 =
            createOneInputOperatorWrapper(inOperator2, "test2");
    TableOperatorWrapper<TestingTwoInputStreamOperator> wrapper3 =
            createTwoInputOperatorWrapper(outOperator, "test3");
    // wrapper1 feeds input 1 and wrapper2 feeds input 2 of wrapper3.
    wrapper3.addInput(wrapper1, 1);
    wrapper3.addInput(wrapper2, 2);

    // Source wrappers: no inputs, exactly one output edge each towards wrapper3.
    assertThat(wrapper1.getInputEdges()).isEmpty();
    assertThat(wrapper1.getInputWrappers()).isEmpty();
    assertThat(wrapper1.getOutputWrappers()).containsExactly(wrapper3);
    assertThat(wrapper1.getOutputEdges()).containsExactly(new Edge(wrapper1, wrapper3, 1));

    assertThat(wrapper2.getInputEdges()).isEmpty();
    assertThat(wrapper2.getInputWrappers()).isEmpty();
    assertThat(wrapper2.getOutputWrappers()).containsExactly(wrapper3);
    assertThat(wrapper2.getOutputEdges()).containsExactly(new Edge(wrapper2, wrapper3, 2));

    // The sink wrapper has no outputs and sees both sources as inputs, in order.
    assertThat(wrapper3.getOutputEdges()).isEmpty();
    assertThat(wrapper3.getOutputWrappers()).isEmpty();
    assertThat(wrapper3.getInputWrappers()).isEqualTo(Arrays.asList(wrapper1, wrapper2));
    assertThat(wrapper3.getInputEdges())
            .isEqualTo(
                    Arrays.asList(
                            new Edge(wrapper1, wrapper3, 1), new Edge(wrapper2, wrapper3, 2)));
}
/** Tests for {@code TableOperatorWrapper}: creation, end-of-input propagation, and closing. */
class TableOperatorWrapperTest extends MultipleInputTestBase {

    // NOTE: the original carried a duplicated `@Test @Test` annotation (an excision
    // artifact); @Test is not repeatable, so the duplicate is a compile error and is removed.
    @Test
    public void testCreateOperator() throws Exception {
        TestingOneInputStreamOperator operator = new TestingOneInputStreamOperator();
        TableOperatorWrapper<TestingOneInputStreamOperator> wrapper =
                createOneInputOperatorWrapper(operator, "test");
        StreamOperatorParameters<RowData> parameters = createStreamOperatorParameters();
        wrapper.createOperator(parameters);

        assertThat(wrapper.getStreamOperator()).isEqualTo(operator);

        // A wrapper must refuse to be initialized twice; assertThatThrownBy replaces
        // the try/catch + fail("This should not happen") pattern.
        assertThatThrownBy(() -> wrapper.createOperator(parameters))
                .hasMessageContaining("This operator has been initialized");
    }

    @Test
    public void testEndInput() throws Exception {
        StreamOperatorParameters<RowData> parameters = createStreamOperatorParameters();
        TestingOneInputStreamOperator inOperator1 = new TestingOneInputStreamOperator();
        TestingOneInputStreamOperator inOperator2 = new TestingOneInputStreamOperator();
        TestingTwoInputStreamOperator outOperator = new TestingTwoInputStreamOperator();

        TableOperatorWrapper<TestingOneInputStreamOperator> wrapper1 =
                createOneInputOperatorWrapper(inOperator1, "test1");
        wrapper1.createOperator(parameters);

        TableOperatorWrapper<TestingOneInputStreamOperator> wrapper2 =
                createOneInputOperatorWrapper(inOperator2, "test2");
        wrapper2.createOperator(parameters);

        TableOperatorWrapper<TestingTwoInputStreamOperator> wrapper3 =
                createTwoInputOperatorWrapper(outOperator, "test3");
        wrapper3.addInput(wrapper1, 1);
        wrapper3.addInput(wrapper2, 2);
        wrapper3.createOperator(parameters);

        // Nothing has ended yet.
        assertThat(inOperator1.isEnd()).isFalse();
        assertThat(inOperator2.isEnd()).isFalse();
        assertThat(outOperator.getEndInputs()).isEmpty();

        // Ending wrapper1's single input propagates as input 1 of the downstream operator.
        wrapper1.endOperatorInput(1);
        assertThat(inOperator1.isEnd()).isTrue();
        assertThat(wrapper1.getEndedInputCount()).isEqualTo(1);
        assertThat(inOperator2.isEnd()).isFalse();
        assertThat(wrapper2.getEndedInputCount()).isEqualTo(0);
        assertThat(outOperator.getEndInputs()).containsExactly(1);
        assertThat(wrapper3.getEndedInputCount()).isEqualTo(1);

        // Ending wrapper2's single input propagates as input 2 of the downstream operator.
        wrapper2.endOperatorInput(1);
        assertThat(inOperator1.isEnd()).isTrue();
        assertThat(wrapper1.getEndedInputCount()).isEqualTo(1);
        assertThat(inOperator2.isEnd()).isTrue();
        assertThat(wrapper2.getEndedInputCount()).isEqualTo(1);
        assertThat(outOperator.getEndInputs()).isEqualTo(Arrays.asList(1, 2));
        assertThat(wrapper3.getEndedInputCount()).isEqualTo(2);
    }

    @Test
    public void testClose() throws Exception {
        TestingOneInputStreamOperator operator = new TestingOneInputStreamOperator();
        TableOperatorWrapper<TestingOneInputStreamOperator> wrapper =
                createOneInputOperatorWrapper(operator, "test");
        StreamOperatorParameters<RowData> parameters = createStreamOperatorParameters();
        wrapper.createOperator(parameters);
        assertThat(wrapper.getStreamOperator()).isEqualTo(operator);

        assertThat(operator.isClosed()).isFalse();
        assertThat(wrapper.isClosed()).isFalse();
        wrapper.close();
        assertThat(wrapper.isClosed()).isTrue();
        assertThat(operator.isClosed()).isTrue();

        // Closing an already-closed wrapper must be a no-op.
        wrapper.close();
        assertThat(wrapper.isClosed()).isTrue();
        assertThat(operator.isClosed()).isTrue();
    }
}
/** Tests for {@code TableOperatorWrapper}: creation, end-of-input propagation, and closing. */
class TableOperatorWrapperTest extends MultipleInputTestBase {

    // NOTE: the original carried a duplicated `@Test @Test` annotation (an excision
    // artifact); @Test is not repeatable, so the duplicate is a compile error and is removed.
    @Test
    public void testCreateOperator() throws Exception {
        TestingOneInputStreamOperator operator = new TestingOneInputStreamOperator();
        TableOperatorWrapper<TestingOneInputStreamOperator> wrapper =
                createOneInputOperatorWrapper(operator, "test");
        StreamOperatorParameters<RowData> parameters = createStreamOperatorParameters();
        wrapper.createOperator(parameters);

        assertThat(wrapper.getStreamOperator()).isEqualTo(operator);

        // A wrapper must refuse to be initialized twice.
        assertThatThrownBy(() -> wrapper.createOperator(parameters))
                .hasMessageContaining("This operator has been initialized");
    }

    @Test
    public void testEndInput() throws Exception {
        StreamOperatorParameters<RowData> parameters = createStreamOperatorParameters();
        TestingOneInputStreamOperator inOperator1 = new TestingOneInputStreamOperator();
        TestingOneInputStreamOperator inOperator2 = new TestingOneInputStreamOperator();
        TestingTwoInputStreamOperator outOperator = new TestingTwoInputStreamOperator();

        TableOperatorWrapper<TestingOneInputStreamOperator> wrapper1 =
                createOneInputOperatorWrapper(inOperator1, "test1");
        wrapper1.createOperator(parameters);

        TableOperatorWrapper<TestingOneInputStreamOperator> wrapper2 =
                createOneInputOperatorWrapper(inOperator2, "test2");
        wrapper2.createOperator(parameters);

        TableOperatorWrapper<TestingTwoInputStreamOperator> wrapper3 =
                createTwoInputOperatorWrapper(outOperator, "test3");
        wrapper3.addInput(wrapper1, 1);
        wrapper3.addInput(wrapper2, 2);
        wrapper3.createOperator(parameters);

        // Nothing has ended yet.
        assertThat(inOperator1.isEnd()).isFalse();
        assertThat(inOperator2.isEnd()).isFalse();
        assertThat(outOperator.getEndInputs()).isEmpty();

        // Ending wrapper1's single input propagates as input 1 of the downstream operator.
        wrapper1.endOperatorInput(1);
        assertThat(inOperator1.isEnd()).isTrue();
        assertThat(wrapper1.getEndedInputCount()).isEqualTo(1);
        assertThat(inOperator2.isEnd()).isFalse();
        assertThat(wrapper2.getEndedInputCount()).isEqualTo(0);
        assertThat(outOperator.getEndInputs()).containsExactly(1);
        assertThat(wrapper3.getEndedInputCount()).isEqualTo(1);

        // Ending wrapper2's single input propagates as input 2 of the downstream operator.
        wrapper2.endOperatorInput(1);
        assertThat(inOperator1.isEnd()).isTrue();
        assertThat(wrapper1.getEndedInputCount()).isEqualTo(1);
        assertThat(inOperator2.isEnd()).isTrue();
        assertThat(wrapper2.getEndedInputCount()).isEqualTo(1);
        assertThat(outOperator.getEndInputs()).isEqualTo(Arrays.asList(1, 2));
        assertThat(wrapper3.getEndedInputCount()).isEqualTo(2);
    }

    @Test
    public void testClose() throws Exception {
        TestingOneInputStreamOperator operator = new TestingOneInputStreamOperator();
        TableOperatorWrapper<TestingOneInputStreamOperator> wrapper =
                createOneInputOperatorWrapper(operator, "test");
        StreamOperatorParameters<RowData> parameters = createStreamOperatorParameters();
        wrapper.createOperator(parameters);
        assertThat(wrapper.getStreamOperator()).isEqualTo(operator);

        assertThat(operator.isClosed()).isFalse();
        assertThat(wrapper.isClosed()).isFalse();
        wrapper.close();
        assertThat(wrapper.isClosed()).isTrue();
        assertThat(operator.isClosed()).isTrue();

        // Closing an already-closed wrapper must be a no-op.
        wrapper.close();
        assertThat(wrapper.isClosed()).isTrue();
        assertThat(operator.isClosed()).isTrue();
    }
}
The `tabletInfo` is added to the node's `List<TabletInfo>`, which `computeIfAbsent` creates on first use.
/**
 * Builds one {@link TabletStatRequest} per compute node covering the tablets that node
 * serves, and sends the stat-collection RPCs asynchronously. The returned futures are
 * stored in {@code responseList} for waitResponse() to consume.
 */
private void sendTasks() {
    // Group this partition's tablets by the node chosen to serve each one.
    Map<ComputeNode, List<TabletInfo>> beToTabletInfos = new HashMap<>();
    for (Tablet tablet : tablets.values()) {
        ComputeNode node = Utils.chooseNode((LakeTablet) tablet);
        if (node == null) {
            // No alive node for this tablet: abort the whole job rather than
            // collect partial statistics.
            LOG.warn("Stop sending tablet stat task for partition {} because no alive node", debugName());
            return;
        }
        TabletInfo tabletInfo = new TabletInfo();
        tabletInfo.tabletId = tablet.getId();
        tabletInfo.version = version;
        // Append to the node's list, creating the list on first use.
        beToTabletInfos.computeIfAbsent(node, k -> Lists.newArrayList()).add(tabletInfo);
    }

    responseList = Lists.newArrayListWithCapacity(beToTabletInfos.size());
    for (Map.Entry<ComputeNode, List<TabletInfo>> entry : beToTabletInfos.entrySet()) {
        ComputeNode node = entry.getKey();
        TabletStatRequest request = new TabletStatRequest();
        request.tabletInfos = entry.getValue();
        request.timeoutMs = LakeService.TIMEOUT_GET_TABLET_STATS;
        try {
            LakeService lakeService = BrpcProxy.getLakeService(node.getHost(), node.getBrpcPort());
            Future<TabletStatResponse> responseFuture = lakeService.getTabletStats(request);
            responseList.add(responseFuture);
            LOG.debug("Sent tablet stat collection task to node {} for partition {} of version {}. tablet count={}",
                    node.getHost(), debugName(), version, entry.getValue().size());
        } catch (Throwable e) {
            // Best effort: a failed send to one node must not block requests to the others.
            LOG.warn("Fail to send tablet stat task to host {} for partition {}: {}",
                    node.getHost(), debugName(), e.getMessage());
        }
    }
}
beToTabletInfos.computeIfAbsent(node, k -> Lists.newArrayList()).add(tabletInfo);
/**
 * Builds one {@link TabletStatRequest} per compute node covering the tablets that node
 * serves, and sends the stat-collection RPCs asynchronously. The returned futures are
 * stored in {@code responseList} for waitResponse() to consume.
 */
private void sendTasks() {
    // Group this partition's tablets by the node chosen to serve each one.
    Map<ComputeNode, List<TabletInfo>> beToTabletInfos = new HashMap<>();
    for (Tablet tablet : tablets.values()) {
        ComputeNode node = Utils.chooseNode((LakeTablet) tablet);
        if (node == null) {
            // No alive node for this tablet: abort the whole job rather than
            // collect partial statistics.
            LOG.warn("Stop sending tablet stat task for partition {} because no alive node", debugName());
            return;
        }
        TabletInfo tabletInfo = new TabletInfo();
        tabletInfo.tabletId = tablet.getId();
        tabletInfo.version = version;
        // Append to the node's list, creating the list on first use.
        beToTabletInfos.computeIfAbsent(node, k -> Lists.newArrayList()).add(tabletInfo);
    }

    // Record the collection time once; waitResponse() stamps every tablet with this
    // single timestamp so all tablets of the partition share the same update time.
    collectStatTime = System.currentTimeMillis();

    responseList = Lists.newArrayListWithCapacity(beToTabletInfos.size());
    for (Map.Entry<ComputeNode, List<TabletInfo>> entry : beToTabletInfos.entrySet()) {
        ComputeNode node = entry.getKey();
        TabletStatRequest request = new TabletStatRequest();
        request.tabletInfos = entry.getValue();
        request.timeoutMs = LakeService.TIMEOUT_GET_TABLET_STATS;
        try {
            LakeService lakeService = BrpcProxy.getLakeService(node.getHost(), node.getBrpcPort());
            Future<TabletStatResponse> responseFuture = lakeService.getTabletStats(request);
            responseList.add(responseFuture);
            LOG.debug("Sent tablet stat collection task to node {} for partition {} of version {}. tablet count={}",
                    node.getHost(), debugName(), version, entry.getValue().size());
        } catch (Throwable e) {
            // Best effort: a failed send to one node must not block requests to the others.
            LOG.warn("Fail to send tablet stat task to host {} for partition {}: {}",
                    node.getHost(), debugName(), e.getMessage());
        }
    }
}
class CollectTabletStatJob { private final String dbName; private final String tableName; private final long partitionId; private final long version; private final Map<Long, Tablet> tablets; private List<Future<TabletStatResponse>> responseList; CollectTabletStatJob(PartitionSnapshot snapshot) { this.dbName = Objects.requireNonNull(snapshot.dbName, "dbName is null"); this.tableName = Objects.requireNonNull(snapshot.tableName, "tableName is null"); this.partitionId = snapshot.partitionId; this.version = snapshot.visibleVersion; this.tablets = new HashMap<>(); for (Tablet tablet : snapshot.tablets) { this.tablets.put(tablet.getId(), tablet); } } void execute() { sendTasks(); waitResponse(); } private String debugName() { return String.format("%s.%s.%d", dbName, tableName, partitionId); } private void waitResponse() { for (Future<TabletStatResponse> responseFuture : responseList) { try { TabletStatResponse response = responseFuture.get(); long now = System.currentTimeMillis(); if (response != null && response.tabletStats != null) { for (TabletStat stat : response.tabletStats) { LakeTablet tablet = (LakeTablet) tablets.get(stat.tabletId); tablet.setDataSize(stat.dataSize); tablet.setRowCount(stat.numRows); tablet.setDataSizeUpdateTime(now); } } } catch (InterruptedException e) { Thread.currentThread().interrupt(); } catch (ExecutionException e) { LOG.warn("Fail to collect tablet stat for partition {}: {}", debugName(), e.getMessage()); } } } }
class CollectTabletStatJob { private final String dbName; private final String tableName; private final long partitionId; private final long version; private final Map<Long, Tablet> tablets; private long collectStatTime = 0; private List<Future<TabletStatResponse>> responseList; CollectTabletStatJob(PartitionSnapshot snapshot) { this.dbName = Objects.requireNonNull(snapshot.dbName, "dbName is null"); this.tableName = Objects.requireNonNull(snapshot.tableName, "tableName is null"); this.partitionId = snapshot.partitionId; this.version = snapshot.visibleVersion; this.tablets = new HashMap<>(); for (Tablet tablet : snapshot.tablets) { this.tablets.put(tablet.getId(), tablet); } } void execute() { sendTasks(); waitResponse(); } private String debugName() { return String.format("%s.%s.%d", dbName, tableName, partitionId); } private void waitResponse() { for (Future<TabletStatResponse> responseFuture : responseList) { try { TabletStatResponse response = responseFuture.get(); if (response != null && response.tabletStats != null) { for (TabletStat stat : response.tabletStats) { LakeTablet tablet = (LakeTablet) tablets.get(stat.tabletId); tablet.setDataSize(stat.dataSize); tablet.setRowCount(stat.numRows); tablet.setDataSizeUpdateTime(collectStatTime); } } } catch (InterruptedException e) { Thread.currentThread().interrupt(); } catch (ExecutionException e) { LOG.warn("Fail to collect tablet stat for partition {}: {}", debugName(), e.getMessage()); } } } }
I think we should gradually refactor existing generated operators. Take `AggregateWindowOperator` as an exmple, every generated class has two objects, e.g. `aggWindowAggregator` and `generatedAggWindowAggregator`, etc.. And they are all nullable, and we have to add a lot of `if else` to compile them in `open` method. I think if we have a `WindowOperatorFactory`, the logic and member fields will be simpler.
public StreamOperator createStreamOperator(StreamTask containingTask, StreamConfig config, Output output) { WatermarkGenerator watermarkGenerator = generatedWatermarkGenerator.newInstance(containingTask.getUserCodeClassLoader()); WatermarkAssignerOperator operator = new WatermarkAssignerOperator(rowtimeFieldIndex, watermarkGenerator, idleTimeout); operator.setup(containingTask, config, output); return operator; }
WatermarkAssignerOperator operator = new WatermarkAssignerOperator(rowtimeFieldIndex, watermarkGenerator, idleTimeout);
public StreamOperator createStreamOperator(StreamTask containingTask, StreamConfig config, Output output) { WatermarkGenerator watermarkGenerator = generatedWatermarkGenerator.newInstance(containingTask.getUserCodeClassLoader()); WatermarkAssignerOperator operator = new WatermarkAssignerOperator(rowtimeFieldIndex, watermarkGenerator, idleTimeout); operator.setup(containingTask, config, output); return operator; }
class WatermarkAssignerOperatorFactory implements OneInputStreamOperatorFactory<BaseRow, BaseRow> { private static final long serialVersionUID = 1L; private final int rowtimeFieldIndex; private final long idleTimeout; private final GeneratedWatermarkGenerator generatedWatermarkGenerator; private ChainingStrategy strategy = ChainingStrategy.HEAD; public WatermarkAssignerOperatorFactory( int rowtimeFieldIndex, long idleTimeout, GeneratedWatermarkGenerator generatedWatermarkGenerator) { this.rowtimeFieldIndex = rowtimeFieldIndex; this.idleTimeout = idleTimeout; this.generatedWatermarkGenerator = generatedWatermarkGenerator; } @SuppressWarnings("unchecked") @Override @Override public void setChainingStrategy(ChainingStrategy strategy) { this.strategy = strategy; } @Override public ChainingStrategy getChainingStrategy() { return strategy; } @Override public Class<? extends StreamOperator> getStreamOperatorClass(ClassLoader classLoader) { return WatermarkAssignerOperator.class; } }
class WatermarkAssignerOperatorFactory implements OneInputStreamOperatorFactory<BaseRow, BaseRow> { private static final long serialVersionUID = 1L; private final int rowtimeFieldIndex; private final long idleTimeout; private final GeneratedWatermarkGenerator generatedWatermarkGenerator; private ChainingStrategy strategy = ChainingStrategy.HEAD; public WatermarkAssignerOperatorFactory( int rowtimeFieldIndex, long idleTimeout, GeneratedWatermarkGenerator generatedWatermarkGenerator) { this.rowtimeFieldIndex = rowtimeFieldIndex; this.idleTimeout = idleTimeout; this.generatedWatermarkGenerator = generatedWatermarkGenerator; } @SuppressWarnings("unchecked") @Override @Override public void setChainingStrategy(ChainingStrategy strategy) { this.strategy = strategy; } @Override public ChainingStrategy getChainingStrategy() { return strategy; } @Override public Class<? extends StreamOperator> getStreamOperatorClass(ClassLoader classLoader) { return WatermarkAssignerOperator.class; } }
Same comment here, need to get rid of this .block() too
public void canDeleteExistingMessage() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); SendChatMessageResult response = chatThreadClient.sendMessage(messageRequest).block(); StepVerifier.create(chatThreadClient.deleteMessage(response.getId())) .verifyComplete(); }
SendChatMessageResult response = chatThreadClient.sendMessage(messageRequest).block();
public void canDeleteExistingMessage() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); StepVerifier.create( chatThreadClient.sendMessage(messageRequest) .flatMap(response -> { return chatThreadClient.deleteMessage(response.getId()); }) ) .verifyComplete(); }
class ChatThreadAsyncClientTest extends ChatClientTestBase { private ClientLogger logger = new ClientLogger(ChatThreadAsyncClientTest.class); private CommunicationIdentityClient communicationClient; private ChatAsyncClient client; private ChatThreadAsyncClient chatThreadClient; private String threadId; private CommunicationUser firstThreadMember; private CommunicationUser secondThreadMember; private CommunicationUser firstAddedThreadMember; private CommunicationUser secondAddedThreadMember; @Override protected void beforeTest() { super.beforeTest(); communicationClient = getCommunicationIdentityClientBuilder().buildClient(); firstThreadMember = communicationClient.createUser(); secondThreadMember = communicationClient.createUser(); List<String> scopes = new ArrayList<String>(Arrays.asList("chat")); CommunicationUserToken response = communicationClient.issueToken(firstThreadMember, scopes); client = getChatClientBuilder(response.getToken()).buildAsyncClient(); CreateChatThreadOptions threadRequest = ChatOptionsProvider.createThreadOptions(firstThreadMember.getId(), secondThreadMember.getId()); chatThreadClient = client.createChatThread(threadRequest).block(); threadId = chatThreadClient.getChatThreadId(); } @Override protected void afterTest() { super.afterTest(); } @Test public void canUpdateThread() { UpdateChatThreadOptions threadRequest = ChatOptionsProvider.updateThreadOptions(); StepVerifier.create(chatThreadClient.updateChatThread(threadRequest)) .assertNext(noResp -> { StepVerifier.create(client.getChatThread(threadId)).assertNext(chatThread -> { assertEquals(chatThread.getTopic(), threadRequest.getTopic()); }) .verifyComplete(); }); } @Test public void canUpdateThreadWithResponse() { UpdateChatThreadOptions threadRequest = ChatOptionsProvider.updateThreadOptions(); StepVerifier.create(chatThreadClient.updateChatThreadWithResponse(threadRequest)) .assertNext(updateThreadResponse -> { assertEquals(updateThreadResponse.getStatusCode(), 200); 
StepVerifier.create(client.getChatThread(threadId)).assertNext(chatThread -> { assertEquals(chatThread.getTopic(), threadRequest.getTopic()); }) .verifyComplete(); }) .verifyComplete(); } @Test public void canAddListAndRemoveMembersAsync() throws InterruptedException { firstAddedThreadMember = communicationClient.createUser(); secondAddedThreadMember = communicationClient.createUser(); AddChatThreadMembersOptions options = ChatOptionsProvider.addThreadMembersOptions( firstAddedThreadMember.getId(), secondAddedThreadMember.getId()); StepVerifier.create(chatThreadClient.addMembers(options)) .assertNext(noResp -> { PagedIterable<ChatThreadMember> membersResponse = new PagedIterable<>(chatThreadClient.listMembers()); List<ChatThreadMember> returnedMembers = new ArrayList<ChatThreadMember>(); membersResponse.iterableByPage().forEach(resp -> { assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> returnedMembers.add(item)); }); for (ChatThreadMember member: options.getMembers()) { assertTrue(checkMembersListContainsMemberId(returnedMembers, member.getUser().getId())); } assertTrue(returnedMembers.size() == 4); }); for (ChatThreadMember member: options.getMembers()) { StepVerifier.create(chatThreadClient.removeMember(member.getUser())) .verifyComplete(); } } @Test public void canAddListAndRemoveMembersWithResponseAsync() throws InterruptedException { firstAddedThreadMember = communicationClient.createUser(); secondAddedThreadMember = communicationClient.createUser(); AddChatThreadMembersOptions options = ChatOptionsProvider.addThreadMembersOptions( firstAddedThreadMember.getId(), secondAddedThreadMember.getId()); StepVerifier.create(chatThreadClient.addMembersWithResponse(options)) .assertNext(addMembersResponse -> { assertEquals(addMembersResponse.getStatusCode(), 207); PagedIterable<ChatThreadMember> membersResponse = new PagedIterable<>(chatThreadClient.listMembers()); List<ChatThreadMember> returnedMembers = new ArrayList<ChatThreadMember>(); 
membersResponse.iterableByPage().forEach(resp -> { assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> returnedMembers.add(item)); }); for (ChatThreadMember member: options.getMembers()) { assertTrue(checkMembersListContainsMemberId(returnedMembers, member.getUser().getId())); } assertTrue(returnedMembers.size() == 4); for (ChatThreadMember member: options.getMembers()) { StepVerifier.create(chatThreadClient.removeMemberWithResponse(member.getUser())) .assertNext(resp -> { assertEquals(resp.getStatusCode(), 204); }) .verifyComplete(); } }) .verifyComplete(); } @Test public void canSendThenGetMessage() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); StepVerifier.create(chatThreadClient.sendMessage(messageRequest)) .assertNext(response -> { StepVerifier.create(chatThreadClient.getMessage(response.getId())) .assertNext(message -> { assertEquals(message.getContent(), messageRequest.getContent()); assertEquals(message.getPriority(), messageRequest.getPriority()); assertEquals(message.getSenderDisplayName(), messageRequest.getSenderDisplayName()); }) .verifyComplete(); }) .verifyComplete(); } @Test public void canSendThenGetMessageWithResponse() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); StepVerifier.create(chatThreadClient.sendMessageWithResponse(messageRequest)) .assertNext(sendResponse -> { StepVerifier.create(chatThreadClient.getMessageWithResponse(sendResponse.getValue().getId())) .assertNext(getResponse -> { ChatMessage message = getResponse.getValue(); assertEquals(message.getContent(), messageRequest.getContent()); assertEquals(message.getPriority(), messageRequest.getPriority()); assertEquals(message.getSenderDisplayName(), messageRequest.getSenderDisplayName()); }) .verifyComplete(); }) .verifyComplete(); } @Test @Test public void canDeleteExistingMessageWithResponse() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); 
SendChatMessageResult response = chatThreadClient.sendMessage(messageRequest).block(); StepVerifier.create(chatThreadClient.deleteMessageWithResponse(response.getId())) .assertNext(deleteResponse -> { assertEquals(deleteResponse.getStatusCode(), 204); }) .verifyComplete(); } @Test public void canUpdateExistingMessage() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); UpdateChatMessageOptions updateMessageRequest = ChatOptionsProvider.updateMessageOptions(); SendChatMessageResult response = chatThreadClient.sendMessage(messageRequest).block(); StepVerifier.create(chatThreadClient.updateMessage(response.getId(), updateMessageRequest)) .assertNext(noResp -> { StepVerifier.create(chatThreadClient.getMessage(response.getId())) .assertNext(message -> { assertEquals(message.getContent(), updateMessageRequest.getContent()); }) .verifyComplete(); }); } @Test public void canUpdateExistingMessageWithResponse() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); UpdateChatMessageOptions updateMessageRequest = ChatOptionsProvider.updateMessageOptions(); SendChatMessageResult response = chatThreadClient.sendMessage(messageRequest).block(); StepVerifier.create(chatThreadClient.updateMessageWithResponse(response.getId(), updateMessageRequest)) .assertNext(updateResponse -> { assertEquals(updateResponse.getStatusCode(), 200); StepVerifier.create(chatThreadClient.getMessage(response.getId())) .assertNext(message -> { assertEquals(message.getContent(), updateMessageRequest.getContent()); }) .verifyComplete(); }) .verifyComplete(); } @Test public void canListMessages() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); chatThreadClient.sendMessage(messageRequest).block(); chatThreadClient.sendMessage(messageRequest).block(); PagedIterable<ChatMessage> messagesResponse = new PagedIterable<ChatMessage>(chatThreadClient.listMessages()); List<ChatMessage> returnedMessages = new 
ArrayList<ChatMessage>(); messagesResponse.iterableByPage().forEach(resp -> { assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> { if (item.getType().equals("Text")) { returnedMessages.add(item); } }); }); assertTrue(returnedMessages.size() == 2); } @Test public void canListMessagesWithOptions() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); chatThreadClient.sendMessage(messageRequest).block(); chatThreadClient.sendMessage(messageRequest).block(); ListChatMessagesOptions options = new ListChatMessagesOptions(); options.setMaxPageSize(10); options.setStartTime(OffsetDateTime.parse("2020-09-08T01:02:14.387Z")); PagedIterable<ChatMessage> messagesResponse = new PagedIterable<ChatMessage>(chatThreadClient.listMessages(options)); List<ChatMessage> returnedMessages = new ArrayList<ChatMessage>(); messagesResponse.iterableByPage().forEach(resp -> { assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> { if (item.getType().equals("Text")) { returnedMessages.add(item); } }); }); assertTrue(returnedMessages.size() == 2); } @Test public void canSendTypingNotification() { StepVerifier.create(chatThreadClient.sendTypingNotification()) .verifyComplete(); } @Test public void canSendTypingNotificationWithResponse() { StepVerifier.create(chatThreadClient.sendTypingNotificationWithResponse()) .assertNext(response -> { assertEquals(response.getStatusCode(), 200); }) .verifyComplete(); } @Test public void canSendThenListReadReceipts() throws InterruptedException { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); SendChatMessageResult response = chatThreadClient.sendMessage(messageRequest).block(); StepVerifier.create(chatThreadClient.sendReadReceipt(response.getId())) .assertNext(noResp -> { PagedIterable<ReadReceipt> readReceiptsResponse = new PagedIterable<ReadReceipt>(chatThreadClient.listReadReceipts()); List<ReadReceipt> returnedReadReceipts = new 
ArrayList<ReadReceipt>(); readReceiptsResponse.iterableByPage().forEach(resp -> { assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> returnedReadReceipts.add(item)); }); if (interceptorManager.isPlaybackMode()) { assertTrue(returnedReadReceipts.size() > 0); checkReadReceiptListContainsMessageId(returnedReadReceipts, response.getId()); } }); } @Test public void canSendThenListReadReceiptsWithResponse() throws InterruptedException { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); SendChatMessageResult response = chatThreadClient.sendMessage(messageRequest).block(); StepVerifier.create(chatThreadClient.sendReadReceiptWithResponse(response.getId())) .assertNext(receiptResponse -> { assertEquals(receiptResponse.getStatusCode(), 201); PagedIterable<ReadReceipt> readReceiptsResponse = new PagedIterable<ReadReceipt>(chatThreadClient.listReadReceipts()); List<ReadReceipt> returnedReadReceipts = new ArrayList<ReadReceipt>(); readReceiptsResponse.iterableByPage().forEach(resp -> { assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> returnedReadReceipts.add(item)); }); if (interceptorManager.isPlaybackMode()) { assertTrue(returnedReadReceipts.size() > 0); checkReadReceiptListContainsMessageId(returnedReadReceipts, response.getId()); } }) .verifyComplete(); } }
class ChatThreadAsyncClientTest extends ChatClientTestBase { private ClientLogger logger = new ClientLogger(ChatThreadAsyncClientTest.class); private CommunicationIdentityClient communicationClient; private ChatAsyncClient client; private ChatThreadAsyncClient chatThreadClient; private String threadId; private CommunicationUser firstThreadMember; private CommunicationUser secondThreadMember; private CommunicationUser firstAddedThreadMember; private CommunicationUser secondAddedThreadMember; @Override protected void beforeTest() { super.beforeTest(); communicationClient = getCommunicationIdentityClientBuilder().buildClient(); firstThreadMember = communicationClient.createUser(); secondThreadMember = communicationClient.createUser(); List<String> scopes = new ArrayList<String>(Arrays.asList("chat")); CommunicationUserToken response = communicationClient.issueToken(firstThreadMember, scopes); client = getChatClientBuilder(response.getToken()).buildAsyncClient(); CreateChatThreadOptions threadRequest = ChatOptionsProvider.createThreadOptions(firstThreadMember.getId(), secondThreadMember.getId()); chatThreadClient = client.createChatThread(threadRequest).block(); threadId = chatThreadClient.getChatThreadId(); } @Override protected void afterTest() { super.afterTest(); } @Test public void canUpdateThread() { UpdateChatThreadOptions threadRequest = ChatOptionsProvider.updateThreadOptions(); StepVerifier.create( chatThreadClient.updateChatThread(threadRequest) .flatMap(noResp -> { return client.getChatThread(threadId); }) ) .assertNext(chatThread -> { assertEquals(chatThread.getTopic(), threadRequest.getTopic()); }); } @Test public void canUpdateThreadWithResponse() { UpdateChatThreadOptions threadRequest = ChatOptionsProvider.updateThreadOptions(); StepVerifier.create( chatThreadClient.updateChatThreadWithResponse(threadRequest) .flatMap(updateThreadResponse -> { assertEquals(updateThreadResponse.getStatusCode(), 200); return client.getChatThread(threadId); }) ) 
.assertNext(chatThread -> { assertEquals(chatThread.getTopic(), threadRequest.getTopic()); }) .verifyComplete(); } @Test public void canAddListAndRemoveMembersAsync() throws InterruptedException { firstAddedThreadMember = communicationClient.createUser(); secondAddedThreadMember = communicationClient.createUser(); AddChatThreadMembersOptions options = ChatOptionsProvider.addThreadMembersOptions( firstAddedThreadMember.getId(), secondAddedThreadMember.getId()); StepVerifier.create(chatThreadClient.addMembers(options)) .assertNext(noResp -> { PagedIterable<ChatThreadMember> membersResponse = new PagedIterable<>(chatThreadClient.listMembers()); List<ChatThreadMember> returnedMembers = new ArrayList<ChatThreadMember>(); membersResponse.iterableByPage().forEach(resp -> { assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> returnedMembers.add(item)); }); for (ChatThreadMember member: options.getMembers()) { assertTrue(checkMembersListContainsMemberId(returnedMembers, member.getUser().getId())); } assertTrue(returnedMembers.size() == 4); }); for (ChatThreadMember member: options.getMembers()) { StepVerifier.create(chatThreadClient.removeMember(member.getUser())) .verifyComplete(); } } @Test public void canAddListAndRemoveMembersWithResponseAsync() throws InterruptedException { firstAddedThreadMember = communicationClient.createUser(); secondAddedThreadMember = communicationClient.createUser(); AddChatThreadMembersOptions options = ChatOptionsProvider.addThreadMembersOptions( firstAddedThreadMember.getId(), secondAddedThreadMember.getId()); StepVerifier.create(chatThreadClient.addMembersWithResponse(options)) .assertNext(addMembersResponse -> { assertEquals(addMembersResponse.getStatusCode(), 207); PagedIterable<ChatThreadMember> membersResponse = new PagedIterable<>(chatThreadClient.listMembers()); List<ChatThreadMember> returnedMembers = new ArrayList<ChatThreadMember>(); membersResponse.iterableByPage().forEach(resp -> { 
assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> returnedMembers.add(item)); }); for (ChatThreadMember member: options.getMembers()) { assertTrue(checkMembersListContainsMemberId(returnedMembers, member.getUser().getId())); } assertTrue(returnedMembers.size() == 4); }) .verifyComplete(); for (ChatThreadMember member: options.getMembers()) { StepVerifier.create(chatThreadClient.removeMemberWithResponse(member.getUser())) .assertNext(resp -> { assertEquals(resp.getStatusCode(), 204); }) .verifyComplete(); } } @Test public void canSendThenGetMessage() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); StepVerifier .create(chatThreadClient.sendMessage(messageRequest) .flatMap(response -> { return chatThreadClient.getMessage(response.getId()); }) ) .assertNext(message -> { assertEquals(message.getContent(), messageRequest.getContent()); assertEquals(message.getPriority(), messageRequest.getPriority()); assertEquals(message.getSenderDisplayName(), messageRequest.getSenderDisplayName()); }) .verifyComplete(); } @Test public void canSendThenGetMessageWithResponse() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); StepVerifier .create(chatThreadClient.sendMessageWithResponse(messageRequest) .flatMap(sendResponse -> { assertEquals(sendResponse.getStatusCode(), 201); return chatThreadClient.getMessageWithResponse(sendResponse.getValue().getId()); }) ) .assertNext(getResponse -> { ChatMessage message = getResponse.getValue(); assertEquals(message.getContent(), messageRequest.getContent()); assertEquals(message.getPriority(), messageRequest.getPriority()); assertEquals(message.getSenderDisplayName(), messageRequest.getSenderDisplayName()); }) .verifyComplete(); } @Test @Test public void canDeleteExistingMessageWithResponse() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); StepVerifier.create( chatThreadClient.sendMessage(messageRequest) 
.flatMap(response -> { return chatThreadClient.deleteMessageWithResponse(response.getId()); }) ) .assertNext(deleteResponse -> { assertEquals(deleteResponse.getStatusCode(), 204); }) .verifyComplete(); } @Test public void canUpdateExistingMessage() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); UpdateChatMessageOptions updateMessageRequest = ChatOptionsProvider.updateMessageOptions(); AtomicReference<SendChatMessageResult> messageResponseRef = new AtomicReference<>(); StepVerifier.create( chatThreadClient.sendMessage(messageRequest) .flatMap(response -> { messageResponseRef.set(response); return chatThreadClient.updateMessage(response.getId(), updateMessageRequest); }) .flatMap((Void resp) -> { return chatThreadClient.getMessage(messageResponseRef.get().getId()); }) ) .assertNext(message -> { assertEquals(message.getContent(), updateMessageRequest.getContent()); }); } @Test public void canUpdateExistingMessageWithResponse() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); UpdateChatMessageOptions updateMessageRequest = ChatOptionsProvider.updateMessageOptions(); AtomicReference<SendChatMessageResult> messageResponseRef = new AtomicReference<>(); StepVerifier.create( chatThreadClient.sendMessage(messageRequest) .flatMap((SendChatMessageResult response) -> { messageResponseRef.set(response); return chatThreadClient.updateMessageWithResponse(response.getId(), updateMessageRequest); }) .flatMap((Response<Void> updateResponse) -> { assertEquals(updateResponse.getStatusCode(), 200); return chatThreadClient.getMessage(messageResponseRef.get().getId()); }) ) .assertNext(message -> { assertEquals(message.getContent(), updateMessageRequest.getContent()); }) .verifyComplete(); } @Test public void canListMessages() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); StepVerifier.create( chatThreadClient.sendMessage(messageRequest) 
.concatWith(chatThreadClient.sendMessage(messageRequest)) ) .assertNext((message) -> { PagedIterable<ChatMessage> messagesResponse = new PagedIterable<ChatMessage>(chatThreadClient.listMessages()); List<ChatMessage> returnedMessages = new ArrayList<ChatMessage>(); messagesResponse.iterableByPage().forEach(resp -> { assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> { if (item.getType().equals("Text")) { returnedMessages.add(item); } }); }); assertTrue(returnedMessages.size() == 2); }); } @Test public void canListMessagesWithOptions() { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); ListChatMessagesOptions options = new ListChatMessagesOptions(); options.setMaxPageSize(10); options.setStartTime(OffsetDateTime.parse("2020-09-08T01:02:14.387Z")); StepVerifier.create( chatThreadClient.sendMessage(messageRequest) .concatWith(chatThreadClient.sendMessage(messageRequest))) .assertNext((message) -> { PagedIterable<ChatMessage> messagesResponse = new PagedIterable<ChatMessage>(chatThreadClient.listMessages(options)); List<ChatMessage> returnedMessages = new ArrayList<ChatMessage>(); messagesResponse.iterableByPage().forEach(resp -> { assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> { if (item.getType().equals("Text")) { returnedMessages.add(item); } }); }); assertTrue(returnedMessages.size() == 2); }); } @Test public void canSendTypingNotification() { StepVerifier.create(chatThreadClient.sendTypingNotification()) .verifyComplete(); } @Test public void canSendTypingNotificationWithResponse() { StepVerifier.create(chatThreadClient.sendTypingNotificationWithResponse()) .assertNext(response -> { assertEquals(response.getStatusCode(), 200); }) .verifyComplete(); } @Test public void canSendThenListReadReceipts() throws InterruptedException { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); AtomicReference<SendChatMessageResult> messageResponseRef = new 
AtomicReference<>(); StepVerifier.create( chatThreadClient.sendMessage(messageRequest) .flatMap(response -> { messageResponseRef.set(response); return chatThreadClient.sendReadReceipt(response.getId()); }) ) .assertNext(noResp -> { PagedIterable<ReadReceipt> readReceiptsResponse = new PagedIterable<ReadReceipt>(chatThreadClient.listReadReceipts()); List<ReadReceipt> returnedReadReceipts = new ArrayList<ReadReceipt>(); readReceiptsResponse.iterableByPage().forEach(resp -> { assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> returnedReadReceipts.add(item)); }); if (interceptorManager.isPlaybackMode()) { assertTrue(returnedReadReceipts.size() > 0); checkReadReceiptListContainsMessageId(returnedReadReceipts, messageResponseRef.get().getId()); } }); } @Test public void canSendThenListReadReceiptsWithResponse() throws InterruptedException { SendChatMessageOptions messageRequest = ChatOptionsProvider.sendMessageOptions(); AtomicReference<SendChatMessageResult> messageResponseRef = new AtomicReference<>(); StepVerifier.create( chatThreadClient.sendMessage(messageRequest) .flatMap(response -> { messageResponseRef.set(response); return chatThreadClient.sendReadReceiptWithResponse(response.getId()); }) ) .assertNext(receiptResponse -> { assertEquals(receiptResponse.getStatusCode(), 201); PagedIterable<ReadReceipt> readReceiptsResponse = new PagedIterable<ReadReceipt>(chatThreadClient.listReadReceipts()); List<ReadReceipt> returnedReadReceipts = new ArrayList<ReadReceipt>(); readReceiptsResponse.iterableByPage().forEach(resp -> { assertEquals(resp.getStatusCode(), 200); resp.getItems().forEach(item -> returnedReadReceipts.add(item)); }); if (interceptorManager.isPlaybackMode()) { assertTrue(returnedReadReceipts.size() > 0); checkReadReceiptListContainsMessageId(returnedReadReceipts, messageResponseRef.get().getId()); } }) .verifyComplete(); } }
Method `loginUserFromKeytabAndReturnUGI` does't set the current login user but `loginUserFromKeytab` does, I've used `loginUserFromKeytab` instead.
protected void initLocalObjectsImpl() { HiveConf hiveConf = new HiveConf(); for (String key : catalogProperty.getHdfsProperties().keySet()) { String val = catalogProperty.getOrDefault(key, ""); hiveConf.set(key, val); } String authentication = catalogProperty.getOrDefault( HdfsResource.HADOOP_SECURITY_AUTHENTICATION, ""); if (AuthType.KERBEROS.getDesc().equals(authentication)) { Configuration conf = new Configuration(); conf.set(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, authentication); UserGroupInformation.setConfiguration(conf); final UserGroupInformation ugi; try { /** * Because metastore client is created by using * {@link org.apache.hadoop.hive.metastore.RetryingMetaStoreClient * it will relogin when TGT is expired, so we don't need to relogin manually. */ ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI( catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_PRINCIPAL, ""), catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_KEYTAB, "")); UserGroupInformation.setLoginUser(ugi); } catch (IOException e) { throw new HMSClientException("login with kerberos auth failed for catalog %s", e, this.getName()); } } Map<String, String> properties = HiveMetaStoreClientHelper.getPropertiesForDLF(name, hiveConf); properties.putAll(catalogProperty.getProperties()); catalogProperty.setProperties(properties); client = new PooledHiveMetaStoreClient(hiveConf, MAX_CLIENT_POOL_SIZE); }
UserGroupInformation.setLoginUser(ugi);
protected void initLocalObjectsImpl() { HiveConf hiveConf = new HiveConf(); for (String key : catalogProperty.getHdfsProperties().keySet()) { String val = catalogProperty.getOrDefault(key, ""); hiveConf.set(key, val); } String authentication = catalogProperty.getOrDefault( HdfsResource.HADOOP_SECURITY_AUTHENTICATION, ""); if (AuthType.KERBEROS.getDesc().equals(authentication)) { Configuration conf = new Configuration(); conf.set(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, authentication); UserGroupInformation.setConfiguration(conf); try { /** * Because metastore client is created by using * {@link org.apache.hadoop.hive.metastore.RetryingMetaStoreClient * it will relogin when TGT is expired, so we don't need to relogin manually. */ UserGroupInformation.loginUserFromKeytab( catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_PRINCIPAL, ""), catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_KEYTAB, "")); } catch (IOException e) { throw new HMSClientException("login with kerberos auth failed for catalog %s", e, this.getName()); } } Map<String, String> properties = HiveMetaStoreClientHelper.getPropertiesForDLF(name, hiveConf); properties.putAll(catalogProperty.getProperties()); catalogProperty.setProperties(properties); client = new PooledHiveMetaStoreClient(hiveConf, MAX_CLIENT_POOL_SIZE); }
class HMSExternalCatalog extends ExternalCatalog { private static final Logger LOG = LogManager.getLogger(HMSExternalCatalog.class); private static final int MAX_CLIENT_POOL_SIZE = 8; protected PooledHiveMetaStoreClient client; /** * Default constructor for HMSExternalCatalog. */ public HMSExternalCatalog(long catalogId, String name, Map<String, String> props) { this.id = catalogId; this.name = name; this.type = "hms"; this.catalogProperty = new CatalogProperty(); this.catalogProperty.setProperties(props); } public String getHiveMetastoreUris() { return catalogProperty.getOrDefault(HMSResource.HIVE_METASTORE_URIS, ""); } @Override protected void init() { Map<String, Long> tmpDbNameToId = Maps.newConcurrentMap(); Map<Long, ExternalDatabase> tmpIdToDb = Maps.newConcurrentMap(); InitCatalogLog initCatalogLog = new InitCatalogLog(); initCatalogLog.setCatalogId(id); initCatalogLog.setType(InitCatalogLog.Type.HMS); List<String> allDatabases = client.getAllDatabases(); for (String dbName : allDatabases) { long dbId; if (dbNameToId != null && dbNameToId.containsKey(dbName)) { dbId = dbNameToId.get(dbName); tmpDbNameToId.put(dbName, dbId); ExternalDatabase db = idToDb.get(dbId); db.setUnInitialized(invalidCacheInInit); tmpIdToDb.put(dbId, db); initCatalogLog.addRefreshDb(dbId); } else { dbId = Env.getCurrentEnv().getNextId(); tmpDbNameToId.put(dbName, dbId); HMSExternalDatabase db = new HMSExternalDatabase(this, dbId, dbName); tmpIdToDb.put(dbId, db); initCatalogLog.addCreateDb(dbId, dbName); } } dbNameToId = tmpDbNameToId; idToDb = tmpIdToDb; Env.getCurrentEnv().getEditLog().logInitCatalog(initCatalogLog); } @Override @Override public List<String> listDatabaseNames(SessionContext ctx) { makeSureInitialized(); return Lists.newArrayList(dbNameToId.keySet()); } @Override public List<String> listTableNames(SessionContext ctx, String dbName) { makeSureInitialized(); HMSExternalDatabase hmsExternalDatabase = (HMSExternalDatabase) idToDb.get(dbNameToId.get(dbName)); if 
(hmsExternalDatabase != null && hmsExternalDatabase.isInitialized()) { List<String> names = Lists.newArrayList(); hmsExternalDatabase.getTables().forEach(table -> names.add(table.getName())); return names; } else { return client.getAllTables(getRealTableName(dbName)); } } @Override public boolean tableExist(SessionContext ctx, String dbName, String tblName) { return client.tableExists(getRealTableName(dbName), tblName); } public PooledHiveMetaStoreClient getClient() { makeSureInitialized(); return client; } @Override public List<Column> getSchema(String dbName, String tblName) { makeSureInitialized(); List<FieldSchema> schema = getClient().getSchema(dbName, tblName); List<Column> tmpSchema = Lists.newArrayListWithCapacity(schema.size()); for (FieldSchema field : schema) { tmpSchema.add(new Column(field.getName(), HiveMetaStoreClientHelper.hiveTypeToDorisType(field.getType()), true, null, true, null, field.getComment(), true, null, -1)); } return tmpSchema; } }
class HMSExternalCatalog extends ExternalCatalog { private static final Logger LOG = LogManager.getLogger(HMSExternalCatalog.class); private static final int MAX_CLIENT_POOL_SIZE = 8; protected PooledHiveMetaStoreClient client; /** * Default constructor for HMSExternalCatalog. */ public HMSExternalCatalog(long catalogId, String name, Map<String, String> props) { this.id = catalogId; this.name = name; this.type = "hms"; this.catalogProperty = new CatalogProperty(); this.catalogProperty.setProperties(props); } public String getHiveMetastoreUris() { return catalogProperty.getOrDefault(HMSResource.HIVE_METASTORE_URIS, ""); } @Override protected void init() { Map<String, Long> tmpDbNameToId = Maps.newConcurrentMap(); Map<Long, ExternalDatabase> tmpIdToDb = Maps.newConcurrentMap(); InitCatalogLog initCatalogLog = new InitCatalogLog(); initCatalogLog.setCatalogId(id); initCatalogLog.setType(InitCatalogLog.Type.HMS); List<String> allDatabases = client.getAllDatabases(); for (String dbName : allDatabases) { long dbId; if (dbNameToId != null && dbNameToId.containsKey(dbName)) { dbId = dbNameToId.get(dbName); tmpDbNameToId.put(dbName, dbId); ExternalDatabase db = idToDb.get(dbId); db.setUnInitialized(invalidCacheInInit); tmpIdToDb.put(dbId, db); initCatalogLog.addRefreshDb(dbId); } else { dbId = Env.getCurrentEnv().getNextId(); tmpDbNameToId.put(dbName, dbId); HMSExternalDatabase db = new HMSExternalDatabase(this, dbId, dbName); tmpIdToDb.put(dbId, db); initCatalogLog.addCreateDb(dbId, dbName); } } dbNameToId = tmpDbNameToId; idToDb = tmpIdToDb; Env.getCurrentEnv().getEditLog().logInitCatalog(initCatalogLog); } @Override @Override public List<String> listDatabaseNames(SessionContext ctx) { makeSureInitialized(); return Lists.newArrayList(dbNameToId.keySet()); } @Override public List<String> listTableNames(SessionContext ctx, String dbName) { makeSureInitialized(); HMSExternalDatabase hmsExternalDatabase = (HMSExternalDatabase) idToDb.get(dbNameToId.get(dbName)); if 
(hmsExternalDatabase != null && hmsExternalDatabase.isInitialized()) { List<String> names = Lists.newArrayList(); hmsExternalDatabase.getTables().forEach(table -> names.add(table.getName())); return names; } else { return client.getAllTables(getRealTableName(dbName)); } } @Override public boolean tableExist(SessionContext ctx, String dbName, String tblName) { return client.tableExists(getRealTableName(dbName), tblName); } public PooledHiveMetaStoreClient getClient() { makeSureInitialized(); return client; } @Override public List<Column> getSchema(String dbName, String tblName) { makeSureInitialized(); List<FieldSchema> schema = getClient().getSchema(dbName, tblName); List<Column> tmpSchema = Lists.newArrayListWithCapacity(schema.size()); for (FieldSchema field : schema) { tmpSchema.add(new Column(field.getName(), HiveMetaStoreClientHelper.hiveTypeToDorisType(field.getType()), true, null, true, null, field.getComment(), true, null, -1)); } return tmpSchema; } }
It may be better to call `TransactionTestConstants.ACCOUNT, 6)` before calling rollback.
private void assertRollback() throws SQLException { Connection conn = getDataSource().getConnection(); conn.setAutoCommit(false); assertTableRowCount(conn, TransactionTestConstants.ACCOUNT, 0); executeWithLog(conn, "insert into account(id, BALANCE, TRANSACTION_ID) values(1, 1, 1),(2, 2, 2),(3, 3, 3),(4, 4, 4),(5, 5, 5),(6, 6, 6);"); conn.rollback(); assertTableRowCount(conn, TransactionTestConstants.ACCOUNT, 0); }
assertTableRowCount(conn, TransactionTestConstants.ACCOUNT, 0);
private void assertRollback() throws SQLException { Connection conn = getDataSource().getConnection(); conn.setAutoCommit(false); assertTableRowCount(conn, TransactionTestConstants.ACCOUNT, 0); executeWithLog(conn, "insert into account(id, BALANCE, TRANSACTION_ID) values(1, 1, 1),(2, 2, 2),(3, 3, 3),(4, 4, 4),(5, 5, 5),(6, 6, 6);"); assertTableRowCount(conn, TransactionTestConstants.ACCOUNT, 6); conn.rollback(); assertTableRowCount(conn, TransactionTestConstants.ACCOUNT, 0); }
class AddResourceTestCase extends BaseTransactionTestCase { public AddResourceTestCase(final BaseTransactionITCase baseTransactionITCase, final DataSource dataSource) { super(baseTransactionITCase, dataSource); } @Override @SneakyThrows public void executeTest() { assertAddResource(); } private void assertAddResource() throws SQLException { Connection conn = getDataSource().getConnection(); getBaseTransactionITCase().addNewResource(conn); getBaseTransactionITCase().createThreeDataSourceAccountTableRule(conn); reCreateAccountTable(conn); assertRollback(); assertCommit(); conn.close(); } private void reCreateAccountTable(final Connection conn) throws SQLException { getBaseTransactionITCase().dropAccountTable(conn); getBaseTransactionITCase().createAccountTable(conn); } private void assertCommit() throws SQLException { Connection conn = getDataSource().getConnection(); conn.setAutoCommit(false); assertTableRowCount(conn, TransactionTestConstants.ACCOUNT, 0); executeWithLog(conn, "insert into account(id, BALANCE, TRANSACTION_ID) values(1, 1, 1),(2, 2, 2),(3, 3, 3),(4, 4, 4),(5, 5, 5),(6, 6, 6);"); conn.commit(); assertTableRowCount(conn, TransactionTestConstants.ACCOUNT, 6); } }
class AddResourceTestCase extends BaseTransactionTestCase { public AddResourceTestCase(final BaseTransactionITCase baseTransactionITCase, final DataSource dataSource) { super(baseTransactionITCase, dataSource); } @Override @SneakyThrows public void executeTest() { assertAddResource(); } private void assertAddResource() throws SQLException { Connection conn = getDataSource().getConnection(); getBaseTransactionITCase().addNewResource(conn); getBaseTransactionITCase().createThreeDataSourceAccountTableRule(conn); reCreateAccountTable(conn); assertRollback(); assertCommit(); conn.close(); } private void reCreateAccountTable(final Connection conn) throws SQLException { getBaseTransactionITCase().dropAccountTable(conn); getBaseTransactionITCase().createAccountTable(conn); } private void assertCommit() throws SQLException { Connection conn = getDataSource().getConnection(); conn.setAutoCommit(false); assertTableRowCount(conn, TransactionTestConstants.ACCOUNT, 0); executeWithLog(conn, "insert into account(id, BALANCE, TRANSACTION_ID) values(1, 1, 1),(2, 2, 2),(3, 3, 3),(4, 4, 4),(5, 5, 5),(6, 6, 6);"); assertTableRowCount(conn, TransactionTestConstants.ACCOUNT, 6); conn.commit(); assertTableRowCount(conn, TransactionTestConstants.ACCOUNT, 6); } }
The most risky bug in this code is: The `KuduTable` constructor does not initialize the `properties`, `masterAddresses`, `catalogName`, `databaseName`, `tableName`, and `partColNames` fields which could lead to NullPointerException when accessing these fields. You can modify the code like this: ```java public KuduTable() { super(TableType.KUDU); this.properties = new HashMap<>(); this.masterAddresses = ""; this.catalogName = ""; this.databaseName = ""; this.tableName = ""; this.partColNames = new ArrayList<>(); } ```
public List<String> getPartitionColumnNames() { return partColNames; }
return partColNames;
public List<String> getPartitionColumnNames() { return partColNames; }
class KuduTable extends Table { private static final Logger LOG = LogManager.getLogger(KuduTable.class); public static final Set<String> KUDU_INPUT_FORMATS = Sets.newHashSet( "org.apache.hadoop.hive.kudu.KuduInputFormat", "org.apache.kudu.mapreduce.KuduTableInputFormat"); public static final String PARTITION_NULL_VALUE = "null"; private String masterAddresses; private String catalogName; private String databaseName; private String tableName; private List<String> partColNames; private Map<String, String> properties; public KuduTable() { super(TableType.KUDU); } public KuduTable(String masterAddresses, String catalogName, String dbName, String tblName, List<Column> schema, List<String> partColNames) { super(CONNECTOR_ID_GENERATOR.getNextId().asInt(), tblName, TableType.KUDU, schema); this.masterAddresses = masterAddresses; this.catalogName = catalogName; this.databaseName = dbName; this.tableName = tblName; this.partColNames = partColNames; } public static KuduTable fromMetastoreTable(org.apache.hadoop.hive.metastore.api.Table table, String catalogName, List<Column> fullSchema, List<String> partColNames) { return new KuduTable(StringUtils.EMPTY, catalogName, table.getDbName(), table.getTableName(), fullSchema, partColNames); } public String getMasterAddresses() { return masterAddresses; } @Override public String getCatalogName() { return catalogName; } public String getDbName() { return databaseName; } public String getTableName() { return tableName; } @Override public List<Column> getPartitionColumns() { List<Column> partitionColumns = new ArrayList<>(); if (!partColNames.isEmpty()) { partitionColumns = partColNames.stream().map(this::getColumn) .collect(Collectors.toList()); } return partitionColumns; } @Override public Map<String, String> getProperties() { if (properties == null) { this.properties = new HashMap<>(); } return properties; } @Override @Override public boolean isPartitioned() { return !partColNames.isEmpty(); } @Override public boolean isSupported() { 
return true; } public static boolean isKuduInputFormat(String inputFormat) { return KUDU_INPUT_FORMATS.contains(inputFormat); } @Override public TTableDescriptor toThrift(List<DescriptorTable.ReferencedPartitionInfo> partitions) { return new TTableDescriptor(id, TTableType.KUDU_TABLE, fullSchema.size(), 0, tableName, databaseName); } }
class KuduTable extends Table { private static final Logger LOG = LogManager.getLogger(KuduTable.class); public static final Set<String> KUDU_INPUT_FORMATS = Sets.newHashSet( "org.apache.hadoop.hive.kudu.KuduInputFormat", "org.apache.kudu.mapreduce.KuduTableInputFormat"); public static final String PARTITION_NULL_VALUE = "null"; private String masterAddresses; private String catalogName; private String databaseName; private String tableName; private List<String> partColNames; private Map<String, String> properties; public KuduTable() { super(TableType.KUDU); } public KuduTable(String masterAddresses, String catalogName, String dbName, String tblName, List<Column> schema, List<String> partColNames) { super(CONNECTOR_ID_GENERATOR.getNextId().asInt(), tblName, TableType.KUDU, schema); this.masterAddresses = masterAddresses; this.catalogName = catalogName; this.databaseName = dbName; this.tableName = tblName; this.partColNames = partColNames; } public static KuduTable fromMetastoreTable(org.apache.hadoop.hive.metastore.api.Table table, String catalogName, List<Column> fullSchema, List<String> partColNames) { return new KuduTable(StringUtils.EMPTY, catalogName, table.getDbName(), table.getTableName(), fullSchema, partColNames); } public String getMasterAddresses() { return masterAddresses; } @Override public String getCatalogName() { return catalogName; } public String getDbName() { return databaseName; } public String getTableName() { return tableName; } @Override public List<Column> getPartitionColumns() { List<Column> partitionColumns = new ArrayList<>(); if (!partColNames.isEmpty()) { partitionColumns = partColNames.stream().map(this::getColumn) .collect(Collectors.toList()); } return partitionColumns; } @Override public Map<String, String> getProperties() { if (properties == null) { this.properties = new HashMap<>(); } return properties; } @Override @Override public boolean isPartitioned() { return !partColNames.isEmpty(); } @Override public boolean isSupported() { 
return true; } public static boolean isKuduInputFormat(String inputFormat) { return KUDU_INPUT_FORMATS.contains(inputFormat); } @Override public TTableDescriptor toThrift(List<DescriptorTable.ReferencedPartitionInfo> partitions) { return new TTableDescriptor(id, TTableType.KUDU_TABLE, fullSchema.size(), 0, tableName, databaseName); } }
> we had to patch Arc in the past to remove some of these as they had a signifcant impact on benchmarks I do remember quite a few optimizations I'm not exactly proud of :shrug:. > you won't notice on micro benchmarks unless you're using all CPUs So I'd personally avoid them All benchmarks are tricky because they run in a specific system configuration. But I strongly disagree that we should avoid microbenchmarks . In an ideal world, we would start with a microbenchmark and continue with benchmarking several quarkus app configurations (small app, big app, etc.).. > you know better how much of an "hot path" this is.. I'm pretty sure here.
public ArcContainerImpl(CurrentContextFactory currentContextFactory, boolean strictMode) { this.strictMode = strictMode; id = String.valueOf(ID_GENERATOR.incrementAndGet()); running = new AtomicBoolean(true); List<InjectableBean<?>> beans = new ArrayList<>(); Map<String, List<InjectableBean<?>>> beansByRawType = new HashMap<>(); List<Supplier<Collection<RemovedBean>>> removedBeans = new ArrayList<>(); List<InjectableInterceptor<?>> interceptors = new ArrayList<>(); List<InjectableDecorator<?>> decorators = new ArrayList<>(); List<InjectableObserverMethod<?>> observers = new ArrayList<>(); Set<String> interceptorBindings = new HashSet<>(); Map<Class<? extends Annotation>, Set<Annotation>> transitiveInterceptorBindings = new HashMap<>(); Map<String, Set<String>> qualifierNonbindingMembers = new HashMap<>(); Set<String> qualifiers = new HashSet<>(); this.currentContextFactory = currentContextFactory == null ? new ThreadLocalCurrentContextFactory() : currentContextFactory; List<Components> components = new ArrayList<>(); for (ComponentsProvider componentsProvider : ServiceLoader.load(ComponentsProvider.class)) { components.add(componentsProvider.getComponents()); } for (Components c : components) { for (InjectableBean<?> bean : c.getBeans()) { if (bean instanceof InjectableInterceptor) { interceptors.add((InjectableInterceptor<?>) bean); } else if (bean instanceof InjectableDecorator) { decorators.add((InjectableDecorator<?>) bean); } else { beans.add(bean); precomputeBeanRawTypes(beansByRawType, bean); } } removedBeans.add(c.getRemovedBeans()); observers.addAll(c.getObservers()); interceptorBindings.addAll(c.getInterceptorBindings()); transitiveInterceptorBindings.putAll(c.getTransitiveInterceptorBindings()); qualifierNonbindingMembers.putAll(c.getQualifierNonbindingMembers()); qualifiers.addAll(c.getQualifiers()); } addBuiltInBeans(beans, beansByRawType); interceptors.sort(Comparator.comparingInt(InjectableInterceptor::getPriority)); 
decorators.sort(Comparator.comparingInt(InjectableDecorator::getPriority)); resolved = new ComputingCache<>(this::resolve); beansById = new ComputingCache<>(this::findById); beansByName = new ComputingCache<>(this::resolve); resourceProviders = new ArrayList<>(); for (ResourceReferenceProvider resourceProvider : ServiceLoader.load(ResourceReferenceProvider.class)) { resourceProviders.add(resourceProvider); } resourceProviders.trimToSize(); instance = InstanceImpl.forGlobalEntrypoint(Object.class, Collections.emptySet()); this.beans = List.copyOf(beans); this.beansByRawType = Map.copyOf(beansByRawType); this.beansByRawType.forEach(new BiConsumer<String, List<InjectableBean<?>>>() { @Override public void accept(String key, List<InjectableBean<?>> val) { if (val.size() > 1) { ((ArrayList<InjectableBean<?>>) val).trimToSize(); } } }); this.interceptors = List.copyOf(interceptors); this.decorators = List.copyOf(decorators); this.observers = List.copyOf(observers); this.removedBeans = new LazyValue<>(new Supplier<List<RemovedBean>>() { @Override public List<RemovedBean> get() { List<RemovedBean> removed = new ArrayList<>(); for (Supplier<Collection<RemovedBean>> supplier : removedBeans) { removed.addAll(supplier.get()); } LOGGER.debugf("Loaded %s removed beans lazily", removed.size()); return List.copyOf(removed); } }); this.registeredQualifiers = new Qualifiers(qualifiers, qualifierNonbindingMembers); this.registeredInterceptorBindings = new InterceptorBindings(interceptorBindings, transitiveInterceptorBindings); Contexts.Builder contextsBuilder = new Contexts.Builder( new RequestContext(this.currentContextFactory.create(RequestScoped.class), notifierOrNull(Set.of(Initialized.Literal.REQUEST, Any.Literal.INSTANCE)), notifierOrNull(Set.of(BeforeDestroyed.Literal.REQUEST, Any.Literal.INSTANCE)), notifierOrNull(Set.of(Destroyed.Literal.REQUEST, Any.Literal.INSTANCE))), new ApplicationContext(), new SingletonContext(), new DependentContext()); for (Components c : 
components) { for (InjectableContext context : c.getContexts()) { if (ApplicationScoped.class.equals(context.getScope())) { throw new IllegalStateException( "Failed to register a context - built-in application context is always active: " + context); } if (Singleton.class.equals(context.getScope())) { throw new IllegalStateException( "Failed to register a context - built-in singleton context is always active: " + context); } contextsBuilder.putContext(context); } } this.contexts = contextsBuilder.build(); }
this.beansByRawType = Map.copyOf(beansByRawType);
public ArcContainerImpl(CurrentContextFactory currentContextFactory, boolean strictMode) { this.strictMode = strictMode; id = String.valueOf(ID_GENERATOR.incrementAndGet()); running = new AtomicBoolean(true); List<InjectableBean<?>> beans = new ArrayList<>(); Map<String, List<InjectableBean<?>>> beansByRawType = new HashMap<>(); List<Supplier<Collection<RemovedBean>>> removedBeans = new ArrayList<>(); List<InjectableInterceptor<?>> interceptors = new ArrayList<>(); List<InjectableDecorator<?>> decorators = new ArrayList<>(); List<InjectableObserverMethod<?>> observers = new ArrayList<>(); Set<String> interceptorBindings = new HashSet<>(); Map<Class<? extends Annotation>, Set<Annotation>> transitiveInterceptorBindings = new HashMap<>(); Map<String, Set<String>> qualifierNonbindingMembers = new HashMap<>(); Set<String> qualifiers = new HashSet<>(); this.currentContextFactory = currentContextFactory == null ? new ThreadLocalCurrentContextFactory() : currentContextFactory; List<Components> components = new ArrayList<>(); for (ComponentsProvider componentsProvider : ServiceLoader.load(ComponentsProvider.class)) { components.add(componentsProvider.getComponents()); } for (Components c : components) { for (InjectableBean<?> bean : c.getBeans()) { if (bean instanceof InjectableInterceptor) { interceptors.add((InjectableInterceptor<?>) bean); } else if (bean instanceof InjectableDecorator) { decorators.add((InjectableDecorator<?>) bean); } else { beans.add(bean); precomputeBeanRawTypes(beansByRawType, bean); } } removedBeans.add(c.getRemovedBeans()); observers.addAll(c.getObservers()); interceptorBindings.addAll(c.getInterceptorBindings()); transitiveInterceptorBindings.putAll(c.getTransitiveInterceptorBindings()); qualifierNonbindingMembers.putAll(c.getQualifierNonbindingMembers()); qualifiers.addAll(c.getQualifiers()); } addBuiltInBeans(beans, beansByRawType); interceptors.sort(Comparator.comparingInt(InjectableInterceptor::getPriority)); 
decorators.sort(Comparator.comparingInt(InjectableDecorator::getPriority)); resolved = new ComputingCache<>(this::resolve); beansById = new ComputingCache<>(this::findById); beansByName = new ComputingCache<>(this::resolve); resourceProviders = new ArrayList<>(); for (ResourceReferenceProvider resourceProvider : ServiceLoader.load(ResourceReferenceProvider.class)) { resourceProviders.add(resourceProvider); } resourceProviders.trimToSize(); instance = InstanceImpl.forGlobalEntrypoint(Object.class, Collections.emptySet()); this.beans = List.copyOf(beans); this.beansByRawType = Map.copyOf(beansByRawType); this.beansByRawType.forEach(new BiConsumer<String, List<InjectableBean<?>>>() { @Override public void accept(String key, List<InjectableBean<?>> val) { if (val.size() > 1) { ((ArrayList<InjectableBean<?>>) val).trimToSize(); } } }); this.interceptors = List.copyOf(interceptors); this.decorators = List.copyOf(decorators); this.observers = List.copyOf(observers); this.removedBeans = new LazyValue<>(new Supplier<List<RemovedBean>>() { @Override public List<RemovedBean> get() { List<RemovedBean> removed = new ArrayList<>(); for (Supplier<Collection<RemovedBean>> supplier : removedBeans) { removed.addAll(supplier.get()); } LOGGER.debugf("Loaded %s removed beans lazily", removed.size()); return List.copyOf(removed); } }); this.registeredQualifiers = new Qualifiers(qualifiers, qualifierNonbindingMembers); this.registeredInterceptorBindings = new InterceptorBindings(interceptorBindings, transitiveInterceptorBindings); Contexts.Builder contextsBuilder = new Contexts.Builder( new RequestContext(this.currentContextFactory.create(RequestScoped.class), notifierOrNull(Set.of(Initialized.Literal.REQUEST, Any.Literal.INSTANCE)), notifierOrNull(Set.of(BeforeDestroyed.Literal.REQUEST, Any.Literal.INSTANCE)), notifierOrNull(Set.of(Destroyed.Literal.REQUEST, Any.Literal.INSTANCE))), new ApplicationContext(), new SingletonContext(), new DependentContext()); for (Components c : 
components) { for (InjectableContext context : c.getContexts()) { if (ApplicationScoped.class.equals(context.getScope())) { throw new IllegalStateException( "Failed to register a context - built-in application context is always active: " + context); } if (Singleton.class.equals(context.getScope())) { throw new IllegalStateException( "Failed to register a context - built-in singleton context is always active: " + context); } contextsBuilder.putContext(context); } } this.contexts = contextsBuilder.build(); }
class ArcContainerImpl implements ArcContainer { private static final Logger LOGGER = Logger.getLogger(ArcContainerImpl.class.getPackage().getName()); private static final AtomicInteger ID_GENERATOR = new AtomicInteger(); private final String id; private final AtomicBoolean running; private final List<InjectableBean<?>> beans; private final Map<String, List<InjectableBean<?>>> beansByRawType; private final LazyValue<List<RemovedBean>> removedBeans; private final List<InjectableInterceptor<?>> interceptors; private final List<InjectableDecorator<?>> decorators; private final List<InjectableObserverMethod<?>> observers; private final Contexts contexts; private final ComputingCache<Resolvable, Set<InjectableBean<?>>> resolved; private final ComputingCache<String, InjectableBean<?>> beansById; private final ComputingCache<String, Set<InjectableBean<?>>> beansByName; private final ArrayList<ResourceReferenceProvider> resourceProviders; final InstanceImpl<Object> instance; final Qualifiers registeredQualifiers; final InterceptorBindings registeredInterceptorBindings; private volatile ExecutorService executorService; private final CurrentContextFactory currentContextFactory; private final boolean strictMode; static void precomputeBeanRawTypes(Map<String, List<InjectableBean<?>>> map, InjectableBean<?> bean) { for (Type type : bean.getTypes()) { if (Object.class.equals(type)) { continue; } Class<?> rawType = Types.getRawType(type); if (rawType == null) { continue; } rawType = Types.boxedClass(rawType); String key = rawType.getName(); List<InjectableBean<?>> match = map.get(key); if (match == null) { map.put(key, List.of(bean)); } else { if (match.contains(bean)) { continue; } if (match.size() == 1) { List<InjectableBean<?>> newMatch = new ArrayList<>(); newMatch.add(match.get(0)); newMatch.add(bean); map.put(key, newMatch); } else { match.add(bean); } } } } public void init() { Set<Annotation> qualifiers = Set.of(Initialized.Literal.APPLICATION, Any.Literal.INSTANCE); 
EventImpl.createNotifier(Object.class, Object.class, qualifiers, this, false, null) .notify("@Initialized(ApplicationScoped.class)"); CDI.setCDIProvider(new ArcCDIProvider()); LOGGER.debugf("ArC DI container initialized [beans=%s, observers=%s]", beans.size(), observers.size()); } @Override public InjectableContext getActiveContext(Class<? extends Annotation> scopeType) { return contexts.getActiveContext(scopeType); } @Override public List<InjectableContext> getContexts(Class<? extends Annotation> scopeType) { return contexts.getContexts(scopeType); } @Override public Set<Class<? extends Annotation>> getScopes() { return contexts.scopes; } @Override public <T> InstanceHandle<T> instance(Class<T> type, Annotation... qualifiers) { return instanceHandle(type, qualifiers); } @Override public <T> InstanceHandle<T> instance(TypeLiteral<T> type, Annotation... qualifiers) { return instanceHandle(type.getType(), qualifiers); } @Override public <X> InstanceHandle<X> instance(Type type, Annotation... qualifiers) { return instanceHandle(type, qualifiers); } @Override public <T> Supplier<InstanceHandle<T>> beanInstanceSupplier(Class<T> type, Annotation... qualifiers) { return createInstanceSupplier(false, type, qualifiers); } @Override public <T> Supplier<InstanceHandle<T>> instanceSupplier(Class<T> type, Annotation... qualifiers) { return createInstanceSupplier(true, type, qualifiers); } private <T> Supplier<InstanceHandle<T>> createInstanceSupplier(boolean resolveAmbiguities, Class<T> type, Annotation... 
qualifiers) { if (qualifiers == null || qualifiers.length == 0) { qualifiers = new Annotation[] { Default.Literal.INSTANCE }; } Set<InjectableBean<?>> resolvedBeans = resolved.getValue(new Resolvable(type, qualifiers)); Set<InjectableBean<?>> filteredBean = resolvedBeans; if (resolvedBeans.size() > 1) { if (resolveAmbiguities) { filteredBean = new HashSet<>(); for (InjectableBean<?> i : resolvedBeans) { if (i.getBeanClass().equals(type)) { filteredBean.add(i); } } } else { throw new AmbiguousResolutionException("Beans: " + resolvedBeans); } } @SuppressWarnings("unchecked") InjectableBean<T> bean = filteredBean.size() != 1 ? null : (InjectableBean<T>) filteredBean.iterator().next(); if (bean == null) { return null; } return new Supplier<InstanceHandle<T>>() { @Override public InstanceHandle<T> get() { return beanInstanceHandle(bean, null); } }; } @Override public <T> InstanceHandle<T> instance(InjectableBean<T> bean) { Objects.requireNonNull(bean); return beanInstanceHandle(bean, null); } @Override public <T> InjectableInstance<T> select(Class<T> type, Annotation... qualifiers) { return instance.select(type, qualifiers); } @Override public <T> InjectableInstance<T> select(TypeLiteral<T> type, Annotation... qualifiers) { return instance.select(type, qualifiers); } @Override public <T> List<InstanceHandle<T>> listAll(Class<T> type, Annotation... qualifiers) { return Instances.listOfHandles(CurrentInjectionPointProvider.EMPTY_SUPPLIER, type, Set.of(qualifiers), new CreationalContextImpl<>(null)); } @Override public <T> List<InstanceHandle<T>> listAll(TypeLiteral<T> type, Annotation... 
qualifiers) { return Instances.listOfHandles(CurrentInjectionPointProvider.EMPTY_SUPPLIER, type.getType(), Set.of(qualifiers), new CreationalContextImpl<>(null)); } @Override public boolean isRunning() { return running.get(); } @SuppressWarnings("unchecked") @Override public <T> InjectableBean<T> bean(String beanIdentifier) { Objects.requireNonNull(beanIdentifier); return (InjectableBean<T>) beansById.getValue(beanIdentifier); } @Override public InjectableBean<?> namedBean(String name) { Objects.requireNonNull(name); Set<InjectableBean<?>> found = beansByName.getValue(name); return found.size() == 1 ? found.iterator().next() : null; } @SuppressWarnings("unchecked") @Override public <T> InstanceHandle<T> instance(String name) { Objects.requireNonNull(name); Set<InjectableBean<?>> resolvedBeans = beansByName.getValue(name); return resolvedBeans.size() != 1 ? EagerInstanceHandle.unavailable() : (InstanceHandle<T>) beanInstanceHandle(resolvedBeans.iterator() .next(), null); } @Override public ManagedContext requestContext() { return contexts.requestContext; } @Override public BeanManager beanManager() { return BeanManagerImpl.INSTANCE.get(); } @Override public ExecutorService getExecutorService() { ExecutorService executor = executorService; return executor != null ? 
executor : ForkJoinPool.commonPool(); } public void setExecutor(ExecutorService executor) { this.executorService = executor; } @Override public CurrentContextFactory getCurrentContextFactory() { return currentContextFactory; } @Override public boolean strictCompatibility() { return strictMode; } @Override public String toString() { return "ArcContainerImpl [id=" + id + ", running=" + running + ", beans=" + beans.size() + ", observers=" + observers.size() + ", scopes=" + contexts.scopes.size() + "]"; } public synchronized void shutdown() { if (running.get()) { CDI<?> cdi = CDI.current(); if (cdi instanceof ArcCDI) { ArcCDI arcCdi = (ArcCDI) cdi; arcCdi.destroy(); } contexts.requestContext.terminate(); Set<Annotation> beforeDestroyQualifiers = new HashSet<>(4); beforeDestroyQualifiers.add(BeforeDestroyed.Literal.APPLICATION); beforeDestroyQualifiers.add(Any.Literal.INSTANCE); try { EventImpl.createNotifier(Object.class, Object.class, beforeDestroyQualifiers, this, false, null) .notify(toString()); } catch (Exception e) { LOGGER.warn("An error occurred during delivery of the @BeforeDestroyed(ApplicationScoped.class) event", e); } contexts.applicationContext.destroy(); Set<Annotation> destroyQualifiers = new HashSet<>(4); destroyQualifiers.add(Destroyed.Literal.APPLICATION); destroyQualifiers.add(Any.Literal.INSTANCE); try { EventImpl.createNotifier(Object.class, Object.class, destroyQualifiers, this, false, null).notify(toString()); } catch (Exception e) { LOGGER.warn("An error occurred during delivery of the @Destroyed(ApplicationScoped.class) event", e); } contexts.singletonContext.destroy(); Reflections.clearCaches(); resolved.clear(); running.set(false); InterceptedStaticMethods.clear(); LOGGER.debugf("ArC DI container shut down"); } } public List<InjectableBean<?>> getBeans() { return beans; } public List<RemovedBean> getRemovedBeans() { return removedBeans.get(); } public List<InjectableInterceptor<?>> getInterceptors() { return interceptors; } public 
List<InjectableDecorator<?>> getDecorators() { return decorators; } public List<InjectableObserverMethod<?>> getObservers() { return observers; } InstanceHandle<Object> getResource(Type type, Set<Annotation> annotations) { for (ResourceReferenceProvider resourceProvider : resourceProviders) { InstanceHandle<Object> ret = resourceProvider.get(type, annotations); if (ret != null) { return ret; } } return null; } private Notifier<Object> notifierOrNull(Set<Annotation> qualifiers) { Notifier<Object> notifier = EventImpl.createNotifier(Object.class, Object.class, qualifiers, this, false, null); return notifier.isEmpty() ? null : notifier; } private static void addBuiltInBeans(List<InjectableBean<?>> beans, Map<String, List<InjectableBean<?>>> beansByRawType) { BeanManagerBean beanManagerBean = new BeanManagerBean(); beans.add(beanManagerBean); precomputeBeanRawTypes(beansByRawType, beanManagerBean); EventBean eventBean = new EventBean(); beans.add(eventBean); precomputeBeanRawTypes(beansByRawType, eventBean); beans.add(InstanceBean.INSTANCE); precomputeBeanRawTypes(beansByRawType, InstanceBean.INSTANCE); InjectionPointBean injectionPointBean = new InjectionPointBean(); beans.add(injectionPointBean); precomputeBeanRawTypes(beansByRawType, injectionPointBean); } private <T> InstanceHandle<T> instanceHandle(Type type, Annotation... 
qualifiers) { return beanInstanceHandle(getBean(type, qualifiers), null); } static <T> InstanceHandle<T> beanInstanceHandle(InjectableBean<T> bean, CreationalContextImpl<T> parentContext, boolean resetCurrentInjectionPoint, Consumer<T> destroyLogic) { return beanInstanceHandle(bean, parentContext, resetCurrentInjectionPoint, destroyLogic, false); } static <T> InstanceHandle<T> beanInstanceHandle(InjectableBean<T> bean, CreationalContextImpl<T> parentContext, boolean resetCurrentInjectionPoint, Consumer<T> destroyLogic, boolean useParentCreationalContextDirectly) { if (bean != null) { if (parentContext == null && Dependent.class.equals(bean.getScope())) { parentContext = new CreationalContextImpl<>(null); } CreationalContextImpl<T> creationalContext; if (parentContext != null) { creationalContext = useParentCreationalContextDirectly ? parentContext : parentContext.child(bean); } else { creationalContext = new CreationalContextImpl<>(bean); } InjectionPoint prev = null; if (resetCurrentInjectionPoint) { prev = InjectionPointProvider.set(CurrentInjectionPointProvider.EMPTY); } try { return new EagerInstanceHandle<>(bean, bean.get(creationalContext), creationalContext, parentContext, destroyLogic); } finally { if (resetCurrentInjectionPoint) { InjectionPointProvider.set(prev); } } } else { return EagerInstanceHandle.unavailable(); } } static <T> InstanceHandle<T> beanInstanceHandle(InjectableBean<T> bean, CreationalContextImpl<T> parentContext) { return beanInstanceHandle(bean, parentContext, true, null); } @SuppressWarnings("unchecked") private <T> InjectableBean<T> getBean(Type requiredType, Annotation... 
qualifiers) { if (qualifiers == null || qualifiers.length == 0) { qualifiers = new Annotation[] { Default.Literal.INSTANCE }; } else { registeredQualifiers.verify(qualifiers); } Resolvable resolvable = new Resolvable(requiredType, qualifiers); Set<InjectableBean<?>> resolvedBeans = resolved.getValue(resolvable); if (resolvedBeans.isEmpty()) { scanRemovedBeans(resolvable); } return resolvedBeans.size() != 1 ? null : (InjectableBean<T>) resolvedBeans.iterator().next(); } Set<Bean<?>> getBeans(Type requiredType, Annotation... qualifiers) { if (requiredType instanceof TypeVariable) { throw new IllegalArgumentException("The given type is a type variable: " + requiredType); } if (qualifiers == null || qualifiers.length == 0) { qualifiers = new Annotation[] { Default.Literal.INSTANCE }; } else { registeredQualifiers.verify(qualifiers); } return Set.of(getMatchingBeans(new Resolvable(requiredType, qualifiers)).toArray(new Bean<?>[] {})); } Set<Bean<?>> getBeans(String name) { return new HashSet<>(getMatchingBeans(name)); } boolean isScope(Class<? extends Annotation> annotationType) { if (annotationType.isAnnotationPresent(Scope.class) || annotationType.isAnnotationPresent(NormalScope.class)) { return true; } for (Class<? extends Annotation> scopeType : contexts.scopes) { if (scopeType.equals(annotationType)) { return true; } } return false; } boolean isNormalScope(Class<? 
extends Annotation> annotationType) { if (annotationType.isAnnotationPresent(NormalScope.class)) { return true; } List<InjectableContext> injectableContexts = contexts.getContexts(annotationType); for (InjectableContext context : injectableContexts) { if (context.isNormal()) { return true; } } return false; } private Set<InjectableBean<?>> resolve(Resolvable resolvable) { return resolve(getMatchingBeans(resolvable)); } private Set<InjectableBean<?>> resolve(String name) { return resolve(getMatchingBeans(name)); } private InjectableBean<?> findById(String identifier) { for (InjectableBean<?> bean : beans) { if (bean.getIdentifier().equals(identifier)) { return bean; } } for (InjectableInterceptor<?> interceptorBean : interceptors) { if (interceptorBean.getIdentifier().equals(identifier)) { return interceptorBean; } } for (InjectableDecorator<?> decoratorBean : decorators) { if (decoratorBean.getIdentifier().equals(identifier)) { return decoratorBean; } } return null; } @SuppressWarnings("unchecked") static <X> Bean<? extends X> resolve(Set<Bean<? extends X>> beans) { if (beans == null || beans.isEmpty()) { return null; } else if (beans.size() == 1) { return beans.iterator().next(); } else { if (beans.stream().allMatch(InjectableBean.class::isInstance)) { List<InjectableBean<?>> matching = new ArrayList<>(beans.size()); for (Bean<? extends X> bean : beans) { matching.add((InjectableBean<? extends X>) bean); } Set<InjectableBean<?>> resolved = resolve(matching); if (resolved.size() != 1) { throw new AmbiguousResolutionException(resolved.toString()); } return (Bean<? extends X>) resolved.iterator().next(); } else { Set<Bean<? 
extends X>> resolved = new HashSet<>(beans); resolved.removeIf(not(Bean::isAlternative)); if (resolved.size() != 1) { throw new AmbiguousResolutionException(resolved.toString()); } return resolved.iterator().next(); } } } private static Set<InjectableBean<?>> resolve(List<InjectableBean<?>> matching) { if (matching.isEmpty()) { return Collections.emptySet(); } else if (matching.size() == 1) { return Set.of(matching.get(0)); } List<InjectableBean<?>> nonDefault = new ArrayList<>(matching); nonDefault.removeIf(InjectableBean::isDefaultBean); if (nonDefault.isEmpty()) { return Set.copyOf(matching); } else if (nonDefault.size() == 1) { return Set.of(nonDefault.get(0)); } List<InjectableBean<?>> priorityBeans = new ArrayList<>(nonDefault); priorityBeans.removeIf(not(ArcContainerImpl::isAlternativeOrDeclaredOnAlternative)); if (priorityBeans.isEmpty()) { return Set.copyOf(nonDefault); } else if (priorityBeans.size() == 1) { return Set.of(priorityBeans.get(0)); } else { priorityBeans.sort(ArcContainerImpl::compareAlternativeBeans); Integer highest = getAlternativePriority(priorityBeans.get(0)); priorityBeans.removeIf(bean -> !highest.equals(getAlternativePriority(bean))); if (priorityBeans.size() == 1) { return Set.of(priorityBeans.get(0)); } return Set.copyOf(priorityBeans); } } private static boolean isAlternativeOrDeclaredOnAlternative(InjectableBean<?> bean) { return bean.getAlternativePriority() != null || bean.getDeclaringBean() != null && bean.getDeclaringBean().getAlternativePriority() != null; } private static Integer getAlternativePriority(InjectableBean<?> bean) { Integer beanPriority = bean.getAlternativePriority(); if (beanPriority == null && bean.getDeclaringBean() != null) { beanPriority = bean.getDeclaringBean().getAlternativePriority(); } return beanPriority; } List<InjectableBean<?>> getMatchingBeans(Resolvable resolvable) { List<InjectableBean<?>> matching = new ArrayList<>(); for (InjectableBean<?> bean : potentialBeans(resolvable.requiredType)) { if 
(matches(bean, resolvable.requiredType, resolvable.qualifiers)) { matching.add(bean); } } return matching; } Iterable<InjectableBean<?>> potentialBeans(Type type) { if (!Object.class.equals(type)) { Class<?> rawType = Types.getRawType(type); if (rawType != null) { List<InjectableBean<?>> match = beansByRawType.get(Types.boxedClass(rawType).getName()); return match == null ? List.of() : match; } } return beans; } List<RemovedBean> getMatchingRemovedBeans(Resolvable resolvable) { List<RemovedBean> matching = new ArrayList<>(); for (RemovedBean removedBean : removedBeans.get()) { if (matches(removedBean.getTypes(), removedBean.getQualifiers(), resolvable.requiredType, resolvable.qualifiers)) { matching.add(removedBean); } } return matching; } void scanRemovedBeans(Type requiredType, Annotation... qualifiers) { scanRemovedBeans(new Resolvable(requiredType, qualifiers)); } void scanRemovedBeans(Resolvable resolvable) { List<RemovedBean> removedMatching = getMatchingRemovedBeans(resolvable); if (!removedMatching.isEmpty()) { String separator = "===================="; String msg = "\n%1$s%1$s%1$s%1$s\n" + "CDI: programmatic lookup problem detected\n" + "-----------------------------------------\n" + "At least one bean matched the required type and qualifiers but was marked as unused and removed during build\n\n" + "Stack frame: %5$s\n" + "Required type: %3$s\n" + "Required qualifiers: %4$s\n" + "Removed beans:\n\t- %2$s\n" + "Solutions:\n" + "\t- Application developers can eliminate false positives via the @Unremovable annotation\n" + "\t- Extensions can eliminate false positives via build items, e.g. 
using the UnremovableBeanBuildItem\n" + "\t- See also https: + "\t- Enable the DEBUG log level to see the full stack trace\n" + "%1$s%1$s%1$s%1$s\n"; StackWalker walker = StackWalker.getInstance(); StackFrame frame = walker.walk(this::findCaller); LOGGER.warnf(msg, separator, removedMatching.stream().map(Object::toString).collect(Collectors.joining("\n\t- ")), resolvable.requiredType, Arrays.toString(resolvable.qualifiers), frame != null ? frame : "n/a"); if (LOGGER.isDebugEnabled()) { LOGGER.debug("\nCDI: programmatic lookup stack trace:\n" + walker.walk(this::collectStack)); } } } private StackFrame findCaller(Stream<StackFrame> stream) { return stream .filter(this::isCallerFrame) .findFirst().orElse(null); } private String collectStack(Stream<StackFrame> stream) { return stream .map(Object::toString) .collect(Collectors.joining("\n\t")); } private boolean isCallerFrame(StackFrame frame) { String className = frame.getClassName(); return !className.startsWith("io.quarkus.arc.impl"); } List<InjectableBean<?>> getMatchingBeans(String name) { List<InjectableBean<?>> matching = new ArrayList<>(); for (InjectableBean<?> bean : beans) { if (name.equals(bean.getName())) { matching.add(bean); } } return matching; } private static int compareAlternativeBeans(InjectableBean<?> bean1, InjectableBean<?> bean2) { Integer priority2 = bean2.getAlternativePriority(); if (priority2 == null && bean2.getDeclaringBean() != null) { priority2 = bean2.getDeclaringBean().getAlternativePriority(); } Integer priority1 = bean1.getAlternativePriority(); if (priority1 == null && bean1.getDeclaringBean() != null) { priority1 = bean1.getDeclaringBean().getAlternativePriority(); } return priority2.compareTo(priority1); } @SuppressWarnings("unchecked") <T> List<InjectableObserverMethod<? 
super T>> resolveObservers(Type eventType, Set<Annotation> eventQualifiers) { registeredQualifiers.verify(eventQualifiers); if (observers.isEmpty()) { return Collections.emptyList(); } Set<Type> eventTypes = new HierarchyDiscovery(eventType).getTypeClosure(); List<InjectableObserverMethod<? super T>> resolvedObservers = new ArrayList<>(); for (InjectableObserverMethod<?> observer : observers) { if (EventTypeAssignabilityRules.instance().matches(observer.getObservedType(), eventTypes)) { if (observer.getObservedQualifiers().isEmpty() || registeredQualifiers.isSubset(observer.getObservedQualifiers(), eventQualifiers)) { resolvedObservers.add((InjectableObserverMethod<? super T>) observer); } } } resolvedObservers.sort(InjectableObserverMethod::compare); return resolvedObservers; } List<Interceptor<?>> resolveInterceptors(InterceptionType type, Annotation... interceptorBindings) { if (interceptors.isEmpty()) { return Collections.emptyList(); } if (interceptorBindings.length == 0) { throw new IllegalArgumentException("No interceptor bindings"); } registeredInterceptorBindings.verify(interceptorBindings); List<Interceptor<?>> interceptors = new ArrayList<>(); List<Annotation> bindings = new ArrayList<>(); for (Annotation binding : interceptorBindings) { bindings.add(binding); Set<Annotation> transitive = registeredInterceptorBindings.getTransitive(binding.annotationType()); if (transitive != null) { bindings.addAll(transitive); } } for (InjectableInterceptor<?> interceptor : this.interceptors) { if (interceptor.intercepts(type) && hasAllInterceptionBindings(interceptor, bindings)) { interceptors.add(interceptor); } } return interceptors; } List<Decorator<?>> resolveDecorators(Set<Type> types, Annotation... 
qualifiers) { if (decorators.isEmpty()) { return Collections.emptyList(); } if (Objects.requireNonNull(types).isEmpty()) { throw new IllegalArgumentException("The set of bean types must not be empty"); } if (qualifiers == null || qualifiers.length == 0) { qualifiers = new Annotation[] { Default.Literal.INSTANCE }; } else { registeredQualifiers.verify(qualifiers); } List<Decorator<?>> decorators = new ArrayList<>(); for (InjectableDecorator<?> decorator : this.decorators) { if (decoratorMatches(decorator.getDelegateType(), decorator.getDelegateQualifiers(), types, Set.of(qualifiers))) { decorators.add(decorator); } } return decorators; } private boolean hasAllInterceptionBindings(InjectableInterceptor<?> interceptor, Iterable<Annotation> bindings) { for (Annotation binding : interceptor.getInterceptorBindings()) { if (!registeredQualifiers.hasQualifier(bindings, binding)) { return false; } } return true; } /** * Performs typesafe resolution and resolves ambiguities. * * @param requiredType * @param qualifiers * @return the set of resolved beans */ Set<InjectableBean<?>> getResolvedBeans(Type requiredType, Annotation... qualifiers) { if (qualifiers == null || qualifiers.length == 0) { qualifiers = new Annotation[] { Default.Literal.INSTANCE }; } else { registeredQualifiers.verify(qualifiers); } return resolved.getValue(new Resolvable(requiredType, qualifiers)); } private boolean matches(InjectableBean<?> bean, Type requiredType, Annotation... qualifiers) { return matches(bean.getTypes(), bean.getQualifiers(), requiredType, qualifiers); } private boolean matches(Set<Type> beanTypes, Set<Annotation> beanQualifiers, Type requiredType, Annotation... 
qualifiers) { if (!BeanTypeAssignabilityRules.instance().matches(requiredType, beanTypes)) { return false; } return registeredQualifiers.hasQualifiers(beanQualifiers, qualifiers); } private boolean decoratorMatches(Type delegateType, Set<Annotation> delegateQualifiers, Set<Type> requiredTypes, Set<Annotation> requiredQualifiers) { if (!DelegateInjectionPointAssignabilityRules.instance().matches(delegateType, requiredTypes)) { return false; } return registeredQualifiers.hasQualifiers(delegateQualifiers, requiredQualifiers.toArray(new Annotation[0])); } static ArcContainerImpl unwrap(ArcContainer container) { if (container instanceof ArcContainerImpl) { return (ArcContainerImpl) container; } else { throw new IllegalArgumentException(); } } public static void mockObservers(String beanIdentifier, boolean mock) { instance().mockObserversFor(beanIdentifier, mock); } private void mockObserversFor(String beanIdentifier, boolean mock) { for (InjectableObserverMethod<?> observer : observers) { if (observer instanceof Mockable && beanIdentifier.equals(observer.getDeclaringBeanIdentifier())) { Mockable mockable = (Mockable) observer; if (mock) { mockable.arc$setMock(null); } else { mockable.arc$clearMock(); } } } } public void mockObserversFor(Class<?> beanClass, boolean mock) { for (InjectableObserverMethod<?> observer : observers) { if (observer instanceof Mockable && beanClass.equals(observer.getBeanClass())) { Mockable mockable = (Mockable) observer; if (mock) { mockable.arc$setMock(null); } else { mockable.arc$clearMock(); } } } } public static ArcContainerImpl instance() { return unwrap(Arc.container()); } private static final class Resolvable { private static final Annotation[] ANY_QUALIFIER = { Any.Literal.INSTANCE }; final Type requiredType; final Annotation[] qualifiers; Resolvable(Type requiredType, Annotation[] qualifiers) { Type rawType = Reflections.getRawType(requiredType); if (Event.class.equals(rawType) || Instance.class.equals(rawType) || 
InjectionPoint.class.equals(rawType)) { this.requiredType = rawType; this.qualifiers = ANY_QUALIFIER; } else { this.requiredType = requiredType; this.qualifiers = qualifiers; } } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + Arrays.hashCode(qualifiers); result = prime * result + (requiredType == null ? 0 : requiredType.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof Resolvable)) { return false; } Resolvable other = (Resolvable) obj; if (requiredType == null) { if (other.requiredType != null) { return false; } } else if (!requiredType.equals(other.requiredType)) { return false; } return Arrays.equals(qualifiers, other.qualifiers); } } }
class ArcContainerImpl implements ArcContainer { private static final Logger LOGGER = Logger.getLogger(ArcContainerImpl.class.getPackage().getName()); private static final AtomicInteger ID_GENERATOR = new AtomicInteger(); private final String id; private final AtomicBoolean running; private final List<InjectableBean<?>> beans; private final Map<String, List<InjectableBean<?>>> beansByRawType; private final LazyValue<List<RemovedBean>> removedBeans; private final List<InjectableInterceptor<?>> interceptors; private final List<InjectableDecorator<?>> decorators; private final List<InjectableObserverMethod<?>> observers; private final Contexts contexts; private final ComputingCache<Resolvable, Set<InjectableBean<?>>> resolved; private final ComputingCache<String, InjectableBean<?>> beansById; private final ComputingCache<String, Set<InjectableBean<?>>> beansByName; private final ArrayList<ResourceReferenceProvider> resourceProviders; final InstanceImpl<Object> instance; final Qualifiers registeredQualifiers; final InterceptorBindings registeredInterceptorBindings; private volatile ExecutorService executorService; private final CurrentContextFactory currentContextFactory; private final boolean strictMode; static void precomputeBeanRawTypes(Map<String, List<InjectableBean<?>>> map, InjectableBean<?> bean) { for (Type type : bean.getTypes()) { if (Object.class.equals(type)) { continue; } Class<?> rawType = Types.getRawType(type); if (rawType == null) { continue; } rawType = Types.boxedClass(rawType); String key = rawType.getName(); List<InjectableBean<?>> match = map.get(key); if (match == null) { map.put(key, List.of(bean)); } else { if (match.contains(bean)) { continue; } if (match.size() == 1) { List<InjectableBean<?>> newMatch = new ArrayList<>(); newMatch.add(match.get(0)); newMatch.add(bean); map.put(key, newMatch); } else { match.add(bean); } } } } public void init() { Set<Annotation> qualifiers = Set.of(Initialized.Literal.APPLICATION, Any.Literal.INSTANCE); 
EventImpl.createNotifier(Object.class, Object.class, qualifiers, this, false, null) .notify("@Initialized(ApplicationScoped.class)"); CDI.setCDIProvider(new ArcCDIProvider()); LOGGER.debugf("ArC DI container initialized [beans=%s, observers=%s]", beans.size(), observers.size()); } @Override public InjectableContext getActiveContext(Class<? extends Annotation> scopeType) { return contexts.getActiveContext(scopeType); } @Override public List<InjectableContext> getContexts(Class<? extends Annotation> scopeType) { return contexts.getContexts(scopeType); } @Override public Set<Class<? extends Annotation>> getScopes() { return contexts.scopes; } @Override public <T> InstanceHandle<T> instance(Class<T> type, Annotation... qualifiers) { return instanceHandle(type, qualifiers); } @Override public <T> InstanceHandle<T> instance(TypeLiteral<T> type, Annotation... qualifiers) { return instanceHandle(type.getType(), qualifiers); } @Override public <X> InstanceHandle<X> instance(Type type, Annotation... qualifiers) { return instanceHandle(type, qualifiers); } @Override public <T> Supplier<InstanceHandle<T>> beanInstanceSupplier(Class<T> type, Annotation... qualifiers) { return createInstanceSupplier(false, type, qualifiers); } @Override public <T> Supplier<InstanceHandle<T>> instanceSupplier(Class<T> type, Annotation... qualifiers) { return createInstanceSupplier(true, type, qualifiers); } private <T> Supplier<InstanceHandle<T>> createInstanceSupplier(boolean resolveAmbiguities, Class<T> type, Annotation... 
qualifiers) { if (qualifiers == null || qualifiers.length == 0) { qualifiers = new Annotation[] { Default.Literal.INSTANCE }; } Set<InjectableBean<?>> resolvedBeans = resolved.getValue(new Resolvable(type, qualifiers)); Set<InjectableBean<?>> filteredBean = resolvedBeans; if (resolvedBeans.size() > 1) { if (resolveAmbiguities) { filteredBean = new HashSet<>(); for (InjectableBean<?> i : resolvedBeans) { if (i.getBeanClass().equals(type)) { filteredBean.add(i); } } } else { throw new AmbiguousResolutionException("Beans: " + resolvedBeans); } } @SuppressWarnings("unchecked") InjectableBean<T> bean = filteredBean.size() != 1 ? null : (InjectableBean<T>) filteredBean.iterator().next(); if (bean == null) { return null; } return new Supplier<InstanceHandle<T>>() { @Override public InstanceHandle<T> get() { return beanInstanceHandle(bean, null); } }; } @Override public <T> InstanceHandle<T> instance(InjectableBean<T> bean) { Objects.requireNonNull(bean); return beanInstanceHandle(bean, null); } @Override public <T> InjectableInstance<T> select(Class<T> type, Annotation... qualifiers) { return instance.select(type, qualifiers); } @Override public <T> InjectableInstance<T> select(TypeLiteral<T> type, Annotation... qualifiers) { return instance.select(type, qualifiers); } @Override public <T> List<InstanceHandle<T>> listAll(Class<T> type, Annotation... qualifiers) { return Instances.listOfHandles(CurrentInjectionPointProvider.EMPTY_SUPPLIER, type, Set.of(qualifiers), new CreationalContextImpl<>(null)); } @Override public <T> List<InstanceHandle<T>> listAll(TypeLiteral<T> type, Annotation... 
qualifiers) { return Instances.listOfHandles(CurrentInjectionPointProvider.EMPTY_SUPPLIER, type.getType(), Set.of(qualifiers), new CreationalContextImpl<>(null)); } @Override public boolean isRunning() { return running.get(); } @SuppressWarnings("unchecked") @Override public <T> InjectableBean<T> bean(String beanIdentifier) { Objects.requireNonNull(beanIdentifier); return (InjectableBean<T>) beansById.getValue(beanIdentifier); } @Override public InjectableBean<?> namedBean(String name) { Objects.requireNonNull(name); Set<InjectableBean<?>> found = beansByName.getValue(name); return found.size() == 1 ? found.iterator().next() : null; } @SuppressWarnings("unchecked") @Override public <T> InstanceHandle<T> instance(String name) { Objects.requireNonNull(name); Set<InjectableBean<?>> resolvedBeans = beansByName.getValue(name); return resolvedBeans.size() != 1 ? EagerInstanceHandle.unavailable() : (InstanceHandle<T>) beanInstanceHandle(resolvedBeans.iterator() .next(), null); } @Override public ManagedContext requestContext() { return contexts.requestContext; } @Override public BeanManager beanManager() { return BeanManagerImpl.INSTANCE.get(); } @Override public ExecutorService getExecutorService() { ExecutorService executor = executorService; return executor != null ? 
executor : ForkJoinPool.commonPool(); } public void setExecutor(ExecutorService executor) { this.executorService = executor; } @Override public CurrentContextFactory getCurrentContextFactory() { return currentContextFactory; } @Override public boolean strictCompatibility() { return strictMode; } @Override public String toString() { return "ArcContainerImpl [id=" + id + ", running=" + running + ", beans=" + beans.size() + ", observers=" + observers.size() + ", scopes=" + contexts.scopes.size() + "]"; } public synchronized void shutdown() { if (running.get()) { CDI<?> cdi = CDI.current(); if (cdi instanceof ArcCDI) { ArcCDI arcCdi = (ArcCDI) cdi; arcCdi.destroy(); } contexts.requestContext.terminate(); Set<Annotation> beforeDestroyQualifiers = new HashSet<>(4); beforeDestroyQualifiers.add(BeforeDestroyed.Literal.APPLICATION); beforeDestroyQualifiers.add(Any.Literal.INSTANCE); try { EventImpl.createNotifier(Object.class, Object.class, beforeDestroyQualifiers, this, false, null) .notify(toString()); } catch (Exception e) { LOGGER.warn("An error occurred during delivery of the @BeforeDestroyed(ApplicationScoped.class) event", e); } contexts.applicationContext.destroy(); Set<Annotation> destroyQualifiers = new HashSet<>(4); destroyQualifiers.add(Destroyed.Literal.APPLICATION); destroyQualifiers.add(Any.Literal.INSTANCE); try { EventImpl.createNotifier(Object.class, Object.class, destroyQualifiers, this, false, null).notify(toString()); } catch (Exception e) { LOGGER.warn("An error occurred during delivery of the @Destroyed(ApplicationScoped.class) event", e); } contexts.singletonContext.destroy(); Reflections.clearCaches(); resolved.clear(); running.set(false); InterceptedStaticMethods.clear(); LOGGER.debugf("ArC DI container shut down"); } } public List<InjectableBean<?>> getBeans() { return beans; } public List<RemovedBean> getRemovedBeans() { return removedBeans.get(); } public List<InjectableInterceptor<?>> getInterceptors() { return interceptors; } public 
List<InjectableDecorator<?>> getDecorators() { return decorators; } public List<InjectableObserverMethod<?>> getObservers() { return observers; } InstanceHandle<Object> getResource(Type type, Set<Annotation> annotations) { for (ResourceReferenceProvider resourceProvider : resourceProviders) { InstanceHandle<Object> ret = resourceProvider.get(type, annotations); if (ret != null) { return ret; } } return null; } private Notifier<Object> notifierOrNull(Set<Annotation> qualifiers) { Notifier<Object> notifier = EventImpl.createNotifier(Object.class, Object.class, qualifiers, this, false, null); return notifier.isEmpty() ? null : notifier; } private static void addBuiltInBeans(List<InjectableBean<?>> beans, Map<String, List<InjectableBean<?>>> beansByRawType) { BeanManagerBean beanManagerBean = new BeanManagerBean(); beans.add(beanManagerBean); precomputeBeanRawTypes(beansByRawType, beanManagerBean); EventBean eventBean = new EventBean(); beans.add(eventBean); precomputeBeanRawTypes(beansByRawType, eventBean); beans.add(InstanceBean.INSTANCE); precomputeBeanRawTypes(beansByRawType, InstanceBean.INSTANCE); InjectionPointBean injectionPointBean = new InjectionPointBean(); beans.add(injectionPointBean); precomputeBeanRawTypes(beansByRawType, injectionPointBean); } private <T> InstanceHandle<T> instanceHandle(Type type, Annotation... 
qualifiers) { return beanInstanceHandle(getBean(type, qualifiers), null); } static <T> InstanceHandle<T> beanInstanceHandle(InjectableBean<T> bean, CreationalContextImpl<T> parentContext, boolean resetCurrentInjectionPoint, Consumer<T> destroyLogic) { return beanInstanceHandle(bean, parentContext, resetCurrentInjectionPoint, destroyLogic, false); } static <T> InstanceHandle<T> beanInstanceHandle(InjectableBean<T> bean, CreationalContextImpl<T> parentContext, boolean resetCurrentInjectionPoint, Consumer<T> destroyLogic, boolean useParentCreationalContextDirectly) { if (bean != null) { if (parentContext == null && Dependent.class.equals(bean.getScope())) { parentContext = new CreationalContextImpl<>(null); } CreationalContextImpl<T> creationalContext; if (parentContext != null) { creationalContext = useParentCreationalContextDirectly ? parentContext : parentContext.child(bean); } else { creationalContext = new CreationalContextImpl<>(bean); } InjectionPoint prev = null; if (resetCurrentInjectionPoint) { prev = InjectionPointProvider.set(CurrentInjectionPointProvider.EMPTY); } try { return new EagerInstanceHandle<>(bean, bean.get(creationalContext), creationalContext, parentContext, destroyLogic); } finally { if (resetCurrentInjectionPoint) { InjectionPointProvider.set(prev); } } } else { return EagerInstanceHandle.unavailable(); } } static <T> InstanceHandle<T> beanInstanceHandle(InjectableBean<T> bean, CreationalContextImpl<T> parentContext) { return beanInstanceHandle(bean, parentContext, true, null); } @SuppressWarnings("unchecked") private <T> InjectableBean<T> getBean(Type requiredType, Annotation... 
qualifiers) { if (qualifiers == null || qualifiers.length == 0) { qualifiers = new Annotation[] { Default.Literal.INSTANCE }; } else { registeredQualifiers.verify(qualifiers); } Resolvable resolvable = new Resolvable(requiredType, qualifiers); Set<InjectableBean<?>> resolvedBeans = resolved.getValue(resolvable); if (resolvedBeans.isEmpty()) { scanRemovedBeans(resolvable); } return resolvedBeans.size() != 1 ? null : (InjectableBean<T>) resolvedBeans.iterator().next(); } Set<Bean<?>> getBeans(Type requiredType, Annotation... qualifiers) { if (requiredType instanceof TypeVariable) { throw new IllegalArgumentException("The given type is a type variable: " + requiredType); } if (qualifiers == null || qualifiers.length == 0) { qualifiers = new Annotation[] { Default.Literal.INSTANCE }; } else { registeredQualifiers.verify(qualifiers); } return Set.of(getMatchingBeans(new Resolvable(requiredType, qualifiers)).toArray(new Bean<?>[] {})); } Set<Bean<?>> getBeans(String name) { return new HashSet<>(getMatchingBeans(name)); } boolean isScope(Class<? extends Annotation> annotationType) { if (annotationType.isAnnotationPresent(Scope.class) || annotationType.isAnnotationPresent(NormalScope.class)) { return true; } for (Class<? extends Annotation> scopeType : contexts.scopes) { if (scopeType.equals(annotationType)) { return true; } } return false; } boolean isNormalScope(Class<? 
extends Annotation> annotationType) { if (annotationType.isAnnotationPresent(NormalScope.class)) { return true; } List<InjectableContext> injectableContexts = contexts.getContexts(annotationType); for (InjectableContext context : injectableContexts) { if (context.isNormal()) { return true; } } return false; } private Set<InjectableBean<?>> resolve(Resolvable resolvable) { return resolve(getMatchingBeans(resolvable)); } private Set<InjectableBean<?>> resolve(String name) { return resolve(getMatchingBeans(name)); } private InjectableBean<?> findById(String identifier) { for (InjectableBean<?> bean : beans) { if (bean.getIdentifier().equals(identifier)) { return bean; } } for (InjectableInterceptor<?> interceptorBean : interceptors) { if (interceptorBean.getIdentifier().equals(identifier)) { return interceptorBean; } } for (InjectableDecorator<?> decoratorBean : decorators) { if (decoratorBean.getIdentifier().equals(identifier)) { return decoratorBean; } } return null; } @SuppressWarnings("unchecked") static <X> Bean<? extends X> resolve(Set<Bean<? extends X>> beans) { if (beans == null || beans.isEmpty()) { return null; } else if (beans.size() == 1) { return beans.iterator().next(); } else { if (beans.stream().allMatch(InjectableBean.class::isInstance)) { List<InjectableBean<?>> matching = new ArrayList<>(beans.size()); for (Bean<? extends X> bean : beans) { matching.add((InjectableBean<? extends X>) bean); } Set<InjectableBean<?>> resolved = resolve(matching); if (resolved.size() != 1) { throw new AmbiguousResolutionException(resolved.toString()); } return (Bean<? extends X>) resolved.iterator().next(); } else { Set<Bean<? 
extends X>> resolved = new HashSet<>(beans); resolved.removeIf(not(Bean::isAlternative)); if (resolved.size() != 1) { throw new AmbiguousResolutionException(resolved.toString()); } return resolved.iterator().next(); } } } private static Set<InjectableBean<?>> resolve(List<InjectableBean<?>> matching) { if (matching.isEmpty()) { return Collections.emptySet(); } else if (matching.size() == 1) { return Set.of(matching.get(0)); } List<InjectableBean<?>> nonDefault = new ArrayList<>(matching); nonDefault.removeIf(InjectableBean::isDefaultBean); if (nonDefault.isEmpty()) { return Set.copyOf(matching); } else if (nonDefault.size() == 1) { return Set.of(nonDefault.get(0)); } List<InjectableBean<?>> priorityBeans = new ArrayList<>(nonDefault); priorityBeans.removeIf(not(ArcContainerImpl::isAlternativeOrDeclaredOnAlternative)); if (priorityBeans.isEmpty()) { return Set.copyOf(nonDefault); } else if (priorityBeans.size() == 1) { return Set.of(priorityBeans.get(0)); } else { priorityBeans.sort(ArcContainerImpl::compareAlternativeBeans); Integer highest = getAlternativePriority(priorityBeans.get(0)); priorityBeans.removeIf(bean -> !highest.equals(getAlternativePriority(bean))); if (priorityBeans.size() == 1) { return Set.of(priorityBeans.get(0)); } return Set.copyOf(priorityBeans); } } private static boolean isAlternativeOrDeclaredOnAlternative(InjectableBean<?> bean) { return bean.getAlternativePriority() != null || bean.getDeclaringBean() != null && bean.getDeclaringBean().getAlternativePriority() != null; } private static Integer getAlternativePriority(InjectableBean<?> bean) { Integer beanPriority = bean.getAlternativePriority(); if (beanPriority == null && bean.getDeclaringBean() != null) { beanPriority = bean.getDeclaringBean().getAlternativePriority(); } return beanPriority; } List<InjectableBean<?>> getMatchingBeans(Resolvable resolvable) { List<InjectableBean<?>> matching = new ArrayList<>(); for (InjectableBean<?> bean : potentialBeans(resolvable.requiredType)) { if 
(matches(bean, resolvable.requiredType, resolvable.qualifiers)) { matching.add(bean); } } return matching; } Iterable<InjectableBean<?>> potentialBeans(Type type) { if (!Object.class.equals(type)) { Class<?> rawType = Types.getRawType(type); if (rawType != null) { List<InjectableBean<?>> match = beansByRawType.get(Types.boxedClass(rawType).getName()); return match == null ? List.of() : match; } } return beans; } List<RemovedBean> getMatchingRemovedBeans(Resolvable resolvable) { List<RemovedBean> matching = new ArrayList<>(); for (RemovedBean removedBean : removedBeans.get()) { if (matches(removedBean.getTypes(), removedBean.getQualifiers(), resolvable.requiredType, resolvable.qualifiers)) { matching.add(removedBean); } } return matching; } void scanRemovedBeans(Type requiredType, Annotation... qualifiers) { scanRemovedBeans(new Resolvable(requiredType, qualifiers)); } void scanRemovedBeans(Resolvable resolvable) { List<RemovedBean> removedMatching = getMatchingRemovedBeans(resolvable); if (!removedMatching.isEmpty()) { String separator = "===================="; String msg = "\n%1$s%1$s%1$s%1$s\n" + "CDI: programmatic lookup problem detected\n" + "-----------------------------------------\n" + "At least one bean matched the required type and qualifiers but was marked as unused and removed during build\n\n" + "Stack frame: %5$s\n" + "Required type: %3$s\n" + "Required qualifiers: %4$s\n" + "Removed beans:\n\t- %2$s\n" + "Solutions:\n" + "\t- Application developers can eliminate false positives via the @Unremovable annotation\n" + "\t- Extensions can eliminate false positives via build items, e.g. 
using the UnremovableBeanBuildItem\n" + "\t- See also https: + "\t- Enable the DEBUG log level to see the full stack trace\n" + "%1$s%1$s%1$s%1$s\n"; StackWalker walker = StackWalker.getInstance(); StackFrame frame = walker.walk(this::findCaller); LOGGER.warnf(msg, separator, removedMatching.stream().map(Object::toString).collect(Collectors.joining("\n\t- ")), resolvable.requiredType, Arrays.toString(resolvable.qualifiers), frame != null ? frame : "n/a"); if (LOGGER.isDebugEnabled()) { LOGGER.debug("\nCDI: programmatic lookup stack trace:\n" + walker.walk(this::collectStack)); } } } private StackFrame findCaller(Stream<StackFrame> stream) { return stream .filter(this::isCallerFrame) .findFirst().orElse(null); } private String collectStack(Stream<StackFrame> stream) { return stream .map(Object::toString) .collect(Collectors.joining("\n\t")); } private boolean isCallerFrame(StackFrame frame) { String className = frame.getClassName(); return !className.startsWith("io.quarkus.arc.impl"); } List<InjectableBean<?>> getMatchingBeans(String name) { List<InjectableBean<?>> matching = new ArrayList<>(); for (InjectableBean<?> bean : beans) { if (name.equals(bean.getName())) { matching.add(bean); } } return matching; } private static int compareAlternativeBeans(InjectableBean<?> bean1, InjectableBean<?> bean2) { Integer priority2 = bean2.getAlternativePriority(); if (priority2 == null && bean2.getDeclaringBean() != null) { priority2 = bean2.getDeclaringBean().getAlternativePriority(); } Integer priority1 = bean1.getAlternativePriority(); if (priority1 == null && bean1.getDeclaringBean() != null) { priority1 = bean1.getDeclaringBean().getAlternativePriority(); } return priority2.compareTo(priority1); } @SuppressWarnings("unchecked") <T> List<InjectableObserverMethod<? 
super T>> resolveObservers(Type eventType, Set<Annotation> eventQualifiers) { registeredQualifiers.verify(eventQualifiers); if (observers.isEmpty()) { return Collections.emptyList(); } Set<Type> eventTypes = new HierarchyDiscovery(eventType).getTypeClosure(); List<InjectableObserverMethod<? super T>> resolvedObservers = new ArrayList<>(); for (InjectableObserverMethod<?> observer : observers) { if (EventTypeAssignabilityRules.instance().matches(observer.getObservedType(), eventTypes)) { if (observer.getObservedQualifiers().isEmpty() || registeredQualifiers.isSubset(observer.getObservedQualifiers(), eventQualifiers)) { resolvedObservers.add((InjectableObserverMethod<? super T>) observer); } } } resolvedObservers.sort(InjectableObserverMethod::compare); return resolvedObservers; } List<Interceptor<?>> resolveInterceptors(InterceptionType type, Annotation... interceptorBindings) { if (interceptors.isEmpty()) { return Collections.emptyList(); } if (interceptorBindings.length == 0) { throw new IllegalArgumentException("No interceptor bindings"); } registeredInterceptorBindings.verify(interceptorBindings); List<Interceptor<?>> interceptors = new ArrayList<>(); List<Annotation> bindings = new ArrayList<>(); for (Annotation binding : interceptorBindings) { bindings.add(binding); Set<Annotation> transitive = registeredInterceptorBindings.getTransitive(binding.annotationType()); if (transitive != null) { bindings.addAll(transitive); } } for (InjectableInterceptor<?> interceptor : this.interceptors) { if (interceptor.intercepts(type) && hasAllInterceptionBindings(interceptor, bindings)) { interceptors.add(interceptor); } } return interceptors; } List<Decorator<?>> resolveDecorators(Set<Type> types, Annotation... 
qualifiers) { if (decorators.isEmpty()) { return Collections.emptyList(); } if (Objects.requireNonNull(types).isEmpty()) { throw new IllegalArgumentException("The set of bean types must not be empty"); } if (qualifiers == null || qualifiers.length == 0) { qualifiers = new Annotation[] { Default.Literal.INSTANCE }; } else { registeredQualifiers.verify(qualifiers); } List<Decorator<?>> decorators = new ArrayList<>(); for (InjectableDecorator<?> decorator : this.decorators) { if (decoratorMatches(decorator.getDelegateType(), decorator.getDelegateQualifiers(), types, Set.of(qualifiers))) { decorators.add(decorator); } } return decorators; } private boolean hasAllInterceptionBindings(InjectableInterceptor<?> interceptor, Iterable<Annotation> bindings) { for (Annotation binding : interceptor.getInterceptorBindings()) { if (!registeredQualifiers.hasQualifier(bindings, binding)) { return false; } } return true; } /** * Performs typesafe resolution and resolves ambiguities. * * @param requiredType * @param qualifiers * @return the set of resolved beans */ Set<InjectableBean<?>> getResolvedBeans(Type requiredType, Annotation... qualifiers) { if (qualifiers == null || qualifiers.length == 0) { qualifiers = new Annotation[] { Default.Literal.INSTANCE }; } else { registeredQualifiers.verify(qualifiers); } return resolved.getValue(new Resolvable(requiredType, qualifiers)); } private boolean matches(InjectableBean<?> bean, Type requiredType, Annotation... qualifiers) { return matches(bean.getTypes(), bean.getQualifiers(), requiredType, qualifiers); } private boolean matches(Set<Type> beanTypes, Set<Annotation> beanQualifiers, Type requiredType, Annotation... 
qualifiers) { if (!BeanTypeAssignabilityRules.instance().matches(requiredType, beanTypes)) { return false; } return registeredQualifiers.hasQualifiers(beanQualifiers, qualifiers); } private boolean decoratorMatches(Type delegateType, Set<Annotation> delegateQualifiers, Set<Type> requiredTypes, Set<Annotation> requiredQualifiers) { if (!DelegateInjectionPointAssignabilityRules.instance().matches(delegateType, requiredTypes)) { return false; } return registeredQualifiers.hasQualifiers(delegateQualifiers, requiredQualifiers.toArray(new Annotation[0])); } static ArcContainerImpl unwrap(ArcContainer container) { if (container instanceof ArcContainerImpl) { return (ArcContainerImpl) container; } else { throw new IllegalArgumentException(); } } public static void mockObservers(String beanIdentifier, boolean mock) { instance().mockObserversFor(beanIdentifier, mock); } private void mockObserversFor(String beanIdentifier, boolean mock) { for (InjectableObserverMethod<?> observer : observers) { if (observer instanceof Mockable && beanIdentifier.equals(observer.getDeclaringBeanIdentifier())) { Mockable mockable = (Mockable) observer; if (mock) { mockable.arc$setMock(null); } else { mockable.arc$clearMock(); } } } } public void mockObserversFor(Class<?> beanClass, boolean mock) { for (InjectableObserverMethod<?> observer : observers) { if (observer instanceof Mockable && beanClass.equals(observer.getBeanClass())) { Mockable mockable = (Mockable) observer; if (mock) { mockable.arc$setMock(null); } else { mockable.arc$clearMock(); } } } } public static ArcContainerImpl instance() { return unwrap(Arc.container()); } private static final class Resolvable { private static final Annotation[] ANY_QUALIFIER = { Any.Literal.INSTANCE }; final Type requiredType; final Annotation[] qualifiers; Resolvable(Type requiredType, Annotation[] qualifiers) { Type rawType = Reflections.getRawType(requiredType); if (Event.class.equals(rawType) || Instance.class.equals(rawType) || 
InjectionPoint.class.equals(rawType)) { this.requiredType = rawType; this.qualifiers = ANY_QUALIFIER; } else { this.requiredType = requiredType; this.qualifiers = qualifiers; } } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + Arrays.hashCode(qualifiers); result = prime * result + (requiredType == null ? 0 : requiredType.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof Resolvable)) { return false; } Resolvable other = (Resolvable) obj; if (requiredType == null) { if (other.requiredType != null) { return false; } } else if (!requiredType.equals(other.requiredType)) { return false; } return Arrays.equals(qualifiers, other.qualifiers); } } }
Is there a possibility of getting `null` here?
public static Hover getHover(HoverContext context) { Optional<Document> srcFile = context.currentDocument(); Optional<SemanticModel> semanticModel = context.currentSemanticModel(); if (semanticModel.isEmpty() || srcFile.isEmpty()) { return HoverUtil.getHoverObject(""); } fillTokenInfoAtCursor(context); Position cursorPosition = context.getCursorPosition(); LinePosition linePosition = LinePosition.from(cursorPosition.getLine(), cursorPosition.getCharacter()); context.checkCancelled(); Optional<? extends Symbol> symbolAtCursor = semanticModel.get().symbol(srcFile.get(), linePosition); context.checkCancelled(); HoverObjectResolver provider = new HoverObjectResolver(context); Hover hoverObj = HoverUtil.getHoverObject(""); if (symbolAtCursor.isEmpty()) { Range nodeRange = new Range(context.getCursorPosition(), context.getCursorPosition()); NonTerminalNode nodeAtCursor = CommonUtil.findNode(nodeRange, srcFile.get().syntaxTree()); if (nodeAtCursor != null) { MatchedExpressionNodeResolver expressionResolver = new MatchedExpressionNodeResolver(nodeAtCursor); Optional<ExpressionNode> expr = expressionResolver.findExpression(nodeAtCursor); if (expr.isPresent()) { hoverObj = provider.getHoverObjectForExpression(expr.get()); } } } else { hoverObj = provider.getHoverObjectForSymbol(symbolAtCursor.get()); } if (hoverObj.getContents().isRight()) { MarkupContent markupContent = hoverObj.getContents().getRight(); String content = markupContent.getValue(); HoverSymbolResolver symbolResolver = new HoverSymbolResolver(context, semanticModel.get()); Optional<Symbol> symbol = context.getNodeAtCursor().apply(symbolResolver); if (symbol == null || symbol.isEmpty() || !symbolResolver.isSymbolReferable()) { return hoverObj; } Optional<ModuleID> moduleID = symbol.flatMap(Symbol::getModule).map(ModuleSymbol::id); Optional<HoverConstructKind> constructKind = symbolResolver.getConstructKind(); if (moduleID.isEmpty() || symbol.get().getName().isEmpty() || constructKind.isEmpty()) { return 
hoverObj; } String url = APIDocReference.from(moduleID.get().orgName(), moduleID.get().moduleName(), moduleID.get().version(), constructKind.get(), symbol.get().getName().get()); markupContent.setValue((content.isEmpty() ? "" : content + MarkupUtils.getHorizontalSeparator()) + "[View API Docs](" + url + ")"); hoverObj.setContents(markupContent); } return hoverObj; }
if (symbol == null || symbol.isEmpty() || !symbolResolver.isSymbolReferable()) {
public static Hover getHover(HoverContext context) { Optional<Document> srcFile = context.currentDocument(); Optional<SemanticModel> semanticModel = context.currentSemanticModel(); if (semanticModel.isEmpty() || srcFile.isEmpty()) { return HoverUtil.getHoverObject(""); } fillTokenInfoAtCursor(context); Position cursorPosition = context.getCursorPosition(); LinePosition linePosition = LinePosition.from(cursorPosition.getLine(), cursorPosition.getCharacter()); context.checkCancelled(); Optional<? extends Symbol> symbolAtCursor = semanticModel.get().symbol(srcFile.get(), linePosition); context.checkCancelled(); HoverObjectResolver provider = new HoverObjectResolver(context); Hover hoverObj = HoverUtil.getHoverObject(""); if (symbolAtCursor.isEmpty()) { Range nodeRange = new Range(context.getCursorPosition(), context.getCursorPosition()); NonTerminalNode nodeAtCursor = CommonUtil.findNode(nodeRange, srcFile.get().syntaxTree()); if (nodeAtCursor != null) { MatchedExpressionNodeResolver expressionResolver = new MatchedExpressionNodeResolver(nodeAtCursor); Optional<ExpressionNode> expr = expressionResolver.findExpression(nodeAtCursor); if (expr.isPresent()) { hoverObj = provider.getHoverObjectForExpression(expr.get()); } } } else { hoverObj = provider.getHoverObjectForSymbol(symbolAtCursor.get()); } if (hoverObj.getContents().isRight()) { MarkupContent markupContent = hoverObj.getContents().getRight(); String content = markupContent.getValue(); HoverSymbolResolver symbolResolver = new HoverSymbolResolver(context, semanticModel.get()); Optional<Symbol> symbol = context.getNodeAtCursor().apply(symbolResolver); if (symbol == null || symbol.isEmpty() || !symbolResolver.isSymbolReferable()) { return hoverObj; } Optional<ModuleID> moduleID = symbol.flatMap(Symbol::getModule).map(ModuleSymbol::id); Optional<HoverConstructKind> constructKind = symbolResolver.getConstructKind(); if (moduleID.isEmpty() || symbol.get().getName().isEmpty() || constructKind.isEmpty()) { return 
hoverObj; } String url = APIDocReference.from(moduleID.get().orgName(), moduleID.get().moduleName(), moduleID.get().version(), constructKind.get(), symbol.get().getName().get()); markupContent.setValue((content.isEmpty() ? "" : content + MarkupUtils.getHorizontalSeparator()) + "[View API Docs](" + url + ")"); hoverObj.setContents(markupContent); } return hoverObj; }
class HoverUtil { /** * Get the hover content. * * @param context Hover operation context * @return {@link Hover} Hover content */ /** * returns the default hover object. * * @return {@link Hover} hover object. */ protected static Hover getHoverObject() { return getHoverObject(""); } /** * Get a Hover object given the content. * * @return {@link Hover} hover object. */ protected static Hover getHoverObject(String content) { Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(content); hover.setContents(hoverMarkupContent); return hover; } /** * Check if a given symbol has valid access modifiers to be visible with in the give context. * * @param symbol Symbol. * @param currentPackage Current Package. * @param currentModule Current Module. * @return {@link Boolean} Whether the symbol is visible in the current context. */ protected static Boolean withValidAccessModifiers(Symbol symbol, Package currentPackage, ModuleId currentModule, HoverContext context) { Optional<Project> project = context.workspace().project(context.filePath()); Optional<ModuleSymbol> typeSymbolModule = symbol.getModule(); if (project.isEmpty() || typeSymbolModule.isEmpty()) { return false; } boolean isResource = false; boolean isPrivate = false; boolean isPublic = false; boolean isRemote = false; if (symbol instanceof Qualifiable) { Qualifiable qSymbol = (Qualifiable) symbol; isPrivate = qSymbol.qualifiers().contains(Qualifier.PRIVATE); isPublic = qSymbol.qualifiers().contains(Qualifier.PUBLIC); isResource = qSymbol.qualifiers().contains(Qualifier.RESOURCE); isRemote = qSymbol.qualifiers().contains(Qualifier.REMOTE); } if (isResource || isRemote || isPublic) { return true; } ModuleID objModuleId = typeSymbolModule.get().id(); return (!isPrivate && objModuleId.moduleName().equals(currentModule.moduleName()) && objModuleId.orgName().equals(currentPackage.packageOrg().value())); } 
/** * Get the description only hover object. * * @return {@link Hover} */ public static Hover getDescriptionOnlyHoverObject(Symbol symbol) { if (!(symbol instanceof Documentable) || ((Documentable) symbol).documentation().isEmpty()) { return HoverUtil.getHoverObject(""); } return getDescriptionOnlyHoverObject(((Documentable) symbol).documentation().get()); } /** * Get the description only hover object. * * @return {@link Hover} */ public static Hover getDescriptionOnlyHoverObject(Documentation documentation) { String description = ""; if (documentation.description().isPresent()) { description = documentation.description().get(); } return HoverUtil.getHoverObject(description); } public static void fillTokenInfoAtCursor(HoverContext context) { Optional<Token> tokenAtCursor = PositionUtil.findTokenAtPosition(context, context.getCursorPosition()); Optional<Document> document = context.currentDocument(); if (document.isEmpty() || tokenAtCursor.isEmpty()) { throw new RuntimeException("Could not find a valid document/token"); } context.setTokenAtCursor(tokenAtCursor.get()); TextDocument textDocument = document.get().textDocument(); Position position = context.getCursorPosition(); int txtPos = textDocument.textPositionFrom(LinePosition.from(position.getLine(), position.getCharacter())); context.setCursorPositionInTree(txtPos); TextRange range = TextRange.from(txtPos, 0); NonTerminalNode nonTerminalNode = ((ModulePartNode) document.get().syntaxTree().rootNode()).findNode(range); context.setNodeAtCursor(nonTerminalNode); } }
class HoverUtil { /** * Get the hover content. * * @param context Hover operation context * @return {@link Hover} Hover content */ /** * returns the default hover object. * * @return {@link Hover} hover object. */ protected static Hover getHoverObject() { return getHoverObject(""); } /** * Get a Hover object given the content. * * @return {@link Hover} hover object. */ protected static Hover getHoverObject(String content) { Hover hover = new Hover(); MarkupContent hoverMarkupContent = new MarkupContent(); hoverMarkupContent.setKind(CommonUtil.MARKDOWN_MARKUP_KIND); hoverMarkupContent.setValue(content); hover.setContents(hoverMarkupContent); return hover; } /** * Check if a given symbol has valid access modifiers to be visible with in the give context. * * @param symbol Symbol. * @param currentPackage Current Package. * @param currentModule Current Module. * @return {@link Boolean} Whether the symbol is visible in the current context. */ protected static Boolean withValidAccessModifiers(Symbol symbol, Package currentPackage, ModuleId currentModule, HoverContext context) { Optional<Project> project = context.workspace().project(context.filePath()); Optional<ModuleSymbol> typeSymbolModule = symbol.getModule(); if (project.isEmpty() || typeSymbolModule.isEmpty()) { return false; } boolean isResource = false; boolean isPrivate = false; boolean isPublic = false; boolean isRemote = false; if (symbol instanceof Qualifiable) { Qualifiable qSymbol = (Qualifiable) symbol; isPrivate = qSymbol.qualifiers().contains(Qualifier.PRIVATE); isPublic = qSymbol.qualifiers().contains(Qualifier.PUBLIC); isResource = qSymbol.qualifiers().contains(Qualifier.RESOURCE); isRemote = qSymbol.qualifiers().contains(Qualifier.REMOTE); } if (isResource || isRemote || isPublic) { return true; } ModuleID objModuleId = typeSymbolModule.get().id(); return (!isPrivate && objModuleId.moduleName().equals(currentModule.moduleName()) && objModuleId.orgName().equals(currentPackage.packageOrg().value())); } 
/** * Get the description only hover object. * * @return {@link Hover} */ public static Hover getDescriptionOnlyHoverObject(Symbol symbol) { if (!(symbol instanceof Documentable) || ((Documentable) symbol).documentation().isEmpty()) { return HoverUtil.getHoverObject(""); } return getDescriptionOnlyHoverObject(((Documentable) symbol).documentation().get()); } /** * Get the description only hover object. * * @return {@link Hover} */ public static Hover getDescriptionOnlyHoverObject(Documentation documentation) { String description = ""; if (documentation.description().isPresent()) { description = documentation.description().get(); } return HoverUtil.getHoverObject(description); } public static void fillTokenInfoAtCursor(HoverContext context) { Optional<Token> tokenAtCursor = PositionUtil.findTokenAtPosition(context, context.getCursorPosition()); Optional<Document> document = context.currentDocument(); if (document.isEmpty() || tokenAtCursor.isEmpty()) { throw new RuntimeException("Could not find a valid document/token"); } context.setTokenAtCursor(tokenAtCursor.get()); TextDocument textDocument = document.get().textDocument(); Position position = context.getCursorPosition(); int txtPos = textDocument.textPositionFrom(LinePosition.from(position.getLine(), position.getCharacter())); context.setCursorPositionInTree(txtPos); TextRange range = TextRange.from(txtPos, 0); NonTerminalNode nonTerminalNode = ((ModulePartNode) document.get().syntaxTree().rootNode()).findNode(range); context.setNodeAtCursor(nonTerminalNode); } }
The pattern of lock should be: ``` olapTable.writeLock(); try { ...... } finally { olapTable.writeUnlock(); }
public void modifyDefaultDistributionBucketNum(Database db, OlapTable olapTable, ModifyDistributionClause modifyDistributionClause) throws DdlException { olapTable.writeLock(); if (olapTable.isColocateTable()) { throw new DdlException("Cannot change default bucket number of colocate table."); } if (olapTable.getPartitionInfo().getType() != PartitionType.RANGE) { throw new DdlException("Only support change partitioned table's distribution."); } DistributionInfo defaultDistributionInfo = olapTable.getDefaultDistributionInfo(); if(defaultDistributionInfo.getType() != DistributionInfoType.HASH) { throw new DdlException("Cannot change default bucket number of distribution type " + defaultDistributionInfo.getType()); } DistributionDesc distributionDesc = modifyDistributionClause.getDistributionDesc(); DistributionInfo distributionInfo = null; List<Column> baseSchema = olapTable.getBaseSchema(); if (distributionDesc != null) { distributionInfo = distributionDesc.toDistributionInfo(baseSchema); if (distributionInfo.getType() != DistributionInfoType.HASH) { throw new DdlException("Cannot change distribution type to " + distributionInfo.getType()); } HashDistributionInfo hashDistributionInfo = (HashDistributionInfo) distributionInfo; List<Column> newDistriCols = hashDistributionInfo.getDistributionColumns(); List<Column> defaultDistriCols = ((HashDistributionInfo) defaultDistributionInfo).getDistributionColumns(); if (!newDistriCols.equals(defaultDistriCols)) { throw new DdlException("Cannot assign hash distribution with different distribution cols. 
" + "default is: " + defaultDistriCols); } int bucketNum = hashDistributionInfo.getBucketNum(); if (bucketNum <= 0) { throw new DdlException("Cannot assign hash distribution buckets less than 1"); } defaultDistributionInfo.setBucketNum(bucketNum); ModifyTableDefaultDistributionBucketNumOperationLog info = new ModifyTableDefaultDistributionBucketNumOperationLog(db.getId(), olapTable.getId(), bucketNum); editLog.logModifyDefaultDistributionBucketNum(info); LOG.info("modify table[{}] default bucket num to {}", olapTable.getName(), bucketNum); } olapTable.writeUnlock(); }
throw new DdlException("Cannot assign hash distribution with different distribution cols. "
public void modifyDefaultDistributionBucketNum(Database db, OlapTable olapTable, ModifyDistributionClause modifyDistributionClause) throws DdlException { olapTable.writeLock(); try { if (olapTable.isColocateTable()) { throw new DdlException("Cannot change default bucket number of colocate table."); } if (olapTable.getPartitionInfo().getType() != PartitionType.RANGE) { throw new DdlException("Only support change partitioned table's distribution."); } DistributionInfo defaultDistributionInfo = olapTable.getDefaultDistributionInfo(); if (defaultDistributionInfo.getType() != DistributionInfoType.HASH) { throw new DdlException("Cannot change default bucket number of distribution type " + defaultDistributionInfo.getType()); } DistributionDesc distributionDesc = modifyDistributionClause.getDistributionDesc(); DistributionInfo distributionInfo = null; List<Column> baseSchema = olapTable.getBaseSchema(); if (distributionDesc != null) { distributionInfo = distributionDesc.toDistributionInfo(baseSchema); if (distributionInfo.getType() != DistributionInfoType.HASH) { throw new DdlException("Cannot change distribution type to " + distributionInfo.getType()); } HashDistributionInfo hashDistributionInfo = (HashDistributionInfo) distributionInfo; List<Column> newDistriCols = hashDistributionInfo.getDistributionColumns(); List<Column> defaultDistriCols = ((HashDistributionInfo) defaultDistributionInfo).getDistributionColumns(); if (!newDistriCols.equals(defaultDistriCols)) { throw new DdlException("Cannot assign hash distribution with different distribution cols. 
" + "default is: " + defaultDistriCols); } int bucketNum = hashDistributionInfo.getBucketNum(); if (bucketNum <= 0) { throw new DdlException("Cannot assign hash distribution buckets less than 1"); } defaultDistributionInfo.setBucketNum(bucketNum); ModifyTableDefaultDistributionBucketNumOperationLog info = new ModifyTableDefaultDistributionBucketNumOperationLog(db.getId(), olapTable.getId(), bucketNum); editLog.logModifyDefaultDistributionBucketNum(info); LOG.info("modify table[{}] default bucket num to {}", olapTable.getName(), bucketNum); } } finally { olapTable.writeUnlock(); } }
class SingletonHolder { private static final Catalog INSTANCE = new Catalog(); }
class SingletonHolder { private static final Catalog INSTANCE = new Catalog(); }
Sorry for a bit late response. > 1. There are two types of scenarios where we enqueue to the mailbox (1) to handle fatal exceptions and (2) to add to the buffer any failed request entries. I believe, these should take priority over flushing new items? Apart of that, (2) has also a very important purpose to mark decrease the number of "in flight" requests, so that `AsyncSinkWriter` can induce back pressure to upstream operators. > I do agree the line is dubious, perhaps, this is more appropriate: (the behaviour would remain identical) ``` while (inFlightRequestsCount > 0) { mailboxExecutor.yield(); } ``` I think that would be better, BUT that would block writes and backpressure upstream operators even if there is just a single element still "in flight" and the internal buffers are mostly empty. I'm not sure if that's the intention? Can you elaborate what was wrong with the original condition ``` while (bufferedRequestEntries.size() >= maxBufferedRequests) { mailboxExecutor.yield(); } ``` ? I mean I would normally expected the following behaviour. Given some max buffer capacity, having two thresholds and the following conditions: a) start flushing if `size >= threshold_1` b) block new writes/flushes once if `size == max_capacity` c) unblock if `size < threshold_2` Where for example `AsyncWaitOperator` implicitly defines `threshold_1 = 1` and `threshold_2 = max_capacity`. But having those thresholds as some fractions of max capacity could improve performance by limiting the amount of small write requests. With the `inFlightRequestsCount > 0` condition to unblock, we are actually guaranteeing that the operator will be always wasting a bit of IO resources, as in this sequence of events: 1. external system finishing the last IO write 2. information about this reaching Flink, decreasing `inFlightRequestsCount` to `0`. 3. Flink sending another write request to the external system 4. external system starting it's own IO write. there will be a bit of delay between 1. and 4. 
> Perhaps I'm mistaken, but I don't believe we have a busy loop here. i.e. if mailboxExecutor.tryYield() returns true, there is some work to be elsewhere in the mailbox, then we perform that. Otherwise, it will return false and the loop will end. I can't see where CPU resources is being wasted? Yes, this particular snippet in this PR doesn't have busy waiting. The version in the current master has actually this exact problem. However as far as I understand it, `AsyncSinkWriter#flush` is doing the busy waiting both on master and in this PR? On a side note, why do we need to have this logic spread among different methods? Can not we have one single check ``` while (someCondition()) { mailboxExecutor.yield(); } ``` ?
public void write(InputT element, Context context) throws IOException, InterruptedException { while (mailboxExecutor.tryYield()) {} while (bufferedRequestEntries.size() >= maxBufferedRequests) { flush(); } addEntryToBuffer(elementConverter.apply(element, context), false); nonBlockingFlush(); }
while (mailboxExecutor.tryYield()) {}
public void write(InputT element, Context context) throws IOException, InterruptedException { while (bufferedRequestEntries.size() >= maxBufferedRequests) { flush(); } addEntryToBuffer(elementConverter.apply(element, context), false); nonBlockingFlush(); }
class AsyncSinkWriter<InputT, RequestEntryT extends Serializable> implements SinkWriter<InputT, Void, Collection<RequestEntryT>> { private final MailboxExecutor mailboxExecutor; private final Sink.ProcessingTimeService timeService; /* The timestamp of the previous batch of records was sent from this sink. */ private long lastSendTimestamp = 0; /* The timestamp of the response to the previous request from this sink. */ private long ackTime = Long.MAX_VALUE; /* The sink writer metric group. */ private final SinkWriterMetricGroup metrics; /* Counter for number of bytes this sink has attempted to send to the destination. */ private final Counter numBytesOutCounter; /* Counter for number of records this sink has attempted to send to the destination. */ private final Counter numRecordsOutCounter; private final int maxBatchSize; private final int maxInFlightRequests; private final int maxBufferedRequests; private final long maxBatchSizeInBytes; private final long maxTimeInBufferMS; private final long maxRecordSizeInBytes; /** * The ElementConverter provides a mapping between for the elements of a stream to request * entries that can be sent to the destination. * * <p>The resulting request entry is buffered by the AsyncSinkWriter and sent to the destination * when the {@code submitRequestEntries} method is invoked. */ private final ElementConverter<InputT, RequestEntryT> elementConverter; /** * Buffer to hold request entries that should be persisted into the destination, along with its * size in bytes. * * <p>A request entry contain all relevant details to make a call to the destination. Eg, for * Kinesis Data Streams a request entry contains the payload and partition key. * * <p>It seems more natural to buffer InputT, ie, the events that should be persisted, rather * than RequestEntryT. However, in practice, the response of a failed request call can make it * very hard, if not impossible, to reconstruct the original event. 
It is much easier, to just * construct a new (retry) request entry from the response and add that back to the queue for * later retry. */ private final Deque<RequestEntryWrapper<RequestEntryT>> bufferedRequestEntries = new ArrayDeque<>(); /** * Tracks all pending async calls that have been executed since the last checkpoint. Calls that * completed (successfully or unsuccessfully) are automatically decrementing the counter. Any * request entry that was not successfully persisted needs to be handled and retried by the * logic in {@code submitRequestsToApi}. * * <p>There is a limit on the number of concurrent (async) requests that can be handled by the * client library. This limit is enforced by checking the queue size before accepting a new * element into the queue. * * <p>To complete a checkpoint, we need to make sure that no requests are in flight, as they may * fail, which could then lead to data loss. */ private int inFlightRequestsCount; /** * Tracks the cumulative size of all elements in {@code bufferedRequestEntries} to facilitate * the criterion for flushing after {@code maxBatchSizeInBytes} is reached. */ private double bufferedRequestEntriesTotalSizeInBytes; private boolean existsActiveTimerCallback = false; /** * The {@code accept} method should be called on this Consumer if the processing of the {@code * requestEntries} raises an exception that should not be retried. Specifically, any action that * we are sure will result in the same exception no matter how many times we retry should raise * a {@code RuntimeException} here. For example, wrong user credentials. However, it is possible * intermittent failures will recover, e.g. flaky network connections, in which case, some other * mechanism may be more appropriate. */ private final Consumer<Exception> fatalExceptionCons; /** * This method specifies how to persist buffered request entries into the destination. It is * implemented when support for a new destination is added. 
* * <p>The method is invoked with a set of request entries according to the buffering hints (and * the valid limits of the destination). The logic then needs to create and execute the request * asynchronously against the destination (ideally by batching together multiple request entries * to increase efficiency). The logic also needs to identify individual request entries that * were not persisted successfully and resubmit them using the {@code requestResult} callback. * * <p>From a threading perspective, the mailbox thread will call this method and initiate the * asynchronous request to persist the {@code requestEntries}. NOTE: The client must support * asynchronous requests and the method called to persist the records must asynchronously * execute and return a future with the results of that request. A thread from the destination * client thread pool should complete the request and submit the failed entries that should be * retried. The {@code requestResult} will then trigger the mailbox thread to requeue the * unsuccessful elements. * * <p>An example implementation of this method is included: * * <pre>{@code * @Override * protected void submitRequestEntries * (List<RequestEntryT> records, Consumer<Collection<RequestEntryT>> requestResult) { * Future<Response> response = destinationClient.putRecords(records); * response.whenComplete( * (response, error) -> { * if(error){ * List<RequestEntryT> retryableFailedRecords = getRetryableFailed(response); * requestResult.accept(retryableFailedRecords); * }else{ * requestResult.accept(Collections.emptyList()); * } * } * ); * } * * }</pre> * * <p>During checkpointing, the sink needs to ensure that there are no outstanding in-flight * requests. * * @param requestEntries a set of request entries that should be sent to the destination * @param requestResult the {@code accept} method should be called on this Consumer once the * processing of the {@code requestEntries} are complete. 
Any entries that encountered * difficulties in persisting should be re-queued through {@code requestResult} by including * that element in the collection of {@code RequestEntryT}s passed to the {@code accept} * method. All other elements are assumed to have been successfully persisted. */ protected abstract void submitRequestEntries( List<RequestEntryT> requestEntries, Consumer<List<RequestEntryT>> requestResult); /** * This method allows the getting of the size of a {@code RequestEntryT} in bytes. The size in * this case is measured as the total bytes that is written to the destination as a result of * persisting this particular {@code RequestEntryT} rather than the serialized length (which may * be the same). * * @param requestEntry the requestEntry for which we want to know the size * @return the size of the requestEntry, as defined previously */ protected abstract long getSizeInBytes(RequestEntryT requestEntry); public AsyncSinkWriter( ElementConverter<InputT, RequestEntryT> elementConverter, Sink.InitContext context, int maxBatchSize, int maxInFlightRequests, int maxBufferedRequests, long maxBatchSizeInBytes, long maxTimeInBufferMS, long maxRecordSizeInBytes) { this.elementConverter = elementConverter; this.mailboxExecutor = context.getMailboxExecutor(); this.timeService = context.getProcessingTimeService(); Preconditions.checkNotNull(elementConverter); Preconditions.checkArgument(maxBatchSize > 0); Preconditions.checkArgument(maxBufferedRequests > 0); Preconditions.checkArgument(maxInFlightRequests > 0); Preconditions.checkArgument(maxBatchSizeInBytes > 0); Preconditions.checkArgument(maxTimeInBufferMS > 0); Preconditions.checkArgument(maxRecordSizeInBytes > 0); Preconditions.checkArgument( maxBufferedRequests > maxBatchSize, "The maximum number of requests that may be buffered should be strictly" + " greater than the maximum number of requests per batch."); Preconditions.checkArgument( maxBatchSizeInBytes >= maxRecordSizeInBytes, "The maximum allowed size in 
bytes per flush must be greater than or equal to the" + " maximum allowed size in bytes of a single record."); this.maxBatchSize = maxBatchSize; this.maxInFlightRequests = maxInFlightRequests; this.maxBufferedRequests = maxBufferedRequests; this.maxBatchSizeInBytes = maxBatchSizeInBytes; this.maxTimeInBufferMS = maxTimeInBufferMS; this.maxRecordSizeInBytes = maxRecordSizeInBytes; this.inFlightRequestsCount = 0; this.bufferedRequestEntriesTotalSizeInBytes = 0; this.metrics = context.metricGroup(); this.metrics.setCurrentSendTimeGauge(() -> this.ackTime - this.lastSendTimestamp); this.numBytesOutCounter = this.metrics.getIOMetricGroup().getNumBytesOutCounter(); this.numRecordsOutCounter = this.metrics.getIOMetricGroup().getNumRecordsOutCounter(); this.fatalExceptionCons = exception -> mailboxExecutor.execute( () -> { throw exception; }, "A fatal exception occurred in the sink that cannot be recovered from or should not be retried."); } private void registerCallback() { Sink.ProcessingTimeService.ProcessingTimeCallback ptc = instant -> { existsActiveTimerCallback = false; while (!bufferedRequestEntries.isEmpty()) { flush(); } }; timeService.registerProcessingTimer( timeService.getCurrentProcessingTime() + maxTimeInBufferMS, ptc); existsActiveTimerCallback = true; } @Override /** * Determines if a call to flush will be non-blocking (i.e. {@code inFlightRequestsCount} is * strictly smaller than {@code maxInFlightRequests}). 
Also requires one of the following * requirements to be met: * * <ul> * <li>The number of elements buffered is greater than or equal to the {@code maxBatchSize} * <li>The sum of the size in bytes of all records in the buffer is greater than or equal to * {@code maxBatchSizeInBytes} * </ul> */ private void nonBlockingFlush() { while (inFlightRequestsCount < maxInFlightRequests && (bufferedRequestEntries.size() >= maxBatchSize || bufferedRequestEntriesTotalSizeInBytes >= maxBatchSizeInBytes)) { flush(); } } /** * Persists buffered RequestsEntries into the destination by invoking {@code * submitRequestEntries} with batches according to the user specified buffering hints. * * <p>The method blocks if too many async requests are in flight. */ private void flush() { while (inFlightRequestsCount >= maxInFlightRequests) { mailboxExecutor.tryYield(); } List<RequestEntryT> batch = createNextAvailableBatch(); if (batch.size() == 0) { return; } long timestampOfRequest = System.currentTimeMillis(); Consumer<List<RequestEntryT>> requestResult = failedRequestEntries -> mailboxExecutor.execute( () -> completeRequest(failedRequestEntries, timestampOfRequest), "Mark in-flight request as completed and requeue %d request entries", failedRequestEntries.size()); inFlightRequestsCount++; submitRequestEntries(batch, requestResult); } /** * Creates the next batch of request entries while respecting the {@code maxBatchSize} and * {@code maxBatchSizeInBytes}. Also adds these to the metrics counters. 
*/ private List<RequestEntryT> createNextAvailableBatch() { int batchSize = Math.min(maxBatchSize, bufferedRequestEntries.size()); List<RequestEntryT> batch = new ArrayList<>(batchSize); int batchSizeBytes = 0; for (int i = 0; i < batchSize; i++) { long requestEntrySize = bufferedRequestEntries.peek().getSize(); if (batchSizeBytes + requestEntrySize > maxBatchSizeInBytes) { break; } RequestEntryWrapper<RequestEntryT> elem = bufferedRequestEntries.remove(); batch.add(elem.getRequestEntry()); bufferedRequestEntriesTotalSizeInBytes -= requestEntrySize; batchSizeBytes += requestEntrySize; } numRecordsOutCounter.inc(batch.size()); numBytesOutCounter.inc(batchSizeBytes); return batch; } /** * Marks an in-flight request as completed and prepends failed requestEntries back to the * internal requestEntry buffer for later retry. * * @param failedRequestEntries requestEntries that need to be retried */ private void completeRequest(List<RequestEntryT> failedRequestEntries, long requestStartTime) { lastSendTimestamp = requestStartTime; ackTime = System.currentTimeMillis(); inFlightRequestsCount--; ListIterator<RequestEntryT> iterator = failedRequestEntries.listIterator(failedRequestEntries.size()); while (iterator.hasPrevious()) { addEntryToBuffer(iterator.previous(), true); } } private void addEntryToBuffer(RequestEntryT entry, boolean insertAtHead) { if (bufferedRequestEntries.isEmpty() && !existsActiveTimerCallback) { registerCallback(); } RequestEntryWrapper<RequestEntryT> wrappedEntry = new RequestEntryWrapper<>(entry, getSizeInBytes(entry)); if (wrappedEntry.getSize() > maxRecordSizeInBytes) { throw new IllegalArgumentException( String.format( "The request entry sent to the buffer was of size [%s], when the maxRecordSizeInBytes was set to [%s].", wrappedEntry.getSize(), maxRecordSizeInBytes)); } if (insertAtHead) { bufferedRequestEntries.addFirst(wrappedEntry); } else { bufferedRequestEntries.add(wrappedEntry); } bufferedRequestEntriesTotalSizeInBytes += 
wrappedEntry.getSize(); } /** * In flight requests will be retried if the sink is still healthy. But if in-flight requests * fail after a checkpoint has been triggered and Flink needs to recover from the checkpoint, * the (failed) in-flight requests are gone and cannot be retried. Hence, there cannot be any * outstanding in-flight requests when a commit is initialized. * * <p>To this end, all in-flight requests need to completed before proceeding with the commit. */ @Override public List<Void> prepareCommit(boolean flush) { while (inFlightRequestsCount > 0 || (bufferedRequestEntries.size() > 0 && flush)) { mailboxExecutor.tryYield(); if (flush) { flush(); } } return Collections.emptyList(); } /** * All in-flight requests that are relevant for the snapshot have been completed, but there may * still be request entries in the internal buffers that are yet to be sent to the endpoint. * These request entries are stored in the snapshot state so that they don't get lost in case of * a failure/restart of the application. */ @Override public List<Collection<RequestEntryT>> snapshotState() { return Arrays.asList( bufferedRequestEntries.stream() .map(RequestEntryWrapper::getRequestEntry) .collect(Collectors.toList())); } @Override public void close() {} protected Consumer<Exception> getFatalExceptionCons() { return fatalExceptionCons; } }
class AsyncSinkWriter<InputT, RequestEntryT extends Serializable> implements StatefulSink.StatefulSinkWriter<InputT, BufferedRequestState<RequestEntryT>> { private static final int INFLIGHT_MESSAGES_LIMIT_INCREASE_RATE = 10; private static final double INFLIGHT_MESSAGES_LIMIT_DECREASE_FACTOR = 0.5; private final MailboxExecutor mailboxExecutor; private final ProcessingTimeService timeService; /* The timestamp of the previous batch of records was sent from this sink. */ private long lastSendTimestamp = 0; /* The timestamp of the response to the previous request from this sink. */ private long ackTime = Long.MAX_VALUE; /* The sink writer metric group. */ private final SinkWriterMetricGroup metrics; /* Counter for number of bytes this sink has attempted to send to the destination. */ private final Counter numBytesOutCounter; /* Counter for number of records this sink has attempted to send to the destination. */ private final Counter numRecordsOutCounter; /** * Rate limiting strategy {@code inflightMessages} at any given time, {@code * rateLimitingStrategy.getRateLimit()} is used to adjust the sink's throughput not to exceed * destination's throttle rate. * * <p>throttled requests should update limit by calling {@code rateLimitingStrategy.scaleDown()} * and successful requests should update by calling {@code rateLimitingStrategy.scaleUp()} * * <p>Failure of throttled request decreases limit resulting in yielding on fewer number of * messages. */ private final AIMDRateLimitingStrategy rateLimitingStrategy; private final int maxBatchSize; private final int maxInFlightRequests; private final int maxBufferedRequests; private final long maxBatchSizeInBytes; private final long maxTimeInBufferMS; private final long maxRecordSizeInBytes; /** * The ElementConverter provides a mapping between for the elements of a stream to request * entries that can be sent to the destination. 
* * <p>The resulting request entry is buffered by the AsyncSinkWriter and sent to the destination * when the {@code submitRequestEntries} method is invoked. */ private final ElementConverter<InputT, RequestEntryT> elementConverter; /** * Buffer to hold request entries that should be persisted into the destination, along with its * size in bytes. * * <p>A request entry contain all relevant details to make a call to the destination. Eg, for * Kinesis Data Streams a request entry contains the payload and partition key. * * <p>It seems more natural to buffer InputT, ie, the events that should be persisted, rather * than RequestEntryT. However, in practice, the response of a failed request call can make it * very hard, if not impossible, to reconstruct the original event. It is much easier, to just * construct a new (retry) request entry from the response and add that back to the queue for * later retry. */ private final Deque<RequestEntryWrapper<RequestEntryT>> bufferedRequestEntries = new ArrayDeque<>(); /** * Tracks all pending async calls that have been executed since the last checkpoint. Calls that * completed (successfully or unsuccessfully) are automatically decrementing the counter. Any * request entry that was not successfully persisted needs to be handled and retried by the * logic in {@code submitRequestsToApi}. * * <p>There is a limit on the number of concurrent (async) requests that can be handled by the * client library. This limit is enforced by checking the queue size before accepting a new * element into the queue. * * <p>To complete a checkpoint, we need to make sure that no requests are in flight, as they may * fail, which could then lead to data loss. */ private int inFlightRequestsCount; /** * Tracks number of messages (request entries) in the inflight requests. This variable is used * to control rate of outbound messages flow as {@code inFlightMessages} should not exceed * {@code rateLimitingStrategy}. 
* * <p>{@code inFlightMessages} should also be consistent with {@code inFlightRequestsCount} * where {@code inFlightMessages} should never exceed {@code inFlightRequestsCount} at any time. */ private int inFlightMessages; /** * Tracks the cumulative size of all elements in {@code bufferedRequestEntries} to facilitate * the criterion for flushing after {@code maxBatchSizeInBytes} is reached. */ private double bufferedRequestEntriesTotalSizeInBytes; private boolean existsActiveTimerCallback = false; /** * The {@code accept} method should be called on this Consumer if the processing of the {@code * requestEntries} raises an exception that should not be retried. Specifically, any action that * we are sure will result in the same exception no matter how many times we retry should raise * a {@code RuntimeException} here. For example, wrong user credentials. However, it is possible * intermittent failures will recover, e.g. flaky network connections, in which case, some other * mechanism may be more appropriate. */ private final Consumer<Exception> fatalExceptionCons; /** * This method specifies how to persist buffered request entries into the destination. It is * implemented when support for a new destination is added. * * <p>The method is invoked with a set of request entries according to the buffering hints (and * the valid limits of the destination). The logic then needs to create and execute the request * asynchronously against the destination (ideally by batching together multiple request entries * to increase efficiency). The logic also needs to identify individual request entries that * were not persisted successfully and resubmit them using the {@code requestResult} callback. * * <p>From a threading perspective, the mailbox thread will call this method and initiate the * asynchronous request to persist the {@code requestEntries}. 
NOTE: The client must support * asynchronous requests and the method called to persist the records must asynchronously * execute and return a future with the results of that request. A thread from the destination * client thread pool should complete the request and submit the failed entries that should be * retried. The {@code requestResult} will then trigger the mailbox thread to requeue the * unsuccessful elements. * * <p>An example implementation of this method is included: * * <pre>{@code * @Override * protected void submitRequestEntries * (List<RequestEntryT> records, Consumer<Collection<RequestEntryT>> requestResult) { * Future<Response> response = destinationClient.putRecords(records); * response.whenComplete( * (response, error) -> { * if(error){ * List<RequestEntryT> retryableFailedRecords = getRetryableFailed(response); * requestResult.accept(retryableFailedRecords); * }else{ * requestResult.accept(Collections.emptyList()); * } * } * ); * } * * }</pre> * * <p>During checkpointing, the sink needs to ensure that there are no outstanding in-flight * requests. * * @param requestEntries a set of request entries that should be sent to the destination * @param requestResult the {@code accept} method should be called on this Consumer once the * processing of the {@code requestEntries} are complete. Any entries that encountered * difficulties in persisting should be re-queued through {@code requestResult} by including * that element in the collection of {@code RequestEntryT}s passed to the {@code accept} * method. All other elements are assumed to have been successfully persisted. */ protected abstract void submitRequestEntries( List<RequestEntryT> requestEntries, Consumer<List<RequestEntryT>> requestResult); /** * This method allows the getting of the size of a {@code RequestEntryT} in bytes. 
The size in * this case is measured as the total bytes that is written to the destination as a result of * persisting this particular {@code RequestEntryT} rather than the serialized length (which may * be the same). * * @param requestEntry the requestEntry for which we want to know the size * @return the size of the requestEntry, as defined previously */ protected abstract long getSizeInBytes(RequestEntryT requestEntry); public AsyncSinkWriter( ElementConverter<InputT, RequestEntryT> elementConverter, Sink.InitContext context, int maxBatchSize, int maxInFlightRequests, int maxBufferedRequests, long maxBatchSizeInBytes, long maxTimeInBufferMS, long maxRecordSizeInBytes) { this( elementConverter, context, maxBatchSize, maxInFlightRequests, maxBufferedRequests, maxBatchSizeInBytes, maxTimeInBufferMS, maxRecordSizeInBytes, Collections.emptyList()); } public AsyncSinkWriter( ElementConverter<InputT, RequestEntryT> elementConverter, Sink.InitContext context, int maxBatchSize, int maxInFlightRequests, int maxBufferedRequests, long maxBatchSizeInBytes, long maxTimeInBufferMS, long maxRecordSizeInBytes, Collection<BufferedRequestState<RequestEntryT>> states) { this.elementConverter = elementConverter; this.mailboxExecutor = context.getMailboxExecutor(); this.timeService = context.getProcessingTimeService(); Preconditions.checkNotNull(elementConverter); Preconditions.checkArgument(maxBatchSize > 0); Preconditions.checkArgument(maxBufferedRequests > 0); Preconditions.checkArgument(maxInFlightRequests > 0); Preconditions.checkArgument(maxBatchSizeInBytes > 0); Preconditions.checkArgument(maxTimeInBufferMS > 0); Preconditions.checkArgument(maxRecordSizeInBytes > 0); Preconditions.checkArgument( maxBufferedRequests > maxBatchSize, "The maximum number of requests that may be buffered should be strictly" + " greater than the maximum number of requests per batch."); Preconditions.checkArgument( maxBatchSizeInBytes >= maxRecordSizeInBytes, "The maximum allowed size in bytes per 
flush must be greater than or equal to the" + " maximum allowed size in bytes of a single record."); this.maxBatchSize = maxBatchSize; this.maxInFlightRequests = maxInFlightRequests; this.maxBufferedRequests = maxBufferedRequests; this.maxBatchSizeInBytes = maxBatchSizeInBytes; this.maxTimeInBufferMS = maxTimeInBufferMS; this.maxRecordSizeInBytes = maxRecordSizeInBytes; this.inFlightRequestsCount = 0; this.bufferedRequestEntriesTotalSizeInBytes = 0; this.inFlightMessages = 0; this.rateLimitingStrategy = new AIMDRateLimitingStrategy( INFLIGHT_MESSAGES_LIMIT_INCREASE_RATE, INFLIGHT_MESSAGES_LIMIT_DECREASE_FACTOR, maxBatchSize * maxInFlightRequests, maxBatchSize * maxInFlightRequests); this.metrics = context.metricGroup(); this.metrics.setCurrentSendTimeGauge(() -> this.ackTime - this.lastSendTimestamp); this.numBytesOutCounter = this.metrics.getIOMetricGroup().getNumBytesOutCounter(); this.numRecordsOutCounter = this.metrics.getIOMetricGroup().getNumRecordsOutCounter(); this.fatalExceptionCons = exception -> mailboxExecutor.execute( () -> { throw exception; }, "A fatal exception occurred in the sink that cannot be recovered from or should not be retried."); initializeState(states); } private void registerCallback() { ProcessingTimeService.ProcessingTimeCallback ptc = instant -> { existsActiveTimerCallback = false; while (!bufferedRequestEntries.isEmpty()) { flush(); } }; timeService.registerTimer(timeService.getCurrentProcessingTime() + maxTimeInBufferMS, ptc); existsActiveTimerCallback = true; } @Override /** * Determines if a call to flush will be non-blocking (i.e. {@code inFlightRequestsCount} is * strictly smaller than {@code maxInFlightRequests}). 
Also requires one of the following * requirements to be met: * * <ul> * <li>The number of elements buffered is greater than or equal to the {@code maxBatchSize} * <li>The sum of the size in bytes of all records in the buffer is greater than or equal to * {@code maxBatchSizeInBytes} * </ul> */ private void nonBlockingFlush() throws InterruptedException { while (!isInFlightRequestOrMessageLimitExceeded() && (bufferedRequestEntries.size() >= getNextBatchSizeLimit() || bufferedRequestEntriesTotalSizeInBytes >= maxBatchSizeInBytes)) { flush(); } } /** * Determines if the sink should block and complete existing in flight requests before it may * prudently create any new ones. This is exactly determined by if the number of requests * currently in flight exceeds the maximum supported by the sink OR if the number of in flight * messages exceeds the maximum determined to be appropriate by the rate limiting strategy. */ private boolean isInFlightRequestOrMessageLimitExceeded() { return inFlightRequestsCount >= maxInFlightRequests || inFlightMessages >= rateLimitingStrategy.getRateLimit(); } /** * Persists buffered RequestsEntries into the destination by invoking {@code * submitRequestEntries} with batches according to the user specified buffering hints. * * <p>The method blocks if too many async requests are in flight. 
*/ private void flush() throws InterruptedException { while (isInFlightRequestOrMessageLimitExceeded()) { mailboxExecutor.yield(); } List<RequestEntryT> batch = createNextAvailableBatch(); int batchSize = batch.size(); if (batch.size() == 0) { return; } long timestampOfRequest = System.currentTimeMillis(); Consumer<List<RequestEntryT>> requestResult = failedRequestEntries -> mailboxExecutor.execute( () -> completeRequest( failedRequestEntries, batchSize, timestampOfRequest), "Mark in-flight request as completed and requeue %d request entries", failedRequestEntries.size()); inFlightRequestsCount++; inFlightMessages += batchSize; submitRequestEntries(batch, requestResult); } /** * Creates the next batch of request entries while respecting the {@code maxBatchSize} and * {@code maxBatchSizeInBytes}. Also adds these to the metrics counters. */ private List<RequestEntryT> createNextAvailableBatch() { int batchSize = Math.min(getNextBatchSizeLimit(), bufferedRequestEntries.size()); List<RequestEntryT> batch = new ArrayList<>(batchSize); int batchSizeBytes = 0; for (int i = 0; i < batchSize; i++) { long requestEntrySize = bufferedRequestEntries.peek().getSize(); if (batchSizeBytes + requestEntrySize > maxBatchSizeInBytes) { break; } RequestEntryWrapper<RequestEntryT> elem = bufferedRequestEntries.remove(); batch.add(elem.getRequestEntry()); bufferedRequestEntriesTotalSizeInBytes -= requestEntrySize; batchSizeBytes += requestEntrySize; } numRecordsOutCounter.inc(batch.size()); numBytesOutCounter.inc(batchSizeBytes); return batch; } /** * Marks an in-flight request as completed and prepends failed requestEntries back to the * internal requestEntry buffer for later retry. 
* * @param failedRequestEntries requestEntries that need to be retried */ private void completeRequest( List<RequestEntryT> failedRequestEntries, int batchSize, long requestStartTime) throws InterruptedException { lastSendTimestamp = requestStartTime; ackTime = System.currentTimeMillis(); inFlightRequestsCount--; inFlightMessages -= batchSize; updateInFlightMessagesLimit(failedRequestEntries.size() == 0); ListIterator<RequestEntryT> iterator = failedRequestEntries.listIterator(failedRequestEntries.size()); while (iterator.hasPrevious()) { addEntryToBuffer(iterator.previous(), true); } nonBlockingFlush(); } private void updateInFlightMessagesLimit(boolean isSuccessfulRequest) { if (isSuccessfulRequest) { rateLimitingStrategy.scaleUp(); } else { rateLimitingStrategy.scaleDown(); } } private void addEntryToBuffer(RequestEntryT entry, boolean insertAtHead) { if (bufferedRequestEntries.isEmpty() && !existsActiveTimerCallback) { registerCallback(); } RequestEntryWrapper<RequestEntryT> wrappedEntry = new RequestEntryWrapper<>(entry, getSizeInBytes(entry)); if (wrappedEntry.getSize() > maxRecordSizeInBytes) { throw new IllegalArgumentException( String.format( "The request entry sent to the buffer was of size [%s], when the maxRecordSizeInBytes was set to [%s].", wrappedEntry.getSize(), maxRecordSizeInBytes)); } if (insertAtHead) { bufferedRequestEntries.addFirst(wrappedEntry); } else { bufferedRequestEntries.add(wrappedEntry); } bufferedRequestEntriesTotalSizeInBytes += wrappedEntry.getSize(); } /** * In flight requests will be retried if the sink is still healthy. But if in-flight requests * fail after a checkpoint has been triggered and Flink needs to recover from the checkpoint, * the (failed) in-flight requests are gone and cannot be retried. Hence, there cannot be any * outstanding in-flight requests when a commit is initialized. * * <p>To this end, all in-flight requests need to completed before proceeding with the commit. 
*/ @Override public void flush(boolean flush) throws InterruptedException { while (inFlightRequestsCount > 0 || (bufferedRequestEntries.size() > 0 && flush)) { yieldIfThereExistsInFlightRequests(); if (flush) { flush(); } } } private void yieldIfThereExistsInFlightRequests() throws InterruptedException { if (inFlightRequestsCount > 0) { mailboxExecutor.yield(); } } /** * All in-flight requests that are relevant for the snapshot have been completed, but there may * still be request entries in the internal buffers that are yet to be sent to the endpoint. * These request entries are stored in the snapshot state so that they don't get lost in case of * a failure/restart of the application. */ @Override public List<BufferedRequestState<RequestEntryT>> snapshotState(long checkpointId) { return Collections.singletonList(new BufferedRequestState<>((bufferedRequestEntries))); } private void initializeState(Collection<BufferedRequestState<RequestEntryT>> states) { for (BufferedRequestState<RequestEntryT> state : states) { initializeState(state); } } private void initializeState(BufferedRequestState<RequestEntryT> state) { this.bufferedRequestEntries.addAll(state.getBufferedRequestEntries()); for (RequestEntryWrapper<RequestEntryT> wrapper : bufferedRequestEntries) { if (wrapper.getSize() > maxRecordSizeInBytes) { throw new IllegalStateException( String.format( "State contains record of size %d which exceeds sink maximum record size %d.", wrapper.getSize(), maxRecordSizeInBytes)); } } this.bufferedRequestEntriesTotalSizeInBytes += state.getStateSize(); } @Override public void close() {} private int getNextBatchSizeLimit() { return Math.min(maxBatchSize, rateLimitingStrategy.getRateLimit()); } protected Consumer<Exception> getFatalExceptionCons() { return fatalExceptionCons; } }
I think by two steps Kenn meant that we could merge a PR that did not support processing-time timers and then a second PR that added that support.
/**
 * Defaults {@code outputTimestamp} to the timer's firing time and validates it.
 *
 * <p>Rejects an explicitly supplied output timestamp for any non-event-time timer; for
 * event-time timers, verifies the output timestamp is not after the window's expiration
 * ({@code window.maxTimestamp() + allowedLateness}).
 */
private void setAndVerifyOutputTimestamp() {
  // An explicit output timestamp is only meaningful in the event-time domain.
  if (outputTimestamp != null && !TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) {
    throw new IllegalStateException("Cannot set outputTimestamp in processing time domain.");
  }
  // No explicit output timestamp: default to the timer's own firing time.
  if (outputTimestamp == null) {
    outputTimestamp = target;
  }
  if (TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) {
    // Window expiry is the latest instant the window can still emit output.
    Instant windowExpiry = window.maxTimestamp().plus(allowedLateness);
    // Note: this variant validates the (possibly defaulted) outputTimestamp itself.
    checkArgument(
        !outputTimestamp.isAfter(windowExpiry),
        "Attempted to set event time timer that outputs for %s but that is"
            + " after the expiration of window %s",
        outputTimestamp,
        windowExpiry);
  }
}
throw new IllegalStateException("Cannot set outputTimestamp in processing time domain.");
/**
 * Defaults {@code outputTimestamp} to the timer's firing time and validates the firing time.
 *
 * <p>Rejects an explicitly supplied output timestamp for any non-event-time timer; for
 * event-time timers, verifies the firing time {@code target} is not after the window's
 * expiration ({@code window.maxTimestamp() + allowedLateness}).
 */
private void setAndVerifyOutputTimestamp() {
  // An explicit output timestamp is only meaningful in the event-time domain.
  if (outputTimestamp != null && !TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) {
    throw new IllegalStateException("Cannot set outputTimestamp in processing time domain.");
  }
  // No explicit output timestamp: default to the timer's own firing time.
  if (outputTimestamp == null) {
    outputTimestamp = target;
  }
  if (TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) {
    // Window expiry is the latest instant the window can still emit output.
    Instant windowExpiry = window.maxTimestamp().plus(allowedLateness);
    // Note: unlike the sibling variant, this checks the firing time `target`, not the
    // outputTimestamp, against window expiry.
    checkArgument(
        !target.isAfter(windowExpiry),
        "Attempted to set event time timer that outputs for %s but that is"
            + " after the expiration of window %s",
        target,
        windowExpiry);
  }
}
// Timer implementation backed by a TimerInternals. Supports absolute set(Instant), relative
// setRelative() with offset/align periods, and clamps event-time targets to the window's
// garbage-collection time via minTargetAndGcTime before registering the underlying timer.
// NOTE(review): this blob is extraction-collapsed source and appears corrupted — the exception
// message in verifyAbsoluteTimeDomain is cut off after `" Use` (unterminated string literal),
// and a Javadoc block below it is scrambled. Verify against the original upstream file.
class TimerInternalsTimer implements Timer { private final TimerInternals timerInternals; private final BoundedWindow window; private final StateNamespace namespace; private final String timerId; private final TimerSpec spec; private Instant target; private Instant outputTimestamp; private Duration period = Duration.ZERO; private Duration offset = Duration.ZERO; public TimerInternalsTimer( BoundedWindow window, StateNamespace namespace, String timerId, TimerSpec spec, TimerInternals timerInternals) { this.window = window; this.namespace = namespace; this.timerId = timerId; this.spec = spec; this.timerInternals = timerInternals; } @Override public void set(Instant target) { this.target = target; verifyAbsoluteTimeDomain(); setAndVerifyOutputTimestamp(); setUnderlyingTimer(); } @Override public void setRelative() { Instant now = getCurrentTime(); if (period.equals(Duration.ZERO)) { target = now.plus(offset); } else { long millisSinceStart = now.plus(offset).getMillis() % period.getMillis(); target = millisSinceStart == 0 ? now : now.plus(period).minus(millisSinceStart); } target = minTargetAndGcTime(target); setAndVerifyOutputTimestamp(); setUnderlyingTimer(); } @Override public Timer offset(Duration offset) { this.offset = offset; return this; } @Override public Timer align(Duration period) { this.period = period; return this; } /** * For event time timers the target time should be prior to window GC time. So it return * min(time to set, GC Time of window). */ private Instant minTargetAndGcTime(Instant target) { if (TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) { Instant windowExpiry = LateDataUtils.garbageCollectionTime(window, allowedLateness); if (target.isAfter(windowExpiry)) { return windowExpiry; } } return target; } @Override public Timer withOutputTimestamp(Instant outputTimestamp) { this.outputTimestamp = outputTimestamp; return this; } /** Verifies that the time domain of this timer is acceptable for absolute timers. 
*/ private void verifyAbsoluteTimeDomain() { if (!TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) { throw new IllegalStateException( "Cannot only set relative timers in processing time domain." + " Use } } /** * * * <ul> * Ensures that: * <li>Users can't set {@code outputTimestamp} for processing time timers. * <li>Event time timers' {@code outputTimestamp} is set before window expiration. * </ul> */ /** * Sets the timer for the target time without checking anything about whether it is a reasonable * thing to do. For example, absolute processing time timers are not really sensible since the * user has no way to compute a good choice of time. */ private void setUnderlyingTimer() { timerInternals.setTimer(namespace, timerId, target, outputTimestamp, spec.getTimeDomain()); } private Instant getCurrentTime() { switch (spec.getTimeDomain()) { case EVENT_TIME: return timerInternals.currentInputWatermarkTime(); case PROCESSING_TIME: return timerInternals.currentProcessingTime(); case SYNCHRONIZED_PROCESSING_TIME: return timerInternals.currentSynchronizedProcessingTime(); default: throw new IllegalStateException( String.format("Timer created for unknown time domain %s", spec.getTimeDomain())); } } }
// Revised variant of TimerInternalsTimer: identical to the earlier variant in this file except
// that setUnderlyingTimer() passes an extra empty-string argument ("") to
// timerInternals.setTimer — presumably a timer-family id; confirm against the TimerInternals
// interface.
// NOTE(review): this blob is extraction-collapsed source and appears corrupted — the exception
// message in verifyAbsoluteTimeDomain is cut off after `" Use` (unterminated string literal),
// and a Javadoc block below it is scrambled. Verify against the original upstream file.
class TimerInternalsTimer implements Timer { private final TimerInternals timerInternals; private final BoundedWindow window; private final StateNamespace namespace; private final String timerId; private final TimerSpec spec; private Instant target; private Instant outputTimestamp; private Duration period = Duration.ZERO; private Duration offset = Duration.ZERO; public TimerInternalsTimer( BoundedWindow window, StateNamespace namespace, String timerId, TimerSpec spec, TimerInternals timerInternals) { this.window = window; this.namespace = namespace; this.timerId = timerId; this.spec = spec; this.timerInternals = timerInternals; } @Override public void set(Instant target) { this.target = target; verifyAbsoluteTimeDomain(); setAndVerifyOutputTimestamp(); setUnderlyingTimer(); } @Override public void setRelative() { Instant now = getCurrentTime(); if (period.equals(Duration.ZERO)) { target = now.plus(offset); } else { long millisSinceStart = now.plus(offset).getMillis() % period.getMillis(); target = millisSinceStart == 0 ? now : now.plus(period).minus(millisSinceStart); } target = minTargetAndGcTime(target); setAndVerifyOutputTimestamp(); setUnderlyingTimer(); } @Override public Timer offset(Duration offset) { this.offset = offset; return this; } @Override public Timer align(Duration period) { this.period = period; return this; } /** * For event time timers the target time should be prior to window GC time. So it return * min(time to set, GC Time of window). */ private Instant minTargetAndGcTime(Instant target) { if (TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) { Instant windowExpiry = LateDataUtils.garbageCollectionTime(window, allowedLateness); if (target.isAfter(windowExpiry)) { return windowExpiry; } } return target; } @Override public Timer withOutputTimestamp(Instant outputTimestamp) { this.outputTimestamp = outputTimestamp; return this; } /** Verifies that the time domain of this timer is acceptable for absolute timers. 
*/ private void verifyAbsoluteTimeDomain() { if (!TimeDomain.EVENT_TIME.equals(spec.getTimeDomain())) { throw new IllegalStateException( "Cannot only set relative timers in processing time domain." + " Use } } /** * * * <ul> * Ensures that: * <li>Users can't set {@code outputTimestamp} for processing time timers. * <li>Event time timers' {@code outputTimestamp} is set before window expiration. * </ul> */ /** * Sets the timer for the target time without checking anything about whether it is a reasonable * thing to do. For example, absolute processing time timers are not really sensible since the * user has no way to compute a good choice of time. */ private void setUnderlyingTimer() { timerInternals.setTimer( namespace, timerId, "", target, outputTimestamp, spec.getTimeDomain()); } private Instant getCurrentTime() { switch (spec.getTimeDomain()) { case EVENT_TIME: return timerInternals.currentInputWatermarkTime(); case PROCESSING_TIME: return timerInternals.currentProcessingTime(); case SYNCHRONIZED_PROCESSING_TIME: return timerInternals.currentSynchronizedProcessingTime(); default: throw new IllegalStateException( String.format("Timer created for unknown time domain %s", spec.getTimeDomain())); } } }
Could you add a comment noting that these are the minimum and maximum values representable by the ZetaSQL NUMERIC type?
/**
 * Verifies that ZetaSQL NUMERIC literals — zero, integers, negatives, fractional values,
 * scientific notation, and the type's boundary values — are parsed and evaluated as
 * scale-9 {@link BigDecimal} values.
 */
public void testNumericLiteral() {
  String sql =
      "SELECT NUMERIC '0', "
          + "NUMERIC '123456', "
          + "NUMERIC '-3.14', "
          + "NUMERIC '-0.54321', "
          + "NUMERIC '1.23456e05', "
          + "NUMERIC '-9.876e-3', "
          // Min and max values representable by the ZetaSQL NUMERIC type
          // (precision 38, scale 9).
          + "NUMERIC '-99999999999999999999999999999.999999999', "
          + "NUMERIC '99999999999999999999999999999.999999999'";
  // Fixed: removed a stray empty statement (";") that followed the declaration above.
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(
                  Schema.builder()
                      .addDecimalField("f_numeric1")
                      .addDecimalField("f_numeric2")
                      .addDecimalField("f_numeric3")
                      .addDecimalField("f_numeric4")
                      .addDecimalField("f_numeric5")
                      .addDecimalField("f_numeric6")
                      .addDecimalField("f_numeric7")
                      .addDecimalField("f_numeric8")
                      .build())
              .addValues(new BigDecimal("0").setScale(9))
              .addValues(new BigDecimal("123456").setScale(9))
              .addValues(new BigDecimal("-3.14").setScale(9))
              .addValues(new BigDecimal("-0.54321").setScale(9))
              // 1.23456e05 normalizes to 123456.
              .addValues(new BigDecimal("123456").setScale(9))
              // -9.876e-3 normalizes to -0.009876.
              .addValues(new BigDecimal("-0.009876").setScale(9))
              // Boundary values already carry scale 9; no setScale needed.
              .addValues(new BigDecimal("-99999999999999999999999999999.999999999"))
              .addValues(new BigDecimal("99999999999999999999999999999.999999999"))
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
+ "NUMERIC '-99999999999999999999999999999.999999999', "
/**
 * Verifies that ZetaSQL NUMERIC literals — zero, integers, negatives, fractional values,
 * scientific notation, and the type's boundary values — are parsed and evaluated correctly,
 * with expected values built via {@code ZetaSqlTypesUtils.bigDecimalAsNumeric}.
 */
public void testNumericLiteral() {
  String sql =
      "SELECT NUMERIC '0', "
          + "NUMERIC '123456', "
          + "NUMERIC '-3.14', "
          + "NUMERIC '-0.54321', "
          + "NUMERIC '1.23456e05', "
          + "NUMERIC '-9.876e-3', "
          // Min and max values representable by the ZetaSQL NUMERIC type
          // (precision 38, scale 9).
          + "NUMERIC '-99999999999999999999999999999.999999999', "
          + "NUMERIC '99999999999999999999999999999.999999999'";
  // Fixed: removed a stray empty statement (";") that followed the declaration above.
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(
                  Schema.builder()
                      .addDecimalField("f_numeric1")
                      .addDecimalField("f_numeric2")
                      .addDecimalField("f_numeric3")
                      .addDecimalField("f_numeric4")
                      .addDecimalField("f_numeric5")
                      .addDecimalField("f_numeric6")
                      .addDecimalField("f_numeric7")
                      .addDecimalField("f_numeric8")
                      .build())
              .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("0"))
              .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("123456"))
              .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("-3.14"))
              .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("-0.54321"))
              // 1.23456e05 normalizes to 123456.
              .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("123456"))
              // -9.876e-3 normalizes to -0.009876.
              .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("-0.009876"))
              .addValues(
                  ZetaSqlTypesUtils.bigDecimalAsNumeric(
                      "-99999999999999999999999999999.999999999"))
              .addValues(
                  ZetaSqlTypesUtils.bigDecimalAsNumeric(
                      "99999999999999999999999999999.999999999"))
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
class ZetaSqlDialectSpecTest extends ZetaSqlTestBase { @Rule public transient TestPipeline pipeline = TestPipeline.create(); @Rule public ExpectedException thrown = ExpectedException.none(); @Before public void setUp() { initialize(); } @Test public void testSimpleSelect() { String sql = "SELECT CAST (1243 as INT64), " + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), " + "CAST ('string' as STRING);"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addDateTimeField("field2") .addStringField("field3") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 1243L, new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()), "string") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testWithQueryPlannerClass() { String sql = "SELECT CAST (1243 as INT64), " + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), " + "CAST ('string' as STRING);"; PCollection<Row> stream = pipeline.apply(SqlTransform.query(sql).withQueryPlannerClass(ZetaSQLQueryPlanner.class)); final Schema schema = Schema.builder() .addInt64Field("field1") .addDateTimeField("field2") .addStringField("field3") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 1243L, new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()), "string") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testPlannerNamePipelineOption() { pipeline .getOptions() .as(BeamSqlPipelineOptions.class) .setPlannerName("org.apache.beam.sdk.extensions.sql.zetasql.ZetaSQLQueryPlanner"); String sql = "SELECT CAST (1243 as INT64), " + "CAST ('2018-09-15 
12:59:59.000000+00' as TIMESTAMP), " + "CAST ('string' as STRING);"; PCollection<Row> stream = pipeline.apply(SqlTransform.query(sql)); final Schema schema = Schema.builder() .addInt64Field("field1") .addDateTimeField("field2") .addStringField("field3") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 1243L, new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()), "string") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testByteLiterals() { String sql = "SELECT b'abc'"; byte[] byteString = new byte[] {'a', 'b', 'c'}; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("ColA", FieldType.BYTES).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(byteString).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testByteString() { String sql = "SELECT @p0 IS NULL AS ColA"; ByteString byteString = ByteString.copyFrom(new byte[] {0x62}); ImmutableMap<String, Value> params = ImmutableMap.<String, Value>builder().put("p0", Value.createBytesValue(byteString)).build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("ColA", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testFloat() { String sql = "SELECT 
3.0"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("ColA", FieldType.DOUBLE).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(3.0).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testStringLiterals() { String sql = "SELECT '\"America/Los_Angeles\"\\n'"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("ColA", FieldType.STRING).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues("\"America/Los_Angeles\"\n").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testParameterString() { String sql = "SELECT ?"; ImmutableList<Value> params = ImmutableList.of(Value.createStringValue("abc\n")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("ColA", FieldType.STRING).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc\n").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel") public void testEQ1() { String sql = "SELECT @p0 = @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.<String, 
Value>builder() .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_BOOL)) .put("p1", Value.createBoolValue(true)) .build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore( "Does not support inf/-inf/nan in double/float literals because double/float literals are" + " converted to BigDecimal in Calcite codegen.") public void testEQ2() { String sql = "SELECT @p0 = @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.<String, Value>builder() .put("p0", Value.createDoubleValue(0)) .put("p1", Value.createDoubleValue(Double.POSITIVE_INFINITY)) .build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addBooleanField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel") public void testEQ3() { String sql = "SELECT @p0 = @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.<String, Value>builder() .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_DOUBLE)) .put("p1", Value.createDoubleValue(3.14)) .build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = 
zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEQ4() { String sql = "SELECT @p0 = @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.<String, Value>builder() .put("p0", Value.createBytesValue(ByteString.copyFromUtf8("hello"))) .put("p1", Value.createBytesValue(ByteString.copyFromUtf8("hello"))) .build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEQ5() { String sql = "SELECT b'hello' = b'hello' AS ColA"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEQ6() { String sql = "SELECT ? = ? 
AS ColA"; ImmutableList<Value> params = ImmutableList.of(Value.createInt64Value(4L), Value.createInt64Value(5L)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIsNotNull1() { String sql = "SELECT @p0 IS NOT NULL AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIsNotNull2() { String sql = "SELECT @p0 IS NOT NULL AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createNullValue( TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64)))); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIsNotNull3() { String sql = "SELECT @p0 IS NOT NULL AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createNullValue( TypeFactory.createStructType( Arrays.asList( new StructField( "a", TypeFactory.createSimpleType(TypeKind.TYPE_STRING)))))); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIfBasic() { String sql = "SELECT IF(@p0, @p1, @p2) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createBoolValue(true), "p1", Value.createInt64Value(1), "p2", Value.createInt64Value(2)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.INT64).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(1L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIfPositional() { String sql = "SELECT IF(?, ?, ?) 
AS ColA"; ImmutableList<Value> params = ImmutableList.of( Value.createBoolValue(true), Value.createInt64Value(1), Value.createInt64Value(2)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.INT64).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(1L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCoalesceBasic() { String sql = "SELECT COALESCE(@p0, @p1, @p2) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING), "p1", Value.createStringValue("yay"), "p2", Value.createStringValue("nay")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("yay").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCoalesceSingleArgument() { String sql = "SELECT COALESCE(@p0) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_INT64)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.array(FieldType.INT64)).build(); 
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCoalesceNullArray() { String sql = "SELECT COALESCE(@p0, @p1) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createNullValue( TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))), "p1", Value.createNullValue( TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64)))); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.array(FieldType.INT64)).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel") public void testNullIfCoercion() { String sql = "SELECT NULLIF(@p0, @p1) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createInt64Value(3L), "p1", Value.createSimpleNullValue(TypeKind.TYPE_DOUBLE)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.DOUBLE).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(3.0).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCoalesceNullStruct() { String sql = "SELECT COALESCE(NULL, STRUCT(\"a\" AS s, 
-33 AS i))"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema innerSchema = Schema.of(Field.of("s", FieldType.STRING), Field.of("i", FieldType.INT64)); final Schema schema = Schema.builder().addNullableField("field1", FieldType.row(innerSchema)).build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValue(Row.withSchema(innerSchema).addValues("a", -33L).build()) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIfTimestamp() { String sql = "SELECT IF(@p0, @p1, @p2) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createBoolValue(false), "p1", Value.createTimestampValueFromUnixMicros(0), "p2", Value.createTimestampValueFromUnixMicros( DateTime.parse("2019-01-01T00:00:00Z").getMillis() * 1000)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", DATETIME).build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(DateTime.parse("2019-01-01T00:00:00Z")).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("$make_array is not implemented") public void testMakeArray() { String sql = "SELECT [s3, s1, s2] FROM (SELECT \"foo\" AS s1, \"bar\" AS s2, \"baz\" AS s3);"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = 
Schema.builder().addNullableField("field1", FieldType.array(FieldType.STRING)).build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue(ImmutableList.of("baz", "foo", "bar")).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testNullIfPositive() { String sql = "SELECT NULLIF(@p0, @p1) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("null"), "p1", Value.createStringValue("null")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testNullIfNegative() { String sql = "SELECT NULLIF(@p0, @p1) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("foo"), "p1", Value.createStringValue("null")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("foo").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIfNullPositive() { String sql = "SELECT IFNULL(@p0, @p1) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("foo"), "p1", Value.createStringValue("default")); 
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("foo").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIfNullNegative() { String sql = "SELECT IFNULL(@p0, @p1) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING), "p1", Value.createStringValue("yay")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("yay").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEmptyArrayParameter() { String sql = "SELECT @p0 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createArrayValue( TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64)), ImmutableList.of())); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addArrayField("field1", FieldType.INT64).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValue(ImmutableList.of()).build()); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEmptyArrayLiteral() { String sql = "SELECT ARRAY<STRING>[];"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addArrayField("field1", FieldType.STRING).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValue(ImmutableList.of()).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testLike1() { String sql = "SELECT @p0 LIKE @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("ab%"), "p1", Value.createStringValue("ab\\%")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel") public void testLikeNullPattern() { String sql = "SELECT @p0 LIKE @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("ab%"), "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = 
Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testLikeAllowsEscapingNonSpecialCharacter() { String sql = "SELECT @p0 LIKE @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue("ab"), "p1", Value.createStringValue("\\ab")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testLikeAllowsEscapingBackslash() { String sql = "SELECT @p0 LIKE @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("a\\c"), "p1", Value.createStringValue("a\\\\c")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testLikeBytes() { String sql = "SELECT @p0 LIKE @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createBytesValue(ByteString.copyFromUtf8("abcd")), "p1", 
Value.createBytesValue(ByteString.copyFromUtf8("__%"))); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testMod() { String sql = "SELECT MOD(4, 2)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(0L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSimpleUnionAll() { String sql = "SELECT CAST (1243 as INT64), " + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), " + "CAST ('string' as STRING) " + " UNION ALL " + " SELECT CAST (1243 as INT64), " + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), " + "CAST ('string' as STRING);"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addDateTimeField("field2") .addStringField("field3") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 1243L, new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()), "string") .build(), Row.withSchema(schema) .addValues( 1243L, 
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()), "string") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testThreeWayUnionAll() { String sql = "SELECT a FROM (SELECT 1 a UNION ALL SELECT 2 UNION ALL SELECT 3)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L).build(), Row.withSchema(schema).addValues(2L).build(), Row.withSchema(schema).addValues(3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSimpleUnionDISTINCT() { String sql = "SELECT CAST (1243 as INT64), " + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), " + "CAST ('string' as STRING) " + " UNION DISTINCT " + " SELECT CAST (1243 as INT64), " + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), " + "CAST ('string' as STRING);"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addDateTimeField("field2") .addStringField("field3") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 1243L, new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()), "string") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLInnerJoin() { String sql = "SELECT t1.Key " + "FROM KeyValue AS t1" + " INNER JOIN BigTable AS t2" + " on " + " t1.Key = t2.RowKey 
AND t1.ts = t2.ts"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addInt64Field("field1").build()) .addValues(15L) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLInnerJoinWithUsing() { String sql = "SELECT t1.Key " + "FROM KeyValue AS t1" + " INNER JOIN BigTable AS t2 USING(ts)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addInt64Field("field1").build()) .addValues(15L) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLInnerJoinTwo() { String sql = "SELECT t2.RowKey " + "FROM KeyValue AS t1" + " INNER JOIN BigTable AS t2" + " on " + " t2.RowKey = t1.Key AND t2.ts = t1.ts"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addInt64Field("field1").build()) .addValues(15L) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLLeftOuterJoin() { String sql = "SELECT * " + "FROM KeyValue AS t1" + " LEFT JOIN BigTable AS t2" + " on " + " t1.Key = t2.RowKey"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = 
zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    // Schema for left rows with no right-side match: the three right-table
    // columns are nullable so the join can surface NULLs there.
    final Schema schemaOne =
        Schema.builder()
            .addInt64Field("field1")
            .addStringField("field2")
            .addDateTimeField("field3")
            .addNullableField("field4", FieldType.INT64)
            .addNullableField("field5", FieldType.STRING)
            .addNullableField("field6", DATETIME)
            .build();
    // Schema for fully matched rows: every column is non-null.
    final Schema schemaTwo =
        Schema.builder()
            .addInt64Field("field1")
            .addStringField("field2")
            .addDateTimeField("field3")
            .addInt64Field("field4")
            .addStringField("field5")
            .addDateTimeField("field6")
            .build();
    PAssert.that(stream)
        .containsInAnyOrder(
            // Key 14 has no matching BigTable row, so the right-hand columns are NULL.
            Row.withSchema(schemaOne)
                .addValues(
                    14L,
                    "KeyValue234",
                    new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()),
                    null,
                    null,
                    null)
                .build(),
            // Key 15 matches BigTable row 15, so both sides are populated.
            Row.withSchema(schemaTwo)
                .addValues(
                    15L,
                    "KeyValue235",
                    new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()),
                    15L,
                    "BigTable235",
                    new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
                .build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  // Verifies RIGHT JOIN semantics: every BigTable row appears in the output, with
  // NULLs in the KeyValue columns when no key matches.
  @Test
  public void testZetaSQLRightOuterJoin() {
    String sql =
        "SELECT * "
            + "FROM KeyValue AS t1"
            + " RIGHT JOIN BigTable AS t2"
            + " on "
            + " t1.Key = t2.RowKey";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    // Unmatched right rows surface NULLs in the three left-table columns,
    // so those fields are nullable here.
    final Schema schemaOne =
        Schema.builder()
            .addNullableField("field1", FieldType.INT64)
            .addNullableField("field2", FieldType.STRING)
            .addNullableField("field3", DATETIME)
            .addInt64Field("field4")
            .addStringField("field5")
            .addDateTimeField("field6")
            .build();
    // Fully matched rows: all six columns non-null.
    final Schema schemaTwo =
        Schema.builder()
            .addInt64Field("field1")
            .addStringField("field2")
            .addDateTimeField("field3")
            .addInt64Field("field4")
            .addStringField("field5")
            .addDateTimeField("field6")
            .build();
    PAssert.that(stream)
.containsInAnyOrder( Row.withSchema(schemaOne) .addValues( null, null, null, 16L, "BigTable236", new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC())) .build(), Row.withSchema(schemaTwo) .addValues( 15L, "KeyValue235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()), 15L, "BigTable235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC())) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLFullOuterJoin() { String sql = "SELECT * " + "FROM KeyValue AS t1" + " FULL JOIN BigTable AS t2" + " on " + " t1.Key = t2.RowKey"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schemaOne = Schema.builder() .addNullableField("field1", FieldType.INT64) .addNullableField("field2", FieldType.STRING) .addNullableField("field3", DATETIME) .addInt64Field("field4") .addStringField("field5") .addDateTimeField("field6") .build(); final Schema schemaTwo = Schema.builder() .addInt64Field("field1") .addStringField("field2") .addDateTimeField("field3") .addInt64Field("field4") .addStringField("field5") .addDateTimeField("field6") .build(); final Schema schemaThree = Schema.builder() .addInt64Field("field1") .addStringField("field2") .addDateTimeField("field3") .addNullableField("field4", FieldType.INT64) .addNullableField("field5", FieldType.STRING) .addNullableField("field6", DATETIME) .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schemaOne) .addValues( null, null, null, 16L, "BigTable236", new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC())) .build(), Row.withSchema(schemaTwo) .addValues( 15L, "KeyValue235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()), 15L, "BigTable235", new DateTime(2018, 7, 1, 21, 26, 7, 
ISOChronology.getInstanceUTC())) .build(), Row.withSchema(schemaThree) .addValues( 14L, "KeyValue234", new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()), null, null, null) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("BeamSQL only supports equal join") public void testZetaSQLFullOuterJoinTwo() { String sql = "SELECT * " + "FROM KeyValue AS t1" + " FULL JOIN BigTable AS t2" + " on " + " t1.Key + t2.RowKey = 30"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLFullOuterJoinFalse() { String sql = "SELECT * FROM KeyValue AS t1 FULL JOIN BigTable AS t2 ON false"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); thrown.expect(UnsupportedOperationException.class); BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); } @Test public void testZetaSQLThreeWayInnerJoin() { String sql = "SELECT t3.Value, t2.Value, t1.Value, t1.Key, t3.ColId FROM KeyValue as t1 " + "JOIN BigTable as t2 " + "ON (t1.Key = t2.RowKey) " + "JOIN Spanner as t3 " + "ON (t3.ColId = t1.Key)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addStringField("t3.Value") .addStringField("t2.Value") .addStringField("t1.Value") .addInt64Field("t1.Key") .addInt64Field("t3.ColId") .build()) .addValues("Spanner235", "BigTable235", "KeyValue235", 15L, 15L) .build()); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLTableJoinOnItselfWithFiltering() { String sql = "SELECT * FROM Spanner as t1 " + "JOIN Spanner as t2 " + "ON (t1.ColId = t2.ColId) WHERE t1.ColId = 17"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addInt64Field("field1") .addStringField("field2") .addInt64Field("field3") .addStringField("field4") .build()) .addValues(17L, "Spanner237", 17L, "Spanner237") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLSelectFromSelect() { String sql = "SELECT * FROM (SELECT \"apple\" AS fruit, \"carrot\" AS vegetable);"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues("apple", "carrot").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); Schema outputSchema = stream.getSchema(); Assert.assertEquals(2, outputSchema.getFieldCount()); Assert.assertEquals("fruit", outputSchema.getField(0).getName()); Assert.assertEquals("vegetable", outputSchema.getField(1).getName()); } @Test public void testZetaSQLSelectFromTable() { String sql = "SELECT Key, Value FROM KeyValue;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> 
stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, "KeyValue234").build(), Row.withSchema(schema).addValues(15L, "KeyValue235").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLSelectFromTableLimit() { String sql = "SELECT Key, Value FROM KeyValue LIMIT 2;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, "KeyValue234").build(), Row.withSchema(schema).addValues(15L, "KeyValue235").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLSelectFromTableLimit0() { String sql = "SELECT Key, Value FROM KeyValue LIMIT 0;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream).containsInAnyOrder(); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLSelectNullLimitParam() { String sql = "SELECT Key, Value FROM KeyValue LIMIT @lmt;"; ImmutableMap<String, Value> params = ImmutableMap.of( "lmt", Value.createNullValue(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(RuntimeException.class); thrown.expectMessage("Limit requires 
non-null count and offset"); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); } @Test public void testZetaSQLSelectNullOffsetParam() { String sql = "SELECT Key, Value FROM KeyValue LIMIT 1 OFFSET @lmt;"; ImmutableMap<String, Value> params = ImmutableMap.of( "lmt", Value.createNullValue(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(RuntimeException.class); thrown.expectMessage("Limit requires non-null count and offset"); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); } @Test public void testZetaSQLSelectFromTableOrderLimit() { String sql = "SELECT x, y FROM (SELECT 1 as x, 0 as y UNION ALL SELECT 0, 0 " + "UNION ALL SELECT 1, 0 UNION ALL SELECT 1, 1) ORDER BY x LIMIT 1"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(0L, 0L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLSelectFromTableLimitOffset() { String sql = "SELECT COUNT(a) FROM (\n" + "SELECT a FROM (SELECT 1 a UNION ALL SELECT 2 UNION ALL SELECT 3) LIMIT 3 OFFSET 1);"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L).build()); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLSelectFromTableOrderByLimit() { String sql = "SELECT Key, Value FROM KeyValue ORDER BY Key DESC LIMIT 2;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, "KeyValue234").build(), Row.withSchema(schema).addValues(15L, "KeyValue235").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLSelectFromTableOrderBy() { String sql = "SELECT Key, Value FROM KeyValue ORDER BY Key DESC;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(RuntimeException.class); thrown.expectMessage("ORDER BY without a LIMIT is not supported."); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testZetaSQLSelectFromTableWithStructType2() { String sql = "SELECT table_with_struct.struct_col.struct_col_str FROM table_with_struct WHERE id = 1;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("row_one").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLStructFieldAccessInFilter() { String sql = "SELECT table_with_struct.id FROM table_with_struct WHERE" + " 
table_with_struct.struct_col.struct_col_str = 'row_one';"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(1L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLStructFieldAccessInCast() { String sql = "SELECT CAST(table_with_struct.id AS STRING) FROM table_with_struct WHERE" + " table_with_struct.struct_col.struct_col_str = 'row_one';"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("1").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("[BEAM-9191] CAST operator does not work fully due to bugs in unparsing") public void testZetaSQLStructFieldAccessInCast2() { String sql = "SELECT CAST(A.struct_col.struct_col_str AS TIMESTAMP) FROM table_with_struct_ts_string AS" + " A"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addDateTimeField("field").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValue(parseTimestampWithUTCTimeZone("2019-01-15 13:21:03")) .build()); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  // Verifies a GROUP BY aggregation mixing column-referencing aggregates
  // (SUM over each has_fN column) with COUNT(*), which references no column.
  @Test
  public void testAggregateWithAndWithoutColumnRefs() {
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    String sql =
        "SELECT \n"
            + " id, \n"
            + " SUM(has_f1) as f1_count, \n"
            + " SUM(has_f2) as f2_count, \n"
            + " SUM(has_f3) as f3_count, \n"
            + " SUM(has_f4) as f4_count, \n"
            + " SUM(has_f5) as f5_count, \n"
            + " COUNT(*) as count, \n"
            + " SUM(has_f6) as f6_count \n"
            + "FROM (select 0 as id, 1 as has_f1, 2 as has_f2, 3 as has_f3, 4 as has_f4, 5 as has_f5, 6 as has_f6)\n"
            + "GROUP BY id";
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema =
        Schema.builder()
            .addInt64Field("id")
            .addInt64Field("f1_count")
            .addInt64Field("f2_count")
            .addInt64Field("f3_count")
            .addInt64Field("f4_count")
            .addInt64Field("f5_count")
            .addInt64Field("count")
            .addInt64Field("f6_count")
            .build();
    // Single input row: each SUM yields its column's constant and COUNT(*) is 1.
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema).addValues(0L, 1L, 2L, 3L, 4L, 5L, 1L, 6L).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  // Verifies grouping by a field accessed through a struct column (rowCol.row_id).
  @Test
  public void testZetaSQLStructFieldAccessInGroupBy() {
    String sql =
        "SELECT rowCol.row_id, COUNT(*) FROM table_with_struct_two GROUP BY rowCol.row_id";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
    // row_id 3 appears twice in the fixture table; 1 and 2 appear once each.
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema).addValues(1L, 1L).build(),
            Row.withSchema(schema).addValues(2L, 1L).build(),
            Row.withSchema(schema).addValues(3L, 2L).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
} @Test public void testZetaSQLAnyValueInGroupBy() { String sql = "SELECT rowCol.row_id as key, ANY_VALUE(rowCol.data) as any_value FROM table_with_struct_two GROUP BY rowCol.row_id"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Map<Long, List<String>> allowedTuples = new HashMap<>(); allowedTuples.put(1L, Arrays.asList("data1")); allowedTuples.put(2L, Arrays.asList("data2")); allowedTuples.put(3L, Arrays.asList("data2", "data3")); PAssert.that(stream) .satisfies( input -> { Iterator<Row> iter = input.iterator(); while (iter.hasNext()) { Row row = iter.next(); List<String> values = allowedTuples.remove(row.getInt64("key")); assertTrue(values != null); assertTrue(values.contains(row.getString("any_value"))); } assertTrue(allowedTuples.isEmpty()); return null; }); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLStructFieldAccessInGroupBy2() { String sql = "SELECT rowCol.data, MAX(rowCol.row_id), MIN(rowCol.row_id) FROM table_with_struct_two" + " GROUP BY rowCol.data"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addStringField("field1") .addInt64Field("field2") .addInt64Field("field3") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("data1", 1L, 1L).build(), Row.withSchema(schema).addValues("data2", 3L, 2L).build(), Row.withSchema(schema).addValues("data3", 3L, 3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLStructFieldAccessInnerJoin() { String sql = "SELECT A.rowCol.data 
FROM table_with_struct_two AS A INNER JOIN " + "table_with_struct AS B " + "ON A.rowCol.row_id = B.id"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue("data1").build(), Row.withSchema(schema).addValue("data2").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLSelectFromTableWithArrayType() { String sql = "SELECT array_col FROM table_with_array;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addArrayField("field", FieldType.STRING).build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue(Arrays.asList("1", "2", "3")).build(), Row.withSchema(schema).addValue(ImmutableList.of()).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLSelectStarFromTable() { String sql = "SELECT * FROM BigTable;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addStringField("field2") .addDateTimeField("field3") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 15L, "BigTable235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC())) .build(), Row.withSchema(schema) .addValues( 
16L, "BigTable236", new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC())) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBasicFiltering() { String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder().addInt64Field("field1").addStringField("field2").build()) .addValues(14L, "KeyValue234") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBasicFilteringTwo() { String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14 AND Value = 'non-existing';"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream).containsInAnyOrder(); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBasicFilteringThree() { String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14 OR Key = 15;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, "KeyValue234").build(), Row.withSchema(schema).addValues(15L, "KeyValue235").build()); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLCountOnAColumn() { String sql = "SELECT COUNT(Key) FROM KeyValue"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLAggDistinct() { String sql = "SELECT Key, COUNT(DISTINCT Value) FROM KeyValue GROUP BY Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(RuntimeException.class); thrown.expectMessage("Does not support COUNT DISTINCT"); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testZetaSQLBasicAgg() { String sql = "SELECT Key, COUNT(*) FROM KeyValue GROUP BY Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, 1L).build(), Row.withSchema(schema).addValues(15L, 1L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLColumnAlias1() { String sql = "SELECT Key, COUNT(*) AS count_col FROM KeyValue GROUP BY Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = 
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); Schema outputSchema = stream.getSchema(); Assert.assertEquals(2, outputSchema.getFieldCount()); Assert.assertEquals("Key", outputSchema.getField(0).getName()); Assert.assertEquals("count_col", outputSchema.getField(1).getName()); } @Test public void testZetaSQLColumnAlias2() { String sql = "SELECT Key AS k1, (count_col + 1) AS k2 FROM (SELECT Key, COUNT(*) AS count_col FROM" + " KeyValue GROUP BY Key)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); Schema outputSchema = stream.getSchema(); Assert.assertEquals(2, outputSchema.getFieldCount()); Assert.assertEquals("k1", outputSchema.getField(0).getName()); Assert.assertEquals("k2", outputSchema.getField(1).getName()); } @Test public void testZetaSQLColumnAlias3() { String sql = "SELECT Key AS v1, Value AS v2, ts AS v3 FROM KeyValue"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); Schema outputSchema = stream.getSchema(); Assert.assertEquals(3, outputSchema.getFieldCount()); Assert.assertEquals("v1", outputSchema.getField(0).getName()); Assert.assertEquals("v2", outputSchema.getField(1).getName()); Assert.assertEquals("v3", outputSchema.getField(2).getName()); } @Test public void testZetaSQLColumnAlias4() { String sql = "SELECT CAST(123 AS INT64) AS cast_col"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); 
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); Schema outputSchema = stream.getSchema(); Assert.assertEquals(1, outputSchema.getFieldCount()); Assert.assertEquals("cast_col", outputSchema.getField(0).getName()); } @Test public void testZetaSQLAmbiguousAlias() { String sql = "SELECT row_id as ID, int64_col as ID FROM table_all_types GROUP BY ID;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expectMessage( "Name ID in GROUP BY clause is ambiguous; it may refer to multiple columns in the" + " SELECT-list [at 1:68]"); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testZetaSQLAggWithOrdinalReference() { String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY 1"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 2L).build(), Row.withSchema(schema).addValues(2L, 3L).build(), Row.withSchema(schema).addValues(3L, 2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLAggWithAliasReference() { String sql = "SELECT Key AS K, COUNT(*) FROM aggregate_test_table GROUP BY K"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); 
PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 2L).build(), Row.withSchema(schema).addValues(2L, 3L).build(), Row.withSchema(schema).addValues(3L, 2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBasicAgg2() { String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 2L).build(), Row.withSchema(schema).addValues(2L, 3L).build(), Row.withSchema(schema).addValues(3L, 2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBasicAgg3() { String sql = "SELECT Key, Key2, COUNT(*) FROM aggregate_test_table GROUP BY Key2, Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addInt64Field("field3") .addInt64Field("field2") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 10L, 1L).build(), Row.withSchema(schema).addValues(1L, 11L, 1L).build(), Row.withSchema(schema).addValues(2L, 11L, 2L).build(), Row.withSchema(schema).addValues(2L, 12L, 1L).build(), Row.withSchema(schema).addValues(3L, 13L, 2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBasicAgg4() { String sql = "SELECT Key, Key2, 
MAX(f_int_1), MIN(f_int_1), SUM(f_int_1), SUM(f_double_1) " + "FROM aggregate_test_table GROUP BY Key2, Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addInt64Field("field3") .addInt64Field("field2") .addInt64Field("field4") .addInt64Field("field5") .addDoubleField("field6") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 10L, 1L, 1L, 1L, 1.0).build(), Row.withSchema(schema).addValues(1L, 11L, 2L, 2L, 2L, 2.0).build(), Row.withSchema(schema).addValues(2L, 11L, 4L, 3L, 7L, 7.0).build(), Row.withSchema(schema).addValues(2L, 12L, 5L, 5L, 5L, 5.0).build(), Row.withSchema(schema).addValues(3L, 13L, 7L, 6L, 13L, 13.0).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBasicAgg5() { String sql = "SELECT Key, Key2, AVG(CAST(f_int_1 AS FLOAT64)), AVG(f_double_1) " + "FROM aggregate_test_table GROUP BY Key2, Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addInt64Field("field2") .addDoubleField("field3") .addDoubleField("field4") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 10L, 1.0, 1.0).build(), Row.withSchema(schema).addValues(1L, 11L, 2.0, 2.0).build(), Row.withSchema(schema).addValues(2L, 11L, 3.5, 3.5).build(), Row.withSchema(schema).addValues(2L, 12L, 5.0, 5.0).build(), Row.withSchema(schema).addValues(3L, 13L, 6.5, 6.5).build()); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore( "Calcite infers return type of AVG(int64) as BIGINT while ZetaSQL requires it as either" + " NUMERIC or DOUBLE/FLOAT64") public void testZetaSQLTestAVG() { String sql = "SELECT Key, AVG(f_int_1)" + "FROM aggregate_test_table GROUP BY Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addInt64Field("field2") .addInt64Field("field3") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 10L, 1L).build(), Row.withSchema(schema).addValues(1L, 11L, 6L).build(), Row.withSchema(schema).addValues(2L, 11L, 6L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLGroupByExprInSelect() { String sql = "SELECT int64_col + 1 FROM table_all_types GROUP BY int64_col + 1;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue(0L).build(), Row.withSchema(schema).addValue(-1L).build(), Row.withSchema(schema).addValue(-2L).build(), Row.withSchema(schema).addValue(-3L).build(), Row.withSchema(schema).addValue(-4L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLGroupByAndFiltering() { String sql = "SELECT int64_col FROM table_all_types WHERE int64_col = 1 GROUP BY int64_col;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new 
ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream).containsInAnyOrder(); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLGroupByAndFilteringOnNonGroupByColumn() { String sql = "SELECT int64_col FROM table_all_types WHERE double_col = 0.5 GROUP BY int64_col;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue(-5L).build(), Row.withSchema(schema).addValue(-4L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBasicHaving() { String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY Key HAVING COUNT(*) > 2"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L, 3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLHavingNull() { String sql = "SELECT SUM(int64_val) FROM all_null_table GROUP BY primary_key HAVING false"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = 
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field").build(); PAssert.that(stream).empty(); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBasicFixedWindowing() { String sql = "SELECT " + "COUNT(*) as field_count, " + "TUMBLE_START(\"INTERVAL 1 SECOND\") as window_start, " + "TUMBLE_END(\"INTERVAL 1 SECOND\") as window_end " + "FROM KeyValue " + "GROUP BY TUMBLE(ts, \"INTERVAL 1 SECOND\");"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("count_start") .addDateTimeField("field1") .addDateTimeField("field2") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 1L, new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()), new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC())) .build(), Row.withSchema(schema) .addValues( 1L, new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()), new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC())) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLNestedQueryOne() { String sql = "SELECT a.Value, a.Key FROM (SELECT Key, Value FROM KeyValue WHERE Key = 14 OR Key = 15)" + " as a;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field2").addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("KeyValue234", 14L).build(), 
Row.withSchema(schema).addValues("KeyValue235", 15L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLNestedQueryTwo() { String sql = "SELECT a.Key, a.Key2, COUNT(*) FROM " + " (SELECT * FROM aggregate_test_table WHERE Key != 10) as a " + " GROUP BY a.Key2, a.Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addInt64Field("field3") .addInt64Field("field2") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 10L, 1L).build(), Row.withSchema(schema).addValues(1L, 11L, 1L).build(), Row.withSchema(schema).addValues(2L, 11L, 2L).build(), Row.withSchema(schema).addValues(2L, 12L, 1L).build(), Row.withSchema(schema).addValues(3L, 13L, 2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLNestedQueryThree() { String sql = "SELECT * FROM (SELECT * FROM KeyValue) AS t1 INNER JOIN (SELECT * FROM BigTable) AS t2 on" + " t1.Key = t2.RowKey"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addInt64Field("Key") .addStringField("Value") .addDateTimeField("ts") .addInt64Field("RowKey") .addStringField("Value2") .addDateTimeField("ts2") .build()) .addValues( 15L, "KeyValue235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()), 15L, "BigTable235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC())) .build()); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** Nested query whose inner SELECT lists Value before Key; outer projection re-reads both. */
  @Test
  public void testZetaSQLNestedQueryFive() {
    String sql =
        "SELECT a.Value, a.Key FROM (SELECT Value, Key FROM KeyValue WHERE Key = 14 OR Key = 15)"
            + " as a;";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    final Schema schema =
        Schema.builder().addStringField("field2").addInt64Field("field1").build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema).addValues("KeyValue234", 14L).build(),
            Row.withSchema(schema).addValues("KeyValue235", 15L).build());

    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  // Fixed: the annotation here was duplicated ("@Test @Test"), which does not compile
  // because JUnit 4's @Test is not @Repeatable. A single @Test is kept.
  /** Unary minus applied to a NUMERIC literal. */
  @Test
  public void testUnaryMinusNumeric() {
    String sql = "SELECT - NUMERIC '1.23456e05'";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(Schema.builder().addDecimalField("f_numeric").build())
                // NUMERIC results in this suite are asserted at scale 9.
                .addValues(new BigDecimal("-123456").setScale(9))
                .build());

    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** Addition of two NUMERIC literals. */
  @Test
  public void testAddNumeric() {
    String sql = "SELECT NUMERIC '1.23456e05' + NUMERIC '9.876e-3'";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(Schema.builder().addDecimalField("f_numeric").build())
                .addValues(new BigDecimal("123456.009876").setScale(9))
                .build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSubNumeric() { String sql = "SELECT NUMERIC '1.23456e05' - NUMERIC '-9.876e-3'"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(new BigDecimal("123456.009876").setScale(9)) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testMultiNumeric() { String sql = "SELECT NUMERIC '1.23e02' * NUMERIC '-1.001e-3'"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(new BigDecimal("-0.123123").setScale(9)) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testDivNumeric() { String sql = "SELECT NUMERIC '-1.23123e-1' / NUMERIC '-1.001e-3'"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(new BigDecimal("123").setScale(9)) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testModNumeric() { String sql = "SELECT MOD(NUMERIC '1.23456e05', NUMERIC '5')"; 
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(new BigDecimal("1").setScale(9)) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testFloorNumeric() { String sql = "SELECT FLOOR(NUMERIC '1.23456e04'), FLOOR(NUMERIC '-1.23456e04')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addDecimalField("f_numeric1") .addDecimalField("f_numeric2") .build()) .addValues(new BigDecimal("12345").setScale(9)) .addValues(new BigDecimal("-12346").setScale(9)) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCeilNumeric() { String sql = "SELECT CEIL(NUMERIC '1.23456e04'), CEIL(NUMERIC '-1.23456e04')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addDecimalField("f_numeric1") .addDecimalField("f_numeric2") .build()) .addValues(new BigDecimal("12346").setScale(9)) .addValues(new BigDecimal("-12345").setScale(9)) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testNumericColumn() { String sql = "SELECT numeric_field FROM table_with_numeric"; ZetaSQLQueryPlanner 
zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(Schema.builder().addDecimalField("f_numeric").build())
                .addValues(new BigDecimal("123.4567").setScale(9))
                .build(),
            Row.withSchema(Schema.builder().addDecimalField("f_numeric").build())
                .addValues(new BigDecimal("765.4321").setScale(9))
                .build(),
            Row.withSchema(Schema.builder().addDecimalField("f_numeric").build())
                .addValues(new BigDecimal("-555.5555").setScale(9))
                .build());

    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** SUM over a NUMERIC column. */
  @Test
  // Fixed: the @Ignore reason string was left unterminated ("https:) in the source,
  // which does not compile. Closed it with the tracking-issue URL.
  // TODO(review): the URL tail was truncated in the source — confirm the exact JIRA id.
  @Ignore("https://issues.apache.org/jira/browse/BEAM-10459")
  public void testSumNumeric() {
    String sql = "SELECT SUM(numeric_field) FROM table_with_numeric";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(Schema.builder().addDecimalField("f_numeric").build())
                .addValues(new BigDecimal("333.3333").setScale(9))
                .build());

    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** AVG over a NUMERIC column. */
  @Test
  // Fixed: same unterminated @Ignore reason string as testSumNumeric above.
  // TODO(review): the URL tail was truncated in the source — confirm the exact JIRA id.
  @Ignore("https://issues.apache.org/jira/browse/BEAM-10459")
  public void testAvgNumeric() {
    String sql = "SELECT AVG(numeric_field) FROM table_with_numeric";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(Schema.builder().addDecimalField("f_numeric").build())
                .addValues(new BigDecimal("111.1111").setScale(9))
                .build());

    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  @Test
  public void
testMultipleSelectStatementsThrowsException() { String sql = "SELECT 1; SELECT 2;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(UnsupportedOperationException.class); thrown.expectMessage("No additional statements are allowed after a SELECT statement."); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testAlreadyDefinedUDFThrowsException() { String sql = "CREATE FUNCTION foo() AS (0); CREATE FUNCTION foo() AS (1); SELECT foo();"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(ParseException.class); thrown.expectMessage("Failed to define function foo"); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testCreateFunctionNoSelectThrowsException() { String sql = "CREATE FUNCTION plusOne(x INT64) AS (x + 1);"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(UnsupportedOperationException.class); thrown.expectMessage("Statement list must end in a SELECT statement, not CreateFunctionStmt"); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testNullaryUdf() { String sql = "CREATE FUNCTION zero() AS (0); SELECT zero();"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(0L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testQualifiedNameUdfUnqualifiedCall() { String sql = "CREATE FUNCTION foo.bar.baz() AS (\"uwu\"); SELECT baz();"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); 
// Tail of the preceding UDF test: expect a single row containing "uwu".
PAssert.that(stream)
    .containsInAnyOrder(
        Row.withSchema(Schema.builder().addStringField("x").build()).addValue("uwu").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Calls a CREATE FUNCTION UDF through its fully qualified path; disabled on a ZetaSQL bug.
// NOTE(review): the @Ignore reason string appears truncated in this copy of the file — confirm
// against the repository that the issue URL and closing parenthesis are intact.
@Test
@Ignore(
    "Qualified paths can't be resolved due to a bug in ZetaSQL: "
        + "https:
public void testQualifiedNameUdfQualifiedCallThrowsException() {
  String sql = "CREATE FUNCTION foo.bar.baz() AS (\"uwu\"); SELECT foo.bar.baz();";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(Schema.builder().addStringField("x").build()).addValue("uwu").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// A scalar UDF applied to its own result: triple(triple(1)) == 9.
@Test
public void testUnaryUdf() {
  String sql = "CREATE FUNCTION triple(x INT64) AS (3 * x); SELECT triple(triple(1));";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(9L).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// A UDF whose body calls another UDF created earlier in the same statement list.
@Test
public void testUdfWithinUdf() {
  String sql =
      "CREATE FUNCTION triple(x INT64) AS (3 * x);"
          + " CREATE FUNCTION nonuple(x INT64) as (triple(triple(x)));"
          + " SELECT nonuple(1);";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(9L).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// A forward reference between UDFs is rejected: bar() is not yet defined when foo() is created.
@Test
public void testUndefinedUdfThrowsException() {
  String sql =
      "CREATE FUNCTION foo() AS (bar()); "
          + "CREATE FUNCTION bar() AS (foo()); "
          + "SELECT foo();";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  thrown.expect(SqlException.class);
  thrown.expectMessage("Function not found: bar");
  zetaSQLQueryPlanner.convertToBeamRel(sql);
}

// A self-recursive UDF is rejected: the name is not visible inside its own body.
@Test
public void testRecursiveUdfThrowsException() {
  String sql = "CREATE FUNCTION omega() AS (omega()); SELECT omega();";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  thrown.expect(SqlException.class);
  thrown.expectMessage("Function not found: omega");
  zetaSQLQueryPlanner.convertToBeamRel(sql);
}

// A user-defined table-valued function ranging over the KeyValue test table.
@Test
public void testUDTVF() {
  String sql =
      "CREATE TABLE FUNCTION CustomerRange(MinID INT64, MaxID INT64)\n"
          + " AS\n"
          + " SELECT *\n"
          + " FROM KeyValue\n"
          + " WHERE key >= MinId AND key <= MaxId; \n"
          + " SELECT key FROM CustomerRange(10, 14)";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema singleField = Schema.builder().addInt64Field("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(singleField).addValues(14L).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// A UDTVF body referencing a nonexistent table fails at conversion time.
@Test
public void testUDTVFTableNotFound() {
  String sql =
      "CREATE TABLE FUNCTION CustomerRange(MinID INT64, MaxID INT64)\n"
          + " AS\n"
          + " SELECT *\n"
          + " FROM TableNotExist\n"
          + " WHERE key >= MinId AND key <= MaxId; \n"
          + " SELECT key FROM CustomerRange(10, 14)";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  thrown.expect(SqlConversionException.class);
  thrown.expectMessage("Wasn't able to resolve the path [TableNotExist] in schema: beam");
  zetaSQLQueryPlanner.convertToBeamRel(sql);
}

// Invoking an undeclared table-valued function fails analysis.
@Test
public void testUDTVFFunctionNotFound() {
  String sql =
      "CREATE TABLE FUNCTION CustomerRange(MinID INT64, MaxID INT64)\n"
          + " AS\n"
          + " SELECT *\n"
          + " FROM KeyValue\n"
          + " WHERE key >= MinId AND key <= MaxId; \n"
          + " SELECT key FROM FunctionNotFound(10, 14)";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  thrown.expect(SqlException.class);
  thrown.expectMessage("Table-valued function not found: FunctionNotFound");
  zetaSQLQueryPlanner.convertToBeamRel(sql);
}

// SELECT DISTINCT over an INT64 column yields each distinct value once.
@Test
public void testDistinct() {
  String sql = "SELECT DISTINCT Key2 FROM aggregate_test_table";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema schema = Schema.builder().addInt64Field("Key2").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValues(10L).build(),
          Row.withSchema(schema).addValues(11L).build(),
          Row.withSchema(schema).addValues(12L).build(),
          Row.withSchema(schema).addValues(13L).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// DISTINCT collapses an all-NULL column to a single NULL row.
@Test
public void testDistinctOnNull() {
  String sql = "SELECT DISTINCT str_val FROM all_null_table";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema schema = Schema.builder().addNullableField("str_val", FieldType.DOUBLE).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// ANY_VALUE over an all-NULL column yields NULL.
@Test
public void testAnyValue() {
  String sql
      = "SELECT ANY_VALUE(double_val) FROM all_null_table";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema schema = Schema.builder().addNullableField("double_val", FieldType.DOUBLE).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// A bare SELECT NULL produces one row with a nullable INT64 field.
@Test
public void testSelectNULL() {
  String sql = "SELECT NULL";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema schema = Schema.builder().addNullableField("long_val", FieldType.INT64).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Two WITH subqueries joined in the outer query.
@Test
public void testWithQueryOne() {
  String sql =
      "With T1 AS (SELECT * FROM KeyValue), T2 AS (SELECT * FROM BigTable) SELECT T2.RowKey FROM"
          + " T1 INNER JOIN T2 on T1.Key = T2.RowKey;";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(Schema.builder().addInt64Field("field1").build())
              .addValues(15L)
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// A WITH subquery containing a GROUP BY aggregation, projected in the outer query.
@Test
public void testWithQueryTwo() {
  String sql =
      "WITH T1 AS (SELECT Key, COUNT(*) as value FROM KeyValue GROUP BY Key) SELECT T1.Key,"
          + " T1.value FROM T1";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new
// (continuation of testWithQueryTwo — the planner construction is split across the
// mangled source lines)
ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
    .containsInAnyOrder(
        Row.withSchema(schema).addValues(14L, 1L).build(),
        Row.withSchema(schema).addValues(15L, 1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Filtering inside the WITH subquery, projecting in the outer query.
@Test
public void testWithQueryThree() {
  String sql =
      "WITH T1 as (SELECT Value, Key FROM KeyValue WHERE Key = 14 OR Key = 15) SELECT T1.Value,"
          + " T1.Key FROM T1;";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema =
      Schema.builder().addStringField("field1").addInt64Field("field2").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValues("KeyValue234", 14L).build(),
          Row.withSchema(schema).addValues("KeyValue235", 15L).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Same data as testWithQueryThree, but the filter lives in the outer query.
@Test
public void testWithQueryFour() {
  String sql =
      "WITH T1 as (SELECT Value, Key FROM KeyValue) SELECT T1.Value, T1.Key FROM T1 WHERE T1.Key"
          + " = 14 OR T1.Key = 15;";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema =
      Schema.builder().addStringField("field2").addInt64Field("field1").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValues("KeyValue234", 14L).build(),
          Row.withSchema(schema).addValues("KeyValue235", 15L).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// GROUP BY in the outer query over a WITH subquery.
@Test
public void testWithQueryFive() {
  String sql =
      "WITH T1 AS (SELECT * FROM KeyValue) SELECT T1.Key, COUNT(*) FROM T1 GROUP BY T1.Key";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValues(14L, 1L).build(),
          Row.withSchema(schema).addValues(15L, 1L).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// SESSION windowing (3-second gap) applied on top of a WITH subquery.
@Test
public void testWithQuerySix() {
  String sql =
      "WITH T1 AS (SELECT * FROM window_test_table_two) SELECT "
          + "COUNT(*) as field_count, "
          + "SESSION_START(\"INTERVAL 3 SECOND\") as window_start, "
          + "SESSION_END(\"INTERVAL 3 SECOND\") as window_end "
          + "FROM T1 "
          + "GROUP BY SESSION(ts, \"INTERVAL 3 SECOND\");";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema =
      Schema.builder()
          .addInt64Field("count_star")
          .addDateTimeField("field1")
          .addDateTimeField("field2")
          .build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema)
              .addValues(
                  2L,
                  new DateTime(2018, 7, 1, 21, 26, 12, ISOChronology.getInstanceUTC()),
                  new DateTime(2018, 7, 1, 21, 26, 12, ISOChronology.getInstanceUTC()))
              .build(),
          Row.withSchema(schema)
              .addValues(
                  2L,
                  new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()),
                  new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()))
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// UNNEST of an ARRAY<STRING> literal produces one row per element.
@Test
public void testUNNESTLiteral() {
  String sql = "SELECT * FROM UNNEST(ARRAY<STRING>['foo', 'bar']);";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema schema = Schema.builder().addStringField("str_field").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValues("foo").build(),
          Row.withSchema(schema).addValues("bar").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// UNNEST of an ARRAY<STRING> supplied as a named query parameter.
@Test
public void testUNNESTParameters() {
  String sql = "SELECT * FROM UNNEST(@p0);";
  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0",
          Value.createArrayValue(
              TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_STRING)),
              ImmutableList.of(Value.createStringValue("foo"), Value.createStringValue("bar"))));
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema schema = Schema.builder().addStringField("str_field").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValues("foo").build(),
          Row.withSchema(schema).addValues("bar").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// UNNEST of an ARRAY subquery expression; disabled, tracked by BEAM-9515.
@Test
@Ignore("BEAM-9515")
public void testUNNESTExpression() {
  String sql = "SELECT * FROM UNNEST(ARRAY(SELECT Value FROM KeyValue));";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema schema = Schema.builder().addStringField("str_field").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValues("KeyValue234").build(),
          Row.withSchema(schema).addValues("KeyValue235").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Aliased UNNEST: both `*` and the alias T1 project the element value.
@Test
public void testNamedUNNESTLiteral() {
  String sql = "SELECT *, T1 FROM UNNEST(ARRAY<STRING>['foo', 'bar']) AS T1";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema schema =
      Schema.builder().addStringField("str_field").addStringField("str2_field").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValues("foo", "foo").build(),
          Row.withSchema(schema).addValues("bar", "bar").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// UNNEST ... WITH OFFSET is expected to be unsupported at conversion time.
@Test
public void testNamedUNNESTLiteralOffset() {
  String sql = "SELECT x, p FROM UNNEST([3, 4]) AS x WITH OFFSET p";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  thrown.expect(UnsupportedOperationException.class);
  // The exception is raised while converting the rel node to a PCollection.
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
}

// UNNEST of an array-typed table column, one output row per array element.
@Test
public void testUnnestArrayColumn() {
  String sql =
      "SELECT p FROM table_with_array_for_unnest, UNNEST(table_with_array_for_unnest.int_array_col) as p";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema schema = Schema.builder().addInt64Field("int_field").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValue(14L).build(),
          Row.withSchema(schema).addValue(18L).build(),
          Row.withSchema(schema).addValue(22L).build(),
          Row.withSchema(schema).addValue(24L).build());
// (continuation of testUnnestArrayColumn)
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// STRING_AGG with the default comma delimiter; duplicates are kept.
@Test
public void testStringAggregation() {
  String sql =
      "SELECT STRING_AGG(fruit) AS string_agg"
          + " FROM UNNEST([\"apple\", \"pear\", \"banana\", \"pear\"]) AS fruit";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema schema = Schema.builder().addStringField("string_field").build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValue("apple,pear,banana,pear").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// LEFT JOIN against UNNEST of a correlated array column; disabled pending investigation.
@Test
@Ignore("Seeing exception in Beam, need further investigation on the cause of this failed query.")
public void testNamedUNNESTJoin() {
  String sql =
      "SELECT * "
          + "FROM table_with_array_for_unnest AS t1"
          + " LEFT JOIN UNNEST(t1.int_array_col) AS t2"
          + " on "
          + " t1.int_col = t2";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream).containsInAnyOrder();
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// JOIN on UNNEST of a struct's array field is expected to be unsupported.
@Test
public void testUnnestJoinStruct() {
  String sql =
      "SELECT b, x FROM UNNEST("
          + "[STRUCT(true AS b, [3, 5] AS arr), STRUCT(false AS b, [7, 9] AS arr)]) t "
          + "LEFT JOIN UNNEST(t.arr) x ON b";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  thrown.expect(UnsupportedOperationException.class);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
}

// JOIN of two UNNESTed array literals is expected to be unsupported.
@Test
public void testUnnestJoinLiteral() {
  String sql =
      "SELECT a, b "
          + "FROM UNNEST([1, 1, 2, 3, 5, 8, 13, NULL]) a "
          + "JOIN UNNEST([1, 2, 3, 5, 7, 11, 13, NULL]) b "
          + "ON a = b";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  thrown.expect(UnsupportedOperationException.class);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
}

// JOIN on UNNEST of an ARRAY subquery is expected to be unsupported.
@Test
public void testUnnestJoinSubquery() {
  String sql =
      "SELECT a, b "
          + "FROM UNNEST([1, 2, 3]) a "
          + "JOIN UNNEST(ARRAY(SELECT b FROM UNNEST([3, 2, 1]) b)) b "
          + "ON a = b";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  thrown.expect(UnsupportedOperationException.class);
  zetaSQLQueryPlanner.convertToBeamRel(sql);
}

// Searched CASE (no operand) where the ELSE branch is taken.
@Test
public void testCaseNoValue() {
  String sql = "SELECT CASE WHEN 1 > 2 THEN 'not possible' ELSE 'seems right' END";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(Schema.builder().addStringField("str_field").build())
              .addValue("seems right")
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Simple CASE with an operand; no WHEN matches so ELSE is taken.
@Test
public void testCaseWithValue() {
  String sql = "SELECT CASE 1 WHEN 2 THEN 'not possible' ELSE 'seems right' END";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(Schema.builder().addStringField("str_field").build())
              .addValue("seems right")
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Simple CASE where the second WHEN matches.
@Test
public void testCaseWithValueMultipleCases() {
  String sql =
      "SELECT CASE 2 WHEN 1 THEN 'not possible' WHEN 2 THEN 'seems right' ELSE 'also not"
          + " possible' END";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode =
      zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(Schema.builder().addStringField("str_field").build())
              .addValue("seems right")
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Simple CASE with no ELSE where a WHEN matches.
@Test
public void testCaseWithValueNoElse() {
  String sql = "SELECT CASE 2 WHEN 1 THEN 'not possible' WHEN 2 THEN 'seems right' END";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(Schema.builder().addStringField("str_field").build())
              .addValue("seems right")
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Searched CASE with no ELSE and no match yields NULL.
@Test
public void testCaseNoValueNoElseNoMatch() {
  String sql = "SELECT CASE WHEN 'abc' = '123' THEN 'not possible' END";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(Schema.builder().addNullableField("str_field", FieldType.STRING).build())
              .addValue(null)
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Simple CASE with no ELSE and no match yields NULL.
@Test
public void testCaseWithValueNoElseNoMatch() {
  String sql = "SELECT CASE 2 WHEN 1 THEN 'not possible' END";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(Schema.builder().addNullableField("str_field", FieldType.STRING).build())
              .addValue(null)
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Builds a DATE from string pieces via CONCAT/SUBSTR inside a CASE, then CASTs it.
@Test
public void testCastToDateWithCase() {
  String sql =
      "SELECT f_int, \n"
          + "CASE WHEN CHAR_LENGTH(TRIM(f_string)) = 8 \n"
          + " THEN CAST (CONCAT(\n"
          + " SUBSTR(TRIM(f_string), 1, 4) \n"
          + " , '-' \n"
          + " , SUBSTR(TRIM(f_string), 5, 2) \n"
          + " , '-' \n"
          + " , SUBSTR(TRIM(f_string), 7, 2)) AS DATE)\n"
          + " ELSE NULL\n"
          + "END \n"
          + "FROM table_for_case_when";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema resultType =
      Schema.builder()
          .addInt64Field("f_long")
          .addNullableField("f_date", FieldType.logicalType(SqlTypes.DATE))
          .build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(resultType).addValues(1L, LocalDate.parse("2018-10-18")).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// INTERSECT ALL respects multiplicities of common values.
@Test
public void testIntersectAll() {
  String sql =
      "SELECT Key FROM aggregate_test_table "
          + "INTERSECT ALL "
          + "SELECT Key FROM aggregate_test_table_two";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema resultType = Schema.builder().addInt64Field("field").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(resultType).addValues(1L).build(),
          Row.withSchema(resultType).addValues(2L).build(),
          Row.withSchema(resultType).addValues(2L).build(),
          Row.withSchema(resultType).addValues(2L).build(),
          Row.withSchema(resultType).addValues(3L).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
// INTERSECT DISTINCT keeps one copy of each common value.
@Test
public void testIntersectDistinct() {
  String sql =
      "SELECT Key FROM aggregate_test_table "
          + "INTERSECT DISTINCT "
          + "SELECT Key FROM aggregate_test_table_two";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema resultType = Schema.builder().addInt64Field("field").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(resultType).addValues(1L).build(),
          Row.withSchema(resultType).addValues(2L).build(),
          Row.withSchema(resultType).addValues(3L).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// EXCEPT ALL respects multiplicities when subtracting the right-hand side.
@Test
public void testExceptAll() {
  String sql =
      "SELECT Key FROM aggregate_test_table "
          + "EXCEPT ALL "
          + "SELECT Key FROM aggregate_test_table_two";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  Schema resultType = Schema.builder().addInt64Field("field").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(resultType).addValues(1L).build(),
          Row.withSchema(resultType).addValues(3L).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// NULL INTERSECT DISTINCT a non-null literal produces no rows.
@Test
public void testSelectNullIntersectDistinct() {
  String sql = "SELECT NULL INTERSECT DISTINCT SELECT 2";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  // Debug output of the inferred schema, kept from the original test.
  System.err.println("SCHEMA " + stream.getSchema());
  PAssert.that(stream).empty();
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// NULL INTERSECT ALL a non-null literal produces no rows.
@Test
public void testSelectNullIntersectAll() {
  String sql = "SELECT NULL INTERSECT ALL SELECT 2";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  // Debug output of the inferred schema, kept from the original test.
  System.err.println("SCHEMA " + stream.getSchema());
  PAssert.that(stream).empty();
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// NULL EXCEPT DISTINCT a non-null literal keeps the NULL row.
@Test
public void testSelectNullExceptDistinct() {
  String sql = "SELECT NULL EXCEPT DISTINCT SELECT 2";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream).containsInAnyOrder(Row.nullRow(stream.getSchema()));
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// NULL EXCEPT ALL a non-null literal keeps the NULL row.
@Test
public void testSelectNullExceptAll() {
  String sql = "SELECT NULL EXCEPT ALL SELECT 2";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream).containsInAnyOrder(Row.nullRow(stream.getSchema()));
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Scanning an empty table yields no rows.
@Test
public void testSelectFromEmptyTable() {
  String sql = "SELECT * FROM table_empty;";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  PAssert.that(stream).containsInAnyOrder();
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// STARTS_WITH with a matching prefix returns true.
@Test
public void testStartsWithString() {
  String sql = "SELECT STARTS_WITH('string1', 'stri')";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// STARTS_WITH with a NULL first argument returns NULL.
@Test
public void testStartsWithString2() {
  String sql = "SELECT STARTS_WITH(@p0, @p1)";
  ImmutableMap<String, Value> params =
      ImmutableMap.<String, Value>builder()
          .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
          .put("p1", Value.createStringValue(""))
          .build();
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// STARTS_WITH with both arguments NULL returns NULL.
@Test
public void testStartsWithString3() {
  String sql = "SELECT STARTS_WITH(@p0, @p1)";
  ImmutableMap<String, Value> params =
      ImmutableMap.<String, Value>builder()
          .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
          .put("p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
          .build();
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
  PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEndsWithString() { String sql = "SELECT STARTS_WITH('string1', 'ng0')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEndsWithString2() { String sql = "SELECT STARTS_WITH(@p0, @p1)"; ImmutableMap<String, Value> params = ImmutableMap.<String, Value>builder() .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING)) .put("p1", Value.createStringValue("")) .build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEndsWithString3() { String sql = "SELECT STARTS_WITH(@p0, @p1)"; ImmutableMap<String, Value> params = ImmutableMap.<String, Value>builder() .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING)) .put("p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING)) .build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = 
zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testConcatWithOneParameters() { String sql = "SELECT concat('abc')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testConcatWithTwoParameters() { String sql = "SELECT concat('abc', 'def')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abcdef").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testConcatWithThreeParameters() { String sql = "SELECT concat('abc', 'def', 'xyz')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); 
// (continuation of testConcatWithThreeParameters)
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abcdefxyz").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT with four arguments, including a bare space.
@Test
public void testConcatWithFourParameters() {
  String sql = "SELECT concat('abc', 'def', ' ', 'xyz')";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyz").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT with five arguments.
@Test
public void testConcatWithFiveParameters() {
  String sql = "SELECT concat('abc', 'def', ' ', 'xyz', 'kkk')";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyzkkk").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT with six arguments.
@Test
public void testConcatWithSixParameters() {
  String sql = "SELECT concat('abc', 'def', ' ', 'xyz', 'kkk', 'ttt')";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyzkkkttt").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT propagates NULL when its second argument is NULL.
@Test
public void testConcatWithNull1() {
  String sql = "SELECT CONCAT(@p0, @p1) AS ColA";
  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createStringValue(""),
          "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT of two NULL arguments is NULL.
@Test
public void testConcatWithNull2() {
  String sql = "SELECT CONCAT(@p0, @p1) AS ColA";
  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
          "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// A named INT64 parameter projected directly.
@Test
public void testNamedParameterQuery() {
  String sql = "SELECT @ColA AS ColA";
  ImmutableMap<String, Value> params = ImmutableMap.of("ColA", Value.createInt64Value(5));
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream =
      BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addInt64Field("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(5L).build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// An ARRAY-of-STRUCT literal becomes a row holding a list of nested rows.
@Test
public void testArrayStructLiteral() {
  String sql = "SELECT ARRAY<STRUCT<INT64, INT64>>[(11, 12)];";
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema innerSchema =
      Schema.of(Field.of("s", FieldType.INT64), Field.of("i", FieldType.INT64));
  final Schema schema =
      Schema.of(Field.of("field1", FieldType.array(FieldType.row(innerSchema))));
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema)
              .addValue(ImmutableList.of(Row.withSchema(innerSchema).addValues(11L, 12L).build()))
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// A STRUCT-typed named parameter maps to a nested row field.
@Test
public void testParameterStruct() {
  String sql = "SELECT @p as ColA";
  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p",
          Value.createStructValue(
              TypeFactory.createStructType(
                  ImmutableList.of(
                      new StructType.StructField(
                          "s", TypeFactory.createSimpleType(TypeKind.TYPE_STRING)),
                      new StructType.StructField(
                          "i", TypeFactory.createSimpleType(TypeKind.TYPE_INT64)))),
              ImmutableList.of(Value.createStringValue("foo"), Value.createInt64Value(1L))));
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema innerSchema =
      Schema.of(Field.of("s", FieldType.STRING), Field.of("i", FieldType.INT64));
  final Schema schema = Schema.of(Field.of("field1", FieldType.row(innerSchema)));
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema)
              .addValue(Row.withSchema(innerSchema).addValues("foo", 1L).build())
              .build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Field access through a nested STRUCT parameter (outer_struct.inner_struct.s).
@Test
public void testParameterStructNested() {
  String sql = "SELECT @outer_struct.inner_struct.s as ColA";
  StructType innerStructType =
      TypeFactory.createStructType(
          ImmutableList.of(
              new StructType.StructField(
                  "s", TypeFactory.createSimpleType(TypeKind.TYPE_STRING))));
  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "outer_struct",
          Value.createStructValue(
              TypeFactory.createStructType(
                  ImmutableList.of(new StructType.StructField("inner_struct", innerStructType))),
              ImmutableList.of(
                  Value.createStructValue(
                      innerStructType, ImmutableList.of(Value.createStringValue("foo"))))));
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("foo").build());
  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT of two named string parameters ("" + "A" == "A").
// NOTE: this method continues beyond the visible portion of the file.
@Test
public void testConcatNamedParameterQuery() {
  String sql = "SELECT CONCAT(@p0, @p1) AS ColA";
  ImmutableMap<String, Value> params =
      ImmutableMap.of("p0", Value.createStringValue(""), "p1", Value.createStringValue("A"));
  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("A").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testConcatPositionalParameterQuery() { String sql = "SELECT CONCAT(?, ?, ?) AS ColA"; ImmutableList<Value> params = ImmutableList.of( Value.createStringValue("a"), Value.createStringValue("b"), Value.createStringValue("c")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testReplace1() { String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue(""), "p1", Value.createStringValue(""), "p2", Value.createStringValue("a")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testReplace2() { String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("abc"), "p1", Value.createStringValue(""), "p2", Value.createStringValue("xyz")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> 
stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testReplace3() { String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue(""), "p1", Value.createStringValue(""), "p2", Value.createSimpleNullValue(TypeKind.TYPE_STRING)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testReplace4() { String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING), "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING), "p2", Value.createStringValue("")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testTrim1() { String sql = "SELECT trim(@p0)"; 
ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue(" a b c ")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("a b c").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testTrim2() { String sql = "SELECT trim(@p0, @p1)"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("xyz").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testTrim3() { String sql = "SELECT trim(@p0, @p1)"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING), "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build()); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testLTrim1() { String sql = "SELECT ltrim(@p0)"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue(" a b c ")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("a b c ").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testLTrim2() { String sql = "SELECT ltrim(@p0, @p1)"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("xyzab").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testLTrim3() { String sql = "SELECT ltrim(@p0, @p1)"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING), "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", 
FieldType.STRING).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testRTrim1() { String sql = "SELECT rtrim(@p0)"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue(" a b c ")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(" a b c").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testRTrim2() { String sql = "SELECT rtrim(@p0, @p1)"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abxyz").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testRTrim3() { String sql = "SELECT rtrim(@p0, @p1)"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING), "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = 
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("https: public void testCastBytesToString1() { String sql = "SELECT CAST(@p0 AS STRING)"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createBytesValue(ByteString.copyFromUtf8("`"))); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("`").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCastBytesToString2() { String sql = "SELECT CAST(b'b' AS STRING)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("b").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("https: public void testCastBytesToStringFromTable() { String sql = "SELECT CAST(bytes_col AS STRING) FROM table_all_types"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema 
schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("1").build(), Row.withSchema(schema).addValues("2").build(), Row.withSchema(schema).addValues("3").build(), Row.withSchema(schema).addValues("4").build(), Row.withSchema(schema).addValues("5").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCastStringToTimestamp() { String sql = "SELECT CAST('2019-01-15 13:21:03' AS TIMESTAMP)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addDateTimeField("field_1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues(parseTimestampWithUTCTimeZone("2019-01-15 13:21:03")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCastStringToTimestampWithDefaultTimezoneSet() { String sql = "SELECT CAST('2014-12-01 12:34:56+07:30' AS TIMESTAMP)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); zetaSQLQueryPlanner.setDefaultTimezone("Pacific/Chatham"); pipeline .getOptions() .as(BeamSqlPipelineOptions.class) .setZetaSqlDefaultTimezone("Pacific/Chatham"); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDateTimeField("field_1").build()) .addValues(parseTimestampWithUTCTimeZone("2014-12-01 05:04:56")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("https: public void testCastBetweenTimeAndString() { String sql = "SELECT CAST(s1 as 
TIME) as t2, CAST(t1 as STRING) as s2 FROM " + "(SELECT '12:34:56.123456' as s1, TIME '12:34:56.123456' as t1)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addLogicalTypeField("t2", SqlTypes.TIME) .addStringField("s2") .build()) .addValues(LocalTime.of(12, 34, 56, 123456000), "12:34:56.123456") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCastStringToString() { String sql = "SELECT CAST(@p0 AS STRING)"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue("")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCastStringToInt64() { String sql = "SELECT CAST(@p0 AS INT64)"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue("123")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(123L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } 
/** SELECT of a string literal constant. */
  @Test
  public void testSelectConstant() {
    String sql = "SELECT 'hi'";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    final Schema schema = Schema.builder().addStringField("field1").build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("hi").build());

    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** Reads a row-valued column from a table whose schema also contains a map-typed field. */
  @Test
  @Ignore("[BEAM-8593] ZetaSQL does not support Map type")
  public void testSelectFromTableWithMap() {
    String sql = "SELECT row_field FROM table_with_map";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    Schema rowSchema = Schema.builder().addInt64Field("row_id").addStringField("data").build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(Schema.builder().addRowField("row_field", rowSchema).build())
                .addValues(Row.withSchema(rowSchema).addValues(1L, "data1").build())
                .build());

    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** Sub-queries in GROUP BY are rejected during planning with a clear error. */
  @Test
  public void testSubQuery() {
    String sql = "select sum(Key) from KeyValue\n" + "group by (select Key)";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    thrown.expect(UnsupportedOperationException.class);
    thrown.expectMessage("Does not support sub-queries");
    zetaSQLQueryPlanner.convertToBeamRel(sql);
  }

  /** SUBSTR with a negative start position counts back from the end of the string. */
  @Test
  public void testSubstr() {
    String sql = "SELECT substr(@p0, @p1, @p2)";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0", Value.createStringValue("abc"),
            "p1", Value.createInt64Value(-2L),
            "p2", Value.createInt64Value(1L));

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode =
zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("b").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSubstrWithLargeValueExpectException() { String sql = "SELECT substr(@p0, @p1, @p2)"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("abc"), "p1", Value.createInt64Value(Integer.MAX_VALUE + 1L), "p2", Value.createInt64Value(Integer.MIN_VALUE - 1L)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); thrown.expect(RuntimeException.class); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectAll() { String sql = "SELECT ALL Key, Value FROM KeyValue;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, "KeyValue234").build(), Row.withSchema(schema).addValues(15L, "KeyValue235").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectDistinct() { String sql = "SELECT DISTINCT Key FROM aggregate_test_table;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = 
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L).build(), Row.withSchema(schema).addValues(2L).build(), Row.withSchema(schema).addValues(3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectDistinct2() { String sql = "SELECT DISTINCT val.BYTES\n" + "from (select b\"BYTES\" BYTES union all\n" + " select b\"bytes\" union all\n" + " select b\"ByTeS\") val"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addByteArrayField("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("BYTES".getBytes(StandardCharsets.UTF_8)).build(), Row.withSchema(schema).addValues("ByTeS".getBytes(StandardCharsets.UTF_8)).build(), Row.withSchema(schema).addValues("bytes".getBytes(StandardCharsets.UTF_8)).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectBytes() { String sql = "SELECT b\"ByTes\""; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addByteArrayField("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("ByTes".getBytes(StandardCharsets.UTF_8)).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectExcept() { String sql = "SELECT * EXCEPT (Key, ts) FROM KeyValue;"; 
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("KeyValue234").build(), Row.withSchema(schema).addValues("KeyValue235").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectReplace() { String sql = "WITH orders AS\n" + " (SELECT 5 as order_id,\n" + " \"sprocket\" as item_name,\n" + " 200 as quantity)\n" + "SELECT * REPLACE (\"widget\" AS item_name)\n" + "FROM orders"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addStringField("field2") .addInt64Field("field3") .build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues(5L, "widget", 200L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testUnionAllBasic() { String sql = "SELECT row_id FROM table_all_types UNION ALL SELECT row_id FROM table_all_types_2"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue(1L).build(), Row.withSchema(schema).addValue(2L).build(), Row.withSchema(schema).addValue(3L).build(), Row.withSchema(schema).addValue(4L).build(), 
Row.withSchema(schema).addValue(5L).build(), Row.withSchema(schema).addValue(6L).build(), Row.withSchema(schema).addValue(7L).build(), Row.withSchema(schema).addValue(8L).build(), Row.withSchema(schema).addValue(9L).build(), Row.withSchema(schema).addValue(10L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testAVGWithLongInput() { String sql = "SELECT AVG(f_int_1) FROM aggregate_test_table;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(RuntimeException.class); thrown.expectMessage( "AVG(LONG) is not supported. You might want to use AVG(CAST(expression AS DOUBLE)."); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testReverseString() { String sql = "SELECT REVERSE('abc');"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field2").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("cba").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCharLength() { String sql = "SELECT CHAR_LENGTH('abc');"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCharLengthNull() { String sql = "SELECT CHAR_LENGTH(@p0);"; ImmutableMap<String, Value> params = 
ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field", FieldType.INT64).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testTumbleAsTVF() { String sql = "select Key, Value, ts, window_start, window_end from " + "TUMBLE((select * from KeyValue), descriptor(ts), 'INTERVAL 1 SECOND')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); ImmutableMap<String, Value> params = ImmutableMap.of(); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("Key") .addStringField("Value") .addDateTimeField("ts") .addDateTimeField("window_start") .addDateTimeField("window_end") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 14L, "KeyValue234", DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:06"), DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:06"), DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07")) .build(), Row.withSchema(schema) .addValues( 15L, "KeyValue235", DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07"), DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07"), DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01T21:26:08")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIsNullTrueFalse() { String sql = "WITH Src AS (\n" 
// Continuation of the "WITH Src" query string assembled on the previous source line.
            + " SELECT NULL as data UNION ALL\n"
            + " SELECT TRUE UNION ALL\n"
            + " SELECT FALSE\n"
            + ")\n"
            + "SELECT\n"
            + " data IS NULL as isnull,\n"
            + " data IS NOT NULL as isnotnull,\n"
            + " data IS TRUE as istrue,\n"
            + " data IS NOT TRUE as isnottrue,\n"
            + " data IS FALSE as isfalse,\n"
            + " data IS NOT FALSE as isnotfalse\n"
            + "FROM Src\n";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    ImmutableMap<String, Value> params = ImmutableMap.of();
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    final Schema schema =
        Schema.builder()
            .addField("isnull", FieldType.BOOLEAN)
            .addField("isnotnull", FieldType.BOOLEAN)
            .addField("istrue", FieldType.BOOLEAN)
            .addField("isnottrue", FieldType.BOOLEAN)
            .addField("isfalse", FieldType.BOOLEAN)
            .addField("isnotfalse", FieldType.BOOLEAN)
            .build();

    // Source rows are NULL, TRUE, FALSE — each checked against all six IS predicates.
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema).addValues(true, false, false, true, false, true).build(),
            Row.withSchema(schema).addValues(false, true, true, false, false, true).build(),
            Row.withSchema(schema).addValues(false, true, false, true, true, false).build());

    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** BIT_OR aggregation of row_id grouped by bool_col. */
  @Test
  public void testZetaSQLBitOr() {
    String sql = "SELECT BIT_OR(row_id) FROM table_all_types GROUP BY bool_col";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    final Schema schema = Schema.builder().addInt64Field("field1").build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema).addValues(3L).build(),
            Row.withSchema(schema).addValue(7L).build());

    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** BIT_AND aggregation of row_id grouped by bool_col. */
  @Test
  // NOTE(review): the @Ignore reason originally ended with a tracking URL that was truncated in
  // this source, leaving a string literal broken across two lines; closed it so the file parses.
  // Restore the full link from version control.
  @Ignore("NULL values don't work correctly. (URL truncated in source)")
  public void testZetaSQLBitAnd() {
    String sql = "SELECT BIT_AND(row_id) FROM table_all_types GROUP BY bool_col";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    final Schema schema = Schema.builder().addInt64Field("field1").build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema).addValue(1L).build(),
            Row.withSchema(schema).addValue(0L).build());

    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** A bare table name resolves against the test catalog. */
  @Test
  public void testSimpleTableName() {
    String sql = "SELECT Key FROM KeyValue";

    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

    Schema singleField = Schema.builder().addInt64Field("field1").build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(singleField).addValues(14L).build(),
            Row.withSchema(singleField).addValues(15L).build());

    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }
}
/** ZetaSQL dialect compliance tests, executed through the Beam ZetaSQL query planner. */
class ZetaSqlDialectSpecTest extends ZetaSqlTestBase {
  @Rule public transient TestPipeline pipeline = TestPipeline.create();
  @Rule public ExpectedException thrown = ExpectedException.none();

  @Before
  public void setUp() {
    // Sets up the shared planner config and test tables provided by ZetaSqlTestBase.
    initialize();
  }

  /** CASTs of literals to INT64 / TIMESTAMP / STRING survive planning and execution. */
  @Test
  public void testSimpleSelect() {
    String sql =
        "SELECT CAST (1243 as INT64), "
            + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
            + "CAST ('string' as STRING);";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema =
        Schema.builder()
            .addInt64Field("field1")
            .addDateTimeField("field2")
            .addStringField("field3")
            .build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema)
                .addValues(
                    1243L,
                    new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
                    "string")
                .build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** Same query, selecting the ZetaSQL planner via SqlTransform.withQueryPlannerClass. */
  @Test
  public void testWithQueryPlannerClass() {
    String sql =
        "SELECT CAST (1243 as INT64), "
            + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
            + "CAST ('string' as STRING);";
    PCollection<Row> stream =
        pipeline.apply(SqlTransform.query(sql).withQueryPlannerClass(ZetaSQLQueryPlanner.class));
    final Schema schema =
        Schema.builder()
            .addInt64Field("field1")
            .addDateTimeField("field2")
            .addStringField("field3")
            .build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema)
                .addValues(
                    1243L,
                    new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
                    "string")
                .build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** Same query, planner selected through the BeamSqlPipelineOptions.plannerName option. */
  @Test
  public void testPlannerNamePipelineOption() {
    pipeline
        .getOptions()
        .as(BeamSqlPipelineOptions.class)
        .setPlannerName("org.apache.beam.sdk.extensions.sql.zetasql.ZetaSQLQueryPlanner");
    String sql =
        "SELECT CAST (1243 as INT64), "
            + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
            + "CAST ('string' as STRING);";
    PCollection<Row> stream = pipeline.apply(SqlTransform.query(sql));
    final Schema schema =
        Schema.builder()
            .addInt64Field("field1")
            .addDateTimeField("field2")
            .addStringField("field3")
            .build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema)
                .addValues(
                    1243L,
                    new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
                    "string")
                .build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** A BYTES literal (b'abc') comes back as the corresponding byte array. */
  @Test
  public void testByteLiterals() {
    String sql = "SELECT b'abc'";
    byte[] byteString = new byte[] {'a', 'b', 'c'};
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("ColA", FieldType.BYTES).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(byteString).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** A non-null BYTES named parameter tested with IS NULL yields false. */
  @Test
  public void testByteString() {
    String sql = "SELECT @p0 IS NULL AS ColA";
    ByteString byteString = ByteString.copyFrom(new byte[] {0x62});
    ImmutableMap<String, Value> params =
        ImmutableMap.<String, Value>builder().put("p0", Value.createBytesValue(byteString)).build();
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("ColA", FieldType.BOOLEAN).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** A floating-point literal maps to DOUBLE. */
  @Test
  public void testFloat() {
    String sql = "SELECT 3.0";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("ColA", FieldType.DOUBLE).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(3.0).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** Escaped quotes and an escaped newline in a string literal round-trip unchanged. */
  @Test
  public void testStringLiterals() {
    String sql = "SELECT '\"America/Los_Angeles\"\\n'";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("ColA", FieldType.STRING).build();
    PAssert.that(stream)
        .containsInAnyOrder(Row.withSchema(schema).addValues("\"America/Los_Angeles\"\n").build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** A positional (?) STRING parameter is passed through unchanged. */
  @Test
  public void testParameterString() {
    String sql = "SELECT ?";
    ImmutableList<Value> params = ImmutableList.of(Value.createStringValue("abc\n"));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("ColA", FieldType.STRING).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc\n").build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** NULL = TRUE should yield NULL (equality with a NULL operand). */
  @Test
  @Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel")
  public void testEQ1() {
    String sql = "SELECT @p0 = @p1 AS ColA";
    ImmutableMap<String, Value> params =
        // (the builder expression continues on the next line)
        ImmutableMap.<String,
Value>builder() .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_BOOL)) .put("p1", Value.createBoolValue(true)) .build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore( "Does not support inf/-inf/nan in double/float literals because double/float literals are" + " converted to BigDecimal in Calcite codegen.") public void testEQ2() { String sql = "SELECT @p0 = @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.<String, Value>builder() .put("p0", Value.createDoubleValue(0)) .put("p1", Value.createDoubleValue(Double.POSITIVE_INFINITY)) .build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addBooleanField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel") public void testEQ3() { String sql = "SELECT @p0 = @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.<String, Value>builder() .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_DOUBLE)) .put("p1", Value.createDoubleValue(3.14)) .build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = 
zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEQ4() { String sql = "SELECT @p0 = @p1 AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.<String, Value>builder() .put("p0", Value.createBytesValue(ByteString.copyFromUtf8("hello"))) .put("p1", Value.createBytesValue(ByteString.copyFromUtf8("hello"))) .build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEQ5() { String sql = "SELECT b'hello' = b'hello' AS ColA"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEQ6() { String sql = "SELECT ? = ? 
AS ColA"; ImmutableList<Value> params = ImmutableList.of(Value.createInt64Value(4L), Value.createInt64Value(5L)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIsNotNull1() { String sql = "SELECT @p0 IS NOT NULL AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIsNotNull2() { String sql = "SELECT @p0 IS NOT NULL AS ColA"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createNullValue( TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64)))); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); 
    // (closes testIsNotNull2, begun on the previous line)
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** IS NOT NULL on a NULL STRUCT parameter is false. */
  @Test
  public void testIsNotNull3() {
    String sql = "SELECT @p0 IS NOT NULL AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0",
            Value.createNullValue(
                TypeFactory.createStructType(
                    Arrays.asList(
                        new StructField(
                            "a", TypeFactory.createSimpleType(TypeKind.TYPE_STRING))))));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** IF with a true condition and named parameters picks the second argument. */
  @Test
  public void testIfBasic() {
    String sql = "SELECT IF(@p0, @p1, @p2) AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0",
            Value.createBoolValue(true),
            "p1",
            Value.createInt64Value(1),
            "p2",
            Value.createInt64Value(2));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.INT64).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(1L).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** Same IF test, but with positional (?) parameters. */
  @Test
  public void testIfPositional() {
    String sql = "SELECT IF(?, ?, ?) AS ColA";
    ImmutableList<Value> params =
        ImmutableList.of(
            Value.createBoolValue(true), Value.createInt64Value(1), Value.createInt64Value(2));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.INT64).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(1L).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** COALESCE skips a NULL first argument and returns the first non-null one. */
  @Test
  public void testCoalesceBasic() {
    String sql = "SELECT COALESCE(@p0, @p1, @p2) AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0",
            Value.createSimpleNullValue(TypeKind.TYPE_STRING),
            "p1",
            Value.createStringValue("yay"),
            "p2",
            Value.createStringValue("nay"));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("yay").build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** COALESCE with a single NULL argument returns NULL. */
  @Test
  public void testCoalesceSingleArgument() {
    String sql = "SELECT COALESCE(@p0) AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_INT64));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema =
        Schema.builder().addNullableField("field1", FieldType.array(FieldType.INT64)).build();
    // (the PAssert and pipeline.run() for this method follow on the next line)
    // (closes testCoalesceSingleArgument, begun on the previous line)
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** COALESCE over two NULL ARRAY<INT64> parameters returns NULL. */
  @Test
  public void testCoalesceNullArray() {
    String sql = "SELECT COALESCE(@p0, @p1) AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0",
            Value.createNullValue(
                TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))),
            "p1",
            Value.createNullValue(
                TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema =
        Schema.builder().addNullableField("field1", FieldType.array(FieldType.INT64)).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** NULLIF coercing INT64 against a NULL DOUBLE parameter should return 3.0. */
  @Test
  @Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel")
  public void testNullIfCoercion() {
    String sql = "SELECT NULLIF(@p0, @p1) AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0",
            Value.createInt64Value(3L),
            "p1",
            Value.createSimpleNullValue(TypeKind.TYPE_DOUBLE));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.DOUBLE).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(3.0).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** COALESCE(NULL, STRUCT(...)) returns the struct, mapped to a nested Row. */
  @Test
  public void testCoalesceNullStruct() {
    String sql = "SELECT COALESCE(NULL, STRUCT(\"a\" AS s, -33 AS i))";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema innerSchema =
        Schema.of(Field.of("s", FieldType.STRING), Field.of("i", FieldType.INT64));
    final Schema schema =
        Schema.builder().addNullableField("field1", FieldType.row(innerSchema)).build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema)
                .addValue(Row.withSchema(innerSchema).addValues("a", -33L).build())
                .build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** IF with a false condition picks the TIMESTAMP in the third argument. */
  @Test
  public void testIfTimestamp() {
    String sql = "SELECT IF(@p0, @p1, @p2) AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0",
            Value.createBoolValue(false),
            "p1",
            Value.createTimestampValueFromUnixMicros(0),
            "p2",
            Value.createTimestampValueFromUnixMicros(
                DateTime.parse("2019-01-01T00:00:00Z").getMillis() * 1000));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", DATETIME).build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema).addValues(DateTime.parse("2019-01-01T00:00:00Z")).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** Array construction from subquery columns, in a caller-chosen order. */
  @Test
  @Ignore("$make_array is not implemented")
  public void testMakeArray() {
    String sql = "SELECT [s3, s1, s2] FROM (SELECT \"foo\" AS s1, \"bar\" AS s2, \"baz\" AS s3);";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    // (the schema expression continues on the next line)
    final Schema schema =
        // (continues the schema assignment of testMakeArray, begun on the previous line)
        Schema.builder().addNullableField("field1", FieldType.array(FieldType.STRING)).build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema).addValue(ImmutableList.of("baz", "foo", "bar")).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** NULLIF with equal arguments returns NULL. */
  @Test
  public void testNullIfPositive() {
    String sql = "SELECT NULLIF(@p0, @p1) AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0", Value.createStringValue("null"), "p1", Value.createStringValue("null"));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** NULLIF with different arguments returns the first argument. */
  @Test
  public void testNullIfNegative() {
    String sql = "SELECT NULLIF(@p0, @p1) AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0", Value.createStringValue("foo"), "p1", Value.createStringValue("null"));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("foo").build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** IFNULL with a non-null first argument returns it. */
  @Test
  public void testIfNullPositive() {
    String sql = "SELECT IFNULL(@p0, @p1) AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0", Value.createStringValue("foo"), "p1", Value.createStringValue("default"));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("foo").build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** IFNULL with a NULL first argument falls back to the second. */
  @Test
  public void testIfNullNegative() {
    String sql = "SELECT IFNULL(@p0, @p1) AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0",
            Value.createSimpleNullValue(TypeKind.TYPE_STRING),
            "p1",
            Value.createStringValue("yay"));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("yay").build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** An empty ARRAY<INT64> parameter comes back as an empty list. */
  @Test
  public void testEmptyArrayParameter() {
    String sql = "SELECT @p0 AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0",
            Value.createArrayValue(
                TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64)),
                ImmutableList.of()));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addArrayField("field1", FieldType.INT64).build();
    PAssert.that(stream)
        .containsInAnyOrder(Row.withSchema(schema).addValue(ImmutableList.of()).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** An empty typed array literal comes back as an empty list. */
  @Test
  public void testEmptyArrayLiteral() {
    String sql = "SELECT ARRAY<STRING>[];";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addArrayField("field1", FieldType.STRING).build();
    PAssert.that(stream)
        .containsInAnyOrder(Row.withSchema(schema).addValue(ImmutableList.of()).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** LIKE with an escaped % in the pattern matches a literal percent sign. */
  @Test
  public void testLike1() {
    String sql = "SELECT @p0 LIKE @p1 AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0", Value.createStringValue("ab%"), "p1", Value.createStringValue("ab\\%"));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** LIKE against a NULL pattern should yield NULL. */
  @Test
  @Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel")
  public void testLikeNullPattern() {
    String sql = "SELECT @p0 LIKE @p1 AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0",
            Value.createStringValue("ab%"),
            "p1",
            Value.createSimpleNullValue(TypeKind.TYPE_STRING));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    // (the schema expression continues on the next line)
    final Schema schema =
        // (continues the schema assignment of testLikeNullPattern, begun on the previous line)
        Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
    PAssert.that(stream)
        .containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** A backslash before a non-special character still matches that character. */
  @Test
  public void testLikeAllowsEscapingNonSpecialCharacter() {
    String sql = "SELECT @p0 LIKE @p1 AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of("p0", Value.createStringValue("ab"), "p1", Value.createStringValue("\\ab"));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** A doubled backslash in the pattern matches a literal backslash. */
  @Test
  public void testLikeAllowsEscapingBackslash() {
    String sql = "SELECT @p0 LIKE @p1 AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0", Value.createStringValue("a\\c"), "p1", Value.createStringValue("a\\\\c"));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** LIKE over BYTES operands with _ and % wildcards. */
  @Test
  public void testLikeBytes() {
    String sql = "SELECT @p0 LIKE @p1 AS ColA";
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0",
            Value.createBytesValue(ByteString.copyFromUtf8("abcd")),
            "p1",
            Value.createBytesValue(ByteString.copyFromUtf8("__%")));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** MOD(4, 2) evaluates to 0. */
  @Test
  public void testMod() {
    String sql = "SELECT MOD(4, 2)";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addInt64Field("field1").build();
    PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(0L).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** UNION ALL of two identical single-row selects keeps both rows. */
  @Test
  public void testSimpleUnionAll() {
    String sql =
        "SELECT CAST (1243 as INT64), "
            + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
            + "CAST ('string' as STRING) "
            + " UNION ALL "
            + " SELECT CAST (1243 as INT64), "
            + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
            + "CAST ('string' as STRING);";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema =
        Schema.builder()
            .addInt64Field("field1")
            .addDateTimeField("field2")
            .addStringField("field3")
            .build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema)
                .addValues(
                    1243L,
                    new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
                    "string")
                .build(),
            Row.withSchema(schema)
                .addValues(
                    // (the second expected row continues on the next line)
                    1243L,
                    // (continues the second expected row of testSimpleUnionAll)
                    new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
                    "string")
                .build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** A chained three-way UNION ALL produces all three rows. */
  @Test
  public void testThreeWayUnionAll() {
    String sql = "SELECT a FROM (SELECT 1 a UNION ALL SELECT 2 UNION ALL SELECT 3)";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema = Schema.builder().addInt64Field("field1").build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema).addValues(1L).build(),
            Row.withSchema(schema).addValues(2L).build(),
            Row.withSchema(schema).addValues(3L).build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** UNION DISTINCT of two identical single-row selects deduplicates to one row. */
  @Test
  public void testSimpleUnionDISTINCT() {
    String sql =
        "SELECT CAST (1243 as INT64), "
            + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
            + "CAST ('string' as STRING) "
            + " UNION DISTINCT "
            + " SELECT CAST (1243 as INT64), "
            + "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
            + "CAST ('string' as STRING);";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    final Schema schema =
        Schema.builder()
            .addInt64Field("field1")
            .addDateTimeField("field2")
            .addStringField("field3")
            .build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema)
                .addValues(
                    1243L,
                    new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
                    "string")
                .build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** INNER JOIN on a compound condition over the KeyValue/BigTable test tables. */
  @Test
  public void testZetaSQLInnerJoin() {
    String sql =
        "SELECT t1.Key "
            + "FROM KeyValue AS t1"
            + " INNER JOIN BigTable AS t2"
            + " on "
            + " t1.Key = t2.RowKey AND t1.ts = t2.ts";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(Schema.builder().addInt64Field("field1").build())
                .addValues(15L)
                .build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** INNER JOIN written with the USING(ts) shorthand. */
  @Test
  public void testZetaSQLInnerJoinWithUsing() {
    String sql =
        "SELECT t1.Key " + "FROM KeyValue AS t1" + " INNER JOIN BigTable AS t2 USING(ts)";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(Schema.builder().addInt64Field("field1").build())
                .addValues(15L)
                .build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** Same join with the condition operands swapped, selecting from the right table. */
  @Test
  public void testZetaSQLInnerJoinTwo() {
    String sql =
        "SELECT t2.RowKey "
            + "FROM KeyValue AS t1"
            + " INNER JOIN BigTable AS t2"
            + " on "
            + " t2.RowKey = t1.Key AND t2.ts = t1.ts";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(Schema.builder().addInt64Field("field1").build())
                .addValues(15L)
                .build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** LEFT JOIN: unmatched left rows get nulls for the right-side columns. */
  @Test
  public void testZetaSQLLeftOuterJoin() {
    String sql =
        "SELECT * "
            + "FROM KeyValue AS t1"
            + " LEFT JOIN BigTable AS t2"
            + " on "
            + " t1.Key = t2.RowKey";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    // (the assignment continues on the next line)
    BeamRelNode beamRelNode =
// (Continuation of testZetaSQLLeftOuterJoin: two schemas distinguish matched rows from rows padded with nulls.)
zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schemaOne = Schema.builder() .addInt64Field("field1") .addStringField("field2") .addDateTimeField("field3") .addNullableField("field4", FieldType.INT64) .addNullableField("field5", FieldType.STRING) .addNullableField("field6", DATETIME) .build(); final Schema schemaTwo = Schema.builder() .addInt64Field("field1") .addStringField("field2") .addDateTimeField("field3") .addInt64Field("field4") .addStringField("field5") .addDateTimeField("field6") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schemaOne) .addValues( 14L, "KeyValue234", new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()), null, null, null) .build(), Row.withSchema(schemaTwo) .addValues( 15L, "KeyValue235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()), 15L, "BigTable235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC())) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// RIGHT JOIN: unmatched right-side rows appear with nulls in the left-side columns (assertions continue on next line).
@Test public void testZetaSQLRightOuterJoin() { String sql = "SELECT * " + "FROM KeyValue AS t1" + " RIGHT JOIN BigTable AS t2" + " on " + " t1.Key = t2.RowKey"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schemaOne = Schema.builder() .addNullableField("field1", FieldType.INT64) .addNullableField("field2", FieldType.STRING) .addNullableField("field3", DATETIME) .addInt64Field("field4") .addStringField("field5") .addDateTimeField("field6") .build(); final Schema schemaTwo = Schema.builder() .addInt64Field("field1") .addStringField("field2") .addDateTimeField("field3") .addInt64Field("field4") .addStringField("field5") .addDateTimeField("field6") .build(); PAssert.that(stream) 
// (Continuation of testZetaSQLRightOuterJoin's expected-row assertions.)
.containsInAnyOrder( Row.withSchema(schemaOne) .addValues( null, null, null, 16L, "BigTable236", new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC())) .build(), Row.withSchema(schemaTwo) .addValues( 15L, "KeyValue235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()), 15L, "BigTable235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC())) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// FULL JOIN: expects left-only, right-only, and matched rows; three schemas capture the three nullability shapes.
@Test public void testZetaSQLFullOuterJoin() { String sql = "SELECT * " + "FROM KeyValue AS t1" + " FULL JOIN BigTable AS t2" + " on " + " t1.Key = t2.RowKey"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schemaOne = Schema.builder() .addNullableField("field1", FieldType.INT64) .addNullableField("field2", FieldType.STRING) .addNullableField("field3", DATETIME) .addInt64Field("field4") .addStringField("field5") .addDateTimeField("field6") .build(); final Schema schemaTwo = Schema.builder() .addInt64Field("field1") .addStringField("field2") .addDateTimeField("field3") .addInt64Field("field4") .addStringField("field5") .addDateTimeField("field6") .build(); final Schema schemaThree = Schema.builder() .addInt64Field("field1") .addStringField("field2") .addDateTimeField("field3") .addNullableField("field4", FieldType.INT64) .addNullableField("field5", FieldType.STRING) .addNullableField("field6", DATETIME) .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schemaOne) .addValues( null, null, null, 16L, "BigTable236", new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC())) .build(), Row.withSchema(schemaTwo) .addValues( 15L, "KeyValue235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()), 15L, "BigTable235", new DateTime(2018, 7, 1, 21, 26, 7, 
// (Continuation of testZetaSQLFullOuterJoin: the matched row and the left-only row padded with nulls.)
ISOChronology.getInstanceUTC())) .build(), Row.withSchema(schemaThree) .addValues( 14L, "KeyValue234", new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()), null, null, null) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// Disabled: non-equi join condition (t1.Key + t2.RowKey = 30) is not supported by BeamSQL.
@Test @Ignore("BeamSQL only supports equal join") public void testZetaSQLFullOuterJoinTwo() { String sql = "SELECT * " + "FROM KeyValue AS t1" + " FULL JOIN BigTable AS t2" + " on " + " t1.Key + t2.RowKey = 30"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// A constant-false join condition must fail at expansion time with UnsupportedOperationException.
@Test public void testZetaSQLFullOuterJoinFalse() { String sql = "SELECT * FROM KeyValue AS t1 FULL JOIN BigTable AS t2 ON false"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); thrown.expect(UnsupportedOperationException.class); BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); }
// Chained three-way INNER JOIN across KeyValue, BigTable, and Spanner on matching keys.
@Test public void testZetaSQLThreeWayInnerJoin() { String sql = "SELECT t3.Value, t2.Value, t1.Value, t1.Key, t3.ColId FROM KeyValue as t1 " + "JOIN BigTable as t2 " + "ON (t1.Key = t2.RowKey) " + "JOIN Spanner as t3 " + "ON (t3.ColId = t1.Key)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addStringField("t3.Value") .addStringField("t2.Value") .addStringField("t1.Value") .addInt64Field("t1.Key") .addInt64Field("t3.ColId") .build()) .addValues("Spanner235", "BigTable235", "KeyValue235", 15L, 15L) .build()); 
// (Continuation of testZetaSQLThreeWayInnerJoin: execute the pipeline.)
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// Self-join on Spanner combined with a WHERE filter on the join key.
@Test public void testZetaSQLTableJoinOnItselfWithFiltering() { String sql = "SELECT * FROM Spanner as t1 " + "JOIN Spanner as t2 " + "ON (t1.ColId = t2.ColId) WHERE t1.ColId = 17"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addInt64Field("field1") .addStringField("field2") .addInt64Field("field3") .addStringField("field4") .build()) .addValues(17L, "Spanner237", 17L, "Spanner237") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// SELECT over a constant subquery; also asserts the output schema preserves the subquery's column aliases.
@Test public void testZetaSQLSelectFromSelect() { String sql = "SELECT * FROM (SELECT \"apple\" AS fruit, \"carrot\" AS vegetable);"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues("apple", "carrot").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); Schema outputSchema = stream.getSchema(); Assert.assertEquals(2, outputSchema.getFieldCount()); Assert.assertEquals("fruit", outputSchema.getField(0).getName()); Assert.assertEquals("vegetable", outputSchema.getField(1).getName()); }
// Plain projection from the KeyValue test table (assertions continue on the next source line).
@Test public void testZetaSQLSelectFromTable() { String sql = "SELECT Key, Value FROM KeyValue;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> 
// (Continuation of testZetaSQLSelectFromTable: both KeyValue rows are expected.)
stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, "KeyValue234").build(), Row.withSchema(schema).addValues(15L, "KeyValue235").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// LIMIT 2 over a two-row table returns both rows.
@Test public void testZetaSQLSelectFromTableLimit() { String sql = "SELECT Key, Value FROM KeyValue LIMIT 2;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, "KeyValue234").build(), Row.withSchema(schema).addValues(15L, "KeyValue235").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// LIMIT 0 returns an empty result set.
@Test public void testZetaSQLSelectFromTableLimit0() { String sql = "SELECT Key, Value FROM KeyValue LIMIT 0;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream).containsInAnyOrder(); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// A NULL LIMIT query parameter must be rejected during planning (message continues on the next source line).
@Test public void testZetaSQLSelectNullLimitParam() { String sql = "SELECT Key, Value FROM KeyValue LIMIT @lmt;"; ImmutableMap<String, Value> params = ImmutableMap.of( "lmt", Value.createNullValue(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(RuntimeException.class); thrown.expectMessage("Limit requires 
non-null count and offset"); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); }
// A NULL OFFSET query parameter must be rejected during planning with the same message.
@Test public void testZetaSQLSelectNullOffsetParam() { String sql = "SELECT Key, Value FROM KeyValue LIMIT 1 OFFSET @lmt;"; ImmutableMap<String, Value> params = ImmutableMap.of( "lmt", Value.createNullValue(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(RuntimeException.class); thrown.expectMessage("Limit requires non-null count and offset"); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); }
// ORDER BY x LIMIT 1 over a constant union: the smallest row (0, 0) is expected.
@Test public void testZetaSQLSelectFromTableOrderLimit() { String sql = "SELECT x, y FROM (SELECT 1 as x, 0 as y UNION ALL SELECT 0, 0 " + "UNION ALL SELECT 1, 0 UNION ALL SELECT 1, 1) ORDER BY x LIMIT 1" ; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(0L, 0L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// LIMIT with OFFSET inside a subquery: COUNT of the remaining rows is 2.
@Test public void testZetaSQLSelectFromTableLimitOffset() { String sql = "SELECT COUNT(a) FROM (\n" + "SELECT a FROM (SELECT 1 a UNION ALL SELECT 2 UNION ALL SELECT 3) LIMIT 3 OFFSET 1);"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L).build()); 
// (Continuation of testZetaSQLSelectFromTableLimitOffset: execute the pipeline.)
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// ORDER BY ... LIMIT is supported; result content is checked unordered via PAssert.
@Test public void testZetaSQLSelectFromTableOrderByLimit() { String sql = "SELECT Key, Value FROM KeyValue ORDER BY Key DESC LIMIT 2;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, "KeyValue234").build(), Row.withSchema(schema).addValues(15L, "KeyValue235").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// ORDER BY without LIMIT is rejected at planning time.
@Test public void testZetaSQLSelectFromTableOrderBy() { String sql = "SELECT Key, Value FROM KeyValue ORDER BY Key DESC;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(RuntimeException.class); thrown.expectMessage("ORDER BY without a LIMIT is not supported."); zetaSQLQueryPlanner.convertToBeamRel(sql); }
// Dotted access into a STRUCT column in the SELECT list.
@Test public void testZetaSQLSelectFromTableWithStructType2() { String sql = "SELECT table_with_struct.struct_col.struct_col_str FROM table_with_struct WHERE id = 1;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("row_one").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// STRUCT field access used in a WHERE predicate (SQL string continues on the next source line).
@Test public void testZetaSQLStructFieldAccessInFilter() { String sql = "SELECT table_with_struct.id FROM table_with_struct WHERE" + " 
table_with_struct.struct_col.struct_col_str = 'row_one';"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(1L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// CAST of a column to STRING combined with a STRUCT-field filter.
@Test public void testZetaSQLStructFieldAccessInCast() { String sql = "SELECT CAST(table_with_struct.id AS STRING) FROM table_with_struct WHERE" + " table_with_struct.struct_col.struct_col_str = 'row_one';"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("1").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// Disabled pending BEAM-9191: CAST of a STRUCT string field to TIMESTAMP fails in unparsing.
@Test @Ignore("[BEAM-9191] CAST operator does not work fully due to bugs in unparsing") public void testZetaSQLStructFieldAccessInCast2() { String sql = "SELECT CAST(A.struct_col.struct_col_str AS TIMESTAMP) FROM table_with_struct_ts_string AS" + " A"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addDateTimeField("field").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValue(parseTimestampWithUTCTimeZone("2019-01-15 13:21:03")) .build()); 
// (Continuation of testZetaSQLStructFieldAccessInCast2: execute the pipeline.)
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// Mixes column-referencing aggregates (SUM) with COUNT(*) in one GROUP BY to check column mapping.
@Test public void testAggregateWithAndWithoutColumnRefs() { ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); String sql = "SELECT \n" + " id, \n" + " SUM(has_f1) as f1_count, \n" + " SUM(has_f2) as f2_count, \n" + " SUM(has_f3) as f3_count, \n" + " SUM(has_f4) as f4_count, \n" + " SUM(has_f5) as f5_count, \n" + " COUNT(*) as count, \n" + " SUM(has_f6) as f6_count \n" + "FROM (select 0 as id, 1 as has_f1, 2 as has_f2, 3 as has_f3, 4 as has_f4, 5 as has_f5, 6 as has_f6)\n" + "GROUP BY id"; BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("id") .addInt64Field("f1_count") .addInt64Field("f2_count") .addInt64Field("f3_count") .addInt64Field("f4_count") .addInt64Field("f5_count") .addInt64Field("count") .addInt64Field("f6_count") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(0L, 1L, 2L, 3L, 4L, 5L, 1L, 6L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// GROUP BY on a STRUCT member (rowCol.row_id) with COUNT(*).
@Test public void testZetaSQLStructFieldAccessInGroupBy() { String sql = "SELECT rowCol.row_id, COUNT(*) FROM table_with_struct_two GROUP BY rowCol.row_id"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 1L).build(), Row.withSchema(schema).addValues(2L, 1L).build(), Row.withSchema(schema).addValues(3L, 2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); 
// (Closing brace of testZetaSQLStructFieldAccessInGroupBy.)
}
// ANY_VALUE is non-deterministic, so the assertion checks membership in the allowed values per key.
@Test public void testZetaSQLAnyValueInGroupBy() { String sql = "SELECT rowCol.row_id as key, ANY_VALUE(rowCol.data) as any_value FROM table_with_struct_two GROUP BY rowCol.row_id"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Map<Long, List<String>> allowedTuples = new HashMap<>(); allowedTuples.put(1L, Arrays.asList("data1")); allowedTuples.put(2L, Arrays.asList("data2")); allowedTuples.put(3L, Arrays.asList("data2", "data3")); PAssert.that(stream) .satisfies( input -> { Iterator<Row> iter = input.iterator(); while (iter.hasNext()) { Row row = iter.next(); List<String> values = allowedTuples.remove(row.getInt64("key")); assertTrue(values != null); assertTrue(values.contains(row.getString("any_value"))); } assertTrue(allowedTuples.isEmpty()); return null; }); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// MAX/MIN aggregates grouped by a STRUCT member (rowCol.data).
@Test public void testZetaSQLStructFieldAccessInGroupBy2() { String sql = "SELECT rowCol.data, MAX(rowCol.row_id), MIN(rowCol.row_id) FROM table_with_struct_two" + " GROUP BY rowCol.data"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addStringField("field1") .addInt64Field("field2") .addInt64Field("field3") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("data1", 1L, 1L).build(), Row.withSchema(schema).addValues("data2", 3L, 2L).build(), Row.withSchema(schema).addValues("data3", 3L, 3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// INNER JOIN keyed on a STRUCT member (SQL string continues on the next source line).
@Test public void testZetaSQLStructFieldAccessInnerJoin() { String sql = "SELECT A.rowCol.data 
FROM table_with_struct_two AS A INNER JOIN " + "table_with_struct AS B " + "ON A.rowCol.row_id = B.id"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue("data1").build(), Row.withSchema(schema).addValue("data2").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// ARRAY column projection, including an empty array row.
@Test public void testZetaSQLSelectFromTableWithArrayType() { String sql = "SELECT array_col FROM table_with_array;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addArrayField("field", FieldType.STRING).build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue(Arrays.asList("1", "2", "3")).build(), Row.withSchema(schema).addValue(ImmutableList.of()).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// SELECT * over BigTable: both rows and all three columns (assertions continue on the next source line).
@Test public void testZetaSQLSelectStarFromTable() { String sql = "SELECT * FROM BigTable;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addStringField("field2") .addDateTimeField("field3") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 15L, "BigTable235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC())) .build(), Row.withSchema(schema) .addValues( 
// (Continuation of testZetaSQLSelectStarFromTable: second expected row and pipeline run.)
16L, "BigTable236", new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC())) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// Simple equality filter selects exactly one row.
@Test public void testZetaSQLBasicFiltering() { String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder().addInt64Field("field1").addStringField("field2").build()) .addValues(14L, "KeyValue234") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// AND filter that matches no rows yields an empty result.
@Test public void testZetaSQLBasicFilteringTwo() { String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14 AND Value = 'non-existing';"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream).containsInAnyOrder(); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// OR filter selects both rows.
@Test public void testZetaSQLBasicFilteringThree() { String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14 OR Key = 15;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, "KeyValue234").build(), Row.withSchema(schema).addValues(15L, "KeyValue235").build()); 
// (Continuation of testZetaSQLBasicFilteringThree: execute the pipeline.)
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// COUNT over a single column without GROUP BY.
@Test public void testZetaSQLCountOnAColumn() { String sql = "SELECT COUNT(Key) FROM KeyValue"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// COUNT DISTINCT is unsupported and must fail at planning time.
@Test public void testZetaSQLAggDistinct() { String sql = "SELECT Key, COUNT(DISTINCT Value) FROM KeyValue GROUP BY Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(RuntimeException.class); thrown.expectMessage("Does not support COUNT DISTINCT"); zetaSQLQueryPlanner.convertToBeamRel(sql); }
// COUNT(*) with GROUP BY on the key column.
@Test public void testZetaSQLBasicAgg() { String sql = "SELECT Key, COUNT(*) FROM KeyValue GROUP BY Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, 1L).build(), Row.withSchema(schema).addValues(15L, 1L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// Verifies the output schema keeps the aggregate's column alias (continues on the next source line).
@Test public void testZetaSQLColumnAlias1() { String sql = "SELECT Key, COUNT(*) AS count_col FROM KeyValue GROUP BY Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = 
// (Continuation of testZetaSQLColumnAlias1: schema-name assertions run after the pipeline finishes.)
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); Schema outputSchema = stream.getSchema(); Assert.assertEquals(2, outputSchema.getFieldCount()); Assert.assertEquals("Key", outputSchema.getField(0).getName()); Assert.assertEquals("count_col", outputSchema.getField(1).getName()); }
// Aliases on both an outer projection and a computed expression over a subquery alias.
@Test public void testZetaSQLColumnAlias2() { String sql = "SELECT Key AS k1, (count_col + 1) AS k2 FROM (SELECT Key, COUNT(*) AS count_col FROM" + " KeyValue GROUP BY Key)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); Schema outputSchema = stream.getSchema(); Assert.assertEquals(2, outputSchema.getFieldCount()); Assert.assertEquals("k1", outputSchema.getField(0).getName()); Assert.assertEquals("k2", outputSchema.getField(1).getName()); }
// Aliases on plain column references are preserved in the output schema.
@Test public void testZetaSQLColumnAlias3() { String sql = "SELECT Key AS v1, Value AS v2, ts AS v3 FROM KeyValue"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); Schema outputSchema = stream.getSchema(); Assert.assertEquals(3, outputSchema.getFieldCount()); Assert.assertEquals("v1", outputSchema.getField(0).getName()); Assert.assertEquals("v2", outputSchema.getField(1).getName()); Assert.assertEquals("v3", outputSchema.getField(2).getName()); }
// Alias on a CAST expression (assertions continue on the next source line).
@Test public void testZetaSQLColumnAlias4() { String sql = "SELECT CAST(123 AS INT64) AS cast_col"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); 
// (Continuation of testZetaSQLColumnAlias4: verify the aliased schema after the run.)
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); Schema outputSchema = stream.getSchema(); Assert.assertEquals(1, outputSchema.getFieldCount()); Assert.assertEquals("cast_col", outputSchema.getField(0).getName()); }
// Duplicate alias referenced in GROUP BY must fail with an ambiguity error.
@Test public void testZetaSQLAmbiguousAlias() { String sql = "SELECT row_id as ID, int64_col as ID FROM table_all_types GROUP BY ID;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expectMessage( "Name ID in GROUP BY clause is ambiguous; it may refer to multiple columns in the" + " SELECT-list [at 1:68]"); zetaSQLQueryPlanner.convertToBeamRel(sql); }
// GROUP BY using a 1-based ordinal reference to the SELECT list.
@Test public void testZetaSQLAggWithOrdinalReference() { String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY 1"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 2L).build(), Row.withSchema(schema).addValues(2L, 3L).build(), Row.withSchema(schema).addValues(3L, 2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// GROUP BY using the SELECT-list alias (continues on the next source line).
@Test public void testZetaSQLAggWithAliasReference() { String sql = "SELECT Key AS K, COUNT(*) FROM aggregate_test_table GROUP BY K"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); 
// (Continuation of testZetaSQLAggWithAliasReference: expected counts per key.)
PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 2L).build(), Row.withSchema(schema).addValues(2L, 3L).build(), Row.withSchema(schema).addValues(3L, 2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// COUNT(*) grouped by a single key over the aggregate test table.
@Test public void testZetaSQLBasicAgg2() { String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 2L).build(), Row.withSchema(schema).addValues(2L, 3L).build(), Row.withSchema(schema).addValues(3L, 2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// COUNT(*) grouped by a composite key; GROUP BY column order differs from SELECT order.
@Test public void testZetaSQLBasicAgg3() { String sql = "SELECT Key, Key2, COUNT(*) FROM aggregate_test_table GROUP BY Key2, Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addInt64Field("field3") .addInt64Field("field2") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 10L, 1L).build(), Row.withSchema(schema).addValues(1L, 11L, 1L).build(), Row.withSchema(schema).addValues(2L, 11L, 2L).build(), Row.withSchema(schema).addValues(2L, 12L, 1L).build(), Row.withSchema(schema).addValues(3L, 13L, 2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// MAX/MIN/SUM over int and double columns (SQL string continues on the next source line).
@Test public void testZetaSQLBasicAgg4() { String sql = "SELECT Key, Key2, 
MAX(f_int_1), MIN(f_int_1), SUM(f_int_1), SUM(f_double_1) " + "FROM aggregate_test_table GROUP BY Key2, Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addInt64Field("field3") .addInt64Field("field2") .addInt64Field("field4") .addInt64Field("field5") .addDoubleField("field6") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 10L, 1L, 1L, 1L, 1.0).build(), Row.withSchema(schema).addValues(1L, 11L, 2L, 2L, 2L, 2.0).build(), Row.withSchema(schema).addValues(2L, 11L, 4L, 3L, 7L, 7.0).build(), Row.withSchema(schema).addValues(2L, 12L, 5L, 5L, 5L, 5.0).build(), Row.withSchema(schema).addValues(3L, 13L, 7L, 6L, 13L, 13.0).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); }
// AVG over FLOAT64 inputs (int column is CAST first, since AVG(int64) typing differs — see testZetaSQLTestAVG).
@Test public void testZetaSQLBasicAgg5() { String sql = "SELECT Key, Key2, AVG(CAST(f_int_1 AS FLOAT64)), AVG(f_double_1) " + "FROM aggregate_test_table GROUP BY Key2, Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addInt64Field("field2") .addDoubleField("field3") .addDoubleField("field4") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 10L, 1.0, 1.0).build(), Row.withSchema(schema).addValues(1L, 11L, 2.0, 2.0).build(), Row.withSchema(schema).addValues(2L, 11L, 3.5, 3.5).build(), Row.withSchema(schema).addValues(2L, 12L, 5.0, 5.0).build(), Row.withSchema(schema).addValues(3L, 13L, 6.5, 6.5).build()); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore( "Calcite infers return type of AVG(int64) as BIGINT while ZetaSQL requires it as either" + " NUMERIC or DOUBLE/FLOAT64") public void testZetaSQLTestAVG() { String sql = "SELECT Key, AVG(f_int_1)" + "FROM aggregate_test_table GROUP BY Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addInt64Field("field2") .addInt64Field("field3") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 10L, 1L).build(), Row.withSchema(schema).addValues(1L, 11L, 6L).build(), Row.withSchema(schema).addValues(2L, 11L, 6L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLGroupByExprInSelect() { String sql = "SELECT int64_col + 1 FROM table_all_types GROUP BY int64_col + 1;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue(0L).build(), Row.withSchema(schema).addValue(-1L).build(), Row.withSchema(schema).addValue(-2L).build(), Row.withSchema(schema).addValue(-3L).build(), Row.withSchema(schema).addValue(-4L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLGroupByAndFiltering() { String sql = "SELECT int64_col FROM table_all_types WHERE int64_col = 1 GROUP BY int64_col;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new 
ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream).containsInAnyOrder(); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLGroupByAndFilteringOnNonGroupByColumn() { String sql = "SELECT int64_col FROM table_all_types WHERE double_col = 0.5 GROUP BY int64_col;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue(-5L).build(), Row.withSchema(schema).addValue(-4L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBasicHaving() { String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY Key HAVING COUNT(*) > 2"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L, 3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLHavingNull() { String sql = "SELECT SUM(int64_val) FROM all_null_table GROUP BY primary_key HAVING false"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = 
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field").build(); PAssert.that(stream).empty(); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBasicFixedWindowing() { String sql = "SELECT " + "COUNT(*) as field_count, " + "TUMBLE_START(\"INTERVAL 1 SECOND\") as window_start, " + "TUMBLE_END(\"INTERVAL 1 SECOND\") as window_end " + "FROM KeyValue " + "GROUP BY TUMBLE(ts, \"INTERVAL 1 SECOND\");"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("count_start") .addDateTimeField("field1") .addDateTimeField("field2") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 1L, new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()), new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC())) .build(), Row.withSchema(schema) .addValues( 1L, new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()), new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC())) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLNestedQueryOne() { String sql = "SELECT a.Value, a.Key FROM (SELECT Key, Value FROM KeyValue WHERE Key = 14 OR Key = 15)" + " as a;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field2").addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("KeyValue234", 14L).build(), 
Row.withSchema(schema).addValues("KeyValue235", 15L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLNestedQueryTwo() { String sql = "SELECT a.Key, a.Key2, COUNT(*) FROM " + " (SELECT * FROM aggregate_test_table WHERE Key != 10) as a " + " GROUP BY a.Key2, a.Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addInt64Field("field3") .addInt64Field("field2") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L, 10L, 1L).build(), Row.withSchema(schema).addValues(1L, 11L, 1L).build(), Row.withSchema(schema).addValues(2L, 11L, 2L).build(), Row.withSchema(schema).addValues(2L, 12L, 1L).build(), Row.withSchema(schema).addValues(3L, 13L, 2L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLNestedQueryThree() { String sql = "SELECT * FROM (SELECT * FROM KeyValue) AS t1 INNER JOIN (SELECT * FROM BigTable) AS t2 on" + " t1.Key = t2.RowKey"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addInt64Field("Key") .addStringField("Value") .addDateTimeField("ts") .addInt64Field("RowKey") .addStringField("Value2") .addDateTimeField("ts2") .build()) .addValues( 15L, "KeyValue235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()), 15L, "BigTable235", new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC())) .build()); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLNestedQueryFive() { String sql = "SELECT a.Value, a.Key FROM (SELECT Value, Key FROM KeyValue WHERE Key = 14 OR Key = 15)" + " as a;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field2").addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("KeyValue234", 14L).build(), Row.withSchema(schema).addValues("KeyValue235", 15L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Test public void testNumericColumn() { String sql = "SELECT numeric_field FROM table_with_numeric"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("123.4567")) .build(), Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("765.4321")) .build(), Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("-555.5555")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testUnaryMinusNumeric() { String sql = "SELECT - NUMERIC '1.23456e05'"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> 
stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("-123456")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testAddNumeric() { String sql = "SELECT NUMERIC '1.23456e05' + NUMERIC '9.876e-3'"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("123456.009876")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSubNumeric() { String sql = "SELECT NUMERIC '1.23456e05' - NUMERIC '-9.876e-3'"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("123456.009876")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testMultiNumeric() { String sql = "SELECT NUMERIC '1.23e02' * NUMERIC '-1.001e-3'"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( 
Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("-0.123123")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testDivNumeric() { String sql = "SELECT NUMERIC '-1.23123e-1' / NUMERIC '-1.001e-3'"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("123")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testModNumeric() { String sql = "SELECT MOD(NUMERIC '1.23456e05', NUMERIC '5')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("1")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testFloorNumeric() { String sql = "SELECT FLOOR(NUMERIC '1.23456e04'), FLOOR(NUMERIC '-1.23456e04')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addDecimalField("f_numeric1") .addDecimalField("f_numeric2") .build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("12345")) 
.addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("-12346")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCeilNumeric() { String sql = "SELECT CEIL(NUMERIC '1.23456e04'), CEIL(NUMERIC '-1.23456e04')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addDecimalField("f_numeric1") .addDecimalField("f_numeric2") .build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("12346")) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("-12345")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("https: public void testSumNumeric() { String sql = "SELECT SUM(numeric_field) FROM table_with_numeric"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) .addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("333.3333")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("https: public void testAvgNumeric() { String sql = "SELECT AVG(numeric_field) FROM table_with_numeric"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addDecimalField("f_numeric").build()) 
.addValues(ZetaSqlTypesUtils.bigDecimalAsNumeric("111.1111")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testMultipleSelectStatementsThrowsException() { String sql = "SELECT 1; SELECT 2;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(UnsupportedOperationException.class); thrown.expectMessage("No additional statements are allowed after a SELECT statement."); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testAlreadyDefinedUDFThrowsException() { String sql = "CREATE FUNCTION foo() AS (0); CREATE FUNCTION foo() AS (1); SELECT foo();"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(ParseException.class); thrown.expectMessage("Failed to define function foo"); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testCreateFunctionNoSelectThrowsException() { String sql = "CREATE FUNCTION plusOne(x INT64) AS (x + 1);"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(UnsupportedOperationException.class); thrown.expectMessage("Statement list must end in a SELECT statement, not CreateFunctionStmt"); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testNullaryUdf() { String sql = "CREATE FUNCTION zero() AS (0); SELECT zero();"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(0L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testQualifiedNameUdfUnqualifiedCall() { String sql = "CREATE FUNCTION foo.bar.baz() AS (\"uwu\"); SELECT baz();"; ZetaSQLQueryPlanner 
zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addStringField("x").build()).addValue("uwu").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore( "Qualified paths can't be resolved due to a bug in ZetaSQL: " + "https: public void testQualifiedNameUdfQualifiedCallThrowsException() { String sql = "CREATE FUNCTION foo.bar.baz() AS (\"uwu\"); SELECT foo.bar.baz();"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addStringField("x").build()).addValue("uwu").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testUnaryUdf() { String sql = "CREATE FUNCTION triple(x INT64) AS (3 * x); SELECT triple(triple(1));"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(9L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testUdfWithinUdf() { String sql = "CREATE FUNCTION triple(x INT64) AS (3 * x);" + " CREATE FUNCTION nonuple(x INT64) as (triple(triple(x)));" + " SELECT nonuple(1);"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = 
zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(9L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testUndefinedUdfThrowsException() { String sql = "CREATE FUNCTION foo() AS (bar()); " + "CREATE FUNCTION bar() AS (foo()); " + "SELECT foo();"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(SqlException.class); thrown.expectMessage("Function not found: bar"); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testRecursiveUdfThrowsException() { String sql = "CREATE FUNCTION omega() AS (omega()); SELECT omega();"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(SqlException.class); thrown.expectMessage("Function not found: omega"); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testUDTVF() { String sql = "CREATE TABLE FUNCTION CustomerRange(MinID INT64, MaxID INT64)\n" + " AS\n" + " SELECT *\n" + " FROM KeyValue\n" + " WHERE key >= MinId AND key <= MaxId; \n" + " SELECT key FROM CustomerRange(10, 14)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema singleField = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(singleField).addValues(14L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testUDTVFTableNotFound() { String sql = "CREATE TABLE FUNCTION CustomerRange(MinID INT64, MaxID INT64)\n" + " AS\n" + " SELECT *\n" + " FROM TableNotExist\n" + " WHERE key >= MinId AND key <= MaxId; \n" + " 
SELECT key FROM CustomerRange(10, 14)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(SqlConversionException.class); thrown.expectMessage("Wasn't able to resolve the path [TableNotExist] in schema: beam"); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testUDTVFFunctionNotFound() { String sql = "CREATE TABLE FUNCTION CustomerRange(MinID INT64, MaxID INT64)\n" + " AS\n" + " SELECT *\n" + " FROM KeyValue\n" + " WHERE key >= MinId AND key <= MaxId; \n" + " SELECT key FROM FunctionNotFound(10, 14)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(SqlException.class); thrown.expectMessage("Table-valued function not found: FunctionNotFound"); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testDistinct() { String sql = "SELECT DISTINCT Key2 FROM aggregate_test_table"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema schema = Schema.builder().addInt64Field("Key2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(10L).build(), Row.withSchema(schema).addValues(11L).build(), Row.withSchema(schema).addValues(12L).build(), Row.withSchema(schema).addValues(13L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testDistinctOnNull() { String sql = "SELECT DISTINCT str_val FROM all_null_table"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema schema = Schema.builder().addNullableField("str_val", FieldType.DOUBLE).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Object) 
null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testAnyValue() { String sql = "SELECT ANY_VALUE(double_val) FROM all_null_table"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema schema = Schema.builder().addNullableField("double_val", FieldType.DOUBLE).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectNULL() { String sql = "SELECT NULL"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema schema = Schema.builder().addNullableField("long_val", FieldType.INT64).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testWithQueryOne() { String sql = "With T1 AS (SELECT * FROM KeyValue), T2 AS (SELECT * FROM BigTable) SELECT T2.RowKey FROM" + " T1 INNER JOIN T2 on T1.Key = T2.RowKey;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addInt64Field("field1").build()) .addValues(15L) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testWithQueryTwo() { String sql = "WITH 
T1 AS (SELECT Key, COUNT(*) as value FROM KeyValue GROUP BY Key) SELECT T1.Key," + " T1.value FROM T1"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, 1L).build(), Row.withSchema(schema).addValues(15L, 1L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testWithQueryThree() { String sql = "WITH T1 as (SELECT Value, Key FROM KeyValue WHERE Key = 14 OR Key = 15) SELECT T1.Value," + " T1.Key FROM T1;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").addInt64Field("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("KeyValue234", 14L).build(), Row.withSchema(schema).addValues("KeyValue235", 15L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testWithQueryFour() { String sql = "WITH T1 as (SELECT Value, Key FROM KeyValue) SELECT T1.Value, T1.Key FROM T1 WHERE T1.Key" + " = 14 OR T1.Key = 15;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field2").addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( 
Row.withSchema(schema).addValues("KeyValue234", 14L).build(), Row.withSchema(schema).addValues("KeyValue235", 15L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testWithQueryFive() { String sql = "WITH T1 AS (SELECT * FROM KeyValue) SELECT T1.Key, COUNT(*) FROM T1 GROUP BY T1.Key"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, 1L).build(), Row.withSchema(schema).addValues(15L, 1L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testWithQuerySix() { String sql = "WITH T1 AS (SELECT * FROM window_test_table_two) SELECT " + "COUNT(*) as field_count, " + "SESSION_START(\"INTERVAL 3 SECOND\") as window_start, " + "SESSION_END(\"INTERVAL 3 SECOND\") as window_end " + "FROM T1 " + "GROUP BY SESSION(ts, \"INTERVAL 3 SECOND\");"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("count_star") .addDateTimeField("field1") .addDateTimeField("field2") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 2L, new DateTime(2018, 7, 1, 21, 26, 12, ISOChronology.getInstanceUTC()), new DateTime(2018, 7, 1, 21, 26, 12, ISOChronology.getInstanceUTC())) .build(), Row.withSchema(schema) .addValues( 2L, new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()), new DateTime(2018, 7, 1, 21, 26, 6, 
ISOChronology.getInstanceUTC()))
                .build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** UNNEST over an inline ARRAY&lt;STRING&gt; literal produces one row per element. */
  @Test
  public void testUNNESTLiteral() {
    String sql = "SELECT * FROM UNNEST(ARRAY<STRING>['foo', 'bar']);";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    Schema schema = Schema.builder().addStringField("str_field").build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema).addValues("foo").build(),
            Row.withSchema(schema).addValues("bar").build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** UNNEST over a query parameter (@p0) bound to a ZetaSQL ARRAY&lt;STRING&gt; value. */
  @Test
  public void testUNNESTParameters() {
    String sql = "SELECT * FROM UNNEST(@p0);";
    // Bind @p0 to the ZetaSQL array value ["foo", "bar"].
    ImmutableMap<String, Value> params =
        ImmutableMap.of(
            "p0",
            Value.createArrayValue(
                TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_STRING)),
                ImmutableList.of(Value.createStringValue("foo"), Value.createStringValue("bar"))));
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
    PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
    Schema schema = Schema.builder().addStringField("str_field").build();
    PAssert.that(stream)
        .containsInAnyOrder(
            Row.withSchema(schema).addValues("foo").build(),
            Row.withSchema(schema).addValues("bar").build());
    pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
  }

  /** UNNEST over an ARRAY(subquery) expression; disabled pending BEAM-9515. */
  @Test
  @Ignore("BEAM-9515")
  public void testUNNESTExpression() {
    String sql = "SELECT * FROM UNNEST(ARRAY(SELECT Value FROM KeyValue));";
    ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
    BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
    PCollection<Row> stream =
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema schema = Schema.builder().addStringField("str_field").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("KeyValue234").build(), Row.withSchema(schema).addValues("KeyValue235").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testNamedUNNESTLiteral() { String sql = "SELECT *, T1 FROM UNNEST(ARRAY<STRING>['foo', 'bar']) AS T1"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema schema = Schema.builder().addStringField("str_field").addStringField("str2_field").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("foo", "foo").build(), Row.withSchema(schema).addValues("bar", "bar").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testNamedUNNESTLiteralOffset() { String sql = "SELECT x, p FROM UNNEST([3, 4]) AS x WITH OFFSET p"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); thrown.expect(UnsupportedOperationException.class); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); } @Test public void testUnnestArrayColumn() { String sql = "SELECT p FROM table_with_array_for_unnest, UNNEST(table_with_array_for_unnest.int_array_col) as p"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema schema = Schema.builder().addInt64Field("int_field").build(); PAssert.that(stream) .containsInAnyOrder( 
Row.withSchema(schema).addValue(14L).build(), Row.withSchema(schema).addValue(18L).build(), Row.withSchema(schema).addValue(22L).build(), Row.withSchema(schema).addValue(24L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testStringAggregation() { String sql = "SELECT STRING_AGG(fruit) AS string_agg" + " FROM UNNEST([\"apple\", \"pear\", \"banana\", \"pear\"]) AS fruit"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema schema = Schema.builder().addStringField("string_field").build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValue("apple,pear,banana,pear").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("Seeing exception in Beam, need further investigation on the cause of this failed query.") public void testNamedUNNESTJoin() { String sql = "SELECT * " + "FROM table_with_array_for_unnest AS t1" + " LEFT JOIN UNNEST(t1.int_array_col) AS t2" + " on " + " t1.int_col = t2"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream).containsInAnyOrder(); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testUnnestJoinStruct() { String sql = "SELECT b, x FROM UNNEST(" + "[STRUCT(true AS b, [3, 5] AS arr), STRUCT(false AS b, [7, 9] AS arr)]) t " + "LEFT JOIN UNNEST(t.arr) x ON b"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(UnsupportedOperationException.class); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); } 
@Test public void testUnnestJoinLiteral() { String sql = "SELECT a, b " + "FROM UNNEST([1, 1, 2, 3, 5, 8, 13, NULL]) a " + "JOIN UNNEST([1, 2, 3, 5, 7, 11, 13, NULL]) b " + "ON a = b"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(UnsupportedOperationException.class); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testUnnestJoinSubquery() { String sql = "SELECT a, b " + "FROM UNNEST([1, 2, 3]) a " + "JOIN UNNEST(ARRAY(SELECT b FROM UNNEST([3, 2, 1]) b)) b " + "ON a = b"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(UnsupportedOperationException.class); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testCaseNoValue() { String sql = "SELECT CASE WHEN 1 > 2 THEN 'not possible' ELSE 'seems right' END"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addStringField("str_field").build()) .addValue("seems right") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCaseWithValue() { String sql = "SELECT CASE 1 WHEN 2 THEN 'not possible' ELSE 'seems right' END"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addStringField("str_field").build()) .addValue("seems right") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCaseWithValueMultipleCases() { String sql = "SELECT CASE 2 WHEN 1 
THEN 'not possible' WHEN 2 THEN 'seems right' ELSE 'also not" + " possible' END"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addStringField("str_field").build()) .addValue("seems right") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCaseWithValueNoElse() { String sql = "SELECT CASE 2 WHEN 1 THEN 'not possible' WHEN 2 THEN 'seems right' END"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addStringField("str_field").build()) .addValue("seems right") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCaseNoValueNoElseNoMatch() { String sql = "SELECT CASE WHEN 'abc' = '123' THEN 'not possible' END"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addNullableField("str_field", FieldType.STRING).build()) .addValue(null) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCaseWithValueNoElseNoMatch() { String sql = "SELECT CASE 2 WHEN 1 THEN 'not possible' END"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = 
zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addNullableField("str_field", FieldType.STRING).build()) .addValue(null) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCastToDateWithCase() { String sql = "SELECT f_int, \n" + "CASE WHEN CHAR_LENGTH(TRIM(f_string)) = 8 \n" + " THEN CAST (CONCAT(\n" + " SUBSTR(TRIM(f_string), 1, 4) \n" + " , '-' \n" + " , SUBSTR(TRIM(f_string), 5, 2) \n" + " , '-' \n" + " , SUBSTR(TRIM(f_string), 7, 2)) AS DATE)\n" + " ELSE NULL\n" + "END \n" + "FROM table_for_case_when"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema resultType = Schema.builder() .addInt64Field("f_long") .addNullableField("f_date", FieldType.logicalType(SqlTypes.DATE)) .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(resultType).addValues(1L, LocalDate.parse("2018-10-18")).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIntersectAll() { String sql = "SELECT Key FROM aggregate_test_table " + "INTERSECT ALL " + "SELECT Key FROM aggregate_test_table_two"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema resultType = Schema.builder().addInt64Field("field").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(resultType).addValues(1L).build(), Row.withSchema(resultType).addValues(2L).build(), Row.withSchema(resultType).addValues(2L).build(), 
Row.withSchema(resultType).addValues(2L).build(), Row.withSchema(resultType).addValues(3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIntersectDistinct() { String sql = "SELECT Key FROM aggregate_test_table " + "INTERSECT DISTINCT " + "SELECT Key FROM aggregate_test_table_two"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema resultType = Schema.builder().addInt64Field("field").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(resultType).addValues(1L).build(), Row.withSchema(resultType).addValues(2L).build(), Row.withSchema(resultType).addValues(3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testExceptAll() { String sql = "SELECT Key FROM aggregate_test_table " + "EXCEPT ALL " + "SELECT Key FROM aggregate_test_table_two"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema resultType = Schema.builder().addInt64Field("field").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(resultType).addValues(1L).build(), Row.withSchema(resultType).addValues(3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectNullIntersectDistinct() { String sql = "SELECT NULL INTERSECT DISTINCT SELECT 2"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); System.err.println("SCHEMA " + 
stream.getSchema()); PAssert.that(stream).empty(); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectNullIntersectAll() { String sql = "SELECT NULL INTERSECT ALL SELECT 2"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); System.err.println("SCHEMA " + stream.getSchema()); PAssert.that(stream).empty(); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectNullExceptDistinct() { String sql = "SELECT NULL EXCEPT DISTINCT SELECT 2"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream).containsInAnyOrder(Row.nullRow(stream.getSchema())); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectNullExceptAll() { String sql = "SELECT NULL EXCEPT ALL SELECT 2"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream).containsInAnyOrder(Row.nullRow(stream.getSchema())); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectFromEmptyTable() { String sql = "SELECT * FROM table_empty;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream).containsInAnyOrder(); 
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// STARTS_WITH with two non-null string literals.
@Test
public void testStartsWithString() {
  String sql = "SELECT STARTS_WITH('string1', 'stri')";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// STARTS_WITH with a NULL first argument returns NULL.
@Test
public void testStartsWithString2() {
  String sql = "SELECT STARTS_WITH(@p0, @p1)";

  ImmutableMap<String, Value> params =
      ImmutableMap.<String, Value>builder()
          .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
          .put("p1", Value.createStringValue(""))
          .build();

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// STARTS_WITH with both arguments NULL returns NULL.
@Test
public void testStartsWithString3() {
  String sql = "SELECT STARTS_WITH(@p0, @p1)";

  ImmutableMap<String, Value> params =
      ImmutableMap.<String, Value>builder()
          .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
          .put("p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
          .build();

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream =
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEndsWithString() { String sql = "SELECT STARTS_WITH('string1', 'ng0')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEndsWithString2() { String sql = "SELECT STARTS_WITH(@p0, @p1)"; ImmutableMap<String, Value> params = ImmutableMap.<String, Value>builder() .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING)) .put("p1", Value.createStringValue("")) .build(); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testEndsWithString3() { String sql = "SELECT STARTS_WITH(@p0, @p1)"; ImmutableMap<String, Value> params = ImmutableMap.<String, Value>builder() .put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING)) .put("p1", 
Value.createSimpleNullValue(TypeKind.TYPE_STRING))
          .build();

ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
    .containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());

pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT with a single argument is the identity.
@Test
public void testConcatWithOneParameters() {
  String sql = "SELECT concat('abc')";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT with two arguments.
@Test
public void testConcatWithTwoParameters() {
  String sql = "SELECT concat('abc', 'def')";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abcdef").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT with three arguments.
@Test
public void testConcatWithThreeParameters() {
  String sql = "SELECT concat('abc', 'def', 'xyz')";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abcdefxyz").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT with four arguments (includes a space separator).
@Test
public void testConcatWithFourParameters() {
  String sql = "SELECT concat('abc', 'def', ' ', 'xyz')";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyz").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT with five arguments.
@Test
public void testConcatWithFiveParameters() {
  String sql = "SELECT concat('abc', 'def', ' ', 'xyz', 'kkk')";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyzkkk").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT with six arguments.
@Test
public void testConcatWithSixParameters() {
  String sql = "SELECT concat('abc', 'def', ' ', 'xyz', 'kkk', 'ttt')";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyzkkkttt").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT propagates NULL: a NULL second argument yields a NULL result.
@Test
public void testConcatWithNull1() {
  String sql = "SELECT CONCAT(@p0, @p1) AS ColA";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createStringValue(""),
          "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT with both arguments NULL yields NULL.
@Test
public void testConcatWithNull2() {
  String sql = "SELECT CONCAT(@p0, @p1) AS ColA";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
          "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// A named query parameter projected directly.
@Test
public void testNamedParameterQuery() {
  String sql = "SELECT @ColA AS ColA";

  ImmutableMap<String, Value> params = ImmutableMap.of("ColA", Value.createInt64Value(5));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addInt64Field("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(5L).build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// An ARRAY<STRUCT<...>> literal maps to an array-of-row Beam schema field.
@Test
public void testArrayStructLiteral() {
  String sql = "SELECT ARRAY<STRUCT<INT64, INT64>>[(11, 12)];";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema innerSchema =
      Schema.of(Field.of("s", FieldType.INT64), Field.of("i", FieldType.INT64));
  final Schema schema =
      Schema.of(Field.of("field1", FieldType.array(FieldType.row(innerSchema))));
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema)
              .addValue(ImmutableList.of(Row.withSchema(innerSchema).addValues(11L, 12L).build()))
              .build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// A struct-typed named parameter maps to a row-typed Beam schema field.
@Test
public void testParameterStruct() {
  String sql = "SELECT @p as ColA";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p",
          Value.createStructValue(
              TypeFactory.createStructType(
                  ImmutableList.of(
                      new StructType.StructField(
                          "s", TypeFactory.createSimpleType(TypeKind.TYPE_STRING)),
                      new StructType.StructField(
                          "i", TypeFactory.createSimpleType(TypeKind.TYPE_INT64)))),
              ImmutableList.of(Value.createStringValue("foo"), Value.createInt64Value(1L))));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema innerSchema =
      Schema.of(Field.of("s", FieldType.STRING), Field.of("i", FieldType.INT64));
  final Schema schema =
Schema.of(Field.of("field1", FieldType.row(innerSchema)));
PAssert.that(stream)
    .containsInAnyOrder(
        Row.withSchema(schema)
            .addValue(Row.withSchema(innerSchema).addValues("foo", 1L).build())
            .build());

pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Field access into a nested struct parameter (@outer_struct.inner_struct.s).
@Test
public void testParameterStructNested() {
  String sql = "SELECT @outer_struct.inner_struct.s as ColA";

  StructType innerStructType =
      TypeFactory.createStructType(
          ImmutableList.of(
              new StructType.StructField(
                  "s", TypeFactory.createSimpleType(TypeKind.TYPE_STRING))));
  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "outer_struct",
          Value.createStructValue(
              TypeFactory.createStructType(
                  ImmutableList.of(new StructType.StructField("inner_struct", innerStructType))),
              ImmutableList.of(
                  Value.createStructValue(
                      innerStructType, ImmutableList.of(Value.createStringValue("foo"))))));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("foo").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT of two named string parameters.
@Test
public void testConcatNamedParameterQuery() {
  String sql = "SELECT CONCAT(@p0, @p1) AS ColA";

  ImmutableMap<String, Value> params =
      ImmutableMap.of("p0", Value.createStringValue(""), "p1", Value.createStringValue("A"));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("A").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CONCAT of three positional ('?') parameters, bound in order.
@Test
public void testConcatPositionalParameterQuery() {
  String sql = "SELECT CONCAT(?, ?, ?) AS ColA";

  ImmutableList<Value> params =
      ImmutableList.of(
          Value.createStringValue("a"),
          Value.createStringValue("b"),
          Value.createStringValue("c"));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// REPLACE on an empty source string is a no-op.
@Test
public void testReplace1() {
  String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createStringValue(""),
          "p1", Value.createStringValue(""),
          "p2", Value.createStringValue("a"));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// REPLACE with an empty search string leaves the source unchanged.
@Test
public void testReplace2() {
  String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createStringValue("abc"),
          "p1", Value.createStringValue(""),
          "p2", Value.createStringValue("xyz"));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// REPLACE with a NULL replacement argument returns NULL.
@Test
public void testReplace3() {
  String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createStringValue(""),
          "p1", Value.createStringValue(""),
          "p2", Value.createSimpleNullValue(TypeKind.TYPE_STRING));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// REPLACE with NULL source and search arguments returns NULL.
@Test
public void testReplace4() {
  String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
          "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
          "p2", Value.createStringValue(""));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Single-argument TRIM strips whitespace from both ends.
@Test
public void testTrim1() {
  String sql = "SELECT trim(@p0)";

  ImmutableMap<String, Value> params =
      ImmutableMap.of("p0", Value.createStringValue(" a b c "));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("a b c").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Two-argument TRIM strips the given character set from both ends.
@Test
public void testTrim2() {
  String sql = "SELECT trim(@p0, @p1)";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab"));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("xyz").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// TRIM with NULL arguments returns NULL.
@Test
public void testTrim3() {
  String sql = "SELECT trim(@p0, @p1)";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
          "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addNullableField("field1",
FieldType.STRING).build();
PAssert.that(stream)
    .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());

pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Single-argument LTRIM strips leading whitespace only.
@Test
public void testLTrim1() {
  String sql = "SELECT ltrim(@p0)";

  ImmutableMap<String, Value> params =
      ImmutableMap.of("p0", Value.createStringValue(" a b c "));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("a b c ").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Two-argument LTRIM strips the given character set from the left end only.
@Test
public void testLTrim2() {
  String sql = "SELECT ltrim(@p0, @p1)";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab"));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("xyzab").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// LTRIM with NULL arguments returns NULL.
@Test
public void testLTrim3() {
  String sql = "SELECT ltrim(@p0, @p1)";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
          "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Single-argument RTRIM strips trailing whitespace only.
@Test
public void testRTrim1() {
  String sql = "SELECT rtrim(@p0)";

  ImmutableMap<String, Value> params =
      ImmutableMap.of("p0", Value.createStringValue(" a b c "));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(" a b c").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// Two-argument RTRIM strips the given character set from the right end only.
@Test
public void testRTrim2() {
  String sql = "SELECT rtrim(@p0, @p1)";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab"));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abxyz").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// RTRIM with NULL arguments returns NULL.
@Test
public void testRTrim3() {
  String sql = "SELECT rtrim(@p0, @p1)";

  ImmutableMap<String, Value> params =
      ImmutableMap.of(
          "p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
          "p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
  PAssert.that(stream)
      .containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CAST of a single-byte BYTES parameter to STRING.
// NOTE(review): the @Ignore reason URL appears truncated in this copy of the file —
// restore the full issue link when editing.
@Test
@Ignore("https:
public void testCastBytesToString1() {
  String sql = "SELECT CAST(@p0 AS STRING)";

  ImmutableMap<String, Value> params =
      ImmutableMap.of("p0", Value.createBytesValue(ByteString.copyFromUtf8("`")));

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("`").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CAST of a BYTES literal to STRING.
@Test
public void testCastBytesToString2() {
  String sql = "SELECT CAST(b'b' AS STRING)";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("b").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CAST of a BYTES column to STRING, reading from the table_all_types fixture.
// NOTE(review): the @Ignore reason URL appears truncated in this copy of the file —
// restore the full issue link when editing.
@Test
@Ignore("https:
public void testCastBytesToStringFromTable() {
  String sql = "SELECT CAST(bytes_col AS STRING) FROM table_all_types";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addStringField("field1").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema).addValues("1").build(),
          Row.withSchema(schema).addValues("2").build(),
          Row.withSchema(schema).addValues("3").build(),
          Row.withSchema(schema).addValues("4").build(),
          Row.withSchema(schema).addValues("5").build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CAST of a timezone-less string literal to TIMESTAMP; interpreted as UTC here.
@Test
public void testCastStringToTimestamp() {
  String sql = "SELECT CAST('2019-01-15 13:21:03' AS TIMESTAMP)";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  final Schema schema = Schema.builder().addDateTimeField("field_1").build();
  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(schema)
              .addValues(parseTimestampWithUTCTimeZone("2019-01-15 13:21:03"))
              .build());

  pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}

// CAST string-with-offset to TIMESTAMP while a non-UTC default timezone is configured
// on both the planner and the pipeline options; the expectation is the UTC instant.
@Test
public void testCastStringToTimestampWithDefaultTimezoneSet() {
  String sql = "SELECT CAST('2014-12-01 12:34:56+07:30' AS TIMESTAMP)";

  ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
  zetaSQLQueryPlanner.setDefaultTimezone("Pacific/Chatham");
  pipeline
      .getOptions()
      .as(BeamSqlPipelineOptions.class)
      .setZetaSqlDefaultTimezone("Pacific/Chatham");

  BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
  PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);

  PAssert.that(stream)
      .containsInAnyOrder(
          Row.withSchema(Schema.builder().addDateTimeField("field_1").build())
              .addValues(parseTimestampWithUTCTimeZone("2014-12-01 05:04:56"))
              .build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("https: public void testCastBetweenTimeAndString() { String sql = "SELECT CAST(s1 as TIME) as t2, CAST(t1 as STRING) as s2 FROM " + "(SELECT '12:34:56.123456' as s1, TIME '12:34:56.123456' as t1)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); PAssert.that(stream) .containsInAnyOrder( Row.withSchema( Schema.builder() .addLogicalTypeField("t2", SqlTypes.TIME) .addStringField("s2") .build()) .addValues(LocalTime.of(12, 34, 56, 123456000), "12:34:56.123456") .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCastStringToString() { String sql = "SELECT CAST(@p0 AS STRING)"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue("")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCastStringToInt64() { String sql = "SELECT CAST(@p0 AS INT64)"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue("123")); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = 
Schema.builder().addInt64Field("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(123L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectConstant() { String sql = "SELECT 'hi'"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("hi").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("[BEAM-8593] ZetaSQL does not support Map type") public void testSelectFromTableWithMap() { String sql = "SELECT row_field FROM table_with_map"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema rowSchema = Schema.builder().addInt64Field("row_id").addStringField("data").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(Schema.builder().addRowField("row_field", rowSchema).build()) .addValues(Row.withSchema(rowSchema).addValues(1L, "data1").build()) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSubQuery() { String sql = "select sum(Key) from KeyValue\n" + "group by (select Key)"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(UnsupportedOperationException.class); thrown.expectMessage("Does not support sub-queries"); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testSubstr() { String sql = "SELECT substr(@p0, @p1, @p2)"; ImmutableMap<String, Value> 
params = ImmutableMap.of( "p0", Value.createStringValue("abc"), "p1", Value.createInt64Value(-2L), "p2", Value.createInt64Value(1L)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field1").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("b").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSubstrWithLargeValueExpectException() { String sql = "SELECT substr(@p0, @p1, @p2)"; ImmutableMap<String, Value> params = ImmutableMap.of( "p0", Value.createStringValue("abc"), "p1", Value.createInt64Value(Integer.MAX_VALUE + 1L), "p2", Value.createInt64Value(Integer.MIN_VALUE - 1L)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); thrown.expect(RuntimeException.class); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectAll() { String sql = "SELECT ALL Key, Value FROM KeyValue;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(14L, "KeyValue234").build(), Row.withSchema(schema).addValues(15L, "KeyValue235").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectDistinct() { String sql 
= "SELECT DISTINCT Key FROM aggregate_test_table;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(1L).build(), Row.withSchema(schema).addValues(2L).build(), Row.withSchema(schema).addValues(3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectDistinct2() { String sql = "SELECT DISTINCT val.BYTES\n" + "from (select b\"BYTES\" BYTES union all\n" + " select b\"bytes\" union all\n" + " select b\"ByTeS\") val"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addByteArrayField("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("BYTES".getBytes(StandardCharsets.UTF_8)).build(), Row.withSchema(schema).addValues("ByTeS".getBytes(StandardCharsets.UTF_8)).build(), Row.withSchema(schema).addValues("bytes".getBytes(StandardCharsets.UTF_8)).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectBytes() { String sql = "SELECT b\"ByTes\""; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addByteArrayField("field1").build(); PAssert.that(stream) .containsInAnyOrder( 
Row.withSchema(schema).addValues("ByTes".getBytes(StandardCharsets.UTF_8)).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectExcept() { String sql = "SELECT * EXCEPT (Key, ts) FROM KeyValue;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field2").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues("KeyValue234").build(), Row.withSchema(schema).addValues("KeyValue235").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSelectReplace() { String sql = "WITH orders AS\n" + " (SELECT 5 as order_id,\n" + " \"sprocket\" as item_name,\n" + " 200 as quantity)\n" + "SELECT * REPLACE (\"widget\" AS item_name)\n" + "FROM orders"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("field1") .addStringField("field2") .addInt64Field("field3") .build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues(5L, "widget", 200L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testUnionAllBasic() { String sql = "SELECT row_id FROM table_all_types UNION ALL SELECT row_id FROM table_all_types_2"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = 
Schema.builder().addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue(1L).build(), Row.withSchema(schema).addValue(2L).build(), Row.withSchema(schema).addValue(3L).build(), Row.withSchema(schema).addValue(4L).build(), Row.withSchema(schema).addValue(5L).build(), Row.withSchema(schema).addValue(6L).build(), Row.withSchema(schema).addValue(7L).build(), Row.withSchema(schema).addValue(8L).build(), Row.withSchema(schema).addValue(9L).build(), Row.withSchema(schema).addValue(10L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testAVGWithLongInput() { String sql = "SELECT AVG(f_int_1) FROM aggregate_test_table;"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); thrown.expect(RuntimeException.class); thrown.expectMessage( "AVG(LONG) is not supported. You might want to use AVG(CAST(expression AS DOUBLE)."); zetaSQLQueryPlanner.convertToBeamRel(sql); } @Test public void testReverseString() { String sql = "SELECT REVERSE('abc');"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addStringField("field2").build(); PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("cba").build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCharLength() { String sql = "SELECT CHAR_LENGTH('abc');"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field").build(); 
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(3L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testCharLengthNull() { String sql = "SELECT CHAR_LENGTH(@p0);"; ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING)); ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addNullableField("field", FieldType.INT64).build(); PAssert.that(stream) .containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testTumbleAsTVF() { String sql = "select Key, Value, ts, window_start, window_end from " + "TUMBLE((select * from KeyValue), descriptor(ts), 'INTERVAL 1 SECOND')"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); ImmutableMap<String, Value> params = ImmutableMap.of(); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addInt64Field("Key") .addStringField("Value") .addDateTimeField("ts") .addDateTimeField("window_start") .addDateTimeField("window_end") .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema) .addValues( 14L, "KeyValue234", DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:06"), DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:06"), DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07")) .build(), Row.withSchema(schema) .addValues( 15L, "KeyValue235", DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07"), 
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07"), DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01T21:26:08")) .build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testIsNullTrueFalse() { String sql = "WITH Src AS (\n" + " SELECT NULL as data UNION ALL\n" + " SELECT TRUE UNION ALL\n" + " SELECT FALSE\n" + ")\n" + "SELECT\n" + " data IS NULL as isnull,\n" + " data IS NOT NULL as isnotnull,\n" + " data IS TRUE as istrue,\n" + " data IS NOT TRUE as isnottrue,\n" + " data IS FALSE as isfalse,\n" + " data IS NOT FALSE as isnotfalse\n" + "FROM Src\n"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); ImmutableMap<String, Value> params = ImmutableMap.of(); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder() .addField("isnull", FieldType.BOOLEAN) .addField("isnotnull", FieldType.BOOLEAN) .addField("istrue", FieldType.BOOLEAN) .addField("isnottrue", FieldType.BOOLEAN) .addField("isfalse", FieldType.BOOLEAN) .addField("isnotfalse", FieldType.BOOLEAN) .build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(true, false, false, true, false, true).build(), Row.withSchema(schema).addValues(false, true, true, false, false, true).build(), Row.withSchema(schema).addValues(false, true, false, true, true, false).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testZetaSQLBitOr() { String sql = "SELECT BIT_OR(row_id) FROM table_all_types GROUP BY bool_col"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = 
Schema.builder().addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValues(3L).build(), Row.withSchema(schema).addValue(7L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test @Ignore("NULL values don't work correctly. (https: public void testZetaSQLBitAnd() { String sql = "SELECT BIT_AND(row_id) FROM table_all_types GROUP BY bool_col"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); final Schema schema = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(schema).addValue(1L).build(), Row.withSchema(schema).addValue(0L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } @Test public void testSimpleTableName() { String sql = "SELECT Key FROM KeyValue"; ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config); BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql); PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode); Schema singleField = Schema.builder().addInt64Field("field1").build(); PAssert.that(stream) .containsInAnyOrder( Row.withSchema(singleField).addValues(14L).build(), Row.withSchema(singleField).addValues(15L).build()); pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES)); } }
Are we sure the length of `payload` is never 0 here? If it can be an empty string, `payload.toCharArray()` returns a zero-length array and `characters[0]` will throw an `ArrayIndexOutOfBoundsException`.
/**
 * Returns {@code payload} with its first character converted to lower case.
 * <p>
 * The empty-string guard is required: {@code payload.toCharArray()} on an
 * empty string yields a zero-length array, so indexing {@code characters[0]}
 * would throw an {@link ArrayIndexOutOfBoundsException}.
 *
 * @param payload the string to transform; must not be {@code null}, may be empty
 * @return the payload with its first letter lower-cased, or the payload
 *         unchanged if it is empty
 */
private static String lowerCaseTheFirstLetter(String payload) {
    if (payload.isEmpty()) {
        // Nothing to lower-case; avoids indexing into an empty char array.
        return payload;
    }
    char[] characters = payload.toCharArray();
    characters[0] = Character.toLowerCase(characters[0]);
    return new String(characters);
}
characters[0] = Character.toLowerCase(characters[0]);
/**
 * Lower-cases the first character of {@code payload}, leaving an empty
 * payload untouched.
 *
 * @param payload the string whose first letter should be lower-cased;
 *                must not be {@code null}
 * @return the transformed payload (unchanged when empty)
 */
private static String lowerCaseTheFirstLetter(String payload) {
    if (payload.isEmpty()) {
        return payload;
    }
    // Rebuild the string from the lowered first char plus the untouched tail.
    return Character.toLowerCase(payload.charAt(0)) + payload.substring(1);
}
class HttpUtil { private static final Logger log = LoggerFactory.getLogger(HttpUtil.class); private static final String METHOD_ACCESSED = "isMethodAccessed"; private static final String IO_EXCEPTION_OCCURED = "I/O exception occurred"; private static BStructType headerValueStructType; public static BValue[] addHeader(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = ((BStruct) abstractNativeFunction.getRefArgument(context, 0)); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); String headerName = abstractNativeFunction.getStringArgument(context, 0); String headerValue = abstractNativeFunction.getStringArgument(context, 1); HttpHeaders httpHeaders = httpCarbonMessage.getHeaders(); httpHeaders.add(headerName, headerValue); if (log.isDebugEnabled()) { log.debug("Add " + headerName + " to header with value: " + headerValue); } return AbstractNativeFunction.VOID_RETURN; } public static BValue[] getHeader(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); String headerName = abstractNativeFunction.getStringArgument(context, 0); String headerValue = httpCarbonMessage.getHeader(headerName); return abstractNativeFunction.getBValues(new BString(headerValue)); } public static BValue[] getProperty(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); String propertyName = abstractNativeFunction.getStringArgument(context, 0); Object propertyValue = 
httpCarbonMessage.getProperty(propertyName); if (propertyValue == null) { return AbstractNativeFunction.VOID_RETURN; } if (propertyValue instanceof String) { return abstractNativeFunction.getBValues(new BString((String) propertyValue)); } else { throw new BallerinaException("Property value is of unknown type : " + propertyValue.getClass().getName()); } } public static BValue[] removeAllHeaders(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.getHeaders().clear(); return AbstractNativeFunction.VOID_RETURN; } public static BValue[] removeHeader(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); String headerName = abstractNativeFunction.getStringArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.removeHeader(headerName); if (log.isDebugEnabled()) { log.debug("Remove header:" + headerName); } return AbstractNativeFunction.VOID_RETURN; } public static BValue[] setHeader(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); String headerName = abstractNativeFunction.getStringArgument(context, 0); String headerValue = abstractNativeFunction.getStringArgument(context, 1); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.setHeader(headerName, headerValue); if (log.isDebugEnabled()) { log.debug("Set " + headerName + " header with value: " + headerValue); } return 
AbstractNativeFunction.VOID_RETURN; } public static BValue[] setProperty(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); String propertyName = abstractNativeFunction.getStringArgument(context, 0); String propertyValue = abstractNativeFunction.getStringArgument(context, 1); if (propertyName != null && propertyValue != null) { HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.setProperty(propertyName, propertyValue); } return AbstractNativeFunction.VOID_RETURN; } public static BValue[] setBinaryPayload(Context context, AbstractNativeFunction nativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) nativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil.getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.waitAndReleaseAllEntities(); byte[] payload = nativeFunction.getBlobArgument(context, 0); BlobDataSource blobDataSource = new BlobDataSource(payload); addMessageDataSource(httpMessageStruct, blobDataSource); httpCarbonMessage.setHeader(Constants.CONTENT_TYPE, Constants.APPLICATION_JSON); return AbstractNativeFunction.VOID_RETURN; } public static BValue[] setJsonPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.waitAndReleaseAllEntities(); BJSON payload = (BJSON) abstractNativeFunction.getRefArgument(context, 1); addMessageDataSource(httpMessageStruct, payload); HttpUtil.setHeaderToStruct(context, httpMessageStruct, Constants.CONTENT_TYPE, Constants.APPLICATION_JSON); return 
AbstractNativeFunction.VOID_RETURN; } public static BValue[] setStringPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.waitAndReleaseAllEntities(); String payload = abstractNativeFunction.getStringArgument(context, 0); StringDataSource stringDataSource = new StringDataSource(payload); addMessageDataSource(httpMessageStruct, stringDataSource); HttpUtil.setHeaderToStruct(context, httpMessageStruct, Constants.CONTENT_TYPE, Constants.TEXT_PLAIN); if (log.isDebugEnabled()) { log.debug("Setting new payload: " + payload); } return AbstractNativeFunction.VOID_RETURN; } public static BValue[] setXMLPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.waitAndReleaseAllEntities(); BXML payload = (BXML) abstractNativeFunction.getRefArgument(context, 1); addMessageDataSource(httpMessageStruct, payload); HttpUtil.setHeaderToStruct(context, httpMessageStruct, Constants.CONTENT_TYPE, Constants.APPLICATION_XML); return AbstractNativeFunction.VOID_RETURN; } public static BValue[] getBinaryPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BlobDataSource result; try { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); if (httpMessageStruct.getNativeData(MESSAGE_DATA_SOURCE) != null) { result = (BlobDataSource) 
httpMessageStruct.getNativeData(MESSAGE_DATA_SOURCE); } else { HttpMessageDataStreamer httpMessageDataStreamer = new HttpMessageDataStreamer(httpCarbonMessage); result = new BlobDataSource(toByteArray(httpMessageDataStreamer.getInputStream())); HttpUtil.addMessageDataSource(httpMessageStruct, result); } if (log.isDebugEnabled()) { log.debug("String representation of the payload:" + result.getMessageAsString()); } } catch (Throwable e) { throw new BallerinaException("Error while retrieving string payload from message: " + e.getMessage()); } return abstractNativeFunction.getBValues(new BBlob(result.getValue())); } public static BValue[] getJsonPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BJSON result = null; try { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); MessageDataSource payload = HttpUtil.getMessageDataSource(httpMessageStruct); if (payload != null) { if (payload instanceof BJSON) { result = (BJSON) payload; } else { result = new BJSON(payload.getMessageAsString()); } } else { HttpMessageDataStreamer httpMessageDataStreamer = new HttpMessageDataStreamer(httpCarbonMessage); result = new BJSON(httpMessageDataStreamer.getInputStream()); addMessageDataSource(httpMessageStruct, result); } } catch (Throwable e) { throw new BallerinaException("Error while retrieving json payload from message: " + e.getMessage()); } return abstractNativeFunction.getBValues(result); } public static BValue[] getStringPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BString result; try { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); MessageDataSource messageDataSource = HttpUtil.getMessageDataSource(httpMessageStruct); if (messageDataSource != null) { result = new 
BString(messageDataSource.getMessageAsString()); } else { HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); if (httpCarbonMessage.isEmpty() && httpCarbonMessage.isEndOfMsgAdded()) { return abstractNativeFunction.getBValues(new BString("")); } HttpMessageDataStreamer httpMessageDataStreamer = new HttpMessageDataStreamer(httpCarbonMessage); String payload = StringUtils.getStringFromInputStream(httpMessageDataStreamer.getInputStream()); result = new BString(payload); addMessageDataSource(httpMessageStruct, new StringDataSource(payload)); } if (log.isDebugEnabled()) { log.debug("Payload in String:" + result.stringValue()); } } catch (Throwable e) { throw new BallerinaException("Error while retrieving string payload from message: " + e.getMessage()); } return abstractNativeFunction.getBValues(result); } public static BValue[] getXMLPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BXML result; try { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); MessageDataSource messageDataSource = HttpUtil.getMessageDataSource(httpMessageStruct); if (messageDataSource != null) { if (messageDataSource instanceof BXML) { result = (BXML) messageDataSource; } else { result = XMLUtils.parse(messageDataSource.getMessageAsString()); } } else { HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); HttpMessageDataStreamer httpMessageDataStreamer = new HttpMessageDataStreamer(httpCarbonMessage); result = XMLUtils.parse(httpMessageDataStreamer.getInputStream()); addMessageDataSource(httpMessageStruct, result); } } catch (Throwable e) { throw new BallerinaException("Error while retrieving XML payload from message: " + e.getMessage()); } return abstractNativeFunction.getBValues(result); } private static byte[] toByteArray(InputStream input) throws IOException { byte[] 
buffer = new byte[4096]; // (tail of toByteArray — definition starts on the previous chunk line)
    int n1;
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    // Copy in 4 KiB chunks until EOF (read() returns -1).
    for (; -1 != (n1 = input.read(buffer)); ) {
        output.write(buffer, 0, n1);
    }
    byte[] bytes = output.toByteArray();
    output.close();
    return bytes;
}

/** Caches a parsed payload data source on the struct so the entity stream isn't re-consumed. */
public static void addMessageDataSource(BStruct struct, MessageDataSource messageDataSource) {
    struct.addNativeData(MESSAGE_DATA_SOURCE, messageDataSource);
}

/** Returns the cached payload data source, or null when no payload has been materialized yet. */
public static MessageDataSource getMessageDataSource(BStruct httpMsgStruct) {
    return (MessageDataSource) httpMsgStruct.getNativeData(MESSAGE_DATA_SOURCE);
}

/** Best-effort close of a message output stream; failures are logged, never propagated. */
public static void closeMessageOutputStream(OutputStream messageOutputStream) {
    try {
        if (messageOutputStream != null) {
            messageOutputStream.close();
        }
    } catch (IOException e) {
        log.error("Couldn't close message output stream", e);
    }
}

/**
 * Reads the Content-Length header of the request struct and returns it as a BInteger.
 * Throws a BallerinaException when the header is missing or not a valid integer.
 */
public static BValue[] getContentLength(Context context, AbstractNativeFunction abstractNativeFunction) {
    int contentLength = -1;
    BStruct requestStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0);
    String lengthStr = HttpUtil.getHeaderFromStruct(requestStruct, Constants.HTTP_CONTENT_LENGTH);
    try {
        contentLength = Integer.parseInt(lengthStr);
    } catch (NumberFormatException e) {
        throw new BallerinaException("Invalid content length");
    }
    return abstractNativeFunction.getBValues(new BInteger(contentLength));
}

/**
 * Parses an application/x-www-form-urlencoded payload ("a=1&b=2") into a BMap.
 * Values are URL-decoded as UTF-8; entries without '=' are skipped.
 */
public static BMap<String, BValue> getParamMap(String payload) throws UnsupportedEncodingException {
    BMap<String, BValue> params = new BMap<>();
    String[] entries = payload.split("&");
    for (String entry : entries) {
        int index = entry.indexOf('=');
        if (index != -1) {
            String name = entry.substring(0, index).trim();
            String value = URLDecoder.decode(entry.substring(index + 1).trim(), "UTF-8");
            // NOTE(review): matches("") is only true for an empty string — an equals-empty check would be clearer.
            if (value.matches("")) {
                params.put(name, new BString(""));
                continue;
            }
            params.put(name, new BString(value));
        }
    }
    return params;
}

/**
 * Helper method to start pending http server connectors.
* * @throws BallerinaConnectorException */ public static void startPendingHttpConnectors(BallerinaHttpServerConnector httpServerConnector) throws BallerinaConnectorException { try { HttpConnectionManager.getInstance().startPendingHTTPConnectors(httpServerConnector); } catch (ServerConnectorException e) { throw new BallerinaConnectorException(e); } } public static BValue[] prepareResponseAndSend(Context context, AbstractNativeFunction abstractNativeFunction , HTTPCarbonMessage requestMessage, HTTPCarbonMessage responseMessage, BStruct httpMessageStruct) { addHTTPSessionAndCorsHeaders(requestMessage, responseMessage); MessageDataSource outboundMessageSource = HttpUtil.getMessageDataSource(httpMessageStruct); HttpResponseStatusFuture outboundResponseStatusFuture = sendOutboundResponse(requestMessage, responseMessage); if (outboundMessageSource != null) { OutputStream messageOutputStream = new HttpMessageDataStreamer(responseMessage).getOutputStream(); outboundMessageSource.serializeData(messageOutputStream); HttpUtil.closeMessageOutputStream(messageOutputStream); } try { outboundResponseStatusFuture = outboundResponseStatusFuture.sync(); } catch (InterruptedException e) { throw new BallerinaException("interrupted sync: " + e.getMessage()); } if (outboundResponseStatusFuture.getStatus().getCause() != null) { return abstractNativeFunction.getBValues(getServerConnectorError(context , outboundResponseStatusFuture.getStatus().getCause())); } return abstractNativeFunction.VOID_RETURN; } public static BStruct createSessionStruct(Context context, Session session) { BStruct sessionStruct = ConnectorUtils .createAndGetStruct(context, Constants.PROTOCOL_PACKAGE_HTTP, Constants.SESSION); sessionStruct.addNativeData(Constants.HTTP_SESSION, session); return sessionStruct; } public static String getSessionID(String cookieHeader) { return Arrays.stream(cookieHeader.split(";")) .filter(cookie -> cookie.trim().startsWith(Constants.SESSION_ID)) 
.findFirst().get().trim().substring(Constants.SESSION_ID.length()); // (tail of getSessionID)
    // NOTE(review): findFirst().get() throws NoSuchElementException when no session cookie exists — confirm callers guarantee it.
}

/** Adds the session header (when a session exists) and CORS headers (when Origin is present) to the response. */
public static void addHTTPSessionAndCorsHeaders(HTTPCarbonMessage requestMsg, HTTPCarbonMessage responseMsg) {
    Session session = (Session) requestMsg.getProperty(Constants.HTTP_SESSION);
    if (session != null) {
        session.generateSessionHeader(responseMsg);
    }
    if (requestMsg.getHeader(Constants.ORIGIN) != null) {
        CorsHeaderGenerator.process(requestMsg, responseMsg, true);
    }
}

/**
 * Dispatches the response message over the transport.
 *
 * @return a future tracking completion of the send.
 * @throws BallerinaConnectorException when the transport rejects the response.
 */
public static HttpResponseStatusFuture sendOutboundResponse(HTTPCarbonMessage requestMsg,
                                                            HTTPCarbonMessage responseMsg) {
    HttpResponseStatusFuture responseFuture;
    try {
        responseFuture = requestMsg.respond(responseMsg);
    } catch (org.wso2.transport.http.netty.contract.ServerConnectorException e) {
        throw new BallerinaConnectorException("Error occurred during response", e);
    }
    return responseFuture;
}

// Logs the failure and replies with an error response; status code falls back to 500. (continues next line)
public static void handleFailure(HTTPCarbonMessage requestMessage, BallerinaConnectorException ex) {
    Object carbonStatusCode = requestMessage.getProperty(Constants.HTTP_STATUS_CODE);
    int statusCode = (carbonStatusCode == null) ?
500 : Integer.parseInt(carbonStatusCode.toString()); // (tail of handleFailure)
    String errorMsg = ex.getMessage();
    log.error(errorMsg);
    ErrorHandlerUtils.printError(ex);
    sendOutboundResponse(requestMessage, createErrorMessage(errorMsg, statusCode));
}

/** Builds a plain-text error response carbon message with the given payload and status code. */
public static HTTPCarbonMessage createErrorMessage(String payload, int statusCode) {
    HTTPCarbonMessage response = HttpUtil.createHttpCarbonMessage(false);
    response.waitAndReleaseAllEntities();
    if (payload != null) {
        payload = lowerCaseTheFirstLetter(payload);
        // NOTE(review): getBytes() uses the platform default charset — presumably UTF-8 is intended; confirm.
        response.addHttpContent(new DefaultLastHttpContent(Unpooled.wrappedBuffer(payload.getBytes())));
    }
    setHttpStatusCodes(statusCode, response);
    return response;
}

// Sets text/plain content type and the status code property on the response.
private static void setHttpStatusCodes(int statusCode, HTTPCarbonMessage response) {
    HttpHeaders httpHeaders = response.getHeaders();
    httpHeaders.set(org.wso2.transport.http.netty.common.Constants.HTTP_CONTENT_TYPE,
            org.wso2.transport.http.netty.common.Constants.TEXT_PLAIN);
    response.setProperty(org.wso2.transport.http.netty.common.Constants.HTTP_STATUS_CODE, statusCode);
}

/**
 * Creates an http:HttpConnectorError struct from a throwable.
 * Falls back to a generic I/O message when the throwable carries no message.
 */
public static BStruct getServerConnectorError(Context context, Throwable throwable) {
    PackageInfo httpPackageInfo = context.getProgramFile()
            .getPackageInfo(Constants.PROTOCOL_PACKAGE_HTTP);
    StructInfo errorStructInfo = httpPackageInfo.getStructInfo(Constants.HTTP_CONNECTOR_ERROR);
    BStruct httpConnectorError = new BStruct(errorStructInfo.getType());
    if (throwable.getMessage() == null) {
        httpConnectorError.setStringField(0, IO_EXCEPTION_OCCURED);
    } else {
        httpConnectorError.setStringField(0, throwable.getMessage());
    }
    return httpConnectorError;
}

/**
 * Returns the carbon message attached to the struct; when absent, attaches and returns the
 * supplied default message instead.
 */
public static HTTPCarbonMessage getCarbonMsg(BStruct struct, HTTPCarbonMessage defaultMsg) {
    HTTPCarbonMessage httpCarbonMessage = (HTTPCarbonMessage) struct
            .getNativeData(Constants.TRANSPORT_MESSAGE);
    if (httpCarbonMessage != null) {
        return httpCarbonMessage;
    }
    addCarbonMsg(struct, defaultMsg);
    return defaultMsg;
}

// Attaches the transport carbon message to the struct. (continues next line)
public static void addCarbonMsg(BStruct struct, HTTPCarbonMessage httpCarbonMessage) {
struct.addNativeData(Constants.TRANSPORT_MESSAGE, httpCarbonMessage); // (tail of addCarbonMsg)
}

/** Records the struct type used when materializing HeaderValue structs for header maps. */
public static void setHeaderValueStructType(BStruct struct) {
    headerValueStructType = struct.getType();
}

/** Populates an inbound request struct from the transport message: native data, request info, headers. */
public static void populateInboundRequest(BStruct inboundRequestStruct, HTTPCarbonMessage inboundRequestMsg) {
    inboundRequestStruct.addNativeData(Constants.TRANSPORT_MESSAGE, inboundRequestMsg);
    inboundRequestStruct.addNativeData(Constants.INBOUND_REQUEST, true);
    enrichWithInboundRequestInfo(inboundRequestStruct, inboundRequestMsg);
    enrichWithInboundRequestHeaders(inboundRequestStruct, inboundRequestMsg);
}

// Moves User-Agent into its dedicated struct field (removing it from the raw headers),
// then copies the remaining headers into the struct's header map.
private static void enrichWithInboundRequestHeaders(BStruct inboundRequestStruct,
                                                    HTTPCarbonMessage inboundRequestMsg) {
    if (inboundRequestMsg.getHeader(Constants.USER_AGENT_HEADER) != null) {
        inboundRequestStruct.setStringField(Constants.REQUEST_USER_AGENT_INDEX,
                inboundRequestMsg.getHeader(Constants.USER_AGENT_HEADER));
        inboundRequestMsg.removeHeader(Constants.USER_AGENT_HEADER);
    }
    inboundRequestStruct.setRefField(Constants.REQUEST_HEADERS_INDEX,
            prepareHeaderMap(inboundRequestMsg.getHeaders(), new BMap<>()));
}

// Copies path, host, port, method and version from transport properties into struct fields.
private static void enrichWithInboundRequestInfo(BStruct inboundRequestStruct,
                                                 HTTPCarbonMessage inboundRequestMsg) {
    inboundRequestStruct.setStringField(Constants.REQUEST_PATH_INDEX,
            (String) inboundRequestMsg.getProperty(Constants.REQUEST_URL));
    inboundRequestStruct.setStringField(Constants.REQUEST_HOST_INDEX,
            ((InetSocketAddress) inboundRequestMsg.getProperty(Constants.LOCAL_ADDRESS)).getHostName());
    inboundRequestStruct.setIntField(Constants.REQUEST_PORT_INDEX,
            (Integer) inboundRequestMsg.getProperty(Constants.LISTENER_PORT));
    inboundRequestStruct.setStringField(Constants.REQUEST_METHOD_INDEX,
            (String) inboundRequestMsg.getProperty(Constants.HTTP_METHOD));
    inboundRequestStruct.setStringField(Constants.REQUEST_VERSION_INDEX,
            (String) inboundRequestMsg.getProperty(Constants.HTTP_VERSION));
    Map<String, String> resourceArgValues = (Map<String, String>)
inboundRequestMsg.getProperty(Constants.RESOURCE_ARGS); // (tail of enrichWithInboundRequestInfo)
    inboundRequestStruct.setStringField(Constants.REQUEST_REST_URI_POSTFIX_INDEX,
            resourceArgValues.get(Constants.REST_URI_POSTFIX));
}

/**
 * Populates an inbound response struct: status code, reason phrase, the Server header
 * (moved into its own field) and the remaining header map.
 */
public static void populateInboundResponse(BStruct response, HTTPCarbonMessage cMsg) {
    response.addNativeData(Constants.TRANSPORT_MESSAGE, cMsg);
    int statusCode = (Integer) cMsg.getProperty(Constants.HTTP_STATUS_CODE);
    response.setIntField(Constants.RESPONSE_STATUS_CODE_INDEX, statusCode);
    response.setStringField(Constants.RESPONSE_REASON_PHRASE_INDEX,
            HttpResponseStatus.valueOf(statusCode).reasonPhrase());
    if (cMsg.getHeader(Constants.SERVER_HEADER) != null) {
        response.setStringField(Constants.RESPONSE_SERVER_INDEX, cMsg.getHeader(Constants.SERVER_HEADER));
        cMsg.removeHeader(Constants.SERVER_HEADER);
    }
    response.setRefField(Constants.RESPONSE_HEADERS_INDEX, prepareHeaderMap(cMsg.getHeaders(), new BMap<>()));
}

/** Copies the struct's headers onto the outgoing request transport message. */
@SuppressWarnings("unchecked")
public static void populateOutboundRequest(BStruct request, HTTPCarbonMessage reqMsg) {
    setHeadersToTransportMessage(reqMsg, request);
}

/** Initializes an outbound response struct: transport message, originating request, empty header map. */
public static void populateOutboundResponse(BStruct response, HTTPCarbonMessage resMsg,
                                            HTTPCarbonMessage reqMsg) {
    response.addNativeData(Constants.TRANSPORT_MESSAGE, resMsg);
    response.addNativeData(Constants.INBOUND_REQUEST_MESSAGE, reqMsg);
    response.addNativeData(Constants.OUTBOUND_RESPONSE, true);
    response.setRefField(Constants.RESPONSE_HEADERS_INDEX, new BMap<>());
}

// Converts Netty HttpHeaders into a BMap of header name -> array of HeaderValue structs,
// splitting comma-separated header values into individual entries. (continues next line)
private static BMap<String, BValue> prepareHeaderMap(HttpHeaders headers, BMap<String, BValue> headerMap) {
    Map<String, ArrayList> headerStructHolder = new HashMap<>();
    for (Map.Entry<String, String> headerEntry : headers) {
        String headerKey = headerEntry.getKey().trim();
        String headerValue = headerEntry.getValue().trim();
        ArrayList<BStruct> headerValueList = headerStructHolder.get(headerKey) != null ?
headerStructHolder.get(headerKey) : new ArrayList<>(); // (tail of prepareHeaderMap)
        if (headerValue.contains(",")) {
            // Comma-separated header: one HeaderValue struct per element.
            List<String> valueList = Arrays.stream(headerValue.split(",")).map(String::trim)
                    .collect(Collectors.toList());
            for (String value : valueList) {
                populateHeaderStruct(headerValueList, value);
            }
        } else {
            populateHeaderStruct(headerValueList, headerValue);
        }
        headerStructHolder.put(headerKey, headerValueList);
    }
    for (Map.Entry<String, ArrayList> structHolder : headerStructHolder.entrySet()) {
        headerMap.put(structHolder.getKey(), new BRefValueArray((BRefType[]) structHolder.getValue()
                .toArray(new BRefType[0]), new BArrayType(headerValueStructType)));
    }
    return headerMap;
}

// Appends a HeaderValue struct for the given raw value; values containing ';' are parsed for parameters.
@SuppressWarnings("unchecked")
private static void populateHeaderStruct(ArrayList headerValueList, String value) {
    if (value.contains(";")) {
        headerValueList.add(populateWithHeaderValueAndParams(new BStruct(headerValueStructType), value));
    } else {
        headerValueList.add(populateWithHeaderValue(new BStruct(headerValueStructType), value));
    }
}

// Splits "value;k1=v1;k2=v2" into the value field and a parameter map.
private static BStruct populateWithHeaderValueAndParams(BStruct headerStruct, String headerValue) {
    String value = headerValue.substring(0, headerValue.indexOf(";")).trim();
    List<String> paramList = Arrays.stream(headerValue.substring(headerValue.indexOf(";") + 1)
            .split(";")).map(String::trim).collect(Collectors.toList());
    headerStruct.setStringField(Constants.HEADER_VALUE_INDEX, value);
    headerStruct.setRefField(Constants.HEADER_PARAM_INDEX, createParamBMap(paramList));
    return headerStruct;
}

// Plain header value, no parameters.
private static BStruct populateWithHeaderValue(BStruct headerStruct, String headerValue) {
    headerStruct.setStringField(0, headerValue.trim());
    return headerStruct;
}

// Builds a BMap from "key=value" parameter strings; bare keys map to null. (continues next line)
private static BMap<String, BValue> createParamBMap(List<String> paramList) {
    BMap<String, BValue> paramMap = new BMap<>();
    for (String param : paramList) {
        if (param.contains("=")) {
            String[] keyValuePair = param.split("=");
            paramMap.put(keyValuePair[0].trim(), new BString(keyValuePair[1].trim()));
        } else {
paramMap.put(param.trim(), null); // (tail of createParamBMap — bare parameter, no value)
        }
    }
    return paramMap;
}

/**
 * Set headers of request/response struct to the transport message.
 * Clears any existing transport headers first; struct type decides which header map to use.
 *
 * @param outboundRequest transport Http carbon message.
 * @param struct          req/resp struct.
 */
public static void setHeadersToTransportMessage(HTTPCarbonMessage outboundRequest, BStruct struct) {
    outboundRequest.getHeaders().clear();
    BMap<String, BValue> headers = struct.getType().getName().equals(Constants.REQUEST) ?
            getRequestStructHeaders(struct) : getResponseStructHeaders(struct);
    if (headers == null) {
        return;
    }
    Set<String> keys = headers.keySet();
    for (String key : keys) {
        String headerValue = buildHeaderValue(headers, key);
        outboundRequest.setHeader(key, headerValue);
    }
}

// Returns the request struct's header map, re-merging the User-Agent field back in when set.
@SuppressWarnings("unchecked")
private static BMap<String, BValue> getRequestStructHeaders(BStruct struct) {
    BMap<String, BValue> headers = (BMap) struct.getRefField(Constants.REQUEST_HEADERS_INDEX);
    if (headers == null) {
        return null;
    }
    HttpHeaders removedHeaders = new DefaultHttpHeaders();
    if (!struct.getStringField(Constants.REQUEST_USER_AGENT_INDEX).equals("")) {
        removedHeaders.add(Constants.USER_AGENT_HEADER,
                struct.getStringField(Constants.REQUEST_USER_AGENT_INDEX));
    }
    return prepareHeaderMap(removedHeaders, headers);
}

// Returns the response struct's header map, re-merging the Server field back in (inbound responses only).
@SuppressWarnings("unchecked")
private static BMap<String, BValue> getResponseStructHeaders(BStruct struct) {
    BMap<String, BValue> headers = (BMap) struct.getRefField(Constants.RESPONSE_HEADERS_INDEX);
    if (headers == null) {
        return null;
    }
    HttpHeaders removedHeaders = new DefaultHttpHeaders();
    if (struct.getNativeData(Constants.OUTBOUND_RESPONSE) == null
            && !struct.getStringField(Constants.RESPONSE_SERVER_INDEX).equals("")) {
        removedHeaders.add(Constants.SERVER_HEADER, struct.getStringField(Constants.RESPONSE_SERVER_INDEX));
    }
    return prepareHeaderMap(removedHeaders, headers);
}

// Serializes one header's value array into a single comma-joined header string. (continues next line)
private static String buildHeaderValue(BMap<String, BValue> headers, String key) {
    StringBuilder headerValue = new StringBuilder();
    if
(headers.get(key).getType().getTag() != TypeTags.ARRAY_TAG) { // (tail of buildHeaderValue)
        throw new BallerinaException("expects an array as header value for header : " + key);
    }
    BRefValueArray headerValues = (BRefValueArray) headers.get(key);
    for (int index = 0; index < headerValues.size(); index++) {
        // Each element is either a HeaderValue struct or a raw map with value/param entries.
        if (headerValues.get(index).getType().getTag() == TypeTags.STRUCT_TAG) {
            BStruct headerStruct = (BStruct) headerValues.get(index);
            String value = headerStruct.getStringField(Constants.HEADER_VALUE_INDEX);
            headerValue.append(index > 0 ? "," + value : value);
            BMap paramMap = (BMap) headerStruct.getRefField(Constants.HEADER_PARAM_INDEX);
            headerValue = paramMap != null ? concatParams(headerValue, paramMap) : headerValue;
        } else if (headerValues.get(index).getType().getTag() == TypeTags.MAP_TAG) {
            BMap headerMap = (BMap) headerValues.get(index);
            String value = headerMap.get(Constants.HEADER_VALUE).stringValue();
            headerValue.append(index > 0 ? "," + value : value);
            BMap paramMap = (BMap) headerMap.get(Constants.HEADER_PARAM);
            headerValue = paramMap != null ? concatParams(headerValue, paramMap) : headerValue;
        } else {
            throw new BallerinaException("invalid header assignment for key : " + key);
        }
    }
    return headerValue.toString();
}

// Appends ";key=value" (or ";key" for valueless params) for every entry in the param map.
@SuppressWarnings("unchecked")
private static StringBuilder concatParams(StringBuilder headerValue, BMap paramMap) {
    Set<String> paramKeys = paramMap.keySet();
    for (String paramKey : paramKeys) {
        String paramValue = paramMap.get(paramKey) != null ? paramMap.get(paramKey).stringValue() : null;
        headerValue.append(paramValue == null ? ";" + paramKey : ";" + paramKey + "=" + paramValue);
    }
    return headerValue;
}

// Adds one header to the struct's header map, lazily resolving the HeaderValue struct type.
// (continues next line)
private static void setHeaderToStruct(Context context, BStruct struct, String key, String value) {
    headerValueStructType = headerValueStructType == null ? ConnectorUtils.createAndGetStruct(context,
            Constants.HTTP_PACKAGE_PATH, Constants.HEADER_VALUE_STRUCT).getType() : headerValueStructType;
    int headersIndex = struct.getType().getName().equals(Constants.REQUEST) ?
Constants.REQUEST_HEADERS_INDEX : Constants.RESPONSE_HEADERS_INDEX; // (tail of setHeaderToStruct)
    BMap<String, BValue> headerMap = struct.getRefField(headersIndex) != null ?
            (BMap) struct.getRefField(headersIndex) : new BMap<>();
    struct.setRefField(headersIndex, prepareHeaderMap(new DefaultHttpHeaders().add(key, value), headerMap));
}

// Builds the serialized value for one header from the struct's header map, or null when no map exists.
@SuppressWarnings("unchecked")
private static String getHeaderFromStruct(BStruct struct, String key) {
    int headersIndex = struct.getType().getName().equals(Constants.REQUEST) ?
            Constants.REQUEST_HEADERS_INDEX : Constants.RESPONSE_HEADERS_INDEX;
    return struct.getRefField(headersIndex) != null ?
            buildHeaderValue((BMap) struct.getRefField(headersIndex), key) : null;
}

/**
 * Extract the listener configurations from the config annotation.
 * Falls back to the default listener configuration set when no annotation is given
 * or the annotation yields no configurations.
 *
 * @param annotationInfo configuration annotation info.
 * @return the set of {@link ListenerConfiguration} which were extracted from config annotation.
 */
public static Set<ListenerConfiguration> getDefaultOrDynamicListenerConfig(Annotation annotationInfo) {
    if (annotationInfo == null) {
        // NOTE(review): "Confiugration" typo is in the external API name and must stay until that API is renamed.
        return HttpConnectionManager.getInstance().getDefaultListenerConfiugrationSet();
    }
    Set<ListenerConfiguration> listenerConfSet = new HashSet<>();
    extractBasicConfig(annotationInfo, listenerConfSet);
    extractHttpsConfig(annotationInfo, listenerConfSet);
    if (listenerConfSet.isEmpty()) {
        listenerConfSet = HttpConnectionManager.getInstance().getDefaultListenerConfiugrationSet();
    }
    return listenerConfSet;
}

// Builds the listener id "host:port"; null host defaults to 0.0.0.0. (continues next line)
private static String getListenerInterface(String host, int port) {
    host = host != null ?
host : "0.0.0.0"; // (tail of getListenerInterface)
    return host + ":" + port;
}

// Extracts the plain-HTTP listener configuration (host/port/keep-alive/chunking) from the annotation.
// Only adds a configuration when a positive port is configured.
private static void extractBasicConfig(Annotation configInfo, Set<ListenerConfiguration> listenerConfSet) {
    AnnAttrValue hostAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_HOST);
    AnnAttrValue portAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_PORT);
    AnnAttrValue keepAliveAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_KEEP_ALIVE);
    AnnAttrValue transferEncoding = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_TRANSFER_ENCODING);
    AnnAttrValue chunking = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_CHUNKING);
    ListenerConfiguration listenerConfiguration = new ListenerConfiguration();
    if (portAttrVal != null && portAttrVal.getIntValue() > 0) {
        listenerConfiguration.setPort(Math.toIntExact(portAttrVal.getIntValue()));
        listenerConfiguration.setScheme(Constants.PROTOCOL_HTTP);
        if (hostAttrVal != null && hostAttrVal.getStringValue() != null) {
            listenerConfiguration.setHost(hostAttrVal.getStringValue());
        } else {
            listenerConfiguration.setHost(Constants.HTTP_DEFAULT_HOST);
        }
        if (keepAliveAttrVal != null) {
            listenerConfiguration.setKeepAlive(keepAliveAttrVal.getBooleanValue());
        } else {
            listenerConfiguration.setKeepAlive(Boolean.TRUE);
        }
        // Only "chunking" is accepted as a Transfer-Encoding override.
        if (transferEncoding != null && !Constants.ANN_CONFIG_ATTR_CHUNKING
                .equalsIgnoreCase(transferEncoding.getStringValue())) {
            throw new BallerinaConnectorException("Unsupported configuration found for Transfer-Encoding : "
                    + transferEncoding.getStringValue());
        }
        if (chunking != null) {
            ChunkConfig chunkConfig = getChunkConfig(chunking.getStringValue());
            listenerConfiguration.setChunkConfig(chunkConfig);
        } else {
            listenerConfiguration.setChunkConfig(ChunkConfig.AUTO);
        }
        listenerConfiguration
                .setId(getListenerInterface(listenerConfiguration.getHost(), listenerConfiguration.getPort()));
        listenerConfSet.add(listenerConfiguration);
    }
}

// Maps a chunking config string (auto/always/never, case-insensitive) to a ChunkConfig. (continues next line)
public static ChunkConfig getChunkConfig(String chunking) {
    ChunkConfig chunkConfig;
    if
(Constants.CHUNKING_AUTO.equalsIgnoreCase(chunking)) { // (tail of getChunkConfig)
        chunkConfig = ChunkConfig.AUTO;
    } else if (Constants.CHUNKING_ALWAYS.equalsIgnoreCase(chunking)) {
        chunkConfig = ChunkConfig.ALWAYS;
    } else if (Constants.CHUNKING_NEVER.equalsIgnoreCase(chunking)) {
        chunkConfig = ChunkConfig.NEVER;
    } else {
        throw new BallerinaConnectorException("Invalid configuration found for Transfer-Encoding : " + chunking);
    }
    return chunkConfig;
}

// Extracts the HTTPS/WSS listener configuration from the annotation: port, host, keystore,
// truststore, mutual-SSL, protocols and cipher parameters. Only adds a configuration when
// a positive HTTPS (or WSS) port is configured; validates that mandatory keystore values exist.
private static void extractHttpsConfig(Annotation configInfo, Set<ListenerConfiguration> listenerConfSet) {
    AnnAttrValue httpsPortAttrVal;
    // The HTTPS port attribute takes precedence; fall back to the WebSocket-secure port.
    if (configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_HTTPS_PORT) == null) {
        httpsPortAttrVal = configInfo.getAnnAttrValue(org.ballerinalang.net.ws.Constants.ANN_CONFIG_ATTR_WSS_PORT);
    } else {
        httpsPortAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_HTTPS_PORT);
    }
    AnnAttrValue keyStoreFileAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_KEY_STORE_FILE);
    AnnAttrValue keyStorePasswordAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_KEY_STORE_PASS);
    AnnAttrValue certPasswordAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_CERT_PASS);
    AnnAttrValue trustStoreFileAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_TRUST_STORE_FILE);
    AnnAttrValue trustStorePasswordAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_TRUST_STORE_PASS);
    AnnAttrValue sslVerifyClientAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_SSL_VERIFY_CLIENT);
    AnnAttrValue sslEnabledProtocolsAttrVal = configInfo
            .getAnnAttrValue(Constants.ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS);
    AnnAttrValue ciphersAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_CIPHERS);
    AnnAttrValue sslProtocolAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_SSL_PROTOCOL);
    AnnAttrValue hostAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_HOST);
    ListenerConfiguration listenerConfiguration = new ListenerConfiguration();
    if (httpsPortAttrVal != null && httpsPortAttrVal.getIntValue() > 0) {
        listenerConfiguration.setPort(Math.toIntExact(httpsPortAttrVal.getIntValue()));
        listenerConfiguration.setScheme(Constants.PROTOCOL_HTTPS);
        if (hostAttrVal != null && hostAttrVal.getStringValue() != null) {
            listenerConfiguration.setHost(hostAttrVal.getStringValue());
        } else {
            listenerConfiguration.setHost(Constants.HTTP_DEFAULT_HOST);
        }
        // Keystore file, keystore password and certificate password are mandatory for TLS.
        if (keyStoreFileAttrVal == null || keyStoreFileAttrVal.getStringValue() == null) {
            throw new BallerinaConnectorException("Keystore location must be provided for secure connection");
        }
        if (keyStorePasswordAttrVal == null || keyStorePasswordAttrVal.getStringValue() == null) {
            throw new BallerinaConnectorException("Keystore password value must be provided for secure connection");
        }
        if (certPasswordAttrVal == null || certPasswordAttrVal.getStringValue() == null) {
            throw new BallerinaConnectorException(
                    "Certificate password value must be provided for secure connection");
        }
        // Mutual SSL additionally requires a truststore.
        if ((trustStoreFileAttrVal == null || trustStoreFileAttrVal.getStringValue() == null)
                && sslVerifyClientAttrVal != null) {
            throw new BallerinaException("Truststore location must be provided to enable Mutual SSL");
        }
        if ((trustStorePasswordAttrVal == null || trustStorePasswordAttrVal.getStringValue() == null)
                && sslVerifyClientAttrVal != null) {
            throw new BallerinaException("Truststore password value must be provided to enable Mutual SSL");
        }
        listenerConfiguration.setTLSStoreType(Constants.PKCS_STORE_TYPE);
        listenerConfiguration.setKeyStoreFile(keyStoreFileAttrVal.getStringValue());
        listenerConfiguration.setKeyStorePass(keyStorePasswordAttrVal.getStringValue());
        listenerConfiguration.setCertPass(certPasswordAttrVal.getStringValue());
        if (sslVerifyClientAttrVal != null) {
            listenerConfiguration.setVerifyClient(sslVerifyClientAttrVal.getStringValue());
        }
        if (trustStoreFileAttrVal != null) {
            listenerConfiguration.setTrustStoreFile(trustStoreFileAttrVal.getStringValue());
        }
        if (trustStorePasswordAttrVal != null) {
            listenerConfiguration.setTrustStorePass(trustStorePasswordAttrVal.getStringValue());
        }
        // Optional TLS tuning: enabled protocols and cipher suites as transport parameters.
        List<Parameter> serverParams = new ArrayList<>();
        Parameter serverCiphers;
        if (sslEnabledProtocolsAttrVal != null && sslEnabledProtocolsAttrVal.getStringValue() != null) {
            serverCiphers = new Parameter(Constants.ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS,
                    sslEnabledProtocolsAttrVal.getStringValue());
            serverParams.add(serverCiphers);
        }
        if (ciphersAttrVal != null && ciphersAttrVal.getStringValue() != null) {
            serverCiphers = new Parameter(Constants.ANN_CONFIG_ATTR_CIPHERS, ciphersAttrVal.getStringValue());
            serverParams.add(serverCiphers);
        }
        if (!serverParams.isEmpty()) {
            listenerConfiguration.setParameters(serverParams);
        }
        if (sslProtocolAttrVal != null) {
            listenerConfiguration.setSSLProtocol(sslProtocolAttrVal.getStringValue());
        }
        listenerConfiguration
                .setId(getListenerInterface(listenerConfiguration.getHost(), listenerConfiguration.getPort()));
        listenerConfSet.add(listenerConfiguration);
    }
}

/** Creates a fresh request or response carbon message with end-of-message already marked. */
public static HTTPCarbonMessage createHttpCarbonMessage(boolean isRequest) {
    HTTPCarbonMessage httpCarbonMessage;
    if (isRequest) {
        httpCarbonMessage = new HTTPCarbonMessage(
                new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, ""));
        httpCarbonMessage.setEndOfMsgAdded(true);
    } else {
        httpCarbonMessage = new HTTPCarbonMessage(
                new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK));
        httpCarbonMessage.setEndOfMsgAdded(true);
    }
    return httpCarbonMessage;
}

/** Guards native-function entry: rejects repeated invocation and non-outbound-response structs. */
public static void checkFunctionValidity(BStruct bStruct, HTTPCarbonMessage httpMsg) {
    methodInvocationCheck(bStruct, httpMsg);
    outboundResponseStructCheck(bStruct);
}

// Each send-style method may be called once per struct; 100-continue requests are exempt from marking.
private static void methodInvocationCheck(BStruct bStruct, HTTPCarbonMessage httpMsg) {
    if (bStruct.getNativeData(METHOD_ACCESSED) != null || httpMsg == null) {
        throw new IllegalStateException("illegal function invocation");
    }
    if (!is100ContinueRequest(httpMsg)) {
        bStruct.addNativeData(METHOD_ACCESSED, true);
    }
}

// Rejects structs that are not outbound responses. (continues next line)
private static void
outboundResponseStructCheck(BStruct bStruct) { // (tail of outboundResponseStructCheck)
    if (bStruct.getNativeData(Constants.OUTBOUND_RESPONSE) == null) {
        throw new BallerinaException("operation not allowed");
    }
}

// True when the request carries "Expect: 100-continue" (case-insensitive).
private static boolean is100ContinueRequest(HTTPCarbonMessage httpMsg) {
    return Constants.HEADER_VAL_100_CONTINUE.equalsIgnoreCase(httpMsg.getHeader(Constants.EXPECT_HEADER));
}

/**
 * Returns the single service-level configuration annotation, or null when absent.
 * Throws when more than one configuration annotation is attached.
 */
public static Annotation getServiceConfigAnnotation(Service service, String pkgPath) {
    List<Annotation> annotationList = service.getAnnotationList(pkgPath, Constants.ANN_NAME_CONFIG);
    if (annotationList == null) {
        return null;
    }
    if (annotationList.size() > 1) {
        throw new BallerinaException(
                "multiple service configuration annotations found in service: " + service.getName());
    }
    return annotationList.isEmpty() ? null : annotationList.get(0);
}

/**
 * Returns the single resource-level configuration annotation, or null when absent.
 * Throws when more than one configuration annotation is attached.
 */
public static Annotation getResourceConfigAnnotation(Resource resource, String pkgPath) {
    List<Annotation> annotationList = resource.getAnnotationList(pkgPath, Constants.ANN_NAME_RESOURCE_CONFIG);
    if (annotationList == null) {
        return null;
    }
    if (annotationList.size() > 1) {
        throw new BallerinaException(
                "multiple resource configuration annotations found in resource: " + resource.getServiceName() +
                        "." + resource.getName());
    }
    return annotationList.isEmpty() ? null : annotationList.get(0);
}
}
class HttpUtil { private static final Logger log = LoggerFactory.getLogger(HttpUtil.class); private static final String METHOD_ACCESSED = "isMethodAccessed"; private static final String IO_EXCEPTION_OCCURED = "I/O exception occurred"; private static BStructType headerValueStructType; public static BValue[] addHeader(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = ((BStruct) abstractNativeFunction.getRefArgument(context, 0)); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); String headerName = abstractNativeFunction.getStringArgument(context, 0); String headerValue = abstractNativeFunction.getStringArgument(context, 1); HttpHeaders httpHeaders = httpCarbonMessage.getHeaders(); httpHeaders.add(headerName, headerValue); if (log.isDebugEnabled()) { log.debug("Add " + headerName + " to header with value: " + headerValue); } return AbstractNativeFunction.VOID_RETURN; } public static BValue[] getHeader(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); String headerName = abstractNativeFunction.getStringArgument(context, 0); String headerValue = httpCarbonMessage.getHeader(headerName); return abstractNativeFunction.getBValues(new BString(headerValue)); } public static BValue[] getProperty(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); String propertyName = abstractNativeFunction.getStringArgument(context, 0); Object propertyValue = 
httpCarbonMessage.getProperty(propertyName); if (propertyValue == null) { return AbstractNativeFunction.VOID_RETURN; } if (propertyValue instanceof String) { return abstractNativeFunction.getBValues(new BString((String) propertyValue)); } else { throw new BallerinaException("Property value is of unknown type : " + propertyValue.getClass().getName()); } } public static BValue[] removeAllHeaders(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.getHeaders().clear(); return AbstractNativeFunction.VOID_RETURN; } public static BValue[] removeHeader(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); String headerName = abstractNativeFunction.getStringArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.removeHeader(headerName); if (log.isDebugEnabled()) { log.debug("Remove header:" + headerName); } return AbstractNativeFunction.VOID_RETURN; } public static BValue[] setHeader(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); String headerName = abstractNativeFunction.getStringArgument(context, 0); String headerValue = abstractNativeFunction.getStringArgument(context, 1); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.setHeader(headerName, headerValue); if (log.isDebugEnabled()) { log.debug("Set " + headerName + " header with value: " + headerValue); } return 
AbstractNativeFunction.VOID_RETURN; } public static BValue[] setProperty(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); String propertyName = abstractNativeFunction.getStringArgument(context, 0); String propertyValue = abstractNativeFunction.getStringArgument(context, 1); if (propertyName != null && propertyValue != null) { HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.setProperty(propertyName, propertyValue); } return AbstractNativeFunction.VOID_RETURN; } public static BValue[] setBinaryPayload(Context context, AbstractNativeFunction nativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) nativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil.getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.waitAndReleaseAllEntities(); byte[] payload = nativeFunction.getBlobArgument(context, 0); BlobDataSource blobDataSource = new BlobDataSource(payload); addMessageDataSource(httpMessageStruct, blobDataSource); httpCarbonMessage.setHeader(Constants.CONTENT_TYPE, Constants.APPLICATION_JSON); return AbstractNativeFunction.VOID_RETURN; } public static BValue[] setJsonPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.waitAndReleaseAllEntities(); BJSON payload = (BJSON) abstractNativeFunction.getRefArgument(context, 1); addMessageDataSource(httpMessageStruct, payload); HttpUtil.setHeaderToStruct(context, httpMessageStruct, Constants.CONTENT_TYPE, Constants.APPLICATION_JSON); return 
AbstractNativeFunction.VOID_RETURN; } public static BValue[] setStringPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.waitAndReleaseAllEntities(); String payload = abstractNativeFunction.getStringArgument(context, 0); StringDataSource stringDataSource = new StringDataSource(payload); addMessageDataSource(httpMessageStruct, stringDataSource); HttpUtil.setHeaderToStruct(context, httpMessageStruct, Constants.CONTENT_TYPE, Constants.TEXT_PLAIN); if (log.isDebugEnabled()) { log.debug("Setting new payload: " + payload); } return AbstractNativeFunction.VOID_RETURN; } public static BValue[] setXMLPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); httpCarbonMessage.waitAndReleaseAllEntities(); BXML payload = (BXML) abstractNativeFunction.getRefArgument(context, 1); addMessageDataSource(httpMessageStruct, payload); HttpUtil.setHeaderToStruct(context, httpMessageStruct, Constants.CONTENT_TYPE, Constants.APPLICATION_XML); return AbstractNativeFunction.VOID_RETURN; } public static BValue[] getBinaryPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BlobDataSource result; try { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); if (httpMessageStruct.getNativeData(MESSAGE_DATA_SOURCE) != null) { result = (BlobDataSource) 
httpMessageStruct.getNativeData(MESSAGE_DATA_SOURCE); } else { HttpMessageDataStreamer httpMessageDataStreamer = new HttpMessageDataStreamer(httpCarbonMessage); result = new BlobDataSource(toByteArray(httpMessageDataStreamer.getInputStream())); HttpUtil.addMessageDataSource(httpMessageStruct, result); } if (log.isDebugEnabled()) { log.debug("String representation of the payload:" + result.getMessageAsString()); } } catch (Throwable e) { throw new BallerinaException("Error while retrieving string payload from message: " + e.getMessage()); } return abstractNativeFunction.getBValues(new BBlob(result.getValue())); } public static BValue[] getJsonPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BJSON result = null; try { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); MessageDataSource payload = HttpUtil.getMessageDataSource(httpMessageStruct); if (payload != null) { if (payload instanceof BJSON) { result = (BJSON) payload; } else { result = new BJSON(payload.getMessageAsString()); } } else { HttpMessageDataStreamer httpMessageDataStreamer = new HttpMessageDataStreamer(httpCarbonMessage); result = new BJSON(httpMessageDataStreamer.getInputStream()); addMessageDataSource(httpMessageStruct, result); } } catch (Throwable e) { throw new BallerinaException("Error while retrieving json payload from message: " + e.getMessage()); } return abstractNativeFunction.getBValues(result); } public static BValue[] getStringPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BString result; try { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); MessageDataSource messageDataSource = HttpUtil.getMessageDataSource(httpMessageStruct); if (messageDataSource != null) { result = new 
BString(messageDataSource.getMessageAsString()); } else { HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); if (httpCarbonMessage.isEmpty() && httpCarbonMessage.isEndOfMsgAdded()) { return abstractNativeFunction.getBValues(new BString("")); } HttpMessageDataStreamer httpMessageDataStreamer = new HttpMessageDataStreamer(httpCarbonMessage); String payload = StringUtils.getStringFromInputStream(httpMessageDataStreamer.getInputStream()); result = new BString(payload); addMessageDataSource(httpMessageStruct, new StringDataSource(payload)); } if (log.isDebugEnabled()) { log.debug("Payload in String:" + result.stringValue()); } } catch (Throwable e) { throw new BallerinaException("Error while retrieving string payload from message: " + e.getMessage()); } return abstractNativeFunction.getBValues(result); } public static BValue[] getXMLPayload(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest) { BXML result; try { BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); MessageDataSource messageDataSource = HttpUtil.getMessageDataSource(httpMessageStruct); if (messageDataSource != null) { if (messageDataSource instanceof BXML) { result = (BXML) messageDataSource; } else { result = XMLUtils.parse(messageDataSource.getMessageAsString()); } } else { HTTPCarbonMessage httpCarbonMessage = HttpUtil .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest)); HttpMessageDataStreamer httpMessageDataStreamer = new HttpMessageDataStreamer(httpCarbonMessage); result = XMLUtils.parse(httpMessageDataStreamer.getInputStream()); addMessageDataSource(httpMessageStruct, result); } } catch (Throwable e) { throw new BallerinaException("Error while retrieving XML payload from message: " + e.getMessage()); } return abstractNativeFunction.getBValues(result); } private static byte[] toByteArray(InputStream input) throws IOException { byte[] 
buffer = new byte[4096]; int n1; ByteArrayOutputStream output = new ByteArrayOutputStream(); for (; -1 != (n1 = input.read(buffer)); ) { output.write(buffer, 0, n1); } byte[] bytes = output.toByteArray(); output.close(); return bytes; } public static void addMessageDataSource(BStruct struct, MessageDataSource messageDataSource) { struct.addNativeData(MESSAGE_DATA_SOURCE, messageDataSource); } public static MessageDataSource getMessageDataSource(BStruct httpMsgStruct) { return (MessageDataSource) httpMsgStruct.getNativeData(MESSAGE_DATA_SOURCE); } public static void closeMessageOutputStream(OutputStream messageOutputStream) { try { if (messageOutputStream != null) { messageOutputStream.close(); } } catch (IOException e) { log.error("Couldn't close message output stream", e); } } public static BValue[] getContentLength(Context context, AbstractNativeFunction abstractNativeFunction) { int contentLength = -1; BStruct requestStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0); String lengthStr = HttpUtil.getHeaderFromStruct(requestStruct, Constants.HTTP_CONTENT_LENGTH); try { contentLength = Integer.parseInt(lengthStr); } catch (NumberFormatException e) { throw new BallerinaException("Invalid content length"); } return abstractNativeFunction.getBValues(new BInteger(contentLength)); } public static BMap<String, BValue> getParamMap(String payload) throws UnsupportedEncodingException { BMap<String, BValue> params = new BMap<>(); String[] entries = payload.split("&"); for (String entry : entries) { int index = entry.indexOf('='); if (index != -1) { String name = entry.substring(0, index).trim(); String value = URLDecoder.decode(entry.substring(index + 1).trim(), "UTF-8"); if (value.matches("")) { params.put(name, new BString("")); continue; } params.put(name, new BString(value)); } } return params; } /** * Helper method to start pending http server connectors. 
* * @throws BallerinaConnectorException */ public static void startPendingHttpConnectors(BallerinaHttpServerConnector httpServerConnector) throws BallerinaConnectorException { try { HttpConnectionManager.getInstance().startPendingHTTPConnectors(httpServerConnector); } catch (ServerConnectorException e) { throw new BallerinaConnectorException(e); } } public static BValue[] prepareResponseAndSend(Context context, AbstractNativeFunction abstractNativeFunction , HTTPCarbonMessage requestMessage, HTTPCarbonMessage responseMessage, BStruct httpMessageStruct) { addHTTPSessionAndCorsHeaders(requestMessage, responseMessage); MessageDataSource outboundMessageSource = HttpUtil.getMessageDataSource(httpMessageStruct); HttpResponseStatusFuture outboundResponseStatusFuture = sendOutboundResponse(requestMessage, responseMessage); if (outboundMessageSource != null) { OutputStream messageOutputStream = new HttpMessageDataStreamer(responseMessage).getOutputStream(); outboundMessageSource.serializeData(messageOutputStream); HttpUtil.closeMessageOutputStream(messageOutputStream); } try { outboundResponseStatusFuture = outboundResponseStatusFuture.sync(); } catch (InterruptedException e) { throw new BallerinaException("interrupted sync: " + e.getMessage()); } if (outboundResponseStatusFuture.getStatus().getCause() != null) { return abstractNativeFunction.getBValues(getServerConnectorError(context , outboundResponseStatusFuture.getStatus().getCause())); } return abstractNativeFunction.VOID_RETURN; } public static BStruct createSessionStruct(Context context, Session session) { BStruct sessionStruct = ConnectorUtils .createAndGetStruct(context, Constants.PROTOCOL_PACKAGE_HTTP, Constants.SESSION); sessionStruct.addNativeData(Constants.HTTP_SESSION, session); return sessionStruct; } public static String getSessionID(String cookieHeader) { return Arrays.stream(cookieHeader.split(";")) .filter(cookie -> cookie.trim().startsWith(Constants.SESSION_ID)) 
.findFirst().get().trim().substring(Constants.SESSION_ID.length()); } public static void addHTTPSessionAndCorsHeaders(HTTPCarbonMessage requestMsg, HTTPCarbonMessage responseMsg) { Session session = (Session) requestMsg.getProperty(Constants.HTTP_SESSION); if (session != null) { session.generateSessionHeader(responseMsg); } if (requestMsg.getHeader(Constants.ORIGIN) != null) { CorsHeaderGenerator.process(requestMsg, responseMsg, true); } } public static HttpResponseStatusFuture sendOutboundResponse(HTTPCarbonMessage requestMsg, HTTPCarbonMessage responseMsg) { HttpResponseStatusFuture responseFuture; try { responseFuture = requestMsg.respond(responseMsg); } catch (org.wso2.transport.http.netty.contract.ServerConnectorException e) { throw new BallerinaConnectorException("Error occurred during response", e); } return responseFuture; } public static void handleFailure(HTTPCarbonMessage requestMessage, BallerinaConnectorException ex) { Object carbonStatusCode = requestMessage.getProperty(Constants.HTTP_STATUS_CODE); int statusCode = (carbonStatusCode == null) ? 
500 : Integer.parseInt(carbonStatusCode.toString()); String errorMsg = ex.getMessage(); log.error(errorMsg); ErrorHandlerUtils.printError(ex); sendOutboundResponse(requestMessage, createErrorMessage(errorMsg, statusCode)); } public static HTTPCarbonMessage createErrorMessage(String payload, int statusCode) { HTTPCarbonMessage response = HttpUtil.createHttpCarbonMessage(false); response.waitAndReleaseAllEntities(); if (payload != null) { payload = lowerCaseTheFirstLetter(payload); response.addHttpContent(new DefaultLastHttpContent(Unpooled.wrappedBuffer(payload.getBytes()))); } setHttpStatusCodes(statusCode, response); return response; } private static void setHttpStatusCodes(int statusCode, HTTPCarbonMessage response) { HttpHeaders httpHeaders = response.getHeaders(); httpHeaders.set(org.wso2.transport.http.netty.common.Constants.HTTP_CONTENT_TYPE, org.wso2.transport.http.netty.common.Constants.TEXT_PLAIN); response.setProperty(org.wso2.transport.http.netty.common.Constants.HTTP_STATUS_CODE, statusCode); } public static BStruct getServerConnectorError(Context context, Throwable throwable) { PackageInfo httpPackageInfo = context.getProgramFile() .getPackageInfo(Constants.PROTOCOL_PACKAGE_HTTP); StructInfo errorStructInfo = httpPackageInfo.getStructInfo(Constants.HTTP_CONNECTOR_ERROR); BStruct httpConnectorError = new BStruct(errorStructInfo.getType()); if (throwable.getMessage() == null) { httpConnectorError.setStringField(0, IO_EXCEPTION_OCCURED); } else { httpConnectorError.setStringField(0, throwable.getMessage()); } return httpConnectorError; } public static HTTPCarbonMessage getCarbonMsg(BStruct struct, HTTPCarbonMessage defaultMsg) { HTTPCarbonMessage httpCarbonMessage = (HTTPCarbonMessage) struct .getNativeData(Constants.TRANSPORT_MESSAGE); if (httpCarbonMessage != null) { return httpCarbonMessage; } addCarbonMsg(struct, defaultMsg); return defaultMsg; } public static void addCarbonMsg(BStruct struct, HTTPCarbonMessage httpCarbonMessage) { 
struct.addNativeData(Constants.TRANSPORT_MESSAGE, httpCarbonMessage); } public static void setHeaderValueStructType(BStruct struct) { headerValueStructType = struct.getType(); } public static void populateInboundRequest(BStruct inboundRequestStruct, HTTPCarbonMessage inboundRequestMsg) { inboundRequestStruct.addNativeData(Constants.TRANSPORT_MESSAGE, inboundRequestMsg); inboundRequestStruct.addNativeData(Constants.INBOUND_REQUEST, true); enrichWithInboundRequestInfo(inboundRequestStruct, inboundRequestMsg); enrichWithInboundRequestHeaders(inboundRequestStruct, inboundRequestMsg); } private static void enrichWithInboundRequestHeaders(BStruct inboundRequestStruct, HTTPCarbonMessage inboundRequestMsg) { if (inboundRequestMsg.getHeader(Constants.USER_AGENT_HEADER) != null) { inboundRequestStruct.setStringField(Constants.REQUEST_USER_AGENT_INDEX, inboundRequestMsg.getHeader(Constants.USER_AGENT_HEADER)); inboundRequestMsg.removeHeader(Constants.USER_AGENT_HEADER); } inboundRequestStruct.setRefField(Constants.REQUEST_HEADERS_INDEX, prepareHeaderMap(inboundRequestMsg.getHeaders(), new BMap<>())); } private static void enrichWithInboundRequestInfo(BStruct inboundRequestStruct, HTTPCarbonMessage inboundRequestMsg) { inboundRequestStruct.setStringField(Constants.REQUEST_PATH_INDEX, (String) inboundRequestMsg.getProperty(Constants.REQUEST_URL)); inboundRequestStruct.setStringField(Constants.REQUEST_HOST_INDEX, ((InetSocketAddress) inboundRequestMsg.getProperty(Constants.LOCAL_ADDRESS)).getHostName()); inboundRequestStruct.setIntField(Constants.REQUEST_PORT_INDEX, (Integer) inboundRequestMsg.getProperty(Constants.LISTENER_PORT)); inboundRequestStruct.setStringField(Constants.REQUEST_METHOD_INDEX, (String) inboundRequestMsg.getProperty(Constants.HTTP_METHOD)); inboundRequestStruct.setStringField(Constants.REQUEST_VERSION_INDEX, (String) inboundRequestMsg.getProperty(Constants.HTTP_VERSION)); Map<String, String> resourceArgValues = (Map<String, String>) 
inboundRequestMsg.getProperty(Constants.RESOURCE_ARGS); inboundRequestStruct.setStringField(Constants.REQUEST_REST_URI_POSTFIX_INDEX, resourceArgValues.get(Constants.REST_URI_POSTFIX)); } public static void populateInboundResponse(BStruct response, HTTPCarbonMessage cMsg) { response.addNativeData(Constants.TRANSPORT_MESSAGE, cMsg); int statusCode = (Integer) cMsg.getProperty(Constants.HTTP_STATUS_CODE); response.setIntField(Constants.RESPONSE_STATUS_CODE_INDEX, statusCode); response.setStringField(Constants.RESPONSE_REASON_PHRASE_INDEX, HttpResponseStatus.valueOf(statusCode).reasonPhrase()); if (cMsg.getHeader(Constants.SERVER_HEADER) != null) { response.setStringField(Constants.RESPONSE_SERVER_INDEX, cMsg.getHeader(Constants.SERVER_HEADER)); cMsg.removeHeader(Constants.SERVER_HEADER); } response.setRefField(Constants.RESPONSE_HEADERS_INDEX, prepareHeaderMap(cMsg.getHeaders(), new BMap<>())); } @SuppressWarnings("unchecked") public static void populateOutboundRequest(BStruct request, HTTPCarbonMessage reqMsg) { setHeadersToTransportMessage(reqMsg, request); } public static void populateOutboundResponse(BStruct response, HTTPCarbonMessage resMsg, HTTPCarbonMessage reqMsg) { response.addNativeData(Constants.TRANSPORT_MESSAGE, resMsg); response.addNativeData(Constants.INBOUND_REQUEST_MESSAGE, reqMsg); response.addNativeData(Constants.OUTBOUND_RESPONSE, true); response.setRefField(Constants.RESPONSE_HEADERS_INDEX, new BMap<>()); } private static BMap<String, BValue> prepareHeaderMap(HttpHeaders headers, BMap<String, BValue> headerMap) { Map<String, ArrayList> headerStructHolder = new HashMap<>(); for (Map.Entry<String, String> headerEntry : headers) { String headerKey = headerEntry.getKey().trim(); String headerValue = headerEntry.getValue().trim(); ArrayList<BStruct> headerValueList = headerStructHolder.get(headerKey) != null ? 
headerStructHolder.get(headerKey) : new ArrayList<>(); if (headerValue.contains(",")) { List<String> valueList = Arrays.stream(headerValue.split(",")).map(String::trim) .collect(Collectors.toList()); for (String value : valueList) { populateHeaderStruct(headerValueList, value); } } else { populateHeaderStruct(headerValueList, headerValue); } headerStructHolder.put(headerKey, headerValueList); } for (Map.Entry<String, ArrayList> structHolder : headerStructHolder.entrySet()) { headerMap.put(structHolder.getKey(), new BRefValueArray((BRefType[]) structHolder.getValue() .toArray(new BRefType[0]), new BArrayType(headerValueStructType))); } return headerMap; } @SuppressWarnings("unchecked") private static void populateHeaderStruct(ArrayList headerValueList, String value) { if (value.contains(";")) { headerValueList.add(populateWithHeaderValueAndParams(new BStruct(headerValueStructType), value)); } else { headerValueList.add(populateWithHeaderValue(new BStruct(headerValueStructType), value)); } } private static BStruct populateWithHeaderValueAndParams(BStruct headerStruct, String headerValue) { String value = headerValue.substring(0, headerValue.indexOf(";")).trim(); List<String> paramList = Arrays.stream(headerValue.substring(headerValue.indexOf(";") + 1) .split(";")).map(String::trim).collect(Collectors.toList()); headerStruct.setStringField(Constants.HEADER_VALUE_INDEX, value); headerStruct.setRefField(Constants.HEADER_PARAM_INDEX, createParamBMap(paramList)); return headerStruct; } private static BStruct populateWithHeaderValue(BStruct headerStruct, String headerValue) { headerStruct.setStringField(0, headerValue.trim()); return headerStruct; } private static BMap<String, BValue> createParamBMap(List<String> paramList) { BMap<String, BValue> paramMap = new BMap<>(); for (String param : paramList) { if (param.contains("=")) { String[] keyValuePair = param.split("="); paramMap.put(keyValuePair[0].trim(), new BString(keyValuePair[1].trim())); } else { 
paramMap.put(param.trim(), null); } } return paramMap; } /** * Set headers of request/response struct to the transport message. * * @param outboundRequest transport Http carbon message. * @param struct req/resp struct. */ public static void setHeadersToTransportMessage(HTTPCarbonMessage outboundRequest, BStruct struct) { outboundRequest.getHeaders().clear(); BMap<String, BValue> headers = struct.getType().getName().equals(Constants.REQUEST) ? getRequestStructHeaders(struct) : getResponseStructHeaders(struct); if (headers == null) { return; } Set<String> keys = headers.keySet(); for (String key : keys) { String headerValue = buildHeaderValue(headers, key); outboundRequest.setHeader(key, headerValue); } } @SuppressWarnings("unchecked") private static BMap<String, BValue> getRequestStructHeaders(BStruct struct) { BMap<String, BValue> headers = (BMap) struct.getRefField(Constants.REQUEST_HEADERS_INDEX); if (headers == null) { return null; } HttpHeaders removedHeaders = new DefaultHttpHeaders(); if (!struct.getStringField(Constants.REQUEST_USER_AGENT_INDEX).equals("")) { removedHeaders.add(Constants.USER_AGENT_HEADER, struct.getStringField(Constants.REQUEST_USER_AGENT_INDEX)); } return prepareHeaderMap(removedHeaders, headers); } @SuppressWarnings("unchecked") private static BMap<String, BValue> getResponseStructHeaders(BStruct struct) { BMap<String, BValue> headers = (BMap) struct.getRefField(Constants.RESPONSE_HEADERS_INDEX); if (headers == null) { return null; } HttpHeaders removedHeaders = new DefaultHttpHeaders(); if (struct.getNativeData(Constants.OUTBOUND_RESPONSE) == null && !struct.getStringField(Constants.RESPONSE_SERVER_INDEX).equals("")) { removedHeaders.add(Constants.SERVER_HEADER, struct.getStringField(Constants.RESPONSE_SERVER_INDEX)); } return prepareHeaderMap(removedHeaders, headers); } private static String buildHeaderValue(BMap<String, BValue> headers, String key) { StringBuilder headerValue = new StringBuilder(); if 
(headers.get(key).getType().getTag() != TypeTags.ARRAY_TAG) { throw new BallerinaException("expects an array as header value for header : " + key); } BRefValueArray headerValues = (BRefValueArray) headers.get(key); for (int index = 0; index < headerValues.size(); index++) { if (headerValues.get(index).getType().getTag() == TypeTags.STRUCT_TAG) { BStruct headerStruct = (BStruct) headerValues.get(index); String value = headerStruct.getStringField(Constants.HEADER_VALUE_INDEX); headerValue.append(index > 0 ? "," + value : value); BMap paramMap = (BMap) headerStruct.getRefField(Constants.HEADER_PARAM_INDEX); headerValue = paramMap != null ? concatParams(headerValue, paramMap) : headerValue; } else if (headerValues.get(index).getType().getTag() == TypeTags.MAP_TAG) { BMap headerMap = (BMap) headerValues.get(index); String value = headerMap.get(Constants.HEADER_VALUE).stringValue(); headerValue.append(index > 0 ? "," + value : value); BMap paramMap = (BMap) headerMap.get(Constants.HEADER_PARAM); headerValue = paramMap != null ? concatParams(headerValue, paramMap) : headerValue; } else { throw new BallerinaException("invalid header assignment for key : " + key); } } return headerValue.toString(); } @SuppressWarnings("unchecked") private static StringBuilder concatParams(StringBuilder headerValue, BMap paramMap) { Set<String> paramKeys = paramMap.keySet(); for (String paramKey : paramKeys) { String paramValue = paramMap.get(paramKey) != null ? paramMap.get(paramKey).stringValue() : null; headerValue.append(paramValue == null ? ";" + paramKey : ";" + paramKey + "=" + paramValue); } return headerValue; } private static void setHeaderToStruct(Context context, BStruct struct, String key, String value) { headerValueStructType = headerValueStructType == null ? ConnectorUtils.createAndGetStruct(context, Constants.HTTP_PACKAGE_PATH, Constants.HEADER_VALUE_STRUCT).getType() : headerValueStructType; int headersIndex = struct.getType().getName().equals(Constants.REQUEST) ? 
Constants.REQUEST_HEADERS_INDEX : Constants.RESPONSE_HEADERS_INDEX; BMap<String, BValue> headerMap = struct.getRefField(headersIndex) != null ? (BMap) struct.getRefField(headersIndex) : new BMap<>(); struct.setRefField(headersIndex, prepareHeaderMap(new DefaultHttpHeaders().add(key, value), headerMap)); } @SuppressWarnings("unchecked") private static String getHeaderFromStruct(BStruct struct, String key) { int headersIndex = struct.getType().getName().equals(Constants.REQUEST) ? Constants.REQUEST_HEADERS_INDEX : Constants.RESPONSE_HEADERS_INDEX; return struct.getRefField(headersIndex) != null ? buildHeaderValue((BMap) struct.getRefField(headersIndex), key) : null; } /** * Extract the listener configurations from the config annotation. * * @param annotationInfo configuration annotation info. * @return the set of {@link ListenerConfiguration} which were extracted from config annotation. */ public static Set<ListenerConfiguration> getDefaultOrDynamicListenerConfig(Annotation annotationInfo) { if (annotationInfo == null) { return HttpConnectionManager.getInstance().getDefaultListenerConfiugrationSet(); } Set<ListenerConfiguration> listenerConfSet = new HashSet<>(); extractBasicConfig(annotationInfo, listenerConfSet); extractHttpsConfig(annotationInfo, listenerConfSet); if (listenerConfSet.isEmpty()) { listenerConfSet = HttpConnectionManager.getInstance().getDefaultListenerConfiugrationSet(); } return listenerConfSet; } private static String getListenerInterface(String host, int port) { host = host != null ? 
host : "0.0.0.0"; return host + ":" + port; } private static void extractBasicConfig(Annotation configInfo, Set<ListenerConfiguration> listenerConfSet) { AnnAttrValue hostAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_HOST); AnnAttrValue portAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_PORT); AnnAttrValue keepAliveAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_KEEP_ALIVE); AnnAttrValue transferEncoding = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_TRANSFER_ENCODING); AnnAttrValue chunking = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_CHUNKING); ListenerConfiguration listenerConfiguration = new ListenerConfiguration(); if (portAttrVal != null && portAttrVal.getIntValue() > 0) { listenerConfiguration.setPort(Math.toIntExact(portAttrVal.getIntValue())); listenerConfiguration.setScheme(Constants.PROTOCOL_HTTP); if (hostAttrVal != null && hostAttrVal.getStringValue() != null) { listenerConfiguration.setHost(hostAttrVal.getStringValue()); } else { listenerConfiguration.setHost(Constants.HTTP_DEFAULT_HOST); } if (keepAliveAttrVal != null) { listenerConfiguration.setKeepAlive(keepAliveAttrVal.getBooleanValue()); } else { listenerConfiguration.setKeepAlive(Boolean.TRUE); } if (transferEncoding != null && !Constants.ANN_CONFIG_ATTR_CHUNKING .equalsIgnoreCase(transferEncoding.getStringValue())) { throw new BallerinaConnectorException("Unsupported configuration found for Transfer-Encoding : " + transferEncoding.getStringValue()); } if (chunking != null) { ChunkConfig chunkConfig = getChunkConfig(chunking.getStringValue()); listenerConfiguration.setChunkConfig(chunkConfig); } else { listenerConfiguration.setChunkConfig(ChunkConfig.AUTO); } listenerConfiguration .setId(getListenerInterface(listenerConfiguration.getHost(), listenerConfiguration.getPort())); listenerConfSet.add(listenerConfiguration); } } public static ChunkConfig getChunkConfig(String chunking) { ChunkConfig chunkConfig; if 
(Constants.CHUNKING_AUTO.equalsIgnoreCase(chunking)) { chunkConfig = ChunkConfig.AUTO; } else if (Constants.CHUNKING_ALWAYS.equalsIgnoreCase(chunking)) { chunkConfig = ChunkConfig.ALWAYS; } else if (Constants.CHUNKING_NEVER.equalsIgnoreCase(chunking)) { chunkConfig = ChunkConfig.NEVER; } else { throw new BallerinaConnectorException("Invalid configuration found for Transfer-Encoding : " + chunking); } return chunkConfig; } private static void extractHttpsConfig(Annotation configInfo, Set<ListenerConfiguration> listenerConfSet) { AnnAttrValue httpsPortAttrVal; if (configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_HTTPS_PORT) == null) { httpsPortAttrVal = configInfo.getAnnAttrValue(org.ballerinalang.net.ws.Constants.ANN_CONFIG_ATTR_WSS_PORT); } else { httpsPortAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_HTTPS_PORT); } AnnAttrValue keyStoreFileAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_KEY_STORE_FILE); AnnAttrValue keyStorePasswordAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_KEY_STORE_PASS); AnnAttrValue certPasswordAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_CERT_PASS); AnnAttrValue trustStoreFileAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_TRUST_STORE_FILE); AnnAttrValue trustStorePasswordAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_TRUST_STORE_PASS); AnnAttrValue sslVerifyClientAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_SSL_VERIFY_CLIENT); AnnAttrValue sslEnabledProtocolsAttrVal = configInfo .getAnnAttrValue(Constants.ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS); AnnAttrValue ciphersAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_CIPHERS); AnnAttrValue sslProtocolAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_SSL_PROTOCOL); AnnAttrValue hostAttrVal = configInfo.getAnnAttrValue(Constants.ANN_CONFIG_ATTR_HOST); ListenerConfiguration listenerConfiguration = new ListenerConfiguration(); if (httpsPortAttrVal != 
null && httpsPortAttrVal.getIntValue() > 0) { listenerConfiguration.setPort(Math.toIntExact(httpsPortAttrVal.getIntValue())); listenerConfiguration.setScheme(Constants.PROTOCOL_HTTPS); if (hostAttrVal != null && hostAttrVal.getStringValue() != null) { listenerConfiguration.setHost(hostAttrVal.getStringValue()); } else { listenerConfiguration.setHost(Constants.HTTP_DEFAULT_HOST); } if (keyStoreFileAttrVal == null || keyStoreFileAttrVal.getStringValue() == null) { throw new BallerinaConnectorException("Keystore location must be provided for secure connection"); } if (keyStorePasswordAttrVal == null || keyStorePasswordAttrVal.getStringValue() == null) { throw new BallerinaConnectorException("Keystore password value must be provided for secure connection"); } if (certPasswordAttrVal == null || certPasswordAttrVal.getStringValue() == null) { throw new BallerinaConnectorException( "Certificate password value must be provided for secure connection"); } if ((trustStoreFileAttrVal == null || trustStoreFileAttrVal.getStringValue() == null) && sslVerifyClientAttrVal != null) { throw new BallerinaException("Truststore location must be provided to enable Mutual SSL"); } if ((trustStorePasswordAttrVal == null || trustStorePasswordAttrVal.getStringValue() == null) && sslVerifyClientAttrVal != null) { throw new BallerinaException("Truststore password value must be provided to enable Mutual SSL"); } listenerConfiguration.setTLSStoreType(Constants.PKCS_STORE_TYPE); listenerConfiguration.setKeyStoreFile(keyStoreFileAttrVal.getStringValue()); listenerConfiguration.setKeyStorePass(keyStorePasswordAttrVal.getStringValue()); listenerConfiguration.setCertPass(certPasswordAttrVal.getStringValue()); if (sslVerifyClientAttrVal != null) { listenerConfiguration.setVerifyClient(sslVerifyClientAttrVal.getStringValue()); } if (trustStoreFileAttrVal != null) { listenerConfiguration.setTrustStoreFile(trustStoreFileAttrVal.getStringValue()); } if (trustStorePasswordAttrVal != null) { 
listenerConfiguration.setTrustStorePass(trustStorePasswordAttrVal.getStringValue()); } List<Parameter> serverParams = new ArrayList<>(); Parameter serverCiphers; if (sslEnabledProtocolsAttrVal != null && sslEnabledProtocolsAttrVal.getStringValue() != null) { serverCiphers = new Parameter(Constants.ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS, sslEnabledProtocolsAttrVal.getStringValue()); serverParams.add(serverCiphers); } if (ciphersAttrVal != null && ciphersAttrVal.getStringValue() != null) { serverCiphers = new Parameter(Constants.ANN_CONFIG_ATTR_CIPHERS, ciphersAttrVal.getStringValue()); serverParams.add(serverCiphers); } if (!serverParams.isEmpty()) { listenerConfiguration.setParameters(serverParams); } if (sslProtocolAttrVal != null) { listenerConfiguration.setSSLProtocol(sslProtocolAttrVal.getStringValue()); } listenerConfiguration .setId(getListenerInterface(listenerConfiguration.getHost(), listenerConfiguration.getPort())); listenerConfSet.add(listenerConfiguration); } } public static HTTPCarbonMessage createHttpCarbonMessage(boolean isRequest) { HTTPCarbonMessage httpCarbonMessage; if (isRequest) { httpCarbonMessage = new HTTPCarbonMessage( new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "")); httpCarbonMessage.setEndOfMsgAdded(true); } else { httpCarbonMessage = new HTTPCarbonMessage( new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK)); httpCarbonMessage.setEndOfMsgAdded(true); } return httpCarbonMessage; } public static void checkFunctionValidity(BStruct bStruct, HTTPCarbonMessage httpMsg) { methodInvocationCheck(bStruct, httpMsg); outboundResponseStructCheck(bStruct); } private static void methodInvocationCheck(BStruct bStruct, HTTPCarbonMessage httpMsg) { if (bStruct.getNativeData(METHOD_ACCESSED) != null || httpMsg == null) { throw new IllegalStateException("illegal function invocation"); } if (!is100ContinueRequest(httpMsg)) { bStruct.addNativeData(METHOD_ACCESSED, true); } } private static void 
outboundResponseStructCheck(BStruct bStruct) { if (bStruct.getNativeData(Constants.OUTBOUND_RESPONSE) == null) { throw new BallerinaException("operation not allowed"); } } private static boolean is100ContinueRequest(HTTPCarbonMessage httpMsg) { return Constants.HEADER_VAL_100_CONTINUE.equalsIgnoreCase(httpMsg.getHeader(Constants.EXPECT_HEADER)); } public static Annotation getServiceConfigAnnotation(Service service, String pkgPath) { List<Annotation> annotationList = service.getAnnotationList(pkgPath, Constants.ANN_NAME_CONFIG); if (annotationList == null) { return null; } if (annotationList.size() > 1) { throw new BallerinaException( "multiple service configuration annotations found in service: " + service.getName()); } return annotationList.isEmpty() ? null : annotationList.get(0); } public static Annotation getResourceConfigAnnotation(Resource resource, String pkgPath) { List<Annotation> annotationList = resource.getAnnotationList(pkgPath, Constants.ANN_NAME_RESOURCE_CONFIG); if (annotationList == null) { return null; } if (annotationList.size() > 1) { throw new BallerinaException( "multiple resource configuration annotations found in resource: " + resource.getServiceName() + "." + resource.getName()); } return annotationList.isEmpty() ? null : annotationList.get(0); } }
I still don't think this is a real issue. If there is a RESTEasy request in progress then CP will have captured the context, so it should be propagated everywhere and there is no change of behaviour. The only way this will be missing is if a request is started with no request in progress, and then ends up in a RESTEasy request, such as with lazy authentication. This could mean that RESTEasy requests effectively lose their context once they start dealing with the SecurityIdentity, which does not seem ideal IMHO.
/**
 * Captures the RESTEasy context data of the calling thread so it can be
 * restored later on another thread by applying the returned snapshot.
 *
 * @param props provider properties (unused by this implementation)
 * @return a snapshot that pushes the captured context when applied, or
 *         {@code null} when no RESTEasy request is in progress
 */
public ThreadContextSnapshot currentContext(Map<String, String> props) {
    final Map<Class<?>, Object> captured = ResteasyContext.getContextDataMap(false);
    if (captured == null) {
        // Nothing to propagate: no RESTEasy request is active on this thread.
        return null;
    }
    return () -> {
        // Re-apply the captured context on the executing thread; popping the
        // level on completion restores whatever was there before.
        ResteasyContext.pushContextDataMap(captured);
        return ResteasyContext::removeContextDataLevel;
    };
}
return null;
// Captures the current thread's RESTEasy context map (if any) so that it can
// be pushed onto another thread when the returned snapshot is applied.
public ThreadContextSnapshot currentContext(Map<String, String> props) {
    // false: do not create a fresh map when no request is in progress.
    Map<Class<?>, Object> context = ResteasyContext.getContextDataMap(false);
    if (context == null) {
        // No RESTEasy request in progress -> nothing to capture.
        return null;
    }
    return () -> {
        // Apply: push the captured data as a new context level ...
        ResteasyContext.pushContextDataMap(context);
        return () -> {
            // ... and pop that level again when the contextual task ends.
            ResteasyContext.removeContextDataLevel();
        };
    };
}
// Context-propagation provider that captures and restores the RESTEasy
// (JAX-RS) request context across threads.
class ResteasyContextProvider implements ThreadContextProvider {

    // Identifier under which this context type is registered.
    private static final String JAXRS_CONTEXT = "JAX-RS";

    @Override
    // NOTE(review): two consecutive @Override annotations with no method body
    // between them -- a method (presumably currentContext) appears to have
    // been elided here; confirm against the original source.
    @Override
    public ThreadContextSnapshot clearedContext(Map<String, String> props) {
        // A "cleared" context is an empty context map pushed as a new level.
        Map<Class<?>, Object> context = Collections.emptyMap();
        return () -> {
            ResteasyContext.pushContextDataMap(context);
            return () -> {
                // Pop the empty level when the contextual task completes.
                ResteasyContext.removeContextDataLevel();
            };
        };
    }

    @Override
    public String getThreadContextType() {
        return JAXRS_CONTEXT;
    }
}
// Provider that propagates the RESTEasy (JAX-RS) request context between
// threads for context-propagation frameworks.
class ResteasyContextProvider implements ThreadContextProvider {

    // Context type name reported by getThreadContextType().
    private static final String JAXRS_CONTEXT = "JAX-RS";

    @Override
    // NOTE(review): doubled @Override with no intervening method -- a method
    // body (likely currentContext) seems to be missing at this point; verify
    // against the original file.
    @Override
    public ThreadContextSnapshot clearedContext(Map<String, String> props) {
        // Clearing means pushing an empty context map as a new level.
        Map<Class<?>, Object> context = Collections.emptyMap();
        return () -> {
            ResteasyContext.pushContextDataMap(context);
            return () -> {
                // Remove the pushed level again once the task is done.
                ResteasyContext.removeContextDataLevel();
            };
        };
    }

    @Override
    public String getThreadContextType() {
        return JAXRS_CONTEXT;
    }
}
We can make the two sources have the same length to achieve the same goal, and thereby avoid introducing too many variables.
// Verifies ByteBufUtils.accumulate when the requested size does not fit in a
// single source buffer, forcing copies into the target accumulation buffer.
public void testAccumulateWithCopy() {
    final int firstSourceLength = 128;
    final int firstSourceStartPosition = 32;
    final int secondSourceLength = 64;
    final int secondSourceStartPosition = 0;
    final int expectedAccumulationSize = 128;
    // Bytes taken from each source: the first contributes all of its readable
    // bytes, the second supplies the remainder.
    final int firstCopyLength = firstSourceLength - firstSourceStartPosition;
    final int secondCopyLength = expectedAccumulationSize - firstCopyLength;

    ByteBuf firstSource = createSourceBuffer(firstSourceLength, firstSourceStartPosition);
    ByteBuf secondSource = createSourceBuffer(secondSourceLength, secondSourceStartPosition);
    ByteBuf target = Unpooled.buffer(expectedAccumulationSize);

    // First call cannot satisfy the requested size -> returns null after
    // copying the first source's readable bytes into the target.
    ByteBuf accumulated = ByteBufUtils.accumulate(
        target, firstSource, expectedAccumulationSize, target.readableBytes());
    assertNull(accumulated);
    assertEquals(firstSourceLength, firstSource.readerIndex());
    assertEquals(firstCopyLength, target.readableBytes());

    // Second call completes the accumulation and returns the target itself.
    accumulated = ByteBufUtils.accumulate(
        target, secondSource, expectedAccumulationSize, target.readableBytes());
    assertSame(target, accumulated);
    assertEquals(secondSourceStartPosition + secondCopyLength, secondSource.readerIndex());
    assertEquals(expectedAccumulationSize, target.readableBytes());

    // The accumulated content is the two copied regions, in order.
    verifyBufferContent(accumulated, 0, firstCopyLength, firstSourceStartPosition);
    verifyBufferContent(accumulated, firstCopyLength, secondCopyLength, secondSourceStartPosition);
}
final int secondSourceLength = 64;
// Verifies ByteBufUtils.accumulate when the requested size spans two source
// buffers, forcing copies into the target accumulation buffer.
public void testAccumulateWithCopy() {
    int sourceLength = 128;
    int firstSourceReaderIndex = 32;
    int secondSourceReaderIndex = 0;
    int expectedAccumulationSize = 128;
    // The first source contributes its readable bytes; the second supplies
    // whatever is still missing.
    int firstAccumulationSize = sourceLength - firstSourceReaderIndex;
    int secondAccumulationSize = expectedAccumulationSize - firstAccumulationSize;
    ByteBuf firstSource = createSourceBuffer(sourceLength, firstSourceReaderIndex, firstAccumulationSize);
    ByteBuf secondSource = createSourceBuffer(sourceLength, secondSourceReaderIndex, secondAccumulationSize);
    ByteBuf target = Unpooled.buffer(expectedAccumulationSize);
    // First call cannot satisfy the requested size -> returns null after
    // copying the first source's readable bytes into the target.
    ByteBuf accumulated = ByteBufUtils.accumulate(
        target, firstSource, expectedAccumulationSize, target.readableBytes());
    assertNull(accumulated);
    assertEquals(sourceLength, firstSource.readerIndex());
    assertEquals(firstAccumulationSize, target.readableBytes());
    // Second call completes the accumulation and returns the target itself.
    accumulated = ByteBufUtils.accumulate(
        target, secondSource, expectedAccumulationSize, target.readableBytes());
    assertSame(target, accumulated);
    assertEquals(secondSourceReaderIndex + secondAccumulationSize, secondSource.readerIndex());
    assertEquals(expectedAccumulationSize, target.readableBytes());
    // Every accumulated byte must carry the marker value written by
    // createSourceBuffer for the to-be-accumulated region.
    verifyBufferContent(accumulated, 0, expectedAccumulationSize);
}
// Tests for ByteBufUtils.accumulate.
class ByteBufUtilsTest {

    // Accumulation that fits entirely in the source: no copy happens, the
    // source buffer itself is returned and its reader index is untouched.
    @Test
    public void testAccumulateWithoutCopy() {
        final int sourceLength = 128;
        final int sourceStartPosition = 32;
        final int expectedAccumulationSize = 16;
        ByteBuf src = createSourceBuffer(sourceLength, sourceStartPosition);
        ByteBuf target = Unpooled.buffer(expectedAccumulationSize);
        ByteBuf accumulated = ByteBufUtils.accumulate(target, src, expectedAccumulationSize, target.readableBytes());
        assertSame(src, accumulated);
        assertEquals(sourceStartPosition, src.readerIndex());
        verifyBufferContent(src, sourceStartPosition, sourceLength - sourceStartPosition, sourceStartPosition);
    }

    // NOTE(review): this @Test annotation is not followed by a test method --
    // a test body appears to have been elided here; confirm against the
    // original source.
    @Test

    // Builds a buffer of ascending byte values (0, 1, 2, ...) and positions
    // its reader index at the given offset.
    private ByteBuf createSourceBuffer(int size, int readerIndex) {
        ByteBuf buf = Unpooled.buffer(size);
        for (int i = 0; i < size; ++i) {
            buf.writeByte((byte) i);
        }
        buf.readerIndex(readerIndex);
        return buf;
    }

    // Asserts that buf[start + i] == startValue + i for i in [0, length).
    private void verifyBufferContent(ByteBuf buf, int start, int length, int startValue) {
        for (int i = 0; i < length; ++i) {
            byte b = buf.getByte(start + i);
            assertEquals((byte) (startValue + i), b);
        }
    }
}
// Tests for ByteBufUtils.accumulate.
class ByteBufUtilsTest extends TestLogger {

    // Marker for bytes expected to be accumulated vs. bytes that must never
    // be copied by accumulate().
    private static final byte ACCUMULATION_BYTE = 0x7d;
    private static final byte NON_ACCUMULATION_BYTE = 0x23;

    // Accumulation that fits entirely in the source: no copy happens, the
    // source buffer itself is returned and its reader index is untouched.
    @Test
    public void testAccumulateWithoutCopy() {
        int sourceLength = 128;
        int sourceReaderIndex = 32;
        int expectedAccumulationSize = 16;
        ByteBuf src = createSourceBuffer(sourceLength, sourceReaderIndex, expectedAccumulationSize);
        ByteBuf target = Unpooled.buffer(expectedAccumulationSize);
        ByteBuf accumulated = ByteBufUtils.accumulate(target, src, expectedAccumulationSize, target.readableBytes());
        assertSame(src, accumulated);
        assertEquals(sourceReaderIndex, src.readerIndex());
        verifyBufferContent(src, sourceReaderIndex, expectedAccumulationSize);
    }

    // NOTE(review): this @Test annotation is not followed by a test method --
    // a test body appears to have been elided here; confirm against the
    // original source.
    @Test

    /**
     * Create a source buffer whose length is <tt>size</tt>. The content between <tt>readerIndex</tt> and
     * <tt>readerIndex + accumulationSize</tt> is <tt>ACCUMULATION_BYTE</tt> and the remaining is
     * <tt>NON_ACCUMULATION_BYTE</tt>.
     *
     * @param size The size of the source buffer.
     * @param readerIndex The reader index of the source buffer.
     * @param accumulationSize The size of bytes that will be read for accumulating.
     *
     * @return The required source buffer.
     */
    private ByteBuf createSourceBuffer(int size, int readerIndex, int accumulationSize) {
        ByteBuf buf = Unpooled.buffer(size);
        for (int i = 0; i < readerIndex; i++) {
            buf.writeByte(NON_ACCUMULATION_BYTE);
        }
        for (int i = readerIndex; i < readerIndex + accumulationSize; i++) {
            buf.writeByte(ACCUMULATION_BYTE);
        }
        for (int i = readerIndex + accumulationSize; i < size; i++) {
            buf.writeByte(NON_ACCUMULATION_BYTE);
        }
        buf.readerIndex(readerIndex);
        return buf;
    }

    // Asserts every byte in [start, start + length) carries ACCUMULATION_BYTE.
    private void verifyBufferContent(ByteBuf buf, int start, int length) {
        for (int i = 0; i < length; ++i) {
            byte b = buf.getByte(start + i);
            assertEquals(String.format("The byte at position %d is not right.", start + i), ACCUMULATION_BYTE, b);
        }
    }
}
This change makes sense. We don't have any tests that cover this for the issue that's resolved in this PR, though (reverting the change in `Executing` won't cause any failures). We might want to add another test like that one: ```java @Test public void testNotifyNewResourcesAvailableWithCanScaleUpWithoutForceWhileTransitioningIntoExecutingState() throws Exception { try (MockExecutingContext ctx = new MockExecutingContext()) { final ExecutingStateBuilder executingStateBuilder = new ExecutingStateBuilder() .setScalingIntervalMin(Duration.ofSeconds(20L)) .setScalingIntervalMax(Duration.ofSeconds(30L)) .setLastRescale(Instant.now().minus(Duration.ofSeconds(25L))); // slots become available while transitioning into Executing state ctx.setCanScaleUp(true); final Executing exec = executingStateBuilder.build(ctx); ctx.setExpectRestarting( restartingArguments -> assertThat( "Rescaling should be triggered immediately after transitioning into Executing state.", restartingArguments.getBackoffTime(), is(Duration.ZERO))); exec.onNewResourcesAvailable(); } } ```
// Transitions the test ExecutionGraph to RUNNING and constructs the Executing
// state under test from the builder's configured fields. The finally-block
// verifies that constructing the state did not itself trigger a further
// state transition on the mock context.
private Executing build(MockExecutingContext ctx) {
    executionGraph.transitionToRunning();
    try {
        return new Executing(
                executionGraph,
                getExecutionGraphHandler(executionGraph, ctx.getMainThreadExecutor()),
                operatorCoordinatorHandler,
                log,
                ctx,
                ClassLoader.getSystemClassLoader(),
                new ArrayList<>(),
                scalingIntervalMin,
                scalingIntervalMax,
                lastRescale);
    } finally {
        Preconditions.checkState(
                !ctx.hadStateTransition,
                "State construction is an on-going state transition, during which no further transitions are allowed.");
    }
}
try {
/**
 * Builds the {@code Executing} state under test: moves the execution graph to
 * RUNNING first, then instantiates the state from the builder's fields.
 * Asserts (in the finally-block, so it also fires on construction failure)
 * that no state transition was triggered while the state was being built.
 */
private Executing build(MockExecutingContext ctx) {
    executionGraph.transitionToRunning();
    final Executing executing;
    try {
        executing =
                new Executing(
                        executionGraph,
                        getExecutionGraphHandler(executionGraph, ctx.getMainThreadExecutor()),
                        operatorCoordinatorHandler,
                        log,
                        ctx,
                        ClassLoader.getSystemClassLoader(),
                        new ArrayList<>(),
                        scalingIntervalMin,
                        scalingIntervalMax,
                        lastRescale);
    } finally {
        Preconditions.checkState(
                !ctx.hadStateTransition,
                "State construction is an on-going state transition, during which no further transitions are allowed.");
    }
    return executing;
}
// Fluent test builder for the Executing state: defaults to a fresh testing
// ExecutionGraph, a testing coordinator handler, a zero minimum scaling
// interval, no maximum, and "last rescale happened just now".
class ExecutingStateBuilder {
    private ExecutionGraph executionGraph =
            TestingDefaultExecutionGraphBuilder.newBuilder()
                    .build(EXECUTOR_RESOURCE.getExecutor());
    private OperatorCoordinatorHandler operatorCoordinatorHandler;
    private Duration scalingIntervalMin = Duration.ZERO;
    // null means "no upper bound on the scaling interval".
    @Nullable private Duration scalingIntervalMax;
    private Instant lastRescale = Instant.now();

    private ExecutingStateBuilder() throws JobException, JobExecutionException {
        operatorCoordinatorHandler = new TestingOperatorCoordinatorHandler();
    }

    public ExecutingStateBuilder setExecutionGraph(ExecutionGraph executionGraph) {
        this.executionGraph = executionGraph;
        return this;
    }

    public ExecutingStateBuilder setOperatorCoordinatorHandler(
            OperatorCoordinatorHandler operatorCoordinatorHandler) {
        this.operatorCoordinatorHandler = operatorCoordinatorHandler;
        return this;
    }

    public ExecutingStateBuilder setScalingIntervalMin(Duration scalingIntervalMin) {
        this.scalingIntervalMin = scalingIntervalMin;
        return this;
    }

    public ExecutingStateBuilder setScalingIntervalMax(Duration scalingIntervalMax) {
        this.scalingIntervalMax = scalingIntervalMax;
        return this;
    }

    public ExecutingStateBuilder setLastRescale(Instant lastRescale) {
        this.lastRescale = lastRescale;
        return this;
    }
}
// Builder used by the tests to construct an Executing state with selectively
// overridden fields; every setter returns this for chaining.
class ExecutingStateBuilder {
    private ExecutionGraph executionGraph =
            TestingDefaultExecutionGraphBuilder.newBuilder()
                    .build(EXECUTOR_RESOURCE.getExecutor());
    private OperatorCoordinatorHandler operatorCoordinatorHandler;
    private Duration scalingIntervalMin = Duration.ZERO;
    // Absent by default: no maximum scaling interval configured.
    @Nullable private Duration scalingIntervalMax;
    private Instant lastRescale = Instant.now();

    private ExecutingStateBuilder() throws JobException, JobExecutionException {
        operatorCoordinatorHandler = new TestingOperatorCoordinatorHandler();
    }

    public ExecutingStateBuilder setExecutionGraph(ExecutionGraph executionGraph) {
        this.executionGraph = executionGraph;
        return this;
    }

    public ExecutingStateBuilder setOperatorCoordinatorHandler(
            OperatorCoordinatorHandler operatorCoordinatorHandler) {
        this.operatorCoordinatorHandler = operatorCoordinatorHandler;
        return this;
    }

    public ExecutingStateBuilder setScalingIntervalMin(Duration scalingIntervalMin) {
        this.scalingIntervalMin = scalingIntervalMin;
        return this;
    }

    public ExecutingStateBuilder setScalingIntervalMax(Duration scalingIntervalMax) {
        this.scalingIntervalMax = scalingIntervalMax;
        return this;
    }

    public ExecutingStateBuilder setLastRescale(Instant lastRescale) {
        this.lastRescale = lastRescale;
        return this;
    }
}
``` if (dataType.hasPriority() || dataType.requiresAnnouncement()) { firstPriorityEvent = addPriorityBuffer(announce(sequenceBuffer)); } if (!dataType.hasPriority()) { receivedBuffers.add(sequenceBuffer); channelStatePersister.maybePersist(buffer); } ```
// Handles a buffer arriving from the network thread: enqueues it under the
// receivedBuffers lock (priority buffers jump the queue), advances the
// expected sequence number, and fires notifications outside the lock.
public void onBuffer(Buffer buffer, int sequenceNumber, int backlog) throws IOException {
    // Ownership: until the buffer is safely enqueued, this method must
    // recycle it on any early return or exception.
    boolean recycleBuffer = true;
    try {
        if (expectedSequenceNumber != sequenceNumber) {
            onError(new BufferReorderingException(expectedSequenceNumber, sequenceNumber));
            return;
        }
        final boolean wasEmpty;
        boolean firstPriorityEvent = false;
        synchronized (receivedBuffers) {
            // Never enqueue a buffer after the channel was released.
            if (isReleased.get()) {
                return;
            }
            wasEmpty = receivedBuffers.isEmpty();
            SequenceBuffer sequenceBuffer = new SequenceBuffer(buffer, sequenceNumber);
            DataType dataType = buffer.getDataType();
            if (dataType.hasPriority()) {
                checkPriorityXorAnnouncement(buffer);
                firstPriorityEvent = addPriorityBuffer(sequenceBuffer);
            } else {
                receivedBuffers.add(sequenceBuffer);
                channelStatePersister.maybePersist(buffer);
                if (dataType.requiresAnnouncement()) {
                    checkPriorityXorAnnouncement(buffer);
                    // Enqueue an out-of-band announcement of this barrier as a
                    // priority element; the barrier itself stays in order.
                    firstPriorityEvent = addPriorityBuffer(announce(sequenceBuffer));
                }
            }
            ++expectedSequenceNumber;
        }
        // From here on the queue owns the buffer; do not recycle it below.
        recycleBuffer = false;
        if (firstPriorityEvent) {
            notifyPriorityEvent(sequenceNumber);
        }
        if (wasEmpty) {
            notifyChannelNonEmpty();
        }
        if (backlog >= 0) {
            onSenderBacklog(backlog);
        }
    } finally {
        if (recycleBuffer) {
            buffer.recycleBuffer();
        }
    }
}
checkPriorityXorAnnouncement(buffer);
// Handles a buffer arriving from the network thread: enqueues it under the
// receivedBuffers lock (priority buffers jump the queue), advances the
// expected sequence number, and fires notifications outside the lock.
public void onBuffer(Buffer buffer, int sequenceNumber, int backlog) throws IOException {
    // Ownership: until the buffer is safely enqueued, this method must
    // recycle it on any early return or exception.
    boolean recycleBuffer = true;
    try {
        if (expectedSequenceNumber != sequenceNumber) {
            onError(new BufferReorderingException(expectedSequenceNumber, sequenceNumber));
            return;
        }
        final boolean wasEmpty;
        boolean firstPriorityEvent = false;
        synchronized (receivedBuffers) {
            // Never enqueue a buffer after the channel was released.
            if (isReleased.get()) {
                return;
            }
            wasEmpty = receivedBuffers.isEmpty();
            SequenceBuffer sequenceBuffer = new SequenceBuffer(buffer, sequenceNumber);
            DataType dataType = buffer.getDataType();
            if (dataType.hasPriority()) {
                firstPriorityEvent = addPriorityBuffer(sequenceBuffer);
            } else {
                receivedBuffers.add(sequenceBuffer);
                channelStatePersister.maybePersist(buffer);
                if (dataType.requiresAnnouncement()) {
                    // Enqueue an out-of-band announcement of this barrier as a
                    // priority element; the barrier itself stays in order.
                    firstPriorityEvent = addPriorityBuffer(announce(sequenceBuffer));
                }
            }
            ++expectedSequenceNumber;
        }
        // From here on the queue owns the buffer; do not recycle it below.
        recycleBuffer = false;
        if (firstPriorityEvent) {
            notifyPriorityEvent(sequenceNumber);
        }
        if (wasEmpty) {
            notifyChannelNonEmpty();
        }
        if (backlog >= 0) {
            onSenderBacklog(backlog);
        }
    } finally {
        if (recycleBuffer) {
            buffer.recycleBuffer();
        }
    }
}
/**
 * An input channel that reads data from a remote partition over the network.
 * Buffers are enqueued by the network I/O thread and consumed by the task
 * thread; credit is announced back to the producer as buffers become
 * available.
 */
class RemoteInputChannel extends InputChannel implements ChannelStateHolder {

    // Sentinel meaning "all queued buffers" / "no barrier overtaken yet".
    public static final int ALL = -1;

    /** ID to distinguish this channel from other channels sharing the same TCP connection. */
    private final InputChannelID id = new InputChannelID();

    /** The connection to use to request the remote partition. */
    private final ConnectionID connectionId;

    /** The connection manager used to connect to the remote partition provider. */
    private final ConnectionManager connectionManager;

    /**
     * The received buffers. Received buffers are enqueued by the network I/O thread and the queue
     * is consumed by the receiving task thread.
     */
    private final PrioritizedDeque<SequenceBuffer> receivedBuffers = new PrioritizedDeque<>();

    /**
     * Flag indicating whether this channel has been released. Set either by the receiving task
     * thread or the task manager actor.
     */
    private final AtomicBoolean isReleased = new AtomicBoolean();

    /** Client to establish a (possibly shared) TCP connection and request the partition. */
    private volatile PartitionRequestClient partitionRequestClient;

    /** The next expected sequence number for the next buffer. */
    private int expectedSequenceNumber = 0;

    /** The initial number of exclusive buffers assigned to this channel. */
    private final int initialCredit;

    /** The number of available buffers that have not been announced to the producer yet. */
    private final AtomicInteger unannouncedCredit = new AtomicInteger(0);

    // Manages this channel's exclusive and floating buffers.
    private final BufferManager bufferManager;

    /**
     * Number of unprioritized buffers that were overtaken when a checkpoint barrier was reordered
     * to the front of the queue, or {@link #ALL} when no such barrier has been observed (see
     * {@code addPriorityBuffer} / {@code checkpointStarted}). NOTE(review): the original javadoc
     * here was truncated ("Stores ..."); wording reconstructed from usage -- confirm.
     */
    @GuardedBy("receivedBuffers")
    private int numBuffersOvertaken = ALL;

    @GuardedBy("receivedBuffers")
    private ChannelStatePersister channelStatePersister = new ChannelStatePersister(null);

    public RemoteInputChannel(
            SingleInputGate inputGate,
            int channelIndex,
            ResultPartitionID partitionId,
            ConnectionID connectionId,
            ConnectionManager connectionManager,
            int initialBackOff,
            int maxBackoff,
            int networkBuffersPerChannel,
            Counter numBytesIn,
            Counter numBuffersIn) {
        super(inputGate, channelIndex, partitionId, initialBackOff, maxBackoff, numBytesIn, numBuffersIn);
        this.initialCredit = networkBuffersPerChannel;
        this.connectionId = checkNotNull(connectionId);
        this.connectionManager = checkNotNull(connectionManager);
        this.bufferManager = new BufferManager(inputGate.getMemorySegmentProvider(), this, 0);
    }

    public void setChannelStateWriter(ChannelStateWriter channelStateWriter) {
        checkState(!channelStatePersister.isInitialized(), "Already initialized");
        channelStatePersister = new ChannelStatePersister(checkNotNull(channelStateWriter));
    }

    /**
     * Setup includes assigning exclusive buffers to this input channel, and this method should be
     * called only once after this input channel is created.
     */
    @Override
    void setup() throws IOException {
        checkState(bufferManager.unsynchronizedGetAvailableExclusiveBuffers() == 0,
            "Bug in input channel setup logic: exclusive buffers have already been set for this input channel.");
        bufferManager.requestExclusiveBuffers(initialCredit);
    }

    /** Requests a remote subpartition, lazily creating the partition request client on first use. */
    @VisibleForTesting
    @Override
    public void requestSubpartition(int subpartitionIndex) throws IOException, InterruptedException {
        if (partitionRequestClient == null) {
            try {
                partitionRequestClient = connectionManager.createPartitionRequestClient(connectionId);
            } catch (IOException e) {
                // Wrap so callers see which partition the failed connection belongs to.
                throw new PartitionConnectionException(partitionId, e);
            }
            partitionRequestClient.requestSubpartition(partitionId, subpartitionIndex, this, 0);
        }
    }

    /** Retriggers a remote subpartition request with increased backoff, or fails the request. */
    void retriggerSubpartitionRequest(int subpartitionIndex) throws IOException {
        checkPartitionRequestQueueInitialized();
        if (increaseBackoff()) {
            partitionRequestClient.requestSubpartition(
                partitionId, subpartitionIndex, this, getCurrentBackoff());
        } else {
            failPartitionRequest();
        }
    }

    @Override
    Optional<BufferAndAvailability> getNextBuffer() throws IOException {
        checkPartitionRequestQueueInitialized();
        final SequenceBuffer next;
        final DataType nextDataType;
        synchronized (receivedBuffers) {
            next = receivedBuffers.poll();
            // Peek at the data type of the buffer that will follow, if any.
            nextDataType = receivedBuffers.peek() != null
                ? receivedBuffers.peek().buffer.getDataType()
                : DataType.NONE;
        }
        if (next == null) {
            if (isReleased.get()) {
                throw new CancelTaskException("Queried for a buffer after channel has been released.");
            }
            return Optional.empty();
        }
        numBytesIn.inc(next.buffer.getSize());
        numBuffersIn.inc();
        return Optional.of(new BufferAndAvailability(next.buffer, nextDataType, 0, next.sequenceNumber));
    }

    @Override
    void sendTaskEvent(TaskEvent event) throws IOException {
        checkState(!isReleased.get(), "Tried to send task event to producer after channel has been released.");
        checkPartitionRequestQueueInitialized();
        partitionRequestClient.sendTaskEvent(partitionId, event, this);
    }

    @Override
    public boolean isReleased() {
        return isReleased.get();
    }

    /** Releases all exclusive and floating buffers, closes the partition request client. */
    @Override
    void releaseAllResources() throws IOException {
        if (isReleased.compareAndSet(false, true)) {
            final ArrayDeque<Buffer> releasedBuffers;
            synchronized (receivedBuffers) {
                // Drain the queue under the lock; recycle the buffers outside it.
                releasedBuffers = receivedBuffers.stream().map(sb -> sb.buffer)
                    .collect(Collectors.toCollection(ArrayDeque::new));
                receivedBuffers.clear();
            }
            bufferManager.releaseAllBuffers(releasedBuffers);
            if (partitionRequestClient != null) {
                partitionRequestClient.close(this);
            } else {
                connectionManager.closeOpenChannelConnections(connectionId);
            }
        }
    }

    private void failPartitionRequest() {
        setError(new PartitionNotFoundException(partitionId));
    }

    @Override
    public String toString() {
        return "RemoteInputChannel [" + partitionId + " at " + connectionId + "]";
    }

    /** Enqueue this input channel in the pipeline for notifying the producer of unannounced credit. */
    private void notifyCreditAvailable() throws IOException {
        checkPartitionRequestQueueInitialized();
        partitionRequestClient.notifyCreditAvailable(this);
    }

    @VisibleForTesting
    public int getNumberOfAvailableBuffers() {
        return bufferManager.getNumberOfAvailableBuffers();
    }

    @VisibleForTesting
    public int getNumberOfRequiredBuffers() {
        return bufferManager.unsynchronizedGetNumberOfRequiredBuffers();
    }

    @VisibleForTesting
    public int getSenderBacklog() {
        return getNumberOfRequiredBuffers() - initialCredit;
    }

    @VisibleForTesting
    boolean isWaitingForFloatingBuffers() {
        return bufferManager.unsynchronizedIsWaitingForFloatingBuffers();
    }

    @VisibleForTesting
    public Buffer getNextReceivedBuffer() {
        final SequenceBuffer sequenceBuffer = receivedBuffers.poll();
        return sequenceBuffer != null ? sequenceBuffer.buffer : null;
    }

    @VisibleForTesting
    BufferManager getBufferManager() {
        return bufferManager;
    }

    @VisibleForTesting
    PartitionRequestClient getPartitionRequestClient() {
        return partitionRequestClient;
    }

    /**
     * The unannounced credit is increased by the given amount and might notify increased credit to
     * the producer.
     */
    @Override
    public void notifyBufferAvailable(int numAvailableBuffers) throws IOException {
        // Only the 0 -> n transition triggers a notification; further credit
        // accumulates until the producer has been notified.
        if (numAvailableBuffers > 0 && unannouncedCredit.getAndAdd(numAvailableBuffers) == 0) {
            notifyCreditAvailable();
        }
    }

    @Override
    public void resumeConsumption() throws IOException {
        checkState(!isReleased.get(), "Channel released.");
        checkPartitionRequestQueueInitialized();
        partitionRequestClient.resumeConsumption(this);
    }

    /**
     * Gets the currently unannounced credit.
     *
     * @return Credit which was not announced to the sender yet.
     */
    public int getUnannouncedCredit() {
        return unannouncedCredit.get();
    }

    /**
     * Gets the unannounced credit and resets it to <tt>0</tt> atomically.
     *
     * @return Credit which was not announced to the sender yet.
     */
    public int getAndResetUnannouncedCredit() {
        return unannouncedCredit.getAndSet(0);
    }

    /**
     * Gets the current number of received buffers which have not been processed yet.
     *
     * @return Buffers queued for processing.
     */
    public int getNumberOfQueuedBuffers() {
        synchronized (receivedBuffers) {
            return receivedBuffers.size();
        }
    }

    @Override
    public int unsynchronizedGetNumberOfQueuedBuffers() {
        // Unsynchronized read; clamp against a transiently inconsistent size.
        return Math.max(0, receivedBuffers.size());
    }

    public int unsynchronizedGetExclusiveBuffersUsed() {
        return Math.max(0, initialCredit - bufferManager.unsynchronizedGetAvailableExclusiveBuffers());
    }

    public int unsynchronizedGetFloatingBuffersAvailable() {
        return Math.max(0, bufferManager.unsynchronizedGetFloatingBuffersAvailable());
    }

    public InputChannelID getInputChannelId() {
        return id;
    }

    public int getInitialCredit() {
        return initialCredit;
    }

    public BufferProvider getBufferProvider() throws IOException {
        if (isReleased.get()) {
            return null;
        }
        return inputGate.getBufferProvider();
    }

    /**
     * Requests buffer from input channel directly for receiving network data. It should always
     * return an available buffer in credit-based mode unless the channel has been released.
     *
     * @return The available buffer.
     */
    @Nullable
    public Buffer requestBuffer() {
        return bufferManager.requestBuffer();
    }

    /**
     * Receives the backlog from the producer's buffer response. If the number of available buffers
     * is less than backlog + initialCredit, it will request floating buffers from the buffer
     * manager, and then notify unannounced credits to the producer.
     *
     * @param backlog The number of unsent buffers in the producer's sub partition.
     */
    void onSenderBacklog(int backlog) throws IOException {
        int numRequestedBuffers = bufferManager.requestFloatingBuffers(backlog + initialCredit);
        if (numRequestedBuffers > 0 && unannouncedCredit.getAndAdd(numRequestedBuffers) == 0) {
            notifyCreditAvailable();
        }
    }

    // A buffer must either carry priority or require an announcement -- never both.
    private void checkPriorityXorAnnouncement(Buffer buffer) {
        DataType dataType = buffer.getDataType();
        checkState(
            dataType.requiresAnnouncement() ^ dataType.hasPriority(),
            "Buffer [%s] with dataType [%s] has both priority and requires announcement, which is not supported.",
            buffer, dataType);
    }

    /**
     * Adds the given buffer to the priority section of the queue.
     *
     * @return {@code true} if this was first priority buffer added.
     */
    private boolean addPriorityBuffer(SequenceBuffer sequenceBuffer) throws IOException {
        receivedBuffers.addPriorityElement(sequenceBuffer);
        if (channelStatePersister.checkForBarrier(sequenceBuffer.buffer)) {
            // Remember how many queued non-priority buffers this barrier overtook.
            numBuffersOvertaken = receivedBuffers.getNumUnprioritizedElements();
        }
        return receivedBuffers.getNumPriorityElements() == 1;
    }

    // Wraps a serialized CheckpointBarrier into a CheckpointBarrierAnnouncement
    // event buffer carrying the same sequence number.
    private SequenceBuffer announce(SequenceBuffer sequenceBuffer) throws IOException {
        checkState(!sequenceBuffer.buffer.isBuffer(),
            "Only a CheckpointBarrier can be announced but found %s",
            sequenceBuffer.buffer);
        AbstractEvent event = EventSerializer.fromBuffer(
            sequenceBuffer.buffer, getClass().getClassLoader());
        checkState(event instanceof CheckpointBarrier,
            "Only a CheckpointBarrier can be announced but found %s",
            sequenceBuffer.buffer);
        CheckpointBarrier barrier = (CheckpointBarrier) event;
        return new SequenceBuffer(
            EventSerializer.toBuffer(new CheckpointBarrierAnnouncement(barrier, sequenceBuffer.sequenceNumber), true),
            sequenceBuffer.sequenceNumber);
    }

    /**
     * Spills all queued buffers on checkpoint start. If barrier has already been received (and
     * reordered), spill only the overtaken buffers.
     */
    public void checkpointStarted(CheckpointBarrier barrier) {
        synchronized (receivedBuffers) {
            channelStatePersister.startPersisting(
                barrier.getId(),
                getInflightBuffers(numBuffersOvertaken == ALL
                    ? receivedBuffers.getNumUnprioritizedElements()
                    : numBuffersOvertaken));
        }
    }

    public void checkpointStopped(long checkpointId) {
        synchronized (receivedBuffers) {
            channelStatePersister.stopPersisting();
            numBuffersOvertaken = ALL;
        }
    }

    /**
     * Returns a list of buffers, checking the first n non-priority buffers, and skipping all
     * events.
     */
    private List<Buffer> getInflightBuffers(int numBuffers) {
        assert Thread.holdsLock(receivedBuffers);
        if (numBuffers == 0) {
            return Collections.emptyList();
        }
        final List<Buffer> inflightBuffers = new ArrayList<>(numBuffers);
        Iterator<SequenceBuffer> iterator = receivedBuffers.iterator();
        // Skip the priority section; inflight state covers only non-priority buffers.
        Iterators.advance(iterator, receivedBuffers.getNumPriorityElements());
        for (int pos = 0; pos < numBuffers; pos++) {
            Buffer buffer = iterator.next().buffer;
            if (buffer.isBuffer()) {
                // Retain: both the persister and the queue hold a reference now.
                inflightBuffers.add(buffer.retainBuffer());
            }
        }
        return inflightBuffers;
    }

    // Producer reported this sequence number without payload; only the
    // sequence bookkeeping and the backlog are processed.
    public void onEmptyBuffer(int sequenceNumber, int backlog) throws IOException {
        boolean success = false;
        synchronized (receivedBuffers) {
            if (!isReleased.get()) {
                if (expectedSequenceNumber == sequenceNumber) {
                    expectedSequenceNumber++;
                    success = true;
                } else {
                    onError(new BufferReorderingException(expectedSequenceNumber, sequenceNumber));
                }
            }
        }
        if (success && backlog >= 0) {
            onSenderBacklog(backlog);
        }
    }

    public void onFailedPartitionRequest() {
        inputGate.triggerPartitionStateCheck(partitionId);
    }

    public void onError(Throwable cause) {
        setError(cause);
    }

    private void checkPartitionRequestQueueInitialized() throws IOException {
        checkError();
        checkState(partitionRequestClient != null,
            "Bug: partitionRequestClient is not initialized before processing data and no error is detected.");
    }

    // Raised when a buffer arrives with an unexpected sequence number.
    private static class BufferReorderingException extends IOException {

        private static final long serialVersionUID = -888282210356266816L;

        private final int expectedSequenceNumber;
        private final int actualSequenceNumber;

        BufferReorderingException(int expectedSequenceNumber, int actualSequenceNumber) {
            this.expectedSequenceNumber = expectedSequenceNumber;
            this.actualSequenceNumber = actualSequenceNumber;
        }

        @Override
        public String getMessage() {
            return String.format("Buffer re-ordering: expected buffer with sequence number %d, but received %d.",
                expectedSequenceNumber, actualSequenceNumber);
        }
    }

    // A buffer paired with the sequence number it arrived under.
    private static final class SequenceBuffer {
        final Buffer buffer;
        final int sequenceNumber;

        private SequenceBuffer(Buffer buffer, int sequenceNumber) {
            this.buffer = buffer;
            this.sequenceNumber = sequenceNumber;
        }
    }
}
/**
 * An input channel that reads data from a remote partition over the network
 * using credit-based flow control: the channel announces available buffers
 * ("credit") to the producer, which sends at most that many buffers.
 * Received buffers are enqueued by the network I/O thread and consumed by the
 * task thread; {@code receivedBuffers} is the shared lock for that hand-off.
 */
class RemoteInputChannel extends InputChannel implements ChannelStateHolder {

    // Sentinel meaning "all unprioritized buffers" / "no barrier overtaken yet".
    public static final int ALL = -1;

    /** ID to distinguish this channel from other channels sharing the same TCP connection. */
    private final InputChannelID id = new InputChannelID();

    /** The connection to use to request the remote partition. */
    private final ConnectionID connectionId;

    /** The connection manager to use connect to the remote partition provider. */
    private final ConnectionManager connectionManager;

    /**
     * The received buffers. Received buffers are enqueued by the network I/O thread and the queue
     * is consumed by the receiving task thread.
     */
    private final PrioritizedDeque<SequenceBuffer> receivedBuffers = new PrioritizedDeque<>();

    /**
     * Flag indicating whether this channel has been released. Either called by the receiving task
     * thread or the task manager actor.
     */
    private final AtomicBoolean isReleased = new AtomicBoolean();

    /** Client to establish a (possibly shared) TCP connection and request the partition. */
    private volatile PartitionRequestClient partitionRequestClient;

    /**
     * The next expected sequence number for the next buffer.
     */
    private int expectedSequenceNumber = 0;

    /** The initial number of exclusive buffers assigned to this channel. */
    private final int initialCredit;

    /** The number of available buffers that have not been announced to the producer yet.
     */
    private final AtomicInteger unannouncedCredit = new AtomicInteger(0);

    private final BufferManager bufferManager;

    /** Stores — NOTE(review): the original javadoc text was truncated by extraction;
     * the comment is closed here so the following members are not swallowed by an
     * unterminated block comment. */
    @GuardedBy("receivedBuffers")
    private int numBuffersOvertaken = ALL;

    // NOTE(review): starts with a null writer until setChannelStateWriter is
    // called (guarded by the isInitialized check there) — confirm against callers.
    @GuardedBy("receivedBuffers")
    private ChannelStatePersister channelStatePersister = new ChannelStatePersister(null);

    public RemoteInputChannel(
            SingleInputGate inputGate,
            int channelIndex,
            ResultPartitionID partitionId,
            ConnectionID connectionId,
            ConnectionManager connectionManager,
            int initialBackOff,
            int maxBackoff,
            int networkBuffersPerChannel,
            Counter numBytesIn,
            Counter numBuffersIn) {
        super(inputGate, channelIndex, partitionId, initialBackOff, maxBackoff, numBytesIn, numBuffersIn);
        this.initialCredit = networkBuffersPerChannel;
        this.connectionId = checkNotNull(connectionId);
        this.connectionManager = checkNotNull(connectionManager);
        this.bufferManager = new BufferManager(inputGate.getMemorySegmentProvider(), this, 0);
    }

    public void setChannelStateWriter(ChannelStateWriter channelStateWriter) {
        checkState(!channelStatePersister.isInitialized(), "Already initialized");
        channelStatePersister = new ChannelStatePersister(checkNotNull(channelStateWriter));
    }

    /**
     * Setup includes assigning exclusive buffers to this input channel, and this method should be called only once
     * after this input channel is created.
     */
    @Override
    void setup() throws IOException {
        checkState(bufferManager.unsynchronizedGetAvailableExclusiveBuffers() == 0,
            "Bug in input channel setup logic: exclusive buffers have already been set for this input channel.");
        bufferManager.requestExclusiveBuffers(initialCredit);
    }

    /**
     * Requests a remote subpartition.
     */
    @VisibleForTesting
    @Override
    public void requestSubpartition(int subpartitionIndex) throws IOException, InterruptedException {
        if (partitionRequestClient == null) {
            // Lazily create the client on first request; connect failures are
            // wrapped so callers can distinguish them from other I/O errors.
            try {
                partitionRequestClient = connectionManager.createPartitionRequestClient(connectionId);
            } catch (IOException e) {
                throw new PartitionConnectionException(partitionId, e);
            }
            partitionRequestClient.requestSubpartition(partitionId, subpartitionIndex, this, 0);
        }
    }

    /**
     * Retriggers a remote subpartition request.
     */
    void retriggerSubpartitionRequest(int subpartitionIndex) throws IOException {
        checkPartitionRequestQueueInitialized();
        // Retry with the next backoff step; give up once the backoff is exhausted.
        if (increaseBackoff()) {
            partitionRequestClient.requestSubpartition(
                partitionId, subpartitionIndex, this, getCurrentBackoff());
        } else {
            failPartitionRequest();
        }
    }

    @Override
    Optional<BufferAndAvailability> getNextBuffer() throws IOException {
        checkPartitionRequestQueueInitialized();
        final SequenceBuffer next;
        final DataType nextDataType;
        synchronized (receivedBuffers) {
            next = receivedBuffers.poll();
            // Peek at the follow-up element so the caller learns what comes next.
            nextDataType = receivedBuffers.peek() != null
                ? receivedBuffers.peek().buffer.getDataType()
                : DataType.NONE;
        }
        if (next == null) {
            if (isReleased.get()) {
                throw new CancelTaskException("Queried for a buffer after channel has been released.");
            }
            return Optional.empty();
        }
        numBytesIn.inc(next.buffer.getSize());
        numBuffersIn.inc();
        return Optional.of(new BufferAndAvailability(next.buffer, nextDataType, 0, next.sequenceNumber));
    }

    @Override
    void sendTaskEvent(TaskEvent event) throws IOException {
        checkState(!isReleased.get(), "Tried to send task event to producer after channel has been released.");
        checkPartitionRequestQueueInitialized();
        partitionRequestClient.sendTaskEvent(partitionId, event, this);
    }

    @Override
    public boolean isReleased() {
        return isReleased.get();
    }

    /**
     * Releases all exclusive and floating buffers, closes the partition request client.
     */
    @Override
    void releaseAllResources() throws IOException {
        // compareAndSet makes the release idempotent across task/actor threads.
        if (isReleased.compareAndSet(false, true)) {
            final ArrayDeque<Buffer> releasedBuffers;
            synchronized (receivedBuffers) {
                releasedBuffers = receivedBuffers.stream().map(sb -> sb.buffer)
                    .collect(Collectors.toCollection(ArrayDeque::new));
                receivedBuffers.clear();
            }
            bufferManager.releaseAllBuffers(releasedBuffers);
            // The channel may have been released before a connection was established.
            if (partitionRequestClient != null) {
                partitionRequestClient.close(this);
            } else {
                connectionManager.closeOpenChannelConnections(connectionId);
            }
        }
    }

    private void failPartitionRequest() {
        setError(new PartitionNotFoundException(partitionId));
    }

    @Override
    public String toString() {
        return "RemoteInputChannel [" + partitionId + " at " + connectionId + "]";
    }

    /**
     * Enqueue this input channel in the pipeline for notifying the producer of unannounced credit.
     */
    private void notifyCreditAvailable() throws IOException {
        checkPartitionRequestQueueInitialized();
        partitionRequestClient.notifyCreditAvailable(this);
    }

    @VisibleForTesting
    public int getNumberOfAvailableBuffers() {
        return bufferManager.getNumberOfAvailableBuffers();
    }

    @VisibleForTesting
    public int getNumberOfRequiredBuffers() {
        return bufferManager.unsynchronizedGetNumberOfRequiredBuffers();
    }

    @VisibleForTesting
    public int getSenderBacklog() {
        return getNumberOfRequiredBuffers() - initialCredit;
    }

    @VisibleForTesting
    boolean isWaitingForFloatingBuffers() {
        return bufferManager.unsynchronizedIsWaitingForFloatingBuffers();
    }

    @VisibleForTesting
    public Buffer getNextReceivedBuffer() {
        final SequenceBuffer sequenceBuffer = receivedBuffers.poll();
        return sequenceBuffer != null ? sequenceBuffer.buffer : null;
    }

    @VisibleForTesting
    BufferManager getBufferManager() {
        return bufferManager;
    }

    @VisibleForTesting
    PartitionRequestClient getPartitionRequestClient() {
        return partitionRequestClient;
    }

    /**
     * The unannounced credit is increased by the given amount and might notify
     * increased credit to the producer.
     */
    @Override
    public void notifyBufferAvailable(int numAvailableBuffers) throws IOException {
        // Only the transition from 0 triggers a notification; subsequent
        // increments piggy-back on the pending announcement.
        if (numAvailableBuffers > 0 && unannouncedCredit.getAndAdd(numAvailableBuffers) == 0) {
            notifyCreditAvailable();
        }
    }

    @Override
    public void resumeConsumption() throws IOException {
        checkState(!isReleased.get(), "Channel released.");
        checkPartitionRequestQueueInitialized();
        partitionRequestClient.resumeConsumption(this);
    }

    /**
     * Gets the currently unannounced credit.
     *
     * @return Credit which was not announced to the sender yet.
     */
    public int getUnannouncedCredit() {
        return unannouncedCredit.get();
    }

    /**
     * Gets the unannounced credit and resets it to <tt>0</tt> atomically.
     *
     * @return Credit which was not announced to the sender yet.
     */
    public int getAndResetUnannouncedCredit() {
        return unannouncedCredit.getAndSet(0);
    }

    /**
     * Gets the current number of received buffers which have not been processed yet.
     *
     * @return Buffers queued for processing.
     */
    public int getNumberOfQueuedBuffers() {
        synchronized (receivedBuffers) {
            return receivedBuffers.size();
        }
    }

    @Override
    public int unsynchronizedGetNumberOfQueuedBuffers() {
        return Math.max(0, receivedBuffers.size());
    }

    public int unsynchronizedGetExclusiveBuffersUsed() {
        return Math.max(0, initialCredit - bufferManager.unsynchronizedGetAvailableExclusiveBuffers());
    }

    public int unsynchronizedGetFloatingBuffersAvailable() {
        return Math.max(0, bufferManager.unsynchronizedGetFloatingBuffersAvailable());
    }

    public InputChannelID getInputChannelId() {
        return id;
    }

    public int getInitialCredit() {
        return initialCredit;
    }

    public BufferProvider getBufferProvider() throws IOException {
        if (isReleased.get()) {
            return null;
        }
        return inputGate.getBufferProvider();
    }

    /**
     * Requests buffer from input channel directly for receiving network data.
     * It should always return an available buffer in credit-based mode unless
     * the channel has been released.
     *
     * @return The available buffer.
     */
    @Nullable
    public Buffer requestBuffer() {
        return bufferManager.requestBuffer();
    }

    /**
     * Receives the backlog from the producer's buffer response. If the number of available
     * buffers is less than backlog + initialCredit, it will request floating buffers from
     * the buffer manager, and then notify unannounced credits to the producer.
     *
     * @param backlog The number of unsent buffers in the producer's sub partition.
     */
    void onSenderBacklog(int backlog) throws IOException {
        int numRequestedBuffers = bufferManager.requestFloatingBuffers(backlog + initialCredit);
        if (numRequestedBuffers > 0 && unannouncedCredit.getAndAdd(numRequestedBuffers) == 0) {
            notifyCreditAvailable();
        }
    }

    /**
     * @return {@code true} if this was first priority buffer added.
     */
    private boolean addPriorityBuffer(SequenceBuffer sequenceBuffer) throws IOException {
        receivedBuffers.addPriorityElement(sequenceBuffer);
        // If the buffer carries a checkpoint barrier, remember how many
        // unprioritized buffers it overtook (needed for in-flight spilling).
        if (channelStatePersister.checkForBarrier(sequenceBuffer.buffer)) {
            numBuffersOvertaken = receivedBuffers.getNumUnprioritizedElements();
        }
        return receivedBuffers.getNumPriorityElements() == 1;
    }

    // Converts a CheckpointBarrier buffer into an EventAnnouncement buffer that
    // carries the same sequence number.
    private SequenceBuffer announce(SequenceBuffer sequenceBuffer) throws IOException {
        checkState(!sequenceBuffer.buffer.isBuffer(),
            "Only a CheckpointBarrier can be announced but found %s", sequenceBuffer.buffer);
        AbstractEvent event = EventSerializer.fromBuffer(
            sequenceBuffer.buffer, getClass().getClassLoader());
        checkState(event instanceof CheckpointBarrier,
            "Only a CheckpointBarrier can be announced but found %s", sequenceBuffer.buffer);
        CheckpointBarrier barrier = (CheckpointBarrier) event;
        return new SequenceBuffer(
            EventSerializer.toBuffer(new EventAnnouncement(barrier, sequenceBuffer.sequenceNumber), true),
            sequenceBuffer.sequenceNumber);
    }

    /**
     * Spills all queued buffers on checkpoint start. If barrier has already been received (and reordered), spill only
     * the overtaken buffers.
     */
    public void checkpointStarted(CheckpointBarrier barrier) {
        synchronized (receivedBuffers) {
            channelStatePersister.startPersisting(
                barrier.getId(),
                getInflightBuffers(
                    numBuffersOvertaken == ALL
                        ? receivedBuffers.getNumUnprioritizedElements()
                        : numBuffersOvertaken));
        }
    }

    public void checkpointStopped(long checkpointId) {
        synchronized (receivedBuffers) {
            channelStatePersister.stopPersisting();
            numBuffersOvertaken = ALL;
        }
    }

    /**
     * Returns a list of buffers, checking the first n non-priority buffers, and skipping all events.
     */
    private List<Buffer> getInflightBuffers(int numBuffers) {
        assert Thread.holdsLock(receivedBuffers);
        if (numBuffers == 0) {
            return Collections.emptyList();
        }
        final List<Buffer> inflightBuffers = new ArrayList<>(numBuffers);
        Iterator<SequenceBuffer> iterator = receivedBuffers.iterator();
        Iterators.advance(iterator, receivedBuffers.getNumPriorityElements());
        for (int pos = 0; pos < numBuffers; pos++) {
            Buffer buffer = iterator.next().buffer;
            if (buffer.isBuffer()) {
                inflightBuffers.add(buffer.retainBuffer());
            }
        }
        return inflightBuffers;
    }

    // Sequence-number bookkeeping for responses that carry no data.
    public void onEmptyBuffer(int sequenceNumber, int backlog) throws IOException {
        boolean success = false;
        synchronized (receivedBuffers) {
            if (!isReleased.get()) {
                if (expectedSequenceNumber == sequenceNumber) {
                    expectedSequenceNumber++;
                    success = true;
                } else {
                    onError(new BufferReorderingException(expectedSequenceNumber, sequenceNumber));
                }
            }
        }
        // Handled outside the lock: may request buffers / notify credit.
        if (success && backlog >= 0) {
            onSenderBacklog(backlog);
        }
    }

    public void onFailedPartitionRequest() {
        inputGate.triggerPartitionStateCheck(partitionId);
    }

    public void onError(Throwable cause) {
        setError(cause);
    }

    private void checkPartitionRequestQueueInitialized() throws IOException {
        checkError();
        checkState(partitionRequestClient != null,
            "Bug: partitionRequestClient is not initialized before processing data and no error is detected.");
    }

    /** Signals that a buffer arrived out of order on this channel. */
    private static class BufferReorderingException extends IOException {

        private static final long serialVersionUID = -888282210356266816L;

        private final int expectedSequenceNumber;
        private final int actualSequenceNumber;

        BufferReorderingException(int expectedSequenceNumber, int actualSequenceNumber) {
            this.expectedSequenceNumber = expectedSequenceNumber;
            this.actualSequenceNumber = actualSequenceNumber;
        }

        @Override
        public String getMessage() {
            return String.format("Buffer re-ordering: expected buffer with sequence number %d, but received %d.",
                expectedSequenceNumber, actualSequenceNumber);
        }
    }

    /** A received buffer paired with the sequence number it arrived with. */
    private static final class SequenceBuffer {
        final Buffer buffer;
        final int sequenceNumber;

        private SequenceBuffer(Buffer buffer, int sequenceNumber) {
            this.buffer = buffer;
            this.sequenceNumber = sequenceNumber;
        }
    }
}
I didn't pay attention to that, to be honest. I'll add the brackets.
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { if (dir.equals(root) || dir.toString().equals("/") || dir.startsWith(devTemplatesPath)) return FileVisitResult.CONTINUE; return FileVisitResult.SKIP_SUBTREE; }
if (dir.equals(root) || dir.toString().equals("/") || dir.startsWith(devTemplatesPath))
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { if (dir.equals(root) || dir.toString().equals("/") || dir.startsWith(devTemplatesPath)) { return FileVisitResult.CONTINUE; } return FileVisitResult.SKIP_SUBTREE; }
// NOTE(review): Quarkus DevConsoleProcessor build-step class. The chunk below was collapsed
// to a few very long physical lines by extraction tooling and is TRUNCATED at the end
// (the final Pattern literal is cut mid-string). Lines are preserved verbatim because
// several string literals are garbled and re-flowing them would change tokens.
class DevConsoleProcessor { private static final Logger log = Logger.getLogger(DevConsoleProcessor.class); private static final String STATIC_RESOURCES_PATH = "dev-static/"; private static final Object EMPTY = new Object(); private static final String[] suffixes = new String[] { "html", "txt" }; protected static volatile ServerBootstrap virtualBootstrap; protected static volatile Vertx devConsoleVertx; protected static volatile Channel channel; static Router router; static Router mainRouter; public static void initializeVirtual() { if (virtualBootstrap != null) { return; } devConsoleVertx = initializeDevConsoleVertx(); VertxInternal vertx = (VertxInternal) devConsoleVertx; QuarkusClassLoader ccl = (QuarkusClassLoader) DevConsoleProcessor.class.getClassLoader(); ccl.addCloseTask(new Runnable() { @Override public void run() { virtualBootstrap = null; if (channel != null) { try { channel.close().sync(); } catch (InterruptedException e) { throw new RuntimeException("failed to close virtual http"); } } if (devConsoleVertx != null) { devConsoleVertx.close(); devConsoleVertx = null; } } }); virtualBootstrap = new ServerBootstrap(); virtualBootstrap.group(vertx.getEventLoopGroup()) .channel(VirtualServerChannel.class) .handler(new ChannelInitializer<VirtualServerChannel>() { @Override public void initChannel(VirtualServerChannel ch) throws Exception { } }) .childHandler(new ChannelInitializer<VirtualChannel>() { @Override public void initChannel(VirtualChannel ch) throws Exception { EventLoopContext context = vertx.createEventLoopContext(); VertxHandler<Http1xServerConnection> handler = VertxHandler.create(chctx -> { Http1xServerConnection connection = new Http1xServerConnection( () -> context, null, new HttpServerOptions(), chctx, context, "localhost", null); connection.handler(new Handler<HttpServerRequest>() { @Override public void handle(HttpServerRequest event) { mainRouter.handle(event); } }); return connection; }); ch.pipeline().addLast("handler", handler); } });
// Binds the virtual channel, boots the DevConsole Vert.x instance, and builds the dev routers.
try { ChannelFuture future = virtualBootstrap.bind(DevConsoleHttpHandler.QUARKUS_DEV_CONSOLE); future.sync(); channel = future.channel(); } catch (InterruptedException e) { throw new RuntimeException("failed to bind virtual http"); } } /** * Boots the Vert.x instance used by the DevConsole, * applying some minimal tuning and customizations. * * @return the initialized Vert.x instance */ private static Vertx initializeDevConsoleVertx() { final VertxOptions vertxOptions = new VertxOptions(); int POOL_SIZE = 2; vertxOptions.setEventLoopPoolSize(POOL_SIZE); vertxOptions.setWorkerPoolSize(POOL_SIZE); vertxOptions.getMetricsOptions().setEnabled(false); vertxOptions.getFileSystemOptions().setFileCachingEnabled(false); VertxBuilder builder = new VertxBuilder(vertxOptions); builder.threadFactory(new VertxThreadFactory() { @Override public VertxThread newVertxThread(Runnable target, String name, boolean worker, long maxExecTime, TimeUnit maxExecTimeUnit) { return new VertxThread(target, "[DevConsole]" + name, worker, maxExecTime, maxExecTimeUnit); } }); builder.transport(Transport.JDK); builder.init(); return builder.vertx(); } protected static void newRouter(Engine engine, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) { String httpRootPath = nonApplicationRootPathBuildItem.getNormalizedHttpRootPath(); String frameworkRootPath = nonApplicationRootPathBuildItem.getNonApplicationRootPath(); Handler<RoutingContext> errorHandler = new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { String message = "Dev console request failed"; log.error(message, event.failure()); event.response().headers().set(HttpHeaderNames.CONTENT_TYPE, "text/html; charset=utf-8"); event.response().end( new TemplateHtmlBuilder("Internal Server Error", message, message).stack(event.failure()).toString()); } }; router = Router.router(devConsoleVertx); router.errorHandler(500, errorHandler); router.route() .order(Integer.MIN_VALUE) .handler(new
FlashScopeHandler()); router.route().method(HttpMethod.GET) .order(Integer.MIN_VALUE + 1) .handler(new DevConsole(engine, httpRootPath, frameworkRootPath)); mainRouter = Router.router(devConsoleVertx); mainRouter.errorHandler(500, errorHandler); mainRouter.route(nonApplicationRootPathBuildItem.resolvePath("dev*")).subRouter(router); } @BuildStep(onlyIf = IsDevelopment.class) public ServiceStartBuildItem buildTimeTemplates(List<DevConsoleTemplateInfoBuildItem> items, CurateOutcomeBuildItem curateOutcomeBuildItem) { Map<String, Map<String, Object>> results = new HashMap<>(); for (DevConsoleTemplateInfoBuildItem i : items) { Entry<String, String> groupAndArtifact = i.groupIdAndArtifactId(curateOutcomeBuildItem); Map<String, Object> map = results.computeIfAbsent(groupAndArtifact.getKey() + "." + groupAndArtifact.getValue(), (s) -> new HashMap<>()); map.put(i.getName(), i.getObject()); } DevConsoleManager.setTemplateInfo(results); return null; } @BuildStep DevTemplateVariantsBuildItem collectTemplateVariants(List<DevTemplatePathBuildItem> templatePaths) throws IOException { Set<String> allPaths = templatePaths.stream().map(DevTemplatePathBuildItem::getPath).collect(Collectors.toSet()); Map<String, List<String>> baseToVariants = new HashMap<>(); for (String path : allPaths) { int idx = path.lastIndexOf('.'); if (idx != -1) { String base = path.substring(0, idx); List<String> variants = baseToVariants.get(base); if (variants == null) { variants = new ArrayList<>(); baseToVariants.put(base, variants); } variants.add(path); } } return new DevTemplateVariantsBuildItem(baseToVariants); } @BuildStep(onlyIf = IsDevelopment.class) @Record(ExecutionTime.RUNTIME_INIT) public void runtimeTemplates(List<DevConsoleRuntimeTemplateInfoBuildItem> items, DevConsoleRecorder recorder, List<ServiceStartBuildItem> gate) { for (DevConsoleRuntimeTemplateInfoBuildItem i : items) { recorder.addInfo(i.getGroupId(), i.getArtifactId(), i.getName(), i.getObject()); } recorder.initConfigFun(); }
// Build steps: log-stream handler wiring and deployment-side dev-console route registration.
@BuildStep(onlyIf = IsDevelopment.class) @Record(ExecutionTime.STATIC_INIT) public void handler(BuildProducer<HistoryHandlerBuildItem> historyProducer, BuildProducer<WebSocketLogHandlerBuildItem> webSocketLogHandlerBuildItem, LogStreamRecorder recorder, DevUIConfig devUiConfig) { RuntimeValue<Optional<WebSocketLogHandler>> handler = recorder.logHandler(devUiConfig.historySize); webSocketLogHandlerBuildItem.produce(new WebSocketLogHandlerBuildItem((RuntimeValue) handler)); historyProducer.produce(new HistoryHandlerBuildItem(handler)); } @Consume(LoggingSetupBuildItem.class) @BuildStep(onlyIf = IsDevelopment.class) public ServiceStartBuildItem setupDeploymentSideHandling(List<DevTemplatePathBuildItem> devTemplatePaths, CurateOutcomeBuildItem curateOutcomeBuildItem, BuildSystemTargetBuildItem buildSystemTargetBuildItem, Optional<EffectiveIdeBuildItem> effectiveIdeBuildItem, List<RouteBuildItem> allRoutes, List<DevConsoleRouteBuildItem> routes, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, List<ConfigDescriptionBuildItem> configDescriptionBuildItems, LaunchModeBuildItem launchModeBuildItem) { if (launchModeBuildItem.getDevModeType().orElse(null) != DevModeType.LOCAL) { return null; } initializeVirtual(); Engine quteEngine = buildEngine(devTemplatePaths, allRoutes, buildSystemTargetBuildItem, effectiveIdeBuildItem, nonApplicationRootPathBuildItem, configDescriptionBuildItems, launchModeBuildItem); newRouter(quteEngine, nonApplicationRootPathBuildItem); for (DevConsoleRouteBuildItem i : routes) { Entry<String, String> groupAndArtifact = i.groupIdAndArtifactId(curateOutcomeBuildItem); if (i.isDeploymentSide()) { Route route = router .route("/" + groupAndArtifact.getKey() + "."
+ groupAndArtifact.getValue() + "/" + i.getPath()); if (i.getMethod() != null) { route = route.method(HttpMethod.valueOf(i.getMethod())); } if (i.isBodyHandlerRequired()) { route.handler(BodyHandler.create()); } route.handler(i.getHandler()); } } return null; } @Record(ExecutionTime.RUNTIME_INIT) @Consume(LoggingSetupBuildItem.class) @BuildStep(onlyIf = IsDevelopment.class) public void setupDevConsoleRoutes( DevConsoleRecorder recorder, LogStreamRecorder logStreamRecorder, List<DevConsoleRouteBuildItem> routes, CurateOutcomeBuildItem curateOutcomeBuildItem, HistoryHandlerBuildItem historyHandlerBuildItem, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, LaunchModeBuildItem launchModeBuildItem, ShutdownContextBuildItem shutdownContext, BuildProducer<RouteBuildItem> routeBuildItemBuildProducer, LiveReloadBuildItem liveReloadBuildItem) throws IOException { if (launchModeBuildItem.getDevModeType().orElse(null) != DevModeType.LOCAL) { return; } ResolvedDependency devConsoleResourcesArtifact = WebJarUtil.getAppArtifact(curateOutcomeBuildItem, "io.quarkus", "quarkus-vertx-http-deployment"); Path devConsoleStaticResourcesDeploymentPath = WebJarUtil.copyResourcesForDevOrTest(liveReloadBuildItem, curateOutcomeBuildItem, launchModeBuildItem, devConsoleResourcesArtifact, STATIC_RESOURCES_PATH); routeBuildItemBuildProducer.produce(nonApplicationRootPathBuildItem.routeBuilder() .route("dev/resources/*") .handler(recorder.devConsoleHandler(devConsoleStaticResourcesDeploymentPath.toString(), shutdownContext)) .build()); routeBuildItemBuildProducer.produce(nonApplicationRootPathBuildItem.routeBuilder() .route("dev/logstream") .handler(logStreamRecorder.websocketHandler(historyHandlerBuildItem.value)) .build()); for (DevConsoleRouteBuildItem i : routes) { Entry<String, String> groupAndArtifact = i.groupIdAndArtifactId(curateOutcomeBuildItem); if (!i.isDeploymentSide()) { routeBuildItemBuildProducer.produce(nonApplicationRootPathBuildItem.routeBuilder()
.routeFunction( "dev/" + groupAndArtifact.getKey() + "." + groupAndArtifact.getValue() + "/" + i.getPath(), new RuntimeDevConsoleRoute(i.getMethod())) .handler(i.getHandler()) .build()); } } DevConsoleManager.registerHandler(new DevConsoleHttpHandler()); routeBuildItemBuildProducer.produce(nonApplicationRootPathBuildItem.routeBuilder() .route("dev/*") .handler(new DevConsoleFilter()) .build()); routeBuildItemBuildProducer.produce(nonApplicationRootPathBuildItem.routeBuilder() .route("dev") .displayOnNotFoundPage("Dev UI") .handler(new RedirectHandler()) .build()); } @BuildStep void builder(Optional<EffectiveIdeBuildItem> effectiveIdeBuildItem, BuildProducer<DevConsoleRouteBuildItem> producer) { if (effectiveIdeBuildItem.isPresent()) { producer.produce(new DevConsoleRouteBuildItem("openInIDE", "POST", new OpenIdeHandler(effectiveIdeBuildItem.get().getIde()))); } } static volatile ConsoleStateManager.ConsoleContext context; @Produce(ServiceStartBuildItem.class) @BuildStep() void setupConsole(HttpRootPathBuildItem rp, NonApplicationRootPathBuildItem np, LaunchModeBuildItem launchModeBuildItem) { if (launchModeBuildItem.getDevModeType().orElse(null) != DevModeType.LOCAL) { return; } if (context == null) { context = ConsoleStateManager.INSTANCE.createContext("HTTP"); } Config c = ConfigProvider.getConfig(); String host = c.getOptionalValue("quarkus.http.host", String.class).orElse("localhost"); String port = c.getOptionalValue("quarkus.http.port", String.class).orElse("8080"); context.reset( new ConsoleCommand('w', "Open the application in a browser", null, () -> openBrowser(rp, np, "/", host, port)), new ConsoleCommand('d', "Open the Dev UI in a browser", null, () -> openBrowser(rp, np, "/q/dev", host, port))); } public static void openBrowser(HttpRootPathBuildItem rp, NonApplicationRootPathBuildItem np, String path, String host, String port) { if (path.startsWith("/q")) { path = np.resolvePath(path.substring(3)); } else { path = rp.resolvePath(path.substring(1)); }
// NOTE(review): the next line is garbled — the "http://" literal lost its closing quote
// (the "//" was likely mis-parsed as a line comment during extraction), leaving an
// unterminated string that corrupts tokenization for the rest of the physical line.
// Preserved verbatim; restore `new StringBuilder("http://");` against upstream.
StringBuilder sb = new StringBuilder("http: Config c = ConfigProvider.getConfig(); sb.append(host); sb.append(":"); sb.append(port); sb.append(path); String url = sb.toString(); Runtime rt = Runtime.getRuntime(); OS os = OS.determineOS(); try { switch (os) { case MAC: rt.exec(new String[] { "open", url }); break; case LINUX: rt.exec(new String[] { "xdg-open", url }); break; case WINDOWS: rt.exec(new String[] { "rundll32", "url.dll,FileProtocolHandler", url }); break; case OTHER: log.error("Cannot launch browser on this operating system"); } } catch (Exception e) { log.error("Failed to launch browser", e); } } private Engine buildEngine(List<DevTemplatePathBuildItem> devTemplatePaths, List<RouteBuildItem> allRoutes, BuildSystemTargetBuildItem buildSystemTargetBuildItem, Optional<EffectiveIdeBuildItem> effectiveIdeBuildItem, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, List<ConfigDescriptionBuildItem> configDescriptionBuildItems, LaunchModeBuildItem launchModeBuildItem) { EngineBuilder builder = Engine.builder().addDefaults(); builder.addResultMapper(new HtmlEscaper(List.of(Variant.TEXT_HTML))); builder.strictRendering(true) .addValueResolver(new ReflectionValueResolver()) .addValueResolver(new JsonObjectValueResolver()) .addValueResolver(new MultiMapValueResolver()) .addValueResolver(ValueResolvers.rawResolver()) .addNamespaceResolver(NamespaceResolver.builder("ideInfo") .resolve(new IdeInfoContextFunction(buildSystemTargetBuildItem, effectiveIdeBuildItem, launchModeBuildItem)) .build()) .addNamespaceResolver(NamespaceResolver.builder("info").resolve(ctx -> { String ext = DevConsole.currentExtension.get(); if (ext == null) { return Results.NotFound.from(ctx); } Map<String, Object> map = DevConsoleManager.getTemplateInfo().get(ext); if (map == null) { return Results.NotFound.from(ctx); } Object result = map.get(ctx.getName()); return result == null ?
Results.NotFound.from(ctx) : result; }).build()); Map<String, String> resolvedPaths = new HashMap<>(); for (RouteBuildItem item : allRoutes) { ConfiguredPathInfo resolvedPathBuildItem = item.getDevConsoleResolvedPath(); if (resolvedPathBuildItem != null) { resolvedPaths.put(resolvedPathBuildItem.getName(), resolvedPathBuildItem.getEndpointPath(nonApplicationRootPathBuildItem)); } } Map<String, String> defaultValues = getDefaultValues(configDescriptionBuildItems); builder.addNamespaceResolver(NamespaceResolver.builder("config").resolveAsync(ctx -> { List<Expression> params = ctx.getParams(); if (params.size() != 1 || (!ctx.getName().equals("property") && !ctx.getName().equals("http-path"))) { return Results.notFound(ctx); } if (ctx.getName().equals("http-path")) { return ctx.evaluate(params.get(0)).thenCompose(propertyName -> { String value = resolvedPaths.get(propertyName.toString()); return CompletableFuture.completedFuture(value != null ? value : Results.NotFound.from(ctx)); }); } else { return ctx.evaluate(params.get(0)).thenCompose(propertyName -> { Function<String, Optional<String>> configFun = DevConsoleManager.getGlobal("devui-config-fun"); String val = configFun.apply(propertyName.toString()).orElse(defaultValues.get(propertyName.toString())); return CompletableFuture.completedFuture(val != null ?
val : Results.NotFound.from(ctx)); }); } }).build()); builder.addValueResolver(new JavaDocResolver()); Map<String, String> templates = new HashMap<>(); for (DevTemplatePathBuildItem devTemplatePath : devTemplatePaths) { templates.put(devTemplatePath.getPath(), devTemplatePath.getContents()); if (devTemplatePath.isTag()) { String tagName = devTemplatePath.getTagName(); builder.addSectionHelper(new UserTagSectionHelper.Factory(tagName, devTemplatePath.getPath())); } } builder.addLocator(id -> locateTemplate(id, templates)); builder.addResultMapper(new ResultMapper() { @Override public int getPriority() { return 10; } @Override public boolean appliesTo(Origin origin, Object result) { return Results.isNotFound(result); } @Override public String map(Object result, Expression expression) { Origin origin = expression.getOrigin(); throw new TemplateException(origin, String.format("Property not found in expression {%s} in template %s on line %s", expression.toOriginalString(), origin.getTemplateId(), origin.getLine())); } }); builder.addResultMapper(new ResultMapper() { @Override public int getPriority() { return 10; } @Override public boolean appliesTo(Origin origin, Object result) { return result.equals(EMPTY); } @Override public String map(Object result, Expression expression) { return "<<unset>>"; } }); Engine engine = builder.build(); for (DevTemplatePathBuildItem devTemplatePath : devTemplatePaths) { if (!devTemplatePath.isTag()) { engine.getTemplate(devTemplatePath.getPath()); } } return engine; } private Map<String, String> getDefaultValues(List<ConfigDescriptionBuildItem> configDescriptionBuildItems) { Map<String, String> defaultValues = new HashMap<>(); for (ConfigDescriptionBuildItem configDescriptionBuildItem : configDescriptionBuildItems) { if (configDescriptionBuildItem.getDefaultValue() != null) { defaultValues.put(configDescriptionBuildItem.getPropertyName(), configDescriptionBuildItem.getDefaultValue()); } } return defaultValues; } private static
Optional<TemplateLocator.TemplateLocation> locateTemplate(String id, Map<String, String> templates) { String template = templates.get(id); if (template == null) { for (String suffix : suffixes) { id = id + "." + suffix; template = templates.get(id); if (template != null) { break; } } } if (template == null) return Optional.empty(); String templateName = id; String finalTemplate = template; return Optional.of(new TemplateLocator.TemplateLocation() { @Override public Reader read() { return new StringReader(finalTemplate); } @Override public Optional<Variant> getVariant() { Variant variant = null; String fileName = templateName; int slashIdx = fileName.lastIndexOf('/'); if (slashIdx != -1) { fileName = fileName.substring(slashIdx, fileName.length()); } int dotIdx = fileName.lastIndexOf('.'); if (dotIdx != -1) { String suffix = fileName.substring(dotIdx + 1, fileName.length()); if (suffix.equalsIgnoreCase("json")) { variant = Variant.forContentType(Variant.APPLICATION_JSON); } else { String contentType = URLConnection.getFileNameMap().getContentTypeFor(fileName); if (contentType != null) { variant = Variant.forContentType(contentType); } } } return Optional.ofNullable(variant); } }); } @BuildStep void collectTemplates(BuildProducer<DevTemplatePathBuildItem> devTemplatePaths) { final Enumeration<URL> devTemplateURLs; try { devTemplateURLs = DevConsoleProcessor.class.getClassLoader().getResources("/dev-templates"); } catch (IOException e) { throw new UncheckedIOException(e); } while (devTemplateURLs.hasMoreElements()) { URL devTemplatesUrl = devTemplateURLs.nextElement(); ClassPathUtils.consumeAsPath(devTemplatesUrl, devTemplatesDir -> { final Path classesDir = devTemplatesDir.getParent(); if (classesDir == null) { return; } final Entry<String, String> entry = readPomPropertiesIfBuilt(devTemplatesUrl, classesDir); if (entry == null) { return; } try { scanTemplates(entry, devTemplatesDir, devTemplatePaths); } catch (IOException e) { throw new UncheckedIOException(e); }
// NOTE(review): scanTemplates below contains a duplicated "@Override @Override" and is
// missing the preVisitDirectory override that should precede visitFile; the final
// Pattern literal is truncated mid-string where this chunk was cut. Preserved verbatim.
}); } } private Entry<String, String> readPomPropertiesIfBuilt(URL devTemplatesURL, final Path classesDir) { Entry<String, String> entry = null; try { if ("jar".equals(devTemplatesURL.getProtocol())) { final Path metaInf = classesDir.resolve("META-INF").resolve("maven"); if (Files.exists(metaInf)) { entry = ArtifactInfoUtil.groupIdAndArtifactId(metaInf); } } else { final Path rootDir = classesDir.getParent(); final Path mavenArchiver = rootDir == null ? null : rootDir.resolve("maven-archiver"); if (mavenArchiver == null || !mavenArchiver.toFile().canRead()) { return null; } entry = ArtifactInfoUtil.groupIdAndArtifactId(mavenArchiver); } } catch (IOException e) { throw new UncheckedIOException(e); } if (entry == null) { throw new RuntimeException("Missing POM metadata in " + devTemplatesURL); } return entry; } private void scanTemplates(Entry<String, String> entry, Path devTemplatesPath, BuildProducer<DevTemplatePathBuildItem> devTemplatePaths) throws IOException { String prefix; if (entry.getKey().equals("io.quarkus") && entry.getValue().equals("quarkus-vertx-http")) prefix = ""; else prefix = entry.getKey() + "."
+ entry.getValue() + "/"; final Path root = devTemplatesPath.getParent(); Files.walkFileTree(root, new SimpleFileVisitor<Path>() { @Override @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { String contents = Files.readString(file); String relativePath = devTemplatesPath.relativize(file).toString(); String correctedPath; if (File.separatorChar == '\\') { relativePath = relativePath.replace('\\', '/'); } if (relativePath.startsWith(DevTemplatePathBuildItem.TAGS)) correctedPath = relativePath; else correctedPath = prefix + relativePath; devTemplatePaths .produce(new DevTemplatePathBuildItem(correctedPath, contents)); return super.visitFile(file, attrs); } }); } public static class JavaDocResolver implements ValueResolver { private final Pattern codePattern = Pattern.compile("(\\{@code )([^}]+)(\\})"); private final Pattern linkPattern = Pattern.compile("(\\{@link )([^}]+)(\\}
class DevConsoleProcessor { private static final Logger log = Logger.getLogger(DevConsoleProcessor.class); private static final String STATIC_RESOURCES_PATH = "dev-static/"; private static final Object EMPTY = new Object(); private static final String[] suffixes = new String[] { "html", "txt" }; protected static volatile ServerBootstrap virtualBootstrap; protected static volatile Vertx devConsoleVertx; protected static volatile Channel channel; static Router router; static Router mainRouter; public static void initializeVirtual() { if (virtualBootstrap != null) { return; } devConsoleVertx = initializeDevConsoleVertx(); VertxInternal vertx = (VertxInternal) devConsoleVertx; QuarkusClassLoader ccl = (QuarkusClassLoader) DevConsoleProcessor.class.getClassLoader(); ccl.addCloseTask(new Runnable() { @Override public void run() { virtualBootstrap = null; if (channel != null) { try { channel.close().sync(); } catch (InterruptedException e) { throw new RuntimeException("failed to close virtual http"); } } if (devConsoleVertx != null) { devConsoleVertx.close(); devConsoleVertx = null; } } }); virtualBootstrap = new ServerBootstrap(); virtualBootstrap.group(vertx.getEventLoopGroup()) .channel(VirtualServerChannel.class) .handler(new ChannelInitializer<VirtualServerChannel>() { @Override public void initChannel(VirtualServerChannel ch) throws Exception { } }) .childHandler(new ChannelInitializer<VirtualChannel>() { @Override public void initChannel(VirtualChannel ch) throws Exception { EventLoopContext context = vertx.createEventLoopContext(); VertxHandler<Http1xServerConnection> handler = VertxHandler.create(chctx -> { Http1xServerConnection connection = new Http1xServerConnection( () -> context, null, new HttpServerOptions(), chctx, context, "localhost", null); connection.handler(new Handler<HttpServerRequest>() { @Override public void handle(HttpServerRequest event) { mainRouter.handle(event); } }); return connection; }); ch.pipeline().addLast("handler", handler); } }); 
try { ChannelFuture future = virtualBootstrap.bind(DevConsoleHttpHandler.QUARKUS_DEV_CONSOLE); future.sync(); channel = future.channel(); } catch (InterruptedException e) { throw new RuntimeException("failed to bind virtual http"); } } /** * Boots the Vert.x instance used by the DevConsole, * applying some minimal tuning and customizations. * * @return the initialized Vert.x instance */ private static Vertx initializeDevConsoleVertx() { final VertxOptions vertxOptions = new VertxOptions(); int POOL_SIZE = 2; vertxOptions.setEventLoopPoolSize(POOL_SIZE); vertxOptions.setWorkerPoolSize(POOL_SIZE); vertxOptions.getMetricsOptions().setEnabled(false); vertxOptions.getFileSystemOptions().setFileCachingEnabled(false); vertxOptions.getFileSystemOptions().setClassPathResolvingEnabled(false); VertxBuilder builder = new VertxBuilder(vertxOptions); builder.threadFactory(new VertxThreadFactory() { @Override public VertxThread newVertxThread(Runnable target, String name, boolean worker, long maxExecTime, TimeUnit maxExecTimeUnit) { return new VertxThread(target, "[DevConsole]" + name, worker, maxExecTime, maxExecTimeUnit); } }); builder.transport(Transport.JDK); builder.init(); return builder.vertx(); } protected static void newRouter(Engine engine, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) { String httpRootPath = nonApplicationRootPathBuildItem.getNormalizedHttpRootPath(); String frameworkRootPath = nonApplicationRootPathBuildItem.getNonApplicationRootPath(); Handler<RoutingContext> errorHandler = new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { String message = "Dev console request failed"; log.error(message, event.failure()); event.response().headers().set(HttpHeaderNames.CONTENT_TYPE, "text/html; charset=utf-8"); event.response().end( new TemplateHtmlBuilder("Internal Server Error", message, message).stack(event.failure()).toString()); } }; router = Router.router(devConsoleVertx); router.errorHandler(500, 
errorHandler); router.route() .order(Integer.MIN_VALUE) .handler(new FlashScopeHandler()); router.route().method(HttpMethod.GET) .order(Integer.MIN_VALUE + 1) .handler(new DevConsole(engine, httpRootPath, frameworkRootPath)); mainRouter = Router.router(devConsoleVertx); mainRouter.errorHandler(500, errorHandler); mainRouter.route(nonApplicationRootPathBuildItem.resolvePath("dev*")).subRouter(router); } @BuildStep(onlyIf = IsDevelopment.class) public ServiceStartBuildItem buildTimeTemplates(List<DevConsoleTemplateInfoBuildItem> items, CurateOutcomeBuildItem curateOutcomeBuildItem) { Map<String, Map<String, Object>> results = new HashMap<>(); for (DevConsoleTemplateInfoBuildItem i : items) { Entry<String, String> groupAndArtifact = i.groupIdAndArtifactId(curateOutcomeBuildItem); Map<String, Object> map = results.computeIfAbsent(groupAndArtifact.getKey() + "." + groupAndArtifact.getValue(), (s) -> new HashMap<>()); map.put(i.getName(), i.getObject()); } DevConsoleManager.setTemplateInfo(results); return null; } @BuildStep DevTemplateVariantsBuildItem collectTemplateVariants(List<DevTemplatePathBuildItem> templatePaths) throws IOException { Set<String> allPaths = templatePaths.stream().map(DevTemplatePathBuildItem::getPath).collect(Collectors.toSet()); Map<String, List<String>> baseToVariants = new HashMap<>(); for (String path : allPaths) { int idx = path.lastIndexOf('.'); if (idx != -1) { String base = path.substring(0, idx); List<String> variants = baseToVariants.get(base); if (variants == null) { variants = new ArrayList<>(); baseToVariants.put(base, variants); } variants.add(path); } } return new DevTemplateVariantsBuildItem(baseToVariants); } @BuildStep(onlyIf = IsDevelopment.class) @Record(ExecutionTime.RUNTIME_INIT) public void runtimeTemplates(List<DevConsoleRuntimeTemplateInfoBuildItem> items, DevConsoleRecorder recorder, List<ServiceStartBuildItem> gate) { for (DevConsoleRuntimeTemplateInfoBuildItem i : items) { recorder.addInfo(i.getGroupId(), 
i.getArtifactId(), i.getName(), i.getObject()); } recorder.initConfigFun(); } @BuildStep(onlyIf = IsDevelopment.class) @Record(ExecutionTime.STATIC_INIT) public void handler(BuildProducer<HistoryHandlerBuildItem> historyProducer, BuildProducer<WebSocketLogHandlerBuildItem> webSocketLogHandlerBuildItem, LogStreamRecorder recorder, DevUIConfig devUiConfig) { RuntimeValue<Optional<WebSocketLogHandler>> handler = recorder.logHandler(devUiConfig.historySize); webSocketLogHandlerBuildItem.produce(new WebSocketLogHandlerBuildItem((RuntimeValue) handler)); historyProducer.produce(new HistoryHandlerBuildItem(handler)); } @Consume(LoggingSetupBuildItem.class) @BuildStep(onlyIf = IsDevelopment.class) public ServiceStartBuildItem setupDeploymentSideHandling(List<DevTemplatePathBuildItem> devTemplatePaths, CurateOutcomeBuildItem curateOutcomeBuildItem, BuildSystemTargetBuildItem buildSystemTargetBuildItem, Optional<EffectiveIdeBuildItem> effectiveIdeBuildItem, List<RouteBuildItem> allRoutes, List<DevConsoleRouteBuildItem> routes, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, List<ConfigDescriptionBuildItem> configDescriptionBuildItems, LaunchModeBuildItem launchModeBuildItem) { if (launchModeBuildItem.getDevModeType().orElse(null) != DevModeType.LOCAL) { return null; } initializeVirtual(); Engine quteEngine = buildEngine(devTemplatePaths, allRoutes, buildSystemTargetBuildItem, effectiveIdeBuildItem, nonApplicationRootPathBuildItem, configDescriptionBuildItems, launchModeBuildItem); newRouter(quteEngine, nonApplicationRootPathBuildItem); for (DevConsoleRouteBuildItem i : routes) { Entry<String, String> groupAndArtifact = i.groupIdAndArtifactId(curateOutcomeBuildItem); if (i.isDeploymentSide()) { Route route = router .route("/" + groupAndArtifact.getKey() + "." 
+ groupAndArtifact.getValue() + "/" + i.getPath()); if (i.getMethod() != null) { route = route.method(HttpMethod.valueOf(i.getMethod())); } if (i.isBodyHandlerRequired()) { route.handler(BodyHandler.create()); } route.handler(i.getHandler()); } } return null; } @Record(ExecutionTime.RUNTIME_INIT) @Consume(LoggingSetupBuildItem.class) @BuildStep(onlyIf = IsDevelopment.class) public void setupDevConsoleRoutes( DevConsoleRecorder recorder, LogStreamRecorder logStreamRecorder, List<DevConsoleRouteBuildItem> routes, CurateOutcomeBuildItem curateOutcomeBuildItem, HistoryHandlerBuildItem historyHandlerBuildItem, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, LaunchModeBuildItem launchModeBuildItem, ShutdownContextBuildItem shutdownContext, BuildProducer<RouteBuildItem> routeBuildItemBuildProducer, LiveReloadBuildItem liveReloadBuildItem) throws IOException { if (launchModeBuildItem.getDevModeType().orElse(null) != DevModeType.LOCAL) { return; } ResolvedDependency devConsoleResourcesArtifact = WebJarUtil.getAppArtifact(curateOutcomeBuildItem, "io.quarkus", "quarkus-vertx-http-deployment"); Path devConsoleStaticResourcesDeploymentPath = WebJarUtil.copyResourcesForDevOrTest(liveReloadBuildItem, curateOutcomeBuildItem, launchModeBuildItem, devConsoleResourcesArtifact, STATIC_RESOURCES_PATH, true, true); List<FileSystemStaticHandler.StaticWebRootConfiguration> webRootConfigurations = new ArrayList<>(); webRootConfigurations.add( new FileSystemStaticHandler.StaticWebRootConfiguration(devConsoleStaticResourcesDeploymentPath.toString(), "")); for (Path resolvedPath : devConsoleResourcesArtifact.getResolvedPaths()) { webRootConfigurations .add(new FileSystemStaticHandler.StaticWebRootConfiguration(resolvedPath.toString(), STATIC_RESOURCES_PATH)); } routeBuildItemBuildProducer.produce(nonApplicationRootPathBuildItem.routeBuilder() .route("dev/resources/*") .handler(recorder.fileSystemStaticHandler( webRootConfigurations, 
devConsoleStaticResourcesDeploymentPath.toString(), shutdownContext)) .build()); routeBuildItemBuildProducer.produce(nonApplicationRootPathBuildItem.routeBuilder() .route("dev/logstream") .handler(logStreamRecorder.websocketHandler(historyHandlerBuildItem.value)) .build()); for (DevConsoleRouteBuildItem i : routes) { Entry<String, String> groupAndArtifact = i.groupIdAndArtifactId(curateOutcomeBuildItem); if (!i.isDeploymentSide()) { routeBuildItemBuildProducer.produce(nonApplicationRootPathBuildItem.routeBuilder() .routeFunction( "dev/" + groupAndArtifact.getKey() + "." + groupAndArtifact.getValue() + "/" + i.getPath(), new RuntimeDevConsoleRoute(i.getMethod())) .handler(i.getHandler()) .build()); } } DevConsoleManager.registerHandler(new DevConsoleHttpHandler()); routeBuildItemBuildProducer.produce(nonApplicationRootPathBuildItem.routeBuilder() .route("dev/*") .handler(new DevConsoleFilter()) .build()); routeBuildItemBuildProducer.produce(nonApplicationRootPathBuildItem.routeBuilder() .route("dev") .displayOnNotFoundPage("Dev UI") .handler(new RedirectHandler()) .build()); } @BuildStep void builder(Optional<EffectiveIdeBuildItem> effectiveIdeBuildItem, BuildProducer<DevConsoleRouteBuildItem> producer) { if (effectiveIdeBuildItem.isPresent()) { producer.produce(new DevConsoleRouteBuildItem("openInIDE", "POST", new OpenIdeHandler(effectiveIdeBuildItem.get().getIde()))); } } static volatile ConsoleStateManager.ConsoleContext context; @Produce(ServiceStartBuildItem.class) @BuildStep() void setupConsole(HttpRootPathBuildItem rp, NonApplicationRootPathBuildItem np, LaunchModeBuildItem launchModeBuildItem) { if (launchModeBuildItem.getDevModeType().orElse(null) != DevModeType.LOCAL) { return; } if (context == null) { context = ConsoleStateManager.INSTANCE.createContext("HTTP"); } Config c = ConfigProvider.getConfig(); String host = c.getOptionalValue("quarkus.http.host", String.class).orElse("localhost"); String port = c.getOptionalValue("quarkus.http.port", 
String.class).orElse("8080"); context.reset( new ConsoleCommand('w', "Open the application in a browser", null, () -> openBrowser(rp, np, "/", host, port)), new ConsoleCommand('d', "Open the Dev UI in a browser", null, () -> openBrowser(rp, np, "/q/dev", host, port))); } public static void openBrowser(HttpRootPathBuildItem rp, NonApplicationRootPathBuildItem np, String path, String host, String port) { if (path.startsWith("/q")) { path = np.resolvePath(path.substring(3)); } else { path = rp.resolvePath(path.substring(1)); } StringBuilder sb = new StringBuilder("http: Config c = ConfigProvider.getConfig(); sb.append(host); sb.append(":"); sb.append(port); sb.append(path); String url = sb.toString(); Runtime rt = Runtime.getRuntime(); OS os = OS.determineOS(); try { switch (os) { case MAC: rt.exec(new String[] { "open", url }); break; case LINUX: rt.exec(new String[] { "xdg-open", url }); break; case WINDOWS: rt.exec(new String[] { "rundll32", "url.dll,FileProtocolHandler", url }); break; case OTHER: log.error("Cannot launch browser on this operating system"); } } catch (Exception e) { log.error("Failed to launch browser", e); } } private Engine buildEngine(List<DevTemplatePathBuildItem> devTemplatePaths, List<RouteBuildItem> allRoutes, BuildSystemTargetBuildItem buildSystemTargetBuildItem, Optional<EffectiveIdeBuildItem> effectiveIdeBuildItem, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, List<ConfigDescriptionBuildItem> configDescriptionBuildItems, LaunchModeBuildItem launchModeBuildItem) { EngineBuilder builder = Engine.builder().addDefaults(); builder.addResultMapper(new HtmlEscaper(List.of(Variant.TEXT_HTML))); builder.strictRendering(true) .addValueResolver(new ReflectionValueResolver()) .addValueResolver(new JsonObjectValueResolver()) .addValueResolver(new MultiMapValueResolver()) .addValueResolver(ValueResolvers.rawResolver()) .addNamespaceResolver(NamespaceResolver.builder("ideInfo") .resolve(new 
IdeInfoContextFunction(buildSystemTargetBuildItem, effectiveIdeBuildItem, launchModeBuildItem)) .build()) .addNamespaceResolver(NamespaceResolver.builder("info").resolve(ctx -> { String ext = DevConsole.currentExtension.get(); if (ext == null) { return Results.NotFound.from(ctx); } Map<String, Object> map = DevConsoleManager.getTemplateInfo().get(ext); if (map == null) { return Results.NotFound.from(ctx); } Object result = map.get(ctx.getName()); return result == null ? Results.NotFound.from(ctx) : result; }).build()); Map<String, String> resolvedPaths = new HashMap<>(); for (RouteBuildItem item : allRoutes) { ConfiguredPathInfo resolvedPathBuildItem = item.getDevConsoleResolvedPath(); if (resolvedPathBuildItem != null) { resolvedPaths.put(resolvedPathBuildItem.getName(), resolvedPathBuildItem.getEndpointPath(nonApplicationRootPathBuildItem)); } } Map<String, String> defaultValues = getDefaultValues(configDescriptionBuildItems); builder.addNamespaceResolver(NamespaceResolver.builder("config").resolveAsync(ctx -> { List<Expression> params = ctx.getParams(); if (params.size() != 1 || (!ctx.getName().equals("property") && !ctx.getName().equals("http-path"))) { return Results.notFound(ctx); } if (ctx.getName().equals("http-path")) { return ctx.evaluate(params.get(0)).thenCompose(propertyName -> { String value = resolvedPaths.get(propertyName.toString()); return CompletableFuture.completedFuture(value != null ? value : Results.NotFound.from(ctx)); }); } else { return ctx.evaluate(params.get(0)).thenCompose(propertyName -> { Function<String, Optional<String>> configFun = DevConsoleManager.getGlobal("devui-config-fun"); String val = configFun.apply(propertyName.toString()).orElse(defaultValues.get(propertyName.toString())); return CompletableFuture.completedFuture(val != null ? 
val : Results.NotFound.from(ctx)); }); } }).build()); builder.addValueResolver(new JavaDocResolver()); Map<String, String> templates = new HashMap<>(); for (DevTemplatePathBuildItem devTemplatePath : devTemplatePaths) { templates.put(devTemplatePath.getPath(), devTemplatePath.getContents()); if (devTemplatePath.isTag()) { String tagName = devTemplatePath.getTagName(); builder.addSectionHelper(new UserTagSectionHelper.Factory(tagName, devTemplatePath.getPath())); } } builder.addLocator(id -> locateTemplate(id, templates)); builder.addResultMapper(new ResultMapper() { @Override public int getPriority() { return 10; } @Override public boolean appliesTo(Origin origin, Object result) { return Results.isNotFound(result); } @Override public String map(Object result, Expression expression) { Origin origin = expression.getOrigin(); throw new TemplateException(origin, String.format("Property not found in expression {%s} in template %s on line %s", expression.toOriginalString(), origin.getTemplateId(), origin.getLine())); } }); builder.addResultMapper(new ResultMapper() { @Override public int getPriority() { return 10; } @Override public boolean appliesTo(Origin origin, Object result) { return result.equals(EMPTY); } @Override public String map(Object result, Expression expression) { return "<<unset>>"; } }); Engine engine = builder.build(); for (DevTemplatePathBuildItem devTemplatePath : devTemplatePaths) { if (!devTemplatePath.isTag()) { engine.getTemplate(devTemplatePath.getPath()); } } return engine; } private Map<String, String> getDefaultValues(List<ConfigDescriptionBuildItem> configDescriptionBuildItems) { Map<String, String> defaultValues = new HashMap<>(); for (ConfigDescriptionBuildItem configDescriptionBuildItem : configDescriptionBuildItems) { if (configDescriptionBuildItem.getDefaultValue() != null) { defaultValues.put(configDescriptionBuildItem.getPropertyName(), configDescriptionBuildItem.getDefaultValue()); } } return defaultValues; } private static 
Optional<TemplateLocator.TemplateLocation> locateTemplate(String id, Map<String, String> templates) { String template = templates.get(id); if (template == null) { for (String suffix : suffixes) { id = id + "." + suffix; template = templates.get(id); if (template != null) { break; } } } if (template == null) return Optional.empty(); String templateName = id; String finalTemplate = template; return Optional.of(new TemplateLocator.TemplateLocation() { @Override public Reader read() { return new StringReader(finalTemplate); } @Override public Optional<Variant> getVariant() { Variant variant = null; String fileName = templateName; int slashIdx = fileName.lastIndexOf('/'); if (slashIdx != -1) { fileName = fileName.substring(slashIdx, fileName.length()); } int dotIdx = fileName.lastIndexOf('.'); if (dotIdx != -1) { String suffix = fileName.substring(dotIdx + 1, fileName.length()); if (suffix.equalsIgnoreCase("json")) { variant = Variant.forContentType(Variant.APPLICATION_JSON); } else { String contentType = URLConnection.getFileNameMap().getContentTypeFor(fileName); if (contentType != null) { variant = Variant.forContentType(contentType); } } } return Optional.ofNullable(variant); } }); } @BuildStep void collectTemplates(BuildProducer<DevTemplatePathBuildItem> devTemplatePaths) { final Enumeration<URL> devTemplateURLs; try { devTemplateURLs = DevConsoleProcessor.class.getClassLoader().getResources("/dev-templates"); } catch (IOException e) { throw new UncheckedIOException(e); } while (devTemplateURLs.hasMoreElements()) { URL devTemplatesUrl = devTemplateURLs.nextElement(); ClassPathUtils.consumeAsPath(devTemplatesUrl, devTemplatesDir -> { final Path classesDir = devTemplatesDir.getParent(); if (classesDir == null) { return; } final Entry<String, String> entry = readPomPropertiesIfBuilt(devTemplatesUrl, classesDir); if (entry == null) { return; } try { scanTemplates(entry, devTemplatesDir, devTemplatePaths); } catch (IOException e) { throw new UncheckedIOException(e); } 
}); } } private Entry<String, String> readPomPropertiesIfBuilt(URL devTemplatesURL, final Path classesDir) { Entry<String, String> entry = null; try { if ("jar".equals(devTemplatesURL.getProtocol())) { final Path metaInf = classesDir.resolve("META-INF").resolve("maven"); if (Files.exists(metaInf)) { entry = ArtifactInfoUtil.groupIdAndArtifactId(metaInf); } } else { final Path rootDir = classesDir.getParent(); final Path mavenArchiver = rootDir == null ? null : rootDir.resolve("maven-archiver"); if (mavenArchiver == null || !mavenArchiver.toFile().canRead()) { return null; } entry = ArtifactInfoUtil.groupIdAndArtifactId(mavenArchiver); } } catch (IOException e) { throw new UncheckedIOException(e); } if (entry == null) { throw new RuntimeException("Missing POM metadata in " + devTemplatesURL); } return entry; } private void scanTemplates(Entry<String, String> entry, Path devTemplatesPath, BuildProducer<DevTemplatePathBuildItem> devTemplatePaths) throws IOException { String prefix; if (entry.getKey().equals("io.quarkus") && entry.getValue().equals("quarkus-vertx-http")) prefix = ""; else prefix = entry.getKey() + "." 
+ entry.getValue() + "/"; final Path root = devTemplatesPath.getParent(); Files.walkFileTree(root, new SimpleFileVisitor<Path>() { @Override @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { String contents = Files.readString(file); String relativePath = devTemplatesPath.relativize(file).toString(); String correctedPath; if (File.separatorChar == '\\') { relativePath = relativePath.replace('\\', '/'); } if (relativePath.startsWith(DevTemplatePathBuildItem.TAGS)) correctedPath = relativePath; else correctedPath = prefix + relativePath; devTemplatePaths .produce(new DevTemplatePathBuildItem(correctedPath, contents)); return super.visitFile(file, attrs); } }); } public static class JavaDocResolver implements ValueResolver { private final Pattern codePattern = Pattern.compile("(\\{@code )([^}]+)(\\})"); private final Pattern linkPattern = Pattern.compile("(\\{@link )([^}]+)(\\}
Shall we add tests for error throwing if not already covered
public static BString toExpString(double x, Object fractionDigits) { BString str = FloatUtils.getBStringIfInfiniteOrNaN(x); if (str != null) { return str; } long noOfFractionDigits; double xAbsValue = Math.abs(x); if (fractionDigits == null) { if (xAbsValue == 0) { noOfFractionDigits = 1; } else { int integerPart = (int) (Math.log10(xAbsValue)); noOfFractionDigits = BigDecimal.valueOf(xAbsValue / Math.pow(10, integerPart)).scale(); } } else { noOfFractionDigits = (long) fractionDigits; } if (noOfFractionDigits < 0) { throw ErrorUtils.createInvalidFractionDigitsError(); } if (FloatUtils.checkFractionDigitsWithinRange(noOfFractionDigits)) { noOfFractionDigits = 308; } int exponent = (int) noOfFractionDigits; int tens = 0; if (xAbsValue != 0 && xAbsValue < 1) { double multipliedValue = xAbsValue; while (multipliedValue < 1) { multipliedValue = xAbsValue * Math.pow(10, tens + 1); tens++; } } BigDecimal numberBigDecimal = new BigDecimal(x); if (xAbsValue != 0 && xAbsValue < 1) { numberBigDecimal = numberBigDecimal.setScale(exponent + tens, RoundingMode.HALF_EVEN); } else { numberBigDecimal = numberBigDecimal.setScale(exponent, RoundingMode.HALF_EVEN); } String power = "0".repeat(exponent); DecimalFormat decimalFormat = new DecimalFormat("0." + power + "E0"); decimalFormat.setRoundingMode(RoundingMode.HALF_EVEN); String res = decimalFormat.format(numberBigDecimal); int indexOfExp = res.lastIndexOf("E"); String firstSection = res.substring(0, indexOfExp); int idxOfDecimalPoint = firstSection.lastIndexOf("."); if (idxOfDecimalPoint == firstSection.length() - 1) { firstSection = res.substring(0, idxOfDecimalPoint); } String secondSection = res.substring(indexOfExp + 1); int p = Integer.parseInt(secondSection); if (p >= 0) { secondSection = "e+" + secondSection; } else { secondSection = "e" + secondSection; } return StringUtils.fromString(firstSection + secondSection); }
throw ErrorUtils.createInvalidFractionDigitsError();
public static BString toExpString(double x, Object fractionDigits) { BString str = FloatUtils.getBStringIfInfiniteOrNaN(x); if (str != null) { return str; } long noOfFractionDigits; double xAbsValue = Math.abs(x); if (fractionDigits == null) { if (xAbsValue == 0) { noOfFractionDigits = 1; } else { int integerPart = (int) (Math.log10(xAbsValue)); noOfFractionDigits = BigDecimal.valueOf(xAbsValue / Math.pow(10, integerPart)).scale(); } } else { noOfFractionDigits = (long) fractionDigits; } if (noOfFractionDigits < 0) { throw ErrorUtils.createInvalidFractionDigitsError(); } if (FloatUtils.checkFractionDigitsWithinRange(noOfFractionDigits)) { noOfFractionDigits = 308; } int exponent = (int) noOfFractionDigits; int tens = 0; if (xAbsValue != 0 && xAbsValue < 1) { double multipliedValue = xAbsValue; while (multipliedValue < 1) { multipliedValue = xAbsValue * Math.pow(10, tens + 1); tens++; } } BigDecimal numberBigDecimal = new BigDecimal(x); if (xAbsValue != 0 && xAbsValue < 1) { numberBigDecimal = numberBigDecimal.setScale(exponent + tens, RoundingMode.HALF_EVEN); } else { numberBigDecimal = numberBigDecimal.setScale(exponent, RoundingMode.HALF_EVEN); } String power = "0".repeat(exponent); DecimalFormat decimalFormat = new DecimalFormat("0." + power + "E0"); decimalFormat.setRoundingMode(RoundingMode.HALF_EVEN); String res = decimalFormat.format(numberBigDecimal); int indexOfExp = res.lastIndexOf("E"); String firstSection = res.substring(0, indexOfExp); int idxOfDecimalPoint = firstSection.lastIndexOf("."); if (idxOfDecimalPoint == firstSection.length() - 1) { firstSection = res.substring(0, idxOfDecimalPoint); } String secondSection = res.substring(indexOfExp + 1); int p = Integer.parseInt(secondSection); if (p >= 0) { secondSection = "e+" + secondSection; } else { secondSection = "e" + secondSection; } return StringUtils.fromString(firstSection + secondSection); }
class ToExpString { }
class ToExpString { }
Do we have an issue to fix these, https://github.com/ballerina-platform/ballerina-spec/issues/724#issuecomment-771358941
public void testTypeGuardNegative() { CompileResult negativeResult = BCompileUtil.compile("test-src/statements/ifelse/type-guard-negative.bal"); int i = 0; BAssertUtil.validateError(negativeResult, i++, "incompatible types: 'string' will not be matched to 'int'", 20, 27); BAssertUtil.validateError(negativeResult, i++, "unnecessary condition: expression will always evaluate to 'true'", 29, 13); BAssertUtil.validateError(negativeResult, i++, "incompatible types: 'string' will not be matched to 'int'", 33, 13); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(float|boolean)' will not be matched to 'string'", 53, 16); BAssertUtil.validateError(negativeResult, i++, "unreachable code", 84, 5); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(int|boolean)' will not be matched to 'float'", 99, 63); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(boolean|float)' will not be matched to 'int'", 108, 30); BAssertUtil.validateError(negativeResult, i++, "incompatible types: 'string' will not be matched to 'int'", 117, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: 'string' will not be matched to 'float'", 117, 37); BAssertUtil.validateError(negativeResult, i++, "incompatible types: 'string' will not be matched to 'float'", 126, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(Person|Student)' will not be matched to 'string'", 147, 10); BAssertUtil.validateError(negativeResult, i++, "unnecessary condition: expression will always evaluate to 'true'", 147, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(Person|Student)' will not be matched to 'float'", 147, 40); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(Person|Student)' will not be matched to 'boolean'", 147, 56); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(Baz|int)' will not be matched to 'Bar'", 167, 15); 
BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(Baz|int)' will not be matched to 'Qux'", 173, 15); Assert.assertEquals(negativeResult.getErrorCount(), i); }
public void testTypeGuardNegative() { CompileResult negativeResult = BCompileUtil.compile("test-src/statements/ifelse/type-guard-negative.bal"); int i = 0; BAssertUtil.validateError(negativeResult, i++, "incompatible types: 'string' will not be matched to 'int'", 20, 27); BAssertUtil.validateError(negativeResult, i++, "unnecessary condition: expression will always evaluate to 'true'", 29, 13); BAssertUtil.validateError(negativeResult, i++, "incompatible types: 'string' will not be matched to 'int'", 33, 13); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(float|boolean)' will not be matched to 'string'", 53, 16); BAssertUtil.validateError(negativeResult, i++, "unreachable code", 84, 5); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(int|boolean)' will not be matched to 'float'", 99, 63); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(boolean|float)' will not be matched to 'int'", 108, 30); BAssertUtil.validateError(negativeResult, i++, "incompatible types: 'string' will not be matched to 'int'", 117, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: 'string' will not be matched to 'float'", 117, 37); BAssertUtil.validateError(negativeResult, i++, "incompatible types: 'string' will not be matched to 'float'", 126, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(Person|Student)' will not be matched to 'string'", 147, 10); BAssertUtil.validateError(negativeResult, i++, "unnecessary condition: expression will always evaluate to 'true'", 147, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(Person|Student)' will not be matched to 'float'", 147, 40); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(Person|Student)' will not be matched to 'boolean'", 147, 56); BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(Baz|int)' will not be matched to 'Bar'", 167, 15); 
BAssertUtil.validateError(negativeResult, i++, "incompatible types: '(Baz|int)' will not be matched to 'Qux'", 173, 15); Assert.assertEquals(negativeResult.getErrorCount(), i); }
class TypeGuardTest { CompileResult result; @BeforeClass public void setup() { result = BCompileUtil.compile("test-src/statements/ifelse/type-guard.bal"); } @Test @Test public void testTypeGuardSemanticsNegative() { CompileResult negativeResult = BCompileUtil.compile( "test-src/statements/ifelse/type-guard-semantics-negative.bal"); int i = 0; BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'string'", 22, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found 'int'", 25, 20); BAssertUtil.validateError(negativeResult, i++, "undefined field 'b' in 'A'", 43, 16); BAssertUtil.validateError(negativeResult, i++, "undefined field 'c' in 'A'", 43, 31); BAssertUtil.validateError(negativeResult, i++, "undefined field 'a' in 'B'", 45, 16); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(string|int)'", 63, 20); BAssertUtil.validateError(negativeResult, i++, "undefined symbol 'a'", 69, 8); BAssertUtil.validateError(negativeResult, i++, "undefined symbol 'a'", 70, 16); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'string'", 131, 22); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected '(int|string|boolean)', found 'float'", 133, 13); BAssertUtil.validateError(negativeResult, i++, "a type compatible with mapping constructor expressions not found in " + "type '(int|string|boolean)'", 137, 9); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found '(int|string)'", 154, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found '(int|string)'", 167, 21); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found '(int|boolean)'", 181, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found '(int|boolean)'", 199, 17); 
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(float|string)'", 201, 20); BAssertUtil.validateError(negativeResult, i++, "unknown type 'T'", 216, 30); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(Person|Student)'", 240, 20); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 257, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '" + "(string|boolean)'", 262, 24); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 274, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 293, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 296, 29); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 298, 29); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 300, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(int|string|boolean)'", 301, 28); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 303, 21); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(int|string|boolean)'", 304, 24); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected '(int|string)', found '(int|string|boolean)'", 307, 24); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(int|string|boolean)'", 308, 20); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(string|int)'", 318, 21); 
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found '(string|int)'", 320, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found 'string?'", 328, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'int?'", 343, 22); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'int?'", 355, 22); BAssertUtil.validateError(negativeResult, i++, "undefined symbol 'j'", 377, 17); Assert.assertEquals(negativeResult.getErrorCount(), i); } @Test public void testValueTypeInUnion() { BValue[] returns = BRunUtil.invoke(result, "testValueTypeInUnion"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "int: 5"); } @Test public void testSimpleRecordTypes_1() { BValue[] returns = BRunUtil.invoke(result, "testSimpleRecordTypes_1"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "foo"); } @Test public void testSimpleRecordTypes_2() { BValue[] returns = BRunUtil.invoke(result, "testSimpleRecordTypes_2"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "foo-bar"); } @Test public void testSimpleTernary() { BValue[] returns = BRunUtil.invoke(result, "testSimpleTernary"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "hello"); } @Test public void testMultipleTypeGuardsWithAndOperator() { BValue[] returns = BRunUtil.invoke(result, "testMultipleTypeGuardsWithAndOperator"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); Assert.assertEquals(((BInteger) returns[0]).intValue(), 12); } @Test public void 
testMultipleTypeGuardsWithAndOperatorInTernary() { BValue[] returns = BRunUtil.invoke(result, "testMultipleTypeGuardsWithAndOperatorInTernary"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); Assert.assertEquals(((BInteger) returns[0]).intValue(), 12); } @Test public void testTypeGuardInElse_1() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_1"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "int: 5"); } @Test public void testTypeGuardInElse_2() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_2"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "boolean: true"); } @Test public void testTypeGuardInElse_3() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_3"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "x is boolean and y is boolean: false"); } @Test public void testTypeGuardInElse_4() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_4"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "1st round: x is boolean and y is boolean: false | " + "2nd round: x is boolean and y is boolean: false"); } @Test public void testTypeGuardInElse_5() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_5"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "x is int: 5"); } @Test public void testTypeGuardInElse_6() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_6"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); 
Assert.assertEquals(returns[0].stringValue(), "int: 5"); } @Test public void testTypeGuardInElse_7() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_7"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "int: 5"); } @Test public void testComplexTernary_1() { BValue[] returns = BRunUtil.invoke(result, "testComplexTernary_1"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "string"); } @Test public void testComplexTernary_2() { BValue[] returns = BRunUtil.invoke(result, "testComplexTernary_2"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "string"); } @Test public void testArray() { BValue[] returns = BRunUtil.invoke(result, "testArray"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); Assert.assertEquals(((BInteger) returns[0]).intValue(), 20); } @Test public void testUpdatingGuardedVar_1() { BValue[] returns = BRunUtil.invoke(result, "testUpdatingGuardedVar_1"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "BALLERINA - updated"); } @Test public void testUpdatingGuardedVar_2() { BValue[] returns = BRunUtil.invoke(result, "testUpdatingGuardedVar_2"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "BALLERINA - updated once - updated via function"); } @Test public void testFuncPtrTypeInferenceInElseGuard() { BValue[] returns = BRunUtil.invoke(result, "testFuncPtrTypeInferenceInElseGuard"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); Assert.assertEquals(((BInteger) returns[1]).intValue(), 100); } @Test public void 
testTypeGuardNegation() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardNegation", new BValue[] { new BInteger(4) }); Assert.assertEquals(returns[0].stringValue(), "int: 4"); returns = BRunUtil.invoke(result, "testTypeGuardNegation", new BValue[] { new BBoolean(true) }); Assert.assertEquals(returns[0].stringValue(), "boolean: true"); returns = BRunUtil.invoke(result, "testTypeGuardNegation", new BValue[] { new BString("Hello") }); Assert.assertEquals(returns[0].stringValue(), "string: Hello"); } @Test public void testTypeGuardsWithBinaryOps() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOps", new BValue[] { new BInteger(4) }); Assert.assertEquals(returns[0].stringValue(), "int: 4"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOps", new BValue[] { new BBoolean(true) }); Assert.assertEquals(returns[0].stringValue(), "boolean: true"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOps", new BValue[] { new BString("Hello") }); Assert.assertEquals(returns[0].stringValue(), "string: Hello"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOps", new BValue[] { new BFloat(4.5) }); Assert.assertEquals(returns[0].stringValue(), "float: 4.5"); } @Test public void testTypeGuardsWithRecords_1() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithRecords_1"); Assert.assertEquals(returns[0].stringValue(), "John"); } @Test public void testTypeGuardsWithRecords_2() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithRecords_2"); Assert.assertEquals(returns[0].stringValue(), "student: John"); } @Test public void testTypeGuardsWithError() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithError"); Assert.assertEquals(returns[0].stringValue(), "status: 500"); } @Test(groups = { "brokenOnJBallerina", "brokenOnNewParser" }) public void testTypeGuardsWithErrorInmatch() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithErrorInmatch"); 
Assert.assertEquals(returns[0].stringValue(), "some error"); } @Test public void testTypeNarrowingWithClosures() { BValue[] returns = BRunUtil.invoke(result, "testTypeNarrowingWithClosures"); Assert.assertEquals(returns[0].stringValue(), "int: 8"); } @Test public void testTypeGuardsWithBinaryAnd() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryAnd", new BValue[] { new BInteger(2) }); Assert.assertEquals(returns[0].stringValue(), "int: 2 is < 5"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryAnd", new BValue[] { new BInteger(6) }); Assert.assertEquals(returns[0].stringValue(), "int: 6 is >= 5"); } @Test public void testTypeGuardsWithBinaryOpsInTernary() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOpsInTernary", new BValue[] { new BInteger(4) }); Assert.assertEquals(returns[0].stringValue(), "int: 4"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOpsInTernary", new BValue[] { new BBoolean(true) }); Assert.assertEquals(returns[0].stringValue(), "boolean: true"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOpsInTernary", new BValue[] { new BString("Hello") }); Assert.assertEquals(returns[0].stringValue(), "string: Hello"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOpsInTernary", new BValue[] { new BFloat(4.5) }); Assert.assertEquals(returns[0].stringValue(), "float: 4.5"); } @Test public void testUpdatingTypeNarrowedVar_1() { BValue[] returns = BRunUtil.invoke(result, "testUpdatingTypeNarrowedVar_1"); Assert.assertEquals(returns[0].stringValue(), "string: hello"); } @Test public void testUpdatingTypeNarrowedVar_2() { BValue[] returns = BRunUtil.invoke(result, "testUpdatingTypeNarrowedVar_2", new BValue[] { new BInteger(2) }); Assert.assertEquals(returns[0].stringValue(), "int: 2"); returns = BRunUtil.invoke(result, "testUpdatingTypeNarrowedVar_2", new BValue[] { new BInteger(8) }); Assert.assertEquals(returns[0].stringValue(), "int: -1"); } @Test public void 
testUpdatingTypeNarrowedVar_3() { BValue[] returns = BRunUtil.invoke(result, "testUpdatingTypeNarrowedVar_3"); Assert.assertEquals(returns[0].stringValue(), "string: hello"); } @Test public void testTypeGuardForGlobalVars() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForGlobalVarsUsingLocalAssignment"); Assert.assertEquals(returns[0].stringValue(), "e1"); Assert.assertEquals(returns[1].stringValue(), "e2"); } @Test(dataProvider = "finiteTypeAsBroaderTypesFunctions") public void testFiniteTypeAsBroaderTypes(String function) { BValue[] returns = BRunUtil.invoke(result, function); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test(dataProvider = "finiteTypeAsBroaderTypesAndFiniteTypeFunctions") public void testFiniteTypeAsBroaderTypesAndFiniteType(String function) { BValue[] returns = BRunUtil.invoke(result, function); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test(dataProvider = "typeNarrowingForIntersectingUnions") public void testTypeNarrowingForIntersectingUnions(String function) { BValue[] returns = BRunUtil.invoke(result, function); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test(dataProvider = "valueTypesAsFiniteTypesFunctions") public void testValueTypesAsFiniteTypesFunctions(String function) { BValue[] returns = BRunUtil.invoke(result, function); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testFiniteTypeAsBroaderTypeInStructureNegative() { BValue[] returns = BRunUtil.invoke(result, "testFiniteTypeAsBroaderTypeInStructureNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testFiniteTypeAsFiniteTypeWithIntersectionNegative() { BValue[] returns = BRunUtil.invoke(result, "testFiniteTypeAsFiniteTypeWithIntersectionNegative"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testFiniteTypeReassignmentToBroaderType() { BValue[] returns = BRunUtil.invoke(result, 
"testFiniteTypeReassignmentToBroaderType"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @DataProvider(name = "finiteTypeAsBroaderTypesFunctions") public Object[][] finiteTypeAsBroaderTypesFunctions() { return new Object[][]{ {"testFiniteTypeAsBroaderTypes_1"}, {"testFiniteTypeAsBroaderTypes_2"}, {"testFiniteTypeAsBroaderTypes_3"}, {"testFiniteTypeAsBroaderTypes_4"} }; } @DataProvider(name = "finiteTypeAsBroaderTypesAndFiniteTypeFunctions") public Object[][] finiteTypeAsBroaderTypesAndFiniteTypeFunctions() { return new Object[][]{ {"testFiniteTypeAsBroaderTypesAndFiniteType_1"}, {"testFiniteTypeAsBroaderTypesAndFiniteType_2"}, {"testFiniteTypeAsBroaderTypesAndFiniteType_3"}, {"testFiniteTypeAsBroaderTypesAndFiniteType_4"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_1"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_2"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_3"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_4"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_5"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_6"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_7"}, {"testFiniteTypeAsFiniteTypeWithIntersectionPositive"}, {"testFiniteTypeAsBroaderTypeInStructurePositive"} }; } @DataProvider(name = "typeNarrowingForIntersectingUnions") public Object[][] typeNarrowingForIntersectingUnions() { return new Object[][]{ {"testTypeNarrowingForIntersectingDirectUnion_1"}, {"testTypeNarrowingForIntersectingDirectUnion_2"}, {"testTypeNarrowingForIntersectingAssignableUnion_1"}, {"testTypeNarrowingForIntersectingAssignableUnion_2"}, {"testTypeNarrowingForIntersectingUnionWithRecords"} }; } @DataProvider(name = "valueTypesAsFiniteTypesFunctions") public Object[][] valueTypesAsFiniteTypesFunctions() { return new Object[][]{ {"testTypeNarrowingForValueTypeAsFiniteType_1"}, {"testTypeNarrowingForValueTypeAsFiniteType_2"} }; } @Test public void testFiniteTypeUnionAsFiniteTypeUnionPositive() { BValue[] returns = BRunUtil.invoke(result, 
"testFiniteTypeUnionAsFiniteTypeUnionPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testFiniteTypeUnionAsFiniteTypeUnionNegative() { BValue[] returns = BRunUtil.invoke(result, "testFiniteTypeUnionAsFiniteTypeUnionNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForErrorPositive() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForErrorPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForErrorNegative() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForErrorNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForCustomErrorPositive() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForCustomErrorPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); Assert.assertTrue(((BBoolean) returns[1]).booleanValue()); } @Test public void testTypeGuardForCustomErrorNegative() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForCustomErrorNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForTupleDestructuringAssignmentPositive() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForTupleDestructuringAssignmentPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForTupleDestructuringAssignmentNegative() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForTupleDestructuringAssignmentNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForRecordDestructuringAssignmentPositive() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForRecordDestructuringAssignmentPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForRecordDestructuringAssignmentNegative() { BValue[] returns = 
BRunUtil.invoke(result, "testTypeGuardForRecordDestructuringAssignmentNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test(groups = { "brokenOnJBallerina", "brokenOnNewParser"}) public void testTypeGuardForErrorDestructuringAssignmentPositive() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForErrorDestructuringAssignmentPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test(groups = { "brokenOnJBallerina", "brokenOnNewParser" }) public void testTypeGuardForErrorDestructuringAssignmentNegative() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForErrorDestructuringAssignmentNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testNarrowedTypeResetWithMultipleBranches() { BRunUtil.invoke(result, "testNarrowedTypeResetWithMultipleBranches"); } @Test public void testNarrowedTypeResetWithNestedTypeGuards() { BRunUtil.invoke(result, "testNarrowedTypeResetWithNestedTypeGuards"); } @Test public void testSameVarNameInDifferentScopes() { BRunUtil.invoke(result, "testSameVarNameInDifferentScopes"); } @Test(description = "Test Typetest for TypeDefs when types are equal") public void testTypetestForTypedefs1() { BValue[] returns = BRunUtil.invoke(result, "testTypeDescTypeTest1"); Assert.assertEquals(BBoolean.TRUE, returns[0]); } @Test(description = "Test Typetest for TypeDefs when types are not equal") public void testTypetestForTypedefs2() { BValue[] returns = BRunUtil.invoke(result, "testTypeDescTypeTest2"); Assert.assertEquals(BBoolean.TRUE, returns[0]); } @AfterClass public void tearDown() { result = null; } }
class TypeGuardTest { CompileResult result; @BeforeClass public void setup() { result = BCompileUtil.compile("test-src/statements/ifelse/type-guard.bal"); } @Test @Test public void testTypeGuardSemanticsNegative() { CompileResult negativeResult = BCompileUtil.compile( "test-src/statements/ifelse/type-guard-semantics-negative.bal"); int i = 0; BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'string'", 22, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found 'int'", 25, 20); BAssertUtil.validateError(negativeResult, i++, "undefined field 'b' in 'A'", 43, 16); BAssertUtil.validateError(negativeResult, i++, "undefined field 'c' in 'A'", 43, 31); BAssertUtil.validateError(negativeResult, i++, "undefined field 'a' in 'B'", 45, 16); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(string|int)'", 63, 20); BAssertUtil.validateError(negativeResult, i++, "undefined symbol 'a'", 69, 8); BAssertUtil.validateError(negativeResult, i++, "undefined symbol 'a'", 70, 16); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'string'", 131, 22); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected '(int|string|boolean)', found 'float'", 133, 13); BAssertUtil.validateError(negativeResult, i++, "a type compatible with mapping constructor expressions not found in " + "type '(int|string|boolean)'", 137, 9); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found '(int|string)'", 154, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found '(int|string)'", 167, 21); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found '(int|boolean)'", 181, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found '(int|boolean)'", 199, 17); 
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(float|string)'", 201, 20); BAssertUtil.validateError(negativeResult, i++, "unknown type 'T'", 216, 30); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(Person|Student)'", 240, 20); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 257, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '" + "(string|boolean)'", 262, 24); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 274, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 293, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 296, 29); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 298, 29); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 300, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(int|string|boolean)'", 301, 28); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found " + "'(int|string|boolean)'", 303, 21); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(int|string|boolean)'", 304, 24); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected '(int|string)', found '(int|string|boolean)'", 307, 24); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(int|string|boolean)'", 308, 20); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found '(string|int)'", 318, 21); 
BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found '(string|int)'", 320, 17); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'string', found 'string?'", 328, 25); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'int?'", 343, 22); BAssertUtil.validateError(negativeResult, i++, "incompatible types: expected 'int', found 'int?'", 355, 22); BAssertUtil.validateError(negativeResult, i++, "undefined symbol 'j'", 377, 17); Assert.assertEquals(negativeResult.getErrorCount(), i); } @Test public void testValueTypeInUnion() { BValue[] returns = BRunUtil.invoke(result, "testValueTypeInUnion"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "int: 5"); } @Test public void testSimpleRecordTypes_1() { BValue[] returns = BRunUtil.invoke(result, "testSimpleRecordTypes_1"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "foo"); } @Test public void testSimpleRecordTypes_2() { BValue[] returns = BRunUtil.invoke(result, "testSimpleRecordTypes_2"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "foo-bar"); } @Test public void testSimpleTernary() { BValue[] returns = BRunUtil.invoke(result, "testSimpleTernary"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "hello"); } @Test public void testMultipleTypeGuardsWithAndOperator() { BValue[] returns = BRunUtil.invoke(result, "testMultipleTypeGuardsWithAndOperator"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); Assert.assertEquals(((BInteger) returns[0]).intValue(), 12); } @Test public void 
testMultipleTypeGuardsWithAndOperatorInTernary() { BValue[] returns = BRunUtil.invoke(result, "testMultipleTypeGuardsWithAndOperatorInTernary"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); Assert.assertEquals(((BInteger) returns[0]).intValue(), 12); } @Test public void testTypeGuardInElse_1() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_1"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "int: 5"); } @Test public void testTypeGuardInElse_2() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_2"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "boolean: true"); } @Test public void testTypeGuardInElse_3() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_3"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "x is boolean and y is boolean: false"); } @Test public void testTypeGuardInElse_4() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_4"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "1st round: x is boolean and y is boolean: false | " + "2nd round: x is boolean and y is boolean: false"); } @Test public void testTypeGuardInElse_5() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_5"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "x is int: 5"); } @Test public void testTypeGuardInElse_6() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_6"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); 
Assert.assertEquals(returns[0].stringValue(), "int: 5"); } @Test public void testTypeGuardInElse_7() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardInElse_7"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "int: 5"); } @Test public void testComplexTernary_1() { BValue[] returns = BRunUtil.invoke(result, "testComplexTernary_1"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "string"); } @Test public void testComplexTernary_2() { BValue[] returns = BRunUtil.invoke(result, "testComplexTernary_2"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "string"); } @Test public void testArray() { BValue[] returns = BRunUtil.invoke(result, "testArray"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BInteger.class); Assert.assertEquals(((BInteger) returns[0]).intValue(), 20); } @Test public void testUpdatingGuardedVar_1() { BValue[] returns = BRunUtil.invoke(result, "testUpdatingGuardedVar_1"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "BALLERINA - updated"); } @Test public void testUpdatingGuardedVar_2() { BValue[] returns = BRunUtil.invoke(result, "testUpdatingGuardedVar_2"); Assert.assertEquals(returns.length, 1); Assert.assertSame(returns[0].getClass(), BString.class); Assert.assertEquals(returns[0].stringValue(), "BALLERINA - updated once - updated via function"); } @Test public void testFuncPtrTypeInferenceInElseGuard() { BValue[] returns = BRunUtil.invoke(result, "testFuncPtrTypeInferenceInElseGuard"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); Assert.assertEquals(((BInteger) returns[1]).intValue(), 100); } @Test public void 
testTypeGuardNegation() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardNegation", new BValue[] { new BInteger(4) }); Assert.assertEquals(returns[0].stringValue(), "int: 4"); returns = BRunUtil.invoke(result, "testTypeGuardNegation", new BValue[] { new BBoolean(true) }); Assert.assertEquals(returns[0].stringValue(), "boolean: true"); returns = BRunUtil.invoke(result, "testTypeGuardNegation", new BValue[] { new BString("Hello") }); Assert.assertEquals(returns[0].stringValue(), "string: Hello"); } @Test public void testTypeGuardsWithBinaryOps() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOps", new BValue[] { new BInteger(4) }); Assert.assertEquals(returns[0].stringValue(), "int: 4"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOps", new BValue[] { new BBoolean(true) }); Assert.assertEquals(returns[0].stringValue(), "boolean: true"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOps", new BValue[] { new BString("Hello") }); Assert.assertEquals(returns[0].stringValue(), "string: Hello"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOps", new BValue[] { new BFloat(4.5) }); Assert.assertEquals(returns[0].stringValue(), "float: 4.5"); } @Test public void testTypeGuardsWithRecords_1() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithRecords_1"); Assert.assertEquals(returns[0].stringValue(), "John"); } @Test public void testTypeGuardsWithRecords_2() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithRecords_2"); Assert.assertEquals(returns[0].stringValue(), "student: John"); } @Test public void testTypeGuardsWithError() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithError"); Assert.assertEquals(returns[0].stringValue(), "status: 500"); } @Test(groups = { "brokenOnJBallerina", "brokenOnNewParser" }) public void testTypeGuardsWithErrorInmatch() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithErrorInmatch"); 
Assert.assertEquals(returns[0].stringValue(), "some error"); } @Test public void testTypeNarrowingWithClosures() { BValue[] returns = BRunUtil.invoke(result, "testTypeNarrowingWithClosures"); Assert.assertEquals(returns[0].stringValue(), "int: 8"); } @Test public void testTypeGuardsWithBinaryAnd() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryAnd", new BValue[] { new BInteger(2) }); Assert.assertEquals(returns[0].stringValue(), "int: 2 is < 5"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryAnd", new BValue[] { new BInteger(6) }); Assert.assertEquals(returns[0].stringValue(), "int: 6 is >= 5"); } @Test public void testTypeGuardsWithBinaryOpsInTernary() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOpsInTernary", new BValue[] { new BInteger(4) }); Assert.assertEquals(returns[0].stringValue(), "int: 4"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOpsInTernary", new BValue[] { new BBoolean(true) }); Assert.assertEquals(returns[0].stringValue(), "boolean: true"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOpsInTernary", new BValue[] { new BString("Hello") }); Assert.assertEquals(returns[0].stringValue(), "string: Hello"); returns = BRunUtil.invoke(result, "testTypeGuardsWithBinaryOpsInTernary", new BValue[] { new BFloat(4.5) }); Assert.assertEquals(returns[0].stringValue(), "float: 4.5"); } @Test public void testUpdatingTypeNarrowedVar_1() { BValue[] returns = BRunUtil.invoke(result, "testUpdatingTypeNarrowedVar_1"); Assert.assertEquals(returns[0].stringValue(), "string: hello"); } @Test public void testUpdatingTypeNarrowedVar_2() { BValue[] returns = BRunUtil.invoke(result, "testUpdatingTypeNarrowedVar_2", new BValue[] { new BInteger(2) }); Assert.assertEquals(returns[0].stringValue(), "int: 2"); returns = BRunUtil.invoke(result, "testUpdatingTypeNarrowedVar_2", new BValue[] { new BInteger(8) }); Assert.assertEquals(returns[0].stringValue(), "int: -1"); } @Test public void 
testUpdatingTypeNarrowedVar_3() { BValue[] returns = BRunUtil.invoke(result, "testUpdatingTypeNarrowedVar_3"); Assert.assertEquals(returns[0].stringValue(), "string: hello"); } @Test public void testTypeGuardForGlobalVars() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForGlobalVarsUsingLocalAssignment"); Assert.assertEquals(returns[0].stringValue(), "e1"); Assert.assertEquals(returns[1].stringValue(), "e2"); } @Test(dataProvider = "finiteTypeAsBroaderTypesFunctions") public void testFiniteTypeAsBroaderTypes(String function) { BValue[] returns = BRunUtil.invoke(result, function); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test(dataProvider = "finiteTypeAsBroaderTypesAndFiniteTypeFunctions") public void testFiniteTypeAsBroaderTypesAndFiniteType(String function) { BValue[] returns = BRunUtil.invoke(result, function); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test(dataProvider = "typeNarrowingForIntersectingUnions") public void testTypeNarrowingForIntersectingUnions(String function) { BValue[] returns = BRunUtil.invoke(result, function); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test(dataProvider = "valueTypesAsFiniteTypesFunctions") public void testValueTypesAsFiniteTypesFunctions(String function) { BValue[] returns = BRunUtil.invoke(result, function); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testFiniteTypeAsBroaderTypeInStructureNegative() { BValue[] returns = BRunUtil.invoke(result, "testFiniteTypeAsBroaderTypeInStructureNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testFiniteTypeAsFiniteTypeWithIntersectionNegative() { BValue[] returns = BRunUtil.invoke(result, "testFiniteTypeAsFiniteTypeWithIntersectionNegative"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testFiniteTypeReassignmentToBroaderType() { BValue[] returns = BRunUtil.invoke(result, 
"testFiniteTypeReassignmentToBroaderType"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @DataProvider(name = "finiteTypeAsBroaderTypesFunctions") public Object[][] finiteTypeAsBroaderTypesFunctions() { return new Object[][]{ {"testFiniteTypeAsBroaderTypes_1"}, {"testFiniteTypeAsBroaderTypes_2"}, {"testFiniteTypeAsBroaderTypes_3"}, {"testFiniteTypeAsBroaderTypes_4"} }; } @DataProvider(name = "finiteTypeAsBroaderTypesAndFiniteTypeFunctions") public Object[][] finiteTypeAsBroaderTypesAndFiniteTypeFunctions() { return new Object[][]{ {"testFiniteTypeAsBroaderTypesAndFiniteType_1"}, {"testFiniteTypeAsBroaderTypesAndFiniteType_2"}, {"testFiniteTypeAsBroaderTypesAndFiniteType_3"}, {"testFiniteTypeAsBroaderTypesAndFiniteType_4"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_1"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_2"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_3"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_4"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_5"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_6"}, {"testFiniteTypeInUnionAsComplexFiniteTypes_7"}, {"testFiniteTypeAsFiniteTypeWithIntersectionPositive"}, {"testFiniteTypeAsBroaderTypeInStructurePositive"} }; } @DataProvider(name = "typeNarrowingForIntersectingUnions") public Object[][] typeNarrowingForIntersectingUnions() { return new Object[][]{ {"testTypeNarrowingForIntersectingDirectUnion_1"}, {"testTypeNarrowingForIntersectingDirectUnion_2"}, {"testTypeNarrowingForIntersectingAssignableUnion_1"}, {"testTypeNarrowingForIntersectingAssignableUnion_2"}, {"testTypeNarrowingForIntersectingUnionWithRecords"} }; } @DataProvider(name = "valueTypesAsFiniteTypesFunctions") public Object[][] valueTypesAsFiniteTypesFunctions() { return new Object[][]{ {"testTypeNarrowingForValueTypeAsFiniteType_1"}, {"testTypeNarrowingForValueTypeAsFiniteType_2"} }; } @Test public void testFiniteTypeUnionAsFiniteTypeUnionPositive() { BValue[] returns = BRunUtil.invoke(result, 
"testFiniteTypeUnionAsFiniteTypeUnionPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testFiniteTypeUnionAsFiniteTypeUnionNegative() { BValue[] returns = BRunUtil.invoke(result, "testFiniteTypeUnionAsFiniteTypeUnionNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForErrorPositive() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForErrorPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForErrorNegative() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForErrorNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForCustomErrorPositive() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForCustomErrorPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); Assert.assertTrue(((BBoolean) returns[1]).booleanValue()); } @Test public void testTypeGuardForCustomErrorNegative() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForCustomErrorNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForTupleDestructuringAssignmentPositive() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForTupleDestructuringAssignmentPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForTupleDestructuringAssignmentNegative() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForTupleDestructuringAssignmentNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForRecordDestructuringAssignmentPositive() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForRecordDestructuringAssignmentPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test public void testTypeGuardForRecordDestructuringAssignmentNegative() { BValue[] returns = 
BRunUtil.invoke(result, "testTypeGuardForRecordDestructuringAssignmentNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test(groups = { "brokenOnJBallerina", "brokenOnNewParser"}) public void testTypeGuardForErrorDestructuringAssignmentPositive() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForErrorDestructuringAssignmentPositive"); Assert.assertTrue(((BBoolean) returns[0]).booleanValue()); } @Test(groups = { "brokenOnJBallerina", "brokenOnNewParser" }) public void testTypeGuardForErrorDestructuringAssignmentNegative() { BValue[] returns = BRunUtil.invoke(result, "testTypeGuardForErrorDestructuringAssignmentNegative"); Assert.assertFalse(((BBoolean) returns[0]).booleanValue()); } @Test public void testNarrowedTypeResetWithMultipleBranches() { BRunUtil.invoke(result, "testNarrowedTypeResetWithMultipleBranches"); } @Test public void testNarrowedTypeResetWithNestedTypeGuards() { BRunUtil.invoke(result, "testNarrowedTypeResetWithNestedTypeGuards"); } @Test public void testSameVarNameInDifferentScopes() { BRunUtil.invoke(result, "testSameVarNameInDifferentScopes"); } @Test(description = "Test Typetest for TypeDefs when types are equal") public void testTypetestForTypedefs1() { BValue[] returns = BRunUtil.invoke(result, "testTypeDescTypeTest1"); Assert.assertEquals(BBoolean.TRUE, returns[0]); } @Test(description = "Test Typetest for TypeDefs when types are not equal") public void testTypetestForTypedefs2() { BValue[] returns = BRunUtil.invoke(result, "testTypeDescTypeTest2"); Assert.assertEquals(BBoolean.TRUE, returns[0]); } @AfterClass public void tearDown() { result = null; } }
AvroUtils.toAvroSchema() method provides an avroSchema from the BeamSchema. But the avroSchema geenrated does not have the name of the avroSchema. Now when I use this avroSchema (without the name) to write/read records it fails with error SchemaParseException: No name in schema: {"type":"record","fields": ... } So I had to provide the name for the schema, I have provided the same using the tableName.
public PDone buildIOWriter(PCollection<Row> input) { PTransform<PCollection<Row>, PCollection<GenericRecord>> writeConverter = GenericRecordWriteConverter.builder().beamSchema(schema).build(); return input .apply("GenericRecordToRow", writeConverter) .apply( "AvroIOWrite", AvroIO.writeGenericRecords(AvroUtils.toAvroSchema(schema, tableName, null)) .to(filePattern) .withoutSharding()); }
AvroIO.writeGenericRecords(AvroUtils.toAvroSchema(schema, tableName, null))
public PDone buildIOWriter(PCollection<Row> input) { PTransform<PCollection<Row>, PCollection<GenericRecord>> writeConverter = GenericRecordWriteConverter.builder().beamSchema(schema).build(); return input .apply("GenericRecordToRow", writeConverter) .apply( "AvroIOWrite", AvroIO.writeGenericRecords(AvroUtils.toAvroSchema(schema, tableName, null)) .to(filePattern) .withoutSharding()); }
class AvroTable extends BaseBeamTable implements Serializable { private final String filePattern; private final String tableName; public AvroTable(String tableName, Schema beamSchema, String filePattern) { super(beamSchema); this.filePattern = filePattern; this.tableName = tableName; } @Override public PCollection<Row> buildIOReader(PBegin begin) { PTransform<PCollection<GenericRecord>, PCollection<Row>> readConverter = GenericRecordReadConverter.builder().beamSchema(schema).build(); return begin .apply( "AvroIORead", AvroIO.readGenericRecords(AvroUtils.toAvroSchema(schema, tableName, null)) .from(filePattern)) .apply("GenericRecordToRow", readConverter); } @Override @Override public PCollection.IsBounded isBounded() { return PCollection.IsBounded.BOUNDED; } @Override public BeamTableStatistics getTableStatistics(PipelineOptions options) { return BeamTableStatistics.BOUNDED_UNKNOWN; } }
class AvroTable extends BaseBeamTable implements Serializable { private final String filePattern; private final String tableName; public AvroTable(String tableName, Schema beamSchema, String filePattern) { super(beamSchema); this.filePattern = filePattern; this.tableName = tableName; } @Override public PCollection<Row> buildIOReader(PBegin begin) { return begin .apply( "AvroIORead", AvroIO.readGenericRecords(AvroUtils.toAvroSchema(schema, tableName, null)) .withBeamSchemas(true) .from(filePattern)) .apply("GenericRecordToRow", Convert.toRows()); } @Override @Override public PCollection.IsBounded isBounded() { return PCollection.IsBounded.BOUNDED; } @Override public BeamTableStatistics getTableStatistics(PipelineOptions options) { return BeamTableStatistics.BOUNDED_UNKNOWN; } }
Based on the known information, it seems that using a time-based gc is already effectively controlling memory usage. Directly limiting memory usage to a maximum of only 5% would introduce other variables and compromise stability. It is recommended to maintain consistency with previous behavior for now.
public void getBinlogInfo(Database db, BaseProcResult result) { BinlogConfig binlogConfig = binlogConfigCache.getTableBinlogConfig(dbId, tableId); String tableName = null; String dropped = null; if (db == null) { tableName = "(dropped).(unknown)"; dropped = "true"; } else { String dbName = db.getFullName(); Table table = db.getTableNullable(tableId); if (table == null) { dropped = "true"; tableName = dbName + ".(dropped)"; } dropped = "false"; if (table instanceof OlapTable) { OlapTable olapTable = (OlapTable) table; tableName = dbName + "." + olapTable.getName(); } else { tableName = dbName + ".(not_olaptable)"; } } lock.readLock().lock(); try { List<String> info = new ArrayList<>(); info.add(tableName); String type = "table"; info.add(type); String id = String.valueOf(tableId); info.add(id); info.add(dropped); String binlogLength = String.valueOf(binlogs.size()); info.add(binlogLength); String binlogSize = String.valueOf(this.binlogSize); info.add(binlogSize); String firstBinlogCommittedTime = null; String readableFirstBinlogCommittedTime = null; for (TBinlog binlog : binlogs) { long timestamp = binlog.getTimestamp(); if (timestamp != -1) { firstBinlogCommittedTime = String.valueOf(timestamp); readableFirstBinlogCommittedTime = BinlogUtils.convertTimeToReadable(timestamp); break; } } info.add(firstBinlogCommittedTime); info.add(readableFirstBinlogCommittedTime); String lastBinlogCommittedTime = null; String readableLastBinlogCommittedTime = null; Iterator<TBinlog> iterator = binlogs.descendingIterator(); while (iterator.hasNext()) { TBinlog binlog = iterator.next(); long timestamp = binlog.getTimestamp(); if (timestamp != -1) { lastBinlogCommittedTime = String.valueOf(timestamp); readableLastBinlogCommittedTime = BinlogUtils.convertTimeToReadable(timestamp); break; } } info.add(lastBinlogCommittedTime); info.add(readableLastBinlogCommittedTime); String binlogTtlSeconds = null; String binlogMaxBytes = null; String binlogMaxHistoryNums = null; if (binlogConfig != 
null) { binlogTtlSeconds = String.valueOf(binlogConfig.getTtlSeconds()); binlogMaxBytes = String.valueOf(binlogConfig.getMaxBytes()); binlogMaxHistoryNums = String.valueOf(binlogConfig.getMaxHistoryNums()); } info.add(binlogTtlSeconds); info.add(binlogMaxBytes); info.add(binlogMaxHistoryNums); result.addRow(info); } finally { lock.readLock().unlock(); } }
binlogMaxBytes = String.valueOf(binlogConfig.getMaxBytes());
public void getBinlogInfo(Database db, BaseProcResult result) { BinlogConfig binlogConfig = binlogConfigCache.getTableBinlogConfig(dbId, tableId); String tableName = null; String dropped = null; if (db == null) { tableName = "(dropped).(unknown)"; dropped = "true"; } else { String dbName = db.getFullName(); Table table = db.getTableNullable(tableId); if (table == null) { dropped = "true"; tableName = dbName + ".(dropped)"; } dropped = "false"; if (table instanceof OlapTable) { OlapTable olapTable = (OlapTable) table; tableName = dbName + "." + olapTable.getName(); } else { tableName = dbName + ".(not_olaptable)"; } } lock.readLock().lock(); try { List<String> info = new ArrayList<>(); info.add(tableName); String type = "table"; info.add(type); String id = String.valueOf(tableId); info.add(id); info.add(dropped); String binlogLength = String.valueOf(binlogs.size()); info.add(binlogLength); String binlogSize = String.valueOf(this.binlogSize); info.add(binlogSize); String firstBinlogCommittedTime = null; String readableFirstBinlogCommittedTime = null; for (TBinlog binlog : binlogs) { long timestamp = binlog.getTimestamp(); if (timestamp != -1) { firstBinlogCommittedTime = String.valueOf(timestamp); readableFirstBinlogCommittedTime = BinlogUtils.convertTimeToReadable(timestamp); break; } } info.add(firstBinlogCommittedTime); info.add(readableFirstBinlogCommittedTime); String lastBinlogCommittedTime = null; String readableLastBinlogCommittedTime = null; Iterator<TBinlog> iterator = binlogs.descendingIterator(); while (iterator.hasNext()) { TBinlog binlog = iterator.next(); long timestamp = binlog.getTimestamp(); if (timestamp != -1) { lastBinlogCommittedTime = String.valueOf(timestamp); readableLastBinlogCommittedTime = BinlogUtils.convertTimeToReadable(timestamp); break; } } info.add(lastBinlogCommittedTime); info.add(readableLastBinlogCommittedTime); String binlogTtlSeconds = null; String binlogMaxBytes = null; String binlogMaxHistoryNums = null; if (binlogConfig != 
null) { binlogTtlSeconds = String.valueOf(binlogConfig.getTtlSeconds()); binlogMaxBytes = String.valueOf(binlogConfig.getMaxBytes()); binlogMaxHistoryNums = String.valueOf(binlogConfig.getMaxHistoryNums()); } info.add(binlogTtlSeconds); info.add(binlogMaxBytes); info.add(binlogMaxHistoryNums); result.addRow(info); } finally { lock.readLock().unlock(); } }
class TableBinlog { private static final Logger LOG = LogManager.getLogger(TableBinlog.class); private long dbId; private long tableId; private long binlogSize; private ReentrantReadWriteLock lock; private TreeSet<TBinlog> binlogs; private List<Pair<Long, Long>> timestamps; private BinlogConfigCache binlogConfigCache; public TableBinlog(BinlogConfigCache binlogConfigCache, TBinlog binlog, long dbId, long tableId) { this.dbId = dbId; this.tableId = tableId; this.binlogSize = 0; lock = new ReentrantReadWriteLock(); binlogs = Sets.newTreeSet(Comparator.comparingLong(TBinlog::getCommitSeq)); timestamps = Lists.newArrayList(); TBinlog dummy; if (binlog.getType() == TBinlogType.DUMMY) { dummy = binlog; } else { dummy = BinlogUtils.newDummyBinlog(binlog.getDbId(), tableId); } binlogs.add(dummy); this.binlogConfigCache = binlogConfigCache; } public TBinlog getDummyBinlog() { return binlogs.first(); } public long getTableId() { return tableId; } public void recoverBinlog(TBinlog binlog) { TBinlog dummy = getDummyBinlog(); if (binlog.getCommitSeq() > dummy.getCommitSeq()) { binlogs.add(binlog); ++binlog.table_ref; binlogSize += BinlogUtils.getApproximateMemoryUsage(binlog); if (binlog.getTimestamp() > 0) { timestamps.add(Pair.of(binlog.getCommitSeq(), binlog.getTimestamp())); } } } public void addBinlog(TBinlog binlog) { lock.writeLock().lock(); try { binlogs.add(binlog); ++binlog.table_ref; binlogSize += BinlogUtils.getApproximateMemoryUsage(binlog); if (binlog.getTimestamp() > 0) { timestamps.add(Pair.of(binlog.getCommitSeq(), binlog.getTimestamp())); } } finally { lock.writeLock().unlock(); } } public Pair<TStatus, TBinlog> getBinlog(long prevCommitSeq) { lock.readLock().lock(); try { return BinlogUtils.getBinlog(binlogs, prevCommitSeq); } finally { lock.readLock().unlock(); } } public Pair<TStatus, Long> getBinlogLag(long prevCommitSeq) { lock.readLock().lock(); try { return BinlogUtils.getBinlogLag(binlogs, prevCommitSeq); } finally { lock.readLock().unlock(); } } 
private Pair<TBinlog, Long> getLastUpsertAndLargestCommitSeq(BinlogComparator checker) { if (binlogs.size() <= 1) { return null; } Iterator<TBinlog> iter = binlogs.iterator(); TBinlog dummyBinlog = iter.next(); TBinlog tombstoneUpsert = null; TBinlog lastExpiredBinlog = null; while (iter.hasNext()) { TBinlog binlog = iter.next(); if (checker.isExpired(binlog)) { lastExpiredBinlog = binlog; --binlog.table_ref; binlogSize -= BinlogUtils.getApproximateMemoryUsage(binlog); if (binlog.getType() == TBinlogType.UPSERT) { tombstoneUpsert = binlog; } iter.remove(); } else { break; } } if (lastExpiredBinlog == null) { return null; } long expiredCommitSeq = lastExpiredBinlog.getCommitSeq(); dummyBinlog.setCommitSeq(expiredCommitSeq); Iterator<Pair<Long, Long>> timeIterator = timestamps.iterator(); while (timeIterator.hasNext() && timeIterator.next().first <= expiredCommitSeq) { timeIterator.remove(); } return Pair.of(tombstoneUpsert, expiredCommitSeq); } public BinlogTombstone commitSeqGc(long expiredCommitSeq) { Pair<TBinlog, Long> tombstoneInfo; lock.writeLock().lock(); try { BinlogComparator check = (binlog) -> binlog.getCommitSeq() <= expiredCommitSeq; tombstoneInfo = getLastUpsertAndLargestCommitSeq(check); } finally { lock.writeLock().unlock(); } if (tombstoneInfo == null) { return null; } TBinlog lastUpsertBinlog = tombstoneInfo.first; long largestCommitSeq = tombstoneInfo.second; BinlogTombstone tombstone = new BinlogTombstone(tableId, largestCommitSeq); if (lastUpsertBinlog != null) { UpsertRecord upsertRecord = UpsertRecord.fromJson(lastUpsertBinlog.getData()); tombstone.addTableRecord(tableId, upsertRecord); } return tombstone; } public BinlogTombstone gc() { BinlogConfig tableBinlogConfig = binlogConfigCache.getTableBinlogConfig(dbId, tableId); if (tableBinlogConfig == null) { return null; } long ttlSeconds = tableBinlogConfig.getTtlSeconds(); long maxBytes = tableBinlogConfig.getMaxBytes(); long maxHistoryNums = tableBinlogConfig.getMaxHistoryNums(); long 
expiredMs = BinlogUtils.getExpiredMs(ttlSeconds); LOG.info( "gc table binlog. dbId: {}, tableId: {}, expiredMs: {}, ttlSecond: {}, maxBytes: {}, " + "maxHistoryNums: {}, now: {}", dbId, tableId, expiredMs, ttlSeconds, maxBytes, maxHistoryNums, System.currentTimeMillis()); Pair<TBinlog, Long> tombstoneInfo; lock.writeLock().lock(); try { long expiredCommitSeq = -1; Iterator<Pair<Long, Long>> timeIterator = timestamps.iterator(); while (timeIterator.hasNext()) { Pair<Long, Long> entry = timeIterator.next(); if (expiredMs < entry.second) { break; } expiredCommitSeq = entry.first; } final long lastExpiredCommitSeq = expiredCommitSeq; BinlogComparator check = (binlog) -> { return binlog.getCommitSeq() <= lastExpiredCommitSeq || maxBytes < binlogSize || maxHistoryNums < binlogs.size(); }; tombstoneInfo = getLastUpsertAndLargestCommitSeq(check); } finally { lock.writeLock().unlock(); } if (tombstoneInfo == null) { return null; } TBinlog lastUpsertBinlog = tombstoneInfo.first; long largestCommitSeq = tombstoneInfo.second; BinlogTombstone tombstone = new BinlogTombstone(tableId, largestCommitSeq); if (lastUpsertBinlog != null) { UpsertRecord upsertRecord = UpsertRecord.fromJson(lastUpsertBinlog.getData()); tombstone.addTableRecord(tableId, upsertRecord); } return tombstone; } public void replayGc(long largestExpiredCommitSeq) { lock.writeLock().lock(); try { BinlogComparator checker = (binlog) -> binlog.getCommitSeq() <= largestExpiredCommitSeq; getLastUpsertAndLargestCommitSeq(checker); } finally { lock.writeLock().unlock(); } } }
class TableBinlog { private static final Logger LOG = LogManager.getLogger(TableBinlog.class); private long dbId; private long tableId; private long binlogSize; private ReentrantReadWriteLock lock; private TreeSet<TBinlog> binlogs; private List<Pair<Long, Long>> timestamps; private BinlogConfigCache binlogConfigCache; public TableBinlog(BinlogConfigCache binlogConfigCache, TBinlog binlog, long dbId, long tableId) { this.dbId = dbId; this.tableId = tableId; this.binlogSize = 0; lock = new ReentrantReadWriteLock(); binlogs = Sets.newTreeSet(Comparator.comparingLong(TBinlog::getCommitSeq)); timestamps = Lists.newArrayList(); TBinlog dummy; if (binlog.getType() == TBinlogType.DUMMY) { dummy = binlog; } else { dummy = BinlogUtils.newDummyBinlog(binlog.getDbId(), tableId); } binlogs.add(dummy); this.binlogConfigCache = binlogConfigCache; } public TBinlog getDummyBinlog() { return binlogs.first(); } public long getTableId() { return tableId; } public void recoverBinlog(TBinlog binlog) { TBinlog dummy = getDummyBinlog(); if (binlog.getCommitSeq() > dummy.getCommitSeq()) { binlogs.add(binlog); ++binlog.table_ref; binlogSize += BinlogUtils.getApproximateMemoryUsage(binlog); if (binlog.getTimestamp() > 0) { timestamps.add(Pair.of(binlog.getCommitSeq(), binlog.getTimestamp())); } } } public void addBinlog(TBinlog binlog) { lock.writeLock().lock(); try { binlogs.add(binlog); ++binlog.table_ref; binlogSize += BinlogUtils.getApproximateMemoryUsage(binlog); if (binlog.getTimestamp() > 0) { timestamps.add(Pair.of(binlog.getCommitSeq(), binlog.getTimestamp())); } } finally { lock.writeLock().unlock(); } } public Pair<TStatus, TBinlog> getBinlog(long prevCommitSeq) { lock.readLock().lock(); try { return BinlogUtils.getBinlog(binlogs, prevCommitSeq); } finally { lock.readLock().unlock(); } } public Pair<TStatus, Long> getBinlogLag(long prevCommitSeq) { lock.readLock().lock(); try { return BinlogUtils.getBinlogLag(binlogs, prevCommitSeq); } finally { lock.readLock().unlock(); } } 
private Pair<TBinlog, Long> getLastUpsertAndLargestCommitSeq(BinlogComparator checker) { if (binlogs.size() <= 1) { return null; } Iterator<TBinlog> iter = binlogs.iterator(); TBinlog dummyBinlog = iter.next(); TBinlog tombstoneUpsert = null; TBinlog lastExpiredBinlog = null; while (iter.hasNext()) { TBinlog binlog = iter.next(); if (checker.isExpired(binlog)) { lastExpiredBinlog = binlog; --binlog.table_ref; binlogSize -= BinlogUtils.getApproximateMemoryUsage(binlog); if (binlog.getType() == TBinlogType.UPSERT) { tombstoneUpsert = binlog; } iter.remove(); } else { break; } } if (lastExpiredBinlog == null) { return null; } long expiredCommitSeq = lastExpiredBinlog.getCommitSeq(); dummyBinlog.setCommitSeq(expiredCommitSeq); Iterator<Pair<Long, Long>> timeIterator = timestamps.iterator(); while (timeIterator.hasNext() && timeIterator.next().first <= expiredCommitSeq) { timeIterator.remove(); } return Pair.of(tombstoneUpsert, expiredCommitSeq); } public BinlogTombstone commitSeqGc(long expiredCommitSeq) { Pair<TBinlog, Long> tombstoneInfo; lock.writeLock().lock(); try { BinlogComparator check = (binlog) -> binlog.getCommitSeq() <= expiredCommitSeq; tombstoneInfo = getLastUpsertAndLargestCommitSeq(check); } finally { lock.writeLock().unlock(); } if (tombstoneInfo == null) { return null; } TBinlog lastUpsertBinlog = tombstoneInfo.first; long largestCommitSeq = tombstoneInfo.second; BinlogTombstone tombstone = new BinlogTombstone(tableId, largestCommitSeq); if (lastUpsertBinlog != null) { UpsertRecord upsertRecord = UpsertRecord.fromJson(lastUpsertBinlog.getData()); tombstone.addTableRecord(tableId, upsertRecord); } return tombstone; } public BinlogTombstone gc() { BinlogConfig tableBinlogConfig = binlogConfigCache.getTableBinlogConfig(dbId, tableId); if (tableBinlogConfig == null) { return null; } long ttlSeconds = tableBinlogConfig.getTtlSeconds(); long maxBytes = tableBinlogConfig.getMaxBytes(); long maxHistoryNums = tableBinlogConfig.getMaxHistoryNums(); long 
expiredMs = BinlogUtils.getExpiredMs(ttlSeconds); LOG.info( "gc table binlog. dbId: {}, tableId: {}, expiredMs: {}, ttlSecond: {}, maxBytes: {}, " + "maxHistoryNums: {}, now: {}", dbId, tableId, expiredMs, ttlSeconds, maxBytes, maxHistoryNums, System.currentTimeMillis()); Pair<TBinlog, Long> tombstoneInfo; lock.writeLock().lock(); try { long expiredCommitSeq = -1; Iterator<Pair<Long, Long>> timeIterator = timestamps.iterator(); while (timeIterator.hasNext()) { Pair<Long, Long> entry = timeIterator.next(); if (expiredMs < entry.second) { break; } expiredCommitSeq = entry.first; } final long lastExpiredCommitSeq = expiredCommitSeq; BinlogComparator check = (binlog) -> { return binlog.getCommitSeq() <= lastExpiredCommitSeq || maxBytes < binlogSize || maxHistoryNums < binlogs.size(); }; tombstoneInfo = getLastUpsertAndLargestCommitSeq(check); } finally { lock.writeLock().unlock(); } if (tombstoneInfo == null) { return null; } TBinlog lastUpsertBinlog = tombstoneInfo.first; long largestCommitSeq = tombstoneInfo.second; BinlogTombstone tombstone = new BinlogTombstone(tableId, largestCommitSeq); if (lastUpsertBinlog != null) { UpsertRecord upsertRecord = UpsertRecord.fromJson(lastUpsertBinlog.getData()); tombstone.addTableRecord(tableId, upsertRecord); } return tombstone; } public void replayGc(long largestExpiredCommitSeq) { lock.writeLock().lock(); try { BinlogComparator checker = (binlog) -> binlog.getCommitSeq() <= largestExpiredCommitSeq; getLastUpsertAndLargestCommitSeq(checker); } finally { lock.writeLock().unlock(); } } }
This is already done in BuildCommandTest. `testResources` here is a temporary directory with test resources copied from the original location.
public void testBuildProjectPrecompiledWithOlderDistWithoutStickyFlag() throws IOException { Path projectPath = testResources.resolve("dep-dist-version-projects").resolve("preCompiledPackage"); replaceDependenciesTomlContent(projectPath, "**INSERT_DISTRIBUTION_VERSION_HERE**", "2201.5.0"); System.setProperty("user.dir", projectPath.toString()); BuildCommand buildCommand = new BuildCommand(projectPath, printStream, printStream, false); new CommandLine(buildCommand).parseArgs(); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals( buildLog.replaceAll("\r", ""), getOutput("build-old-dist-precomp-proj-with-dep.txt") .replaceAll("INSERT_NEW_DIST_VERSION_HERE", getNewVersionForOldDistWarning()) .replaceAll("INSERT_OLD_DIST_VERSION_HERE", getOldVersionForOldDistWarning("2201.5.0"))); Path actualDependenciesToml = projectPath.resolve("Dependencies.toml"); Path expectedDependenciesToml = testResources.resolve("dep-dist-version-projects").resolve("expected-dep-tomls") .resolve("old-dist-precomp-pkg-with-no-sticky.toml"); Assert.assertTrue(actualDependenciesToml.toFile().exists()); assertTomlFilesEquals(actualDependenciesToml, expectedDependenciesToml); replaceDependenciesTomlContent(projectPath, RepoUtils.getBallerinaShortVersion(), "**INSERT_DISTRIBUTION_VERSION_HERE**"); replaceDependenciesTomlContent(projectPath, "1.1.0", "1.0.0"); deleteDirectory(projectPath.resolve("target")); }
replaceDependenciesTomlContent(projectPath, RepoUtils.getBallerinaShortVersion(),
public void testBuildProjectPrecompiledWithOlderDistWithoutStickyFlag() throws IOException { Path projectPath = testResources.resolve("dep-dist-version-projects").resolve("preCompiledPackage"); replaceDependenciesTomlContent(projectPath, "**INSERT_DISTRIBUTION_VERSION_HERE**", "2201.5.0"); System.setProperty("user.dir", projectPath.toString()); BuildCommand buildCommand = new BuildCommand(projectPath, printStream, printStream, false); new CommandLine(buildCommand).parseArgs(); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals( buildLog.replaceAll("\r", ""), getOutput("build-old-dist-precomp-proj-without-sticky.txt") .replaceAll("INSERT_NEW_DIST_VERSION_HERE", getNewVersionForOldDistWarning()) .replaceAll("INSERT_OLD_DIST_VERSION_HERE", getOldVersionForOldDistWarning("2201.5.0"))); Path actualDependenciesToml = projectPath.resolve("Dependencies.toml"); Path expectedDependenciesToml = testResources.resolve("dep-dist-version-projects").resolve("expected-dep-tomls") .resolve("old-dist-precomp-pkg-with-no-sticky.toml"); Assert.assertTrue(actualDependenciesToml.toFile().exists()); assertTomlFilesEquals(actualDependenciesToml, expectedDependenciesToml); replaceDependenciesTomlContent(projectPath, RepoUtils.getBallerinaShortVersion(), "**INSERT_DISTRIBUTION_VERSION_HERE**"); replaceDependenciesTomlContent(projectPath, "1.1.0", "1.0.0"); deleteDirectory(projectPath.resolve("target")); }
class file */ @Test(description = "Build a ballerina project with conflicted jars") public void testBuildBalProjectWithJarConflicts() throws IOException { Path projectPath = this.testResources.resolve("projectWithConflictedJars"); System.setProperty("user.dir", projectPath.toString()); BuildCommand buildCommand = new BuildCommand(projectPath, printStream, printStream, false); new CommandLine(buildCommand).parseArgs(); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), getOutput("build-bal-project-with-jar-conflicts.txt")); Assert.assertTrue( projectPath.resolve("target").resolve("bin").resolve("conflictProject.jar").toFile().exists()); Assert.assertTrue(projectPath.resolve("target").resolve("cache").resolve("pramodya") .resolve("conflictProject").resolve("0.1.7").resolve("java11") .resolve("pramodya-conflictProject-0.1.7.jar").toFile().exists()); }
class file */ @Test(description = "Build a ballerina project with conflicted jars") public void testBuildBalProjectWithJarConflicts() throws IOException { Path projectPath = this.testResources.resolve("projectWithConflictedJars"); System.setProperty("user.dir", projectPath.toString()); BuildCommand buildCommand = new BuildCommand(projectPath, printStream, printStream, false); new CommandLine(buildCommand).parseArgs(); buildCommand.execute(); String buildLog = readOutput(true); Assert.assertEquals(buildLog.replaceAll("\r", ""), getOutput("build-bal-project-with-jar-conflicts.txt")); Assert.assertTrue( projectPath.resolve("target").resolve("bin").resolve("conflictProject.jar").toFile().exists()); Assert.assertTrue(projectPath.resolve("target").resolve("cache").resolve("pramodya") .resolve("conflictProject").resolve("0.1.7").resolve("java11") .resolve("pramodya-conflictProject-0.1.7.jar").toFile().exists()); }
LOG.warn("Exception: {} does not exist", dbName, e);
public void removeDatabase(String dbName) { if (closing) { return; } try { LOG.info("begin to remove database {} from replicatedEnviroment", dbName); replicatedEnvironment.removeDatabase(null, dbName); LOG.info("remove database {} from replicatedEnviroment successfully", dbName); } catch (DatabaseNotFoundException e) { LOG.warn("catch an exception when remove db:{}, this db does not exist", dbName, e); } }
LOG.warn("catch an exception when remove db:{}, this db does not exist", dbName, e);
public void removeDatabase(String dbName) { if (closing) { return; } try { replicatedEnvironment.removeDatabase(null, dbName); LOG.info("remove database {} from replicatedEnviroment successfully", dbName); } catch (DatabaseNotFoundException e) { LOG.warn("Exception: {} does not exist", dbName, e); } }
class BDBEnvironment { private static final Logger LOG = LogManager.getLogger(BDBEnvironment.class); protected static int RETRY_TIME = 3; protected static int SLEEP_INTERVAL_SEC = 5; private static final int MEMORY_CACHE_PERCENT = 20; private static final int INITAL_STATE_CHANGE_WAIT_SEC = 10; public static final String STARROCKS_JOURNAL_GROUP = "PALO_JOURNAL_GROUP"; private static final String BDB_DIR = "/bdb"; private ReplicatedEnvironment replicatedEnvironment; private EnvironmentConfig environmentConfig; private ReplicationConfig replicationConfig; private DatabaseConfig dbConfig; private TransactionConfig txnConfig; private CloseSafeDatabase epochDB = null; private ReplicationGroupAdmin replicationGroupAdmin = null; private volatile boolean closing = false; private final File envHome; private final String selfNodeName; private final String selfNodeHostPort; private final String helperHostPort; private final boolean isElectable; /** * init & return bdb environment * @param nodeName * @return * @throws JournalException */ public static BDBEnvironment initBDBEnvironment(String nodeName) throws JournalException, InterruptedException { Pair<String, Integer> selfNode = GlobalStateMgr.getCurrentState().getSelfNode(); try { if (NetUtils.isPortUsing(selfNode.first, selfNode.second)) { String errMsg = String.format("edit_log_port %d is already in use. 
will exit.", selfNode.second); LOG.error(errMsg); throw new JournalException(errMsg); } } catch (IOException e) { String errMsg = String.format("failed to check if %s:%s is used!", selfNode.first, selfNode.second); LOG.error(errMsg, e); JournalException journalException = new JournalException(errMsg); journalException.initCause(e); throw journalException; } String selfNodeHostPort = selfNode.first + ":" + selfNode.second; File dbEnv = new File(getBdbDir()); if (!dbEnv.exists()) { dbEnv.mkdirs(); } Pair<String, Integer> helperNode = GlobalStateMgr.getCurrentState().getHelperNode(); String helperHostPort = helperNode.first + ":" + helperNode.second; BDBEnvironment bdbEnvironment = new BDBEnvironment(dbEnv, nodeName, selfNodeHostPort, helperHostPort, GlobalStateMgr.getCurrentState().isElectable()); bdbEnvironment.setup(); return bdbEnvironment; } public static String getBdbDir() { return Config.meta_dir + BDB_DIR; } protected BDBEnvironment(File envHome, String selfNodeName, String selfNodeHostPort, String helperHostPort, boolean isElectable) { this.envHome = envHome; this.selfNodeName = selfNodeName; this.selfNodeHostPort = selfNodeHostPort; this.helperHostPort = helperHostPort; this.isElectable = isElectable; } protected void setup() throws JournalException, InterruptedException { this.closing = false; ensureHelperInLocal(); initConfigs(isElectable); setupEnvironment(); } protected void initConfigs(boolean isElectable) throws JournalException { if (Config.metadata_failure_recovery.equals("true")) { if (!isElectable) { String errMsg = "Current node is not in the electable_nodes list. 
will exit"; LOG.error(errMsg); throw new JournalException(errMsg); } DbResetRepGroup resetUtility = new DbResetRepGroup(envHome, STARROCKS_JOURNAL_GROUP, selfNodeName, selfNodeHostPort); resetUtility.reset(); LOG.info("group has been reset."); } replicationConfig = new ReplicationConfig(); replicationConfig.setNodeName(selfNodeName); replicationConfig.setNodeHostPort(selfNodeHostPort); replicationConfig.setHelperHosts(helperHostPort); replicationConfig.setGroupName(STARROCKS_JOURNAL_GROUP); replicationConfig.setConfigParam(ReplicationConfig.ENV_UNKNOWN_STATE_TIMEOUT, "10"); replicationConfig.setMaxClockDelta(Config.max_bdbje_clock_delta_ms, TimeUnit.MILLISECONDS); replicationConfig.setConfigParam(ReplicationConfig.TXN_ROLLBACK_LIMIT, String.valueOf(Config.txn_rollback_limit)); replicationConfig .setConfigParam(ReplicationConfig.REPLICA_TIMEOUT, Config.bdbje_heartbeat_timeout_second + " s"); replicationConfig .setConfigParam(ReplicationConfig.FEEDER_TIMEOUT, Config.bdbje_heartbeat_timeout_second + " s"); replicationConfig .setConfigParam(ReplicationConfig.REPLAY_COST_PERCENT, String.valueOf(Config.bdbje_replay_cost_percent)); if (isElectable) { replicationConfig.setReplicaAckTimeout(Config.bdbje_replica_ack_timeout_second, TimeUnit.SECONDS); replicationConfig.setConfigParam(ReplicationConfig.REPLICA_MAX_GROUP_COMMIT, "0"); replicationConfig.setConsistencyPolicy(new NoConsistencyRequiredPolicy()); } else { replicationConfig.setNodeType(NodeType.SECONDARY); replicationConfig.setConsistencyPolicy(new NoConsistencyRequiredPolicy()); } java.util.logging.Logger parent = java.util.logging.Logger.getLogger("com.sleepycat.je"); parent.setLevel(Level.parse(Config.bdbje_log_level)); environmentConfig = new EnvironmentConfig(); environmentConfig.setTransactional(true); environmentConfig.setAllowCreate(true); environmentConfig.setCachePercent(MEMORY_CACHE_PERCENT); environmentConfig.setLockTimeout(Config.bdbje_lock_timeout_second, TimeUnit.SECONDS); 
environmentConfig.setConfigParam(EnvironmentConfig.FILE_LOGGING_LEVEL, Config.bdbje_log_level); environmentConfig.setConfigParam(EnvironmentConfig.CLEANER_THREADS, String.valueOf(Config.bdbje_cleaner_threads)); if (isElectable) { Durability durability = new Durability(getSyncPolicy(Config.master_sync_policy), getSyncPolicy(Config.replica_sync_policy), getAckPolicy(Config.replica_ack_policy)); environmentConfig.setDurability(durability); } dbConfig = new DatabaseConfig(); dbConfig.setTransactional(true); if (isElectable) { dbConfig.setAllowCreate(true); dbConfig.setReadOnly(false); } else { dbConfig.setAllowCreate(false); dbConfig.setReadOnly(true); } txnConfig = new TransactionConfig(); if (isElectable) { txnConfig.setDurability(new Durability( getSyncPolicy(Config.master_sync_policy), getSyncPolicy(Config.replica_sync_policy), getAckPolicy(Config.replica_ack_policy))); } } protected void setupEnvironment() throws JournalException, InterruptedException { JournalException exception = null; for (int i = 0; i < RETRY_TIME; i++) { if (i > 0) { Thread.sleep(SLEEP_INTERVAL_SEC * 1000); } try { LOG.info("start to setup bdb environment for {} times", i + 1); replicatedEnvironment = new ReplicatedEnvironment(envHome, replicationConfig, environmentConfig); Set<InetSocketAddress> adminNodes = new HashSet<InetSocketAddress>(); HostAndPort helperAddress = HostAndPort.fromString(helperHostPort); InetSocketAddress helper = new InetSocketAddress(helperAddress.getHost(), helperAddress.getPort()); adminNodes.add(helper); LOG.info("add helper[{}] as ReplicationGroupAdmin", helperHostPort); if (!selfNodeHostPort.equals(helperHostPort) && isElectable) { HostAndPort selfNodeAddress = HostAndPort.fromString(selfNodeHostPort); InetSocketAddress self = new InetSocketAddress(selfNodeAddress.getHost(), selfNodeAddress.getPort()); adminNodes.add(self); LOG.info("add self[{}] as ReplicationGroupAdmin", selfNodeHostPort); } replicationGroupAdmin = new 
ReplicationGroupAdmin(STARROCKS_JOURNAL_GROUP, adminNodes); HAProtocol protocol = new BDBHA(this, selfNodeName); GlobalStateMgr.getCurrentState().setHaProtocol(protocol); BDBStateChangeListener listener = new BDBStateChangeListener(isElectable); replicatedEnvironment.setStateChangeListener(listener); LOG.info("replicated environment is all set, wait for state change..."); for (int j = 0; j < INITAL_STATE_CHANGE_WAIT_SEC; j++) { if (FrontendNodeType.UNKNOWN != listener.getNewType()) { break; } Thread.sleep(1000); } LOG.info("state change done, current role {}", listener.getNewType()); epochDB = new CloseSafeDatabase(replicatedEnvironment.openDatabase(null, "epochDB", dbConfig)); LOG.info("end setup bdb environment after {} times", i + 1); return; } catch (RestartRequiredException e) { String errMsg = String.format( "catch a RestartRequiredException when setup environment after retried %d times, refresh and setup again", i + 1); LOG.warn(errMsg, e); exception = new JournalException(errMsg); exception.initCause(e); if (e instanceof InsufficientLogException) { refreshLog((InsufficientLogException) e); } close(); } catch (DatabaseException e) { if (i == 0 && e instanceof UnknownMasterException) { LOG.warn("failed to setup environment because of UnknowMasterException for the first time, ignore it."); } else { String errMsg = String.format("failed to setup environment after retried %d times", i + 1); LOG.error(errMsg, e); exception = new JournalException(errMsg); exception.initCause(e); } } } throw exception; } /** * This method is used to check if the local replicated environment matches that of the helper. * This could happen in a situation like this: * 1. User adds a follower and starts the new follower without helper. * --> The new follower will run as a master in a standalone environment. * 2. User restarts this follower with a helper. * --> Sometimes this new follower will join the group successfully, making master crash. 
* * This method only init the replicated environment through a handshake. * It will not read or write any data. */ protected void ensureHelperInLocal() throws JournalException, InterruptedException { if (!isElectable) { LOG.info("skip check local environment for observer"); return; } if (selfNodeHostPort.equals(helperHostPort)) { LOG.info("skip check local environment because helper node and local node are identical."); return; } if (Config.metadata_failure_recovery.equals("true")) { LOG.info("skip check local environment because metadata_failure_recovery = true"); return; } LOG.info("start to check if local replica environment from {} contains {}", envHome, helperHostPort); initConfigs(false); HostAndPort hostAndPort = HostAndPort.fromString(helperHostPort); JournalException exception = null; for (int i = 0; i < RETRY_TIME; i++) { if (i > 0) { Thread.sleep(SLEEP_INTERVAL_SEC * 1000); } try { replicatedEnvironment = new ReplicatedEnvironment(envHome, replicationConfig, environmentConfig); Set<ReplicationNode> localNodes = replicatedEnvironment.getGroup().getNodes(); if (localNodes.isEmpty()) { LOG.info("skip check empty environment"); return; } for (ReplicationNode node : localNodes) { if (node.getHostName().equals(hostAndPort.getHost()) && node.getPort() == hostAndPort.getPort()) { LOG.info("found {} in local environment!", helperHostPort); return; } } throw new JournalException( String.format("bad environment %s! 
helper host %s not in local %s", envHome, helperHostPort, localNodes)); } catch (RestartRequiredException e) { String errMsg = String.format( "catch a RestartRequiredException when checking if helper in local after retried %d times, " + "refresh and check again", i + 1); LOG.warn(errMsg, e); exception = new JournalException(errMsg); exception.initCause(e); if (e instanceof InsufficientLogException) { refreshLog((InsufficientLogException) e); } } catch (DatabaseException e) { if (i == 0 && e instanceof UnknownMasterException) { LOG.warn( "failed to check if helper in local because of UnknowMasterException for the first time, ignore it."); } else { String errMsg = String.format("failed to check if helper in local after retried %d times", i + 1); LOG.error(errMsg, e); exception = new JournalException(errMsg); exception.initCause(e); } } finally { if (replicatedEnvironment != null) { replicatedEnvironment.close(); } } } throw exception; } public void refreshLog(InsufficientLogException insufficientLogEx) { try { NetworkRestore restore = new NetworkRestore(); NetworkRestoreConfig config = new NetworkRestoreConfig(); config.setRetainLogFiles(false); restore.execute(insufficientLogEx, config); } catch (Throwable t) { LOG.warn("refresh log failed", t); } } public ReplicationGroupAdmin getReplicationGroupAdmin() { return this.replicationGroupAdmin; } public void setNewReplicationGroupAdmin(Set<InetSocketAddress> newHelperNodes) { this.replicationGroupAdmin = new ReplicationGroupAdmin(STARROCKS_JOURNAL_GROUP, newHelperNodes); } public CloseSafeDatabase getEpochDB() { return epochDB; } public ReplicatedEnvironment getReplicatedEnvironment() { return replicatedEnvironment; } /** * open database and return a CloseSafeDatabase instance * We should make sure no database conflict from upper level */ public CloseSafeDatabase openDatabase(String dbName) { return new CloseSafeDatabase(replicatedEnvironment.openDatabase(null, dbName, dbConfig)); } /** * Remove the database whose name 
is dbName * We should make sure no database conflict from upper level **/ public List<Long> getDatabaseNames() { if (closing) { return null; } List<Long> ret = new ArrayList<Long>(); List<String> names = replicatedEnvironment.getDatabaseNames(); for (String name : names) { if (name.equals("epochDB")) { continue; } long db = Long.parseLong(name); ret.add(db); } Collections.sort(ret); return ret; } public boolean close() { boolean closeSuccess = true; try { closing = true; LOG.info("start to close epoch database"); if (epochDB != null) { try { epochDB.close(); } catch (DatabaseException exception) { LOG.error("Error closing db {}", epochDB.getDatabaseName(), exception); closeSuccess = false; } } LOG.info("close epoch database end"); LOG.info("start to close replicated environment"); if (replicatedEnvironment != null) { try { replicatedEnvironment.close(); } catch (DatabaseException exception) { LOG.error("Error closing replicatedEnvironment", exception); closeSuccess = false; } } LOG.info("close replicated environment end"); } finally { closing = false; } return closeSuccess; } public void flushVLSNMapping() { if (replicatedEnvironment != null) { RepInternal.getRepImpl(replicatedEnvironment).getVLSNIndex() .flushToDatabase(Durability.COMMIT_SYNC); } } private SyncPolicy getSyncPolicy(String policy) { if (policy.equalsIgnoreCase("SYNC")) { return Durability.SyncPolicy.SYNC; } if (policy.equalsIgnoreCase("NO_SYNC")) { return Durability.SyncPolicy.NO_SYNC; } return Durability.SyncPolicy.WRITE_NO_SYNC; } private ReplicaAckPolicy getAckPolicy(String policy) { if (policy.equalsIgnoreCase("ALL")) { return Durability.ReplicaAckPolicy.ALL; } if (policy.equalsIgnoreCase("NONE")) { return Durability.ReplicaAckPolicy.NONE; } return Durability.ReplicaAckPolicy.SIMPLE_MAJORITY; } /** * package private, used within com.starrocks.journal.bdbje */ TransactionConfig getTxnConfig() { return txnConfig; } }
class BDBEnvironment { private static final Logger LOG = LogManager.getLogger(BDBEnvironment.class); protected static int RETRY_TIME = 3; protected static int SLEEP_INTERVAL_SEC = 5; private static final int MEMORY_CACHE_PERCENT = 20; private static final int INITAL_STATE_CHANGE_WAIT_SEC = 10; public static final String STARROCKS_JOURNAL_GROUP = "PALO_JOURNAL_GROUP"; private static final String BDB_DIR = "/bdb"; private ReplicatedEnvironment replicatedEnvironment; private EnvironmentConfig environmentConfig; private ReplicationConfig replicationConfig; private DatabaseConfig dbConfig; private TransactionConfig txnConfig; private CloseSafeDatabase epochDB = null; private ReplicationGroupAdmin replicationGroupAdmin = null; private volatile boolean closing = false; private final File envHome; private final String selfNodeName; private final String selfNodeHostPort; private final String helperHostPort; private final boolean isElectable; /** * init & return bdb environment * @param nodeName * @return * @throws JournalException */ public static BDBEnvironment initBDBEnvironment(String nodeName) throws JournalException, InterruptedException { Pair<String, Integer> selfNode = GlobalStateMgr.getCurrentState().getSelfNode(); try { if (NetUtils.isPortUsing(selfNode.first, selfNode.second)) { String errMsg = String.format("edit_log_port %d is already in use. 
will exit.", selfNode.second); LOG.error(errMsg); throw new JournalException(errMsg); } } catch (IOException e) { String errMsg = String.format("failed to check if %s:%s is used!", selfNode.first, selfNode.second); LOG.error(errMsg, e); JournalException journalException = new JournalException(errMsg); journalException.initCause(e); throw journalException; } String selfNodeHostPort = selfNode.first + ":" + selfNode.second; File dbEnv = new File(getBdbDir()); if (!dbEnv.exists()) { dbEnv.mkdirs(); } Pair<String, Integer> helperNode = GlobalStateMgr.getCurrentState().getHelperNode(); String helperHostPort = helperNode.first + ":" + helperNode.second; BDBEnvironment bdbEnvironment = new BDBEnvironment(dbEnv, nodeName, selfNodeHostPort, helperHostPort, GlobalStateMgr.getCurrentState().isElectable()); bdbEnvironment.setup(); return bdbEnvironment; } public static String getBdbDir() { return Config.meta_dir + BDB_DIR; } protected BDBEnvironment(File envHome, String selfNodeName, String selfNodeHostPort, String helperHostPort, boolean isElectable) { this.envHome = envHome; this.selfNodeName = selfNodeName; this.selfNodeHostPort = selfNodeHostPort; this.helperHostPort = helperHostPort; this.isElectable = isElectable; } protected void setup() throws JournalException, InterruptedException { this.closing = false; ensureHelperInLocal(); initConfigs(isElectable); setupEnvironment(); } protected void initConfigs(boolean isElectable) throws JournalException { if (Config.metadata_failure_recovery.equals("true")) { if (!isElectable) { String errMsg = "Current node is not in the electable_nodes list. 
will exit"; LOG.error(errMsg); throw new JournalException(errMsg); } DbResetRepGroup resetUtility = new DbResetRepGroup(envHome, STARROCKS_JOURNAL_GROUP, selfNodeName, selfNodeHostPort); resetUtility.reset(); LOG.info("group has been reset."); } replicationConfig = new ReplicationConfig(); replicationConfig.setNodeName(selfNodeName); replicationConfig.setNodeHostPort(selfNodeHostPort); replicationConfig.setHelperHosts(helperHostPort); replicationConfig.setGroupName(STARROCKS_JOURNAL_GROUP); replicationConfig.setConfigParam(ReplicationConfig.ENV_UNKNOWN_STATE_TIMEOUT, "10"); replicationConfig.setMaxClockDelta(Config.max_bdbje_clock_delta_ms, TimeUnit.MILLISECONDS); replicationConfig.setConfigParam(ReplicationConfig.TXN_ROLLBACK_LIMIT, String.valueOf(Config.txn_rollback_limit)); replicationConfig .setConfigParam(ReplicationConfig.REPLICA_TIMEOUT, Config.bdbje_heartbeat_timeout_second + " s"); replicationConfig .setConfigParam(ReplicationConfig.FEEDER_TIMEOUT, Config.bdbje_heartbeat_timeout_second + " s"); replicationConfig .setConfigParam(ReplicationConfig.REPLAY_COST_PERCENT, String.valueOf(Config.bdbje_replay_cost_percent)); if (isElectable) { replicationConfig.setReplicaAckTimeout(Config.bdbje_replica_ack_timeout_second, TimeUnit.SECONDS); replicationConfig.setConfigParam(ReplicationConfig.REPLICA_MAX_GROUP_COMMIT, "0"); replicationConfig.setConsistencyPolicy(new NoConsistencyRequiredPolicy()); } else { replicationConfig.setNodeType(NodeType.SECONDARY); replicationConfig.setConsistencyPolicy(new NoConsistencyRequiredPolicy()); } java.util.logging.Logger parent = java.util.logging.Logger.getLogger("com.sleepycat.je"); parent.setLevel(Level.parse(Config.bdbje_log_level)); environmentConfig = new EnvironmentConfig(); environmentConfig.setTransactional(true); environmentConfig.setAllowCreate(true); environmentConfig.setCachePercent(MEMORY_CACHE_PERCENT); environmentConfig.setLockTimeout(Config.bdbje_lock_timeout_second, TimeUnit.SECONDS); 
environmentConfig.setConfigParam(EnvironmentConfig.FILE_LOGGING_LEVEL, Config.bdbje_log_level); environmentConfig.setConfigParam(EnvironmentConfig.CLEANER_THREADS, String.valueOf(Config.bdbje_cleaner_threads)); if (isElectable) { Durability durability = new Durability(getSyncPolicy(Config.master_sync_policy), getSyncPolicy(Config.replica_sync_policy), getAckPolicy(Config.replica_ack_policy)); environmentConfig.setDurability(durability); } dbConfig = new DatabaseConfig(); dbConfig.setTransactional(true); if (isElectable) { dbConfig.setAllowCreate(true); dbConfig.setReadOnly(false); } else { dbConfig.setAllowCreate(false); dbConfig.setReadOnly(true); } txnConfig = new TransactionConfig(); if (isElectable) { txnConfig.setDurability(new Durability( getSyncPolicy(Config.master_sync_policy), getSyncPolicy(Config.replica_sync_policy), getAckPolicy(Config.replica_ack_policy))); } } protected void setupEnvironment() throws JournalException, InterruptedException { JournalException exception = null; for (int i = 0; i < RETRY_TIME; i++) { if (i > 0) { Thread.sleep(SLEEP_INTERVAL_SEC * 1000); } try { LOG.info("start to setup bdb environment for {} times", i + 1); replicatedEnvironment = new ReplicatedEnvironment(envHome, replicationConfig, environmentConfig); Set<InetSocketAddress> adminNodes = new HashSet<InetSocketAddress>(); HostAndPort helperAddress = HostAndPort.fromString(helperHostPort); InetSocketAddress helper = new InetSocketAddress(helperAddress.getHost(), helperAddress.getPort()); adminNodes.add(helper); LOG.info("add helper[{}] as ReplicationGroupAdmin", helperHostPort); if (!selfNodeHostPort.equals(helperHostPort) && isElectable) { HostAndPort selfNodeAddress = HostAndPort.fromString(selfNodeHostPort); InetSocketAddress self = new InetSocketAddress(selfNodeAddress.getHost(), selfNodeAddress.getPort()); adminNodes.add(self); LOG.info("add self[{}] as ReplicationGroupAdmin", selfNodeHostPort); } replicationGroupAdmin = new 
ReplicationGroupAdmin(STARROCKS_JOURNAL_GROUP, adminNodes); HAProtocol protocol = new BDBHA(this, selfNodeName); GlobalStateMgr.getCurrentState().setHaProtocol(protocol); BDBStateChangeListener listener = new BDBStateChangeListener(isElectable); replicatedEnvironment.setStateChangeListener(listener); LOG.info("replicated environment is all set, wait for state change..."); for (int j = 0; j < INITAL_STATE_CHANGE_WAIT_SEC; j++) { if (FrontendNodeType.UNKNOWN != listener.getNewType()) { break; } Thread.sleep(1000); } LOG.info("state change done, current role {}", listener.getNewType()); epochDB = new CloseSafeDatabase(replicatedEnvironment.openDatabase(null, "epochDB", dbConfig)); LOG.info("end setup bdb environment after {} times", i + 1); return; } catch (RestartRequiredException e) { String errMsg = String.format( "catch a RestartRequiredException when setup environment after retried %d times, refresh and setup again", i + 1); LOG.warn(errMsg, e); exception = new JournalException(errMsg); exception.initCause(e); if (e instanceof InsufficientLogException) { refreshLog((InsufficientLogException) e); } close(); } catch (DatabaseException e) { if (i == 0 && e instanceof UnknownMasterException) { LOG.warn("failed to setup environment because of UnknowMasterException for the first time, ignore it."); } else { String errMsg = String.format("failed to setup environment after retried %d times", i + 1); LOG.error(errMsg, e); exception = new JournalException(errMsg); exception.initCause(e); } } } throw exception; } /** * This method is used to check if the local replicated environment matches that of the helper. * This could happen in a situation like this: * 1. User adds a follower and starts the new follower without helper. * --> The new follower will run as a master in a standalone environment. * 2. User restarts this follower with a helper. * --> Sometimes this new follower will join the group successfully, making master crash. 
* * This method only init the replicated environment through a handshake. * It will not read or write any data. */ protected void ensureHelperInLocal() throws JournalException, InterruptedException { if (!isElectable) { LOG.info("skip check local environment for observer"); return; } if (selfNodeHostPort.equals(helperHostPort)) { LOG.info("skip check local environment because helper node and local node are identical."); return; } if (Config.metadata_failure_recovery.equals("true")) { LOG.info("skip check local environment because metadata_failure_recovery = true"); return; } LOG.info("start to check if local replica environment from {} contains {}", envHome, helperHostPort); initConfigs(false); HostAndPort hostAndPort = HostAndPort.fromString(helperHostPort); JournalException exception = null; for (int i = 0; i < RETRY_TIME; i++) { if (i > 0) { Thread.sleep(SLEEP_INTERVAL_SEC * 1000); } try { replicatedEnvironment = new ReplicatedEnvironment(envHome, replicationConfig, environmentConfig); Set<ReplicationNode> localNodes = replicatedEnvironment.getGroup().getNodes(); if (localNodes.isEmpty()) { LOG.info("skip check empty environment"); return; } for (ReplicationNode node : localNodes) { if (node.getHostName().equals(hostAndPort.getHost()) && node.getPort() == hostAndPort.getPort()) { LOG.info("found {} in local environment!", helperHostPort); return; } } throw new JournalException( String.format("bad environment %s! 
helper host %s not in local %s", envHome, helperHostPort, localNodes)); } catch (RestartRequiredException e) { String errMsg = String.format( "catch a RestartRequiredException when checking if helper in local after retried %d times, " + "refresh and check again", i + 1); LOG.warn(errMsg, e); exception = new JournalException(errMsg); exception.initCause(e); if (e instanceof InsufficientLogException) { refreshLog((InsufficientLogException) e); } } catch (DatabaseException e) { if (i == 0 && e instanceof UnknownMasterException) { LOG.warn( "failed to check if helper in local because of UnknowMasterException for the first time, ignore it."); } else { String errMsg = String.format("failed to check if helper in local after retried %d times", i + 1); LOG.error(errMsg, e); exception = new JournalException(errMsg); exception.initCause(e); } } finally { if (replicatedEnvironment != null) { replicatedEnvironment.close(); } } } throw exception; } public void refreshLog(InsufficientLogException insufficientLogEx) { try { NetworkRestore restore = new NetworkRestore(); NetworkRestoreConfig config = new NetworkRestoreConfig(); config.setRetainLogFiles(false); restore.execute(insufficientLogEx, config); } catch (Throwable t) { LOG.warn("refresh log failed", t); } } public ReplicationGroupAdmin getReplicationGroupAdmin() { return this.replicationGroupAdmin; } public void setNewReplicationGroupAdmin(Set<InetSocketAddress> newHelperNodes) { this.replicationGroupAdmin = new ReplicationGroupAdmin(STARROCKS_JOURNAL_GROUP, newHelperNodes); } public CloseSafeDatabase getEpochDB() { return epochDB; } public ReplicatedEnvironment getReplicatedEnvironment() { return replicatedEnvironment; } /** * open database and return a CloseSafeDatabase instance * We should make sure no database conflict from upper level */ public CloseSafeDatabase openDatabase(String dbName) { return new CloseSafeDatabase(replicatedEnvironment.openDatabase(null, dbName, dbConfig)); } /** * Remove the database whose name 
is dbName * We should make sure no database conflict from upper level **/ public List<Long> getDatabaseNames() { return getDatabaseNamesWithPrefix(""); } public List<Long> getDatabaseNamesWithPrefix(String prefix) { if (closing) { return null; } List<Long> ret = new ArrayList<Long>(); List<String> names = replicatedEnvironment.getDatabaseNames(); for (String name : names) { if (name.equals("epochDB")) { continue; } if (Strings.isNullOrEmpty(prefix)) { if (StringUtils.isNumeric(name)) { long db = Long.parseLong(name); ret.add(db); } else { } } else { if (name.startsWith(prefix)) { String dbStr = name.substring(prefix.length()); if (StringUtils.isNumeric(dbStr)) { long db = Long.parseLong(dbStr); ret.add(db); } else { } } else { } } } Collections.sort(ret); return ret; } public boolean close() { boolean closeSuccess = true; try { closing = true; LOG.info("start to close epoch database"); if (epochDB != null) { try { epochDB.close(); } catch (DatabaseException exception) { LOG.error("Error closing db {}", epochDB.getDatabaseName(), exception); closeSuccess = false; } } LOG.info("close epoch database end"); LOG.info("start to close replicated environment"); if (replicatedEnvironment != null) { try { replicatedEnvironment.close(); } catch (DatabaseException exception) { LOG.error("Error closing replicatedEnvironment", exception); closeSuccess = false; } } LOG.info("close replicated environment end"); } finally { closing = false; } return closeSuccess; } public void flushVLSNMapping() { if (replicatedEnvironment != null) { RepInternal.getRepImpl(replicatedEnvironment).getVLSNIndex() .flushToDatabase(Durability.COMMIT_SYNC); } } private SyncPolicy getSyncPolicy(String policy) { if (policy.equalsIgnoreCase("SYNC")) { return Durability.SyncPolicy.SYNC; } if (policy.equalsIgnoreCase("NO_SYNC")) { return Durability.SyncPolicy.NO_SYNC; } return Durability.SyncPolicy.WRITE_NO_SYNC; } private ReplicaAckPolicy getAckPolicy(String policy) { if (policy.equalsIgnoreCase("ALL")) { 
return Durability.ReplicaAckPolicy.ALL; } if (policy.equalsIgnoreCase("NONE")) { return Durability.ReplicaAckPolicy.NONE; } return Durability.ReplicaAckPolicy.SIMPLE_MAJORITY; } /** * package private, used within com.starrocks.journal.bdbje */ TransactionConfig getTxnConfig() { return txnConfig; } }
super() should be first in method
protected void doPrepare(DeployState deployState) { addAndSendApplicationBundles(deployState); sendUserConfiguredFiles(deployState); createEndpointList(deployState); super.doPrepare(deployState); }
super.doPrepare(deployState);
protected void doPrepare(DeployState deployState) { super.doPrepare(deployState); addAndSendApplicationBundles(deployState); sendUserConfiguredFiles(deployState); createEndpointList(deployState); }
class ApplicationContainerCluster extends ContainerCluster<ApplicationContainer> implements ApplicationBundlesConfig.Producer, QrStartConfig.Producer, RankProfilesConfig.Producer, RankingConstantsConfig.Producer, OnnxModelsConfig.Producer, RankingExpressionsConfig.Producer, ContainerMbusConfig.Producer, MetricsProxyApiConfig.Producer, ZookeeperServerConfig.Producer, ApplicationClusterInfo { public static final String METRICS_V2_HANDLER_CLASS = MetricsV2Handler.class.getName(); public static final BindingPattern METRICS_V2_HANDLER_BINDING_1 = SystemBindingPattern.fromHttpPath(MetricsV2Handler.V2_PATH); public static final BindingPattern METRICS_V2_HANDLER_BINDING_2 = SystemBindingPattern.fromHttpPath(MetricsV2Handler.V2_PATH + "/*"); public static final String PROMETHEUS_V1_HANDLER_CLASS = PrometheusV1Handler.class.getName(); private static final BindingPattern PROMETHEUS_V1_HANDLER_BINDING_1 = SystemBindingPattern.fromHttpPath(PrometheusV1Handler.V1_PATH); private static final BindingPattern PROMETHEUS_V1_HANDLER_BINDING_2 = SystemBindingPattern.fromHttpPath(PrometheusV1Handler.V1_PATH + "/*"); public static final int defaultHeapSizePercentageOfTotalNodeMemory = 70; public static final int heapSizePercentageOfTotalNodeMemoryWhenCombinedCluster = 18; private final Set<FileReference> applicationBundles = new LinkedHashSet<>(); private final Set<String> previousHosts; private ContainerModelEvaluation modelEvaluation; private final Optional<String> tlsClientAuthority; private MbusParams mbusParams; private boolean messageBusEnabled = true; private int zookeeperSessionTimeoutSeconds = 30; private final int transport_events_before_wakeup; private final int transport_connections_per_target; private final int heapSizePercentageOfTotalNodeMemory; private Integer memoryPercentage = null; private List<ApplicationClusterEndpoint> endpointList = List.of(); public ApplicationContainerCluster(TreeConfigProducer<?> parent, String configSubId, String clusterId, DeployState 
deployState) { super(parent, configSubId, clusterId, deployState, true, 10); this.tlsClientAuthority = deployState.tlsClientAuthority(); previousHosts = Collections.unmodifiableSet(deployState.getPreviousModel().stream() .map(Model::allocatedHosts) .map(AllocatedHosts::getHosts) .flatMap(Collection::stream) .map(HostSpec::hostname) .collect(Collectors.toCollection(() -> new LinkedHashSet<>()))); addSimpleComponent("com.yahoo.language.provider.DefaultLinguisticsProvider"); addSimpleComponent("com.yahoo.language.provider.DefaultEmbedderProvider"); addSimpleComponent("com.yahoo.container.jdisc.SecretStoreProvider"); addSimpleComponent("com.yahoo.container.jdisc.CertificateStoreProvider"); addSimpleComponent("com.yahoo.container.jdisc.AthenzIdentityProviderProvider"); addSimpleComponent("com.yahoo.container.core.documentapi.DocumentAccessProvider"); addSimpleComponent(DOCUMENT_TYPE_MANAGER_CLASS); addMetricsHandlers(); addTestrunnerComponentsIfTester(deployState); transport_connections_per_target = deployState.featureFlags().mbusJavaRpcNumTargets(); transport_events_before_wakeup = deployState.featureFlags().mbusJavaEventsBeforeWakeup(); heapSizePercentageOfTotalNodeMemory = deployState.featureFlags().heapSizePercentage() > 0 ? 
Math.min(99, deployState.featureFlags().heapSizePercentage()) : defaultHeapSizePercentageOfTotalNodeMemory; } @Override private void addAndSendApplicationBundles(DeployState deployState) { for (ComponentInfo component : deployState.getApplicationPackage().getComponentsInfo(deployState.getVespaVersion())) { FileReference reference = deployState.getFileRegistry().addFile(component.getPathRelativeToAppDir()); applicationBundles.add(reference); } } private void sendUserConfiguredFiles(DeployState deployState) { FileSender fileSender = new FileSender(containers, deployState.getFileRegistry(), deployState.getDeployLogger()); for (Component<?, ?> component : getAllComponents()) { fileSender.sendUserConfiguredFiles(component); } } private void addMetricsHandlers() { addMetricsHandler(METRICS_V2_HANDLER_CLASS, METRICS_V2_HANDLER_BINDING_1, METRICS_V2_HANDLER_BINDING_2); addMetricsHandler(PROMETHEUS_V1_HANDLER_CLASS, PROMETHEUS_V1_HANDLER_BINDING_1, PROMETHEUS_V1_HANDLER_BINDING_2); } private void addMetricsHandler(String handlerClass, BindingPattern rootBinding, BindingPattern innerBinding) { Handler handler = new Handler( new ComponentModel(handlerClass, null, null, null)); handler.addServerBindings(rootBinding, innerBinding); addComponent(handler); } private void addTestrunnerComponentsIfTester(DeployState deployState) { if (deployState.isHosted() && deployState.getProperties().applicationId().instance().isTester()) { addPlatformBundle(PlatformBundles.absoluteBundlePath("vespa-testrunner-components")); addPlatformBundle(PlatformBundles.absoluteBundlePath("vespa-osgi-testrunner")); addPlatformBundle(PlatformBundles.absoluteBundlePath("tenant-cd-api")); if(deployState.zone().system().isPublic()) { addPlatformBundle(PlatformBundles.absoluteBundlePath("cloud-tenant-cd")); } } } public void setModelEvaluation(ContainerModelEvaluation modelEvaluation) { this.modelEvaluation = modelEvaluation; } public void setMemoryPercentage(Integer memoryPercentage) { this.memoryPercentage = 
memoryPercentage; } @Override public Optional<Integer> getMemoryPercentage() { if (memoryPercentage != null) { return Optional.of(memoryPercentage); } else if (isHostedVespa()) { return getHostClusterId().isPresent() ? Optional.of(heapSizePercentageOfTotalNodeMemoryWhenCombinedCluster) : Optional.of(heapSizePercentageOfTotalNodeMemory); } return Optional.empty(); } /** Create list of endpoints, these will be consumed later by LbServicesProducer */ private void createEndpointList(DeployState deployState) { if(!deployState.isHosted()) return; if(deployState.getProperties().applicationId().instance().isTester()) return; List<ApplicationClusterEndpoint> endpoints = new ArrayList<>(); List<String> hosts = getContainers().stream() .map(AbstractService::getHostName) .sorted() .toList(); for (String suffix : deployState.getProperties().zoneDnsSuffixes()) { ApplicationClusterEndpoint.DnsName l4Name = ApplicationClusterEndpoint.DnsName.sharedL4NameFrom( deployState.zone().system(), ClusterSpec.Id.from(getName()), deployState.getProperties().applicationId(), suffix); endpoints.add(ApplicationClusterEndpoint.builder() .zoneScope() .sharedL4Routing() .dnsName(l4Name) .hosts(hosts) .clusterId(getName()) .build()); } Set<ContainerEndpoint> endpointsFromController = deployState.getEndpoints(); endpointsFromController.stream() .filter(ce -> ce.clusterId().equals(getName())) .filter(ce -> ce.routingMethod() == sharedLayer4) .forEach(ce -> ce.names().forEach( name -> endpoints.add(ApplicationClusterEndpoint.builder() .scope(ce.scope()) .weight(Long.valueOf(ce.weight().orElse(1)).intValue()) .routingMethod(ce.routingMethod()) .dnsName(ApplicationClusterEndpoint.DnsName.from(name)) .hosts(hosts) .clusterId(getName()) .build()) )); endpointList = List.copyOf(endpoints); } @Override public void getConfig(ApplicationBundlesConfig.Builder builder) { applicationBundles.stream().map(FileReference::value) .forEach(builder::bundles); } @Override public void getConfig(RankProfilesConfig.Builder 
builder) { if (modelEvaluation != null) modelEvaluation.getConfig(builder); } @Override public void getConfig(RankingConstantsConfig.Builder builder) { if (modelEvaluation != null) modelEvaluation.getConfig(builder); } @Override public void getConfig(OnnxModelsConfig.Builder builder) { if (modelEvaluation != null) modelEvaluation.getConfig(builder); } public void getConfig(RankingExpressionsConfig.Builder builder) { if (modelEvaluation != null) modelEvaluation.getConfig(builder); } @Override public void getConfig(ContainerMbusConfig.Builder builder) { if (mbusParams != null) { if (mbusParams.maxConcurrentFactor != null) builder.maxConcurrentFactor(mbusParams.maxConcurrentFactor); if (mbusParams.documentExpansionFactor != null) builder.documentExpansionFactor(mbusParams.documentExpansionFactor); if (mbusParams.containerCoreMemory != null) builder.containerCoreMemory(mbusParams.containerCoreMemory); } if (getDocproc() != null) getDocproc().getConfig(builder); builder.transport_events_before_wakeup(transport_events_before_wakeup); builder.numconnectionspertarget(transport_connections_per_target); } @Override public void getConfig(MetricsProxyApiConfig.Builder builder) { builder.metricsPort(MetricsProxyContainer.BASEPORT) .metricsApiPath(ApplicationMetricsHandler.METRICS_VALUES_PATH) .prometheusApiPath(ApplicationMetricsHandler.PROMETHEUS_VALUES_PATH); } @Override public void getConfig(QrStartConfig.Builder builder) { super.getConfig(builder); builder.jvm.verbosegc(true) .availableProcessors(0) .compressedClassSpaceSize(0) .minHeapsize(1536) .heapsize(1536); if (getMemoryPercentage().isPresent()) { builder.jvm.heapSizeAsPercentageOfPhysicalMemory(getMemoryPercentage().get()); } } @Override public void getConfig(ZookeeperServerConfig.Builder builder) { if (getParent() instanceof ConfigserverCluster) return; for (Container container : getContainers()) { ZookeeperServerConfig.Server.Builder serverBuilder = new ZookeeperServerConfig.Server.Builder(); 
serverBuilder.hostname(container.getHostName()) .id(container.index()) .joining( ! previousHosts.isEmpty() && ! previousHosts.contains(container.getHostName())) .retired(container.isRetired()); builder.server(serverBuilder); } builder.dynamicReconfiguration(true); } @Override public void getConfig(CuratorConfig.Builder builder) { super.getConfig(builder); if (getParent() instanceof ConfigserverCluster) return; builder.zookeeperSessionTimeoutSeconds(zookeeperSessionTimeoutSeconds); } public Optional<String> getTlsClientAuthority() { return tlsClientAuthority; } public void setMbusParams(MbusParams mbusParams) { this.mbusParams = mbusParams; } public void setMessageBusEnabled(boolean messageBusEnabled) { this.messageBusEnabled = messageBusEnabled; } public void setZookeeperSessionTimeoutSeconds(int timeoutSeconds) { this.zookeeperSessionTimeoutSeconds = timeoutSeconds; } protected boolean messageBusEnabled() { return messageBusEnabled; } public void addMbusServer(ComponentId chainId) { ComponentId serviceId = chainId.nestInNamespace(ComponentId.fromString("MbusServer")); addComponent( new Component<>(new ComponentModel(new BundleInstantiationSpecification( serviceId, ComponentSpecification.fromString(MbusServerProvider.class.getName()), null)))); } @Override public List<ApplicationClusterEndpoint> endpoints() { return endpointList; } @Override public String name() { return getName(); } public static class MbusParams { final Double maxConcurrentFactor; final Double documentExpansionFactor; final Integer containerCoreMemory; public MbusParams(Double maxConcurrentFactor, Double documentExpansionFactor, Integer containerCoreMemory) { this.maxConcurrentFactor = maxConcurrentFactor; this.documentExpansionFactor = documentExpansionFactor; this.containerCoreMemory = containerCoreMemory; } } }
class ApplicationContainerCluster extends ContainerCluster<ApplicationContainer> implements ApplicationBundlesConfig.Producer, QrStartConfig.Producer, RankProfilesConfig.Producer, RankingConstantsConfig.Producer, OnnxModelsConfig.Producer, RankingExpressionsConfig.Producer, ContainerMbusConfig.Producer, MetricsProxyApiConfig.Producer, ZookeeperServerConfig.Producer, ApplicationClusterInfo { public static final String METRICS_V2_HANDLER_CLASS = MetricsV2Handler.class.getName(); public static final BindingPattern METRICS_V2_HANDLER_BINDING_1 = SystemBindingPattern.fromHttpPath(MetricsV2Handler.V2_PATH); public static final BindingPattern METRICS_V2_HANDLER_BINDING_2 = SystemBindingPattern.fromHttpPath(MetricsV2Handler.V2_PATH + "/*"); public static final String PROMETHEUS_V1_HANDLER_CLASS = PrometheusV1Handler.class.getName(); private static final BindingPattern PROMETHEUS_V1_HANDLER_BINDING_1 = SystemBindingPattern.fromHttpPath(PrometheusV1Handler.V1_PATH); private static final BindingPattern PROMETHEUS_V1_HANDLER_BINDING_2 = SystemBindingPattern.fromHttpPath(PrometheusV1Handler.V1_PATH + "/*"); public static final int defaultHeapSizePercentageOfTotalNodeMemory = 70; public static final int heapSizePercentageOfTotalNodeMemoryWhenCombinedCluster = 18; private final Set<FileReference> applicationBundles = new LinkedHashSet<>(); private final Set<String> previousHosts; private ContainerModelEvaluation modelEvaluation; private final Optional<String> tlsClientAuthority; private MbusParams mbusParams; private boolean messageBusEnabled = true; private int zookeeperSessionTimeoutSeconds = 30; private final int transport_events_before_wakeup; private final int transport_connections_per_target; private final int heapSizePercentageOfTotalNodeMemory; private Integer memoryPercentage = null; private List<ApplicationClusterEndpoint> endpointList = List.of(); public ApplicationContainerCluster(TreeConfigProducer<?> parent, String configSubId, String clusterId, DeployState 
deployState) { super(parent, configSubId, clusterId, deployState, true, 10); this.tlsClientAuthority = deployState.tlsClientAuthority(); previousHosts = Collections.unmodifiableSet(deployState.getPreviousModel().stream() .map(Model::allocatedHosts) .map(AllocatedHosts::getHosts) .flatMap(Collection::stream) .map(HostSpec::hostname) .collect(Collectors.toCollection(() -> new LinkedHashSet<>()))); addSimpleComponent("com.yahoo.language.provider.DefaultLinguisticsProvider"); addSimpleComponent("com.yahoo.language.provider.DefaultEmbedderProvider"); addSimpleComponent("com.yahoo.container.jdisc.SecretStoreProvider"); addSimpleComponent("com.yahoo.container.jdisc.CertificateStoreProvider"); addSimpleComponent("com.yahoo.container.jdisc.AthenzIdentityProviderProvider"); addSimpleComponent("com.yahoo.container.core.documentapi.DocumentAccessProvider"); addSimpleComponent(DOCUMENT_TYPE_MANAGER_CLASS); addMetricsHandlers(); addTestrunnerComponentsIfTester(deployState); transport_connections_per_target = deployState.featureFlags().mbusJavaRpcNumTargets(); transport_events_before_wakeup = deployState.featureFlags().mbusJavaEventsBeforeWakeup(); heapSizePercentageOfTotalNodeMemory = deployState.featureFlags().heapSizePercentage() > 0 ? 
Math.min(99, deployState.featureFlags().heapSizePercentage()) : defaultHeapSizePercentageOfTotalNodeMemory; } @Override private void addAndSendApplicationBundles(DeployState deployState) { for (ComponentInfo component : deployState.getApplicationPackage().getComponentsInfo(deployState.getVespaVersion())) { FileReference reference = deployState.getFileRegistry().addFile(component.getPathRelativeToAppDir()); applicationBundles.add(reference); } } private void sendUserConfiguredFiles(DeployState deployState) { FileSender fileSender = new FileSender(containers, deployState.getFileRegistry(), deployState.getDeployLogger()); for (Component<?, ?> component : getAllComponents()) { fileSender.sendUserConfiguredFiles(component); } } private void addMetricsHandlers() { addMetricsHandler(METRICS_V2_HANDLER_CLASS, METRICS_V2_HANDLER_BINDING_1, METRICS_V2_HANDLER_BINDING_2); addMetricsHandler(PROMETHEUS_V1_HANDLER_CLASS, PROMETHEUS_V1_HANDLER_BINDING_1, PROMETHEUS_V1_HANDLER_BINDING_2); } private void addMetricsHandler(String handlerClass, BindingPattern rootBinding, BindingPattern innerBinding) { Handler handler = new Handler( new ComponentModel(handlerClass, null, null, null)); handler.addServerBindings(rootBinding, innerBinding); addComponent(handler); } private void addTestrunnerComponentsIfTester(DeployState deployState) { if (deployState.isHosted() && deployState.getProperties().applicationId().instance().isTester()) { addPlatformBundle(PlatformBundles.absoluteBundlePath("vespa-testrunner-components")); addPlatformBundle(PlatformBundles.absoluteBundlePath("vespa-osgi-testrunner")); addPlatformBundle(PlatformBundles.absoluteBundlePath("tenant-cd-api")); if(deployState.zone().system().isPublic()) { addPlatformBundle(PlatformBundles.absoluteBundlePath("cloud-tenant-cd")); } } } public void setModelEvaluation(ContainerModelEvaluation modelEvaluation) { this.modelEvaluation = modelEvaluation; } public void setMemoryPercentage(Integer memoryPercentage) { this.memoryPercentage = 
memoryPercentage; } @Override public Optional<Integer> getMemoryPercentage() { if (memoryPercentage != null) { return Optional.of(memoryPercentage); } else if (isHostedVespa()) { return getHostClusterId().isPresent() ? Optional.of(heapSizePercentageOfTotalNodeMemoryWhenCombinedCluster) : Optional.of(heapSizePercentageOfTotalNodeMemory); } return Optional.empty(); } /** Create list of endpoints, these will be consumed later by LbServicesProducer */ private void createEndpointList(DeployState deployState) { if(!deployState.isHosted()) return; if(deployState.getProperties().applicationId().instance().isTester()) return; List<ApplicationClusterEndpoint> endpoints = new ArrayList<>(); List<String> hosts = getContainers().stream() .map(AbstractService::getHostName) .sorted() .toList(); for (String suffix : deployState.getProperties().zoneDnsSuffixes()) { ApplicationClusterEndpoint.DnsName l4Name = ApplicationClusterEndpoint.DnsName.sharedL4NameFrom( deployState.zone().system(), ClusterSpec.Id.from(getName()), deployState.getProperties().applicationId(), suffix); endpoints.add(ApplicationClusterEndpoint.builder() .zoneScope() .sharedL4Routing() .dnsName(l4Name) .hosts(hosts) .clusterId(getName()) .build()); } Set<ContainerEndpoint> endpointsFromController = deployState.getEndpoints(); endpointsFromController.stream() .filter(ce -> ce.clusterId().equals(getName())) .filter(ce -> ce.routingMethod() == sharedLayer4) .forEach(ce -> ce.names().forEach( name -> endpoints.add(ApplicationClusterEndpoint.builder() .scope(ce.scope()) .weight(Long.valueOf(ce.weight().orElse(1)).intValue()) .routingMethod(ce.routingMethod()) .dnsName(ApplicationClusterEndpoint.DnsName.from(name)) .hosts(hosts) .clusterId(getName()) .build()) )); endpointList = List.copyOf(endpoints); } @Override public void getConfig(ApplicationBundlesConfig.Builder builder) { applicationBundles.stream().map(FileReference::value) .forEach(builder::bundles); } @Override public void getConfig(RankProfilesConfig.Builder 
builder) { if (modelEvaluation != null) modelEvaluation.getConfig(builder); } @Override public void getConfig(RankingConstantsConfig.Builder builder) { if (modelEvaluation != null) modelEvaluation.getConfig(builder); } @Override public void getConfig(OnnxModelsConfig.Builder builder) { if (modelEvaluation != null) modelEvaluation.getConfig(builder); } public void getConfig(RankingExpressionsConfig.Builder builder) { if (modelEvaluation != null) modelEvaluation.getConfig(builder); } @Override public void getConfig(ContainerMbusConfig.Builder builder) { if (mbusParams != null) { if (mbusParams.maxConcurrentFactor != null) builder.maxConcurrentFactor(mbusParams.maxConcurrentFactor); if (mbusParams.documentExpansionFactor != null) builder.documentExpansionFactor(mbusParams.documentExpansionFactor); if (mbusParams.containerCoreMemory != null) builder.containerCoreMemory(mbusParams.containerCoreMemory); } if (getDocproc() != null) getDocproc().getConfig(builder); builder.transport_events_before_wakeup(transport_events_before_wakeup); builder.numconnectionspertarget(transport_connections_per_target); } @Override public void getConfig(MetricsProxyApiConfig.Builder builder) { builder.metricsPort(MetricsProxyContainer.BASEPORT) .metricsApiPath(ApplicationMetricsHandler.METRICS_VALUES_PATH) .prometheusApiPath(ApplicationMetricsHandler.PROMETHEUS_VALUES_PATH); } @Override public void getConfig(QrStartConfig.Builder builder) { super.getConfig(builder); builder.jvm.verbosegc(true) .availableProcessors(0) .compressedClassSpaceSize(0) .minHeapsize(1536) .heapsize(1536); if (getMemoryPercentage().isPresent()) { builder.jvm.heapSizeAsPercentageOfPhysicalMemory(getMemoryPercentage().get()); } } @Override public void getConfig(ZookeeperServerConfig.Builder builder) { if (getParent() instanceof ConfigserverCluster) return; for (Container container : getContainers()) { ZookeeperServerConfig.Server.Builder serverBuilder = new ZookeeperServerConfig.Server.Builder(); 
serverBuilder.hostname(container.getHostName()) .id(container.index()) .joining( ! previousHosts.isEmpty() && ! previousHosts.contains(container.getHostName())) .retired(container.isRetired()); builder.server(serverBuilder); } builder.dynamicReconfiguration(true); } @Override public void getConfig(CuratorConfig.Builder builder) { super.getConfig(builder); if (getParent() instanceof ConfigserverCluster) return; builder.zookeeperSessionTimeoutSeconds(zookeeperSessionTimeoutSeconds); } public Optional<String> getTlsClientAuthority() { return tlsClientAuthority; } public void setMbusParams(MbusParams mbusParams) { this.mbusParams = mbusParams; } public void setMessageBusEnabled(boolean messageBusEnabled) { this.messageBusEnabled = messageBusEnabled; } public void setZookeeperSessionTimeoutSeconds(int timeoutSeconds) { this.zookeeperSessionTimeoutSeconds = timeoutSeconds; } protected boolean messageBusEnabled() { return messageBusEnabled; } public void addMbusServer(ComponentId chainId) { ComponentId serviceId = chainId.nestInNamespace(ComponentId.fromString("MbusServer")); addComponent( new Component<>(new ComponentModel(new BundleInstantiationSpecification( serviceId, ComponentSpecification.fromString(MbusServerProvider.class.getName()), null)))); } @Override public List<ApplicationClusterEndpoint> endpoints() { return endpointList; } @Override public String name() { return getName(); } public static class MbusParams { final Double maxConcurrentFactor; final Double documentExpansionFactor; final Integer containerCoreMemory; public MbusParams(Double maxConcurrentFactor, Double documentExpansionFactor, Integer containerCoreMemory) { this.maxConcurrentFactor = maxConcurrentFactor; this.documentExpansionFactor = documentExpansionFactor; this.containerCoreMemory = containerCoreMemory; } } }
We don't have any standard defined; as you mentioned, it's case by case. For this specific case, I was thinking something like (please review/consider if there is a better format) ```java final String id = "trackingId:" + System.nanoTime(); logger.info("Unable to acquire new session. {}", id, error); Mono.<Long>error(failure).publishOn(Schedulers.boundedElastic()).doOnError(e -> { logger.info("Emitting the error signal received for session acquire attempt. {}", id, error); }); ```
Mono<ServiceBusReceiveLink> getActiveLink() { if (this.receiveLink != null) { return Mono.just(this.receiveLink); } return Mono.defer(() -> createSessionReceiveLink() .flatMap(link -> link.getEndpointStates() .filter(e -> e == AmqpEndpointState.ACTIVE) .next() .switchIfEmpty(Mono.error(() -> new AmqpException(true, "Session receive link completed without being active", null))) .timeout(operationTimeout) .then(Mono.just(link)))) .retryWhen(Retry.from(retrySignals -> retrySignals.flatMap(signal -> { final Throwable failure = signal.failure(); LOGGER.atInfo() .addKeyValue(ENTITY_PATH_KEY, entityPath) .addKeyValue("attempt", signal.totalRetriesInARow()) .log("Error occurred while getting unnamed session.", failure); if (isDisposed.get()) { return Mono.<Long>error(new AmqpException(false, "SessionManager is already disposed.", failure, getErrorContext())); } else if (failure instanceof TimeoutException) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else if (failure instanceof AmqpException && ((AmqpException) failure).getErrorCondition() == AmqpErrorCondition.TIMEOUT_ERROR) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else { return Mono.<Long>error(failure).publishOn(Schedulers.boundedElastic()); } }))); }
return Mono.<Long>error(failure).publishOn(Schedulers.boundedElastic());
Mono<ServiceBusReceiveLink> getActiveLink() { if (this.receiveLink != null) { return Mono.just(this.receiveLink); } return Mono.defer(() -> createSessionReceiveLink() .flatMap(link -> link.getEndpointStates() .filter(e -> e == AmqpEndpointState.ACTIVE) .next() .switchIfEmpty(Mono.error(() -> new AmqpException(true, "Session receive link completed without being active", null))) .timeout(operationTimeout) .then(Mono.just(link)))) .retryWhen(Retry.from(retrySignals -> retrySignals.flatMap(signal -> { final Throwable failure = signal.failure(); LOGGER.atInfo() .addKeyValue(ENTITY_PATH_KEY, entityPath) .addKeyValue("attempt", signal.totalRetriesInARow()) .log("Error occurred while getting unnamed session.", failure); if (isDisposed.get()) { return Mono.<Long>error(new AmqpException(false, "SessionManager is already disposed.", failure, getErrorContext())); } else if (failure instanceof TimeoutException) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else if (failure instanceof AmqpException && ((AmqpException) failure).getErrorCondition() == AmqpErrorCondition.TIMEOUT_ERROR) { return Mono.delay(SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION); } else { final long id = System.nanoTime(); LOGGER.atInfo() .addKeyValue(TRACKING_ID_KEY, id) .log("Unable to acquire new session.", failure); return Mono.<Long>error(failure) .publishOn(Schedulers.boundedElastic()) .doOnError(e -> LOGGER.atInfo() .addKeyValue(TRACKING_ID_KEY, id) .log("Emitting the error signal received for session acquire attempt.", e) ); } }))); }
class ServiceBusSessionManager implements AutoCloseable { private static final Duration SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION = Duration.ofMinutes(1); private static final ClientLogger LOGGER = new ClientLogger(ServiceBusSessionManager.class); private final String entityPath; private final MessagingEntityType entityType; private final ReceiverOptions receiverOptions; private final ServiceBusReceiveLink receiveLink; private final ServiceBusConnectionProcessor connectionProcessor; private final Duration operationTimeout; private final MessageSerializer messageSerializer; private final String identifier; private final AtomicBoolean isDisposed = new AtomicBoolean(); private final AtomicBoolean isStarted = new AtomicBoolean(); private final List<Scheduler> schedulers; private final Deque<Scheduler> availableSchedulers = new ConcurrentLinkedDeque<>(); private final Duration maxSessionLockRenewDuration; /** * SessionId to receiver mapping. */ private final ConcurrentHashMap<String, ServiceBusSessionReceiver> sessionReceivers = new ConcurrentHashMap<>(); private final EmitterProcessor<Flux<ServiceBusMessageContext>> processor; private final FluxSink<Flux<ServiceBusMessageContext>> sessionReceiveSink; private volatile Flux<ServiceBusMessageContext> receiveFlux; ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, ServiceBusReceiveLink receiveLink, String identifier) { this.entityPath = entityPath; this.entityType = entityType; this.receiverOptions = receiverOptions; this.connectionProcessor = connectionProcessor; this.operationTimeout = connectionProcessor.getRetryOptions().getTryTimeout(); this.messageSerializer = messageSerializer; this.maxSessionLockRenewDuration = receiverOptions.getMaxLockRenewDuration(); this.identifier = identifier; final int numberOfSchedulers = receiverOptions.isRollingSessionReceiver() ? 
receiverOptions.getMaxConcurrentSessions() : 1; final List<Scheduler> schedulerList = IntStream.range(0, numberOfSchedulers) .mapToObj(index -> Schedulers.newBoundedElastic(DEFAULT_BOUNDED_ELASTIC_SIZE, DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "receiver-" + index)) .collect(Collectors.toList()); this.schedulers = Collections.unmodifiableList(schedulerList); this.availableSchedulers.addAll(this.schedulers); this.processor = EmitterProcessor.create(numberOfSchedulers, false); this.sessionReceiveSink = processor.sink(); this.receiveLink = receiveLink; } ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, String identifier) { this(entityPath, entityType, connectionProcessor, messageSerializer, receiverOptions, null, identifier); } /** * Gets the link name with the matching {@code sessionId}. * * @param sessionId Session id to get link name for. * * @return The name of the link, or {@code null} if there is no open link with that {@code sessionId}. */ String getLinkName(String sessionId) { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); return receiver != null ? receiver.getLinkName() : null; } /** * Gets the identifier of the instance of {@link ServiceBusSessionManager}. * * @return The identifier that can identify the instance of {@link ServiceBusSessionManager}. */ public String getIdentifier() { return this.identifier; } /** * Gets the state of a session given its identifier. * * @param sessionId Identifier of session to get. * * @return The session state or an empty Mono if there is no state set for the session. * @throws IllegalStateException if the receiver is a non-session receiver. 
*/ Mono<byte[]> getSessionState(String sessionId) { return validateParameter(sessionId, "sessionId", "getSessionState").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.getSessionState(sessionId, associatedLinkName); })); } /** * Gets a stream of messages from different sessions. * * @return A Flux of messages merged from different sessions. */ Flux<ServiceBusMessageContext> receive() { if (!isStarted.getAndSet(true)) { this.sessionReceiveSink.onRequest(this::onSessionRequest); if (!receiverOptions.isRollingSessionReceiver()) { receiveFlux = getSession(schedulers.get(0), false); } else { receiveFlux = Flux.merge(processor, receiverOptions.getMaxConcurrentSessions()); } } return receiveFlux; } /** * Renews the session lock. * * @param sessionId Identifier of session to get. * * @return The next expiration time for the session lock. * @throws IllegalStateException if the receiver is a non-session receiver. */ Mono<OffsetDateTime> renewSessionLock(String sessionId) { return validateParameter(sessionId, "sessionId", "renewSessionLock").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.renewSessionLock(sessionId, associatedLinkName).handle((offsetDateTime, sink) -> { if (receiver != null) { receiver.setSessionLockedUntil(offsetDateTime); } sink.next(offsetDateTime); }); })); } /** * Tries to update the message disposition on a session aware receive link. * * @return {@code true} if the {@code lockToken} was updated on receive link. {@code false} otherwise. This means * there isn't an open link with that {@code sessionId}. 
*/ Mono<Boolean> updateDisposition(String lockToken, String sessionId, DispositionStatus dispositionStatus, Map<String, Object> propertiesToModify, String deadLetterReason, String deadLetterDescription, ServiceBusTransactionContext transactionContext) { final String operation = "updateDisposition"; return Mono.when( validateParameter(lockToken, "lockToken", operation), validateParameter(lockToken, "lockToken", operation), validateParameter(sessionId, "'sessionId'", operation)).then( Mono.defer(() -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); if (receiver == null || !receiver.containsLockToken(lockToken)) { return Mono.just(false); } final DeliveryState deliveryState = MessageUtils.getDeliveryState(dispositionStatus, deadLetterReason, deadLetterDescription, propertiesToModify, transactionContext); return receiver.updateDisposition(lockToken, deliveryState).thenReturn(true); })); } @Override public void close() { if (isDisposed.getAndSet(true)) { return; } final List<Mono<Void>> closeables = sessionReceivers.values().stream() .map(receiver -> receiver.closeAsync()) .collect(Collectors.toList()); Mono.when(closeables).block(operationTimeout); sessionReceiveSink.complete(); for (Scheduler scheduler : schedulers) { scheduler.dispose(); } } private AmqpErrorContext getErrorContext() { return new SessionErrorContext(connectionProcessor.getFullyQualifiedNamespace(), entityPath); } /** * Creates an session receive link. * * @return A Mono that completes with an session receive link. */ private Mono<ServiceBusReceiveLink> createSessionReceiveLink() { final String sessionId = receiverOptions.getSessionId(); final String linkName = (sessionId != null) ? sessionId : StringUtil.getRandomString("session-"); return connectionProcessor .flatMap(connection -> { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType, identifier, sessionId); }); } /** * Gets an active unnamed session link. 
* * @return A Mono that completes when an unnamed session becomes available. * @throws AmqpException if the session manager is already disposed. */ /** * Gets the next available unnamed session with the given receive options and publishes its contents on the given * {@code scheduler}. * * @param scheduler Scheduler to coordinate received methods on. * @param disposeOnIdle true to dispose receiver when it idles; false otherwise. * @return A Mono that completes with an unnamed session receiver. */ private Flux<ServiceBusMessageContext> getSession(Scheduler scheduler, boolean disposeOnIdle) { return getActiveLink().flatMap(link -> link.getSessionId() .map(sessionId -> sessionReceivers.compute(sessionId, (key, existing) -> { if (existing != null) { return existing; } return new ServiceBusSessionReceiver(link, messageSerializer, connectionProcessor.getRetryOptions(), receiverOptions.getPrefetchCount(), disposeOnIdle, scheduler, this::renewSessionLock, maxSessionLockRenewDuration); }))) .flatMapMany(sessionReceiver -> sessionReceiver.receive().doFinally(signalType -> { LOGGER.atVerbose() .addKeyValue(SESSION_ID_KEY, sessionReceiver.getSessionId()) .log("Closing session receiver."); availableSchedulers.push(scheduler); sessionReceivers.remove(sessionReceiver.getSessionId()); sessionReceiver.closeAsync().subscribe(); if (receiverOptions.isRollingSessionReceiver()) { onSessionRequest(1L); } })); } private Mono<ServiceBusManagementNode> getManagementNode() { return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType)); } /** * Emits a new unnamed active session when it becomes available. * * @param request Number of unnamed active sessions to emit. */ private void onSessionRequest(long request) { if (isDisposed.get()) { LOGGER.info("Session manager is disposed. 
Not emitting more unnamed sessions."); return; } LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("Requested unnamed sessions."); for (int i = 0; i < request; i++) { final Scheduler scheduler = availableSchedulers.poll(); if (scheduler == null) { if (request != Long.MAX_VALUE) { LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("There are no available schedulers to fetch."); } return; } Flux<ServiceBusMessageContext> session = getSession(scheduler, true); sessionReceiveSink.next(session); } } private <T> Mono<Void> validateParameter(T parameter, String parameterName, String operation) { if (isDisposed.get()) { return monoError(LOGGER, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, operation))); } else if (parameter == null) { return monoError(LOGGER, new NullPointerException(String.format("'%s' cannot be null.", parameterName))); } else if ((parameter instanceof String) && (((String) parameter).isEmpty())) { return monoError(LOGGER, new IllegalArgumentException(String.format("'%s' cannot be an empty string.", parameterName))); } else { return Mono.empty(); } } }
class ServiceBusSessionManager implements AutoCloseable { private static final Duration SLEEP_DURATION_ON_ACCEPT_SESSION_EXCEPTION = Duration.ofMinutes(1); private static final String TRACKING_ID_KEY = "trackingId"; private static final ClientLogger LOGGER = new ClientLogger(ServiceBusSessionManager.class); private final String entityPath; private final MessagingEntityType entityType; private final ReceiverOptions receiverOptions; private final ServiceBusReceiveLink receiveLink; private final ServiceBusConnectionProcessor connectionProcessor; private final Duration operationTimeout; private final MessageSerializer messageSerializer; private final String identifier; private final AtomicBoolean isDisposed = new AtomicBoolean(); private final AtomicBoolean isStarted = new AtomicBoolean(); private final List<Scheduler> schedulers; private final Deque<Scheduler> availableSchedulers = new ConcurrentLinkedDeque<>(); private final Duration maxSessionLockRenewDuration; /** * SessionId to receiver mapping. 
*/ private final ConcurrentHashMap<String, ServiceBusSessionReceiver> sessionReceivers = new ConcurrentHashMap<>(); private final EmitterProcessor<Flux<ServiceBusMessageContext>> processor; private final FluxSink<Flux<ServiceBusMessageContext>> sessionReceiveSink; private volatile Flux<ServiceBusMessageContext> receiveFlux; ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, ServiceBusReceiveLink receiveLink, String identifier) { this.entityPath = entityPath; this.entityType = entityType; this.receiverOptions = receiverOptions; this.connectionProcessor = connectionProcessor; this.operationTimeout = connectionProcessor.getRetryOptions().getTryTimeout(); this.messageSerializer = messageSerializer; this.maxSessionLockRenewDuration = receiverOptions.getMaxLockRenewDuration(); this.identifier = identifier; final int numberOfSchedulers = receiverOptions.isRollingSessionReceiver() ? receiverOptions.getMaxConcurrentSessions() : 1; final List<Scheduler> schedulerList = IntStream.range(0, numberOfSchedulers) .mapToObj(index -> Schedulers.newBoundedElastic(DEFAULT_BOUNDED_ELASTIC_SIZE, DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "receiver-" + index)) .collect(Collectors.toList()); this.schedulers = Collections.unmodifiableList(schedulerList); this.availableSchedulers.addAll(this.schedulers); this.processor = EmitterProcessor.create(numberOfSchedulers, false); this.sessionReceiveSink = processor.sink(); this.receiveLink = receiveLink; } ServiceBusSessionManager(String entityPath, MessagingEntityType entityType, ServiceBusConnectionProcessor connectionProcessor, MessageSerializer messageSerializer, ReceiverOptions receiverOptions, String identifier) { this(entityPath, entityType, connectionProcessor, messageSerializer, receiverOptions, null, identifier); } /** * Gets the link name with the matching {@code sessionId}. 
* * @param sessionId Session id to get link name for. * * @return The name of the link, or {@code null} if there is no open link with that {@code sessionId}. */ String getLinkName(String sessionId) { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); return receiver != null ? receiver.getLinkName() : null; } /** * Gets the identifier of the instance of {@link ServiceBusSessionManager}. * * @return The identifier that can identify the instance of {@link ServiceBusSessionManager}. */ public String getIdentifier() { return this.identifier; } /** * Gets the state of a session given its identifier. * * @param sessionId Identifier of session to get. * * @return The session state or an empty Mono if there is no state set for the session. * @throws IllegalStateException if the receiver is a non-session receiver. */ Mono<byte[]> getSessionState(String sessionId) { return validateParameter(sessionId, "sessionId", "getSessionState").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.getSessionState(sessionId, associatedLinkName); })); } /** * Gets a stream of messages from different sessions. * * @return A Flux of messages merged from different sessions. */ Flux<ServiceBusMessageContext> receive() { if (!isStarted.getAndSet(true)) { this.sessionReceiveSink.onRequest(this::onSessionRequest); if (!receiverOptions.isRollingSessionReceiver()) { receiveFlux = getSession(schedulers.get(0), false); } else { receiveFlux = Flux.merge(processor, receiverOptions.getMaxConcurrentSessions()); } } return receiveFlux; } /** * Renews the session lock. * * @param sessionId Identifier of session to get. * * @return The next expiration time for the session lock. * @throws IllegalStateException if the receiver is a non-session receiver. 
*/ Mono<OffsetDateTime> renewSessionLock(String sessionId) { return validateParameter(sessionId, "sessionId", "renewSessionLock").then( getManagementNode().flatMap(channel -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); final String associatedLinkName = receiver != null ? receiver.getLinkName() : null; return channel.renewSessionLock(sessionId, associatedLinkName).handle((offsetDateTime, sink) -> { if (receiver != null) { receiver.setSessionLockedUntil(offsetDateTime); } sink.next(offsetDateTime); }); })); } /** * Tries to update the message disposition on a session aware receive link. * * @return {@code true} if the {@code lockToken} was updated on receive link. {@code false} otherwise. This means * there isn't an open link with that {@code sessionId}. */ Mono<Boolean> updateDisposition(String lockToken, String sessionId, DispositionStatus dispositionStatus, Map<String, Object> propertiesToModify, String deadLetterReason, String deadLetterDescription, ServiceBusTransactionContext transactionContext) { final String operation = "updateDisposition"; return Mono.when( validateParameter(lockToken, "lockToken", operation), validateParameter(lockToken, "lockToken", operation), validateParameter(sessionId, "'sessionId'", operation)).then( Mono.defer(() -> { final ServiceBusSessionReceiver receiver = sessionReceivers.get(sessionId); if (receiver == null || !receiver.containsLockToken(lockToken)) { return Mono.just(false); } final DeliveryState deliveryState = MessageUtils.getDeliveryState(dispositionStatus, deadLetterReason, deadLetterDescription, propertiesToModify, transactionContext); return receiver.updateDisposition(lockToken, deliveryState).thenReturn(true); })); } @Override public void close() { if (isDisposed.getAndSet(true)) { return; } final List<Mono<Void>> closeables = sessionReceivers.values().stream() .map(receiver -> receiver.closeAsync()) .collect(Collectors.toList()); Mono.when(closeables).block(operationTimeout); 
sessionReceiveSink.complete(); for (Scheduler scheduler : schedulers) { scheduler.dispose(); } } private AmqpErrorContext getErrorContext() { return new SessionErrorContext(connectionProcessor.getFullyQualifiedNamespace(), entityPath); } /** * Creates an session receive link. * * @return A Mono that completes with an session receive link. */ private Mono<ServiceBusReceiveLink> createSessionReceiveLink() { final String sessionId = receiverOptions.getSessionId(); final String linkName = (sessionId != null) ? sessionId : StringUtil.getRandomString("session-"); return connectionProcessor .flatMap(connection -> { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType, identifier, sessionId); }); } /** * Gets an active unnamed session link. * * @return A Mono that completes when an unnamed session becomes available. * @throws AmqpException if the session manager is already disposed. */ /** * Gets the next available unnamed session with the given receive options and publishes its contents on the given * {@code scheduler}. * * @param scheduler Scheduler to coordinate received methods on. * @param disposeOnIdle true to dispose receiver when it idles; false otherwise. * @return A Mono that completes with an unnamed session receiver. 
*/ private Flux<ServiceBusMessageContext> getSession(Scheduler scheduler, boolean disposeOnIdle) { return getActiveLink().flatMap(link -> link.getSessionId() .map(sessionId -> sessionReceivers.compute(sessionId, (key, existing) -> { if (existing != null) { return existing; } return new ServiceBusSessionReceiver(link, messageSerializer, connectionProcessor.getRetryOptions(), receiverOptions.getPrefetchCount(), disposeOnIdle, scheduler, this::renewSessionLock, maxSessionLockRenewDuration); }))) .flatMapMany(sessionReceiver -> sessionReceiver.receive().doFinally(signalType -> { LOGGER.atVerbose() .addKeyValue(SESSION_ID_KEY, sessionReceiver.getSessionId()) .log("Closing session receiver."); availableSchedulers.push(scheduler); sessionReceivers.remove(sessionReceiver.getSessionId()); sessionReceiver.closeAsync().subscribe(); if (receiverOptions.isRollingSessionReceiver()) { onSessionRequest(1L); } })); } private Mono<ServiceBusManagementNode> getManagementNode() { return connectionProcessor.flatMap(connection -> connection.getManagementNode(entityPath, entityType)); } /** * Emits a new unnamed active session when it becomes available. * * @param request Number of unnamed active sessions to emit. */ private void onSessionRequest(long request) { if (isDisposed.get()) { LOGGER.info("Session manager is disposed. 
Not emitting more unnamed sessions."); return; } LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("Requested unnamed sessions."); for (int i = 0; i < request; i++) { final Scheduler scheduler = availableSchedulers.poll(); if (scheduler == null) { if (request != Long.MAX_VALUE) { LOGGER.atVerbose() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log("There are no available schedulers to fetch."); } return; } Flux<ServiceBusMessageContext> session = getSession(scheduler, true); sessionReceiveSink.next(session); } } private <T> Mono<Void> validateParameter(T parameter, String parameterName, String operation) { if (isDisposed.get()) { return monoError(LOGGER, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, operation))); } else if (parameter == null) { return monoError(LOGGER, new NullPointerException(String.format("'%s' cannot be null.", parameterName))); } else if ((parameter instanceof String) && (((String) parameter).isEmpty())) { return monoError(LOGGER, new IllegalArgumentException(String.format("'%s' cannot be an empty string.", parameterName))); } else { return Mono.empty(); } } }
Precicely, I wanted to use the same exact keys as Spring Boot does for the benefit of users who might want to migrate.
InfoBuildTimeValuesBuildItem buildInfo(CurateOutcomeBuildItem curateOutcomeBuildItem, InfoBuildTimeConfig config) { ApplicationModel applicationModel = curateOutcomeBuildItem.getApplicationModel(); ResolvedDependency appArtifact = applicationModel.getAppArtifact(); Map<String, Object> buildData = new LinkedHashMap<>(); buildData.put("group", appArtifact.getGroupId()); buildData.put("artifact", appArtifact.getArtifactId()); buildData.put("version", appArtifact.getVersion()); buildData.put("time", ISO_OFFSET_DATE_TIME.format(OffsetDateTime.now())); return new InfoBuildTimeValuesBuildItem("build", finalBuildData(buildData, config.build())); }
buildData.put("artifact", appArtifact.getArtifactId());
InfoBuildTimeValuesBuildItem buildInfo(CurateOutcomeBuildItem curateOutcomeBuildItem, InfoBuildTimeConfig config) { ApplicationModel applicationModel = curateOutcomeBuildItem.getApplicationModel(); ResolvedDependency appArtifact = applicationModel.getAppArtifact(); Map<String, Object> buildData = new LinkedHashMap<>(); buildData.put("group", appArtifact.getGroupId()); buildData.put("artifact", appArtifact.getArtifactId()); buildData.put("version", appArtifact.getVersion()); buildData.put("time", ISO_OFFSET_DATE_TIME.format(OffsetDateTime.now())); return new InfoBuildTimeValuesBuildItem("build", finalBuildData(buildData, config.build())); }
class InfoProcessor { private static final Logger log = Logger.getLogger(InfoProcessor.class); @BuildStep(onlyIf = GitInInfoEndpointEnabled.class) InfoBuildTimeValuesBuildItem gitInfo(CurateOutcomeBuildItem curateOutcomeBuildItem, OutputTargetBuildItem outputTargetBuildItem) { File projectRoot = highestKnownProjectDirectory(curateOutcomeBuildItem, outputTargetBuildItem); if (projectRoot == null) { log.debug("Unable to determine project directory"); return null; } RepositoryBuilder repositoryBuilder = new RepositoryBuilder().findGitDir(projectRoot); if (repositoryBuilder.getGitDir() == null) { log.debug("Project is not checked in to git"); return null; } try (Repository repository = repositoryBuilder.build()) { RevCommit latestCommit = new Git(repository).log().setMaxCount(1).call().iterator().next(); PersonIdent authorIdent = latestCommit.getAuthorIdent(); Date authorDate = authorIdent.getWhen(); TimeZone authorTimeZone = authorIdent.getTimeZone(); Map<String, Object> data = new LinkedHashMap<>(); data.put("branch", repository.getBranch()); data.put("commit", Map.of( "id", latestCommit.getName(), "time", ISO_OFFSET_DATE_TIME.format( OffsetDateTime.ofInstant(authorDate.toInstant(), authorTimeZone.toZoneId())))); return new InfoBuildTimeValuesBuildItem("git", data); } catch (Exception e) { log.debug("Unable to determine git information", e); return null; } } private File highestKnownProjectDirectory(CurateOutcomeBuildItem curateOutcomeBuildItem, OutputTargetBuildItem outputTargetBuildItem) { ApplicationModel applicationModel = curateOutcomeBuildItem.getApplicationModel(); WorkspaceModule workspaceModule = applicationModel.getAppArtifact().getWorkspaceModule(); if (workspaceModule != null) { return workspaceModule.getModuleDir(); } return outputTargetBuildItem.getOutputDirectory().toFile(); } @BuildStep(onlyIf = BuildInInfoEndpointEnabled.class) private Map<String, Object> finalBuildData(Map<String, Object> buildData, InfoBuildTimeConfig.Build buildConfig) { if 
(buildConfig.additionalProperties().isEmpty()) { return buildData; } Map<String, Object> result = new LinkedHashMap<>(buildData); result.putAll(buildConfig.additionalProperties()); return result; } @BuildStep(onlyIf = OsInInfoEndpointEnabled.class) @Record(ExecutionTime.RUNTIME_INIT) InfoBuildTimeContributorBuildItem osInfo(InfoRecorder recorder) { return new InfoBuildTimeContributorBuildItem(recorder.osInfoContributor()); } @BuildStep(onlyIf = JavaInInfoEndpointEnabled.class) @Record(ExecutionTime.RUNTIME_INIT) InfoBuildTimeContributorBuildItem javaInfo(InfoRecorder recorder) { return new InfoBuildTimeContributorBuildItem(recorder.javaInfoContributor()); } @BuildStep(onlyIf = InfoEndpointEnabled.class) @Record(ExecutionTime.RUNTIME_INIT) RouteBuildItem defineRoute(InfoBuildTimeConfig buildTimeConfig, List<InfoBuildTimeValuesBuildItem> buildTimeValues, List<InfoBuildTimeContributorBuildItem> contributors, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, InfoRecorder recorder) { Map<String, Object> buildTimeInfo = buildTimeValues.stream().collect( Collectors.toMap(InfoBuildTimeValuesBuildItem::getName, InfoBuildTimeValuesBuildItem::getValue, (x, y) -> y, LinkedHashMap::new)); List<InfoContributor> infoContributors = contributors.stream() .map(InfoBuildTimeContributorBuildItem::getInfoContributor) .collect(Collectors.toList()); return nonApplicationRootPathBuildItem.routeBuilder() .management() .route(buildTimeConfig.path()) .routeConfigKey("quarkus.info.path") .handler(recorder.handler(buildTimeInfo, infoContributors)) .displayOnNotFoundPage() .blockingRoute() .build(); } }
class InfoProcessor { private static final Logger log = Logger.getLogger(InfoProcessor.class); @BuildStep(onlyIf = GitInInfoEndpointEnabled.class) InfoBuildTimeValuesBuildItem gitInfo(CurateOutcomeBuildItem curateOutcomeBuildItem, OutputTargetBuildItem outputTargetBuildItem) { File projectRoot = highestKnownProjectDirectory(curateOutcomeBuildItem, outputTargetBuildItem); if (projectRoot == null) { log.debug("Unable to determine project directory"); return null; } RepositoryBuilder repositoryBuilder = new RepositoryBuilder().findGitDir(projectRoot); if (repositoryBuilder.getGitDir() == null) { log.debug("Project is not checked in to git"); return null; } try (Repository repository = repositoryBuilder.build()) { RevCommit latestCommit = new Git(repository).log().setMaxCount(1).call().iterator().next(); PersonIdent authorIdent = latestCommit.getAuthorIdent(); Date authorDate = authorIdent.getWhen(); TimeZone authorTimeZone = authorIdent.getTimeZone(); Map<String, Object> data = new LinkedHashMap<>(); data.put("branch", repository.getBranch()); data.put("commit", Map.of( "id", latestCommit.getName(), "time", ISO_OFFSET_DATE_TIME.format( OffsetDateTime.ofInstant(authorDate.toInstant(), authorTimeZone.toZoneId())))); return new InfoBuildTimeValuesBuildItem("git", data); } catch (Exception e) { log.debug("Unable to determine git information", e); return null; } } private File highestKnownProjectDirectory(CurateOutcomeBuildItem curateOutcomeBuildItem, OutputTargetBuildItem outputTargetBuildItem) { ApplicationModel applicationModel = curateOutcomeBuildItem.getApplicationModel(); WorkspaceModule workspaceModule = applicationModel.getAppArtifact().getWorkspaceModule(); if (workspaceModule != null) { return workspaceModule.getModuleDir(); } return outputTargetBuildItem.getOutputDirectory().toFile(); } @BuildStep(onlyIf = BuildInInfoEndpointEnabled.class) private Map<String, Object> finalBuildData(Map<String, Object> buildData, InfoBuildTimeConfig.Build buildConfig) { if 
(buildConfig.additionalProperties().isEmpty()) { return buildData; } Map<String, Object> result = new LinkedHashMap<>(buildData); result.putAll(buildConfig.additionalProperties()); return result; } @BuildStep(onlyIf = OsInInfoEndpointEnabled.class) @Record(ExecutionTime.RUNTIME_INIT) InfoBuildTimeContributorBuildItem osInfo(InfoRecorder recorder) { return new InfoBuildTimeContributorBuildItem(recorder.osInfoContributor()); } @BuildStep(onlyIf = JavaInInfoEndpointEnabled.class) @Record(ExecutionTime.RUNTIME_INIT) InfoBuildTimeContributorBuildItem javaInfo(InfoRecorder recorder) { return new InfoBuildTimeContributorBuildItem(recorder.javaInfoContributor()); } @BuildStep(onlyIf = InfoEndpointEnabled.class) @Record(ExecutionTime.RUNTIME_INIT) RouteBuildItem defineRoute(InfoBuildTimeConfig buildTimeConfig, List<InfoBuildTimeValuesBuildItem> buildTimeValues, List<InfoBuildTimeContributorBuildItem> contributors, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, InfoRecorder recorder) { Map<String, Object> buildTimeInfo = buildTimeValues.stream().collect( Collectors.toMap(InfoBuildTimeValuesBuildItem::getName, InfoBuildTimeValuesBuildItem::getValue, (x, y) -> y, LinkedHashMap::new)); List<InfoContributor> infoContributors = contributors.stream() .map(InfoBuildTimeContributorBuildItem::getInfoContributor) .collect(Collectors.toList()); return nonApplicationRootPathBuildItem.routeBuilder() .management() .route(buildTimeConfig.path()) .routeConfigKey("quarkus.info.path") .handler(recorder.handler(buildTimeInfo, infoContributors)) .displayOnNotFoundPage() .blockingRoute() .build(); } }
```suggestion childAccessPaths.stream().distinct() ``` and add some comment?
private Optional<AccessPath> process(ScalarOperator scalarOperator, Deque<AccessPath> accessPaths) { List<Optional<AccessPath>> childAccessPaths = scalarOperator.getChildren().stream() .map(child -> process(child, accessPaths)) .collect(Collectors.toList()); if (!childAccessPaths.isEmpty() && childAccessPaths.stream().noneMatch(Optional::isPresent)) { return Optional.empty(); } Optional<AccessPath> currentPath = scalarOperator.accept(this, childAccessPaths); AccessPath path = currentPath.orElse(null); childAccessPaths.stream().filter(p -> p.isPresent() && p.get() != path) .map(Optional::get).forEach(accessPaths::add); return currentPath; }
childAccessPaths.stream().filter(p -> p.isPresent() && p.get() != path)
private Optional<AccessPath> process(ScalarOperator scalarOperator, Deque<AccessPath> accessPaths) { List<Optional<AccessPath>> childAccessPaths = scalarOperator.getChildren().stream() .map(child -> process(child, accessPaths)) .collect(Collectors.toList()); if (!childAccessPaths.isEmpty() && childAccessPaths.stream().noneMatch(Optional::isPresent)) { return Optional.empty(); } Optional<AccessPath> currentPath = scalarOperator.accept(this, childAccessPaths); AccessPath path = currentPath.orElse(null); childAccessPaths.stream().filter(p -> p.isPresent() && p.get() != path) .map(Optional::get).forEach(accessPaths::add); return currentPath; }
class Collector extends ScalarOperatorVisitor<Optional<AccessPath>, List<Optional<AccessPath>>> { @Override public Optional<AccessPath> visit(ScalarOperator scalarOperator, List<Optional<AccessPath>> childrenAccessPaths) { return Optional.empty(); } @Override public Optional<AccessPath> visitVariableReference(ColumnRefOperator variable, List<Optional<AccessPath>> childrenAccessPaths) { if (variable.getType().isComplexType()) { return Optional.of(new AccessPath(variable)); } return Optional.empty(); } @Override public Optional<AccessPath> visitSubfield(SubfieldOperator subfieldOperator, List<Optional<AccessPath>> childAccessPaths) { return childAccessPaths.get(0).map(parent -> parent.appendFieldNames(subfieldOperator.getFieldNames())); } @Override public Optional<AccessPath> visitCollectionElement(CollectionElementOperator collectionElementOp, List<Optional<AccessPath>> childrenAccessPaths) { Optional<AccessPath> parent = childrenAccessPaths.get(0); if (!parent.isPresent()) { return Optional.empty(); } if (!collectionElementOp.getChild(1).isConstant()) { return parent.map(p -> p.appendPath(ColumnAccessPath.PATH_PLACEHOLDER, TAccessPathType.ALL)); } else { return parent.map(p -> p.appendPath(ColumnAccessPath.PATH_PLACEHOLDER, TAccessPathType.INDEX)); } } @Override public Optional<AccessPath> visitCall(CallOperator call, List<Optional<AccessPath>> childrenAccessPaths) { if (!PruneSubfieldRule.SUPPORT_FUNCTIONS.contains(call.getFnName())) { return Optional.empty(); } if (call.getFnName().equals(FunctionSet.MAP_KEYS)) { return childrenAccessPaths.get(0) .map(p -> p.appendPath(ColumnAccessPath.PATH_PLACEHOLDER, TAccessPathType.KEY)); } else if (FunctionSet.MAP_SIZE.equals(call.getFnName()) || FunctionSet.CARDINALITY.equals(call.getFnName()) || FunctionSet.ARRAY_LENGTH.equals(call.getFnName())) { return childrenAccessPaths.get(0) .map(p -> p.appendPath(ColumnAccessPath.PATH_PLACEHOLDER, TAccessPathType.OFFSET)); } return Optional.empty(); } }
class Collector extends ScalarOperatorVisitor<Optional<AccessPath>, List<Optional<AccessPath>>> { @Override public Optional<AccessPath> visit(ScalarOperator scalarOperator, List<Optional<AccessPath>> childrenAccessPaths) { return Optional.empty(); } @Override public Optional<AccessPath> visitVariableReference(ColumnRefOperator variable, List<Optional<AccessPath>> childrenAccessPaths) { if (variable.getType().isComplexType()) { return Optional.of(new AccessPath(variable)); } return Optional.empty(); } @Override public Optional<AccessPath> visitSubfield(SubfieldOperator subfieldOperator, List<Optional<AccessPath>> childAccessPaths) { return childAccessPaths.get(0).map(parent -> parent.appendFieldNames(subfieldOperator.getFieldNames())); } @Override public Optional<AccessPath> visitCollectionElement(CollectionElementOperator collectionElementOp, List<Optional<AccessPath>> childrenAccessPaths) { Optional<AccessPath> parent = childrenAccessPaths.get(0); if (!parent.isPresent()) { return Optional.empty(); } if (!collectionElementOp.getChild(1).isConstant()) { return parent.map(p -> p.appendPath(ColumnAccessPath.PATH_PLACEHOLDER, TAccessPathType.ALL)); } else { return parent.map(p -> p.appendPath(ColumnAccessPath.PATH_PLACEHOLDER, TAccessPathType.INDEX)); } } @Override public Optional<AccessPath> visitCall(CallOperator call, List<Optional<AccessPath>> childrenAccessPaths) { if (!PruneSubfieldRule.SUPPORT_FUNCTIONS.contains(call.getFnName())) { return Optional.empty(); } if (call.getFnName().equals(FunctionSet.MAP_KEYS)) { return childrenAccessPaths.get(0) .map(p -> p.appendPath(ColumnAccessPath.PATH_PLACEHOLDER, TAccessPathType.KEY)); } else if (FunctionSet.MAP_SIZE.equals(call.getFnName()) || FunctionSet.CARDINALITY.equals(call.getFnName()) || FunctionSet.ARRAY_LENGTH.equals(call.getFnName())) { return childrenAccessPaths.get(0) .map(p -> p.appendPath(ColumnAccessPath.PATH_PLACEHOLDER, TAccessPathType.OFFSET)); } return Optional.empty(); } }
If you look at the comments you see this can not be used for anything machine readable. * If length is 4 or less the string will be truncated to length. * If length is longer than 4, it will be truncated at length-4 with " ..." added at the end. We have a lot of public methods that we have only added for our own convenience, and this looks like one of them. And the change we are making here will already change change something, that is just a theoretical difference. The only value it add over s.substring(), is the optional padding with "...".
public boolean annotate(StringFieldValue text) { if (text.getSpanTree(SpanTrees.LINGUISTICS) != null) return true; Tokenizer tokenizer = factory.getTokenizer(); String input = (text.getString().length() <= config.getMaxTokenizeLength()) ? text.getString() : Text.safeSubstring(text.getString(), config.getMaxTokenizeLength()); Iterable<Token> tokens = tokenizer.tokenize(input, config.getLanguage(), config.getStemMode(), config.getRemoveAccents()); TermOccurrences termOccurrences = new TermOccurrences(config.getMaxTermOccurrences()); SpanTree tree = new SpanTree(SpanTrees.LINGUISTICS); for (Token token : tokens) addAnnotationSpan(text.getString(), tree.spanList(), token, config.getStemMode(), termOccurrences); if (tree.numAnnotations() == 0) return false; text.setSpanTree(tree); return true; }
: Text.safeSubstring(text.getString(), config.getMaxTokenizeLength());
public boolean annotate(StringFieldValue text) { if (text.getSpanTree(SpanTrees.LINGUISTICS) != null) return true; Tokenizer tokenizer = factory.getTokenizer(); String input = (text.getString().length() <= config.getMaxTokenizeLength()) ? text.getString() : Text.substringByCodepoints(text.getString(), 0, config.getMaxTokenizeLength()); Iterable<Token> tokens = tokenizer.tokenize(input, config.getLanguage(), config.getStemMode(), config.getRemoveAccents()); TermOccurrences termOccurrences = new TermOccurrences(config.getMaxTermOccurrences()); SpanTree tree = new SpanTree(SpanTrees.LINGUISTICS); for (Token token : tokens) addAnnotationSpan(text.getString(), tree.spanList(), token, config.getStemMode(), termOccurrences); if (tree.numAnnotations() == 0) return false; text.setSpanTree(tree); return true; }
class TermOccurrences { final Map<String, Integer> termOccurrences = new HashMap<>(); final int maxOccurrences; public TermOccurrences(int maxOccurences) { this.maxOccurrences = maxOccurences; } boolean termCountBelowLimit(String term) { String lowerCasedTerm = toLowerCase(term); int occurrences = termOccurrences.getOrDefault(lowerCasedTerm, 0); if (occurrences >= maxOccurrences) return false; termOccurrences.put(lowerCasedTerm, occurrences + 1); return true; } }
class TermOccurrences { final Map<String, Integer> termOccurrences = new HashMap<>(); final int maxOccurrences; public TermOccurrences(int maxOccurences) { this.maxOccurrences = maxOccurences; } boolean termCountBelowLimit(String term) { String lowerCasedTerm = toLowerCase(term); int occurrences = termOccurrences.getOrDefault(lowerCasedTerm, 0); if (occurrences >= maxOccurrences) return false; termOccurrences.put(lowerCasedTerm, occurrences + 1); return true; } }
Why don't we have a static variable for this one?
public void validateUserGroupProperties() { if (this.sessionStateless) { if (allowedGroupsConfigured()) { LOGGER.warn("Group names are not supported if you set 'sessionSateless' to 'true'."); } } else if (!allowedGroupsConfigured()) { throw new IllegalArgumentException("One of the User Group Properties must be populated. " + "Please populate azure.activedirectory.user-group.allowed-groups"); } if (!DEFAULT_GROUP_RELATIONSHIP.equalsIgnoreCase(groupRelationship) && !"transitive".equalsIgnoreCase(groupRelationship)) { throw new IllegalArgumentException("Configuration 'azure.activedirectory.group-relationship' " + "should be 'direct' or 'transitive'."); } }
&& !"transitive".equalsIgnoreCase(groupRelationship)) {
public void validateUserGroupProperties() { if (this.sessionStateless) { if (allowedGroupsConfigured()) { LOGGER.warn("Group names are not supported if you set 'sessionSateless' to 'true'."); } } else if (!allowedGroupsConfigured()) { throw new IllegalArgumentException("One of the User Group Properties must be populated. " + "Please populate azure.activedirectory.user-group.allowed-groups"); } if (!GROUP_RELATIONSHIP_DIRECT.equalsIgnoreCase(userGroup.groupRelationship) && !GROUP_RELATIONSHIP_TRANSITIVE.equalsIgnoreCase(userGroup.groupRelationship)) { throw new IllegalArgumentException("Configuration 'azure.activedirectory.user-group.group-relationship' " + "should be 'direct' or 'transitive'."); } }
class UserGroupProperties { /** * Expected UserGroups that an authority will be granted to if found in the response from the MemeberOf Graph * API Call. */ private List<String> allowedGroups = new ArrayList<>(); /** * Key of the JSON Node to get from the Azure AD response object that will be checked to contain the {@code * azure.activedirectory.user-group.value} to signify that this node is a valid {@code UserGroup}. */ @NotEmpty private String key = "objectType"; /** * Value of the JSON Node identified by the {@code azure.activedirectory.user-group.key} to validate the JSON * Node is a UserGroup. */ @NotEmpty private String value = Membership.OBJECT_TYPE_GROUP; /** * Key of the JSON Node containing the Azure Object ID for the {@code UserGroup}. */ @NotEmpty private String objectIDKey = "objectId"; public List<String> getAllowedGroups() { return allowedGroups; } public void setAllowedGroups(List<String> allowedGroups) { this.allowedGroups = allowedGroups; } public String getKey() { return key; } public void setKey(String key) { this.key = key; } public String getValue() { return value; } public void setValue(String value) { this.value = value; } public String getObjectIDKey() { return objectIDKey; } public void setObjectIDKey(String objectIDKey) { this.objectIDKey = objectIDKey; } @Override public String toString() { return "UserGroupProperties{" + "allowedGroups=" + allowedGroups + ", key='" + key + '\'' + ", value='" + value + '\'' + ", objectIDKey='" + objectIDKey + '\'' + '}'; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } UserGroupProperties that = (UserGroupProperties) o; return Objects.equals(allowedGroups, that.allowedGroups) && Objects.equals(key, that.key) && Objects.equals(value, that.value) && Objects.equals(objectIDKey, that.objectIDKey); } @Override public int hashCode() { return Objects.hash(allowedGroups, key, value, objectIDKey); } }
class UserGroupProperties { /** * Expected UserGroups that an authority will be granted to if found in the response from the MemeberOf Graph * API Call. */ private List<String> allowedGroups = new ArrayList<>(); /** * Key of the JSON Node to get from the Azure AD response object that will be checked to contain the {@code * azure.activedirectory.user-group.value} to signify that this node is a valid {@code UserGroup}. */ @NotEmpty private String key = "objectType"; /** * Value of the JSON Node identified by the {@code azure.activedirectory.user-group.key} to validate the JSON * Node is a UserGroup. */ @NotEmpty private String value = Membership.OBJECT_TYPE_GROUP; /** * Key of the JSON Node containing the Azure Object ID for the {@code UserGroup}. */ @NotEmpty private String objectIDKey = "objectId"; /** * The way to obtain group relationship.<br/> * direct: the default value, get groups that the user is a direct member of;<br/> * transitive: Get groups that the user is a member of, and will also return all * groups the user is a nested member of; */ @NotEmpty private String groupRelationship = GROUP_RELATIONSHIP_DIRECT; public List<String> getAllowedGroups() { return allowedGroups; } public void setAllowedGroups(List<String> allowedGroups) { this.allowedGroups = allowedGroups; } public String getKey() { return key; } public void setKey(String key) { this.key = key; } public String getValue() { return value; } public void setValue(String value) { this.value = value; } public String getObjectIDKey() { return objectIDKey; } public void setObjectIDKey(String objectIDKey) { this.objectIDKey = objectIDKey; } public String getGroupRelationship() { return groupRelationship; } public void setGroupRelationship(String groupRelationship) { this.groupRelationship = groupRelationship; } @Override public String toString() { return "UserGroupProperties{" + "allowedGroups=" + allowedGroups + ", key='" + key + '\'' + ", value='" + value + '\'' + ", objectIDKey='" + objectIDKey + '\'' 
+ ", groupRelationship='" + groupRelationship + '\'' + '}'; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } UserGroupProperties that = (UserGroupProperties) o; return Objects.equals(allowedGroups, that.allowedGroups) && Objects.equals(key, that.key) && Objects.equals(value, that.value) && Objects.equals(objectIDKey, that.objectIDKey) && Objects.equals(groupRelationship, that.groupRelationship); } @Override public int hashCode() { return Objects.hash(allowedGroups, key, value, objectIDKey); } }
Add it into method Javadoc please
public SerializationProxy(Read read) { configuration = read.configuration; tableId = read.tableId; scan = read.scan; }
configuration = read.configuration;
public SerializationProxy(Read read) { configuration = read.configuration; tableId = read.tableId; scan = read.scan; }
class SerializationProxy implements Serializable { public SerializationProxy() {} private void writeObject(ObjectOutputStream out) throws IOException { SerializableCoder.of(SerializableConfiguration.class) .encode(new SerializableConfiguration(this.configuration), out); StringUtf8Coder.of().encode(this.tableId, out); ProtobufUtil.toScan(this.scan).writeDelimitedTo(out); } private void readObject(ObjectInputStream in) throws IOException { this.configuration = SerializableCoder.of(SerializableConfiguration.class).decode(in).get(); this.tableId = StringUtf8Coder.of().decode(in); this.scan = ProtobufUtil.toScan(ClientProtos.Scan.parseDelimitedFrom(in)); } Object readResolve() { return HBaseIO.read().withConfiguration(configuration).withTableId(tableId).withScan(scan); } private Configuration configuration; private String tableId; private Scan scan; }
class SerializationProxy implements Serializable { public SerializationProxy() {} private void writeObject(ObjectOutputStream out) throws IOException { SerializableCoder.of(SerializableConfiguration.class) .encode(new SerializableConfiguration(this.configuration), out); StringUtf8Coder.of().encode(this.tableId, out); ProtobufUtil.toScan(this.scan).writeDelimitedTo(out); } private void readObject(ObjectInputStream in) throws IOException { this.configuration = SerializableCoder.of(SerializableConfiguration.class).decode(in).get(); this.tableId = StringUtf8Coder.of().decode(in); this.scan = ProtobufUtil.toScan(ClientProtos.Scan.parseDelimitedFrom(in)); } Object readResolve() { return HBaseIO.read().withConfiguration(configuration).withTableId(tableId).withScan(scan); } private Configuration configuration; private String tableId; private Scan scan; }
I have added test cases for Integer, String.
public void queryItemsAggregate() { long startTime = Instant.now().getEpochSecond(); List<String> actualIds = new ArrayList<>(); EncryptionPojo properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); actualIds.add(properties.getId()); properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); actualIds.add(properties.getId()); properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); actualIds.add(properties.getId()); String query1 = String.format("Select value max(c._ts) from c"); CosmosQueryRequestOptions cosmosQueryRequestOptions1 = new CosmosQueryRequestOptions(); SqlQuerySpec querySpec1 = new SqlQuerySpec(query1); CosmosPagedFlux<JsonNode> feedResponseIterator1 = cosmosEncryptionAsyncContainer.queryItems(querySpec1, cosmosQueryRequestOptions1, JsonNode.class); List<JsonNode> feedResponse1 = feedResponseIterator1.byPage().blockFirst().getResults(); int timeStamp = feedResponse1.get(0).asInt(); long endTime = Instant.now().getEpochSecond(); assertThat(timeStamp).isGreaterThanOrEqualTo((int)startTime); assertThat(timeStamp).isLessThanOrEqualTo((int)endTime); assertThat(feedResponse1.size()).isEqualTo(1); String query2 = String.format("Select top 1 value count(c) from c order by c._ts"); CosmosQueryRequestOptions cosmosQueryRequestOptions2 = new CosmosQueryRequestOptions(); SqlQuerySpec querySpec2 = new SqlQuerySpec(query2); CosmosPagedFlux<EncryptionPojo> feedResponseIterator2 = cosmosEncryptionAsyncContainer.queryItems(querySpec2, cosmosQueryRequestOptions2, EncryptionPojo.class); List<EncryptionPojo> feedResponse2 = feedResponseIterator2.byPage().blockFirst().getResults(); 
assertThat(feedResponse2.size()).isEqualTo(1); }
CosmosPagedFlux<JsonNode> feedResponseIterator1 =
public void queryItemsAggregate() { long startTime = Instant.now().getEpochSecond(); List<String> actualIds = new ArrayList<>(); EncryptionPojo properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); actualIds.add(properties.getId()); properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); actualIds.add(properties.getId()); properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); actualIds.add(properties.getId()); String query1 = String.format("Select value max(c._ts) from c"); CosmosQueryRequestOptions cosmosQueryRequestOptions1 = new CosmosQueryRequestOptions(); SqlQuerySpec querySpec1 = new SqlQuerySpec(query1); CosmosPagedFlux<Integer> feedResponseIterator1 = cosmosEncryptionAsyncContainer.queryItems(querySpec1, cosmosQueryRequestOptions1, Integer.class); List<Integer> feedResponse1 = feedResponseIterator1.byPage().blockFirst().getResults(); int timeStamp = feedResponse1.get(0); long endTime = Instant.now().getEpochSecond(); assertThat(timeStamp).isGreaterThanOrEqualTo((int)startTime); assertThat(timeStamp).isLessThanOrEqualTo((int)endTime); assertThat(feedResponse1.size()).isEqualTo(1); String query2 = String.format("Select top 1 value count(c) from c order by c._ts"); CosmosQueryRequestOptions cosmosQueryRequestOptions2 = new CosmosQueryRequestOptions(); SqlQuerySpec querySpec2 = new SqlQuerySpec(query2); CosmosPagedFlux<Integer> feedResponseIterator2 = cosmosEncryptionAsyncContainer.queryItems(querySpec2, cosmosQueryRequestOptions2, Integer.class); List<Integer> feedResponse2 = feedResponseIterator2.byPage().blockFirst().getResults(); 
assertThat(feedResponse2.size()).isEqualTo(1); String query3 = String.format("Select value max(c.sensitiveString) from c"); CosmosQueryRequestOptions cosmosQueryRequestOptions3 = new CosmosQueryRequestOptions(); SqlQuerySpec querySpec3 = new SqlQuerySpec(query3); CosmosPagedFlux<String> feedResponseIterator3 = cosmosEncryptionAsyncContainer.queryItems(querySpec3, cosmosQueryRequestOptions3, String.class); List<String> feedResponse3 = feedResponseIterator3.byPage().blockFirst().getResults(); assertThat(feedResponse3.size()).isEqualTo(1); }
class EncryptionAsyncApiCrudTest extends TestSuiteBase { private CosmosAsyncClient client; private CosmosEncryptionAsyncClient cosmosEncryptionAsyncClient; private CosmosEncryptionAsyncContainer encryptionContainerWithIncompatiblePolicyVersion; CosmosEncryptionAsyncContainer cosmosEncryptionAsyncContainer; CosmosEncryptionAsyncDatabase cosmosEncryptionAsyncDatabase; @Factory(dataProvider = "clientBuilders") public EncryptionAsyncApiCrudTest(CosmosClientBuilder clientBuilder) { super(clientBuilder); } @BeforeClass(groups = {"encryption"}, timeOut = SETUP_TIMEOUT) public void before_CosmosItemTest() { assertThat(this.client).isNull(); this.client = getClientBuilder().buildAsyncClient(); TestEncryptionKeyStoreProvider encryptionKeyStoreProvider = new TestEncryptionKeyStoreProvider(); cosmosEncryptionAsyncClient = CosmosEncryptionAsyncClient.createCosmosEncryptionAsyncClient(this.client, encryptionKeyStoreProvider); cosmosEncryptionAsyncDatabase = getSharedEncryptionDatabase(cosmosEncryptionAsyncClient); cosmosEncryptionAsyncContainer = getSharedEncryptionContainer(cosmosEncryptionAsyncClient); ClientEncryptionPolicy clientEncryptionWithPolicyFormatVersion2 = new ClientEncryptionPolicy(getPaths()); ReflectionUtils.setPolicyFormatVersion(clientEncryptionWithPolicyFormatVersion2, 2); String containerId = UUID.randomUUID().toString(); CosmosContainerProperties properties = new CosmosContainerProperties(containerId, "/mypk"); properties.setClientEncryptionPolicy(clientEncryptionWithPolicyFormatVersion2); cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().createContainer(properties).block(); encryptionContainerWithIncompatiblePolicyVersion = cosmosEncryptionAsyncDatabase.getCosmosEncryptionAsyncContainer(containerId); } @AfterClass(groups = {"encryption"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true) public void afterClass() { assertThat(this.client).isNotNull(); this.client.close(); } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void 
createItemEncrypt_readItemDecrypt() { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); EncryptionPojo readItem = cosmosEncryptionAsyncContainer.readItem(properties.getId(), new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions(), EncryptionPojo.class).block().getItem(); validateResponse(properties, readItem); properties = getItem(UUID.randomUUID().toString()); String longString = ""; for (int i = 0; i < 10000; i++) { longString += "a"; } properties.setSensitiveString(longString); itemResponse = cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void createItemEncryptWithContentResponseOnWriteEnabledFalse() { CosmosItemRequestOptions requestOptions = new CosmosItemRequestOptions(); requestOptions.setContentResponseOnWriteEnabled(false); EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), requestOptions).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); assertThat(itemResponse.getItem()).isNull(); } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void upsertItem_readItem() { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = 
cosmosEncryptionAsyncContainer.upsertItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); EncryptionPojo readItem = cosmosEncryptionAsyncContainer.readItem(properties.getId(), new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions(), EncryptionPojo.class).block().getItem(); validateResponse(properties, readItem); } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void queryItems() { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); String query = String.format("SELECT * from c where c.id = '%s'", properties.getId()); CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions(); SqlQuerySpec querySpec = new SqlQuerySpec(query); CosmosPagedFlux<EncryptionPojo> feedResponseIterator = cosmosEncryptionAsyncContainer.queryItems(querySpec, cosmosQueryRequestOptions, EncryptionPojo.class); List<EncryptionPojo> feedResponse = feedResponseIterator.byPage().blockFirst().getResults(); assertThat(feedResponse.size()).isGreaterThanOrEqualTo(1); for (EncryptionPojo pojo : feedResponse) { if (pojo.getId().equals(properties.getId())) { validateResponse(pojo, responseItem); } } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void queryItemsOnEncryptedProperties() { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = 
cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); String query = String.format("SELECT * FROM c where c.sensitiveString = @sensitiveString and c.nonSensitive =" + " " + "@nonSensitive and c.sensitiveLong = @sensitiveLong"); SqlQuerySpec querySpec = new SqlQuerySpec(query); SqlParameter parameter1 = new SqlParameter("@nonSensitive", properties.getNonSensitive()); querySpec.getParameters().add(parameter1); SqlParameter parameter2 = new SqlParameter("@sensitiveString", properties.getSensitiveString()); SqlParameter parameter3 = new SqlParameter("@sensitiveLong", properties.getSensitiveLong()); SqlQuerySpecWithEncryption sqlQuerySpecWithEncryption = new SqlQuerySpecWithEncryption(querySpec); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveString", parameter2); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveLong", parameter3); CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions(); CosmosPagedFlux<EncryptionPojo> feedResponseIterator = cosmosEncryptionAsyncContainer.queryItemsOnEncryptedProperties(sqlQuerySpecWithEncryption, cosmosQueryRequestOptions, EncryptionPojo.class); List<EncryptionPojo> feedResponse = feedResponseIterator.byPage().blockFirst().getResults(); assertThat(feedResponse.size()).isGreaterThanOrEqualTo(1); for (EncryptionPojo pojo : feedResponse) { if (pojo.getId().equals(properties.getId())) { validateResponse(pojo, responseItem); } } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void queryItemsOnRandomizedEncryption() { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new 
CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); String query = String.format("SELECT * FROM c where c.sensitiveString = @sensitiveString and c.nonSensitive =" + " " + "@nonSensitive and c.sensitiveDouble = @sensitiveDouble"); SqlQuerySpec querySpec = new SqlQuerySpec(query); SqlParameter parameter1 = new SqlParameter("@nonSensitive", properties.getNonSensitive()); querySpec.getParameters().add(parameter1); SqlParameter parameter2 = new SqlParameter("@sensitiveString", properties.getSensitiveString()); SqlParameter parameter3 = new SqlParameter("@sensitiveDouble", properties.getSensitiveDouble()); SqlQuerySpecWithEncryption sqlQuerySpecWithEncryption = new SqlQuerySpecWithEncryption(querySpec); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveString", parameter2); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveDouble", parameter3); CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions(); CosmosPagedFlux<EncryptionPojo> feedResponseIterator = cosmosEncryptionAsyncContainer.queryItemsOnEncryptedProperties(sqlQuerySpecWithEncryption, cosmosQueryRequestOptions, EncryptionPojo.class); try { List<EncryptionPojo> feedResponse = feedResponseIterator.byPage().blockFirst().getResults(); fail("Query on randomized parameter should fail"); } catch (IllegalArgumentException ex) { assertThat(ex.getMessage()).contains("Path /sensitiveDouble cannot be used in the " + "query because of randomized encryption"); } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void queryItemsWithContinuationTokenAndPageSize() { List<String> actualIds = new ArrayList<>(); EncryptionPojo properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); 
actualIds.add(properties.getId()); properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); actualIds.add(properties.getId()); properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); actualIds.add(properties.getId()); String query = String.format("SELECT * from c where c.id in ('%s', '%s', '%s')", actualIds.get(0), actualIds.get(1), actualIds.get(2)); CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions(); String continuationToken = null; int pageSize = 1; int initialDocumentCount = 3; int finalDocumentCount = 0; CosmosPagedFlux<EncryptionPojo> feedResponseIterator = cosmosEncryptionAsyncContainer.queryItems(query, cosmosQueryRequestOptions, EncryptionPojo.class); do { Iterable<FeedResponse<EncryptionPojo>> feedResponseIterable = feedResponseIterator.byPage(continuationToken, 1).toIterable(); for (FeedResponse<EncryptionPojo> fr : feedResponseIterable) { int resultSize = fr.getResults().size(); assertThat(resultSize).isEqualTo(pageSize); finalDocumentCount += fr.getResults().size(); continuationToken = fr.getContinuationToken(); } } while (continuationToken != null); assertThat(finalDocumentCount).isEqualTo(initialDocumentCount); } @Ignore("Ignoring it temporarily because server always returning policyFormatVersion 0") @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void incompatiblePolicyFormatVersion() { try { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); encryptionContainerWithIncompatiblePolicyVersion.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); fail("encryptionContainerWithIncompatiblePolicyVersion crud operation should fail on client encryption " + "policy " + "fetch because of policy 
format version greater than 1"); } catch (UnsupportedOperationException ex) { assertThat(ex.getMessage()).isEqualTo("This version of the Encryption library cannot be used with this " + "container. Please upgrade to the latest version of the same."); } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void crudQueryStaleCache() { String databaseId = UUID.randomUUID().toString(); try { createNewDatabaseWithClientEncryptionKey(databaseId); CosmosAsyncClient asyncClient = getClientBuilder().buildAsyncClient(); EncryptionKeyStoreProvider encryptionKeyStoreProvider = new TestEncryptionKeyStoreProvider(); CosmosEncryptionAsyncClient cosmosEncryptionAsyncClient = CosmosEncryptionAsyncClient.createCosmosEncryptionAsyncClient(asyncClient, encryptionKeyStoreProvider); CosmosEncryptionAsyncDatabase cosmosEncryptionAsyncDatabase = cosmosEncryptionAsyncClient.getCosmosEncryptionAsyncDatabase(asyncClient.getDatabase(databaseId)); String containerId = UUID.randomUUID().toString(); ClientEncryptionPolicy clientEncryptionPolicy = new ClientEncryptionPolicy(getPaths()); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); CosmosEncryptionAsyncContainer encryptionAsyncContainerOriginal = cosmosEncryptionAsyncDatabase.getCosmosEncryptionAsyncContainer(containerId); EncryptionPojo encryptionPojo = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> createResponse = encryptionAsyncContainerOriginal.createItem(encryptionPojo, new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); validateResponse(encryptionPojo, createResponse.getItem()); String query = String.format("SELECT * from c where c.id = '%s'", encryptionPojo.getId()); SqlQuerySpec querySpec = new SqlQuerySpec(query); CosmosPagedFlux<EncryptionPojo> feedResponseIterator = encryptionAsyncContainerOriginal.queryItems(querySpec, null, EncryptionPojo.class); List<EncryptionPojo> feedResponse = 
feedResponseIterator.byPage().blockFirst().getResults(); EncryptionPojo readItem = encryptionAsyncContainerOriginal.readItem(encryptionPojo.getId(), new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions(), EncryptionPojo.class).block().getItem(); validateResponse(encryptionPojo, readItem); cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().delete().block(); createNewDatabaseWithClientEncryptionKey(databaseId); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); createResponse = encryptionAsyncContainerOriginal.createItem(encryptionPojo, new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); validateResponse(encryptionPojo, createResponse.getItem()); encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); ClientEncryptionPolicy policyWithOneEncryptionPolicy = new ClientEncryptionPolicy(getPathWithOneEncryptionField()); createEncryptionContainer(cosmosEncryptionAsyncDatabase, policyWithOneEncryptionPolicy, containerId); CosmosEncryptionAsyncContainer encryptionAsyncContainerNew = getNewEncryptionContainerProxyObject(cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().getId(), containerId); encryptionAsyncContainerNew.createItem(encryptionPojo, new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); EncryptionPojo pojoWithOneFieldEncrypted = encryptionAsyncContainerNew.getCosmosAsyncContainer().readItem(encryptionPojo.getId(), new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions(), EncryptionPojo.class).block().getItem(); validateResponseWithOneFieldEncryption(encryptionPojo, pojoWithOneFieldEncrypted); readItem = encryptionAsyncContainerOriginal.readItem(encryptionPojo.getId(), new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions(), EncryptionPojo.class).block().getItem(); validateResponse(encryptionPojo, readItem); 
encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); CosmosItemResponse<EncryptionPojo> upsertResponse = encryptionAsyncContainerOriginal.upsertItem(encryptionPojo, new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(upsertResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = upsertResponse.getItem(); validateResponse(encryptionPojo, responseItem); encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); encryptionAsyncContainerNew = getNewEncryptionContainerProxyObject(cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().getId(), containerId); encryptionAsyncContainerNew.createItem(encryptionPojo, new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); CosmosItemResponse<EncryptionPojo> replaceResponse = encryptionAsyncContainerOriginal.replaceItem(encryptionPojo, encryptionPojo.getId(), new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(upsertResponse.getRequestCharge()).isGreaterThan(0); responseItem = replaceResponse.getItem(); validateResponse(encryptionPojo, responseItem); encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); CosmosEncryptionAsyncContainer newEncryptionAsyncContainer = getNewEncryptionContainerProxyObject(cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().getId(), containerId); for (int i = 0; i < 10; i++) { EncryptionPojo pojo = getItem(UUID.randomUUID().toString()); newEncryptionAsyncContainer.createItem(pojo, new PartitionKey(pojo.getMypk()), new CosmosItemRequestOptions()).block(); } feedResponseIterator = 
encryptionAsyncContainerOriginal.queryItems("Select * from C", null, EncryptionPojo.class); String continuationToken = null; int pageSize = 3; int finalDocumentCount = 0; do { Iterable<FeedResponse<EncryptionPojo>> feedResponseIterable = feedResponseIterator.byPage(continuationToken, pageSize).toIterable(); for (FeedResponse<EncryptionPojo> fr : feedResponseIterable) { int resultSize = fr.getResults().size(); assertThat(resultSize).isLessThanOrEqualTo(pageSize); finalDocumentCount += fr.getResults().size(); continuationToken = fr.getContinuationToken(); } } while (continuationToken != null); assertThat(finalDocumentCount).isEqualTo(10); encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); newEncryptionAsyncContainer = getNewEncryptionContainerProxyObject(cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().getId(), containerId); EncryptionPojo encryptionPojoForQueryItemsOnEncryptedProperties = getItem(UUID.randomUUID().toString()); newEncryptionAsyncContainer.createItem(encryptionPojoForQueryItemsOnEncryptedProperties, new PartitionKey(encryptionPojoForQueryItemsOnEncryptedProperties.getMypk()), new CosmosItemRequestOptions()).block(); query = String.format("SELECT * FROM c where c.sensitiveString = @sensitiveString and c.nonSensitive =" + " " + "@nonSensitive and c.sensitiveLong = @sensitiveLong"); querySpec = new SqlQuerySpec(query); SqlParameter parameter1 = new SqlParameter("@nonSensitive", encryptionPojoForQueryItemsOnEncryptedProperties.getNonSensitive()); querySpec.getParameters().add(parameter1); SqlParameter parameter2 = new SqlParameter("@sensitiveString", encryptionPojoForQueryItemsOnEncryptedProperties.getSensitiveString()); SqlParameter parameter3 = new SqlParameter("@sensitiveLong", encryptionPojoForQueryItemsOnEncryptedProperties.getSensitiveLong()); SqlQuerySpecWithEncryption sqlQuerySpecWithEncryption = new 
SqlQuerySpecWithEncryption(querySpec); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveString", parameter2); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveLong", parameter3); feedResponseIterator = encryptionAsyncContainerOriginal.queryItemsOnEncryptedProperties(sqlQuerySpecWithEncryption, null, EncryptionPojo.class); feedResponse = feedResponseIterator.byPage().blockFirst().getResults(); assertThat(feedResponse.size()).isGreaterThanOrEqualTo(1); for (EncryptionPojo pojo : feedResponse) { if (pojo.getId().equals(encryptionPojoForQueryItemsOnEncryptedProperties.getId())) { validateResponse(encryptionPojoForQueryItemsOnEncryptedProperties, pojo); } } encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); String itemId= UUID.randomUUID().toString(); EncryptionPojo createPojo = getItem(itemId); CosmosBatch cosmosEncryptionBatch = CosmosBatch.createCosmosBatch(new PartitionKey(itemId)); cosmosEncryptionBatch.createItemOperation(createPojo); cosmosEncryptionBatch.readItemOperation(itemId); CosmosBatchResponse batchResponse = encryptionAsyncContainerOriginal.executeCosmosBatch(cosmosEncryptionBatch).block(); assertThat(batchResponse.getResults().size()).isEqualTo(2); validateResponse(createPojo, batchResponse.getResults().get(0).getItem(EncryptionPojo.class)); validateResponse(createPojo, batchResponse.getResults().get(1).getItem(EncryptionPojo.class)); } finally { try { this.client.getDatabase(databaseId).delete().block(); } catch(Exception ex) { } } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void invalidDataEncryptionKeyAlgorithm() { try { TestEncryptionKeyStoreProvider testEncryptionKeyStoreProvider = new TestEncryptionKeyStoreProvider(); EncryptionKeyWrapMetadata metadata = new EncryptionKeyWrapMetadata(testEncryptionKeyStoreProvider.getProviderName(), "key1", "tempmetadata1"); 
this.cosmosEncryptionAsyncDatabase.createClientEncryptionKey("key1", "InvalidAlgorithm", metadata).block(); fail("client encryption key create should fail on invalid algorithm"); } catch (IllegalArgumentException ex) { assertThat(ex.getMessage()).isEqualTo("Invalid Encryption Algorithm 'InvalidAlgorithm'"); } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void batchExecution() { String itemId= UUID.randomUUID().toString(); EncryptionPojo createPojo = getItem(itemId); EncryptionPojo replacePojo = getItem(itemId); replacePojo.setSensitiveString("ReplacedSensitiveString"); CosmosBatch cosmosEncryptionBatch = CosmosBatch.createCosmosBatch(new PartitionKey(itemId)); cosmosEncryptionBatch.createItemOperation(createPojo); cosmosEncryptionBatch.replaceItemOperation(itemId, replacePojo); cosmosEncryptionBatch.upsertItemOperation(createPojo); cosmosEncryptionBatch.readItemOperation(itemId); cosmosEncryptionBatch.deleteItemOperation(itemId); CosmosBatchResponse batchResponse = this.cosmosEncryptionAsyncContainer.executeCosmosBatch(cosmosEncryptionBatch).block(); assertThat(batchResponse.getResults().size()).isEqualTo(5); assertThat(batchResponse.getResults().get(0).getStatusCode()).isEqualTo(HttpResponseStatus.CREATED.code()); assertThat(batchResponse.getResults().get(1).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(2).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(3).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(4).getStatusCode()).isEqualTo(HttpResponseStatus.NO_CONTENT.code()); validateResponse(batchResponse.getResults().get(0).getItem(EncryptionPojo.class), createPojo); validateResponse(batchResponse.getResults().get(1).getItem(EncryptionPojo.class), replacePojo); validateResponse(batchResponse.getResults().get(2).getItem(EncryptionPojo.class), createPojo); 
validateResponse(batchResponse.getResults().get(3).getItem(EncryptionPojo.class), createPojo); } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void batchExecutionWithOptionsApi() { String itemId= UUID.randomUUID().toString(); EncryptionPojo createPojo = getItem(itemId); EncryptionPojo replacePojo = getItem(itemId); replacePojo.setSensitiveString("ReplacedSensitiveString"); CosmosBatch cosmosBatch = CosmosBatch.createCosmosBatch(new PartitionKey(itemId)); CosmosBatchItemRequestOptions cosmosBatchItemRequestOptions = new CosmosBatchItemRequestOptions(); cosmosBatch.createItemOperation(createPojo, cosmosBatchItemRequestOptions); cosmosBatch.replaceItemOperation(itemId, replacePojo,cosmosBatchItemRequestOptions); cosmosBatch.upsertItemOperation(createPojo, cosmosBatchItemRequestOptions); cosmosBatch.readItemOperation(itemId, cosmosBatchItemRequestOptions); cosmosBatch.deleteItemOperation(itemId, cosmosBatchItemRequestOptions); CosmosBatchResponse batchResponse = this.cosmosEncryptionAsyncContainer.executeCosmosBatch(cosmosBatch).block(); assertThat(batchResponse.getResults().size()).isEqualTo(5); assertThat(batchResponse.getResults().get(0).getStatusCode()).isEqualTo(HttpResponseStatus.CREATED.code()); assertThat(batchResponse.getResults().get(1).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(2).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(3).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(4).getStatusCode()).isEqualTo(HttpResponseStatus.NO_CONTENT.code()); validateResponse(batchResponse.getResults().get(0).getItem(EncryptionPojo.class), createPojo); validateResponse(batchResponse.getResults().get(1).getItem(EncryptionPojo.class), replacePojo); validateResponse(batchResponse.getResults().get(2).getItem(EncryptionPojo.class), createPojo); 
validateResponse(batchResponse.getResults().get(3).getItem(EncryptionPojo.class), createPojo); } static void validateResponseWithOneFieldEncryption(EncryptionPojo originalItem, EncryptionPojo result) { assertThat(result.getId()).isEqualTo(originalItem.getId()); assertThat(result.getNonSensitive()).isEqualTo(originalItem.getNonSensitive()); assertThat(result.getSensitiveString()).isNotEqualTo(originalItem.getSensitiveString()); assertThat(result.getSensitiveInt()).isEqualTo(originalItem.getSensitiveInt()); assertThat(result.getSensitiveFloat()).isEqualTo(originalItem.getSensitiveFloat()); assertThat(result.getSensitiveLong()).isEqualTo(originalItem.getSensitiveLong()); assertThat(result.getSensitiveDouble()).isEqualTo(originalItem.getSensitiveDouble()); assertThat(result.isSensitiveBoolean()).isEqualTo(originalItem.isSensitiveBoolean()); assertThat(result.getSensitiveIntArray()).isEqualTo(originalItem.getSensitiveIntArray()); assertThat(result.getSensitiveStringArray()).isEqualTo(originalItem.getSensitiveStringArray()); assertThat(result.getSensitiveString3DArray()).isEqualTo(originalItem.getSensitiveString3DArray()); } public static List<ClientEncryptionIncludedPath> getPathWithOneEncryptionField() { ClientEncryptionIncludedPath includedPath = new ClientEncryptionIncludedPath(); includedPath.setClientEncryptionKeyId("key1"); includedPath.setPath("/sensitiveString"); includedPath.setEncryptionType(CosmosEncryptionType.DETERMINISTIC); includedPath.setEncryptionAlgorithm(CosmosEncryptionAlgorithm.AEAD_AES_256_CBC_HMAC_SHA256); List<ClientEncryptionIncludedPath> paths = new ArrayList<>(); paths.add(includedPath); return paths; } private void createEncryptionContainer(CosmosEncryptionAsyncDatabase cosmosEncryptionAsyncDatabase, ClientEncryptionPolicy clientEncryptionPolicy, String containerId) { CosmosContainerProperties properties = new CosmosContainerProperties(containerId, "/mypk"); properties.setClientEncryptionPolicy(clientEncryptionPolicy); 
cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().createContainer(properties).block(); } private void createNewDatabaseWithClientEncryptionKey(String databaseId){ TestEncryptionKeyStoreProvider testEncryptionKeyStoreProvider = new TestEncryptionKeyStoreProvider(); EncryptionKeyWrapMetadata metadata1 = new EncryptionKeyWrapMetadata(testEncryptionKeyStoreProvider.getProviderName(), "key1", "tempmetadata1"); EncryptionKeyWrapMetadata metadata2 = new EncryptionKeyWrapMetadata(testEncryptionKeyStoreProvider.getProviderName(), "key2", "tempmetadata2"); cosmosEncryptionAsyncClient.getCosmosAsyncClient().createDatabase(databaseId).block(); CosmosEncryptionAsyncDatabase encryptionAsyncDatabase = cosmosEncryptionAsyncClient.getCosmosEncryptionAsyncDatabase(databaseId); encryptionAsyncDatabase.createClientEncryptionKey("key1", CosmosEncryptionAlgorithm.AEAD_AES_256_CBC_HMAC_SHA256, metadata1).block(); encryptionAsyncDatabase.createClientEncryptionKey("key2", CosmosEncryptionAlgorithm.AEAD_AES_256_CBC_HMAC_SHA256, metadata2).block(); } private CosmosEncryptionAsyncContainer getNewEncryptionContainerProxyObject(String databaseId, String containerId) { CosmosAsyncClient client = getClientBuilder().buildAsyncClient(); EncryptionKeyStoreProvider encryptionKeyStoreProvider = new TestEncryptionKeyStoreProvider(); CosmosEncryptionAsyncClient cosmosEncryptionAsyncClient = CosmosEncryptionAsyncClient.createCosmosEncryptionAsyncClient(client, encryptionKeyStoreProvider); CosmosEncryptionAsyncDatabase cosmosEncryptionAsyncDatabase = cosmosEncryptionAsyncClient.getCosmosEncryptionAsyncDatabase(client.getDatabase(databaseId)); CosmosEncryptionAsyncContainer cosmosEncryptionAsyncContainer = cosmosEncryptionAsyncDatabase.getCosmosEncryptionAsyncContainer(containerId); return cosmosEncryptionAsyncContainer; } }
class EncryptionAsyncApiCrudTest extends TestSuiteBase { private CosmosAsyncClient client; private CosmosEncryptionAsyncClient cosmosEncryptionAsyncClient; private CosmosEncryptionAsyncContainer encryptionContainerWithIncompatiblePolicyVersion; CosmosEncryptionAsyncContainer cosmosEncryptionAsyncContainer; CosmosEncryptionAsyncDatabase cosmosEncryptionAsyncDatabase; @Factory(dataProvider = "clientBuilders") public EncryptionAsyncApiCrudTest(CosmosClientBuilder clientBuilder) { super(clientBuilder); } @BeforeClass(groups = {"encryption"}, timeOut = SETUP_TIMEOUT) public void before_CosmosItemTest() { assertThat(this.client).isNull(); this.client = getClientBuilder().buildAsyncClient(); TestEncryptionKeyStoreProvider encryptionKeyStoreProvider = new TestEncryptionKeyStoreProvider(); cosmosEncryptionAsyncClient = CosmosEncryptionAsyncClient.createCosmosEncryptionAsyncClient(this.client, encryptionKeyStoreProvider); cosmosEncryptionAsyncDatabase = getSharedEncryptionDatabase(cosmosEncryptionAsyncClient); cosmosEncryptionAsyncContainer = getSharedEncryptionContainer(cosmosEncryptionAsyncClient); ClientEncryptionPolicy clientEncryptionWithPolicyFormatVersion2 = new ClientEncryptionPolicy(getPaths()); ReflectionUtils.setPolicyFormatVersion(clientEncryptionWithPolicyFormatVersion2, 2); String containerId = UUID.randomUUID().toString(); CosmosContainerProperties properties = new CosmosContainerProperties(containerId, "/mypk"); properties.setClientEncryptionPolicy(clientEncryptionWithPolicyFormatVersion2); cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().createContainer(properties).block(); encryptionContainerWithIncompatiblePolicyVersion = cosmosEncryptionAsyncDatabase.getCosmosEncryptionAsyncContainer(containerId); } @AfterClass(groups = {"encryption"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true) public void afterClass() { assertThat(this.client).isNotNull(); this.client.close(); } @Test(groups = {"encryption"}, priority = 1, timeOut = TIMEOUT) public void 
createItemEncrypt_readItemDecrypt() { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); EncryptionPojo readItem = cosmosEncryptionAsyncContainer.readItem(properties.getId(), new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions(), EncryptionPojo.class).block().getItem(); validateResponse(properties, readItem); properties = getItem(UUID.randomUUID().toString()); String longString = ""; for (int i = 0; i < 10000; i++) { longString += "a"; } properties.setSensitiveString(longString); itemResponse = cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void createItemEncryptWithContentResponseOnWriteEnabledFalse() { CosmosItemRequestOptions requestOptions = new CosmosItemRequestOptions(); requestOptions.setContentResponseOnWriteEnabled(false); EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), requestOptions).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); assertThat(itemResponse.getItem()).isNull(); } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void upsertItem_readItem() { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = 
cosmosEncryptionAsyncContainer.upsertItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); EncryptionPojo readItem = cosmosEncryptionAsyncContainer.readItem(properties.getId(), new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions(), EncryptionPojo.class).block().getItem(); validateResponse(properties, readItem); } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void queryItems() { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); String query = String.format("SELECT * from c where c.id = '%s'", properties.getId()); CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions(); SqlQuerySpec querySpec = new SqlQuerySpec(query); CosmosPagedFlux<EncryptionPojo> feedResponseIterator = cosmosEncryptionAsyncContainer.queryItems(querySpec, cosmosQueryRequestOptions, EncryptionPojo.class); List<EncryptionPojo> feedResponse = feedResponseIterator.byPage().blockFirst().getResults(); assertThat(feedResponse.size()).isGreaterThanOrEqualTo(1); for (EncryptionPojo pojo : feedResponse) { if (pojo.getId().equals(properties.getId())) { validateResponse(pojo, responseItem); } } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void queryItemsOnEncryptedProperties() { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = 
cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); String query = String.format("SELECT * FROM c where c.sensitiveString = @sensitiveString and c.nonSensitive =" + " " + "@nonSensitive and c.sensitiveLong = @sensitiveLong"); SqlQuerySpec querySpec = new SqlQuerySpec(query); SqlParameter parameter1 = new SqlParameter("@nonSensitive", properties.getNonSensitive()); querySpec.getParameters().add(parameter1); SqlParameter parameter2 = new SqlParameter("@sensitiveString", properties.getSensitiveString()); SqlParameter parameter3 = new SqlParameter("@sensitiveLong", properties.getSensitiveLong()); SqlQuerySpecWithEncryption sqlQuerySpecWithEncryption = new SqlQuerySpecWithEncryption(querySpec); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveString", parameter2); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveLong", parameter3); CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions(); CosmosPagedFlux<EncryptionPojo> feedResponseIterator = cosmosEncryptionAsyncContainer.queryItemsOnEncryptedProperties(sqlQuerySpecWithEncryption, cosmosQueryRequestOptions, EncryptionPojo.class); List<EncryptionPojo> feedResponse = feedResponseIterator.byPage().blockFirst().getResults(); assertThat(feedResponse.size()).isGreaterThanOrEqualTo(1); for (EncryptionPojo pojo : feedResponse) { if (pojo.getId().equals(properties.getId())) { validateResponse(pojo, responseItem); } } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void queryItemsOnRandomizedEncryption() { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> itemResponse = cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new 
CosmosItemRequestOptions()).block(); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = itemResponse.getItem(); validateResponse(properties, responseItem); String query = String.format("SELECT * FROM c where c.sensitiveString = @sensitiveString and c.nonSensitive =" + " " + "@nonSensitive and c.sensitiveDouble = @sensitiveDouble"); SqlQuerySpec querySpec = new SqlQuerySpec(query); SqlParameter parameter1 = new SqlParameter("@nonSensitive", properties.getNonSensitive()); querySpec.getParameters().add(parameter1); SqlParameter parameter2 = new SqlParameter("@sensitiveString", properties.getSensitiveString()); SqlParameter parameter3 = new SqlParameter("@sensitiveDouble", properties.getSensitiveDouble()); SqlQuerySpecWithEncryption sqlQuerySpecWithEncryption = new SqlQuerySpecWithEncryption(querySpec); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveString", parameter2); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveDouble", parameter3); CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions(); CosmosPagedFlux<EncryptionPojo> feedResponseIterator = cosmosEncryptionAsyncContainer.queryItemsOnEncryptedProperties(sqlQuerySpecWithEncryption, cosmosQueryRequestOptions, EncryptionPojo.class); try { List<EncryptionPojo> feedResponse = feedResponseIterator.byPage().blockFirst().getResults(); fail("Query on randomized parameter should fail"); } catch (IllegalArgumentException ex) { assertThat(ex.getMessage()).contains("Path /sensitiveDouble cannot be used in the " + "query because of randomized encryption"); } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void queryItemsWithContinuationTokenAndPageSize() { List<String> actualIds = new ArrayList<>(); EncryptionPojo properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); 
actualIds.add(properties.getId()); properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); actualIds.add(properties.getId()); properties = getItem(UUID.randomUUID().toString()); cosmosEncryptionAsyncContainer.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); actualIds.add(properties.getId()); String query = String.format("SELECT * from c where c.id in ('%s', '%s', '%s')", actualIds.get(0), actualIds.get(1), actualIds.get(2)); CosmosQueryRequestOptions cosmosQueryRequestOptions = new CosmosQueryRequestOptions(); String continuationToken = null; int pageSize = 1; int initialDocumentCount = 3; int finalDocumentCount = 0; CosmosPagedFlux<EncryptionPojo> feedResponseIterator = cosmosEncryptionAsyncContainer.queryItems(query, cosmosQueryRequestOptions, EncryptionPojo.class); do { Iterable<FeedResponse<EncryptionPojo>> feedResponseIterable = feedResponseIterator.byPage(continuationToken, 1).toIterable(); for (FeedResponse<EncryptionPojo> fr : feedResponseIterable) { int resultSize = fr.getResults().size(); assertThat(resultSize).isEqualTo(pageSize); finalDocumentCount += fr.getResults().size(); continuationToken = fr.getContinuationToken(); } } while (continuationToken != null); assertThat(finalDocumentCount).isEqualTo(initialDocumentCount); } @Ignore("Ignoring it temporarily because server always returning policyFormatVersion 0") @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void incompatiblePolicyFormatVersion() { try { EncryptionPojo properties = getItem(UUID.randomUUID().toString()); encryptionContainerWithIncompatiblePolicyVersion.createItem(properties, new PartitionKey(properties.getMypk()), new CosmosItemRequestOptions()).block(); fail("encryptionContainerWithIncompatiblePolicyVersion crud operation should fail on client encryption " + "policy " + "fetch because of policy 
format version greater than 1"); } catch (UnsupportedOperationException ex) { assertThat(ex.getMessage()).isEqualTo("This version of the Encryption library cannot be used with this " + "container. Please upgrade to the latest version of the same."); } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void crudQueryStaleCache() { String databaseId = UUID.randomUUID().toString(); try { createNewDatabaseWithClientEncryptionKey(databaseId); CosmosAsyncClient asyncClient = getClientBuilder().buildAsyncClient(); EncryptionKeyStoreProvider encryptionKeyStoreProvider = new TestEncryptionKeyStoreProvider(); CosmosEncryptionAsyncClient cosmosEncryptionAsyncClient = CosmosEncryptionAsyncClient.createCosmosEncryptionAsyncClient(asyncClient, encryptionKeyStoreProvider); CosmosEncryptionAsyncDatabase cosmosEncryptionAsyncDatabase = cosmosEncryptionAsyncClient.getCosmosEncryptionAsyncDatabase(asyncClient.getDatabase(databaseId)); String containerId = UUID.randomUUID().toString(); ClientEncryptionPolicy clientEncryptionPolicy = new ClientEncryptionPolicy(getPaths()); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); CosmosEncryptionAsyncContainer encryptionAsyncContainerOriginal = cosmosEncryptionAsyncDatabase.getCosmosEncryptionAsyncContainer(containerId); EncryptionPojo encryptionPojo = getItem(UUID.randomUUID().toString()); CosmosItemResponse<EncryptionPojo> createResponse = encryptionAsyncContainerOriginal.createItem(encryptionPojo, new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); validateResponse(encryptionPojo, createResponse.getItem()); String query = String.format("SELECT * from c where c.id = '%s'", encryptionPojo.getId()); SqlQuerySpec querySpec = new SqlQuerySpec(query); CosmosPagedFlux<EncryptionPojo> feedResponseIterator = encryptionAsyncContainerOriginal.queryItems(querySpec, null, EncryptionPojo.class); List<EncryptionPojo> feedResponse = 
feedResponseIterator.byPage().blockFirst().getResults(); EncryptionPojo readItem = encryptionAsyncContainerOriginal.readItem(encryptionPojo.getId(), new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions(), EncryptionPojo.class).block().getItem(); validateResponse(encryptionPojo, readItem); cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().delete().block(); createNewDatabaseWithClientEncryptionKey(databaseId); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); createResponse = encryptionAsyncContainerOriginal.createItem(encryptionPojo, new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); validateResponse(encryptionPojo, createResponse.getItem()); encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); ClientEncryptionPolicy policyWithOneEncryptionPolicy = new ClientEncryptionPolicy(getPathWithOneEncryptionField()); createEncryptionContainer(cosmosEncryptionAsyncDatabase, policyWithOneEncryptionPolicy, containerId); CosmosEncryptionAsyncContainer encryptionAsyncContainerNew = getNewEncryptionContainerProxyObject(cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().getId(), containerId); encryptionAsyncContainerNew.createItem(encryptionPojo, new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); EncryptionPojo pojoWithOneFieldEncrypted = encryptionAsyncContainerNew.getCosmosAsyncContainer().readItem(encryptionPojo.getId(), new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions(), EncryptionPojo.class).block().getItem(); validateResponseWithOneFieldEncryption(encryptionPojo, pojoWithOneFieldEncrypted); readItem = encryptionAsyncContainerOriginal.readItem(encryptionPojo.getId(), new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions(), EncryptionPojo.class).block().getItem(); validateResponse(encryptionPojo, readItem); 
encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); CosmosItemResponse<EncryptionPojo> upsertResponse = encryptionAsyncContainerOriginal.upsertItem(encryptionPojo, new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(upsertResponse.getRequestCharge()).isGreaterThan(0); EncryptionPojo responseItem = upsertResponse.getItem(); validateResponse(encryptionPojo, responseItem); encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); encryptionAsyncContainerNew = getNewEncryptionContainerProxyObject(cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().getId(), containerId); encryptionAsyncContainerNew.createItem(encryptionPojo, new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); CosmosItemResponse<EncryptionPojo> replaceResponse = encryptionAsyncContainerOriginal.replaceItem(encryptionPojo, encryptionPojo.getId(), new PartitionKey(encryptionPojo.getMypk()), new CosmosItemRequestOptions()).block(); assertThat(upsertResponse.getRequestCharge()).isGreaterThan(0); responseItem = replaceResponse.getItem(); validateResponse(encryptionPojo, responseItem); encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); CosmosEncryptionAsyncContainer newEncryptionAsyncContainer = getNewEncryptionContainerProxyObject(cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().getId(), containerId); for (int i = 0; i < 10; i++) { EncryptionPojo pojo = getItem(UUID.randomUUID().toString()); newEncryptionAsyncContainer.createItem(pojo, new PartitionKey(pojo.getMypk()), new CosmosItemRequestOptions()).block(); } feedResponseIterator = 
encryptionAsyncContainerOriginal.queryItems("Select * from C", null, EncryptionPojo.class); String continuationToken = null; int pageSize = 3; int finalDocumentCount = 0; do { Iterable<FeedResponse<EncryptionPojo>> feedResponseIterable = feedResponseIterator.byPage(continuationToken, pageSize).toIterable(); for (FeedResponse<EncryptionPojo> fr : feedResponseIterable) { int resultSize = fr.getResults().size(); assertThat(resultSize).isLessThanOrEqualTo(pageSize); finalDocumentCount += fr.getResults().size(); continuationToken = fr.getContinuationToken(); } } while (continuationToken != null); assertThat(finalDocumentCount).isEqualTo(10); encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); newEncryptionAsyncContainer = getNewEncryptionContainerProxyObject(cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().getId(), containerId); EncryptionPojo encryptionPojoForQueryItemsOnEncryptedProperties = getItem(UUID.randomUUID().toString()); newEncryptionAsyncContainer.createItem(encryptionPojoForQueryItemsOnEncryptedProperties, new PartitionKey(encryptionPojoForQueryItemsOnEncryptedProperties.getMypk()), new CosmosItemRequestOptions()).block(); query = String.format("SELECT * FROM c where c.sensitiveString = @sensitiveString and c.nonSensitive =" + " " + "@nonSensitive and c.sensitiveLong = @sensitiveLong"); querySpec = new SqlQuerySpec(query); SqlParameter parameter1 = new SqlParameter("@nonSensitive", encryptionPojoForQueryItemsOnEncryptedProperties.getNonSensitive()); querySpec.getParameters().add(parameter1); SqlParameter parameter2 = new SqlParameter("@sensitiveString", encryptionPojoForQueryItemsOnEncryptedProperties.getSensitiveString()); SqlParameter parameter3 = new SqlParameter("@sensitiveLong", encryptionPojoForQueryItemsOnEncryptedProperties.getSensitiveLong()); SqlQuerySpecWithEncryption sqlQuerySpecWithEncryption = new 
SqlQuerySpecWithEncryption(querySpec); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveString", parameter2); sqlQuerySpecWithEncryption.addEncryptionParameter("/sensitiveLong", parameter3); feedResponseIterator = encryptionAsyncContainerOriginal.queryItemsOnEncryptedProperties(sqlQuerySpecWithEncryption, null, EncryptionPojo.class); feedResponse = feedResponseIterator.byPage().blockFirst().getResults(); assertThat(feedResponse.size()).isGreaterThanOrEqualTo(1); for (EncryptionPojo pojo : feedResponse) { if (pojo.getId().equals(encryptionPojoForQueryItemsOnEncryptedProperties.getId())) { validateResponse(encryptionPojoForQueryItemsOnEncryptedProperties, pojo); } } encryptionAsyncContainerOriginal.getCosmosAsyncContainer().delete().block(); createEncryptionContainer(cosmosEncryptionAsyncDatabase, clientEncryptionPolicy, containerId); String itemId= UUID.randomUUID().toString(); EncryptionPojo createPojo = getItem(itemId); CosmosBatch cosmosEncryptionBatch = CosmosBatch.createCosmosBatch(new PartitionKey(itemId)); cosmosEncryptionBatch.createItemOperation(createPojo); cosmosEncryptionBatch.readItemOperation(itemId); CosmosBatchResponse batchResponse = encryptionAsyncContainerOriginal.executeCosmosBatch(cosmosEncryptionBatch).block(); assertThat(batchResponse.getResults().size()).isEqualTo(2); validateResponse(createPojo, batchResponse.getResults().get(0).getItem(EncryptionPojo.class)); validateResponse(createPojo, batchResponse.getResults().get(1).getItem(EncryptionPojo.class)); } finally { try { this.client.getDatabase(databaseId).delete().block(); } catch(Exception ex) { } } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void invalidDataEncryptionKeyAlgorithm() { try { TestEncryptionKeyStoreProvider testEncryptionKeyStoreProvider = new TestEncryptionKeyStoreProvider(); EncryptionKeyWrapMetadata metadata = new EncryptionKeyWrapMetadata(testEncryptionKeyStoreProvider.getProviderName(), "key1", "tempmetadata1"); 
this.cosmosEncryptionAsyncDatabase.createClientEncryptionKey("key1", "InvalidAlgorithm", metadata).block(); fail("client encryption key create should fail on invalid algorithm"); } catch (IllegalArgumentException ex) { assertThat(ex.getMessage()).isEqualTo("Invalid Encryption Algorithm 'InvalidAlgorithm'"); } } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void batchExecution() { String itemId= UUID.randomUUID().toString(); EncryptionPojo createPojo = getItem(itemId); EncryptionPojo replacePojo = getItem(itemId); replacePojo.setSensitiveString("ReplacedSensitiveString"); CosmosBatch cosmosEncryptionBatch = CosmosBatch.createCosmosBatch(new PartitionKey(itemId)); cosmosEncryptionBatch.createItemOperation(createPojo); cosmosEncryptionBatch.replaceItemOperation(itemId, replacePojo); cosmosEncryptionBatch.upsertItemOperation(createPojo); cosmosEncryptionBatch.readItemOperation(itemId); cosmosEncryptionBatch.deleteItemOperation(itemId); CosmosBatchResponse batchResponse = this.cosmosEncryptionAsyncContainer.executeCosmosBatch(cosmosEncryptionBatch).block(); assertThat(batchResponse.getResults().size()).isEqualTo(5); assertThat(batchResponse.getResults().get(0).getStatusCode()).isEqualTo(HttpResponseStatus.CREATED.code()); assertThat(batchResponse.getResults().get(1).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(2).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(3).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(4).getStatusCode()).isEqualTo(HttpResponseStatus.NO_CONTENT.code()); validateResponse(batchResponse.getResults().get(0).getItem(EncryptionPojo.class), createPojo); validateResponse(batchResponse.getResults().get(1).getItem(EncryptionPojo.class), replacePojo); validateResponse(batchResponse.getResults().get(2).getItem(EncryptionPojo.class), createPojo); 
validateResponse(batchResponse.getResults().get(3).getItem(EncryptionPojo.class), createPojo); } @Test(groups = {"encryption"}, timeOut = TIMEOUT) public void batchExecutionWithOptionsApi() { String itemId= UUID.randomUUID().toString(); EncryptionPojo createPojo = getItem(itemId); EncryptionPojo replacePojo = getItem(itemId); replacePojo.setSensitiveString("ReplacedSensitiveString"); CosmosBatch cosmosBatch = CosmosBatch.createCosmosBatch(new PartitionKey(itemId)); CosmosBatchItemRequestOptions cosmosBatchItemRequestOptions = new CosmosBatchItemRequestOptions(); cosmosBatch.createItemOperation(createPojo, cosmosBatchItemRequestOptions); cosmosBatch.replaceItemOperation(itemId, replacePojo,cosmosBatchItemRequestOptions); cosmosBatch.upsertItemOperation(createPojo, cosmosBatchItemRequestOptions); cosmosBatch.readItemOperation(itemId, cosmosBatchItemRequestOptions); cosmosBatch.deleteItemOperation(itemId, cosmosBatchItemRequestOptions); CosmosBatchResponse batchResponse = this.cosmosEncryptionAsyncContainer.executeCosmosBatch(cosmosBatch).block(); assertThat(batchResponse.getResults().size()).isEqualTo(5); assertThat(batchResponse.getResults().get(0).getStatusCode()).isEqualTo(HttpResponseStatus.CREATED.code()); assertThat(batchResponse.getResults().get(1).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(2).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(3).getStatusCode()).isEqualTo(HttpResponseStatus.OK.code()); assertThat(batchResponse.getResults().get(4).getStatusCode()).isEqualTo(HttpResponseStatus.NO_CONTENT.code()); validateResponse(batchResponse.getResults().get(0).getItem(EncryptionPojo.class), createPojo); validateResponse(batchResponse.getResults().get(1).getItem(EncryptionPojo.class), replacePojo); validateResponse(batchResponse.getResults().get(2).getItem(EncryptionPojo.class), createPojo); 
validateResponse(batchResponse.getResults().get(3).getItem(EncryptionPojo.class), createPojo); } static void validateResponseWithOneFieldEncryption(EncryptionPojo originalItem, EncryptionPojo result) { assertThat(result.getId()).isEqualTo(originalItem.getId()); assertThat(result.getNonSensitive()).isEqualTo(originalItem.getNonSensitive()); assertThat(result.getSensitiveString()).isNotEqualTo(originalItem.getSensitiveString()); assertThat(result.getSensitiveInt()).isEqualTo(originalItem.getSensitiveInt()); assertThat(result.getSensitiveFloat()).isEqualTo(originalItem.getSensitiveFloat()); assertThat(result.getSensitiveLong()).isEqualTo(originalItem.getSensitiveLong()); assertThat(result.getSensitiveDouble()).isEqualTo(originalItem.getSensitiveDouble()); assertThat(result.isSensitiveBoolean()).isEqualTo(originalItem.isSensitiveBoolean()); assertThat(result.getSensitiveIntArray()).isEqualTo(originalItem.getSensitiveIntArray()); assertThat(result.getSensitiveStringArray()).isEqualTo(originalItem.getSensitiveStringArray()); assertThat(result.getSensitiveString3DArray()).isEqualTo(originalItem.getSensitiveString3DArray()); } public static List<ClientEncryptionIncludedPath> getPathWithOneEncryptionField() { ClientEncryptionIncludedPath includedPath = new ClientEncryptionIncludedPath(); includedPath.setClientEncryptionKeyId("key1"); includedPath.setPath("/sensitiveString"); includedPath.setEncryptionType(CosmosEncryptionType.DETERMINISTIC); includedPath.setEncryptionAlgorithm(CosmosEncryptionAlgorithm.AEAD_AES_256_CBC_HMAC_SHA256); List<ClientEncryptionIncludedPath> paths = new ArrayList<>(); paths.add(includedPath); return paths; } private void createEncryptionContainer(CosmosEncryptionAsyncDatabase cosmosEncryptionAsyncDatabase, ClientEncryptionPolicy clientEncryptionPolicy, String containerId) { CosmosContainerProperties properties = new CosmosContainerProperties(containerId, "/mypk"); properties.setClientEncryptionPolicy(clientEncryptionPolicy); 
cosmosEncryptionAsyncDatabase.getCosmosAsyncDatabase().createContainer(properties).block(); } private void createNewDatabaseWithClientEncryptionKey(String databaseId){ TestEncryptionKeyStoreProvider testEncryptionKeyStoreProvider = new TestEncryptionKeyStoreProvider(); EncryptionKeyWrapMetadata metadata1 = new EncryptionKeyWrapMetadata(testEncryptionKeyStoreProvider.getProviderName(), "key1", "tempmetadata1"); EncryptionKeyWrapMetadata metadata2 = new EncryptionKeyWrapMetadata(testEncryptionKeyStoreProvider.getProviderName(), "key2", "tempmetadata2"); cosmosEncryptionAsyncClient.getCosmosAsyncClient().createDatabase(databaseId).block(); CosmosEncryptionAsyncDatabase encryptionAsyncDatabase = cosmosEncryptionAsyncClient.getCosmosEncryptionAsyncDatabase(databaseId); encryptionAsyncDatabase.createClientEncryptionKey("key1", CosmosEncryptionAlgorithm.AEAD_AES_256_CBC_HMAC_SHA256, metadata1).block(); encryptionAsyncDatabase.createClientEncryptionKey("key2", CosmosEncryptionAlgorithm.AEAD_AES_256_CBC_HMAC_SHA256, metadata2).block(); } private CosmosEncryptionAsyncContainer getNewEncryptionContainerProxyObject(String databaseId, String containerId) { CosmosAsyncClient client = getClientBuilder().buildAsyncClient(); EncryptionKeyStoreProvider encryptionKeyStoreProvider = new TestEncryptionKeyStoreProvider(); CosmosEncryptionAsyncClient cosmosEncryptionAsyncClient = CosmosEncryptionAsyncClient.createCosmosEncryptionAsyncClient(client, encryptionKeyStoreProvider); CosmosEncryptionAsyncDatabase cosmosEncryptionAsyncDatabase = cosmosEncryptionAsyncClient.getCosmosEncryptionAsyncDatabase(client.getDatabase(databaseId)); CosmosEncryptionAsyncContainer cosmosEncryptionAsyncContainer = cosmosEncryptionAsyncDatabase.getCosmosEncryptionAsyncContainer(containerId); return cosmosEncryptionAsyncContainer; } }
I think you can just do the same in your new Consumer and it should give the same result that you are after.
public Handler<RoutingContext> authenticationMechanismHandler(boolean proactiveAuthentication) { return new Handler<RoutingContext>() { volatile HttpAuthenticator authenticator; @Override public void handle(RoutingContext event) { if (authenticator == null) { authenticator = CDI.current().select(HttpAuthenticator.class).get(); } event.put(HttpAuthenticator.class.getName(), authenticator); event.put(QuarkusHttpUser.AUTH_FAILURE_HANDLER, new BiConsumer<RoutingContext, Throwable>() { private final DefaultAuthFailureHandlerEndStrategyExec endStrategyExec = new DefaultAuthFailureHandlerEndStrategyExec( event); @Override public void accept(RoutingContext routingContext, Throwable throwable) { if (endStrategyExec.preventRepeating()) { return; } throwable = extractRootCause(throwable); if (throwable instanceof AuthenticationFailedException) { final AuthenticationFailedException authenticationFailedException = (AuthenticationFailedException) throwable; authenticator.sendChallenge(event).subscribe().with(new Consumer<Boolean>() { @Override public void accept(Boolean aBoolean) { endStrategyExec.proceed(authenticationFailedException); } }, new Consumer<Throwable>() { @Override public void accept(Throwable throwable) { event.fail(throwable); } }); } else if (throwable instanceof AuthenticationCompletionException) { log.debug("Authentication has failed, returning HTTP status 401"); event.response().setStatusCode(401); event.response().end(); } else if (throwable instanceof AuthenticationRedirectException) { AuthenticationRedirectException redirectEx = (AuthenticationRedirectException) throwable; event.response().setStatusCode(redirectEx.getCode()); event.response().headers().set(HttpHeaders.LOCATION, redirectEx.getRedirectUri()); event.response().headers().set(HttpHeaders.CACHE_CONTROL, "no-store"); event.response().headers().set("Pragma", "no-cache"); event.response().end(); } else { event.fail(throwable); } } }); if (proactiveAuthentication) { Uni<SecurityIdentity> 
potentialUser = authenticator.attemptAuthentication(event).memoize().indefinitely(); potentialUser .subscribe().withSubscriber(new UniSubscriber<SecurityIdentity>() { @Override public void onSubscribe(UniSubscription subscription) { } @Override public void onItem(SecurityIdentity identity) { if (event.response().ended()) { return; } if (identity == null) { Uni<SecurityIdentity> anon = authenticator.getIdentityProviderManager() .authenticate(AnonymousAuthenticationRequest.INSTANCE); anon.subscribe().withSubscriber(new UniSubscriber<SecurityIdentity>() { @Override public void onSubscribe(UniSubscription subscription) { } @Override public void onItem(SecurityIdentity item) { event.put(QuarkusHttpUser.DEFERRED_IDENTITY_KEY, anon); event.setUser(new QuarkusHttpUser(item)); event.next(); } @Override public void onFailure(Throwable failure) { BiConsumer<RoutingContext, Throwable> handler = event .get(QuarkusHttpUser.AUTH_FAILURE_HANDLER); if (handler != null) { handler.accept(event, failure); } } }); } else { event.setUser(new QuarkusHttpUser(identity)); event.put(QuarkusHttpUser.DEFERRED_IDENTITY_KEY, potentialUser); event.next(); } } @Override public void onFailure(Throwable failure) { BiConsumer<RoutingContext, Throwable> handler = event .get(QuarkusHttpUser.AUTH_FAILURE_HANDLER); if (handler != null) { handler.accept(event, failure); } } }); } else { Uni<SecurityIdentity> lazyUser = Uni .createFrom() .nullItem() .flatMap(n -> authenticator.attemptAuthentication(event)) .memoize() .indefinitely() .flatMap(new Function<SecurityIdentity, Uni<? extends SecurityIdentity>>() { @Override public Uni<? 
extends SecurityIdentity> apply(SecurityIdentity securityIdentity) { if (securityIdentity == null) { return authenticator.getIdentityProviderManager() .authenticate(AnonymousAuthenticationRequest.INSTANCE); } return Uni.createFrom().item(securityIdentity); } }).onTermination().invoke(new Functions.TriConsumer<SecurityIdentity, Throwable, Boolean>() { @Override public void accept(SecurityIdentity identity, Throwable throwable, Boolean aBoolean) { if (identity != null) { if (identity != null) { event.setUser(new QuarkusHttpUser(identity)); } } else if (throwable != null) { BiConsumer<RoutingContext, Throwable> handler = event .get(QuarkusHttpUser.AUTH_FAILURE_HANDLER); if (handler != null) { handler.accept(event, throwable); } } } }).memoize().indefinitely(); event.put(QuarkusHttpUser.DEFERRED_IDENTITY_KEY, lazyUser); event.next(); } } }; }
event.put(QuarkusHttpUser.AUTH_FAILURE_HANDLER, new BiConsumer<RoutingContext, Throwable>() {
public Handler<RoutingContext> authenticationMechanismHandler(boolean proactiveAuthentication) { return new Handler<RoutingContext>() { volatile HttpAuthenticator authenticator; @Override public void handle(RoutingContext event) { if (authenticator == null) { authenticator = CDI.current().select(HttpAuthenticator.class).get(); } event.put(HttpAuthenticator.class.getName(), authenticator); event.put(QuarkusHttpUser.AUTH_FAILURE_HANDLER, new DefaultAuthFailureHandler()); if (proactiveAuthentication) { Uni<SecurityIdentity> potentialUser = authenticator.attemptAuthentication(event).memoize().indefinitely(); potentialUser .subscribe().withSubscriber(new UniSubscriber<SecurityIdentity>() { @Override public void onSubscribe(UniSubscription subscription) { } @Override public void onItem(SecurityIdentity identity) { if (event.response().ended()) { return; } if (identity == null) { Uni<SecurityIdentity> anon = authenticator.getIdentityProviderManager() .authenticate(AnonymousAuthenticationRequest.INSTANCE); anon.subscribe().withSubscriber(new UniSubscriber<SecurityIdentity>() { @Override public void onSubscribe(UniSubscription subscription) { } @Override public void onItem(SecurityIdentity item) { event.put(QuarkusHttpUser.DEFERRED_IDENTITY_KEY, anon); event.setUser(new QuarkusHttpUser(item)); event.next(); } @Override public void onFailure(Throwable failure) { BiConsumer<RoutingContext, Throwable> handler = event .get(QuarkusHttpUser.AUTH_FAILURE_HANDLER); if (handler != null) { handler.accept(event, failure); } } }); } else { event.setUser(new QuarkusHttpUser(identity)); event.put(QuarkusHttpUser.DEFERRED_IDENTITY_KEY, potentialUser); event.next(); } } @Override public void onFailure(Throwable failure) { BiConsumer<RoutingContext, Throwable> handler = event .get(QuarkusHttpUser.AUTH_FAILURE_HANDLER); if (handler != null) { handler.accept(event, failure); } } }); } else { Uni<SecurityIdentity> lazyUser = Uni .createFrom() .nullItem() .flatMap(n -> 
authenticator.attemptAuthentication(event)) .memoize() .indefinitely() .flatMap(new Function<SecurityIdentity, Uni<? extends SecurityIdentity>>() { @Override public Uni<? extends SecurityIdentity> apply(SecurityIdentity securityIdentity) { if (securityIdentity == null) { return authenticator.getIdentityProviderManager() .authenticate(AnonymousAuthenticationRequest.INSTANCE); } return Uni.createFrom().item(securityIdentity); } }).onTermination().invoke(new Functions.TriConsumer<SecurityIdentity, Throwable, Boolean>() { @Override public void accept(SecurityIdentity identity, Throwable throwable, Boolean aBoolean) { if (identity != null) { if (identity != null) { event.setUser(new QuarkusHttpUser(identity)); } } else if (throwable != null) { BiConsumer<RoutingContext, Throwable> handler = event .get(QuarkusHttpUser.AUTH_FAILURE_HANDLER); if (handler != null) { handler.accept(event, throwable); } } } }).memoize().indefinitely(); event.put(QuarkusHttpUser.DEFERRED_IDENTITY_KEY, lazyUser); event.next(); } } }; }
class HttpSecurityRecorder { private static final Logger log = Logger.getLogger(HttpSecurityRecorder.class); protected static final Consumer<Throwable> NOOP_CALLBACK = new Consumer<Throwable>() { @Override public void accept(Throwable throwable) { } }; final RuntimeValue<HttpConfiguration> httpConfiguration; final HttpBuildTimeConfig buildTimeConfig; static volatile String encryptionKey; public HttpSecurityRecorder(RuntimeValue<HttpConfiguration> httpConfiguration, HttpBuildTimeConfig buildTimeConfig) { this.httpConfiguration = httpConfiguration; this.buildTimeConfig = buildTimeConfig; } private Throwable extractRootCause(Throwable throwable) { while ((throwable instanceof CompletionException && throwable.getCause() != null) || (throwable instanceof CompositeException)) { if (throwable instanceof CompositeException) { throwable = ((CompositeException) throwable).getCauses().get(0); } else { throwable = throwable.getCause(); } } return throwable; } public Handler<RoutingContext> permissionCheckHandler() { return new Handler<RoutingContext>() { volatile HttpAuthorizer authorizer; @Override public void handle(RoutingContext event) { if (authorizer == null) { authorizer = CDI.current().select(HttpAuthorizer.class).get(); } authorizer.checkPermission(event); } }; } public BeanContainerListener initPermissions(HttpBuildTimeConfig permissions, Map<String, Supplier<HttpSecurityPolicy>> policies) { return new BeanContainerListener() { @Override public void created(BeanContainer container) { container.instance(PathMatchingHttpSecurityPolicy.class).init(permissions, policies); } }; } public Supplier<FormAuthenticationMechanism> setupFormAuth() { return new Supplier<FormAuthenticationMechanism>() { @Override public FormAuthenticationMechanism get() { String key; if (!httpConfiguration.getValue().encryptionKey.isPresent()) { if (encryptionKey != null) { key = encryptionKey; } else { byte[] data = new byte[32]; new SecureRandom().nextBytes(data); key = encryptionKey = 
Base64.getEncoder().encodeToString(data); log.warn("Encryption key was not specified for persistent FORM auth, using temporary key " + key); } } else { key = httpConfiguration.getValue().encryptionKey.get(); } FormAuthConfig form = buildTimeConfig.auth.form; PersistentLoginManager loginManager = new PersistentLoginManager(key, form.cookieName, form.timeout.toMillis(), form.newCookieInterval.toMillis(), form.httpOnlyCookie); String loginPage = form.loginPage.startsWith("/") ? form.loginPage : "/" + form.loginPage; String errorPage = form.errorPage.startsWith("/") ? form.errorPage : "/" + form.errorPage; String landingPage = form.landingPage.startsWith("/") ? form.landingPage : "/" + form.landingPage; String postLocation = form.postLocation.startsWith("/") ? form.postLocation : "/" + form.postLocation; String usernameParameter = form.usernameParameter; String passwordParameter = form.passwordParameter; String locationCookie = form.locationCookie; boolean redirectAfterLogin = form.redirectAfterLogin; return new FormAuthenticationMechanism(loginPage, postLocation, usernameParameter, passwordParameter, errorPage, landingPage, redirectAfterLogin, locationCookie, loginManager); } }; } public Supplier<?> setupBasicAuth(HttpBuildTimeConfig buildTimeConfig) { return new Supplier<BasicAuthenticationMechanism>() { @Override public BasicAuthenticationMechanism get() { return new BasicAuthenticationMechanism(buildTimeConfig.auth.realm.orElse(null), buildTimeConfig.auth.form.enabled); } }; } public Supplier<?> setupMtlsClientAuth() { return new Supplier<MtlsAuthenticationMechanism>() { @Override public MtlsAuthenticationMechanism get() { return new MtlsAuthenticationMechanism(); } }; } /** * This handler resolves the identity, and will be mapped to the post location. Otherwise, * for lazy auth the post will not be evaluated if there is no security rule for the post location. 
*/ public Handler<RoutingContext> formAuthPostHandler() { return new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { Uni<SecurityIdentity> user = event.get(QuarkusHttpUser.DEFERRED_IDENTITY_KEY); user.subscribe().withSubscriber(new UniSubscriber<SecurityIdentity>() { @Override public void onSubscribe(UniSubscription uniSubscription) { } @Override public void onItem(SecurityIdentity securityIdentity) { event.next(); } @Override public void onFailure(Throwable throwable) { event.fail(throwable); } }); } }; } /** * Determines what should happen once the default auth failure handler finished. */ private static final class DefaultAuthFailureHandlerEndStrategyExec { /** * Signal {@link AuthenticationFailedException} has already been handled here. */ private static final String AUTH_FAILED_EX_HANDLED = QuarkusHttpUser.AUTH_FAILURE_HANDLER + ".handled.auth-failed-ex"; private final RoutingContext event; private DefaultAuthFailureHandlerEndStrategyExec(RoutingContext event) { this.event = event; } boolean iterateNext() { if (event.failed()) { event.next(); return true; } return false; } void proceed(AuthenticationFailedException authenticationFailedException) { if (!event.response().ended()) { switch (getEndStrategy()) { case END: event.end(); break; case NEXT_FAILURE_HANDLER: if (!iterateNext()) { event.put(AUTH_FAILED_EX_HANDLED, true); event.fail(authenticationFailedException); } break; } } } boolean preventRepeating() { if (event.response().ended()) { return true; } else if (getEndStrategy() == NEXT_FAILURE_HANDLER && event.failure() instanceof AuthenticationFailedException && parseBoolean(event.get(AUTH_FAILED_EX_HANDLED))) { event.next(); return true; } return false; } private DefaultAuthFailureHandlerEndStrategy getEndStrategy() { return event.get(DEFAULT_AUTH_FAILURE_HANDLER_END_STRATEGY, END); } } }
class HttpSecurityRecorder { private static final Logger log = Logger.getLogger(HttpSecurityRecorder.class); protected static final Consumer<Throwable> NOOP_CALLBACK = new Consumer<Throwable>() { @Override public void accept(Throwable throwable) { } }; final RuntimeValue<HttpConfiguration> httpConfiguration; final HttpBuildTimeConfig buildTimeConfig; static volatile String encryptionKey; public HttpSecurityRecorder(RuntimeValue<HttpConfiguration> httpConfiguration, HttpBuildTimeConfig buildTimeConfig) { this.httpConfiguration = httpConfiguration; this.buildTimeConfig = buildTimeConfig; } public Handler<RoutingContext> permissionCheckHandler() { return new Handler<RoutingContext>() { volatile HttpAuthorizer authorizer; @Override public void handle(RoutingContext event) { if (authorizer == null) { authorizer = CDI.current().select(HttpAuthorizer.class).get(); } authorizer.checkPermission(event); } }; } public BeanContainerListener initPermissions(HttpBuildTimeConfig permissions, Map<String, Supplier<HttpSecurityPolicy>> policies) { return new BeanContainerListener() { @Override public void created(BeanContainer container) { container.instance(PathMatchingHttpSecurityPolicy.class).init(permissions, policies); } }; } public Supplier<FormAuthenticationMechanism> setupFormAuth() { return new Supplier<FormAuthenticationMechanism>() { @Override public FormAuthenticationMechanism get() { String key; if (!httpConfiguration.getValue().encryptionKey.isPresent()) { if (encryptionKey != null) { key = encryptionKey; } else { byte[] data = new byte[32]; new SecureRandom().nextBytes(data); key = encryptionKey = Base64.getEncoder().encodeToString(data); log.warn("Encryption key was not specified for persistent FORM auth, using temporary key " + key); } } else { key = httpConfiguration.getValue().encryptionKey.get(); } FormAuthConfig form = buildTimeConfig.auth.form; PersistentLoginManager loginManager = new PersistentLoginManager(key, form.cookieName, form.timeout.toMillis(), 
form.newCookieInterval.toMillis(), form.httpOnlyCookie); String loginPage = form.loginPage.startsWith("/") ? form.loginPage : "/" + form.loginPage; String errorPage = form.errorPage.startsWith("/") ? form.errorPage : "/" + form.errorPage; String landingPage = form.landingPage.startsWith("/") ? form.landingPage : "/" + form.landingPage; String postLocation = form.postLocation.startsWith("/") ? form.postLocation : "/" + form.postLocation; String usernameParameter = form.usernameParameter; String passwordParameter = form.passwordParameter; String locationCookie = form.locationCookie; boolean redirectAfterLogin = form.redirectAfterLogin; return new FormAuthenticationMechanism(loginPage, postLocation, usernameParameter, passwordParameter, errorPage, landingPage, redirectAfterLogin, locationCookie, loginManager); } }; } public Supplier<?> setupBasicAuth(HttpBuildTimeConfig buildTimeConfig) { return new Supplier<BasicAuthenticationMechanism>() { @Override public BasicAuthenticationMechanism get() { return new BasicAuthenticationMechanism(buildTimeConfig.auth.realm.orElse(null), buildTimeConfig.auth.form.enabled); } }; } public Supplier<?> setupMtlsClientAuth() { return new Supplier<MtlsAuthenticationMechanism>() { @Override public MtlsAuthenticationMechanism get() { return new MtlsAuthenticationMechanism(); } }; } /** * This handler resolves the identity, and will be mapped to the post location. Otherwise, * for lazy auth the post will not be evaluated if there is no security rule for the post location. 
*/ public Handler<RoutingContext> formAuthPostHandler() { return new Handler<RoutingContext>() { @Override public void handle(RoutingContext event) { Uni<SecurityIdentity> user = event.get(QuarkusHttpUser.DEFERRED_IDENTITY_KEY); user.subscribe().withSubscriber(new UniSubscriber<SecurityIdentity>() { @Override public void onSubscribe(UniSubscription uniSubscription) { } @Override public void onItem(SecurityIdentity securityIdentity) { event.next(); } @Override public void onFailure(Throwable throwable) { event.fail(throwable); } }); } }; } public static final class DefaultAuthFailureHandler implements BiConsumer<RoutingContext, Throwable> { private DefaultAuthFailureHandler() { } @Override public void accept(RoutingContext event, Throwable throwable) { throwable = extractRootCause(throwable); if (throwable instanceof AuthenticationFailedException) { getAuthenticator(event).sendChallenge(event).subscribe().with(new Consumer<Boolean>() { @Override public void accept(Boolean aBoolean) { if (!event.response().ended()) { event.response().end(); } } }, new Consumer<Throwable>() { @Override public void accept(Throwable throwable) { event.fail(throwable); } }); } else if (throwable instanceof AuthenticationCompletionException) { log.debug("Authentication has failed, returning HTTP status 401"); event.response().setStatusCode(401); event.response().end(); } else if (throwable instanceof AuthenticationRedirectException) { AuthenticationRedirectException redirectEx = (AuthenticationRedirectException) throwable; event.response().setStatusCode(redirectEx.getCode()); event.response().headers().set(HttpHeaders.LOCATION, redirectEx.getRedirectUri()); event.response().headers().set(HttpHeaders.CACHE_CONTROL, "no-store"); event.response().headers().set("Pragma", "no-cache"); event.response().end(); } else { event.fail(throwable); } } private static HttpAuthenticator getAuthenticator(RoutingContext event) { return event.get(HttpAuthenticator.class.getName()); } private static 
Throwable extractRootCause(Throwable throwable) { while ((throwable instanceof CompletionException && throwable.getCause() != null) || (throwable instanceof CompositeException)) { if (throwable instanceof CompositeException) { throwable = ((CompositeException) throwable).getCauses().get(0); } else { throwable = throwable.getCause(); } } return throwable; } } }
I will look into it, reported https://github.com/quarkusio/quarkus/issues/1419 I don't have the permission to assign it to myself though.
public void registerBaseMetrics(ShutdownContext shutdown) { MetricRegistry registry = MetricRegistries.get(MetricRegistry.Type.BASE); List<GarbageCollectorMXBean> gcs = ManagementFactory.getGarbageCollectorMXBeans(); List<String> names = new ArrayList<>(); for (GarbageCollectorMXBean gc : gcs) { Metadata meta = new Metadata("gc." + gc.getName() + ".count", MetricType.COUNTER); meta.setDisplayName("Garbage Collection Time"); meta.setUnit("none"); meta.setDescription( "Displays the total number of collections that have occurred. This attribute lists -1 if the collection count is undefined for this collector."); registry.register(meta, new LambdaCounter(() -> gc.getCollectionCount())); names.add(meta.getName()); meta = new Metadata("gc." + gc.getName() + ".time", MetricType.COUNTER); meta.setDisplayName("Garbage Collection Time"); meta.setUnit("milliseconds"); meta.setDescription( "machine implementation may use a high resolution timer to measure the elapsed time. This attribute may display the same value even if the collection count has been incremented if the collection elapsed time is very short."); registry.register(meta, new LambdaCounter(() -> gc.getCollectionTime())); names.add(meta.getName()); } shutdown.addShutdownTask(new Runnable() { @Override public void run() { for (String i : names) { registry.remove(i); } } }); }
for (String i : names) {
public void registerBaseMetrics(ShutdownContext shutdown) { MetricRegistry registry = MetricRegistries.get(MetricRegistry.Type.BASE); List<GarbageCollectorMXBean> gcs = ManagementFactory.getGarbageCollectorMXBeans(); List<String> names = new ArrayList<>(); for (GarbageCollectorMXBean gc : gcs) { Metadata meta = new Metadata("gc." + gc.getName() + ".count", MetricType.COUNTER); meta.setDisplayName("Garbage Collection Time"); meta.setUnit("none"); meta.setDescription( "Displays the total number of collections that have occurred. This attribute lists -1 if the collection count is undefined for this collector."); registry.register(meta, new LambdaCounter(() -> gc.getCollectionCount())); names.add(meta.getName()); meta = new Metadata("gc." + gc.getName() + ".time", MetricType.COUNTER); meta.setDisplayName("Garbage Collection Time"); meta.setUnit("milliseconds"); meta.setDescription( "machine implementation may use a high resolution timer to measure the elapsed time. This attribute may display the same value even if the collection count has been incremented if the collection elapsed time is very short."); registry.register(meta, new LambdaCounter(() -> gc.getCollectionTime())); names.add(meta.getName()); } shutdown.addShutdownTask(new Runnable() { @Override public void run() { for (String i : names) { registry.remove(i); } } }); }
class SmallRyeMetricsTemplate { private static final Logger log = Logger.getLogger("io.quarkus.metrics"); private static final String MEMORY_HEAP_USAGE = "memory.heap.usage"; private static final String MEMORY_NON_HEAP_USAGE = "memory.nonHeap.usage"; private static final String THREAD_COUNT = "thread.count"; public void registerVendorMetrics(ShutdownContext shutdown) { MetricRegistry registry = MetricRegistries.get(MetricRegistry.Type.VENDOR); MemoryMXBean mem = ManagementFactory.getMemoryMXBean(); Metadata meta = new Metadata(MEMORY_HEAP_USAGE, MetricType.GAUGE); meta.setUnit("bytes"); registry.register(meta, new LambdaGauge(() -> mem.getHeapMemoryUsage().getUsed())); meta = new Metadata(MEMORY_NON_HEAP_USAGE, MetricType.GAUGE); meta.setUnit("bytes"); registry.register(meta, new LambdaGauge(() -> mem.getNonHeapMemoryUsage().getUsed())); ThreadMXBean thread = ManagementFactory.getThreadMXBean(); meta = new Metadata(THREAD_COUNT, MetricType.COUNTER); registry.register(meta, new LambdaCounter(() -> (long) thread.getThreadCount())); shutdown.addShutdownTask(new Runnable() { @Override public void run() { registry.remove(MEMORY_HEAP_USAGE); registry.remove(MEMORY_NON_HEAP_USAGE); registry.remove(THREAD_COUNT); } }); /* * meta = new Metadata("thread.cpuTime", MetricType.COUNTER); * meta.setUnit("milliseconds"); * registry.register(meta, new LambdaCounter( ()->thread.getCurrentThreadCpuTime())); */ /* * List<MemoryPoolMXBean> mps = ManagementFactory.getMemoryPoolMXBeans(); * for (MemoryPoolMXBean mp : mps) { * Metadata meta = new Metadata("memoryPool." + mp.getName() + ".usage", MetricType.GAUGE); * meta.setDisplayName( "Current usage of the " + mp.getName() + " memory pool"); * meta.setUnit("bytes"); * meta.setDescription( "Current usage of the " + mp.getName() + " memory pool"); * registry.register( meta, new LambdaGauge( ()-> mp.getCollectionUsage().getUsed() )); * * meta = new Metadata("memoryPool." 
+ mp.getName() + ".usage.max", MetricType.GAUGE); * meta.setDisplayName( "Peak usage of the " + mp.getName() + " memory pool"); * meta.setUnit("bytes"); * meta.setDescription( "Peak usage of the " + mp.getName() + " memory pool"); * registry.register( meta, new LambdaGauge( ()-> mp.getPeakUsage().getUsed())); * } */ } public void createRegistries(BeanContainer container) { log.info("Creating registries"); MetricRegistries.get(MetricRegistry.Type.APPLICATION); MetricRegistries.get(MetricRegistry.Type.BASE); MetricRegistries.get(MetricRegistry.Type.VENDOR); container.instance(MetricRegistries.class).getApplicationRegistry(); } }
class SmallRyeMetricsTemplate { private static final Logger log = Logger.getLogger("io.quarkus.metrics"); private static final String MEMORY_HEAP_USAGE = "memory.heap.usage"; private static final String MEMORY_NON_HEAP_USAGE = "memory.nonHeap.usage"; private static final String THREAD_COUNT = "thread.count"; public void registerVendorMetrics(ShutdownContext shutdown) { MetricRegistry registry = MetricRegistries.get(MetricRegistry.Type.VENDOR); MemoryMXBean mem = ManagementFactory.getMemoryMXBean(); Metadata meta = new Metadata(MEMORY_HEAP_USAGE, MetricType.GAUGE); meta.setUnit("bytes"); registry.register(meta, new LambdaGauge(() -> mem.getHeapMemoryUsage().getUsed())); meta = new Metadata(MEMORY_NON_HEAP_USAGE, MetricType.GAUGE); meta.setUnit("bytes"); registry.register(meta, new LambdaGauge(() -> mem.getNonHeapMemoryUsage().getUsed())); ThreadMXBean thread = ManagementFactory.getThreadMXBean(); meta = new Metadata(THREAD_COUNT, MetricType.COUNTER); registry.register(meta, new LambdaCounter(() -> (long) thread.getThreadCount())); shutdown.addShutdownTask(new Runnable() { @Override public void run() { registry.remove(MEMORY_HEAP_USAGE); registry.remove(MEMORY_NON_HEAP_USAGE); registry.remove(THREAD_COUNT); } }); /* * meta = new Metadata("thread.cpuTime", MetricType.COUNTER); * meta.setUnit("milliseconds"); * registry.register(meta, new LambdaCounter( ()->thread.getCurrentThreadCpuTime())); */ /* * List<MemoryPoolMXBean> mps = ManagementFactory.getMemoryPoolMXBeans(); * for (MemoryPoolMXBean mp : mps) { * Metadata meta = new Metadata("memoryPool." + mp.getName() + ".usage", MetricType.GAUGE); * meta.setDisplayName( "Current usage of the " + mp.getName() + " memory pool"); * meta.setUnit("bytes"); * meta.setDescription( "Current usage of the " + mp.getName() + " memory pool"); * registry.register( meta, new LambdaGauge( ()-> mp.getCollectionUsage().getUsed() )); * * meta = new Metadata("memoryPool." 
+ mp.getName() + ".usage.max", MetricType.GAUGE); * meta.setDisplayName( "Peak usage of the " + mp.getName() + " memory pool"); * meta.setUnit("bytes"); * meta.setDescription( "Peak usage of the " + mp.getName() + " memory pool"); * registry.register( meta, new LambdaGauge( ()-> mp.getPeakUsage().getUsed())); * } */ } public void createRegistries(BeanContainer container) { log.info("Creating registries"); MetricRegistries.get(MetricRegistry.Type.APPLICATION); MetricRegistries.get(MetricRegistry.Type.BASE); MetricRegistries.get(MetricRegistry.Type.VENDOR); container.instance(MetricRegistries.class).getApplicationRegistry(); } }
Consider including the exception in the log entry (so that the stack trace is printed to the log).
private JsonResponse v1Response(URI requestUri) { try { return new JsonResponse(OK, v1Content(requestUri)); } catch (JSONException e) { log.warning("Bad JSON construction in " + V1_PATH + " response: " + e.getMessage()); return new ErrorResponse(INTERNAL_SERVER_ERROR, "An error occurred, please try path '" + VALUES_PATH + "'"); } }
log.warning("Bad JSON construction in " + V1_PATH + " response: " + e.getMessage());
private JsonResponse v1Response(URI requestUri) { try { return new JsonResponse(OK, v1Content(requestUri)); } catch (JSONException e) { log.warning("Bad JSON construction in " + V1_PATH + " response: " + e.getMessage()); return new ErrorResponse(INTERNAL_SERVER_ERROR, "An error occurred, please try path '" + VALUES_PATH + "'"); } }
class MetricsHandler extends ThreadedHttpRequestHandler { static final String V1_PATH = "/metrics/v1"; static final String VALUES_PATH = V1_PATH + "/values"; private final ValuesFetcher valuesFetcher; @Inject public MetricsHandler(Executor executor, MetricsManager metricsManager, VespaServices vespaServices, MetricsConsumers metricsConsumers) { super(executor); valuesFetcher = new ValuesFetcher(metricsManager, vespaServices, metricsConsumers); } @Override public HttpResponse handle(HttpRequest request) { if (request.getMethod() != GET) return new JsonResponse(METHOD_NOT_ALLOWED, "Only GET is supported"); Path path = new Path(request.getUri()); if (path.matches(V1_PATH)) return v1Response(request.getUri()); if (path.matches(VALUES_PATH)) return valuesResponse(request); return new ErrorResponse(NOT_FOUND, "No content at given path"); } private JsonResponse valuesResponse(HttpRequest request) { try { return new JsonResponse(OK, valuesFetcher.fetch(request.getProperty("consumer"))); } catch (JsonRenderingException e) { return new ErrorResponse(INTERNAL_SERVER_ERROR, e.getMessage()); } } private String v1Content(URI requestUri) throws JSONException { int port = requestUri.getPort(); String host = requestUri.getHost(); StringBuilder base = new StringBuilder("http: base.append(host); if (port >= 0) { base.append(":").append(port); } String uriBase = base.toString(); JSONArray linkList = new JSONArray(); for (String api : new String[] {VALUES_PATH}) { JSONObject resource = new JSONObject(); resource.put("url", uriBase + api); linkList.put(resource); } return new JSONObject().put("resources", linkList).toString(4); } }
class MetricsHandler extends ThreadedHttpRequestHandler { static final String V1_PATH = "/metrics/v1"; static final String VALUES_PATH = V1_PATH + "/values"; private final ValuesFetcher valuesFetcher; @Inject public MetricsHandler(Executor executor, MetricsManager metricsManager, VespaServices vespaServices, MetricsConsumers metricsConsumers) { super(executor); valuesFetcher = new ValuesFetcher(metricsManager, vespaServices, metricsConsumers); } @Override public HttpResponse handle(HttpRequest request) { if (request.getMethod() != GET) return new JsonResponse(METHOD_NOT_ALLOWED, "Only GET is supported"); Path path = new Path(request.getUri()); if (path.matches(V1_PATH)) return v1Response(request.getUri()); if (path.matches(VALUES_PATH)) return valuesResponse(request); return new ErrorResponse(NOT_FOUND, "No content at given path"); } private JsonResponse valuesResponse(HttpRequest request) { try { return new JsonResponse(OK, valuesFetcher.fetch(request.getProperty("consumer"))); } catch (JsonRenderingException e) { return new ErrorResponse(INTERNAL_SERVER_ERROR, e.getMessage()); } } private String v1Content(URI requestUri) throws JSONException { int port = requestUri.getPort(); String host = requestUri.getHost(); StringBuilder base = new StringBuilder("http: base.append(host); if (port >= 0) { base.append(":").append(port); } String uriBase = base.toString(); JSONArray linkList = new JSONArray(); for (String api : new String[] {VALUES_PATH}) { JSONObject resource = new JSONObject(); resource.put("url", uriBase + api); linkList.put(resource); } return new JSONObject().put("resources", linkList).toString(4); } }
It does not get invoked when the deprecated params doc is not available. So null check is removed
public void exitDeprecatedParametersDocumentation(BallerinaParser.DeprecatedParametersDocumentationContext ctx) { if (isInErrorState) { return; } String str = ctx.DeprecatedParametersDocumentation() != null ? ctx.DeprecatedParametersDocumentation().getText() : ""; this.pkgBuilder.endDeprecatedParametersDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx), str); }
String str = ctx.DeprecatedParametersDocumentation() != null ?
public void exitDeprecatedParametersDocumentation(BallerinaParser.DeprecatedParametersDocumentationContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endDeprecatedParametersDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx)); }
class BLangParserListener extends BallerinaParserBaseListener { private static final String KEYWORD_PUBLIC = "public"; private static final String KEYWORD_KEY = "key"; private BLangPackageBuilder pkgBuilder; private BDiagnosticSource diagnosticSrc; private BLangDiagnosticLogHelper dlog; private List<String> pkgNameComps; private String pkgVersion; private boolean isInErrorState = false; private Pattern pattern = Pattern.compile(Constants.UNICODE_REGEX); BLangParserListener(CompilerContext context, CompilationUnitNode compUnit, BDiagnosticSource diagnosticSource) { this.pkgBuilder = new BLangPackageBuilder(context, compUnit); this.diagnosticSrc = diagnosticSource; this.dlog = BLangDiagnosticLogHelper.getInstance(context); } @Override public void enterParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitParameter(BallerinaParser.ParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), false, ctx.annotationAttachment().size(), ctx.PUBLIC() != null); } /** * {@inheritDoc} */ @Override public void enterFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFormalParameterList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDefaultableParameter(BallerinaParser.DefaultableParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addDefaultableParam(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRestParameter(BallerinaParser.RestParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), 
ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), ctx.annotationAttachment().size()); } @Override public void exitRestParameterTypeName(BallerinaParser.RestParameterTypeNameContext ctx) { if (isInErrorState) { return; } pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), null, null, 0); } /** * {@inheritDoc} */ @Override public void exitParameterTypeName(BallerinaParser.ParameterTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), null, null, false, 0); } @Override public void enterCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { } /** * {@inheritDoc} */ @Override public void exitCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { this.pkgBuilder.endCompilationUnit(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitPackageName(BallerinaParser.PackageNameContext ctx) { if (isInErrorState) { return; } this.pkgNameComps = new ArrayList<>(); ctx.Identifier().forEach(e -> pkgNameComps.add(e.getText())); this.pkgVersion = ctx.version() != null ? ctx.version().versionPattern().getText() : null; } /** * {@inheritDoc} */ @Override public void exitImportDeclaration(BallerinaParser.ImportDeclarationContext ctx) { if (isInErrorState) { return; } String alias = ctx.Identifier() != null ? ctx.Identifier().getText() : null; BallerinaParser.OrgNameContext orgNameContext = ctx.orgName(); if (orgNameContext == null) { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), null, this.pkgNameComps, this.pkgVersion, alias); } else { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), orgNameContext.getText(), this.pkgNameComps, this.pkgVersion, alias); } } /** * {@inheritDoc} */ @Override public void exitServiceDefinition(BallerinaParser.ServiceDefinitionContext ctx) { if (isInErrorState) { return; } final DiagnosticPos serviceDefPos = getCurrentPos(ctx); final String serviceVarName = ctx.Identifier() != null ? 
ctx.Identifier().getText() : null; final DiagnosticPos varPos = ctx.Identifier() != null ? getCurrentPos(ctx.Identifier()) : serviceDefPos; this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, false); } /** * {@inheritDoc} */ @Override public void enterServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startServiceDef(getCurrentPos(ctx)); this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; this.pkgBuilder .addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, false, false, false, true); } /** * {@inheritDoc} */ @Override public void enterBlockFunctionBody(BallerinaParser.BlockFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startBlockFunctionBody(); } /** * {@inheritDoc} */ @Override public void exitBlockFunctionBody(BallerinaParser.BlockFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endBlockFunctionBody(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterExprFunctionBody(BallerinaParser.ExprFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprFunctionBody(); } /** * {@inheritDoc} */ @Override public void exitExprFunctionBody(BallerinaParser.ExprFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprFunctionBody(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterExternalFunctionBody(BallerinaParser.ExternalFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExternFunctionBody(); } /** * 
{@inheritDoc} */ @Override public void exitExternalFunctionBody(BallerinaParser.ExternalFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExternalFunctionBody(ctx.annotationAttachment().size(), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } int annotCount = ((BallerinaParser.CompilationUnitContext) ctx.parent.parent).annotationAttachment().size(); this.pkgBuilder.startFunctionDef(annotCount, false); } /** * {@inheritDoc} */ @Override public void exitFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } String funcName = ctx.anyIdentifierName().getText(); boolean publicFunc = ctx.PUBLIC() != null; boolean privateFunc = ctx.PRIVATE() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean nativeFunc = ctx.functionDefinitionBody().externalFunctionBody() != null; this.pkgBuilder.endFunctionDefinition(getCurrentPos(ctx), getWS(ctx), funcName, getCurrentPos(ctx.anyIdentifierName()), publicFunc, remoteFunc, nativeFunc, privateFunc, false); } @Override public void enterExplicitAnonymousFunctionExpr(BallerinaParser.ExplicitAnonymousFunctionExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLambdaFunctionDef(diagnosticSrc.pkgID); } @Override public void exitExplicitAnonymousFunctionExpr(BallerinaParser.ExplicitAnonymousFunctionExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLambdaFunctionDef(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterInferAnonymousFunctionExpr(BallerinaParser.InferAnonymousFunctionExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitInferAnonymousFunctionExpression(BallerinaParser.InferAnonymousFunctionExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addArrowFunctionDef(getCurrentPos(ctx), getWS(ctx), diagnosticSrc.pkgID); } 
@Override public void exitInferParamList(BallerinaParser.InferParamListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWSForInferParamList(getWS(ctx)); } @Override public void exitInferParam(BallerinaParser.InferParamContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addVarWithoutType(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), false, 0); } /** * {@inheritDoc} */ @Override public void exitFunctionSignature(BallerinaParser.FunctionSignatureContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFunctionSignature(getCurrentPos(ctx), getWS(ctx), ctx.formalParameterList() != null, ctx.returnParameter() != null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } /** * {@inheritDoc} */ @Override public void exitFiniteType(BallerinaParser.FiniteTypeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFiniteType(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTypeDefinition(BallerinaParser.TypeDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicObject = ctx.PUBLIC() != null; this.pkgBuilder.endTypeDefinition(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), publicObject); } /** * {@inheritDoc} */ @Override public void enterObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext) || (ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext && ctx.parent.parent.parent instanceof BallerinaParser.FiniteTypeContext && ctx.parent.parent.parent.getChildCount() > 1); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof 
BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean isAbstract = ((ObjectTypeNameLabelContext) ctx.parent).ABSTRACT() != null; boolean isClient = ((ObjectTypeNameLabelContext) ctx.parent).CLIENT() != null; this.pkgBuilder.addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, isAbstract, isClient, false); } /** * {@inheritDoc} */ @Override public void exitObjectTypeNameLabel(BallerinaParser.ObjectTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addObjectTypeName(getWS(ctx)); } @Override public void exitTypeReference(BallerinaParser.TypeReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTypeReference(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitFieldDefinition(BallerinaParser.FieldDefinitionContext ctx) { if (isInErrorState) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); DiagnosticPos identifierPos = getCurrentPos(ctx.Identifier()); boolean exprAvailable = ctx.expression() != null; boolean isOptional = ctx.QUESTION_MARK() != null; boolean markdownExists = ctx.documentationString() != null; this.pkgBuilder.addFieldVariable(currentPos, ws, name, identifierPos, exprAvailable, ctx.annotationAttachment().size(), false, isOptional, markdownExists); } /** * {@inheritDoc} */ @Override public void exitObjectFieldDefinition(BallerinaParser.ObjectFieldDefinitionContext ctx) { if (isInErrorState) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); DiagnosticPos identifierPos = getCurrentPos(ctx.Identifier()); boolean exprAvailable = ctx.expression() != null; int annotationCount = ctx.annotationAttachment().size(); boolean isPrivate = 
ctx.PRIVATE() != null; boolean isPublic = ctx.PUBLIC() != null; boolean markdownExists = ctx.documentationString() != null; this.pkgBuilder.addObjectFieldVariable(currentPos, ws, name, identifierPos, exprAvailable, annotationCount, isPrivate, isPublic, markdownExists); } /** * {@inheritDoc} */ @Override public void enterMethodDeclaration(BallerinaParser.MethodDeclarationContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startObjectFunctionDef(); } /** * {@inheritDoc} */ @Override public void enterMethodDefinition(BallerinaParser.MethodDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startObjectFunctionDef(); } /** * {@inheritDoc} */ @Override public void exitMethodDefinition(BallerinaParser.MethodDefinitionContext ctx) { if (isInErrorState) { return; } String funcName = ctx.anyIdentifierName().getText(); DiagnosticPos funcNamePos = getCurrentPos(ctx.anyIdentifierName()); boolean publicFunc = ctx.PUBLIC() != null; boolean isPrivate = ctx.PRIVATE() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean resourceFunc = ctx.RESOURCE() != null; boolean markdownDocExists = ctx.documentationString() != null; this.pkgBuilder.endObjectAttachedFunctionDef(getCurrentPos(ctx), getWS(ctx), funcName, funcNamePos, publicFunc, isPrivate, remoteFunc, resourceFunc, false, markdownDocExists, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void exitMethodDeclaration(BallerinaParser.MethodDeclarationContext ctx) { if (isInErrorState) { return; } String funcName = ctx.anyIdentifierName().getText(); DiagnosticPos funcNamePos = getCurrentPos(ctx.anyIdentifierName()); boolean isPublic = ctx.PUBLIC() != null; boolean isPrivate = ctx.PRIVATE() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean resourceFunc = ctx.RESOURCE() != null; boolean markdownDocExists = ctx.documentationString() != null; this.pkgBuilder.endObjectAttachedFunctionDef(getCurrentPos(ctx), getWS(ctx), funcName, funcNamePos, isPublic, isPrivate, 
remoteFunc, resourceFunc, true, markdownDocExists, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAnnotationDef(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicAnnotation = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText()); boolean isTypeAttached = ctx.typeName() != null; boolean isConst = ctx.CONST() != null; this.pkgBuilder.endAnnotationDef(getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), publicAnnotation, isTypeAttached, isConst); } /** * {@inheritDoc} */ @Override public void exitConstantDefinition(BallerinaParser.ConstantDefinitionContext ctx) { if (isInErrorState) { return; } boolean isPublic = ctx.PUBLIC() != null; boolean isTypeAvailable = ctx.typeName() != null; this.pkgBuilder.addConstant(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), isPublic, isTypeAvailable); } @Override public void exitConstDivMulModExpression(BallerinaParser.ConstDivMulModExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitConstAddSubExpression(BallerinaParser.ConstAddSubExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitConstGroupExpression(BallerinaParser.ConstGroupExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createGroupExpression(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitGlobalVariableDefinition(BallerinaParser.GlobalVariableDefinitionContext ctx) { if (isInErrorState) { return; } boolean 
isPublic = ctx.PUBLIC() != null; boolean isFinal = ctx.FINAL() != null; boolean isDeclaredWithVar = ctx.VAR() != null; boolean isExpressionAvailable = ctx.expression() != null; boolean isListenerVar = ctx.LISTENER() != null; boolean isTypeNameProvided = ctx.typeName() != null; this.pkgBuilder.addGlobalVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), isPublic, isFinal, isDeclaredWithVar, isExpressionAvailable, isListenerVar, isTypeNameProvided); } @Override public void exitAttachmentPoint(BallerinaParser.AttachmentPointContext ctx) { if (isInErrorState) { return; } AttachPoint attachPoint; if (ctx.dualAttachPoint() != null) { if (ctx.dualAttachPoint().SOURCE() != null) { attachPoint = AttachPoint.getAttachmentPoint(ctx.dualAttachPoint().dualAttachPointIdent().getText(), true); } else { attachPoint = AttachPoint.getAttachmentPoint(ctx.getText(), false); } } else { attachPoint = AttachPoint.getAttachmentPoint( ctx.sourceOnlyAttachPoint().sourceOnlyAttachPointIdent().getText(), true); } this.pkgBuilder.addAttachPoint(attachPoint, getWS(ctx)); } @Override public void enterWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWorker(diagnosticSrc.pkgID); } @Override public void exitWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (isInErrorState) { return; } String workerName = null; DiagnosticPos workerNamePos = null; if (ctx.workerDefinition() != null) { workerName = escapeQuotedIdentifier(ctx.workerDefinition().Identifier().getText()); workerNamePos = getCurrentPos(ctx.workerDefinition().Identifier()); } boolean retParamsAvail = ctx.workerDefinition().returnParameter() != null; int numAnnotations = ctx.annotationAttachment().size(); this.pkgBuilder.addWorker( getCurrentPos(ctx), getWS(ctx), workerName, workerNamePos, retParamsAvail, numAnnotations); } /** * {@inheritDoc} */ @Override public void 
exitWorkerDefinition(BallerinaParser.WorkerDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.attachWorkerWS(getWS(ctx)); } @Override public void exitArrayTypeNameLabel(BallerinaParser.ArrayTypeNameLabelContext ctx) { if (isInErrorState) { return; } int index = 1; int dimensions = 0; List<Integer> sizes = new ArrayList<>(); List<ParseTree> children = ctx.children; while (index < children.size()) { if (children.get(index).getText().equals("[")) { if (children.get(index + 1).getText().equals("]")) { sizes.add(UNSEALED_ARRAY_INDICATOR); index += 2; } else if (children.get(index + 1).getText().equals(OPEN_SEALED_ARRAY)) { sizes.add(OPEN_SEALED_ARRAY_INDICATOR); index += 1; } else { sizes.add(Integer.parseInt(children.get(index + 1).getText())); index += 1; } dimensions++; } else { index++; } } Collections.reverse(sizes); this.pkgBuilder.addArrayType( getCurrentPos(ctx), getWS(ctx), dimensions, sizes.stream().mapToInt(val -> val).toArray()); } @Override public void exitUnionTypeNameLabel(BallerinaParser.UnionTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addUnionType(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTupleTypeNameLabel(BallerinaParser.TupleTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleType(getCurrentPos(ctx), getWS(ctx), ctx.tupleTypeDescriptor().typeName().size(), ctx.tupleTypeDescriptor().tupleRestDescriptor() != null); } @Override public void exitNullableTypeNameLabel(BallerinaParser.NullableTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.markTypeNodeAsNullable(getWS(ctx)); } @Override public void exitGroupTypeNameLabel(BallerinaParser.GroupTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.markTypeNodeAsGrouped(getWS(ctx)); } @Override public void enterInclusiveRecordTypeDescriptor(BallerinaParser.InclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } 
this.pkgBuilder.startRecordType(); } @Override public void exitInclusiveRecordTypeDescriptor(BallerinaParser.InclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, false, false); } @Override public void enterExclusiveRecordTypeDescriptor(BallerinaParser.ExclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordType(); } @Override public void exitExclusiveRecordTypeDescriptor(BallerinaParser.ExclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean hasRestField = ctx.recordRestFieldDefinition() != null; this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, hasRestField, true); } @Override public void exitSimpleTypeName(BallerinaParser.SimpleTypeNameContext ctx) { if (isInErrorState) { return; } if (ctx.referenceTypeName() != null || ctx.valueTypeName() != null) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitUserDefineTypeName(BallerinaParser.UserDefineTypeNameContext ctx) { if (isInErrorState) { return; } 
this.pkgBuilder.addUserDefineType(getWS(ctx)); } @Override public void exitValueTypeName(BallerinaParser.ValueTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getText()); } @Override public void exitBuiltInReferenceTypeName(BallerinaParser.BuiltInReferenceTypeNameContext ctx) { if (isInErrorState) { return; } if (ctx.functionTypeName() != null) { return; } if (ctx.errorTypeName() != null) { return; } if (ctx.streamTypeName() != null) { return; } String typeName = ctx.getChild(0).getText(); DiagnosticPos pos = getCurrentPos(ctx); if (ctx.typeName() != null) { this.pkgBuilder.addConstraintTypeWithTypeName(pos, getWS(ctx), typeName); } else { this.pkgBuilder.addBuiltInReferenceType(pos, getWS(ctx), typeName); } } @Override public void exitStreamTypeName(BallerinaParser.StreamTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addStreamTypeWithTypeName(getCurrentPos(ctx), getWS(ctx), ctx.typeName(0) != null, ctx.typeName(1) != null); } @Override public void enterErrorTypeName(BallerinaParser.ErrorTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startErrorType(); } @Override public void exitErrorTypeName(BallerinaParser.ErrorTypeNameContext ctx) { if (isInErrorState) { return; } boolean reasonTypeExists = !ctx.typeName().isEmpty(); boolean detailsTypeExists = ctx.typeName().size() > 1; boolean isAnonymous = !(ctx.parent.parent.parent.parent.parent.parent instanceof BallerinaParser.FiniteTypeContext) && reasonTypeExists; this.pkgBuilder.addErrorType(getCurrentPos(ctx), getWS(ctx), reasonTypeExists, detailsTypeExists, isAnonymous); } @Override public void exitFunctionTypeName(BallerinaParser.FunctionTypeNameContext ctx) { if (isInErrorState) { return; } boolean paramsAvail = false, retParamAvail = false, restParamAvail = false; if (ctx.parameterList() != null) { paramsAvail = ctx.parameterList().parameter().size() > 0; if (ctx.parameterList().restParameter() != 
null) { restParamAvail = true; } } else if (ctx.parameterTypeNameList() != null) { paramsAvail = ctx.parameterTypeNameList().parameterTypeName().size() > 0; if (ctx.parameterTypeNameList().restParameterTypeName() != null) { restParamAvail = true; } } if (ctx.returnParameter() != null) { retParamAvail = true; } this.pkgBuilder.addFunctionType(getCurrentPos(ctx), getWS(ctx), paramsAvail, restParamAvail, retParamAvail); } /** * {@inheritDoc} */ @Override public void enterAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAnnotationAttachment(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.setAnnotationAttachmentName(getWS(ctx), ctx.recordLiteral() != null, getCurrentPos(ctx), false); } /** * {@inheritDoc} */ @Override public void exitErrorRestBindingPattern(BallerinaParser.ErrorRestBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWSForErrorRestBinding(getWS(ctx)); } @Override public void exitErrorBindingPattern(BallerinaParser.ErrorBindingPatternContext ctx) { if (isInErrorState) { return; } if (ctx.typeName() != null) { if (ctx.errorFieldBindingPatterns().errorRestBindingPattern() != null) { String restIdName = ctx.errorFieldBindingPatterns().errorRestBindingPattern().Identifier().getText(); DiagnosticPos restPos = getCurrentPos(ctx.errorFieldBindingPatterns().errorRestBindingPattern()); this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), restIdName, restPos); } else { this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), null, null); } return; } String reasonIdentifier = ctx.Identifier().getText(); DiagnosticPos currentPos = getCurrentPos(ctx); String restIdentifier = null; DiagnosticPos restParamPos = null; if (ctx.errorRestBindingPattern() != null) { restIdentifier = 
ctx.errorRestBindingPattern().Identifier().getText(); restParamPos = getCurrentPos(ctx.errorRestBindingPattern()); } this.pkgBuilder.addErrorVariable(currentPos, getWS(ctx), reasonIdentifier, restIdentifier, false, false, restParamPos); } @Override public void enterErrorBindingPattern(BallerinaParser.ErrorBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startErrorBindingNode(); } @Override public void exitXmlElementAccessFilter(BallerinaParser.XmlElementAccessFilterContext ctx) { if (isInErrorState) { return; } List<TerminalNode> identifier = ctx.Identifier(); String ns = ""; String elementName = "*"; DiagnosticPos nsPos = null; DiagnosticPos elemNamePos = null; if (identifier.size() == 1) { if (ctx.MUL() == null) { TerminalNode nameNode = identifier.get(0); elementName = nameNode.getText(); elemNamePos = getCurrentPos(nameNode); } else { elemNamePos = getCurrentPos(ctx.MUL()); ns = ctx.Identifier(0).getText(); nsPos = getCurrentPos(ctx.Identifier(0)); } } else if (identifier.size() > 1) { TerminalNode nsNode = identifier.get(0); ns = nsNode.getText(); nsPos = getCurrentPos(nsNode); TerminalNode nameNode = identifier.get(1); elementName = nameNode.getText(); elemNamePos = getCurrentPos(nameNode); } else { elemNamePos = getCurrentPos(ctx.MUL()); } this.pkgBuilder.addXMLElementAccessFilter(getCurrentPos(ctx), getWS(ctx), ns, nsPos, elementName, elemNamePos); } @Override public void enterErrorMatchPattern(BallerinaParser.ErrorMatchPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startErrorBindingNode(); } @Override public void exitSimpleMatchPattern(BallerinaParser.SimpleMatchPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endSimpleMatchPattern(getWS(ctx)); } @Override public void exitXmlElementFilter(BallerinaParser.XmlElementFilterContext ctx) { if (isInErrorState) { return; } int filterCount = ctx.xmlElementNames().xmlElementAccessFilter().size(); 
this.pkgBuilder.createXMLElementAccessNode(getCurrentPos(ctx), getWS(ctx), filterCount);
    }

    // Builds an XML step (navigation) expression, counting its filters and `*` tokens,
    // and noting whether the step is indexed.
    @Override
    public void exitXmlStepExpressionReference(BallerinaParser.XmlStepExpressionReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean isIndexed = ctx.xmlStepExpression().index() != null;
        BallerinaParser.XmlStepExpressionContext navAccess = ctx.xmlStepExpression();
        BallerinaParser.XmlElementNamesContext filters = navAccess.xmlElementNames();
        int filterCount = filters == null ? 0 : filters.xmlElementAccessFilter().size();
        int starCount = navAccess.MUL().size();
        this.pkgBuilder.createXMLNavigationAccessNode(getCurrentPos(ctx), getWS(ctx),
                filterCount, starCount, isIndexed);
    }

    @Override
    public void exitRestMatchPattern(BallerinaParser.RestMatchPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addWSForRestMatchPattern(getWS(ctx));
    }

    // Builds the argument list of an error match pattern: an optional reason
    // (identifier, `var x`, or a constant quoted-string) and an optional rest pattern.
    @Override
    public void exitErrorArgListMatchPattern(BallerinaParser.ErrorArgListMatchPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        String restIdentifier = null;
        DiagnosticPos restParamPos = null;
        if (ctx.restMatchPattern() != null) {
            restIdentifier = ctx.restMatchPattern().Identifier().getText();
            restParamPos = getCurrentPos(ctx.restMatchPattern());
        }

        String reasonIdentifier = null;
        boolean reasonVar = false;
        boolean constReasonMatchPattern = false;
        if (ctx.simpleMatchPattern() != null) {
            reasonVar = ctx.simpleMatchPattern().VAR() != null;
            if (ctx.simpleMatchPattern().Identifier() != null) {
                reasonIdentifier = ctx.simpleMatchPattern().Identifier().getText();
            } else {
                // A quoted-string reason makes this a constant reason match pattern.
                reasonIdentifier = ctx.simpleMatchPattern().QuotedStringLiteral().getText();
                constReasonMatchPattern = true;
            }
        }

        this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), reasonIdentifier, restIdentifier,
                reasonVar, constReasonMatchPattern, restParamPos);
    }

    @Override
    public void exitErrorMatchPattern(BallerinaParser.ErrorMatchPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        // An explicit type name marks an indirect error match pattern.
        // (Local renamed from the misspelled `isIndirectErrorMatchPatern`.)
        boolean isIndirectErrorMatchPattern = ctx.typeName() != null;
        this.pkgBuilder.endErrorMatchPattern(getWS(ctx), isIndirectErrorMatchPattern);
    }

    @Override
    public void exitErrorDetailBindingPattern(BallerinaParser.ErrorDetailBindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        // The bound variable name is optional; null when no identifier binding is given.
        String bindingVarName = null;
        if (ctx.bindingPattern() != null && ctx.bindingPattern().Identifier() != null) {
            bindingVarName = ctx.bindingPattern().Identifier().getText();
        }
        this.pkgBuilder.addErrorDetailBinding(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(),
                bindingVarName);
    }

    @Override
    public void exitErrorRefBindingPattern(BallerinaParser.ErrorRefBindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        int numNamedArgs = ctx.errorNamedArgRefPattern().size();
        boolean reasonRefAvailable = ctx.variableReference() != null;
        boolean restPatternAvailable = ctx.errorRefRestPattern() != null;
        boolean indirectErrorRefPattern = ctx.typeName() != null;
        this.pkgBuilder.addErrorVariableReference(getCurrentPos(ctx), getWS(ctx),
                numNamedArgs, reasonRefAvailable, restPatternAvailable, indirectErrorRefPattern);
    }

    @Override
    public void exitErrorNamedArgRefPattern(BallerinaParser.ErrorNamedArgRefPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText());
    }

    @Override
    public void exitListBindingPattern(BallerinaParser.ListBindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean restBindingAvailable = ctx.restBindingPattern() != null;
        this.pkgBuilder.addTupleVariable(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().size(),
                restBindingAvailable);
    }

    @Override
    public void exitListRefBindingPattern(BallerinaParser.ListRefBindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean restPatternAvailable = ctx.listRefRestPattern() != null;
        this.pkgBuilder.addTupleVariableReference(getCurrentPos(ctx), getWS(ctx),
                ctx.bindingRefPattern().size(), restPatternAvailable);
    }

    @Override
    public void
enterRecordBindingPattern(BallerinaParser.RecordBindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startRecordVariableList();
    }

    @Override
    public void exitRecordBindingPattern(BallerinaParser.RecordBindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean hasRestBindingPattern = ctx.entryBindingPattern().restBindingPattern() != null;
        this.pkgBuilder.addRecordVariable(getCurrentPos(ctx), getWS(ctx), hasRestBindingPattern);
    }

    @Override
    public void enterRecordRefBindingPattern(BallerinaParser.RecordRefBindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startRecordVariableReferenceList();
    }

    @Override
    public void exitRecordRefBindingPattern(BallerinaParser.RecordRefBindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean hasRestBindingPattern = ctx.entryRefBindingPattern().restRefBindingPattern() != null;
        this.pkgBuilder.addRecordVariableReference(getCurrentPos(ctx), getWS(ctx), hasRestBindingPattern);
    }

    // A bare identifier binding becomes a member variable only when it occurs directly
    // inside a list/field/match-clause binding context; otherwise only its whitespace
    // is recorded.
    @Override
    public void exitBindingPattern(BallerinaParser.BindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        if ((ctx.Identifier() != null) && ((ctx.parent instanceof BallerinaParser.ListBindingPatternContext)
                || (ctx.parent instanceof BallerinaParser.FieldBindingPatternContext)
                || (ctx.parent instanceof BallerinaParser.MatchPatternClauseContext))) {
            this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx),
                    ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()));
        } else if (ctx.Identifier() != null) {
            this.pkgBuilder.addBindingPatternNameWhitespace(getWS(ctx));
        }
    }

    @Override
    public void exitFieldBindingPattern(BallerinaParser.FieldBindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addFieldBindingMemberVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(),
                getCurrentPos(ctx.Identifier()), ctx.bindingPattern() != null);
    }

    @Override
    public void exitFieldRefBindingPattern(BallerinaParser.FieldRefBindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addFieldRefBindingMemberVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(),
                ctx.bindingRefPattern() != null);
    }

    @Override
    public void exitRestBindingPattern(BallerinaParser.RestBindingPatternContext ctx) {
        if (isInErrorState) {
            return;
        }

        if (ctx.Identifier() != null) {
            this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx),
                    ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()));
        }
    }

    // Dispatches a variable definition statement to the matching builder method
    // depending on whether the LHS is a plain identifier or a record/error/list
    // binding pattern.
    @Override
    public void exitVariableDefinitionStatement(BallerinaParser.VariableDefinitionStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean isFinal = ctx.FINAL() != null;
        boolean isDeclaredWithVar = ctx.VAR() != null;
        boolean isExpressionAvailable = ctx.expression() != null;

        if (ctx.Identifier() != null) {
            this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                    ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()),
                    isFinal, isExpressionAvailable, isDeclaredWithVar);
        } else if (ctx.bindingPattern().Identifier() != null) {
            this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                    ctx.bindingPattern().Identifier().getText(),
                    getCurrentPos(ctx.bindingPattern().Identifier()),
                    isFinal, isExpressionAvailable, isDeclaredWithVar);
        } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) {
            this.pkgBuilder.addRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal,
                    isDeclaredWithVar);
        } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) {
            this.pkgBuilder.addErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal,
                    isDeclaredWithVar);
        } else if (ctx.bindingPattern().structuredBindingPattern().listBindingPattern() != null) {
            this.pkgBuilder.addTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal,
                    isDeclaredWithVar);
        }
    }

    @Override
    public void enterRecordLiteral(BallerinaParser.RecordLiteralContext ctx) {
        if (isInErrorState) {
            return;
        }
this.pkgBuilder.startMapStructLiteral();
    }

    @Override
    public void exitRecordLiteral(BallerinaParser.RecordLiteralContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addMapStructLiteral(getCurrentPos(ctx), getWS(ctx));
    }

    // A record literal field is a shorthand identifier, a spread (`...expr`),
    // or a key-value pair (whose key may be a computed `[k]` key).
    @Override
    public void exitRecordField(BallerinaParser.RecordFieldContext ctx) {
        if (isInErrorState) {
            return;
        }

        if (ctx.Identifier() != null) {
            DiagnosticPos pos = getCurrentPos(ctx);
            this.pkgBuilder.addNameReference(pos, getWS(ctx), null, ctx.Identifier().getText());
            this.pkgBuilder.createBLangRecordVarRefNameField(pos, getWS(ctx));
            this.pkgBuilder.addIdentifierRecordField();
        } else if (ctx.ELLIPSIS() != null) {
            this.pkgBuilder.addSpreadOpRecordField(getWS(ctx));
        } else {
            this.pkgBuilder.addKeyValueRecordField(getWS(ctx), ctx.recordKey().LEFT_BRACKET() != null);
        }
    }

    @Override
    public void exitRecordKey(BallerinaParser.RecordKeyContext ctx) {
        if (isInErrorState) {
            return;
        }

        // An identifier key is turned into a simple variable reference; a computed
        // `[expr]` key only records its whitespace here.
        if (ctx.Identifier() != null) {
            DiagnosticPos pos = getCurrentPos(ctx);
            this.pkgBuilder.addNameReference(pos, getWS(ctx), null, ctx.Identifier().getText());
            this.pkgBuilder.createSimpleVariableReference(pos, getWS(ctx));
        } else if (ctx.LEFT_BRACKET() != null) {
            this.pkgBuilder.addRecordKeyWS(getWS(ctx));
        }
    }

    @Override
    public void enterTableLiteral(BallerinaParser.TableLiteralContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startTableLiteral();
    }

    @Override
    public void exitTableColumnDefinition(BallerinaParser.TableColumnDefinitionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.endTableColumnDefinition(getWS(ctx));
    }

    // A table column is either `key name` (two children) or a bare `name`.
    // A two-child form whose first token is not `key` is reported as a diagnostic.
    @Override
    public void exitTableColumn(BallerinaParser.TableColumnContext ctx) {
        if (isInErrorState) {
            return;
        }

        String columnName;
        int childCount = ctx.getChildCount();
        if (childCount == 2) {
            boolean keyColumn = KEYWORD_KEY.equals(ctx.getChild(0).getText());
            if (keyColumn) {
                columnName = escapeQuotedIdentifier(ctx.getChild(1).getText());
                this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx));
                this.pkgBuilder.markPrimaryKeyColumn(columnName);
            } else {
                DiagnosticPos pos = getCurrentPos(ctx);
                dlog.error(pos, DiagnosticCode.TABLE_KEY_EXPECTED);
            }
        } else {
            columnName = escapeQuotedIdentifier(ctx.getChild(0).getText());
            this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx));
        }
    }

    @Override
    public void exitTableDataArray(BallerinaParser.TableDataArrayContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.endTableDataArray(getWS(ctx));
    }

    @Override
    public void exitTableDataList(BallerinaParser.TableDataListContext ctx) {
        if (isInErrorState) {
            return;
        }

        // Only a row with an expression list produces a data row.
        if (ctx.expressionList() != null) {
            this.pkgBuilder.endTableDataRow(getWS(ctx));
        }
    }

    @Override
    public void exitTableData(BallerinaParser.TableDataContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.endTableDataList(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitTableLiteral(BallerinaParser.TableLiteralContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addTableLiteral(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitListConstructorExpr(BallerinaParser.ListConstructorExprContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean argsAvailable = ctx.expressionList() != null;
        this.pkgBuilder.addListConstructorExpression(getCurrentPos(ctx), getWS(ctx), argsAvailable);
    }

    @Override
    public void enterLetExpr(BallerinaParser.LetExprContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startLetVarList();
    }

    @Override
    public void exitLetExpr(BallerinaParser.LetExprContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addLetExpression(getCurrentPos(ctx), getWS(ctx));
    }

    // Dispatches a `let` variable declaration by the shape of its binding pattern.
    @Override
    public void exitLetVarDecl(BallerinaParser.LetVarDeclContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean isDeclaredWithVar = ctx.VAR() != null;
        boolean isExpressionAvailable = ctx.expression() != null;
        int annotationAttachmentsSize = ctx.annotationAttachment().size();

        if (ctx.bindingPattern().Identifier() != null) {
this.pkgBuilder.addSimpleLetVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                    ctx.bindingPattern().Identifier().getText(),
                    getCurrentPos(ctx.bindingPattern().Identifier()),
                    isExpressionAvailable, isDeclaredWithVar, annotationAttachmentsSize);
        } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) {
            this.pkgBuilder.addRecordVariableLetDefStatement(getCurrentPos(ctx), getWS(ctx),
                    isDeclaredWithVar, annotationAttachmentsSize);
        } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) {
            this.pkgBuilder.addErrorVariableLetDefStatement(getCurrentPos(ctx), getWS(ctx),
                    isDeclaredWithVar, annotationAttachmentsSize);
        } else if (ctx.bindingPattern().structuredBindingPattern().listBindingPattern() != null) {
            this.pkgBuilder.addTupleVariableLetDefStatement(getCurrentPos(ctx), getWS(ctx),
                    isDeclaredWithVar, annotationAttachmentsSize);
        }
    }

    // Builds a `new ...` type-init expression; the type name may be omitted.
    @Override
    public void exitTypeInitExpr(BallerinaParser.TypeInitExprContext ctx) {
        if (isInErrorState) {
            return;
        }

        String initName = ctx.NEW().getText();
        boolean typeAvailable = ctx.userDefineTypeName() != null || ctx.streamTypeName() != null;
        boolean argsAvailable = ctx.invocationArgList() != null;
        this.pkgBuilder.addTypeInitExpression(getCurrentPos(ctx), getWS(ctx), initName, typeAvailable,
                argsAvailable);
    }

    // A service constructor expression ends a service definition with no variable
    // name attached (the name and its position are deliberately null/defaulted).
    @Override
    public void exitServiceConstructorExpression(BallerinaParser.ServiceConstructorExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        final DiagnosticPos serviceDefPos = getCurrentPos(ctx);
        final String serviceVarName = null;
        final DiagnosticPos varPos = serviceDefPos;
        this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, true,
                ctx.serviceConstructorExpr().annotationAttachment().size());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitAssignmentStatement(BallerinaParser.AssignmentStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addAssignmentStatement(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitListDestructuringStatement(BallerinaParser.ListDestructuringStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addTupleDestructuringStatement(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitRecordDestructuringStatement(BallerinaParser.RecordDestructuringStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addRecordDestructuringStatement(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitErrorDestructuringStatement(BallerinaParser.ErrorDestructuringStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addErrorDestructuringStatement(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitCompoundAssignmentStatement(BallerinaParser.CompoundAssignmentStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        // Strip the trailing `=` from e.g. `+=` to obtain the underlying operator.
        String compoundOperatorText = ctx.compoundOperator().getText();
        String operator = compoundOperatorText.substring(0, compoundOperatorText.length() - 1);
        this.pkgBuilder.addCompoundAssignmentStatement(getCurrentPos(ctx), getWS(ctx), operator);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitCompoundOperator(BallerinaParser.CompoundOperatorContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addCompoundOperator(getWS(ctx));
    }

    @Override
    public void enterVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startExprNodeList();
    }

    @Override
    public void exitVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) {
        if (isInErrorState) {
            return;
        }

        // childCount / 2 + 1 counts the comma-separated references.
        this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterIfElseStatement(BallerinaParser.IfElseStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startIfElseNode(getCurrentPos(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitIfElseStatement(BallerinaParser.IfElseStatementContext ctx) {
        if (isInErrorState) {
            return;
        }
this.pkgBuilder.endIfElseNode(getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitIfClause(BallerinaParser.IfClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addIfBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterElseIfClause(BallerinaParser.ElseIfClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        // An `else if` starts a nested if-else node.
        this.pkgBuilder.startIfElseNode(getCurrentPos(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitElseIfClause(BallerinaParser.ElseIfClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addElseIfBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterElseClause(BallerinaParser.ElseClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startBlock();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitElseClause(BallerinaParser.ElseClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addElseBlock(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterMatchStatement(BallerinaParser.MatchStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createMatchNode(getCurrentPos(ctx));
    }

    @Override
    public void exitMatchStatement(BallerinaParser.MatchStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.completeMatchNode(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startMatchStmtPattern();
    }

    // A clause with a binding or error pattern is a structured pattern (optionally
    // guarded by `if`); anything else is a static value pattern.
    @Override
    public void exitMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        if (ctx.bindingPattern() != null || ctx.errorMatchPattern() != null) {
            boolean isTypeGuardPresent = ctx.IF() != null;
            this.pkgBuilder.addMatchStmtStructuredBindingPattern(getCurrentPos(ctx), getWS(ctx),
                    isTypeGuardPresent);
            return;
        }

        this.pkgBuilder.addMatchStmtStaticBindingPattern(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterForeachStatement(BallerinaParser.ForeachStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startForeachStatement();
    }

    // Dispatches a foreach statement by the shape of its binding pattern
    // (simple identifier, record, error, or list/tuple).
    @Override
    public void exitForeachStatement(BallerinaParser.ForeachStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean isDeclaredWithVar = ctx.VAR() != null;

        if (ctx.bindingPattern().Identifier() != null) {
            String identifier = ctx.bindingPattern().Identifier().getText();
            DiagnosticPos identifierPos = getCurrentPos(ctx.bindingPattern().Identifier());
            this.pkgBuilder.addForeachStatementWithSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                    identifier, identifierPos, isDeclaredWithVar);
        } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) {
            this.pkgBuilder.addForeachStatementWithRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                    isDeclaredWithVar);
        } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) {
            this.pkgBuilder.addForeachStatementWithErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                    isDeclaredWithVar);
        } else {
            this.pkgBuilder.addForeachStatementWithTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                    isDeclaredWithVar);
        }
    }

    @Override
    public void exitIntRangeExpression(BallerinaParser.IntRangeExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addIntRangeExpression(getCurrentPos(ctx), getWS(ctx),
                ctx.LEFT_PARENTHESIS() == null, ctx.RIGHT_PARENTHESIS() == null,
                ctx.expression(1) == null);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterWhileStatement(BallerinaParser.WhileStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startWhileStmt();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitWhileStatement(BallerinaParser.WhileStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addWhileStmt(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitContinueStatement(BallerinaParser.ContinueStatementContext ctx) {
        if
(isInErrorState) {
            return;
        }

        this.pkgBuilder.addContinueStatement(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitBreakStatement(BallerinaParser.BreakStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addBreakStatement(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startForkJoinStmt();
    }

    @Override
    public void exitForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addForkJoinStmt(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startTryCatchFinallyStmt();
    }

    @Override
    public void exitTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addTryCatchFinallyStmt(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterCatchClauses(BallerinaParser.CatchClausesContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addTryClause(getCurrentPos(ctx));
    }

    @Override
    public void enterCatchClause(BallerinaParser.CatchClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startCatchClause();
    }

    @Override
    public void exitCatchClause(BallerinaParser.CatchClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        String paramName = ctx.Identifier().getText();
        this.pkgBuilder.addCatchClause(getCurrentPos(ctx), getWS(ctx), paramName);
    }

    @Override
    public void enterFinallyClause(BallerinaParser.FinallyClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startFinallyBlock();
    }

    @Override
    public void exitFinallyClause(BallerinaParser.FinallyClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addFinallyBlock(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitThrowStatement(BallerinaParser.ThrowStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addThrowStmt(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitPanicStatement(BallerinaParser.PanicStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addPanicStmt(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitReturnStatement(BallerinaParser.ReturnStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addReturnStatement(this.getCurrentPos(ctx), getWS(ctx), ctx.expression() != null);
    }

    // Worker receive: the peer is either the `default` keyword or an explicit worker name.
    @Override
    public void exitWorkerReceiveExpression(BallerinaParser.WorkerReceiveExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        String workerName = ctx.peerWorker().DEFAULT() != null
                ? ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText();
        this.pkgBuilder.addWorkerReceiveExpr(getCurrentPos(ctx), getWS(ctx), workerName,
                ctx.expression() != null);
    }

    @Override
    public void exitFlushWorker(BallerinaParser.FlushWorkerContext ctx) {
        if (isInErrorState) {
            return;
        }

        // Flush may name a specific worker, or null to flush without a target name.
        String workerName = ctx.Identifier() != null ? ctx.Identifier().getText() : null;
        this.pkgBuilder.addWorkerFlushExpr(getCurrentPos(ctx), getWS(ctx), workerName);
    }

    @Override
    public void exitWorkerSendAsyncStatement(BallerinaParser.WorkerSendAsyncStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        String workerName = ctx.peerWorker().DEFAULT() != null
                ? ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText();
        this.pkgBuilder.addWorkerSendStmt(getCurrentPos(ctx), getWS(ctx), workerName,
                ctx.expression().size() > 1);
    }

    @Override
    public void exitWorkerSendSyncExpression(BallerinaParser.WorkerSendSyncExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        String workerName = ctx.peerWorker().DEFAULT() != null ?
ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText();
        this.pkgBuilder.addWorkerSendSyncExpr(getCurrentPos(ctx), getWS(ctx), workerName);
    }

    @Override
    public void exitWaitExpression(BallerinaParser.WaitExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        // A wait-for-collection (`wait {…}`) waits for all entries; otherwise it is
        // a plain single wait.
        if (ctx.waitForCollection() != null) {
            this.pkgBuilder.handleWaitForAll(getCurrentPos(ctx), getWS(ctx));
        } else {
            this.pkgBuilder.handleWait(getCurrentPos(ctx), getWS(ctx));
        }
    }

    @Override
    public void enterWaitForCollection(BallerinaParser.WaitForCollectionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startWaitForAll();
    }

    @Override
    public void exitWaitKeyValue(BallerinaParser.WaitKeyValueContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean containsExpr = ctx.expression() != null;
        this.pkgBuilder.addKeyValueToWaitForAll(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(),
                containsExpr);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitXmlAttribVariableReference(BallerinaParser.XmlAttribVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean isSingleAttrRef = ctx.xmlAttrib().expression() != null;
        this.pkgBuilder.createXmlAttributesRefExpr(getCurrentPos(ctx), getWS(ctx), isSingleAttrRef);
    }

    @Override
    public void exitSimpleVariableReference(BallerinaParser.SimpleVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitInvocation(BallerinaParser.InvocationContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addInvocationWS(getWS(ctx));
    }

    // A method invocation on a string literal: first pushes the string literal
    // (quotes stripped, Java escapes resolved), then the invocation node.
    @Override
    public void exitStringFunctionInvocationReference(BallerinaParser.StringFunctionInvocationReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        TerminalNode node = ctx.QuotedStringLiteral();
        DiagnosticPos pos = getCurrentPos(ctx);
        Set<Whitespace> ws = getWS(ctx);

        String actualText = node.getText();
        actualText = actualText.substring(1, actualText.length() - 1);
        actualText = StringEscapeUtils.unescapeJava(actualText);
        this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, actualText, node.getText());

        boolean argsAvailable = ctx.invocation().invocationArgList() != null;
        BallerinaParser.AnyIdentifierNameContext identifierContext = ctx.invocation().anyIdentifierName();
        String invocation = identifierContext.getText();
        this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable,
                getCurrentPos(identifierContext));
    }

    // Same as above, but the string literal is parenthesised, so a group expression
    // is created around it after the invocation.
    @Override
    public void exitGroupStringFunctionInvocationReference(GroupStringFunctionInvocationReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        TerminalNode node = ctx.QuotedStringLiteral();
        DiagnosticPos pos = getCurrentPos(ctx);
        Set<Whitespace> ws = getWS(ctx);

        String actualText = node.getText();
        actualText = actualText.substring(1, actualText.length() - 1);
        actualText = StringEscapeUtils.unescapeJava(actualText);
        this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, actualText, node.getText());

        InvocationContext invocation = ctx.invocation();
        boolean argsAvailable = invocation.invocationArgList() != null;
        BallerinaParser.AnyIdentifierNameContext identifierContext = invocation.anyIdentifierName();
        String invocationText = identifierContext.getText();
        this.pkgBuilder.createInvocationNode(getCurrentPos(invocation), getWS(invocation), invocationText,
                argsAvailable, getCurrentPos(identifierContext));
        this.pkgBuilder.createGroupExpression(getCurrentPos(node), getWS(ctx));
    }

    @Override
    public void exitFunctionInvocation(BallerinaParser.FunctionInvocationContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean argsAvailable = ctx.invocationArgList() != null;
        this.pkgBuilder.createFunctionInvocation(getCurrentPos(ctx), getWS(ctx), argsAvailable);
    }

    @Override
    public void exitFieldVariableReference(BallerinaParser.FieldVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        createFieldBasedAccessNode(ctx, ctx.field());
    }

    @Override
    public void
exitGroupFieldVariableReference(BallerinaParser.GroupFieldVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        FieldContext field = ctx.field();
        VariableReferenceContext groupExpression = ctx.variableReference();
        createFieldBasedAccessNode(field, field);
        this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression));
    }

    // Shared helper for field-based access: classifies the field as a wildcard (`*`),
    // a single name, or a namespace-qualified name, and forwards to the builder.
    private void createFieldBasedAccessNode(ParserRuleContext ctx, FieldContext field) {
        String fieldName;
        FieldKind fieldType;
        String nsName = null;
        DiagnosticPos nsPos = null;
        if (field.Identifier().isEmpty()) {
            fieldName = field.MUL().getText();
            fieldType = FieldKind.ALL;
        } else if (field.Identifier().size() == 1) {
            fieldName = field.Identifier(0).getText();
            fieldType = FieldKind.SINGLE;
        } else {
            nsName = field.Identifier(0).getText();
            nsPos = getCurrentPos(field.Identifier(0));
            fieldName = field.Identifier(1).getText();
            fieldType = FieldKind.WITH_NS;
        }
        this.pkgBuilder.createFieldBasedAccessNode(getCurrentPos(ctx), getWS(ctx), fieldName,
                getCurrentPos(field), nsName, nsPos, fieldType, field.OPTIONAL_FIELD_ACCESS() != null);
    }

    @Override
    public void exitMapArrayVariableReference(BallerinaParser.MapArrayVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitGroupMapArrayVariableReference(BallerinaParser.GroupMapArrayVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        IndexContext index = ctx.index();
        VariableReferenceContext groupExpression = ctx.variableReference();
        this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(index), getWS(index));
        this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression));
    }

    @Override
    public void exitReservedWord(BallerinaParser.ReservedWordContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startInvocationNode(getWS(ctx));
    }

    // A reserved word used as an identifier already started the invocation node in
    // exitReservedWord; only start one here for ordinary identifiers.
    @Override
    public void exitAnyIdentifierName(BallerinaParser.AnyIdentifierNameContext ctx) {
        if (isInErrorState) {
            return;
        }

        if (ctx.reservedWord() == null) {
            this.pkgBuilder.startInvocationNode(getWS(ctx));
        }
    }

    @Override
    public void exitInvocationReference(BallerinaParser.InvocationReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean argsAvailable = ctx.invocation().invocationArgList() != null;
        BallerinaParser.AnyIdentifierNameContext identifierContext = ctx.invocation().anyIdentifierName();
        String invocation = identifierContext.getText();
        this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable,
                getCurrentPos(identifierContext));
    }

    @Override
    public void exitGroupInvocationReference(BallerinaParser.GroupInvocationReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        InvocationContext invocation = ctx.invocation();
        VariableReferenceContext groupExpression = ctx.variableReference();
        boolean argsAvailable = invocation.invocationArgList() != null;
        BallerinaParser.AnyIdentifierNameContext identifierContext = invocation.anyIdentifierName();
        String invocationText = identifierContext.getText();
        // The group expression is created first, then the invocation on top of it.
        this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression));
        this.pkgBuilder.createInvocationNode(getCurrentPos(invocation), getWS(invocation), invocationText,
                argsAvailable, getCurrentPos(identifierContext));
    }

    @Override
    public void exitTypeDescExprInvocationReference(BallerinaParser.TypeDescExprInvocationReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean argsAvailable = ctx.invocation().invocationArgList() != null;
        BallerinaParser.AnyIdentifierNameContext identifierContext = ctx.invocation().anyIdentifierName();
        String invocation = identifierContext.getText();
        this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable,
                getCurrentPos(identifierContext));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterInvocationArgList(BallerinaParser.InvocationArgListContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startExprNodeList();
}

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitInvocationArgList(BallerinaParser.InvocationArgListContext ctx) {
        if (isInErrorState) {
            return;
        }

        // childCount / 2 + 1 counts the comma-separated arguments.
        this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1);
    }

    // Mirrors enterInvocationArgList for plain expression lists.
    // (Added the @Override annotation missing in the original, consistent with
    // every other listener override in this class.)
    @Override
    public void enterExpressionList(BallerinaParser.ExpressionListContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startExprNodeList();
    }

    @Override
    public void exitExpressionList(BallerinaParser.ExpressionListContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1);
    }

    @Override
    public void exitExpressionStmt(BallerinaParser.ExpressionStmtContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addExpressionStmt(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterTransactionStatement(BallerinaParser.TransactionStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startTransactionStmt();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitTransactionStatement(BallerinaParser.TransactionStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        DiagnosticPos pos = getCurrentPos(ctx);
        this.pkgBuilder.endTransactionStmt(pos, getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitTransactionClause(BallerinaParser.TransactionClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addTransactionBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitTransactionPropertyInitStatementList(
            BallerinaParser.TransactionPropertyInitStatementListContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.endTransactionPropertyInitStatementList(getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterLockStatement(BallerinaParser.LockStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startLockStmt();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitLockStatement(BallerinaParser.LockStatementContext ctx) {
        if (isInErrorState) {
            return;
        }
this.pkgBuilder.addLockStmt(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterOnretryClause(BallerinaParser.OnretryClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startOnretryBlock();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitOnretryClause(BallerinaParser.OnretryClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addOnretryBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterCommittedClause(BallerinaParser.CommittedClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startCommittedBlock();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitCommittedClause(BallerinaParser.CommittedClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.endCommittedBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterAbortedClause(BallerinaParser.AbortedClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startAbortedBlock();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitAbortedClause(BallerinaParser.AbortedClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.endAbortedBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitAbortStatement(BallerinaParser.AbortStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addAbortStatement(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitRetryStatement(BallerinaParser.RetryStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addRetryStatement(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitRetriesStatement(BallerinaParser.RetriesStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addRetryCountExpression(getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    // No action is taken on entry; all work happens in exitNamespaceDeclaration.
    @Override
    public void enterNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) {
    }

    @Override
    public void
exitNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { if (isInErrorState) { return; } boolean isTopLevel = ctx.parent instanceof BallerinaParser.CompilationUnitContext; String namespaceUri = ctx.QuotedStringLiteral().getText(); DiagnosticPos pos = getCurrentPos(ctx); namespaceUri = namespaceUri.substring(1, namespaceUri.length() - 1); namespaceUri = StringEscapeUtils.unescapeJava(namespaceUri); String prefix = (ctx.Identifier() != null) ? ctx.Identifier().getText() : null; DiagnosticPos prefixPos = (ctx.Identifier() != null) ? getCurrentPos(ctx.Identifier()) : null; this.pkgBuilder.addXMLNSDeclaration(pos, getWS(ctx), namespaceUri, prefix, prefixPos, isTopLevel); } @Override public void exitBinaryDivMulModExpression(BallerinaParser.BinaryDivMulModExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryOrExpression(BallerinaParser.BinaryOrExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryRefEqualExpression(BallerinaParser.BinaryRefEqualExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryEqualExpression(BallerinaParser.BinaryEqualExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitStaticMatchOrExpression(BallerinaParser.StaticMatchOrExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitStaticMatchIdentifierLiteral(BallerinaParser.StaticMatchIdentifierLiteralContext ctx) { if (isInErrorState) { return; } 
this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, ctx.Identifier().getText()); this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTypeDescExpr(BallerinaParser.TypeDescExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeAccessExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitActionInvocation(BallerinaParser.ActionInvocationContext ctx) { if (isInErrorState) { return; } int numAnnotations = ctx.annotationAttachment().size(); this.pkgBuilder.createActionInvocationNode(getCurrentPos(ctx), getWS(ctx), ctx.START() != null, numAnnotations); } @Override public void exitBinaryAndExpression(BallerinaParser.BinaryAndExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryAddSubExpression(BallerinaParser.BinaryAddSubExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBitwiseExpression(BallerinaParser.BitwiseExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBitwiseShiftExpression(BallerinaParser.BitwiseShiftExpressionContext ctx) { if (isInErrorState) { return; } StringBuilder operator = new StringBuilder(); for (int i = 1; i < ctx.getChildCount() - 1; i++) { operator.append(ctx.getChild(i).getText()); } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), operator.toString()); } /** * {@inheritDoc} */ @Override public void exitTypeConversionExpression(BallerinaParser.TypeConversionExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeConversionExpr(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size(), ctx.typeName() != null); } 
@Override public void exitBinaryCompareExpression(BallerinaParser.BinaryCompareExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitIntegerRangeExpression(BallerinaParser.IntegerRangeExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitUnaryExpression(BallerinaParser.UnaryExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createUnaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitTypeTestExpression(BallerinaParser.TypeTestExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeTestExpression(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitAnnotAccessExpression(BallerinaParser.AnnotAccessExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createAnnotAccessNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitGroupExpression(BallerinaParser.GroupExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createGroupExpression(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTernaryExpression(BallerinaParser.TernaryExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTernaryExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitCheckedExpression(BallerinaParser.CheckedExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createCheckedExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitCheckPanickedExpression(BallerinaParser.CheckPanickedExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createCheckPanickedExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitFromClause(BallerinaParser.FromClauseContext ctx) { if (isInErrorState) { 
return; } boolean isDeclaredWithVar = ctx.VAR() != null; if (ctx.bindingPattern().Identifier() != null) { String identifier = ctx.bindingPattern().Identifier().getText(); DiagnosticPos identifierPos = getCurrentPos(ctx.bindingPattern().Identifier()); this.pkgBuilder.createFromClauseWithSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), identifier, identifierPos, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) { this.pkgBuilder.createFromClauseWithRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) { this.pkgBuilder.createFromClauseWithErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else { this.pkgBuilder.createFromClauseWithTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } } public void enterLetClause(BallerinaParser.LetClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLetVarList(); } public void exitLetClause(BallerinaParser.LetClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLetClause(getCurrentPos(ctx)); } @Override public void exitWhereClause(BallerinaParser.WhereClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createWhereClause(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitSelectClause(BallerinaParser.SelectClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createSelectClause(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitDoClause(BallerinaParser.DoClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createDoClause(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitQueryExpr(BallerinaParser.QueryExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createQueryExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void 
enterQueryAction(BallerinaParser.QueryActionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startDoActionBlock(); } @Override public void exitQueryAction(BallerinaParser.QueryActionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createQueryActionExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitNameReference(BallerinaParser.NameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier().size() == 2) { String pkgName = ctx.Identifier(0).getText(); String name = ctx.Identifier(1).getText(); DiagnosticPos pos = getCurrentPos(ctx); if (Names.IGNORE.value.equals(pkgName)) { dlog.error(pos, DiagnosticCode.INVALID_PACKAGE_NAME_QUALIFER, pkgName); } this.pkgBuilder.addNameReference(pos, getWS(ctx), pkgName, name); } else { String name = ctx.Identifier(0).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } @Override public void exitFunctionNameReference(BallerinaParser.FunctionNameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { String pkgName = ctx.Identifier().getText(); String name = ctx.anyIdentifierName().getText(); DiagnosticPos pos = getCurrentPos(ctx); if (Names.IGNORE.value.equals(pkgName)) { dlog.error(pos, DiagnosticCode.INVALID_PACKAGE_NAME_QUALIFER, pkgName); } this.pkgBuilder.addNameReference(pos, getWS(ctx), pkgName, name); } else { String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } /** * {@inheritDoc} */ @Override public void exitReturnParameter(BallerinaParser.ReturnParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void enterParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void 
exitParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } private String fillWithZeros(String str) { while (str.length() < 4) { str = "0".concat(str); } return str; } /** * {@inheritDoc} */ @Override public void exitSimpleLiteral(BallerinaParser.SimpleLiteralContext ctx) { if (isInErrorState) { return; } TerminalNode node; DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); Object value; BallerinaParser.IntegerLiteralContext integerLiteralContext = ctx.integerLiteral(); if (integerLiteralContext != null && (value = getIntegerLiteral(ctx, ctx.integerLiteral())) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.INT, value, ctx.getText()); } else if (ctx.floatingPointLiteral() != null) { if ((node = ctx.floatingPointLiteral().DecimalFloatingPointNumber()) != null) { String nodeValue = getNodeValue(ctx, node); int literalTypeTag = NumericLiteralSupport.isDecimalDiscriminated(nodeValue) ? 
TypeTags.DECIMAL : TypeTags.FLOAT; this.pkgBuilder.addLiteralValue(pos, ws, literalTypeTag, nodeValue, node.getText()); } else if ((node = ctx.floatingPointLiteral().HexadecimalFloatingPointLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, getHexNodeValue(ctx, node), node.getText()); } } else if ((node = ctx.BooleanLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BOOLEAN, Boolean.parseBoolean(node.getText()), node.getText()); } else if ((node = ctx.QuotedStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length() - 1); String originalText = text; Matcher matcher = pattern.matcher(text); int position = 0; while (matcher.find(position)) { String hexStringVal = matcher.group(1); int hexDecimalVal = Integer.parseInt(hexStringVal, 16); if ((hexDecimalVal >= Constants.MIN_UNICODE && hexDecimalVal <= Constants.MIDDLE_LIMIT_UNICODE) || hexDecimalVal > Constants.MAX_UNICODE) { String hexStringWithBraces = matcher.group(0); int offset = originalText.indexOf(hexStringWithBraces) + 1; dlog.error(new DiagnosticPos(diagnosticSrc, pos.sLine, pos.eLine, pos.sCol + offset, pos.sCol + offset + hexStringWithBraces.length()), DiagnosticCode.INVALID_UNICODE, hexStringWithBraces); } text = matcher.replaceFirst("\\\\u" + fillWithZeros(hexStringVal)); position = matcher.end() - 2; matcher = pattern.matcher(text); } text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } else if (ctx.NullLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "null"); } else if (ctx.nilLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "()"); } else if (ctx.blobLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BYTE_ARRAY, ctx.blobLiteral().getText()); } } /** * {@inheritDoc} */ @Override public void exitNamedArgs(BallerinaParser.NamedArgsContext ctx) { if (isInErrorState) 
{ return; } this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } /** * {@inheritDoc} */ @Override public void exitRestArgs(BallerinaParser.RestArgsContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestArgument(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitXmlLiteral(BallerinaParser.XmlLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.attachXmlLiteralWS(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitComment(BallerinaParser.CommentContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLCommentTemplateText()); String endingString = getTemplateEndingStr(ctx.XMLCommentText()); this.pkgBuilder.createXMLCommentLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitElement(BallerinaParser.ElementContext ctx) { if (isInErrorState) { return; } if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitStartTag(BallerinaParser.StartTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitCloseTag(BallerinaParser.CloseTagContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endXMLElement(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitEmptyTag(BallerinaParser.EmptyTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ 
@Override public void exitProcIns(BallerinaParser.ProcInsContext ctx) { if (isInErrorState) { return; } String targetQName = ctx.XML_TAG_SPECIAL_OPEN().getText(); targetQName = targetQName.substring(2, targetQName.length() - 1); Stack<String> textFragments = getTemplateTextFragments(ctx.XMLPITemplateText()); String endingText = getTemplateEndingStr(ctx.XMLPIText()); endingText = endingText.substring(0, endingText.length() - 2); this.pkgBuilder.createXMLPILiteral(getCurrentPos(ctx), getWS(ctx), targetQName, textFragments, endingText); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitAttribute(BallerinaParser.AttributeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createXMLAttribute(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitText(BallerinaParser.TextContext ctx) { if (isInErrorState) { return; } Stack<String> textFragments = getTemplateTextFragments(ctx.XMLTemplateText()); String endingText = getTemplateEndingStr(ctx.XMLText()); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addXMLTextToElement(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } else { this.pkgBuilder.createXMLTextLiteral(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } } /** * {@inheritDoc} */ @Override public void exitXmlSingleQuotedString(BallerinaParser.XmlSingleQuotedStringContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLSingleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLSingleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.SINGLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlDoubleQuotedString(BallerinaParser.XmlDoubleQuotedStringContext ctx) { if (isInErrorState) { return; } 
Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLDoubleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLDoubleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.DOUBLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlQualifiedName(BallerinaParser.XmlQualifiedNameContext ctx) { if (isInErrorState) { return; } List<TerminalNode> qnames = ctx.XMLQName(); String prefix = null; String localname; if (qnames.size() > 1) { prefix = qnames.get(0).getText(); localname = qnames.get(1).getText(); } else { localname = qnames.get(0).getText(); } this.pkgBuilder.createXMLQName(getCurrentPos(ctx), getWS(ctx), localname, prefix); } /** * {@inheritDoc} */ @Override public void exitStringTemplateLiteral(BallerinaParser.StringTemplateLiteralContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments; String endingText = null; StringTemplateContentContext contentContext = ctx.stringTemplateContent(); if (contentContext != null) { stringFragments = getTemplateTextFragments(contentContext.StringTemplateExpressionStart()); endingText = getTemplateEndingStr(contentContext.StringTemplateText()); } else { stringFragments = new Stack<>(); } this.pkgBuilder.createStringTemplateLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingText); } /** * {@inheritDoc} */ @Override public void enterDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startMarkdownDocumentationString(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endMarkdownDocumentationString(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationLine(BallerinaParser.DocumentationLineContext ctx) { if (isInErrorState) { return; } 
this.pkgBuilder.endMarkDownDocumentLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationContent(BallerinaParser.DocumentationContentContext ctx) { if (isInErrorState) { return; } String text = ctx.getText() != null ? ctx.getText() : ""; this.pkgBuilder.endMarkdownDocumentationText(getCurrentPos(ctx), getWS(ctx), text); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentationLine(BallerinaParser.ParameterDocumentationLineContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endParameterDocumentationLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentation(BallerinaParser.ParameterDocumentationContext ctx) { if (isInErrorState) { return; } String parameterName = ctx.docParameterName() != null ? ctx.docParameterName().getText() : ""; String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentation(getCurrentPos(ctx.docParameterName()), getWS(ctx), parameterName, description); } /** * {@inheritDoc} */ @Override public void exitParameterDescriptionLine(BallerinaParser.ParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentationDescription(getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDocumentation(BallerinaParser.ReturnParameterDocumentationContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDescriptionLine(BallerinaParser.ReturnParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? 
ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentationDescription(getWS(ctx), description); } @Override public void exitDeprecatedAnnotationDocumentation(BallerinaParser.DeprecatedAnnotationDocumentationContext ctx) { if (isInErrorState) { return; } String str = ctx.DeprecatedDocumentation() != null ? ctx.DeprecatedDocumentation().getText() : ""; this.pkgBuilder.endDeprecationAnnotationDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx), str); } @Override public void exitDeprecateAnnotationDescriptionLine(BallerinaParser.DeprecateAnnotationDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endDeprecateAnnotationDocumentationDescription(getWS(ctx), description); } @Override @Override public void exitTrapExpression(BallerinaParser.TrapExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTrapExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitVariableReferenceExpression(BallerinaParser.VariableReferenceExpressionContext ctx) { if (isInErrorState) { return; } if (ctx.START() != null) { int numAnnotations = ctx.annotationAttachment().size(); this.pkgBuilder.markLastInvocationAsAsync(getCurrentPos(ctx), numAnnotations); } } @Override public void exitDocumentationReference(BallerinaParser.DocumentationReferenceContext ctx) { if (isInErrorState) { return; } BallerinaParser.ReferenceTypeContext referenceType = ctx.referenceType(); BallerinaParser.SingleBacktickedContentContext backtickedContent = ctx.singleBacktickedContent(); this.pkgBuilder.endDocumentationReference(getCurrentPos(ctx), referenceType.getText(), backtickedContent.getText()); } @Override public void exitSingleBacktickedBlock(BallerinaParser.SingleBacktickedBlockContext ctx) { if (isInErrorState) { return; } BallerinaParser.SingleBacktickedContentContext backtickedContent = ctx.singleBacktickedContent(); 
this.pkgBuilder.endSingleBacktickedBlock(getCurrentPos(ctx), backtickedContent.getText()); } /** * {@inheritDoc} */ @Override public void exitElvisExpression(BallerinaParser.ElvisExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createElvisExpr(getCurrentPos(ctx), getWS(ctx)); } private DiagnosticPos getCurrentPos(ParserRuleContext ctx) { int startLine = ctx.getStart().getLine(); int startCol = ctx.getStart().getCharPositionInLine() + 1; int endLine = -1; int endCol = -1; Token stop = ctx.getStop(); if (stop != null) { endLine = stop.getLine(); endCol = stop.getCharPositionInLine() + (stop.getStopIndex() - stop.getStartIndex() + 1) + 1; } return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol); } private DiagnosticPos getCurrentPos(TerminalNode node) { Token symbol = node.getSymbol(); int startLine = symbol.getLine(); int startCol = symbol.getCharPositionInLine() + 1; int endLine = startLine; int endCol = startCol + symbol.getText().length(); return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol); } protected Set<Whitespace> getWS(ParserRuleContext ctx) { return null; } private Stack<String> getTemplateTextFragments(List<TerminalNode> nodes) { Stack<String> templateStrFragments = new Stack<>(); nodes.forEach(node -> { if (node == null) { templateStrFragments.push(null); } else { String str = node.getText(); templateStrFragments.push(str.substring(0, str.length() - 2)); } }); return templateStrFragments; } private String getTemplateEndingStr(TerminalNode node) { return node == null ? 
null : node.getText(); } private String getTemplateEndingStr(List<TerminalNode> nodes) { StringJoiner joiner = new StringJoiner(""); nodes.forEach(node -> joiner.add(node.getText())); return joiner.toString(); } private String getNodeValue(ParserRuleContext ctx, TerminalNode node) { String op = ctx.getChild(0).getText(); String value = node.getText(); if (op != null && "-".equals(op)) { value = "-" + value; } return value; } private String getHexNodeValue(ParserRuleContext ctx, TerminalNode node) { String value = getNodeValue(ctx, node); if (!(value.contains("p") || value.contains("P"))) { value = value + "p0"; } return value; } private Object getIntegerLiteral(ParserRuleContext simpleLiteralContext, BallerinaParser.IntegerLiteralContext integerLiteralContext) { if (integerLiteralContext.DecimalIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.DecimalIntegerLiteral()); return parseLong(simpleLiteralContext, nodeValue, nodeValue, 10, DiagnosticCode.INTEGER_TOO_SMALL, DiagnosticCode.INTEGER_TOO_LARGE); } else if (integerLiteralContext.HexIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.HexIntegerLiteral()); String processedNodeValue = nodeValue.toLowerCase().replace("0x", ""); return parseLong(simpleLiteralContext, nodeValue, processedNodeValue, 16, DiagnosticCode.HEXADECIMAL_TOO_SMALL, DiagnosticCode.HEXADECIMAL_TOO_LARGE); } return null; } private Object parseLong(ParserRuleContext context, String originalNodeValue, String processedNodeValue, int radix, DiagnosticCode code1, DiagnosticCode code2) { try { return Long.parseLong(processedNodeValue, radix); } catch (Exception e) { DiagnosticPos pos = getCurrentPos(context); Set<Whitespace> ws = getWS(context); if (originalNodeValue.startsWith("-")) { dlog.error(pos, code1, originalNodeValue); } else { dlog.error(pos, code2, originalNodeValue); } } return originalNodeValue; } /** * Mark that this listener is in 
error state. */ public void setErrorState() { this.isInErrorState = true; } /** * Mark that this listener is not in an error state. */ public void unsetErrorState() { this.isInErrorState = false; } boolean isInErrorState() { return this.isInErrorState; } }
class BLangParserListener extends BallerinaParserBaseListener { private static final String KEYWORD_PUBLIC = "public"; private static final String KEYWORD_KEY = "key"; private BLangPackageBuilder pkgBuilder; private BDiagnosticSource diagnosticSrc; private BLangDiagnosticLogHelper dlog; private List<String> pkgNameComps; private String pkgVersion; private boolean isInErrorState = false; private Pattern pattern = Pattern.compile(Constants.UNICODE_REGEX); BLangParserListener(CompilerContext context, CompilationUnitNode compUnit, BDiagnosticSource diagnosticSource) { this.pkgBuilder = new BLangPackageBuilder(context, compUnit); this.diagnosticSrc = diagnosticSource; this.dlog = BLangDiagnosticLogHelper.getInstance(context); } @Override public void enterParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitParameter(BallerinaParser.ParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), false, ctx.annotationAttachment().size(), ctx.PUBLIC() != null); } /** * {@inheritDoc} */ @Override public void enterFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFormalParameterList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDefaultableParameter(BallerinaParser.DefaultableParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addDefaultableParam(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRestParameter(BallerinaParser.RestParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), 
ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), ctx.annotationAttachment().size());
    }

    @Override
    public void exitRestParameterTypeName(BallerinaParser.RestParameterTypeNameContext ctx) {
        if (isInErrorState) {
            return;
        }
        // FIX: use the qualified this.pkgBuilder form for consistency with every
        // other handler in this listener (was a bare pkgBuilder reference).
        this.pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), null, null, 0);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitParameterTypeName(BallerinaParser.ParameterTypeNameContext ctx) {
        if (isInErrorState) {
            return;
        }
        this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), null, null, false, 0);
    }

    @Override
    public void enterCompilationUnit(BallerinaParser.CompilationUnitContext ctx) {
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitCompilationUnit(BallerinaParser.CompilationUnitContext ctx) {
        this.pkgBuilder.endCompilationUnit(getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitPackageName(BallerinaParser.PackageNameContext ctx) {
        if (isInErrorState) {
            return;
        }
        // Package name components and version are cached for the following import
        // declaration handler to consume.
        this.pkgNameComps = new ArrayList<>();
        ctx.Identifier().forEach(e -> pkgNameComps.add(e.getText()));
        this.pkgVersion = ctx.version() != null ? ctx.version().versionPattern().getText() : null;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitImportDeclaration(BallerinaParser.ImportDeclarationContext ctx) {
        if (isInErrorState) {
            return;
        }
        String alias = ctx.Identifier() != null ? ctx.Identifier().getText() : null;
        BallerinaParser.OrgNameContext orgNameContext = ctx.orgName();
        if (orgNameContext == null) {
            this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), null,
                    this.pkgNameComps, this.pkgVersion, alias);
        } else {
            this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), orgNameContext.getText(),
                    this.pkgNameComps, this.pkgVersion, alias);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitServiceDefinition(BallerinaParser.ServiceDefinitionContext ctx) {
        if (isInErrorState) {
            return;
        }
        final DiagnosticPos serviceDefPos = getCurrentPos(ctx);
        final String serviceVarName = ctx.Identifier() != null ?
ctx.Identifier().getText() : null; final DiagnosticPos varPos = ctx.Identifier() != null ? getCurrentPos(ctx.Identifier()) : serviceDefPos; this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, false); } /** * {@inheritDoc} */ @Override public void enterServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startServiceDef(getCurrentPos(ctx)); this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; this.pkgBuilder .addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, false, false, false, true); } /** * {@inheritDoc} */ @Override public void enterBlockFunctionBody(BallerinaParser.BlockFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startBlockFunctionBody(); } /** * {@inheritDoc} */ @Override public void exitBlockFunctionBody(BallerinaParser.BlockFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endBlockFunctionBody(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterExprFunctionBody(BallerinaParser.ExprFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprFunctionBody(); } /** * {@inheritDoc} */ @Override public void exitExprFunctionBody(BallerinaParser.ExprFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprFunctionBody(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterExternalFunctionBody(BallerinaParser.ExternalFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExternFunctionBody(); } /** * 
{@inheritDoc} */ @Override public void exitExternalFunctionBody(BallerinaParser.ExternalFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExternalFunctionBody(ctx.annotationAttachment().size(), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } int annotCount = ((BallerinaParser.CompilationUnitContext) ctx.parent.parent).annotationAttachment().size(); this.pkgBuilder.startFunctionDef(annotCount, false); } /** * {@inheritDoc} */ @Override public void exitFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } String funcName = ctx.anyIdentifierName().getText(); boolean publicFunc = ctx.PUBLIC() != null; boolean privateFunc = ctx.PRIVATE() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean nativeFunc = ctx.functionDefinitionBody().externalFunctionBody() != null; this.pkgBuilder.endFunctionDefinition(getCurrentPos(ctx), getWS(ctx), funcName, getCurrentPos(ctx.anyIdentifierName()), publicFunc, remoteFunc, nativeFunc, privateFunc, false); } @Override public void enterExplicitAnonymousFunctionExpr(BallerinaParser.ExplicitAnonymousFunctionExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLambdaFunctionDef(diagnosticSrc.pkgID); } @Override public void exitExplicitAnonymousFunctionExpr(BallerinaParser.ExplicitAnonymousFunctionExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLambdaFunctionDef(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterInferAnonymousFunctionExpr(BallerinaParser.InferAnonymousFunctionExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitInferAnonymousFunctionExpression(BallerinaParser.InferAnonymousFunctionExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addArrowFunctionDef(getCurrentPos(ctx), getWS(ctx), diagnosticSrc.pkgID); } 
// Record whitespace for an arrow-function inferred parameter list.
@Override
public void exitInferParamList(BallerinaParser.InferParamListContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addWSForInferParamList(getWS(ctx));
}

// An inferred parameter carries only a name; its type is deduced later.
@Override
public void exitInferParam(BallerinaParser.InferParamContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addVarWithoutType(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(),
            getCurrentPos(ctx.Identifier()), false, 0);
}

/** {@inheritDoc} */
@Override
public void exitFunctionSignature(BallerinaParser.FunctionSignatureContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.endFunctionSignature(getCurrentPos(ctx), getWS(ctx), ctx.formalParameterList() != null,
            ctx.returnParameter() != null,
            ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null);
}

/** {@inheritDoc} */
@Override
public void exitFiniteType(BallerinaParser.FiniteTypeContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.endFiniteType(getWS(ctx));
}

/** {@inheritDoc} */
@Override
public void exitTypeDefinition(BallerinaParser.TypeDefinitionContext ctx) {
    if (isInErrorState) { return; }
    boolean publicObject = ctx.PUBLIC() != null;
    this.pkgBuilder.endTypeDefinition(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(),
            getCurrentPos(ctx.Identifier()), publicObject);
}

/** {@inheritDoc} */
@Override
public void enterObjectBody(BallerinaParser.ObjectBodyContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startObjectType();
}

/** {@inheritDoc} */
@Override
public void exitObjectBody(BallerinaParser.ObjectBodyContext ctx) {
    if (isInErrorState) { return; }
    // Anonymous unless the object appears as a plain finite-type unit (with no siblings in the finite type).
    boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext)
            || (ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext
            && ctx.parent.parent.parent instanceof BallerinaParser.FiniteTypeContext
            && ctx.parent.parent.parent.getChildCount() > 1);
    // Field analysis is needed when the object is used as a global var, return param, or named type definition.
    boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext
            || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext)
            || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext;
    // abstract/client modifiers live on the enclosing object-type-name label.
    boolean isAbstract = ((ObjectTypeNameLabelContext) ctx.parent).ABSTRACT() != null;
    boolean isClient = ((ObjectTypeNameLabelContext) ctx.parent).CLIENT() != null;
    this.pkgBuilder.addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous,
            isAbstract, isClient, false);
}

/** {@inheritDoc} */
@Override
public void exitObjectTypeNameLabel(BallerinaParser.ObjectTypeNameLabelContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addObjectTypeName(getWS(ctx));
}

// Record a type reference (e.g. an object type inclusion) inside a type body.
@Override
public void exitTypeReference(BallerinaParser.TypeReferenceContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addTypeReference(getCurrentPos(ctx), getWS(ctx));
}

/** {@inheritDoc} */
@Override
public void exitFieldDefinition(BallerinaParser.FieldDefinitionContext ctx) {
    if (isInErrorState) { return; }
    DiagnosticPos currentPos = getCurrentPos(ctx);
    Set<Whitespace> ws = getWS(ctx);
    String name = ctx.Identifier().getText();
    DiagnosticPos identifierPos = getCurrentPos(ctx.Identifier());
    boolean exprAvailable = ctx.expression() != null;     // field has a default value expression
    boolean isOptional = ctx.QUESTION_MARK() != null;     // record field marked optional with '?'
    boolean markdownExists = ctx.documentationString() != null;
    this.pkgBuilder.addFieldVariable(currentPos, ws, name, identifierPos, exprAvailable,
            ctx.annotationAttachment().size(), false, isOptional, markdownExists);
}

/** {@inheritDoc} */
@Override
public void exitObjectFieldDefinition(BallerinaParser.ObjectFieldDefinitionContext ctx) {
    if (isInErrorState) { return; }
    DiagnosticPos currentPos = getCurrentPos(ctx);
    Set<Whitespace> ws = getWS(ctx);
    String name = ctx.Identifier().getText();
    DiagnosticPos identifierPos = getCurrentPos(ctx.Identifier());
    boolean exprAvailable = ctx.expression() != null;
    int annotationCount = ctx.annotationAttachment().size();
    boolean isPrivate = ctx.PRIVATE() != null;
    boolean isPublic = ctx.PUBLIC() != null;
    boolean markdownExists = ctx.documentationString() != null;
    this.pkgBuilder.addObjectFieldVariable(currentPos, ws, name, identifierPos, exprAvailable,
            annotationCount, isPrivate, isPublic, markdownExists);
}

/** {@inheritDoc} */
@Override
public void enterMethodDeclaration(BallerinaParser.MethodDeclarationContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startObjectFunctionDef();
}

/** {@inheritDoc} */
@Override
public void enterMethodDefinition(BallerinaParser.MethodDefinitionContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startObjectFunctionDef();
}

/** {@inheritDoc} */
@Override
public void exitMethodDefinition(BallerinaParser.MethodDefinitionContext ctx) {
    if (isInErrorState) { return; }
    String funcName = ctx.anyIdentifierName().getText();
    DiagnosticPos funcNamePos = getCurrentPos(ctx.anyIdentifierName());
    boolean publicFunc = ctx.PUBLIC() != null;
    boolean isPrivate = ctx.PRIVATE() != null;
    boolean remoteFunc = ctx.REMOTE() != null;
    boolean resourceFunc = ctx.RESOURCE() != null;
    boolean markdownDocExists = ctx.documentationString() != null;
    // 'false' -> this is a method definition (with a body), not a declaration.
    this.pkgBuilder.endObjectAttachedFunctionDef(getCurrentPos(ctx), getWS(ctx), funcName, funcNamePos,
            publicFunc, isPrivate, remoteFunc, resourceFunc, false, markdownDocExists,
            ctx.annotationAttachment().size());
}

/** {@inheritDoc} */
@Override
public void exitMethodDeclaration(BallerinaParser.MethodDeclarationContext ctx) {
    if (isInErrorState) { return; }
    String funcName = ctx.anyIdentifierName().getText();
    DiagnosticPos funcNamePos = getCurrentPos(ctx.anyIdentifierName());
    boolean isPublic = ctx.PUBLIC() != null;
    boolean isPrivate = ctx.PRIVATE() != null;
    boolean remoteFunc = ctx.REMOTE() != null;
    boolean resourceFunc = ctx.RESOURCE() != null;
    boolean markdownDocExists = ctx.documentationString() != null;
    // 'true' -> this is a body-less method declaration (e.g. in an abstract object).
    this.pkgBuilder.endObjectAttachedFunctionDef(getCurrentPos(ctx), getWS(ctx), funcName, funcNamePos,
            isPublic, isPrivate, remoteFunc, resourceFunc, true, markdownDocExists,
            ctx.annotationAttachment().size());
}

/** {@inheritDoc} */
@Override
public void enterAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startAnnotationDef(getCurrentPos(ctx));
}

/** {@inheritDoc} */
@Override
public void exitAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) {
    if (isInErrorState) { return; }
    // Visibility is detected from the first child token rather than a dedicated accessor.
    boolean publicAnnotation = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText());
    boolean isTypeAttached = ctx.typeName() != null;
    boolean isConst = ctx.CONST() != null;
    this.pkgBuilder.endAnnotationDef(getWS(ctx), ctx.Identifier().getText(),
            getCurrentPos(ctx.Identifier()), publicAnnotation, isTypeAttached, isConst);
}

/** {@inheritDoc} */
@Override
public void exitConstantDefinition(BallerinaParser.ConstantDefinitionContext ctx) {
    if (isInErrorState) { return; }
    boolean isPublic = ctx.PUBLIC() != null;
    boolean isTypeAvailable = ctx.typeName() != null;
    this.pkgBuilder.addConstant(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(),
            getCurrentPos(ctx.Identifier()), isPublic, isTypeAvailable);
}

// Constant-expression binary op: '/', '*' or '%'; the operator is the middle child.
@Override
public void exitConstDivMulModExpression(BallerinaParser.ConstDivMulModExpressionContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
}

// Constant-expression binary op: '+' or '-'; the operator is the middle child.
@Override
public void exitConstAddSubExpression(BallerinaParser.ConstAddSubExpressionContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
}

// Parenthesized constant expression.
@Override
public void exitConstGroupExpression(BallerinaParser.ConstGroupExpressionContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.createGroupExpression(getCurrentPos(ctx), getWS(ctx));
}

/** {@inheritDoc} */
@Override
public void exitGlobalVariableDefinition(BallerinaParser.GlobalVariableDefinitionContext ctx) {
    if (isInErrorState) { return; }
    boolean isPublic = ctx.PUBLIC() != null;
    boolean isFinal = ctx.FINAL() != null;
    boolean isDeclaredWithVar = ctx.VAR() != null;
    boolean isExpressionAvailable = ctx.expression() != null;
    boolean isListenerVar = ctx.LISTENER() != null;
    boolean isTypeNameProvided = ctx.typeName() != null;
    this.pkgBuilder.addGlobalVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(),
            getCurrentPos(ctx.Identifier()), isPublic, isFinal, isDeclaredWithVar, isExpressionAvailable,
            isListenerVar, isTypeNameProvided);
}

// Translate an annotation attachment-point rule into an AttachPoint model object.
@Override
public void exitAttachmentPoint(BallerinaParser.AttachmentPointContext ctx) {
    if (isInErrorState) { return; }
    AttachPoint attachPoint;
    if (ctx.dualAttachPoint() != null) {
        if (ctx.dualAttachPoint().SOURCE() != null) {
            // Dual attach point prefixed with 'source'.
            attachPoint = AttachPoint.getAttachmentPoint(ctx.dualAttachPoint().dualAttachPointIdent().getText(), true);
        } else {
            attachPoint = AttachPoint.getAttachmentPoint(ctx.getText(), false);
        }
    } else {
        // Source-only attach points are implicitly 'source'.
        attachPoint = AttachPoint.getAttachmentPoint(
                ctx.sourceOnlyAttachPoint().sourceOnlyAttachPointIdent().getText(), true);
    }
    this.pkgBuilder.addAttachPoint(attachPoint, getWS(ctx));
}

// Begin a named worker declaration in the current package.
@Override
public void enterWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startWorker(diagnosticSrc.pkgID);
}

@Override
public void exitWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) {
    if (isInErrorState) { return; }
    String workerName = null;
    DiagnosticPos workerNamePos = null;
    if (ctx.workerDefinition() != null) {
        workerName = escapeQuotedIdentifier(ctx.workerDefinition().Identifier().getText());
        workerNamePos = getCurrentPos(ctx.workerDefinition().Identifier());
    }
    // NOTE(review): this dereferences workerDefinition() unconditionally even though the
    // null-check above suggests it can be absent — confirm the grammar guarantees it here.
    boolean retParamsAvail = ctx.workerDefinition().returnParameter() != null;
    int numAnnotations = ctx.annotationAttachment().size();
    this.pkgBuilder.addWorker(
            getCurrentPos(ctx), getWS(ctx), workerName, workerNamePos, retParamsAvail, numAnnotations);
}

/** {@inheritDoc} */
@Override
public void exitWorkerDefinition(BallerinaParser.WorkerDefinitionContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.attachWorkerWS(getWS(ctx));
}

// Collect the dimensions and per-dimension sizes of an array type, e.g. int[2][*][].
@Override
public void exitArrayTypeNameLabel(BallerinaParser.ArrayTypeNameLabelContext ctx) {
    if (isInErrorState) { return; }
    int index = 1;
    int dimensions = 0;
    List<Integer> sizes = new ArrayList<>();
    List<ParseTree> children = ctx.children;
    while (index < children.size()) {
        if (children.get(index).getText().equals("[")) {
            if (children.get(index + 1).getText().equals("]")) {
                // '[]' -> unsealed (open-length) dimension.
                sizes.add(UNSEALED_ARRAY_INDICATOR);
                index += 2;
            } else if (children.get(index + 1).getText().equals(OPEN_SEALED_ARRAY)) {
                // '[*]' -> open-sealed dimension (size inferred).
                sizes.add(OPEN_SEALED_ARRAY_INDICATOR);
                index += 1;
            } else {
                // '[n]' -> fixed-size dimension.
                sizes.add(Integer.parseInt(children.get(index + 1).getText()));
                index += 1;
            }
            dimensions++;
        } else {
            index++;
        }
    }
    // Sizes were gathered left-to-right; the builder expects innermost-first order.
    Collections.reverse(sizes);
    this.pkgBuilder.addArrayType(
            getCurrentPos(ctx), getWS(ctx), dimensions, sizes.stream().mapToInt(val -> val).toArray());
}

@Override
public void exitUnionTypeNameLabel(BallerinaParser.UnionTypeNameLabelContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addUnionType(getCurrentPos(ctx), getWS(ctx));
}

@Override
public void exitTupleTypeNameLabel(BallerinaParser.TupleTypeNameLabelContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addTupleType(getCurrentPos(ctx), getWS(ctx), ctx.tupleTypeDescriptor().typeName().size(),
            ctx.tupleTypeDescriptor().tupleRestDescriptor() != null);
}

@Override
public void exitNullableTypeNameLabel(BallerinaParser.NullableTypeNameLabelContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.markTypeNodeAsNullable(getWS(ctx));
}

@Override
public void exitGroupTypeNameLabel(BallerinaParser.GroupTypeNameLabelContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.markTypeNodeAsGrouped(getWS(ctx));
}

@Override
public void enterInclusiveRecordTypeDescriptor(BallerinaParser.InclusiveRecordTypeDescriptorContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startRecordType();
}

@Override
public void exitInclusiveRecordTypeDescriptor(BallerinaParser.InclusiveRecordTypeDescriptorContext ctx) {
    if (isInErrorState) { return; }
    boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext);
    boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext
            || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext)
            || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext;
    // Inclusive records ('record { }') never carry an explicit rest field; not sealed.
    this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous,
            false, false);
}

@Override
public void enterExclusiveRecordTypeDescriptor(BallerinaParser.ExclusiveRecordTypeDescriptorContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startRecordType();
}

@Override
public void exitExclusiveRecordTypeDescriptor(BallerinaParser.ExclusiveRecordTypeDescriptorContext ctx) {
    if (isInErrorState) { return; }
    boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext);
    boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext
            || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext)
            || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext;
    // Exclusive records ('record {| |}') may declare a rest-field descriptor; sealed.
    boolean hasRestField = ctx.recordRestFieldDefinition() != null;
    this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous,
            hasRestField, true);
}

@Override
public void exitSimpleTypeName(BallerinaParser.SimpleTypeNameContext ctx) {
    if (isInErrorState) { return; }
    // Reference and value type names are handled by their own callbacks.
    if (ctx.referenceTypeName() != null || ctx.valueTypeName() != null) {
        return;
    }
    this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText());
}

@Override
public void exitUserDefineTypeName(BallerinaParser.UserDefineTypeNameContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addUserDefineType(getWS(ctx));
}

@Override
public void exitValueTypeName(BallerinaParser.ValueTypeNameContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getText());
}

@Override
public void exitBuiltInReferenceTypeName(BallerinaParser.BuiltInReferenceTypeNameContext ctx) {
    if (isInErrorState) { return; }
    // function / error / stream types each have dedicated callbacks.
    if (ctx.functionTypeName() != null) {
        return;
    }
    if (ctx.errorTypeName() != null) {
        return;
    }
    if (ctx.streamTypeName() != null) {
        return;
    }
    String typeName = ctx.getChild(0).getText();
    DiagnosticPos pos = getCurrentPos(ctx);
    if (ctx.typeName() != null) {
        // Constrained form, e.g. map<T>.
        this.pkgBuilder.addConstraintTypeWithTypeName(pos, getWS(ctx), typeName);
    } else {
        this.pkgBuilder.addBuiltInReferenceType(pos, getWS(ctx), typeName);
    }
}

@Override
public void exitStreamTypeName(BallerinaParser.StreamTypeNameContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addStreamTypeWithTypeName(getCurrentPos(ctx), getWS(ctx),
            ctx.typeName(0) != null, ctx.typeName(1) != null);
}

@Override
public void enterErrorTypeName(BallerinaParser.ErrorTypeNameContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startErrorType();
}

@Override
public void exitErrorTypeName(BallerinaParser.ErrorTypeNameContext ctx) {
    if (isInErrorState) { return; }
    boolean reasonTypeExists = !ctx.typeName().isEmpty();
    boolean detailsTypeExists = ctx.typeName().size() > 1;
    boolean isAnonymous = !(ctx.parent.parent.parent.parent.parent.parent
            instanceof BallerinaParser.FiniteTypeContext) && reasonTypeExists;
    this.pkgBuilder.addErrorType(getCurrentPos(ctx), getWS(ctx), reasonTypeExists, detailsTypeExists,
            isAnonymous);
}

@Override
public void exitFunctionTypeName(BallerinaParser.FunctionTypeNameContext ctx) {
    if (isInErrorState) { return; }
    boolean paramsAvail = false, retParamAvail = false, restParamAvail = false;
    if (ctx.parameterList() != null) {
        // Parameters given with names, e.g. function (int a).
        paramsAvail = ctx.parameterList().parameter().size() > 0;
        if (ctx.parameterList().restParameter() != null) {
            restParamAvail = true;
        }
    } else if (ctx.parameterTypeNameList() != null) {
        // Parameters given as bare types, e.g. function (int).
        paramsAvail = ctx.parameterTypeNameList().parameterTypeName().size() > 0;
        if (ctx.parameterTypeNameList().restParameterTypeName() != null) {
            restParamAvail = true;
        }
    }
    if (ctx.returnParameter() != null) {
        retParamAvail = true;
    }
    this.pkgBuilder.addFunctionType(getCurrentPos(ctx), getWS(ctx), paramsAvail, restParamAvail,
            retParamAvail);
}

/** {@inheritDoc} */
@Override
public void enterAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startAnnotationAttachment(getCurrentPos(ctx));
}

/** {@inheritDoc} */
@Override
public void exitAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.setAnnotationAttachmentName(getWS(ctx), ctx.recordLiteral() != null,
            getCurrentPos(ctx), false);
}

/** {@inheritDoc} */
@Override
public void exitErrorRestBindingPattern(BallerinaParser.ErrorRestBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addWSForErrorRestBinding(getWS(ctx));
}

@Override
public void exitErrorBindingPattern(BallerinaParser.ErrorBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    if (ctx.typeName() != null) {
        // Indirect error binding pattern (typed), with or without a rest binding.
        if (ctx.errorFieldBindingPatterns().errorRestBindingPattern() != null) {
            String restIdName = ctx.errorFieldBindingPatterns().errorRestBindingPattern().Identifier().getText();
            DiagnosticPos restPos = getCurrentPos(ctx.errorFieldBindingPatterns().errorRestBindingPattern());
            this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), restIdName, restPos);
        } else {
            this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), null, null);
        }
        return;
    }
    // Direct error binding pattern: error(reason, ...rest).
    String reasonIdentifier = ctx.Identifier().getText();
    DiagnosticPos currentPos = getCurrentPos(ctx);
    String restIdentifier = null;
    DiagnosticPos restParamPos = null;
    if (ctx.errorRestBindingPattern() != null) {
        restIdentifier = ctx.errorRestBindingPattern().Identifier().getText();
        restParamPos = getCurrentPos(ctx.errorRestBindingPattern());
    }
    this.pkgBuilder.addErrorVariable(currentPos, getWS(ctx), reasonIdentifier, restIdentifier,
            false, false, restParamPos);
}

@Override
public void enterErrorBindingPattern(BallerinaParser.ErrorBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startErrorBindingNode();
}

// Build an XML element-access filter, resolving optional namespace and '*' wildcards.
@Override
public void exitXmlElementAccessFilter(BallerinaParser.XmlElementAccessFilterContext ctx) {
    if (isInErrorState) { return; }
    List<TerminalNode> identifier = ctx.Identifier();
    String ns = "";
    String elementName = "*";
    DiagnosticPos nsPos = null;
    DiagnosticPos elemNamePos = null;
    if (identifier.size() == 1) {
        if (ctx.MUL() == null) {
            // Single name, no wildcard: <name>.
            TerminalNode nameNode = identifier.get(0);
            elementName = nameNode.getText();
            elemNamePos = getCurrentPos(nameNode);
        } else {
            // Namespaced wildcard: <ns>:*.
            elemNamePos = getCurrentPos(ctx.MUL());
            ns = ctx.Identifier(0).getText();
            nsPos = getCurrentPos(ctx.Identifier(0));
        }
    } else if (identifier.size() > 1) {
        // Fully qualified: <ns>:<name>.
        TerminalNode nsNode = identifier.get(0);
        ns = nsNode.getText();
        nsPos = getCurrentPos(nsNode);
        TerminalNode nameNode = identifier.get(1);
        elementName = nameNode.getText();
        elemNamePos = getCurrentPos(nameNode);
    } else {
        // Bare wildcard: *.
        elemNamePos = getCurrentPos(ctx.MUL());
    }
    this.pkgBuilder.addXMLElementAccessFilter(getCurrentPos(ctx), getWS(ctx), ns, nsPos, elementName,
            elemNamePos);
}

@Override
public void enterErrorMatchPattern(BallerinaParser.ErrorMatchPatternContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startErrorBindingNode();
}

@Override
public void exitSimpleMatchPattern(BallerinaParser.SimpleMatchPatternContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.endSimpleMatchPattern(getWS(ctx));
}

@Override
public void exitXmlElementFilter(BallerinaParser.XmlElementFilterContext ctx) {
    if (isInErrorState) { return; }
    int filterCount = ctx.xmlElementNames().xmlElementAccessFilter().size();
    this.pkgBuilder.createXMLElementAccessNode(getCurrentPos(ctx), getWS(ctx), filterCount);
}

@Override
public void exitXmlStepExpressionReference(BallerinaParser.XmlStepExpressionReferenceContext ctx) {
    if (isInErrorState) { return; }
    boolean isIndexed = ctx.xmlStepExpression().index() != null;
    BallerinaParser.XmlStepExpressionContext navAccess = ctx.xmlStepExpression();
    BallerinaParser.XmlElementNamesContext filters = navAccess.xmlElementNames();
    int filterCount = filters == null ? 0 : filters.xmlElementAccessFilter().size();
    int starCount = navAccess.MUL().size();
    this.pkgBuilder.createXMLNavigationAccessNode(getCurrentPos(ctx), getWS(ctx), filterCount, starCount,
            isIndexed);
}

@Override
public void exitRestMatchPattern(BallerinaParser.RestMatchPatternContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addWSForRestMatchPattern(getWS(ctx));
}

// Error match pattern argument list: optional reason (var/identifier/const string) plus rest.
@Override
public void exitErrorArgListMatchPattern(BallerinaParser.ErrorArgListMatchPatternContext ctx) {
    if (isInErrorState) { return; }
    String restIdentifier = null;
    DiagnosticPos restParamPos = null;
    if (ctx.restMatchPattern() != null) {
        restIdentifier = ctx.restMatchPattern().Identifier().getText();
        restParamPos = getCurrentPos(ctx.restMatchPattern());
    }
    String reasonIdentifier = null;
    boolean reasonVar = false;
    boolean constReasonMatchPattern = false;
    if (ctx.simpleMatchPattern() != null) {
        reasonVar = ctx.simpleMatchPattern().VAR() != null;
        if (ctx.simpleMatchPattern().Identifier() != null) {
            reasonIdentifier = ctx.simpleMatchPattern().Identifier().getText();
        } else {
            // Reason given as a quoted string literal -> constant reason pattern.
            reasonIdentifier = ctx.simpleMatchPattern().QuotedStringLiteral().getText();
            constReasonMatchPattern = true;
        }
    }
    this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), reasonIdentifier, restIdentifier,
            reasonVar, constReasonMatchPattern, restParamPos);
}

@Override
public void exitErrorMatchPattern(BallerinaParser.ErrorMatchPatternContext ctx) {
    if (isInErrorState) { return; }
    boolean isIndirectErrorMatchPatern = ctx.typeName() != null;
    this.pkgBuilder.endErrorMatchPattern(getWS(ctx), isIndirectErrorMatchPatern);
}

@Override
public void exitErrorDetailBindingPattern(BallerinaParser.ErrorDetailBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    String bindingVarName = null;
    if (ctx.bindingPattern() != null && ctx.bindingPattern().Identifier() != null) {
        bindingVarName = ctx.bindingPattern().Identifier().getText();
    }
    this.pkgBuilder.addErrorDetailBinding(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(),
            bindingVarName);
}

@Override
public void exitErrorRefBindingPattern(BallerinaParser.ErrorRefBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    int numNamedArgs = ctx.errorNamedArgRefPattern().size();
    boolean reasonRefAvailable = ctx.variableReference() != null;
    boolean restPatternAvailable = ctx.errorRefRestPattern() != null;
    boolean indirectErrorRefPattern = ctx.typeName() != null;
    this.pkgBuilder.addErrorVariableReference(getCurrentPos(ctx), getWS(ctx), numNamedArgs,
            reasonRefAvailable, restPatternAvailable, indirectErrorRefPattern);
}

@Override
public void exitErrorNamedArgRefPattern(BallerinaParser.ErrorNamedArgRefPatternContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText());
}

@Override
public void exitListBindingPattern(BallerinaParser.ListBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    boolean restBindingAvailable = ctx.restBindingPattern() != null;
    this.pkgBuilder.addTupleVariable(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().size(),
            restBindingAvailable);
}

@Override
public void exitListRefBindingPattern(BallerinaParser.ListRefBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    boolean restPatternAvailable = ctx.listRefRestPattern() != null;
    this.pkgBuilder.addTupleVariableReference(getCurrentPos(ctx), getWS(ctx),
            ctx.bindingRefPattern().size(), restPatternAvailable);
}

@Override
public void enterRecordBindingPattern(BallerinaParser.RecordBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startRecordVariableList();
}

@Override
public void exitRecordBindingPattern(BallerinaParser.RecordBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    boolean hasRestBindingPattern = ctx.entryBindingPattern().restBindingPattern() != null;
    this.pkgBuilder.addRecordVariable(getCurrentPos(ctx), getWS(ctx), hasRestBindingPattern);
}

@Override
public void enterRecordRefBindingPattern(BallerinaParser.RecordRefBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startRecordVariableReferenceList();
}

@Override
public void exitRecordRefBindingPattern(BallerinaParser.RecordRefBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    boolean hasRestBindingPattern = ctx.entryRefBindingPattern().restRefBindingPattern() != null;
    this.pkgBuilder.addRecordVariableReference(getCurrentPos(ctx), getWS(ctx), hasRestBindingPattern);
}

@Override
public void exitBindingPattern(BallerinaParser.BindingPatternContext ctx) {
    if (isInErrorState) { return; }
    // Only a plain identifier inside a list/field/match-clause pattern becomes a member variable;
    // other identifier occurrences just record whitespace.
    if ((ctx.Identifier() != null)
            && ((ctx.parent instanceof BallerinaParser.ListBindingPatternContext)
            || (ctx.parent instanceof BallerinaParser.FieldBindingPatternContext)
            || (ctx.parent instanceof BallerinaParser.MatchPatternClauseContext))) {
        this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx),
                ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()));
    } else if (ctx.Identifier() != null) {
        this.pkgBuilder.addBindingPatternNameWhitespace(getWS(ctx));
    }
}

@Override
public void exitFieldBindingPattern(BallerinaParser.FieldBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addFieldBindingMemberVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(),
            getCurrentPos(ctx.Identifier()), ctx.bindingPattern() != null);
}

@Override
public void exitFieldRefBindingPattern(BallerinaParser.FieldRefBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addFieldRefBindingMemberVar(getCurrentPos(ctx), getWS(ctx),
            ctx.Identifier().getText(), ctx.bindingRefPattern() != null);
}

@Override
public void exitRestBindingPattern(BallerinaParser.RestBindingPatternContext ctx) {
    if (isInErrorState) { return; }
    if (ctx.Identifier() != null) {
        this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx),
                ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()));
    }
}

// Dispatch a local variable definition to the builder based on its binding-pattern shape.
@Override
public void exitVariableDefinitionStatement(BallerinaParser.VariableDefinitionStatementContext ctx) {
    if (isInErrorState) { return; }
    boolean isFinal = ctx.FINAL() != null;
    boolean isDeclaredWithVar = ctx.VAR() != null;
    boolean isExpressionAvailable = ctx.expression() != null;
    if (ctx.Identifier() != null) {
        // Plain typed variable: T x = ...
        this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), isFinal,
                isExpressionAvailable, isDeclaredWithVar);
    } else if (ctx.bindingPattern().Identifier() != null) {
        // Simple binding pattern: var x = ...
        this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                ctx.bindingPattern().Identifier().getText(),
                getCurrentPos(ctx.bindingPattern().Identifier()), isFinal, isExpressionAvailable,
                isDeclaredWithVar);
    } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) {
        this.pkgBuilder.addRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal,
                isDeclaredWithVar);
    } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) {
        this.pkgBuilder.addErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal,
                isDeclaredWithVar);
    } else if (ctx.bindingPattern().structuredBindingPattern().listBindingPattern() != null) {
        this.pkgBuilder.addTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal,
                isDeclaredWithVar);
    }
}

@Override
public void enterRecordLiteral(BallerinaParser.RecordLiteralContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startMapStructLiteral();
}

@Override
public void exitRecordLiteral(BallerinaParser.RecordLiteralContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addMapStructLiteral(getCurrentPos(ctx), getWS(ctx));
}

// A record-literal field is either a shorthand identifier, a spread ('...expr'), or key:value.
@Override
public void exitRecordField(BallerinaParser.RecordFieldContext ctx) {
    if (isInErrorState) { return; }
    if (ctx.Identifier() != null) {
        DiagnosticPos pos = getCurrentPos(ctx);
        this.pkgBuilder.addNameReference(pos, getWS(ctx), null, ctx.Identifier().getText());
        this.pkgBuilder.createBLangRecordVarRefNameField(pos, getWS(ctx));
        this.pkgBuilder.addIdentifierRecordField();
    } else if (ctx.ELLIPSIS() != null) {
        this.pkgBuilder.addSpreadOpRecordField(getWS(ctx));
    } else {
        this.pkgBuilder.addKeyValueRecordField(getWS(ctx), ctx.recordKey().LEFT_BRACKET() != null);
    }
}

// Record key: identifier key becomes a simple var reference; '[expr]' keys only record whitespace.
@Override
public void exitRecordKey(BallerinaParser.RecordKeyContext ctx) {
    if (isInErrorState) { return; }
    if (ctx.Identifier() != null) {
        DiagnosticPos pos = getCurrentPos(ctx);
        this.pkgBuilder.addNameReference(pos, getWS(ctx), null, ctx.Identifier().getText());
        this.pkgBuilder.createSimpleVariableReference(pos, getWS(ctx));
    } else if (ctx.LEFT_BRACKET() != null) {
        this.pkgBuilder.addRecordKeyWS(getWS(ctx));
    }
}

@Override
public void enterTableLiteral(BallerinaParser.TableLiteralContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startTableLiteral();
}

@Override
public void exitTableColumnDefinition(BallerinaParser.TableColumnDefinitionContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.endTableColumnDefinition(getWS(ctx));
}

// A column with two children is '<modifier> name'; only the 'key' modifier is legal.
@Override
public void exitTableColumn(BallerinaParser.TableColumnContext ctx) {
    if (isInErrorState) { return; }
    String columnName;
    int childCount = ctx.getChildCount();
    if (childCount == 2) {
        boolean keyColumn = KEYWORD_KEY.equals(ctx.getChild(0).getText());
        if (keyColumn) {
            columnName = escapeQuotedIdentifier(ctx.getChild(1).getText());
            this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx));
            this.pkgBuilder.markPrimaryKeyColumn(columnName);
        } else {
            // Any modifier other than 'key' is a compile error.
            DiagnosticPos pos = getCurrentPos(ctx);
            dlog.error(pos, DiagnosticCode.TABLE_KEY_EXPECTED);
        }
    } else {
        columnName = escapeQuotedIdentifier(ctx.getChild(0).getText());
        this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx));
    }
}

@Override
public void exitTableDataArray(BallerinaParser.TableDataArrayContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.endTableDataArray(getWS(ctx));
}

@Override
public void exitTableDataList(BallerinaParser.TableDataListContext ctx) {
    if (isInErrorState) { return; }
    if (ctx.expressionList() != null) {
        this.pkgBuilder.endTableDataRow(getWS(ctx));
    }
}

@Override
public void exitTableData(BallerinaParser.TableDataContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.endTableDataList(getCurrentPos(ctx), getWS(ctx));
}

@Override
public void exitTableLiteral(BallerinaParser.TableLiteralContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addTableLiteral(getCurrentPos(ctx), getWS(ctx));
}

@Override
public void exitListConstructorExpr(BallerinaParser.ListConstructorExprContext ctx) {
    if (isInErrorState) { return; }
    boolean argsAvailable = ctx.expressionList() != null;
    this.pkgBuilder.addListConstructorExpression(getCurrentPos(ctx), getWS(ctx), argsAvailable);
}

@Override
public void enterLetExpr(BallerinaParser.LetExprContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startLetVarList();
}

@Override
public void exitLetExpr(BallerinaParser.LetExprContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addLetExpression(getCurrentPos(ctx), getWS(ctx));
}

// Dispatch a let-expression variable declaration by its binding-pattern shape.
@Override
public void exitLetVarDecl(BallerinaParser.LetVarDeclContext ctx) {
    if (isInErrorState) { return; }
    boolean isDeclaredWithVar = ctx.VAR() != null;
    boolean isExpressionAvailable = ctx.expression() != null;
    int annotationAttachmentsSize = ctx.annotationAttachment().size();
    if (ctx.bindingPattern().Identifier() != null) {
        this.pkgBuilder.addSimpleLetVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                ctx.bindingPattern().Identifier().getText(),
                getCurrentPos(ctx.bindingPattern().Identifier()), isExpressionAvailable,
                isDeclaredWithVar, annotationAttachmentsSize);
    } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) {
        this.pkgBuilder.addRecordVariableLetDefStatement(getCurrentPos(ctx), getWS(ctx),
                isDeclaredWithVar, annotationAttachmentsSize);
    } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) {
        this.pkgBuilder.addErrorVariableLetDefStatement(getCurrentPos(ctx), getWS(ctx),
                isDeclaredWithVar, annotationAttachmentsSize);
    } else if (ctx.bindingPattern().structuredBindingPattern().listBindingPattern() != null) {
        this.pkgBuilder.addTupleVariableLetDefStatement(getCurrentPos(ctx), getWS(ctx),
                isDeclaredWithVar, annotationAttachmentsSize);
    }
}

@Override
public void exitTypeInitExpr(BallerinaParser.TypeInitExprContext ctx) {
    if (isInErrorState) { return; }
    String initName = ctx.NEW().getText();
    boolean typeAvailable = ctx.userDefineTypeName() != null || ctx.streamTypeName() != null;
    boolean argsAvailable = ctx.invocationArgList() != null;
    this.pkgBuilder.addTypeInitExpression(getCurrentPos(ctx), getWS(ctx), initName, typeAvailable,
            argsAvailable);
}

// A service constructor expression is modeled as an anonymous service definition.
@Override
public void exitServiceConstructorExpression(BallerinaParser.ServiceConstructorExpressionContext ctx) {
    if (isInErrorState) { return; }
    final DiagnosticPos serviceDefPos = getCurrentPos(ctx);
    final String serviceVarName = null;
    final DiagnosticPos varPos = serviceDefPos;
    this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, true,
            ctx.serviceConstructorExpr().annotationAttachment().size());
}

/** {@inheritDoc} */
@Override
public void exitAssignmentStatement(BallerinaParser.AssignmentStatementContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addAssignmentStatement(getCurrentPos(ctx), getWS(ctx));
}

@Override
public void exitListDestructuringStatement(BallerinaParser.ListDestructuringStatementContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addTupleDestructuringStatement(getCurrentPos(ctx), getWS(ctx));
}

@Override
public void exitRecordDestructuringStatement(BallerinaParser.RecordDestructuringStatementContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addRecordDestructuringStatement(getCurrentPos(ctx), getWS(ctx));
}

@Override
public void exitErrorDestructuringStatement(BallerinaParser.ErrorDestructuringStatementContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addErrorDestructuringStatement(getCurrentPos(ctx), getWS(ctx));
}

/** {@inheritDoc} */
@Override
public void exitCompoundAssignmentStatement(BallerinaParser.CompoundAssignmentStatementContext ctx) {
    if (isInErrorState) { return; }
    String compoundOperatorText = ctx.compoundOperator().getText();
    // Strip the trailing '=' to get the binary operator, e.g. "+=" -> "+".
    String operator = compoundOperatorText.substring(0, compoundOperatorText.length() - 1);
    this.pkgBuilder.addCompoundAssignmentStatement(getCurrentPos(ctx), getWS(ctx), operator);
}

/** {@inheritDoc} */
@Override
public void exitCompoundOperator(BallerinaParser.CompoundOperatorContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addCompoundOperator(getWS(ctx));
}

@Override
public void enterVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startExprNodeList();
}

@Override
public void exitVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) {
    if (isInErrorState) { return; }
    // Children alternate ref, comma, ref, ... so the ref count is childCount/2 + 1.
    this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1);
}

/** {@inheritDoc} */
@Override
public void enterIfElseStatement(BallerinaParser.IfElseStatementContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startIfElseNode(getCurrentPos(ctx));
}

/** {@inheritDoc} */
@Override
public void exitIfElseStatement(BallerinaParser.IfElseStatementContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.endIfElseNode(getWS(ctx));
}

/** {@inheritDoc} */
@Override
public void exitIfClause(BallerinaParser.IfClauseContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addIfBlock(getCurrentPos(ctx), getWS(ctx));
}

/** {@inheritDoc} */
@Override
public void enterElseIfClause(BallerinaParser.ElseIfClauseContext ctx) {
    if (isInErrorState) { return; }
    // An else-if clause is modeled as a nested if-else node.
    this.pkgBuilder.startIfElseNode(getCurrentPos(ctx));
}

/** {@inheritDoc} */
@Override
public void exitElseIfClause(BallerinaParser.ElseIfClauseContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addElseIfBlock(getCurrentPos(ctx), getWS(ctx));
}

/** {@inheritDoc} */
@Override
public void enterElseClause(BallerinaParser.ElseClauseContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startBlock();
}

/** {@inheritDoc} */
@Override
public void exitElseClause(BallerinaParser.ElseClauseContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.addElseBlock(getCurrentPos(ctx), getWS(ctx));
}

@Override
public void enterMatchStatement(BallerinaParser.MatchStatementContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.createMatchNode(getCurrentPos(ctx));
}

@Override
public void exitMatchStatement(BallerinaParser.MatchStatementContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.completeMatchNode(getCurrentPos(ctx), getWS(ctx));
}

@Override
public void enterMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) {
    if (isInErrorState) { return; }
    this.pkgBuilder.startMatchStmtPattern();
}

@Override
public void exitMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) {
    if (isInErrorState) { return; }
    // Structured (binding/error) patterns may carry an 'if' type guard; otherwise static pattern.
    if (ctx.bindingPattern() != null || ctx.errorMatchPattern() != null) {
        boolean isTypeGuardPresent = ctx.IF() != null;
        this.pkgBuilder.addMatchStmtStructuredBindingPattern(getCurrentPos(ctx), getWS(ctx),
                isTypeGuardPresent);
        return;
    }
    this.pkgBuilder.addMatchStmtStaticBindingPattern(getCurrentPos(ctx), getWS(ctx));
}

@Override public void 
enterForeachStatement(BallerinaParser.ForeachStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startForeachStatement();
    }

    @Override
    public void exitForeachStatement(BallerinaParser.ForeachStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean isDeclaredWithVar = ctx.VAR() != null;

        // Dispatch on the kind of binding pattern in the foreach head:
        // simple identifier vs. record / error / list (tuple) structured patterns.
        if (ctx.bindingPattern().Identifier() != null) {
            String identifier = ctx.bindingPattern().Identifier().getText();
            DiagnosticPos identifierPos = getCurrentPos(ctx.bindingPattern().Identifier());
            this.pkgBuilder.addForeachStatementWithSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                    identifier, identifierPos, isDeclaredWithVar);
        } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) {
            this.pkgBuilder.addForeachStatementWithRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                    isDeclaredWithVar);
        } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) {
            this.pkgBuilder.addForeachStatementWithErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                    isDeclaredWithVar);
        } else {
            this.pkgBuilder.addForeachStatementWithTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx),
                    isDeclaredWithVar);
        }
    }

    @Override
    public void exitIntRangeExpression(BallerinaParser.IntRangeExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        // NOTE(review): the boolean flags reflect missing parentheses and a missing
        // upper-bound expression — presumably bound inclusiveness / open-endedness;
        // confirm against the pkgBuilder implementation.
        this.pkgBuilder.addIntRangeExpression(getCurrentPos(ctx), getWS(ctx),
                ctx.LEFT_PARENTHESIS() == null, ctx.RIGHT_PARENTHESIS() == null,
                ctx.expression(1) == null);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterWhileStatement(BallerinaParser.WhileStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startWhileStmt();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitWhileStatement(BallerinaParser.WhileStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addWhileStmt(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitContinueStatement(BallerinaParser.ContinueStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addContinueStatement(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitBreakStatement(BallerinaParser.BreakStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addBreakStatement(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startForkJoinStmt();
    }

    @Override
    public void exitForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addForkJoinStmt(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startTryCatchFinallyStmt();
    }

    @Override
    public void exitTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addTryCatchFinallyStmt(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterCatchClauses(BallerinaParser.CatchClausesContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addTryClause(getCurrentPos(ctx));
    }

    @Override
    public void enterCatchClause(BallerinaParser.CatchClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startCatchClause();
    }

    @Override
    public void exitCatchClause(BallerinaParser.CatchClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        String paramName = ctx.Identifier().getText();
        this.pkgBuilder.addCatchClause(getCurrentPos(ctx), getWS(ctx), paramName);
    }

    @Override
    public void enterFinallyClause(BallerinaParser.FinallyClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startFinallyBlock();
    }

    @Override
    public void exitFinallyClause(BallerinaParser.FinallyClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addFinallyBlock(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitThrowStatement(BallerinaParser.ThrowStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addThrowStmt(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitPanicStatement(BallerinaParser.PanicStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addPanicStmt(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitReturnStatement(BallerinaParser.ReturnStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addReturnStatement(this.getCurrentPos(ctx), getWS(ctx), ctx.expression() != null);
    }

    @Override
    public void exitWorkerReceiveExpression(BallerinaParser.WorkerReceiveExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        // The peer worker is either the 'default' worker or an explicitly named one.
        String workerName = ctx.peerWorker().DEFAULT() != null
                ? ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText();
        this.pkgBuilder.addWorkerReceiveExpr(getCurrentPos(ctx), getWS(ctx), workerName,
                ctx.expression() != null);
    }

    @Override
    public void exitFlushWorker(BallerinaParser.FlushWorkerContext ctx) {
        if (isInErrorState) {
            return;
        }

        // The worker name is optional; null means no specific target worker.
        String workerName = ctx.Identifier() != null ? ctx.Identifier().getText() : null;
        this.pkgBuilder.addWorkerFlushExpr(getCurrentPos(ctx), getWS(ctx), workerName);
    }

    @Override
    public void exitWorkerSendAsyncStatement(BallerinaParser.WorkerSendAsyncStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        String workerName = ctx.peerWorker().DEFAULT() != null
                ? ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText();
        // The last flag records whether a second expression follows the sent value.
        this.pkgBuilder.addWorkerSendStmt(getCurrentPos(ctx), getWS(ctx), workerName,
                ctx.expression().size() > 1);
    }

    @Override
    public void exitWorkerSendSyncExpression(BallerinaParser.WorkerSendSyncExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        String workerName = ctx.peerWorker().DEFAULT() != null ?
ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText();
        this.pkgBuilder.addWorkerSendSyncExpr(getCurrentPos(ctx), getWS(ctx), workerName);
    }

    @Override
    public void exitWaitExpression(BallerinaParser.WaitExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        // A wait-for-collection waits on several futures at once; otherwise a single wait.
        if (ctx.waitForCollection() != null) {
            this.pkgBuilder.handleWaitForAll(getCurrentPos(ctx), getWS(ctx));
        } else {
            this.pkgBuilder.handleWait(getCurrentPos(ctx), getWS(ctx));
        }
    }

    @Override
    public void enterWaitForCollection(BallerinaParser.WaitForCollectionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startWaitForAll();
    }

    @Override
    public void exitWaitKeyValue(BallerinaParser.WaitKeyValueContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean containsExpr = ctx.expression() != null;
        this.pkgBuilder.addKeyValueToWaitForAll(getCurrentPos(ctx), getWS(ctx),
                ctx.Identifier().getText(), containsExpr);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitXmlAttribVariableReference(BallerinaParser.XmlAttribVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean isSingleAttrRef = ctx.xmlAttrib().expression() != null;
        this.pkgBuilder.createXmlAttributesRefExpr(getCurrentPos(ctx), getWS(ctx), isSingleAttrRef);
    }

    @Override
    public void exitSimpleVariableReference(BallerinaParser.SimpleVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitInvocation(BallerinaParser.InvocationContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addInvocationWS(getWS(ctx));
    }

    @Override
    public void exitStringFunctionInvocationReference(BallerinaParser.StringFunctionInvocationReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        TerminalNode node = ctx.QuotedStringLiteral();
        DiagnosticPos pos = getCurrentPos(ctx);
        Set<Whitespace> ws = getWS(ctx);

        // Strip the surrounding quotes and resolve escape sequences before creating
        // the string literal the invocation is performed on.
        String actualText = node.getText();
        actualText = actualText.substring(1, actualText.length() - 1);
        actualText = StringEscapeUtils.unescapeJava(actualText);
        this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, actualText, node.getText());

        boolean argsAvailable = ctx.invocation().invocationArgList() != null;
        BallerinaParser.AnyIdentifierNameContext identifierContext = ctx.invocation().anyIdentifierName();
        String invocation = identifierContext.getText();
        this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable,
                getCurrentPos(identifierContext));
    }

    @Override
    public void exitGroupStringFunctionInvocationReference(GroupStringFunctionInvocationReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        TerminalNode node = ctx.QuotedStringLiteral();
        DiagnosticPos pos = getCurrentPos(ctx);
        Set<Whitespace> ws = getWS(ctx);

        // Same quote-stripping/unescaping as the non-grouped variant above.
        String actualText = node.getText();
        actualText = actualText.substring(1, actualText.length() - 1);
        actualText = StringEscapeUtils.unescapeJava(actualText);
        this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, actualText, node.getText());

        InvocationContext invocation = ctx.invocation();
        boolean argsAvailable = invocation.invocationArgList() != null;
        BallerinaParser.AnyIdentifierNameContext identifierContext = invocation.anyIdentifierName();
        String invocationText = identifierContext.getText();
        this.pkgBuilder.createInvocationNode(getCurrentPos(invocation), getWS(invocation), invocationText,
                argsAvailable, getCurrentPos(identifierContext));
        this.pkgBuilder.createGroupExpression(getCurrentPos(node), getWS(ctx));
    }

    @Override
    public void exitFunctionInvocation(BallerinaParser.FunctionInvocationContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean argsAvailable = ctx.invocationArgList() != null;
        this.pkgBuilder.createFunctionInvocation(getCurrentPos(ctx), getWS(ctx), argsAvailable);
    }

    @Override
    public void exitFieldVariableReference(BallerinaParser.FieldVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        createFieldBasedAccessNode(ctx, ctx.field());
    }

    @Override
    public void exitGroupFieldVariableReference(BallerinaParser.GroupFieldVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        FieldContext field = ctx.field();
        VariableReferenceContext groupExpression = ctx.variableReference();
        createFieldBasedAccessNode(field, field);
        this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression));
    }

    // Creates a field-access node, distinguishing '*' (all fields), a single field name,
    // and a ns:field qualified name; also records optional field access ('?.').
    private void createFieldBasedAccessNode(ParserRuleContext ctx, FieldContext field) {
        String fieldName;
        FieldKind fieldType;
        String nsName = null;
        DiagnosticPos nsPos = null;
        if (field.Identifier().isEmpty()) {
            fieldName = field.MUL().getText();
            fieldType = FieldKind.ALL;
        } else if (field.Identifier().size() == 1) {
            fieldName = field.Identifier(0).getText();
            fieldType = FieldKind.SINGLE;
        } else {
            nsName = field.Identifier(0).getText();
            nsPos = getCurrentPos(field.Identifier(0));
            fieldName = field.Identifier(1).getText();
            fieldType = FieldKind.WITH_NS;
        }
        this.pkgBuilder.createFieldBasedAccessNode(getCurrentPos(ctx), getWS(ctx), fieldName,
                getCurrentPos(field), nsName, nsPos, fieldType, field.OPTIONAL_FIELD_ACCESS() != null);
    }

    @Override
    public void exitMapArrayVariableReference(BallerinaParser.MapArrayVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitGroupMapArrayVariableReference(BallerinaParser.GroupMapArrayVariableReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        IndexContext index = ctx.index();
        VariableReferenceContext groupExpression = ctx.variableReference();
        this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(index), getWS(index));
        this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression));
    }

    @Override
    public void exitReservedWord(BallerinaParser.ReservedWordContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startInvocationNode(getWS(ctx));
    }

    @Override
    public void exitAnyIdentifierName(BallerinaParser.AnyIdentifierNameContext ctx) {
        if (isInErrorState) {
            return;
        }

        // Reserved words are handled by exitReservedWord; start the invocation node
        // only for plain identifiers.
        if (ctx.reservedWord() == null) {
            this.pkgBuilder.startInvocationNode(getWS(ctx));
        }
    }

    @Override
    public void exitInvocationReference(BallerinaParser.InvocationReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean argsAvailable = ctx.invocation().invocationArgList() != null;
        BallerinaParser.AnyIdentifierNameContext identifierContext = ctx.invocation().anyIdentifierName();
        String invocation = identifierContext.getText();
        this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable,
                getCurrentPos(identifierContext));
    }

    @Override
    public void exitGroupInvocationReference(BallerinaParser.GroupInvocationReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        InvocationContext invocation = ctx.invocation();
        VariableReferenceContext groupExpression = ctx.variableReference();
        boolean argsAvailable = invocation.invocationArgList() != null;
        BallerinaParser.AnyIdentifierNameContext identifierContext = invocation.anyIdentifierName();
        String invocationText = identifierContext.getText();
        // The grouped (parenthesized) expression is created first, then the invocation on it.
        this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression));
        this.pkgBuilder.createInvocationNode(getCurrentPos(invocation), getWS(invocation), invocationText,
                argsAvailable, getCurrentPos(identifierContext));
    }

    @Override
    public void exitTypeDescExprInvocationReference(BallerinaParser.TypeDescExprInvocationReferenceContext ctx) {
        if (isInErrorState) {
            return;
        }

        boolean argsAvailable = ctx.invocation().invocationArgList() != null;
        BallerinaParser.AnyIdentifierNameContext identifierContext = ctx.invocation().anyIdentifierName();
        String invocation = identifierContext.getText();
        this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable,
                getCurrentPos(identifierContext));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterInvocationArgList(BallerinaParser.InvocationArgListContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startExprNodeList();
}

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitInvocationArgList(BallerinaParser.InvocationArgListContext ctx) {
        if (isInErrorState) {
            return;
        }

        // Children alternate expr, comma, expr, ... so expression count = childCount / 2 + 1.
        this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1);
    }

    // Fixed: @Override was missing here while its counterpart exitExpressionList (below)
    // carries it, so both override the same generated listener's callbacks.
    @Override
    public void enterExpressionList(BallerinaParser.ExpressionListContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startExprNodeList();
    }

    @Override
    public void exitExpressionList(BallerinaParser.ExpressionListContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1);
    }

    @Override
    public void exitExpressionStmt(BallerinaParser.ExpressionStmtContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addExpressionStmt(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterTransactionStatement(BallerinaParser.TransactionStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startTransactionStmt();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitTransactionStatement(BallerinaParser.TransactionStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        DiagnosticPos pos = getCurrentPos(ctx);
        this.pkgBuilder.endTransactionStmt(pos, getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitTransactionClause(BallerinaParser.TransactionClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addTransactionBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitTransactionPropertyInitStatementList(
            BallerinaParser.TransactionPropertyInitStatementListContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.endTransactionPropertyInitStatementList(getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterLockStatement(BallerinaParser.LockStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startLockStmt();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitLockStatement(BallerinaParser.LockStatementContext ctx) {
        if (isInErrorState) {
            return;
        }
this.pkgBuilder.addLockStmt(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterOnretryClause(BallerinaParser.OnretryClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startOnretryBlock();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitOnretryClause(BallerinaParser.OnretryClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addOnretryBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterCommittedClause(BallerinaParser.CommittedClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startCommittedBlock();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitCommittedClause(BallerinaParser.CommittedClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.endCommittedBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterAbortedClause(BallerinaParser.AbortedClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.startAbortedBlock();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitAbortedClause(BallerinaParser.AbortedClauseContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.endAbortedBlock(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitAbortStatement(BallerinaParser.AbortStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addAbortStatement(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitRetryStatement(BallerinaParser.RetryStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addRetryStatement(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitRetriesStatement(BallerinaParser.RetriesStatementContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addRetryCountExpression(getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void enterNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) {
    }

    @Override
    public void exitNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) {
        if (isInErrorState) {
            return;
        }

        // A declaration directly under the compilation unit is top level; otherwise
        // it is local to a block.
        boolean isTopLevel = ctx.parent instanceof BallerinaParser.CompilationUnitContext;

        // Strip quotes and unescape the namespace URI literal.
        String namespaceUri = ctx.QuotedStringLiteral().getText();
        DiagnosticPos pos = getCurrentPos(ctx);
        namespaceUri = namespaceUri.substring(1, namespaceUri.length() - 1);
        namespaceUri = StringEscapeUtils.unescapeJava(namespaceUri);

        // The prefix identifier is optional.
        String prefix = (ctx.Identifier() != null) ? ctx.Identifier().getText() : null;
        DiagnosticPos prefixPos = (ctx.Identifier() != null) ? getCurrentPos(ctx.Identifier()) : null;
        this.pkgBuilder.addXMLNSDeclaration(pos, getWS(ctx), namespaceUri, prefix, prefixPos, isTopLevel);
    }

    @Override
    public void exitBinaryDivMulModExpression(BallerinaParser.BinaryDivMulModExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        // Child 1 is the operator token.
        this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
    }

    @Override
    public void exitBinaryOrExpression(BallerinaParser.BinaryOrExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
    }

    @Override
    public void exitBinaryRefEqualExpression(BallerinaParser.BinaryRefEqualExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
    }

    @Override
    public void exitBinaryEqualExpression(BallerinaParser.BinaryEqualExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
    }

    @Override
    public void exitStaticMatchOrExpression(BallerinaParser.StaticMatchOrExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
    }

    @Override
    public void exitStaticMatchIdentifierLiteral(BallerinaParser.StaticMatchIdentifierLiteralContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, ctx.Identifier().getText());
        this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitTypeDescExpr(BallerinaParser.TypeDescExprContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createTypeAccessExpr(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitActionInvocation(BallerinaParser.ActionInvocationContext ctx) {
        if (isInErrorState) {
            return;
        }

        int numAnnotations = ctx.annotationAttachment().size();
        this.pkgBuilder.createActionInvocationNode(getCurrentPos(ctx), getWS(ctx), ctx.START() != null,
                numAnnotations);
    }

    @Override
    public void exitBinaryAndExpression(BallerinaParser.BinaryAndExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
    }

    @Override
    public void exitBinaryAddSubExpression(BallerinaParser.BinaryAddSubExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
    }

    @Override
    public void exitBitwiseExpression(BallerinaParser.BitwiseExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
    }

    @Override
    public void exitBitwiseShiftExpression(BallerinaParser.BitwiseShiftExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        // The operator may span several child tokens; concatenate everything between
        // the two operand expressions to form the operator text.
        StringBuilder operator = new StringBuilder();
        for (int i = 1; i < ctx.getChildCount() - 1; i++) {
            operator.append(ctx.getChild(i).getText());
        }
        this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), operator.toString());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitTypeConversionExpression(BallerinaParser.TypeConversionExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createTypeConversionExpr(getCurrentPos(ctx), getWS(ctx),
                ctx.annotationAttachment().size(), ctx.typeName() != null);
    }

    @Override
    public void exitBinaryCompareExpression(BallerinaParser.BinaryCompareExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
    }

    @Override
    public void exitIntegerRangeExpression(BallerinaParser.IntegerRangeExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText());
    }

    @Override
    public void exitUnaryExpression(BallerinaParser.UnaryExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        // Child 0 is the unary operator token.
        this.pkgBuilder.createUnaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText());
    }

    @Override
    public void exitTypeTestExpression(BallerinaParser.TypeTestExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createTypeTestExpression(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitAnnotAccessExpression(BallerinaParser.AnnotAccessExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createAnnotAccessNode(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitGroupExpression(BallerinaParser.GroupExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createGroupExpression(getCurrentPos(ctx), getWS(ctx));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void exitTernaryExpression(BallerinaParser.TernaryExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createTernaryExpr(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitCheckedExpression(BallerinaParser.CheckedExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createCheckedExpr(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void exitCheckPanickedExpression(BallerinaParser.CheckPanickedExpressionContext ctx) {
        if (isInErrorState) {
            return;
        }

        this.pkgBuilder.createCheckPanickedExpr(getCurrentPos(ctx), getWS(ctx));
    }

    @Override
    public void enterFromClause(BallerinaParser.FromClauseContext ctx) {
        if (isInErrorState) {
return; } this.pkgBuilder.startFromClause(); } @Override public void exitFromClause(BallerinaParser.FromClauseContext ctx) { if (isInErrorState) { return; } boolean isDeclaredWithVar = ctx.VAR() != null; if (ctx.bindingPattern().Identifier() != null) { String identifier = ctx.bindingPattern().Identifier().getText(); DiagnosticPos identifierPos = getCurrentPos(ctx.bindingPattern().Identifier()); this.pkgBuilder.createFromClauseWithSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), identifier, identifierPos, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) { this.pkgBuilder.createFromClauseWithRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) { this.pkgBuilder.createFromClauseWithErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else { this.pkgBuilder.createFromClauseWithTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } } public void enterLetClause(BallerinaParser.LetClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLetVarList(); } public void exitLetClause(BallerinaParser.LetClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLetClause(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitWhereClause(BallerinaParser.WhereClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createWhereClause(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitSelectClause(BallerinaParser.SelectClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createSelectClause(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitDoClause(BallerinaParser.DoClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createDoClause(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitQueryExpr(BallerinaParser.QueryExprContext ctx) { if 
(isInErrorState) { return; } this.pkgBuilder.createQueryExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterQueryAction(BallerinaParser.QueryActionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startDoActionBlock(); } @Override public void exitQueryAction(BallerinaParser.QueryActionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createQueryActionExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitNameReference(BallerinaParser.NameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier().size() == 2) { String pkgName = ctx.Identifier(0).getText(); String name = ctx.Identifier(1).getText(); DiagnosticPos pos = getCurrentPos(ctx); if (Names.IGNORE.value.equals(pkgName)) { dlog.error(pos, DiagnosticCode.INVALID_PACKAGE_NAME_QUALIFER, pkgName); } this.pkgBuilder.addNameReference(pos, getWS(ctx), pkgName, name); } else { String name = ctx.Identifier(0).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } @Override public void exitFunctionNameReference(BallerinaParser.FunctionNameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { String pkgName = ctx.Identifier().getText(); String name = ctx.anyIdentifierName().getText(); DiagnosticPos pos = getCurrentPos(ctx); if (Names.IGNORE.value.equals(pkgName)) { dlog.error(pos, DiagnosticCode.INVALID_PACKAGE_NAME_QUALIFER, pkgName); } this.pkgBuilder.addNameReference(pos, getWS(ctx), pkgName, name); } else { String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } /** * {@inheritDoc} */ @Override public void exitReturnParameter(BallerinaParser.ReturnParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void enterParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if 
(isInErrorState) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } private String fillWithZeros(String str) { while (str.length() < 4) { str = "0".concat(str); } return str; } /** * {@inheritDoc} */ @Override public void exitSimpleLiteral(BallerinaParser.SimpleLiteralContext ctx) { if (isInErrorState) { return; } TerminalNode node; DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); Object value; BallerinaParser.IntegerLiteralContext integerLiteralContext = ctx.integerLiteral(); if (integerLiteralContext != null && (value = getIntegerLiteral(ctx, ctx.integerLiteral())) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.INT, value, ctx.getText()); } else if (ctx.floatingPointLiteral() != null) { if ((node = ctx.floatingPointLiteral().DecimalFloatingPointNumber()) != null) { String nodeValue = getNodeValue(ctx, node); int literalTypeTag = 
NumericLiteralSupport.isDecimalDiscriminated(nodeValue) ? TypeTags.DECIMAL : TypeTags.FLOAT; this.pkgBuilder.addLiteralValue(pos, ws, literalTypeTag, nodeValue, node.getText()); } else if ((node = ctx.floatingPointLiteral().HexadecimalFloatingPointLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, getHexNodeValue(ctx, node), node.getText()); } } else if ((node = ctx.BooleanLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BOOLEAN, Boolean.parseBoolean(node.getText()), node.getText()); } else if ((node = ctx.QuotedStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length() - 1); String originalText = text; Matcher matcher = pattern.matcher(text); int position = 0; while (matcher.find(position)) { String hexStringVal = matcher.group(1); int hexDecimalVal = Integer.parseInt(hexStringVal, 16); if ((hexDecimalVal >= Constants.MIN_UNICODE && hexDecimalVal <= Constants.MIDDLE_LIMIT_UNICODE) || hexDecimalVal > Constants.MAX_UNICODE) { String hexStringWithBraces = matcher.group(0); int offset = originalText.indexOf(hexStringWithBraces) + 1; dlog.error(new DiagnosticPos(diagnosticSrc, pos.sLine, pos.eLine, pos.sCol + offset, pos.sCol + offset + hexStringWithBraces.length()), DiagnosticCode.INVALID_UNICODE, hexStringWithBraces); } text = matcher.replaceFirst("\\\\u" + fillWithZeros(hexStringVal)); position = matcher.end() - 2; matcher = pattern.matcher(text); } text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } else if (ctx.NullLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "null"); } else if (ctx.nilLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "()"); } else if (ctx.blobLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BYTE_ARRAY, ctx.blobLiteral().getText()); } } /** * {@inheritDoc} */ @Override public void 
exitNamedArgs(BallerinaParser.NamedArgsContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } /** * {@inheritDoc} */ @Override public void exitRestArgs(BallerinaParser.RestArgsContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestArgument(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitXmlLiteral(BallerinaParser.XmlLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.attachXmlLiteralWS(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitComment(BallerinaParser.CommentContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLCommentTemplateText()); String endingString = getTemplateEndingStr(ctx.XMLCommentText()); this.pkgBuilder.createXMLCommentLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitElement(BallerinaParser.ElementContext ctx) { if (isInErrorState) { return; } if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitStartTag(BallerinaParser.StartTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitCloseTag(BallerinaParser.CloseTagContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endXMLElement(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitEmptyTag(BallerinaParser.EmptyTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; 
this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitProcIns(BallerinaParser.ProcInsContext ctx) { if (isInErrorState) { return; } String targetQName = ctx.XML_TAG_SPECIAL_OPEN().getText(); targetQName = targetQName.substring(2, targetQName.length() - 1); Stack<String> textFragments = getTemplateTextFragments(ctx.XMLPITemplateText()); String endingText = getTemplateEndingStr(ctx.XMLPIText()); endingText = endingText.substring(0, endingText.length() - 2); this.pkgBuilder.createXMLPILiteral(getCurrentPos(ctx), getWS(ctx), targetQName, textFragments, endingText); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitAttribute(BallerinaParser.AttributeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createXMLAttribute(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitText(BallerinaParser.TextContext ctx) { if (isInErrorState) { return; } Stack<String> textFragments = getTemplateTextFragments(ctx.XMLTemplateText()); String endingText = getTemplateEndingStr(ctx.XMLText()); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addXMLTextToElement(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } else { this.pkgBuilder.createXMLTextLiteral(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } } /** * {@inheritDoc} */ @Override public void exitXmlSingleQuotedString(BallerinaParser.XmlSingleQuotedStringContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLSingleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLSingleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.SINGLE_QUOTE); } /** * {@inheritDoc} */ @Override public void 
exitXmlDoubleQuotedString(BallerinaParser.XmlDoubleQuotedStringContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLDoubleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLDoubleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.DOUBLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlQualifiedName(BallerinaParser.XmlQualifiedNameContext ctx) { if (isInErrorState) { return; } List<TerminalNode> qnames = ctx.XMLQName(); String prefix = null; String localname; if (qnames.size() > 1) { prefix = qnames.get(0).getText(); localname = qnames.get(1).getText(); } else { localname = qnames.get(0).getText(); } this.pkgBuilder.createXMLQName(getCurrentPos(ctx), getWS(ctx), localname, prefix); } /** * {@inheritDoc} */ @Override public void exitStringTemplateLiteral(BallerinaParser.StringTemplateLiteralContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments; String endingText = null; StringTemplateContentContext contentContext = ctx.stringTemplateContent(); if (contentContext != null) { stringFragments = getTemplateTextFragments(contentContext.StringTemplateExpressionStart()); endingText = getTemplateEndingStr(contentContext.StringTemplateText()); } else { stringFragments = new Stack<>(); } this.pkgBuilder.createStringTemplateLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingText); } /** * {@inheritDoc} */ @Override public void enterDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startMarkdownDocumentationString(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endMarkdownDocumentationString(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void 
exitDocumentationLine(BallerinaParser.DocumentationLineContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endMarkDownDocumentLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationContent(BallerinaParser.DocumentationContentContext ctx) { if (isInErrorState) { return; } String text = ctx.getText() != null ? ctx.getText() : ""; this.pkgBuilder.endMarkdownDocumentationText(getCurrentPos(ctx), getWS(ctx), text); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentationLine(BallerinaParser.ParameterDocumentationLineContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endParameterDocumentationLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentation(BallerinaParser.ParameterDocumentationContext ctx) { if (isInErrorState) { return; } String parameterName = ctx.docParameterName() != null ? ctx.docParameterName().getText() : ""; String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentation(getCurrentPos(ctx.docParameterName()), getWS(ctx), parameterName, description); } /** * {@inheritDoc} */ @Override public void exitParameterDescriptionLine(BallerinaParser.ParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentationDescription(getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDocumentation(BallerinaParser.ReturnParameterDocumentationContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? 
ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDescriptionLine(BallerinaParser.ReturnParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentationDescription(getWS(ctx), description); } @Override public void exitDeprecatedAnnotationDocumentation(BallerinaParser.DeprecatedAnnotationDocumentationContext ctx) { if (isInErrorState) { return; } String str = ctx.DeprecatedDocumentation() != null ? ctx.DeprecatedDocumentation().getText() : ""; this.pkgBuilder.endDeprecationAnnotationDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx), str); } @Override public void exitDeprecateAnnotationDescriptionLine(BallerinaParser.DeprecateAnnotationDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? 
ctx.documentationText().getText() : ""; this.pkgBuilder.endDeprecateAnnotationDocumentationDescription(getWS(ctx), description); } @Override @Override public void exitTrapExpression(BallerinaParser.TrapExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTrapExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitVariableReferenceExpression(BallerinaParser.VariableReferenceExpressionContext ctx) { if (isInErrorState) { return; } if (ctx.START() != null) { int numAnnotations = ctx.annotationAttachment().size(); this.pkgBuilder.markLastInvocationAsAsync(getCurrentPos(ctx), numAnnotations); } } @Override public void exitDocumentationReference(BallerinaParser.DocumentationReferenceContext ctx) { if (isInErrorState) { return; } BallerinaParser.ReferenceTypeContext referenceType = ctx.referenceType(); BallerinaParser.SingleBacktickedContentContext backtickedContent = ctx.singleBacktickedContent(); this.pkgBuilder.endDocumentationReference(getCurrentPos(ctx), referenceType.getText(), backtickedContent.getText()); } @Override public void exitSingleBacktickedBlock(BallerinaParser.SingleBacktickedBlockContext ctx) { if (isInErrorState) { return; } BallerinaParser.SingleBacktickedContentContext backtickedContent = ctx.singleBacktickedContent(); this.pkgBuilder.endSingleBacktickedBlock(getCurrentPos(ctx), backtickedContent.getText()); } /** * {@inheritDoc} */ @Override public void exitElvisExpression(BallerinaParser.ElvisExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createElvisExpr(getCurrentPos(ctx), getWS(ctx)); } private DiagnosticPos getCurrentPos(ParserRuleContext ctx) { int startLine = ctx.getStart().getLine(); int startCol = ctx.getStart().getCharPositionInLine() + 1; int endLine = -1; int endCol = -1; Token stop = ctx.getStop(); if (stop != null) { endLine = stop.getLine(); endCol = stop.getCharPositionInLine() + (stop.getStopIndex() - stop.getStartIndex() + 1) + 1; } return new DiagnosticPos(diagnosticSrc, 
startLine, endLine, startCol, endCol); } private DiagnosticPos getCurrentPos(TerminalNode node) { Token symbol = node.getSymbol(); int startLine = symbol.getLine(); int startCol = symbol.getCharPositionInLine() + 1; int endLine = startLine; int endCol = startCol + symbol.getText().length(); return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol); } protected Set<Whitespace> getWS(ParserRuleContext ctx) { return null; } private Stack<String> getTemplateTextFragments(List<TerminalNode> nodes) { Stack<String> templateStrFragments = new Stack<>(); nodes.forEach(node -> { if (node == null) { templateStrFragments.push(null); } else { String str = node.getText(); templateStrFragments.push(str.substring(0, str.length() - 2)); } }); return templateStrFragments; } private String getTemplateEndingStr(TerminalNode node) { return node == null ? null : node.getText(); } private String getTemplateEndingStr(List<TerminalNode> nodes) { StringJoiner joiner = new StringJoiner(""); nodes.forEach(node -> joiner.add(node.getText())); return joiner.toString(); } private String getNodeValue(ParserRuleContext ctx, TerminalNode node) { String op = ctx.getChild(0).getText(); String value = node.getText(); if (op != null && "-".equals(op)) { value = "-" + value; } return value; } private String getHexNodeValue(ParserRuleContext ctx, TerminalNode node) { String value = getNodeValue(ctx, node); if (!(value.contains("p") || value.contains("P"))) { value = value + "p0"; } return value; } private Object getIntegerLiteral(ParserRuleContext simpleLiteralContext, BallerinaParser.IntegerLiteralContext integerLiteralContext) { if (integerLiteralContext.DecimalIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.DecimalIntegerLiteral()); return parseLong(simpleLiteralContext, nodeValue, nodeValue, 10, DiagnosticCode.INTEGER_TOO_SMALL, DiagnosticCode.INTEGER_TOO_LARGE); } else if (integerLiteralContext.HexIntegerLiteral() != null) { 
String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.HexIntegerLiteral()); String processedNodeValue = nodeValue.toLowerCase().replace("0x", ""); return parseLong(simpleLiteralContext, nodeValue, processedNodeValue, 16, DiagnosticCode.HEXADECIMAL_TOO_SMALL, DiagnosticCode.HEXADECIMAL_TOO_LARGE); } return null; } private Object parseLong(ParserRuleContext context, String originalNodeValue, String processedNodeValue, int radix, DiagnosticCode code1, DiagnosticCode code2) { try { return Long.parseLong(processedNodeValue, radix); } catch (Exception e) { DiagnosticPos pos = getCurrentPos(context); Set<Whitespace> ws = getWS(context); if (originalNodeValue.startsWith("-")) { dlog.error(pos, code1, originalNodeValue); } else { dlog.error(pos, code2, originalNodeValue); } } return originalNodeValue; } /** * Mark that this listener is in error state. */ public void setErrorState() { this.isInErrorState = true; } /** * Mark that this listener is not in an error state. */ public void unsetErrorState() { this.isInErrorState = false; } boolean isInErrorState() { return this.isInErrorState; } }
``` "storage_medium" = "SSD" is not need ``` If the BE not specified the SSD, you will can not create table successfully.
void createTable() throws SQLException { runSql(db, "create table " + table + " ( pk bigint NOT NULL, v0 string not null) primary KEY (pk) DISTRIBUTED BY HASH(pk) BUCKETS " + numTablet + " PROPERTIES(\"replication_num\" = \"" + replicationNum + "\", \"storage_medium\" = \"SSD\");"); }
"\", \"storage_medium\" = \"SSD\");");
void createTable() throws SQLException { runSql(db, "create table " + table + " ( pk bigint NOT NULL, v0 string not null) primary KEY (pk) DISTRIBUTED BY HASH(pk) BUCKETS " + numTablet + " PROPERTIES(\"replication_num\" = \"" + replicationNum + "\", \"storage_medium\" = \"SSD\");"); }
class TableLoad { String db; String table; int id; int numTablet; int replicationNum; int loadIntervalMs; TableLoad(String db, int id, int numTablet, int replicationNum, int loadIntervalMs) { this.db = db; this.table = "table_" + id; this.id = id; this.numTablet = numTablet; this.replicationNum = replicationNum; this.loadIntervalMs = loadIntervalMs; } boolean hasTask(int tsMs) { return tsMs % loadIntervalMs == 0; } void loadOnce() throws SQLException { runSql(db, "insert into " + table + " values (1,\"1\"), (2,\"2\"), (3,\"3\");"); } }
class TableLoad { String db; String table; int id; int numTablet; int replicationNum; int loadIntervalMs; TableLoad(String db, int id, int numTablet, int replicationNum, int loadIntervalMs) { this.db = db; this.table = "table_" + id; this.id = id; this.numTablet = numTablet; this.replicationNum = replicationNum; this.loadIntervalMs = loadIntervalMs; } boolean hasTask(int tsMs) { return tsMs % loadIntervalMs == 0; } void loadOnce() throws SQLException { runSql(db, "insert into " + table + " values (1,\"1\"), (2,\"2\"), (3,\"3\");"); } }
`SYNCHRONIZED_PROCESSING_TIME`? Why is this the case? Specifically, why do we consider event time timers?
public void onTimers(Iterable<TimerData> timers) throws Exception { if (!timers.iterator().hasNext()) { return; } Map<BoundedWindow, EnrichedTimerData> enrichedTimers = new HashMap(); for (TimerData timer : timers) { checkArgument(timer.getNamespace() instanceof WindowNamespace, "Expected timer to be in WindowNamespace, but was in %s", timer.getNamespace()); @SuppressWarnings("unchecked") WindowNamespace<W> windowNamespace = (WindowNamespace<W>) timer.getNamespace(); W window = windowNamespace.getWindow(); WindowTracing.debug("{}: Received timer key:{}; window:{}; data:{} with " + "inputWatermark:{}; outputWatermark:{}", ReduceFnRunner.class.getSimpleName(), key, window, timer, timerInternals.currentInputWatermarkTime(), timerInternals.currentOutputWatermarkTime()); if (TimeDomain.PROCESSING_TIME == timer.getDomain() && windowIsExpired(window)) { continue; } if (enrichedTimers.containsKey(window)) { continue; } ReduceFn<K, InputT, OutputT, W>.Context directContext = contextFactory.base(window, StateStyle.DIRECT); ReduceFn<K, InputT, OutputT, W>.Context renamedContext = contextFactory.base(window, StateStyle.RENAMED); EnrichedTimerData enrichedTimer = new EnrichedTimerData(directContext, renamedContext); enrichedTimers.put(window, enrichedTimer); if (enrichedTimer.isGarbageCollection) { triggerRunner.prefetchIsClosed(directContext.state()); } else { triggerRunner.prefetchShouldFire(directContext.window(), directContext.state()); } } for (EnrichedTimerData timer : enrichedTimers.values()) { if (timer.windowIsActiveAndOpen()) { ReduceFn<K, InputT, OutputT, W>.Context directContext = timer.directContext; if (timer.isGarbageCollection) { prefetchOnTrigger(directContext, timer.renamedContext); } else if (triggerRunner.shouldFire( directContext.window(), directContext.timers(), directContext.state())) { prefetchEmit(directContext, timer.renamedContext); } } } for (EnrichedTimerData timer : enrichedTimers.values()) { ReduceFn<K, InputT, OutputT, W>.Context directContext = 
timer.directContext; ReduceFn<K, InputT, OutputT, W>.Context renamedContext = timer.renamedContext; if (timer.isGarbageCollection) { WindowTracing.debug( "{}: Cleaning up for key:{}; window:{} with inputWatermark:{}; outputWatermark:{}", ReduceFnRunner.class.getSimpleName(), key, directContext.window(), timerInternals.currentInputWatermarkTime(), timerInternals.currentOutputWatermarkTime()); boolean windowIsActiveAndOpen = timer.windowIsActiveAndOpen(); if (windowIsActiveAndOpen) { @Nullable Instant newHold = onTrigger( directContext, renamedContext, true /* isFinished */, timer.isEndOfWindow); checkState(newHold == null, "Hold placed at %s despite isFinished being true.", newHold); } clearAllState(directContext, renamedContext, windowIsActiveAndOpen); } else { WindowTracing.debug( "{}.onTimers: Triggering for key:{}; window:{} at {} with " + "inputWatermark:{}; outputWatermark:{}", key, directContext.window(), timerInternals.currentInputWatermarkTime(), timerInternals.currentOutputWatermarkTime()); if (timer.windowIsActiveAndOpen() && triggerRunner.shouldFire( directContext.window(), directContext.timers(), directContext.state())) { emit(directContext, renamedContext); } if (timer.isEndOfWindow) { checkState( windowingStrategy.getAllowedLateness().isLongerThan(Duration.ZERO), "Unexpected zero getAllowedLateness"); Instant cleanupTime = LateDataUtils.garbageCollectionTime(directContext.window(), windowingStrategy); WindowTracing.debug( "ReduceFnRunner.onTimer: Scheduling cleanup timer for key:{}; window:{} at {} with " + "inputWatermark:{}; outputWatermark:{}", key, directContext.window(), cleanupTime, timerInternals.currentInputWatermarkTime(), timerInternals.currentOutputWatermarkTime()); checkState(!cleanupTime.isAfter(BoundedWindow.TIMESTAMP_MAX_VALUE), "Cleanup time %s is beyond end-of-time", cleanupTime); directContext.timers().setTimer(cleanupTime, TimeDomain.EVENT_TIME); } } } }
if (TimeDomain.PROCESSING_TIME == timer.getDomain() && windowIsExpired(window)) {
public void onTimers(Iterable<TimerData> timers) throws Exception { if (!timers.iterator().hasNext()) { return; } Map<BoundedWindow, WindowActivation> windowActivations = new HashMap(); for (TimerData timer : timers) { checkArgument(timer.getNamespace() instanceof WindowNamespace, "Expected timer to be in WindowNamespace, but was in %s", timer.getNamespace()); @SuppressWarnings("unchecked") WindowNamespace<W> windowNamespace = (WindowNamespace<W>) timer.getNamespace(); W window = windowNamespace.getWindow(); WindowTracing.debug("{}: Received timer key:{}; window:{}; data:{} with " + "inputWatermark:{}; outputWatermark:{}", ReduceFnRunner.class.getSimpleName(), key, window, timer, timerInternals.currentInputWatermarkTime(), timerInternals.currentOutputWatermarkTime()); if (TimeDomain.EVENT_TIME != timer.getDomain() && windowIsExpired(window)) { continue; } if (windowActivations.containsKey(window)) { continue; } ReduceFn<K, InputT, OutputT, W>.Context directContext = contextFactory.base(window, StateStyle.DIRECT); ReduceFn<K, InputT, OutputT, W>.Context renamedContext = contextFactory.base(window, StateStyle.RENAMED); WindowActivation windowActivation = new WindowActivation(directContext, renamedContext); windowActivations.put(window, windowActivation); if (windowActivation.isGarbageCollection) { triggerRunner.prefetchIsClosed(directContext.state()); } else { triggerRunner.prefetchShouldFire(directContext.window(), directContext.state()); } } for (WindowActivation timer : windowActivations.values()) { if (timer.windowIsActiveAndOpen()) { ReduceFn<K, InputT, OutputT, W>.Context directContext = timer.directContext; if (timer.isGarbageCollection) { prefetchOnTrigger(directContext, timer.renamedContext); } else if (triggerRunner.shouldFire( directContext.window(), directContext.timers(), directContext.state())) { prefetchEmit(directContext, timer.renamedContext); } } } for (WindowActivation windowActivation : windowActivations.values()) { ReduceFn<K, InputT, OutputT, 
W>.Context directContext = windowActivation.directContext; ReduceFn<K, InputT, OutputT, W>.Context renamedContext = windowActivation.renamedContext; if (windowActivation.isGarbageCollection) { WindowTracing.debug( "{}: Cleaning up for key:{}; window:{} with inputWatermark:{}; outputWatermark:{}", ReduceFnRunner.class.getSimpleName(), key, directContext.window(), timerInternals.currentInputWatermarkTime(), timerInternals.currentOutputWatermarkTime()); boolean windowIsActiveAndOpen = windowActivation.windowIsActiveAndOpen(); if (windowIsActiveAndOpen) { @Nullable Instant newHold = onTrigger( directContext, renamedContext, true /* isFinished */, windowActivation.isEndOfWindow); checkState(newHold == null, "Hold placed at %s despite isFinished being true.", newHold); } clearAllState(directContext, renamedContext, windowIsActiveAndOpen); } else { WindowTracing.debug( "{}.onTimers: Triggering for key:{}; window:{} at {} with " + "inputWatermark:{}; outputWatermark:{}", key, directContext.window(), timerInternals.currentInputWatermarkTime(), timerInternals.currentOutputWatermarkTime()); if (windowActivation.windowIsActiveAndOpen() && triggerRunner.shouldFire( directContext.window(), directContext.timers(), directContext.state())) { emit(directContext, renamedContext); } if (windowActivation.isEndOfWindow) { checkState( windowingStrategy.getAllowedLateness().isLongerThan(Duration.ZERO), "Unexpected zero getAllowedLateness"); Instant cleanupTime = LateDataUtils.garbageCollectionTime(directContext.window(), windowingStrategy); WindowTracing.debug( "ReduceFnRunner.onTimer: Scheduling cleanup timer for key:{}; window:{} at {} with " + "inputWatermark:{}; outputWatermark:{}", key, directContext.window(), cleanupTime, timerInternals.currentInputWatermarkTime(), timerInternals.currentOutputWatermarkTime()); checkState(!cleanupTime.isAfter(BoundedWindow.TIMESTAMP_MAX_VALUE), "Cleanup time %s is beyond end-of-time", cleanupTime); directContext.timers().setTimer(cleanupTime, 
TimeDomain.EVENT_TIME); } } } }
/**
 * Bundles the two reduce-fn contexts for a window together with two booleans derived from the
 * current watermarks: whether the window has ended and whether it is due for garbage collection.
 */
class EnrichedTimerData {
  public final ReduceFn<K, InputT, OutputT, W>.Context directContext;
  public final ReduceFn<K, InputT, OutputT, W>.Context renamedContext;
  public final boolean isEndOfWindow;
  public final boolean isGarbageCollection;

  EnrichedTimerData(
      ReduceFn<K, InputT, OutputT, W>.Context directContext,
      ReduceFn<K, InputT, OutputT, W>.Context renamedContext) {
    this.directContext = directContext;
    this.renamedContext = renamedContext;
    W window = directContext.window();
    Instant inputWm = timerInternals.currentInputWatermarkTime();
    Instant outputWm = timerInternals.currentOutputWatermarkTime();
    // The output watermark is considered "not past" the end of the window when it is either
    // unknown (null) or has not advanced beyond the window's max timestamp.
    boolean outputWmNotPastEow = outputWm == null || !outputWm.isAfter(window.maxTimestamp());
    this.isEndOfWindow = inputWm.isAfter(window.maxTimestamp()) && outputWmNotPastEow;
    // Garbage collection is due once the input watermark passes the window's GC time
    // (end of window plus allowed lateness).
    this.isGarbageCollection =
        inputWm.isAfter(LateDataUtils.garbageCollectionTime(window, windowingStrategy));
  }

  /** Whether the window is still tracked as active and its trigger has not been closed. */
  public boolean windowIsActiveAndOpen() {
    return activeWindows.isActive(directContext.window())
        && !triggerRunner.isClosed(directContext.state());
  }
}
/**
 * Pairs the direct and renamed reduce-fn contexts for one window with watermark-derived flags:
 * {@code isEndOfWindow} (input watermark past end-of-window while output watermark is not) and
 * {@code isGarbageCollection} (input watermark past the window's GC horizon).
 */
class WindowActivation {
  public final ReduceFn<K, InputT, OutputT, W>.Context directContext;
  public final ReduceFn<K, InputT, OutputT, W>.Context renamedContext;
  public final boolean isEndOfWindow;
  public final boolean isGarbageCollection;

  WindowActivation(
      ReduceFn<K, InputT, OutputT, W>.Context directContext,
      ReduceFn<K, InputT, OutputT, W>.Context renamedContext) {
    this.directContext = directContext;
    this.renamedContext = renamedContext;
    W window = directContext.window();
    Instant inputWatermark = timerInternals.currentInputWatermarkTime();
    Instant outputWatermark = timerInternals.currentOutputWatermarkTime();
    Instant eow = window.maxTimestamp();
    // An absent output watermark counts as "before end of window".
    boolean outputBeforeEow = outputWatermark == null || !outputWatermark.isAfter(eow);
    this.isEndOfWindow = outputBeforeEow && inputWatermark.isAfter(eow);
    this.isGarbageCollection =
        inputWatermark.isAfter(LateDataUtils.garbageCollectionTime(window, windowingStrategy));
  }

  /** True when the window is active in {@code activeWindows} and its trigger is not closed. */
  public boolean windowIsActiveAndOpen() {
    W window = directContext.window();
    return activeWindows.isActive(window) && !triggerRunner.isClosed(directContext.state());
  }
}
It's just to remove the unnecessary autoboxing, since you're comparing primitives here — but it's not that relevant anyway.
void mapsWithBase() {
    assertEquals("localhost", mapsWithBase.server().get("host"));
    // The "server" map holds raw String values; parse to an int so the comparison is
    // primitive-to-primitive instead of boxing with Integer.valueOf.
    assertEquals(8080, Integer.parseInt(mapsWithBase.server().get("port")));
    assertEquals("localhost", mapsWithBase.group().get("server").host());
    assertEquals(8080, mapsWithBase.group().get("server").port());
}
assertEquals(8080, Integer.valueOf(mapsWithBase.server().get("port")));
void mapsWithBase() {
    assertEquals("localhost", mapsWithBase.server().get("host"));
    // The "server" map holds raw String values; parse to an int so the comparison is
    // primitive-to-primitive instead of boxing with Integer.valueOf.
    assertEquals(8080, Integer.parseInt(mapsWithBase.server().get("port")));
    assertEquals("localhost", mapsWithBase.group().get("server").host());
    assertEquals(8080, mapsWithBase.group().get("server").port());
}
class ConfigMappingTest { @RegisterExtension static final QuarkusUnitTest TEST = new QuarkusUnitTest() .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addAsResource(new StringAsset("config.my.prop=1234\n" + "group.host=localhost\n" + "group.port=8080\n" + "types.int=9\n" + "types.long=9999999999\n" + "types.float=99.9\n" + "types.double=99.99\n" + "types.char=c\n" + "types.boolean=true\n" + "types.value=1234\n" + "optionals.server.host=localhost\n" + "optionals.server.port=8080\n" + "optionals.optional=optional\n" + "optionals.optional.int=9\n" + "collections.strings=foo,bar\n" + "collections.ints=1,2,3\n" + "maps.server.host=localhost\n" + "maps.server.port=8080\n" + "maps.group.server.host=localhost\n" + "maps.group.server.port=8080\n" + "maps.base.server.host=localhost\n" + "maps.base.server.port=8080\n" + "maps.base.group.server.host=localhost\n" + "maps.base.group.server.port=8080\n" + "converters.foo=notbar\n" + "override.server.host=localhost\n" + "override.server.port=8080\n" + "cloud.server.host=cloud\n" + "cloud.server.port=9000\n" + "cloud.server.port=9000\n" + "hierarchy.foo=bar"), "application.properties")); @Inject Config config; @ConfigMapping(prefix = "config") public interface MyConfigMapping { @WithName("my.prop") String myProp(); } @Inject MyConfigMapping myConfigMapping; @Test void configMapping() { SmallRyeConfig smallRyeConfig = ((SmallRyeConfig) config); MyConfigMapping configMapping = smallRyeConfig.getConfigMapping(MyConfigMapping.class); assertNotNull(configMapping); assertEquals("1234", configMapping.myProp()); assertNotNull(myConfigMapping); assertEquals("1234", myConfigMapping.myProp()); } @ConfigMapping(prefix = "group") public interface GroupMapping { @WithParentName ServerHost host(); @WithParentName ServerPort port(); interface ServerHost { String host(); } interface ServerPort { int port(); } } @Inject GroupMapping groupMapping; @Test void groups() { assertNotNull(groupMapping); assertEquals("localhost", 
groupMapping.host().host()); assertEquals(8080, groupMapping.port().port()); } @ConfigMapping(prefix = "types") public interface SomeTypes { @WithName("int") int intPrimitive(); @WithName("int") Integer intWrapper(); @WithName("long") long longPrimitive(); @WithName("long") Long longWrapper(); @WithName("float") float floatPrimitive(); @WithName("float") Float floatWrapper(); @WithName("double") double doublePrimitive(); @WithName("double") Double doubleWrapper(); @WithName("char") char charPrimitive(); @WithName("char") Character charWrapper(); @WithName("boolean") boolean booleanPrimitive(); @WithName("boolean") Boolean booleanWrapper(); @WithName("value") ConfigValue configValue(); } @Inject SomeTypes types; @Test void types() { assertNotNull(types); assertEquals(9, types.intPrimitive()); assertEquals(9, types.intWrapper()); assertEquals(9999999999L, types.longPrimitive()); assertEquals(9999999999L, types.longWrapper()); assertEquals(99.9f, types.floatPrimitive()); assertEquals(99.9f, types.floatWrapper()); assertEquals(99.99, types.doublePrimitive()); assertEquals(99.99, types.doubleWrapper()); assertEquals('c', types.charPrimitive()); assertEquals('c', types.charWrapper()); assertTrue(types.booleanPrimitive()); assertTrue(types.booleanWrapper()); assertEquals("1234", types.configValue().getValue()); } @ConfigMapping(prefix = "optionals") public interface Optionals { Optional<Server> server(); Optional<String> optional(); @WithName("optional.int") OptionalInt optionalInt(); interface Server { String host(); int port(); } } @Inject Optionals optionals; @Test void optionals() { assertTrue(optionals.server().isPresent()); assertEquals("localhost", optionals.server().get().host()); assertEquals(8080, optionals.server().get().port()); assertTrue(optionals.optional().isPresent()); assertEquals("optional", optionals.optional().get()); assertTrue(optionals.optionalInt().isPresent()); assertEquals(9, optionals.optionalInt().getAsInt()); } @ConfigMapping(prefix = 
"collections") public interface Collections { @WithName("strings") List<String> listStrings(); @WithName("ints") List<Integer> listInts(); } @Inject Collections collections; @Test void collections() { assertEquals(Stream.of("foo", "bar").collect(toList()), collections.listStrings()); assertEquals(Stream.of(1, 2, 3).collect(toList()), collections.listInts()); } @ConfigMapping(prefix = "maps") public interface Maps { Map<String, String> server(); Map<String, Server> group(); interface Server { String host(); int port(); } } @Inject Maps maps; @Test void maps() { assertEquals("localhost", maps.server().get("host")); assertEquals(8080, Integer.valueOf(maps.server().get("port"))); assertEquals("localhost", maps.group().get("server").host()); assertEquals(8080, maps.group().get("server").port()); } public interface ServerBase { Map<String, String> server(); } @ConfigMapping(prefix = "maps.base") public interface MapsWithBase extends ServerBase { @Override Map<String, String> server(); Map<String, Server> group(); interface Server { String host(); int port(); } } @Inject MapsWithBase mapsWithBase; @Test @ConfigMapping(prefix = "defaults") public interface Defaults { @WithDefault("foo") String foo(); @WithDefault("bar") String bar(); } @Inject Defaults defaults; @Test void defaults() { assertEquals("foo", defaults.foo()); assertEquals("bar", defaults.bar()); assertEquals("foo", config.getValue("defaults.foo", String.class)); final List<String> propertyNames = stream(config.getPropertyNames().spliterator(), false).collect(toList()); assertFalse(propertyNames.contains("defaults.foo")); } @ConfigMapping(prefix = "converters") public interface Converters { @WithConverter(FooBarConverter.class) String foo(); class FooBarConverter implements Converter<String> { @Override public String convert(final String value) { return "bar"; } } } @Inject Converters converters; @Test void converters() { assertEquals("bar", converters.foo()); } public interface Base { String foo(); } 
@ConfigMapping(prefix = "hierarchy") public interface ExtendsBase extends Base { } @Inject Base base; @Inject ExtendsBase extendsBase; @Test void hierarchy() { assertSame(base, extendsBase); assertEquals("bar", extendsBase.foo()); } }
class ConfigMappingTest { @RegisterExtension static final QuarkusUnitTest TEST = new QuarkusUnitTest() .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addAsResource(new StringAsset("config.my.prop=1234\n" + "group.host=localhost\n" + "group.port=8080\n" + "types.int=9\n" + "types.long=9999999999\n" + "types.float=99.9\n" + "types.double=99.99\n" + "types.char=c\n" + "types.boolean=true\n" + "types.value=1234\n" + "optionals.server.host=localhost\n" + "optionals.server.port=8080\n" + "optionals.optional=optional\n" + "optionals.optional.int=9\n" + "collections.strings=foo,bar\n" + "collections.ints=1,2,3\n" + "maps.server.host=localhost\n" + "maps.server.port=8080\n" + "maps.group.server.host=localhost\n" + "maps.group.server.port=8080\n" + "maps.base.server.host=localhost\n" + "maps.base.server.port=8080\n" + "maps.base.group.server.host=localhost\n" + "maps.base.group.server.port=8080\n" + "converters.foo=notbar\n" + "override.server.host=localhost\n" + "override.server.port=8080\n" + "cloud.server.host=cloud\n" + "cloud.server.port=9000\n" + "cloud.server.port=9000\n" + "hierarchy.foo=bar"), "application.properties")); @Inject Config config; @ConfigMapping(prefix = "config") public interface MyConfigMapping { @WithName("my.prop") String myProp(); } @Inject MyConfigMapping myConfigMapping; @Test void configMapping() { SmallRyeConfig smallRyeConfig = ((SmallRyeConfig) config); MyConfigMapping configMapping = smallRyeConfig.getConfigMapping(MyConfigMapping.class); assertNotNull(configMapping); assertEquals("1234", configMapping.myProp()); assertNotNull(myConfigMapping); assertEquals("1234", myConfigMapping.myProp()); } @ConfigMapping(prefix = "group") public interface GroupMapping { @WithParentName ServerHost host(); @WithParentName ServerPort port(); interface ServerHost { String host(); } interface ServerPort { int port(); } } @Inject GroupMapping groupMapping; @Test void groups() { assertNotNull(groupMapping); assertEquals("localhost", 
groupMapping.host().host()); assertEquals(8080, groupMapping.port().port()); } @ConfigMapping(prefix = "types") public interface SomeTypes { @WithName("int") int intPrimitive(); @WithName("int") Integer intWrapper(); @WithName("long") long longPrimitive(); @WithName("long") Long longWrapper(); @WithName("float") float floatPrimitive(); @WithName("float") Float floatWrapper(); @WithName("double") double doublePrimitive(); @WithName("double") Double doubleWrapper(); @WithName("char") char charPrimitive(); @WithName("char") Character charWrapper(); @WithName("boolean") boolean booleanPrimitive(); @WithName("boolean") Boolean booleanWrapper(); @WithName("value") ConfigValue configValue(); } @Inject SomeTypes types; @Test void types() { assertNotNull(types); assertEquals(9, types.intPrimitive()); assertEquals(9, types.intWrapper()); assertEquals(9999999999L, types.longPrimitive()); assertEquals(9999999999L, types.longWrapper()); assertEquals(99.9f, types.floatPrimitive()); assertEquals(99.9f, types.floatWrapper()); assertEquals(99.99, types.doublePrimitive()); assertEquals(99.99, types.doubleWrapper()); assertEquals('c', types.charPrimitive()); assertEquals('c', types.charWrapper()); assertTrue(types.booleanPrimitive()); assertTrue(types.booleanWrapper()); assertEquals("1234", types.configValue().getValue()); } @ConfigMapping(prefix = "optionals") public interface Optionals { Optional<Server> server(); Optional<String> optional(); @WithName("optional.int") OptionalInt optionalInt(); interface Server { String host(); int port(); } } @Inject Optionals optionals; @Test void optionals() { assertTrue(optionals.server().isPresent()); assertEquals("localhost", optionals.server().get().host()); assertEquals(8080, optionals.server().get().port()); assertTrue(optionals.optional().isPresent()); assertEquals("optional", optionals.optional().get()); assertTrue(optionals.optionalInt().isPresent()); assertEquals(9, optionals.optionalInt().getAsInt()); } @ConfigMapping(prefix = 
"collections") public interface Collections { @WithName("strings") List<String> listStrings(); @WithName("ints") List<Integer> listInts(); } @Inject Collections collections; @Test void collections() { assertEquals(Stream.of("foo", "bar").collect(toList()), collections.listStrings()); assertEquals(Stream.of(1, 2, 3).collect(toList()), collections.listInts()); } @ConfigMapping(prefix = "maps") public interface Maps { Map<String, String> server(); Map<String, Server> group(); interface Server { String host(); int port(); } } @Inject Maps maps; @Test void maps() { assertEquals("localhost", maps.server().get("host")); assertEquals(8080, Integer.valueOf(maps.server().get("port"))); assertEquals("localhost", maps.group().get("server").host()); assertEquals(8080, maps.group().get("server").port()); } public interface ServerBase { Map<String, String> server(); } @ConfigMapping(prefix = "maps.base") public interface MapsWithBase extends ServerBase { @Override Map<String, String> server(); Map<String, Server> group(); interface Server { String host(); int port(); } } @Inject MapsWithBase mapsWithBase; @Test @ConfigMapping(prefix = "defaults") public interface Defaults { @WithDefault("foo") String foo(); @WithDefault("bar") String bar(); } @Inject Defaults defaults; @Test void defaults() { assertEquals("foo", defaults.foo()); assertEquals("bar", defaults.bar()); assertEquals("foo", config.getValue("defaults.foo", String.class)); final List<String> propertyNames = stream(config.getPropertyNames().spliterator(), false).collect(toList()); assertFalse(propertyNames.contains("defaults.foo")); } @ConfigMapping(prefix = "converters") public interface Converters { @WithConverter(FooBarConverter.class) String foo(); class FooBarConverter implements Converter<String> { @Override public String convert(final String value) { return "bar"; } } } @Inject Converters converters; @Test void converters() { assertEquals("bar", converters.foo()); } public interface Base { String foo(); } 
@ConfigMapping(prefix = "hierarchy") public interface ExtendsBase extends Base { } @Inject Base base; @Inject ExtendsBase extendsBase; @Test void hierarchy() { assertSame(base, extendsBase); assertEquals("bar", extendsBase.foo()); } }
"availability" in the resolver means that the tracked remote repository (identified by repo ID) is available in the context in which the artifact is asked for. So "remote" may come in as "the remote repo ID is available in the current context asking for the artifact" :)
public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropBlank() throws Exception {
    // A blank (whitespace-only) value for the ignore-availability property still counts
    // as "set": availability tracking is ignored and the artifact resolves from the tail.
    setSystemProp("maven.repo.local.tail.ignoreAvailability", " ");
    final String[] tail = new String[] { M2_FROM_REMOTE };
    final BootstrapMavenContext ctx = bootstrapMavenContextForProject(
            "workspace-with-local-repo-tail",
            BootstrapMavenContext.config().setLocalRepositoryTail(tail));
    assertNotNull(resolveOrgAcmeFooJar001(ctx));
}
assertNotNull(resolveOrgAcmeFooJar001(mvn));
public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropBlank() throws Exception {
    // Setting the ignore-availability flag to a blank string should behave like the flag
    // being present, so resolution from the from-remote tail repository must succeed.
    setSystemProp("maven.repo.local.tail.ignoreAvailability", " ");
    final BootstrapMavenContext ctx =
            bootstrapMavenContextForProject(
                    "workspace-with-local-repo-tail",
                    BootstrapMavenContext.config()
                            .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE }));
    assertNotNull(resolveOrgAcmeFooJar001(ctx));
}
class ChainedLocalRepositoryManagerTest extends BootstrapMavenContextTestBase { private static final String M2_LOCAL_1; private static final String M2_LOCAL_2; private static final String M2_FROM_REMOTE; static { final String projectLocation; try { projectLocation = getProjectLocation("workspace-with-local-repo-tail").toString(); } catch (URISyntaxException e) { throw new RuntimeException(e); } M2_LOCAL_1 = Paths.get(projectLocation, ".m2-local-1", "repository").toAbsolutePath().toString(); M2_LOCAL_2 = Paths.get(projectLocation, ".m2-local-2", "repository").toAbsolutePath().toString(); M2_FROM_REMOTE = Paths.get(projectLocation, ".m2-from-remote", "repository").toAbsolutePath().toString(); } @Test public void testNoTail() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButEmptyString() throws Exception { setSystemProp("maven.repo.local.tail", ""); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButBlank() throws Exception { setSystemProp("maven.repo.local.tail", " "); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButNonExistent() throws Exception { setSystemProp("maven.repo.local.tail", "/tmp/this-dir-does-not-exist"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailViaSystemProp() throws Exception { setSystemProp("maven.repo.local.tail", 
M2_LOCAL_1); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailResolutionOrder() throws Exception { final BootstrapMavenContext mvnLocal1first = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1, M2_LOCAL_2 })); final BootstrapMavenContext mvnLocal2first = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_2, M2_LOCAL_1 })); assertEquals(resolveOrgAcmeFooJar001(mvnLocal1first).getFile().getAbsolutePath(), Paths.get(M2_LOCAL_1, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); assertEquals(resolveOrgAcmeFooJar001(mvnLocal2first).getFile().getAbsolutePath(), Paths.get(M2_LOCAL_2, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); } @Test public void testValidTailMultiplicity() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1, M2_LOCAL_2 })); final Artifact foo = resolveOrgAcmeFooJar001(mvn); assertNotNull(foo); assertEquals(foo.getFile().getAbsolutePath(), Paths.get(M2_LOCAL_1, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); final Artifact bar = resolveOrgAcmeBarJar002(mvn); assertNotNull(bar); assertEquals(bar.getFile().getAbsolutePath(), Paths.get(M2_LOCAL_2, "org", "acme", "bar", "0.0.2", 
"bar-0.0.2.jar").toAbsolutePath().toString()); } @Test public void testValidTailLocalCheckingForAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(false) .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteCheckingForAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(false) .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteCheckingForAvailabilityViaSystemProp() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", "false"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropEmpty() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", ""); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test @Test public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropTruthy() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", "fals"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() 
.setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailLocalIgnoringAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(true) .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteIgnoringAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(true) .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } private Artifact resolveOrgAcmeFooJar001(BootstrapMavenContext ctx) throws BootstrapMavenException { final MavenArtifactResolver resolver = new MavenArtifactResolver(ctx); return resolver.resolve(new DefaultArtifact("org.acme", "foo", "", "jar", "0.0.1")).getArtifact(); } private Artifact resolveOrgAcmeBarJar002(BootstrapMavenContext ctx) throws BootstrapMavenException { final MavenArtifactResolver resolver = new MavenArtifactResolver(ctx); return resolver.resolve(new DefaultArtifact("org.acme", "bar", "", "jar", "0.0.2")).getArtifact(); } }
class ChainedLocalRepositoryManagerTest extends BootstrapMavenContextTestBase { private static final String M2_LOCAL_1; private static final String M2_LOCAL_2; private static final String M2_FROM_REMOTE; static { final String projectLocation; try { projectLocation = getProjectLocation("workspace-with-local-repo-tail").toString(); } catch (URISyntaxException e) { throw new RuntimeException(e); } M2_LOCAL_1 = Paths.get(projectLocation, ".m2-local-1", "repository").toAbsolutePath().toString(); M2_LOCAL_2 = Paths.get(projectLocation, ".m2-local-2", "repository").toAbsolutePath().toString(); M2_FROM_REMOTE = Paths.get(projectLocation, ".m2-from-remote", "repository").toAbsolutePath().toString(); } @Test public void testNoTail() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButEmptyString() throws Exception { setSystemProp("maven.repo.local.tail", ""); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButBlank() throws Exception { setSystemProp("maven.repo.local.tail", " "); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testTailConfiguredButNonExistent() throws Exception { setSystemProp("maven.repo.local.tail", "/tmp/this-dir-does-not-exist"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailViaSystemProp() throws Exception { setSystemProp("maven.repo.local.tail", 
M2_LOCAL_1); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail"); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailResolutionOrder() throws Exception { final BootstrapMavenContext mvnLocal1first = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1, M2_LOCAL_2 })); final BootstrapMavenContext mvnLocal2first = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_2, M2_LOCAL_1 })); assertEquals(resolveOrgAcmeFooJar001(mvnLocal1first).getFile().getAbsolutePath(), Paths.get(M2_LOCAL_1, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); assertEquals(resolveOrgAcmeFooJar001(mvnLocal2first).getFile().getAbsolutePath(), Paths.get(M2_LOCAL_2, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); } @Test public void testValidTailMultiplicity() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_LOCAL_1, M2_LOCAL_2 })); final Artifact foo = resolveOrgAcmeFooJar001(mvn); assertNotNull(foo); assertEquals(foo.getFile().getAbsolutePath(), Paths.get(M2_LOCAL_1, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString()); final Artifact bar = resolveOrgAcmeBarJar002(mvn); assertNotNull(bar); assertEquals(bar.getFile().getAbsolutePath(), Paths.get(M2_LOCAL_2, "org", "acme", "bar", "0.0.2", 
"bar-0.0.2.jar").toAbsolutePath().toString()); } @Test public void testValidTailLocalCheckingForAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(false) .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteCheckingForAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(false) .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteCheckingForAvailabilityViaSystemProp() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", "false"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropEmpty() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", ""); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test @Test public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropTruthy() throws Exception { setSystemProp("maven.repo.local.tail.ignoreAvailability", "fals"); final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() 
.setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailLocalIgnoringAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(true) .setLocalRepositoryTail(new String[] { M2_LOCAL_1 })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } @Test public void testValidTailFromRemoteIgnoringAvailabilityViaConfig() throws Exception { final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail", BootstrapMavenContext.config() .setLocalRepositoryTailIgnoreAvailability(true) .setLocalRepositoryTail(new String[] { M2_FROM_REMOTE })); assertNotNull(resolveOrgAcmeFooJar001(mvn)); } private Artifact resolveOrgAcmeFooJar001(BootstrapMavenContext ctx) throws BootstrapMavenException { final MavenArtifactResolver resolver = new MavenArtifactResolver(ctx); return resolver.resolve(new DefaultArtifact("org.acme", "foo", "", "jar", "0.0.1")).getArtifact(); } private Artifact resolveOrgAcmeBarJar002(BootstrapMavenContext ctx) throws BootstrapMavenException { final MavenArtifactResolver resolver = new MavenArtifactResolver(ctx); return resolver.resolve(new DefaultArtifact("org.acme", "bar", "", "jar", "0.0.2")).getArtifact(); } }
I was thinking of just having the loop ``` for (T newValue : newValues) { valueCoder.encode(newValue, out); if (out.size() > BAG_APPEND_BATCHING_LIMIT) { [send out.toByteStringAndReset()] } } ... ``` rather than introducing the (I think correct, but complex to reason about) `consumePrefixToByteString`. True, we would fail on an element of size 100-x MB after writing x MB, for x < 10, but the question is whether that corner case is worth the additional complexity.
public void asyncClose() throws Exception { checkState( !isClosed, "Bag user state is no longer usable because it is closed for %s", request.getStateKey()); isClosed = true; if (!isCleared && newValues.isEmpty()) { return; } if (isCleared) { beamFnStateClient.handle( request.toBuilder().setClear(StateClearRequest.getDefaultInstance())); } if (!newValues.isEmpty()) { ByteStringOutputStream out = new ByteStringOutputStream(); for (T newValue : newValues) { int previousSize = out.size(); valueCoder.encode(newValue, out); if (out.size() > BAG_APPEND_BATCHING_LIMIT && previousSize > 0) { beamFnStateClient.handle( request .toBuilder() .setAppend( StateAppendRequest.newBuilder() .setData(out.consumePrefixToByteString(previousSize)))); } if (out.size() > BAG_APPEND_BATCHING_LIMIT) { beamFnStateClient.handle( request .toBuilder() .setAppend(StateAppendRequest.newBuilder().setData(out.toByteStringAndReset()))); } } if (out.size() > 0) { beamFnStateClient.handle( request .toBuilder() .setAppend(StateAppendRequest.newBuilder().setData(out.toByteStringAndReset()))); } } if (isCleared) { oldValues.clearAndAppend(newValues); } else { oldValues.append(newValues); } }
if (out.size() > BAG_APPEND_BATCHING_LIMIT) {
public void asyncClose() throws Exception { checkState( !isClosed, "Bag user state is no longer usable because it is closed for %s", request.getStateKey()); isClosed = true; if (!isCleared && newValues.isEmpty()) { return; } if (isCleared) { beamFnStateClient.handle( request.toBuilder().setClear(StateClearRequest.getDefaultInstance())); } if (!newValues.isEmpty()) { ByteStringOutputStream out = new ByteStringOutputStream(); for (T newValue : newValues) { int previousSize = out.size(); valueCoder.encode(newValue, out); if (out.size() > BAG_APPEND_BATCHING_LIMIT && previousSize > 0) { beamFnStateClient.handle( request .toBuilder() .setAppend( StateAppendRequest.newBuilder() .setData(out.consumePrefixToByteString(previousSize)))); } if (out.size() > BAG_APPEND_BATCHING_LIMIT) { beamFnStateClient.handle( request .toBuilder() .setAppend(StateAppendRequest.newBuilder().setData(out.toByteStringAndReset()))); } } if (out.size() > 0) { beamFnStateClient.handle( request .toBuilder() .setAppend(StateAppendRequest.newBuilder().setData(out.toByteStringAndReset()))); } } if (isCleared) { oldValues.clearAndAppend(newValues); } else { oldValues.append(newValues); } }
class BagUserState<T> { private final Cache<?, ?> cache; private final BeamFnStateClient beamFnStateClient; private final StateRequest request; private final Coder<T> valueCoder; private final CachingStateIterable<T> oldValues; private List<T> newValues; private boolean isCleared; private boolean isClosed; static final int BAG_APPEND_BATCHING_LIMIT = 10 * 1024 * 1024; /** The cache must be namespaced for this state object accordingly. */ public BagUserState( Cache<?, ?> cache, BeamFnStateClient beamFnStateClient, String instructionId, StateKey stateKey, Coder<T> valueCoder) { checkArgument( stateKey.hasBagUserState(), "Expected BagUserState StateKey but received %s.", stateKey); this.cache = cache; this.beamFnStateClient = beamFnStateClient; this.valueCoder = valueCoder; this.request = StateRequest.newBuilder().setInstructionId(instructionId).setStateKey(stateKey).build(); this.oldValues = StateFetchingIterators.readAllAndDecodeStartingFrom( this.cache, beamFnStateClient, request, valueCoder); this.newValues = new ArrayList<>(); } public PrefetchableIterable<T> get() { checkState( !isClosed, "Bag user state is no longer usable because it is closed for %s", request.getStateKey()); if (isCleared) { return PrefetchableIterables.limit(Collections.unmodifiableList(newValues), newValues.size()); } else if (newValues.isEmpty()) { return oldValues; } return PrefetchableIterables.concat( oldValues, Iterables.limit(Collections.unmodifiableList(newValues), newValues.size())); } public void append(T t) { checkState( !isClosed, "Bag user state is no longer usable because it is closed for %s", request.getStateKey()); newValues.add(t); } public void clear() { checkState( !isClosed, "Bag user state is no longer usable because it is closed for %s", request.getStateKey()); isCleared = true; newValues = new ArrayList<>(); } @SuppressWarnings("FutureReturnValueIgnored") }
class BagUserState<T> { private final Cache<?, ?> cache; private final BeamFnStateClient beamFnStateClient; private final StateRequest request; private final Coder<T> valueCoder; private final CachingStateIterable<T> oldValues; private List<T> newValues; private boolean isCleared; private boolean isClosed; static final int BAG_APPEND_BATCHING_LIMIT = 10 * 1024 * 1024; /** The cache must be namespaced for this state object accordingly. */ public BagUserState( Cache<?, ?> cache, BeamFnStateClient beamFnStateClient, String instructionId, StateKey stateKey, Coder<T> valueCoder) { checkArgument( stateKey.hasBagUserState(), "Expected BagUserState StateKey but received %s.", stateKey); this.cache = cache; this.beamFnStateClient = beamFnStateClient; this.valueCoder = valueCoder; this.request = StateRequest.newBuilder().setInstructionId(instructionId).setStateKey(stateKey).build(); this.oldValues = StateFetchingIterators.readAllAndDecodeStartingFrom( this.cache, beamFnStateClient, request, valueCoder); this.newValues = new ArrayList<>(); } public PrefetchableIterable<T> get() { checkState( !isClosed, "Bag user state is no longer usable because it is closed for %s", request.getStateKey()); if (isCleared) { return PrefetchableIterables.limit(Collections.unmodifiableList(newValues), newValues.size()); } else if (newValues.isEmpty()) { return oldValues; } return PrefetchableIterables.concat( oldValues, Iterables.limit(Collections.unmodifiableList(newValues), newValues.size())); } public void append(T t) { checkState( !isClosed, "Bag user state is no longer usable because it is closed for %s", request.getStateKey()); newValues.add(t); } public void clear() { checkState( !isClosed, "Bag user state is no longer usable because it is closed for %s", request.getStateKey()); isCleared = true; newValues = new ArrayList<>(); } @SuppressWarnings("FutureReturnValueIgnored") }
Do we have any numbers that indicate disabling `TCP_NODELAY` is worth it? We unconditionally enabled nodelay for everything to get rid of spurious latency spikes incurred by the in-kernel deferred packet sending.
public Connection(TransportThread parent, Supervisor owner, Spec spec, Object context, boolean tcpNoDelay) { super(context); this.parent = parent; this.owner = owner; this.spec = spec; this.tcpNoDelay = tcpNoDelay; server = false; owner.sessionInit(this); }
this.tcpNoDelay = tcpNoDelay;
public Connection(TransportThread parent, Supervisor owner, Spec spec, Object context, boolean tcpNoDelay) { super(context); this.parent = parent; this.owner = owner; this.spec = spec; this.tcpNoDelay = tcpNoDelay; server = false; owner.sessionInit(this); }
class Connection extends Target { private static final Logger log = Logger.getLogger(Connection.class.getName()); private static final int READ_SIZE = 32768; private static final int READ_REDO = 10; private static final int WRITE_SIZE = 32768; private static final int WRITE_REDO = 10; private static final int INITIAL = 0; private static final int CONNECTING = 1; private static final int CONNECTED = 2; private static final int CLOSED = 3; private int state = INITIAL; private final Queue queue = new Queue(); private final Queue myQueue = new Queue(); private final Buffer input = new Buffer(READ_SIZE * 2); private final Buffer output = new Buffer(WRITE_SIZE * 2); private int maxInputSize = 64*1024; private int maxOutputSize = 64*1024; private final boolean tcpNoDelay; private final Map<Integer, ReplyHandler> replyMap = new HashMap<>(); private final Map<TargetWatcher, TargetWatcher> watchers = new IdentityHashMap<>(); private int activeReqs = 0; private int writeWork = 0; private boolean pendingHandshakeWork = false; private final TransportThread parent; private final Supervisor owner; private final Spec spec; private CryptoSocket socket; private int readSize = READ_SIZE; private final boolean server; private final AtomicLong requestId = new AtomicLong(0); private SelectionKey selectionKey; private Exception lostReason = null; private void setState(int state) { if (state <= this.state) { log.log(Level.WARNING, "Bogus state transition: " + this.state + "->" + state); return; } boolean live = (state == CONNECTED); boolean down = (state == CLOSED); boolean fini; boolean pendingWrite; synchronized (this) { this.state = state; fini = down && (activeReqs == 0); pendingWrite = (writeWork > 0); } if (live) { enableRead(); if (pendingWrite) { enableWrite(); } else { disableWrite(); } owner.sessionLive(this); } if (down) { for (ReplyHandler rh : replyMap.values()) { rh.handleConnectionDown(); } for (TargetWatcher watcher : watchers.values()) { watcher.notifyTargetInvalid(this); 
} owner.sessionDown(this); } if (fini) { owner.sessionFini(this); } } public Connection(TransportThread parent, Supervisor owner, SocketChannel channel, boolean tcpNoDelay) { this.parent = parent; this.owner = owner; this.socket = parent.transport().createServerCryptoSocket(channel); this.spec = null; this.tcpNoDelay = tcpNoDelay; server = true; owner.sessionInit(this); } public void setMaxInputSize(int bytes) { maxInputSize = bytes; } public void setMaxOutputSize(int bytes) { maxOutputSize = bytes; } public TransportThread transportThread() { return parent; } public int allocateKey() { long v = requestId.getAndIncrement(); v = v*2 + (server ? 1 : 0); int i = (int)(v & 0x7fffffff); return i; } public synchronized boolean cancelReply(ReplyHandler handler) { if (state == CLOSED) { return false; } ReplyHandler stored = replyMap.remove(handler.key()); if (stored != handler) { if (stored != null) { replyMap.put(handler.key(), stored); } return false; } return true; } public boolean postPacket(Packet packet, ReplyHandler handler) { boolean accepted = false; boolean enableWrite = false; synchronized (this) { if (state <= CONNECTED) { enableWrite = (writeWork == 0 && state == CONNECTED); queue.enqueue(packet); writeWork++; accepted = true; if (handler != null) { replyMap.put(handler.key(), handler); } } } if (enableWrite) { parent.enableWrite(this); } return accepted; } public boolean postPacket(Packet packet) { return postPacket(packet, null); } public Connection connect() { if (spec == null || spec.malformed()) { setLostReason(new IllegalArgumentException("jrt: malformed or missing spec")); return this; } try { socket = parent.transport().createClientCryptoSocket(SocketChannel.open(spec.resolveAddress()), spec); } catch (Exception e) { setLostReason(e); } return this; } public boolean init(Selector selector) { if (!hasSocket()) { return false; } try { socket.channel().configureBlocking(false); socket.channel().socket().setTcpNoDelay(tcpNoDelay); selectionKey = 
socket.channel().register(selector, SelectionKey.OP_READ | SelectionKey.OP_WRITE, this); } catch (Exception e) { log.log(Level.WARNING, "Error initializing connection", e); setLostReason(e); return false; } setState(CONNECTING); return true; } public void enableRead() { selectionKey.interestOps(selectionKey.interestOps() | SelectionKey.OP_READ); } public void disableRead() { selectionKey.interestOps(selectionKey.interestOps() & ~SelectionKey.OP_READ); } public void enableWrite() { selectionKey.interestOps(selectionKey.interestOps() | SelectionKey.OP_WRITE); } public void disableWrite() { selectionKey.interestOps(selectionKey.interestOps() & ~SelectionKey.OP_WRITE); } private void handshake() throws IOException { if (pendingHandshakeWork) { return; } switch (socket.handshake()) { case DONE: if (socket.getMinimumReadBufferSize() > readSize) { readSize = socket.getMinimumReadBufferSize(); } setState(CONNECTED); while (socket.drain(input.getChannelWritable(readSize)) > 0) { handlePackets(); } break; case NEED_READ: enableRead(); disableWrite(); break; case NEED_WRITE: disableRead(); enableWrite(); break; case NEED_WORK: disableRead(); disableWrite(); pendingHandshakeWork = true; parent.transport().doHandshakeWork(this); break; } } public void doHandshakeWork() { socket.doHandshakeWork(); } public void handleHandshakeWorkDone() throws IOException { if (!pendingHandshakeWork) { throw new IllegalStateException("jrt: got unwanted handshake work done event"); } pendingHandshakeWork = false; if (state == CONNECTING) { handshake(); } else { throw new IOException("jrt: got handshake work done event in incompatible state: " + state); } } private void handlePackets() throws IOException { ByteBuffer rb = input.getReadable(); while (true) { PacketInfo info = PacketInfo.getPacketInfo(rb); if (info == null || info.packetLength() > rb.remaining()) { break; } owner.readPacket(info); Packet packet; try { packet = info.decodePacket(rb); } catch (RuntimeException e) { 
log.log(Level.WARNING, "got garbage; closing connection: " + toString()); throw new IOException("jrt: decode error", e); } ReplyHandler handler; synchronized (this) { handler = replyMap.remove(packet.requestId()); } if (handler != null) { handler.handleReply(packet); } else { owner.handlePacket(this, packet); } } } private void read() throws IOException { boolean doneRead = false; for (int i = 0; !doneRead && i < READ_REDO; i++) { ByteBuffer wb = input.getChannelWritable(readSize); if (socket.read(wb) == -1) { throw new IOException("jrt: Connection closed by peer"); } doneRead = (wb.remaining() > 0); handlePackets(); } while (socket.drain(input.getChannelWritable(readSize)) > 0) { handlePackets(); } if (maxInputSize > 0) { input.shrink(maxInputSize); } } public void handleReadEvent() throws IOException { if (state == CONNECTED) { read(); } else if (state == CONNECTING) { handshake(); } else { throw new IOException("jrt: got read event in incompatible state: " + state); } } private void write() throws IOException { synchronized (this) { queue.flush(myQueue); } for (int i = 0; i < WRITE_REDO; i++) { while (output.bytes() < WRITE_SIZE) { Packet packet = (Packet) myQueue.dequeue(); if (packet == null) { break; } PacketInfo info = packet.getPacketInfo(); ByteBuffer wb = output.getWritable(info.packetLength()); owner.writePacket(info); info.encodePacket(packet, wb); } ByteBuffer rb = output.getChannelReadable(); if (rb.remaining() == 0) { break; } socket.write(rb); if (rb.remaining() > 0) { break; } } int myWriteWork = 0; if (output.bytes() > 0) { myWriteWork++; } if (socket.flush() == CryptoSocket.FlushResult.NEED_WRITE) { myWriteWork++; } boolean disableWrite; synchronized (this) { writeWork = queue.size() + myQueue.size() + myWriteWork; disableWrite = (writeWork == 0); } if (disableWrite) { disableWrite(); } if (maxOutputSize > 0) { output.shrink(maxOutputSize); } } public void handleWriteEvent() throws IOException { if (state == CONNECTED) { write(); } else if (state 
== CONNECTING) { handshake(); } else { throw new IOException("jrt: got write event in incompatible state: " + state); } } public void fini() { setState(CLOSED); if (selectionKey != null) { selectionKey.cancel(); } } public boolean isClosed() { return (state == CLOSED); } public boolean hasSocket() { return ((socket != null) && (socket.channel() != null)); } public void closeSocket() { if (hasSocket()) { try { socket.channel().socket().close(); } catch (Exception e) { log.log(Level.WARNING, "Error closing connection", e); } } } public void setLostReason(Exception e) { if (lostReason == null) { lostReason = e; } } public TieBreaker startRequest() { synchronized (this) { activeReqs++; } return new TieBreaker(); } public boolean completeRequest(TieBreaker done) { boolean signalFini = false; synchronized (this) { if (!done.first()) { return false; } if (--activeReqs == 0 && state == CLOSED) { signalFini = true; } } if (signalFini) { owner.sessionFini(this); } return true; } public boolean isValid() { return (state != CLOSED); } public Exception getConnectionLostReason() { return lostReason; } @Override public Optional<SecurityContext> getSecurityContext() { return Optional.ofNullable(socket) .flatMap(CryptoSocket::getSecurityContext); } public boolean isClient() { return !server; } public boolean isServer() { return server; } public void invokeSync(Request req, double timeout) { SingleRequestWaiter waiter = new SingleRequestWaiter(); invokeAsync(req, timeout, waiter); waiter.waitDone(); } public void invokeAsync(Request req, double timeout, RequestWaiter waiter) { if (timeout < 0.0) { timeout = 0.0; } new InvocationClient(this, req, timeout, waiter).invoke(); } public boolean invokeVoid(Request req) { return postPacket(new RequestPacket(Packet.FLAG_NOREPLY, allocateKey(), req.methodName(), req.parameters())); } public synchronized boolean addWatcher(TargetWatcher watcher) { if (state == CLOSED) { return false; } watchers.put(watcher, watcher); return true; } public 
synchronized boolean removeWatcher(TargetWatcher watcher) { if (state == CLOSED) { return false; } watchers.remove(watcher); return true; } public void close() { parent.closeConnection(this); } public String toString() { if (hasSocket()) { return "Connection { " + socket.channel().socket() + " }"; } return "Connection { no socket, spec " + spec + " }"; } }
class Connection extends Target { private static final Logger log = Logger.getLogger(Connection.class.getName()); private static final int READ_SIZE = 32768; private static final int READ_REDO = 10; private static final int WRITE_SIZE = 32768; private static final int WRITE_REDO = 10; private static final int INITIAL = 0; private static final int CONNECTING = 1; private static final int CONNECTED = 2; private static final int CLOSED = 3; private int state = INITIAL; private final Queue queue = new Queue(); private final Queue myQueue = new Queue(); private final Buffer input = new Buffer(READ_SIZE * 2); private final Buffer output = new Buffer(WRITE_SIZE * 2); private int maxInputSize = 64*1024; private int maxOutputSize = 64*1024; private final boolean tcpNoDelay; private final Map<Integer, ReplyHandler> replyMap = new HashMap<>(); private final Map<TargetWatcher, TargetWatcher> watchers = new IdentityHashMap<>(); private int activeReqs = 0; private int writeWork = 0; private boolean pendingHandshakeWork = false; private final TransportThread parent; private final Supervisor owner; private final Spec spec; private CryptoSocket socket; private int readSize = READ_SIZE; private final boolean server; private final AtomicLong requestId = new AtomicLong(0); private SelectionKey selectionKey; private Exception lostReason = null; private void setState(int state) { if (state <= this.state) { log.log(Level.WARNING, "Bogus state transition: " + this.state + "->" + state); return; } boolean live = (state == CONNECTED); boolean down = (state == CLOSED); boolean fini; boolean pendingWrite; synchronized (this) { this.state = state; fini = down && (activeReqs == 0); pendingWrite = (writeWork > 0); } if (live) { enableRead(); if (pendingWrite) { enableWrite(); } else { disableWrite(); } owner.sessionLive(this); } if (down) { for (ReplyHandler rh : replyMap.values()) { rh.handleConnectionDown(); } for (TargetWatcher watcher : watchers.values()) { watcher.notifyTargetInvalid(this); 
} owner.sessionDown(this); } if (fini) { owner.sessionFini(this); } } public Connection(TransportThread parent, Supervisor owner, SocketChannel channel, boolean tcpNoDelay) { this.parent = parent; this.owner = owner; this.socket = parent.transport().createServerCryptoSocket(channel); this.spec = null; this.tcpNoDelay = tcpNoDelay; server = true; owner.sessionInit(this); } public void setMaxInputSize(int bytes) { maxInputSize = bytes; } public void setMaxOutputSize(int bytes) { maxOutputSize = bytes; } public TransportThread transportThread() { return parent; } public int allocateKey() { long v = requestId.getAndIncrement(); v = v*2 + (server ? 1 : 0); int i = (int)(v & 0x7fffffff); return i; } public synchronized boolean cancelReply(ReplyHandler handler) { if (state == CLOSED) { return false; } ReplyHandler stored = replyMap.remove(handler.key()); if (stored != handler) { if (stored != null) { replyMap.put(handler.key(), stored); } return false; } return true; } public boolean postPacket(Packet packet, ReplyHandler handler) { boolean accepted = false; boolean enableWrite = false; synchronized (this) { if (state <= CONNECTED) { enableWrite = (writeWork == 0 && state == CONNECTED); queue.enqueue(packet); writeWork++; accepted = true; if (handler != null) { replyMap.put(handler.key(), handler); } } } if (enableWrite) { parent.enableWrite(this); } return accepted; } public boolean postPacket(Packet packet) { return postPacket(packet, null); } public Connection connect() { if (spec == null || spec.malformed()) { setLostReason(new IllegalArgumentException("jrt: malformed or missing spec")); return this; } try { socket = parent.transport().createClientCryptoSocket(SocketChannel.open(spec.resolveAddress()), spec); } catch (Exception e) { setLostReason(e); } return this; } public boolean init(Selector selector) { if (!hasSocket()) { return false; } try { socket.channel().configureBlocking(false); socket.channel().socket().setTcpNoDelay(tcpNoDelay); selectionKey = 
socket.channel().register(selector, SelectionKey.OP_READ | SelectionKey.OP_WRITE, this); } catch (Exception e) { log.log(Level.WARNING, "Error initializing connection", e); setLostReason(e); return false; } setState(CONNECTING); return true; } public void enableRead() { selectionKey.interestOps(selectionKey.interestOps() | SelectionKey.OP_READ); } public void disableRead() { selectionKey.interestOps(selectionKey.interestOps() & ~SelectionKey.OP_READ); } public void enableWrite() { selectionKey.interestOps(selectionKey.interestOps() | SelectionKey.OP_WRITE); } public void disableWrite() { selectionKey.interestOps(selectionKey.interestOps() & ~SelectionKey.OP_WRITE); } private void handshake() throws IOException { if (pendingHandshakeWork) { return; } switch (socket.handshake()) { case DONE: if (socket.getMinimumReadBufferSize() > readSize) { readSize = socket.getMinimumReadBufferSize(); } setState(CONNECTED); while (socket.drain(input.getChannelWritable(readSize)) > 0) { handlePackets(); } break; case NEED_READ: enableRead(); disableWrite(); break; case NEED_WRITE: disableRead(); enableWrite(); break; case NEED_WORK: disableRead(); disableWrite(); pendingHandshakeWork = true; parent.transport().doHandshakeWork(this); break; } } public void doHandshakeWork() { socket.doHandshakeWork(); } public void handleHandshakeWorkDone() throws IOException { if (!pendingHandshakeWork) { throw new IllegalStateException("jrt: got unwanted handshake work done event"); } pendingHandshakeWork = false; if (state == CONNECTING) { handshake(); } else { throw new IOException("jrt: got handshake work done event in incompatible state: " + state); } } private void handlePackets() throws IOException { ByteBuffer rb = input.getReadable(); while (true) { PacketInfo info = PacketInfo.getPacketInfo(rb); if (info == null || info.packetLength() > rb.remaining()) { break; } owner.readPacket(info); Packet packet; try { packet = info.decodePacket(rb); } catch (RuntimeException e) { 
log.log(Level.WARNING, "got garbage; closing connection: " + toString()); throw new IOException("jrt: decode error", e); } ReplyHandler handler; synchronized (this) { handler = replyMap.remove(packet.requestId()); } if (handler != null) { handler.handleReply(packet); } else { owner.handlePacket(this, packet); } } } private void read() throws IOException { boolean doneRead = false; for (int i = 0; !doneRead && i < READ_REDO; i++) { ByteBuffer wb = input.getChannelWritable(readSize); if (socket.read(wb) == -1) { throw new IOException("jrt: Connection closed by peer"); } doneRead = (wb.remaining() > 0); handlePackets(); } while (socket.drain(input.getChannelWritable(readSize)) > 0) { handlePackets(); } if (maxInputSize > 0) { input.shrink(maxInputSize); } } public void handleReadEvent() throws IOException { if (state == CONNECTED) { read(); } else if (state == CONNECTING) { handshake(); } else { throw new IOException("jrt: got read event in incompatible state: " + state); } } private void write() throws IOException { synchronized (this) { queue.flush(myQueue); } for (int i = 0; i < WRITE_REDO; i++) { while (output.bytes() < WRITE_SIZE) { Packet packet = (Packet) myQueue.dequeue(); if (packet == null) { break; } PacketInfo info = packet.getPacketInfo(); ByteBuffer wb = output.getWritable(info.packetLength()); owner.writePacket(info); info.encodePacket(packet, wb); } ByteBuffer rb = output.getChannelReadable(); if (rb.remaining() == 0) { break; } socket.write(rb); if (rb.remaining() > 0) { break; } } int myWriteWork = 0; if (output.bytes() > 0) { myWriteWork++; } if (socket.flush() == CryptoSocket.FlushResult.NEED_WRITE) { myWriteWork++; } boolean disableWrite; synchronized (this) { writeWork = queue.size() + myQueue.size() + myWriteWork; disableWrite = (writeWork == 0); } if (disableWrite) { disableWrite(); } if (maxOutputSize > 0) { output.shrink(maxOutputSize); } } public void handleWriteEvent() throws IOException { if (state == CONNECTED) { write(); } else if (state 
== CONNECTING) { handshake(); } else { throw new IOException("jrt: got write event in incompatible state: " + state); } } public void fini() { setState(CLOSED); if (selectionKey != null) { selectionKey.cancel(); } } public boolean isClosed() { return (state == CLOSED); } public boolean hasSocket() { return ((socket != null) && (socket.channel() != null)); } public void closeSocket() { if (hasSocket()) { try { socket.channel().socket().close(); } catch (Exception e) { log.log(Level.WARNING, "Error closing connection", e); } } } public void setLostReason(Exception e) { if (lostReason == null) { lostReason = e; } } public TieBreaker startRequest() { synchronized (this) { activeReqs++; } return new TieBreaker(); } public boolean completeRequest(TieBreaker done) { boolean signalFini = false; synchronized (this) { if (!done.first()) { return false; } if (--activeReqs == 0 && state == CLOSED) { signalFini = true; } } if (signalFini) { owner.sessionFini(this); } return true; } public boolean isValid() { return (state != CLOSED); } public Exception getConnectionLostReason() { return lostReason; } @Override public Optional<SecurityContext> getSecurityContext() { return Optional.ofNullable(socket) .flatMap(CryptoSocket::getSecurityContext); } public boolean isClient() { return !server; } public boolean isServer() { return server; } public void invokeSync(Request req, double timeout) { SingleRequestWaiter waiter = new SingleRequestWaiter(); invokeAsync(req, timeout, waiter); waiter.waitDone(); } public void invokeAsync(Request req, double timeout, RequestWaiter waiter) { if (timeout < 0.0) { timeout = 0.0; } new InvocationClient(this, req, timeout, waiter).invoke(); } public boolean invokeVoid(Request req) { return postPacket(new RequestPacket(Packet.FLAG_NOREPLY, allocateKey(), req.methodName(), req.parameters())); } public synchronized boolean addWatcher(TargetWatcher watcher) { if (state == CLOSED) { return false; } watchers.put(watcher, watcher); return true; } public 
synchronized boolean removeWatcher(TargetWatcher watcher) { if (state == CLOSED) { return false; } watchers.remove(watcher); return true; } public void close() { parent.closeConnection(this); } public String toString() { if (hasSocket()) { return "Connection { " + socket.channel().socket() + " }"; } return "Connection { no socket, spec " + spec + " }"; } }
If it is something non-trivial and hard to make a call about, I would propose to skip this refactoring for now.
public void startCluster() throws ClusterEntrypointException { LOG.info("Starting {}.", getClass().getSimpleName()); try { PluginManager pluginManager = PluginUtils.createPluginManagerFromRootFolder(configuration); configureFileSystems(configuration, pluginManager); SecurityContext securityContext = installSecurityContext(configuration); securityContext.runSecured((Callable<Void>) () -> { runCluster(configuration, pluginManager); return null; }); } catch (Throwable t) { final Throwable strippedThrowable = ExceptionUtils.stripException(t, UndeclaredThrowableException.class); try { shutDownAsync( ApplicationStatus.FAILED, ExceptionUtils.stringifyException(strippedThrowable), false).get(INITIALIZATION_SHUTDOWN_TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { strippedThrowable.addSuppressed(e); } throw new ClusterEntrypointException( String.format("Failed to initialize the cluster entrypoint %s.", getClass().getSimpleName()), strippedThrowable); } }
public void startCluster() throws ClusterEntrypointException { LOG.info("Starting {}.", getClass().getSimpleName()); try { PluginManager pluginManager = PluginUtils.createPluginManagerFromRootFolder(configuration); configureFileSystems(configuration, pluginManager); SecurityContext securityContext = installSecurityContext(configuration); securityContext.runSecured((Callable<Void>) () -> { runCluster(configuration, pluginManager); return null; }); } catch (Throwable t) { final Throwable strippedThrowable = ExceptionUtils.stripException(t, UndeclaredThrowableException.class); try { shutDownAsync( ApplicationStatus.FAILED, ExceptionUtils.stringifyException(strippedThrowable), false).get(INITIALIZATION_SHUTDOWN_TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { strippedThrowable.addSuppressed(e); } throw new ClusterEntrypointException( String.format("Failed to initialize the cluster entrypoint %s.", getClass().getSimpleName()), strippedThrowable); } }
class ClusterEntrypoint implements AutoCloseableAsync, FatalErrorHandler { public static final ConfigOption<String> EXECUTION_MODE = ConfigOptions .key("internal.cluster.execution-mode") .defaultValue(ExecutionMode.NORMAL.toString()); protected static final Logger LOG = LoggerFactory.getLogger(ClusterEntrypoint.class); protected static final int STARTUP_FAILURE_RETURN_CODE = 1; protected static final int RUNTIME_FAILURE_RETURN_CODE = 2; private static final Time INITIALIZATION_SHUTDOWN_TIMEOUT = Time.seconds(30L); /** The lock to guard startup / shutdown / manipulation methods. */ private final Object lock = new Object(); private final Configuration configuration; private final CompletableFuture<ApplicationStatus> terminationFuture; private final AtomicBoolean isShutDown = new AtomicBoolean(false); @GuardedBy("lock") private DispatcherResourceManagerComponent clusterComponent; @GuardedBy("lock") private MetricRegistryImpl metricRegistry; @GuardedBy("lock") private ProcessMetricGroup processMetricGroup; @GuardedBy("lock") private HighAvailabilityServices haServices; @GuardedBy("lock") private BlobServer blobServer; @GuardedBy("lock") private HeartbeatServices heartbeatServices; @GuardedBy("lock") private RpcService commonRpcService; @GuardedBy("lock") private ExecutorService ioExecutor; private ArchivedExecutionGraphStore archivedExecutionGraphStore; private final Thread shutDownHook; protected ClusterEntrypoint(Configuration configuration) { this.configuration = generateClusterConfiguration(configuration); this.terminationFuture = new CompletableFuture<>(); shutDownHook = ShutdownHookUtil.addShutdownHook(this::cleanupDirectories, getClass().getSimpleName(), LOG); } public CompletableFuture<ApplicationStatus> getTerminationFuture() { return terminationFuture; } private void configureFileSystems(Configuration configuration, PluginManager pluginManager) { LOG.info("Install default filesystem."); FileSystem.initialize(configuration, pluginManager); } private 
SecurityContext installSecurityContext(Configuration configuration) throws Exception { LOG.info("Install security context."); SecurityUtils.install(new SecurityConfiguration(configuration)); return SecurityUtils.getInstalledContext(); } private void runCluster(Configuration configuration, PluginManager pluginManager) throws Exception { synchronized (lock) { initializeServices(configuration, pluginManager); configuration.setString(JobManagerOptions.ADDRESS, commonRpcService.getAddress()); configuration.setInteger(JobManagerOptions.PORT, commonRpcService.getPort()); final DispatcherResourceManagerComponentFactory dispatcherResourceManagerComponentFactory = createDispatcherResourceManagerComponentFactory(configuration); clusterComponent = dispatcherResourceManagerComponentFactory.create( configuration, ioExecutor, commonRpcService, haServices, blobServer, heartbeatServices, metricRegistry, archivedExecutionGraphStore, new RpcMetricQueryServiceRetriever(metricRegistry.getMetricQueryServiceRpcService()), this); clusterComponent.getShutDownFuture().whenComplete( (ApplicationStatus applicationStatus, Throwable throwable) -> { if (throwable != null) { shutDownAsync( ApplicationStatus.UNKNOWN, ExceptionUtils.stringifyException(throwable), false); } else { shutDownAsync( applicationStatus, null, true); } }); } } protected void initializeServices(Configuration configuration, PluginManager pluginManager) throws Exception { LOG.info("Initializing cluster services."); synchronized (lock) { final String bindAddress = configuration.getString(JobManagerOptions.ADDRESS); final String portRange = getRPCPortRange(configuration); commonRpcService = createRpcService(configuration, bindAddress, portRange); configuration.setString(JobManagerOptions.ADDRESS, commonRpcService.getAddress()); configuration.setInteger(JobManagerOptions.PORT, commonRpcService.getPort()); ioExecutor = Executors.newFixedThreadPool( Hardware.getNumberCPUCores(), new ExecutorThreadFactory("cluster-io")); haServices 
= createHaServices(configuration, ioExecutor); blobServer = new BlobServer(configuration, haServices.createBlobStore()); blobServer.start(); heartbeatServices = createHeartbeatServices(configuration); metricRegistry = createMetricRegistry(configuration, pluginManager); final RpcService metricQueryServiceRpcService = MetricUtils.startMetricsRpcService(configuration, bindAddress); metricRegistry.startQueryService(metricQueryServiceRpcService, null); final String hostname = RpcUtils.getHostname(commonRpcService); processMetricGroup = MetricUtils.instantiateProcessMetricGroup( metricRegistry, hostname, ConfigurationUtils.getSystemResourceMetricsProbingInterval(configuration)); archivedExecutionGraphStore = createSerializableExecutionGraphStore(configuration, commonRpcService.getScheduledExecutor()); } } @Nonnull private RpcService createRpcService(Configuration configuration, String bindAddress, String portRange) throws Exception { return AkkaRpcServiceUtils.createRpcService(bindAddress, portRange, configuration); } /** * Returns the port range for the common {@link RpcService}. 
* * @param configuration to extract the port range from * @return Port range for the common {@link RpcService} */ protected String getRPCPortRange(Configuration configuration) { if (ZooKeeperUtils.isZooKeeperRecoveryMode(configuration)) { return configuration.getString(HighAvailabilityOptions.HA_JOB_MANAGER_PORT_RANGE); } else { return String.valueOf(configuration.getInteger(JobManagerOptions.PORT)); } } protected HighAvailabilityServices createHaServices( Configuration configuration, Executor executor) throws Exception { return HighAvailabilityServicesUtils.createHighAvailabilityServices( configuration, executor, HighAvailabilityServicesUtils.AddressResolution.NO_ADDRESS_RESOLUTION); } protected HeartbeatServices createHeartbeatServices(Configuration configuration) { return HeartbeatServices.fromConfiguration(configuration); } protected MetricRegistryImpl createMetricRegistry(Configuration configuration, PluginManager pluginManager) { return new MetricRegistryImpl( MetricRegistryConfiguration.fromConfiguration(configuration), ReporterSetup.fromConfiguration(configuration, pluginManager)); } @Override public CompletableFuture<Void> closeAsync() { return shutDownAsync( ApplicationStatus.UNKNOWN, "Cluster entrypoint has been closed externally.", true).thenAccept(ignored -> {}); } protected CompletableFuture<Void> stopClusterServices(boolean cleanupHaData) { final long shutdownTimeout = configuration.getLong(ClusterOptions.CLUSTER_SERVICES_SHUTDOWN_TIMEOUT); synchronized (lock) { Throwable exception = null; final Collection<CompletableFuture<Void>> terminationFutures = new ArrayList<>(3); if (blobServer != null) { try { blobServer.close(); } catch (Throwable t) { exception = ExceptionUtils.firstOrSuppressed(t, exception); } } if (haServices != null) { try { if (cleanupHaData) { haServices.closeAndCleanupAllData(); } else { haServices.close(); } } catch (Throwable t) { exception = ExceptionUtils.firstOrSuppressed(t, exception); } } if (archivedExecutionGraphStore != 
null) { try { archivedExecutionGraphStore.close(); } catch (Throwable t) { exception = ExceptionUtils.firstOrSuppressed(t, exception); } } if (processMetricGroup != null) { processMetricGroup.close(); } if (metricRegistry != null) { terminationFutures.add(metricRegistry.shutdown()); } if (ioExecutor != null) { terminationFutures.add(ExecutorUtils.nonBlockingShutdown(shutdownTimeout, TimeUnit.MILLISECONDS, ioExecutor)); } if (commonRpcService != null) { terminationFutures.add(commonRpcService.stopService()); } if (exception != null) { terminationFutures.add(FutureUtils.completedExceptionally(exception)); } return FutureUtils.completeAll(terminationFutures); } } @Override public void onFatalError(Throwable exception) { LOG.error("Fatal error occurred in the cluster entrypoint.", exception); System.exit(RUNTIME_FAILURE_RETURN_CODE); } private Configuration generateClusterConfiguration(Configuration configuration) { final Configuration resultConfiguration = new Configuration(Preconditions.checkNotNull(configuration)); final String webTmpDir = configuration.getString(WebOptions.TMP_DIR); final File uniqueWebTmpDir = new File(webTmpDir, "flink-web-" + UUID.randomUUID()); resultConfiguration.setString(WebOptions.TMP_DIR, uniqueWebTmpDir.getAbsolutePath()); return resultConfiguration; } private CompletableFuture<ApplicationStatus> shutDownAsync( ApplicationStatus applicationStatus, @Nullable String diagnostics, boolean cleanupHaData) { if (isShutDown.compareAndSet(false, true)) { LOG.info("Shutting {} down with application status {}. 
Diagnostics {}.", getClass().getSimpleName(), applicationStatus, diagnostics); final CompletableFuture<Void> shutDownApplicationFuture = closeClusterComponent(applicationStatus, diagnostics); final CompletableFuture<Void> serviceShutdownFuture = FutureUtils.composeAfterwards( shutDownApplicationFuture, () -> stopClusterServices(cleanupHaData)); final CompletableFuture<Void> cleanupDirectoriesFuture = FutureUtils.runAfterwards( serviceShutdownFuture, this::cleanupDirectories); cleanupDirectoriesFuture.whenComplete( (Void ignored2, Throwable serviceThrowable) -> { if (serviceThrowable != null) { terminationFuture.completeExceptionally(serviceThrowable); } else { terminationFuture.complete(applicationStatus); } }); } return terminationFuture; } /** * Deregister the Flink application from the resource management system by signalling * the {@link ResourceManager}. * * @param applicationStatus to terminate the application with * @param diagnostics additional information about the shut down, can be {@code null} * @return Future which is completed once the shut down */ private CompletableFuture<Void> closeClusterComponent(ApplicationStatus applicationStatus, @Nullable String diagnostics) { synchronized (lock) { if (clusterComponent != null) { return clusterComponent.deregisterApplicationAndClose(applicationStatus, diagnostics); } else { return CompletableFuture.completedFuture(null); } } } /** * Clean up of temporary directories created by the {@link ClusterEntrypoint}. 
* * @throws IOException if the temporary directories could not be cleaned up */ private void cleanupDirectories() throws IOException { ShutdownHookUtil.removeShutdownHook(shutDownHook, getClass().getSimpleName(), LOG); final String webTmpDir = configuration.getString(WebOptions.TMP_DIR); FileUtils.deleteDirectory(new File(webTmpDir)); } protected abstract DispatcherResourceManagerComponentFactory createDispatcherResourceManagerComponentFactory(Configuration configuration) throws IOException; protected abstract ArchivedExecutionGraphStore createSerializableExecutionGraphStore( Configuration configuration, ScheduledExecutor scheduledExecutor) throws IOException; protected static EntrypointClusterConfiguration parseArguments(String[] args) throws FlinkParseException { final CommandLineParser<EntrypointClusterConfiguration> clusterConfigurationParser = new CommandLineParser<>(new EntrypointClusterConfigurationParserFactory()); return clusterConfigurationParser.parse(args); } protected static Configuration loadConfiguration(EntrypointClusterConfiguration entrypointClusterConfiguration) { final Configuration dynamicProperties = ConfigurationUtils.createConfiguration(entrypointClusterConfiguration.getDynamicProperties()); final Configuration configuration = GlobalConfiguration.loadConfiguration(entrypointClusterConfiguration.getConfigDir(), dynamicProperties); final int restPort = entrypointClusterConfiguration.getRestPort(); if (restPort >= 0) { configuration.setInteger(RestOptions.PORT, restPort); } final String hostname = entrypointClusterConfiguration.getHostname(); if (hostname != null) { configuration.setString(JobManagerOptions.ADDRESS, hostname); } return configuration; } public static void runClusterEntrypoint(ClusterEntrypoint clusterEntrypoint) { final String clusterEntrypointName = clusterEntrypoint.getClass().getSimpleName(); try { clusterEntrypoint.startCluster(); } catch (ClusterEntrypointException e) { LOG.error(String.format("Could not start cluster 
entrypoint %s.", clusterEntrypointName), e); System.exit(STARTUP_FAILURE_RETURN_CODE); } clusterEntrypoint.getTerminationFuture().whenComplete((applicationStatus, throwable) -> { final int returnCode; if (throwable != null) { returnCode = RUNTIME_FAILURE_RETURN_CODE; } else { returnCode = applicationStatus.processExitCode(); } LOG.info("Terminating cluster entrypoint process {} with exit code {}.", clusterEntrypointName, returnCode, throwable); System.exit(returnCode); }); } /** * Execution mode of the {@link MiniDispatcher}. */ public enum ExecutionMode { /** * Waits until the job result has been served. */ NORMAL, /** * Directly stops after the job has finished. */ DETACHED } }
class ClusterEntrypoint implements AutoCloseableAsync, FatalErrorHandler { public static final ConfigOption<String> EXECUTION_MODE = ConfigOptions .key("internal.cluster.execution-mode") .defaultValue(ExecutionMode.NORMAL.toString()); protected static final Logger LOG = LoggerFactory.getLogger(ClusterEntrypoint.class); protected static final int STARTUP_FAILURE_RETURN_CODE = 1; protected static final int RUNTIME_FAILURE_RETURN_CODE = 2; private static final Time INITIALIZATION_SHUTDOWN_TIMEOUT = Time.seconds(30L); /** The lock to guard startup / shutdown / manipulation methods. */ private final Object lock = new Object(); private final Configuration configuration; private final CompletableFuture<ApplicationStatus> terminationFuture; private final AtomicBoolean isShutDown = new AtomicBoolean(false); @GuardedBy("lock") private DispatcherResourceManagerComponent clusterComponent; @GuardedBy("lock") private MetricRegistryImpl metricRegistry; @GuardedBy("lock") private ProcessMetricGroup processMetricGroup; @GuardedBy("lock") private HighAvailabilityServices haServices; @GuardedBy("lock") private BlobServer blobServer; @GuardedBy("lock") private HeartbeatServices heartbeatServices; @GuardedBy("lock") private RpcService commonRpcService; @GuardedBy("lock") private ExecutorService ioExecutor; private ArchivedExecutionGraphStore archivedExecutionGraphStore; private final Thread shutDownHook; protected ClusterEntrypoint(Configuration configuration) { this.configuration = generateClusterConfiguration(configuration); this.terminationFuture = new CompletableFuture<>(); shutDownHook = ShutdownHookUtil.addShutdownHook(this::cleanupDirectories, getClass().getSimpleName(), LOG); } public CompletableFuture<ApplicationStatus> getTerminationFuture() { return terminationFuture; } private void configureFileSystems(Configuration configuration, PluginManager pluginManager) { LOG.info("Install default filesystem."); FileSystem.initialize(configuration, pluginManager); } private 
SecurityContext installSecurityContext(Configuration configuration) throws Exception { LOG.info("Install security context."); SecurityUtils.install(new SecurityConfiguration(configuration)); return SecurityUtils.getInstalledContext(); } private void runCluster(Configuration configuration, PluginManager pluginManager) throws Exception { synchronized (lock) { initializeServices(configuration, pluginManager); configuration.setString(JobManagerOptions.ADDRESS, commonRpcService.getAddress()); configuration.setInteger(JobManagerOptions.PORT, commonRpcService.getPort()); final DispatcherResourceManagerComponentFactory dispatcherResourceManagerComponentFactory = createDispatcherResourceManagerComponentFactory(configuration); clusterComponent = dispatcherResourceManagerComponentFactory.create( configuration, ioExecutor, commonRpcService, haServices, blobServer, heartbeatServices, metricRegistry, archivedExecutionGraphStore, new RpcMetricQueryServiceRetriever(metricRegistry.getMetricQueryServiceRpcService()), this); clusterComponent.getShutDownFuture().whenComplete( (ApplicationStatus applicationStatus, Throwable throwable) -> { if (throwable != null) { shutDownAsync( ApplicationStatus.UNKNOWN, ExceptionUtils.stringifyException(throwable), false); } else { shutDownAsync( applicationStatus, null, true); } }); } } protected void initializeServices(Configuration configuration, PluginManager pluginManager) throws Exception { LOG.info("Initializing cluster services."); synchronized (lock) { commonRpcService = AkkaRpcServiceUtils.createRemoteRpcService( configuration, configuration.getString(JobManagerOptions.ADDRESS), getRPCPortRange(configuration), configuration.getString(JobManagerOptions.BIND_HOST), configuration.getOptional(JobManagerOptions.RPC_BIND_PORT)); configuration.setString(JobManagerOptions.ADDRESS, commonRpcService.getAddress()); configuration.setInteger(JobManagerOptions.PORT, commonRpcService.getPort()); ioExecutor = Executors.newFixedThreadPool( 
Hardware.getNumberCPUCores(), new ExecutorThreadFactory("cluster-io")); haServices = createHaServices(configuration, ioExecutor); blobServer = new BlobServer(configuration, haServices.createBlobStore()); blobServer.start(); heartbeatServices = createHeartbeatServices(configuration); metricRegistry = createMetricRegistry(configuration, pluginManager); final RpcService metricQueryServiceRpcService = MetricUtils.startMetricsRpcService(configuration, commonRpcService.getAddress()); metricRegistry.startQueryService(metricQueryServiceRpcService, null); final String hostname = RpcUtils.getHostname(commonRpcService); processMetricGroup = MetricUtils.instantiateProcessMetricGroup( metricRegistry, hostname, ConfigurationUtils.getSystemResourceMetricsProbingInterval(configuration)); archivedExecutionGraphStore = createSerializableExecutionGraphStore(configuration, commonRpcService.getScheduledExecutor()); } } /** * Returns the port range for the common {@link RpcService}. * * @param configuration to extract the port range from * @return Port range for the common {@link RpcService} */ protected String getRPCPortRange(Configuration configuration) { if (ZooKeeperUtils.isZooKeeperRecoveryMode(configuration)) { return configuration.getString(HighAvailabilityOptions.HA_JOB_MANAGER_PORT_RANGE); } else { return String.valueOf(configuration.getInteger(JobManagerOptions.PORT)); } } protected HighAvailabilityServices createHaServices( Configuration configuration, Executor executor) throws Exception { return HighAvailabilityServicesUtils.createHighAvailabilityServices( configuration, executor, HighAvailabilityServicesUtils.AddressResolution.NO_ADDRESS_RESOLUTION); } protected HeartbeatServices createHeartbeatServices(Configuration configuration) { return HeartbeatServices.fromConfiguration(configuration); } protected MetricRegistryImpl createMetricRegistry(Configuration configuration, PluginManager pluginManager) { return new MetricRegistryImpl( 
MetricRegistryConfiguration.fromConfiguration(configuration), ReporterSetup.fromConfiguration(configuration, pluginManager)); } @Override public CompletableFuture<Void> closeAsync() { return shutDownAsync( ApplicationStatus.UNKNOWN, "Cluster entrypoint has been closed externally.", true).thenAccept(ignored -> {}); } protected CompletableFuture<Void> stopClusterServices(boolean cleanupHaData) { final long shutdownTimeout = configuration.getLong(ClusterOptions.CLUSTER_SERVICES_SHUTDOWN_TIMEOUT); synchronized (lock) { Throwable exception = null; final Collection<CompletableFuture<Void>> terminationFutures = new ArrayList<>(3); if (blobServer != null) { try { blobServer.close(); } catch (Throwable t) { exception = ExceptionUtils.firstOrSuppressed(t, exception); } } if (haServices != null) { try { if (cleanupHaData) { haServices.closeAndCleanupAllData(); } else { haServices.close(); } } catch (Throwable t) { exception = ExceptionUtils.firstOrSuppressed(t, exception); } } if (archivedExecutionGraphStore != null) { try { archivedExecutionGraphStore.close(); } catch (Throwable t) { exception = ExceptionUtils.firstOrSuppressed(t, exception); } } if (processMetricGroup != null) { processMetricGroup.close(); } if (metricRegistry != null) { terminationFutures.add(metricRegistry.shutdown()); } if (ioExecutor != null) { terminationFutures.add(ExecutorUtils.nonBlockingShutdown(shutdownTimeout, TimeUnit.MILLISECONDS, ioExecutor)); } if (commonRpcService != null) { terminationFutures.add(commonRpcService.stopService()); } if (exception != null) { terminationFutures.add(FutureUtils.completedExceptionally(exception)); } return FutureUtils.completeAll(terminationFutures); } } @Override public void onFatalError(Throwable exception) { LOG.error("Fatal error occurred in the cluster entrypoint.", exception); System.exit(RUNTIME_FAILURE_RETURN_CODE); } private Configuration generateClusterConfiguration(Configuration configuration) { final Configuration resultConfiguration = new 
Configuration(Preconditions.checkNotNull(configuration)); final String webTmpDir = configuration.getString(WebOptions.TMP_DIR); final File uniqueWebTmpDir = new File(webTmpDir, "flink-web-" + UUID.randomUUID()); resultConfiguration.setString(WebOptions.TMP_DIR, uniqueWebTmpDir.getAbsolutePath()); return resultConfiguration; } private CompletableFuture<ApplicationStatus> shutDownAsync( ApplicationStatus applicationStatus, @Nullable String diagnostics, boolean cleanupHaData) { if (isShutDown.compareAndSet(false, true)) { LOG.info("Shutting {} down with application status {}. Diagnostics {}.", getClass().getSimpleName(), applicationStatus, diagnostics); final CompletableFuture<Void> shutDownApplicationFuture = closeClusterComponent(applicationStatus, diagnostics); final CompletableFuture<Void> serviceShutdownFuture = FutureUtils.composeAfterwards( shutDownApplicationFuture, () -> stopClusterServices(cleanupHaData)); final CompletableFuture<Void> cleanupDirectoriesFuture = FutureUtils.runAfterwards( serviceShutdownFuture, this::cleanupDirectories); cleanupDirectoriesFuture.whenComplete( (Void ignored2, Throwable serviceThrowable) -> { if (serviceThrowable != null) { terminationFuture.completeExceptionally(serviceThrowable); } else { terminationFuture.complete(applicationStatus); } }); } return terminationFuture; } /** * Deregister the Flink application from the resource management system by signalling * the {@link ResourceManager}. 
* * @param applicationStatus to terminate the application with * @param diagnostics additional information about the shut down, can be {@code null} * @return Future which is completed once the shut down */ private CompletableFuture<Void> closeClusterComponent(ApplicationStatus applicationStatus, @Nullable String diagnostics) { synchronized (lock) { if (clusterComponent != null) { return clusterComponent.deregisterApplicationAndClose(applicationStatus, diagnostics); } else { return CompletableFuture.completedFuture(null); } } } /** * Clean up of temporary directories created by the {@link ClusterEntrypoint}. * * @throws IOException if the temporary directories could not be cleaned up */ private void cleanupDirectories() throws IOException { ShutdownHookUtil.removeShutdownHook(shutDownHook, getClass().getSimpleName(), LOG); final String webTmpDir = configuration.getString(WebOptions.TMP_DIR); FileUtils.deleteDirectory(new File(webTmpDir)); } protected abstract DispatcherResourceManagerComponentFactory createDispatcherResourceManagerComponentFactory(Configuration configuration) throws IOException; protected abstract ArchivedExecutionGraphStore createSerializableExecutionGraphStore( Configuration configuration, ScheduledExecutor scheduledExecutor) throws IOException; protected static EntrypointClusterConfiguration parseArguments(String[] args) throws FlinkParseException { final CommandLineParser<EntrypointClusterConfiguration> clusterConfigurationParser = new CommandLineParser<>(new EntrypointClusterConfigurationParserFactory()); return clusterConfigurationParser.parse(args); } protected static Configuration loadConfiguration(EntrypointClusterConfiguration entrypointClusterConfiguration) { final Configuration dynamicProperties = ConfigurationUtils.createConfiguration(entrypointClusterConfiguration.getDynamicProperties()); final Configuration configuration = GlobalConfiguration.loadConfiguration(entrypointClusterConfiguration.getConfigDir(), dynamicProperties); final 
int restPort = entrypointClusterConfiguration.getRestPort(); if (restPort >= 0) { configuration.setInteger(RestOptions.PORT, restPort); } final String hostname = entrypointClusterConfiguration.getHostname(); if (hostname != null) { configuration.setString(JobManagerOptions.ADDRESS, hostname); } return configuration; } public static void runClusterEntrypoint(ClusterEntrypoint clusterEntrypoint) { final String clusterEntrypointName = clusterEntrypoint.getClass().getSimpleName(); try { clusterEntrypoint.startCluster(); } catch (ClusterEntrypointException e) { LOG.error(String.format("Could not start cluster entrypoint %s.", clusterEntrypointName), e); System.exit(STARTUP_FAILURE_RETURN_CODE); } clusterEntrypoint.getTerminationFuture().whenComplete((applicationStatus, throwable) -> { final int returnCode; if (throwable != null) { returnCode = RUNTIME_FAILURE_RETURN_CODE; } else { returnCode = applicationStatus.processExitCode(); } LOG.info("Terminating cluster entrypoint process {} with exit code {}.", clusterEntrypointName, returnCode, throwable); System.exit(returnCode); }); } /** * Execution mode of the {@link MiniDispatcher}. */ public enum ExecutionMode { /** * Waits until the job result has been served. */ NORMAL, /** * Directly stops after the job has finished. */ DETACHED } }
Sure, I added a link to the Cloud docs about specifying pipeline options. This generalizes across different languages and is a good how-to guide.
public void logHotKeyDetection(String userStepName, Duration hotKeyAge) { if (isThrottled()) { return; } LOG.warn( "A hot key was detected in step '{}' with age of '{}'. This is " + "a symptom of key distribution being skewed. To fix, please inspect your data and " + "pipeline to ensure that elements are evenly distributed across your key space. If " + "you want to log the plain-text key to Cloud Logging please re-run with the " + "`hotKeyLoggingEnabled` pipeline option.", userStepName, TimeUtil.toCloudDuration(hotKeyAge)); }
+ "`hotKeyLoggingEnabled` pipeline option.",
public void logHotKeyDetection(String userStepName, Duration hotKeyAge) { if (isThrottled()) { return; } LOG.warn( "A hot key was detected in step '{}' with age of '{}'. This is " + "a symptom of key distribution being skewed. To fix, please inspect your data and " + "pipeline to ensure that elements are evenly distributed across your key space. If " + "you want to log the plain-text key to Cloud Logging please re-run with the " + "`hotKeyLoggingEnabled` pipeline option. See " + "https: + "information.", userStepName, TimeUtil.toCloudDuration(hotKeyAge)); }
class HotKeyLogger { private final Logger LOG = LoggerFactory.getLogger(HotKeyLogger.class); /** Clock used to either provide real system time or mocked to virtualize time for testing. */ private Clock clock = Clock.SYSTEM; /** * The previous time the HotKeyDetection was logged. This is used to throttle logging to every 5 * minutes. */ private long prevHotKeyDetectionLogMs = 0; /** Throttles logging the detection to every loggingPeriod */ private final Duration loggingPeriod = Duration.standardMinutes(5); HotKeyLogger() {} HotKeyLogger(Clock clock) { this.clock = clock; } /** Logs a detection of the hot key every 5 minutes. */ /** Logs a detection of the hot key every 5 minutes with the given key. */ public void logHotKeyDetection(String userStepName, Duration hotKeyAge, Object hotkey) { if (isThrottled()) { return; } LOG.warn( "A hot key '{}' was detected in step '{}' with age of '{}'. This is " + "a symptom of key distribution being skewed. To fix, please inspect your data and " + "pipeline to ensure that elements are evenly distributed across your key space.", hotkey, userStepName, TimeUtil.toCloudDuration(hotKeyAge)); } /** * Returns true if the class should log the HotKeyMessage. This method throttles logging to every * 5 minutes. */ protected boolean isThrottled() { long nowMs = clock.currentTimeMillis(); if (nowMs - prevHotKeyDetectionLogMs < loggingPeriod.getMillis()) { return true; } prevHotKeyDetectionLogMs = nowMs; return false; } }
class HotKeyLogger { private final Logger LOG = LoggerFactory.getLogger(HotKeyLogger.class); /** Clock used to either provide real system time or mocked to virtualize time for testing. */ private Clock clock = Clock.SYSTEM; /** * The previous time the HotKeyDetection was logged. This is used to throttle logging to every 5 * minutes. */ private long prevHotKeyDetectionLogMs = 0; /** Throttles logging the detection to every loggingPeriod */ private final Duration loggingPeriod = Duration.standardMinutes(5); HotKeyLogger() {} HotKeyLogger(Clock clock) { this.clock = clock; } /** Logs a detection of the hot key every 5 minutes. */ /** Logs a detection of the hot key every 5 minutes with the given key. */ public void logHotKeyDetection(String userStepName, Duration hotKeyAge, Object hotkey) { if (isThrottled()) { return; } LOG.warn( "A hot key '{}' was detected in step '{}' with age of '{}'. This is " + "a symptom of key distribution being skewed. To fix, please inspect your data and " + "pipeline to ensure that elements are evenly distributed across your key space.", hotkey, userStepName, TimeUtil.toCloudDuration(hotKeyAge)); } /** * Returns true if the class should log the HotKeyMessage. This method throttles logging to every * 5 minutes. */ protected boolean isThrottled() { long nowMs = clock.currentTimeMillis(); if (nowMs - prevHotKeyDetectionLogMs < loggingPeriod.getMillis()) { return true; } prevHotKeyDetectionLogMs = nowMs; return false; } }
if you have just `resteasy` or `spring` then there is no problem :)
public QuarkusCommandOutcome execute() throws QuarkusCommandException { Matcher matcher = JAVA_VERSION_PATTERN .matcher(this.javaTarget != null ? this.javaTarget : System.getProperty("java.version", "")); if (matcher.matches() && Integer.parseInt(matcher.group(1)) < 11) { setValue(JAVA_TARGET, "8"); } else { setValue(JAVA_TARGET, "11"); } if (containsSpringWeb(extensions)) { setValue(IS_SPRING, true); if (containsRESTEasy(extensions)) { values.remove(CLASS_NAME); values.remove(PACKAGE_NAME); values.remove(RESOURCE_PATH); } } setValue(EXTENSIONS, extensions); final QuarkusProject quarkusProject = QuarkusProject.of(projectDirPath, platformDescr, buildTool); final QuarkusCommandInvocation invocation = new QuarkusCommandInvocation(quarkusProject, values); if (legacyCodegen) { return new LegacyCreateProjectCommandHandler().execute(invocation); } return new CreateProjectCommandHandler().execute(invocation); }
values.remove(RESOURCE_PATH);
public QuarkusCommandOutcome execute() throws QuarkusCommandException { Matcher matcher = JAVA_VERSION_PATTERN .matcher(this.javaTarget != null ? this.javaTarget : System.getProperty("java.version", "")); if (matcher.matches() && Integer.parseInt(matcher.group(1)) < 11) { setValue(JAVA_TARGET, "8"); } else { setValue(JAVA_TARGET, "11"); } if (containsSpringWeb(extensions)) { setValue(IS_SPRING, true); if (containsRESTEasy(extensions)) { values.remove(CLASS_NAME); values.remove(PACKAGE_NAME); values.remove(RESOURCE_PATH); } } setValue(EXTENSIONS, extensions); final QuarkusProject quarkusProject = QuarkusProject.of(projectDirPath, platformDescr, buildTool); final QuarkusCommandInvocation invocation = new QuarkusCommandInvocation(quarkusProject, values); if (legacyCodegen) { return new LegacyCreateProjectCommandHandler().execute(invocation); } return new CreateProjectCommandHandler().execute(invocation); }
class name"); } setValue(CLASS_NAME, className); return this; } public CreateProject extensions(Set<String> extensions) { if (extensions == null) { return this; } this.extensions.addAll(extensions); return this; }
class name"); } setValue(CLASS_NAME, className); return this; } public CreateProject extensions(Set<String> extensions) { if (extensions == null) { return this; } this.extensions.addAll(extensions); return this; }
From a code review perspective, here are some suggestions for improvement: 1. Naming: - Consider providing more descriptive names for variables, methods, and classes to improve code readability and maintainability. 2. Code Style: - Follow a consistent code style throughout the codebase. For example, use consistent indentation (spaces vs. tabs), line length, and bracket placement. 3. Best Practices: - Provide clear comments explaining the rationale or intention behind certain code blocks or TODOs. - Consider using dependency injection instead of directly creating objects within the method. 4. Bug Risks: - Review error handling and exception handling mechanisms to ensure they are robust and handle edge cases properly. 5. Compatibility: - Ensure compatibility with different versions or variants of the libraries or frameworks being used. - Verify that the code is compatible with the target runtime environment (e.g., Java version, specific dependencies). 6. Simplification: - Simplify code logic where possible to improve clarity and reduce complexity. 7. Optimization: - Evaluate the performance implications of the code and identify potential optimization points. This could include reducing unnecessary object creation or improving algorithm efficiency. Note: Without the actual code implementation and a deeper understanding of the context, it is challenging to provide precise improvement suggestions. A comprehensive code review would require examining the entire codebase, not just this patch.
public IHiveMetastore createHiveMetastore() { HiveMetaClient metaClient = HiveMetaClient.createHiveMetaClient(hdfsEnvironment, properties); IHiveMetastore hiveMetastore = new HiveMetastore(metaClient, catalogName, metastoreType); IHiveMetastore baseHiveMetastore; if (!enableMetastoreCache) { baseHiveMetastore = hiveMetastore; } else { refreshHiveMetastoreExecutor = Executors.newCachedThreadPool( new ThreadFactoryBuilder().setNameFormat("hive-metastore-refresh-%d").build()); baseHiveMetastore = CachingHiveMetastore.createCatalogLevelInstance( hiveMetastore, new ReentrantExecutor(refreshHiveMetastoreExecutor, hmsConf.getCacheRefreshThreadMaxNum()), hmsConf.getCacheTtlSec(), enableHmsEventsIncrementalSync ? NEVER_REFRESH : hmsConf.getCacheRefreshIntervalSec(), hmsConf.getCacheMaxNum(), hmsConf.enableListNamesCache()); } return baseHiveMetastore; }
baseHiveMetastore = hiveMetastore;
public IHiveMetastore createHiveMetastore() { HiveMetaClient metaClient = HiveMetaClient.createHiveMetaClient(hdfsEnvironment, properties); IHiveMetastore hiveMetastore = new HiveMetastore(metaClient, catalogName, metastoreType); IHiveMetastore baseHiveMetastore; if (!enableMetastoreCache) { baseHiveMetastore = hiveMetastore; } else { refreshHiveMetastoreExecutor = Executors.newCachedThreadPool( new ThreadFactoryBuilder().setNameFormat("hive-metastore-refresh-%d").build()); baseHiveMetastore = CachingHiveMetastore.createCatalogLevelInstance( hiveMetastore, new ReentrantExecutor(refreshHiveMetastoreExecutor, hmsConf.getCacheRefreshThreadMaxNum()), hmsConf.getCacheTtlSec(), enableHmsEventsIncrementalSync ? NEVER_REFRESH : hmsConf.getCacheRefreshIntervalSec(), hmsConf.getCacheMaxNum(), hmsConf.enableListNamesCache()); } return baseHiveMetastore; }
class HiveConnectorInternalMgr { public static final List<String> SUPPORTED_METASTORE_TYPE = Lists.newArrayList("hive", "glue", "dlf"); private final String catalogName; private final HdfsEnvironment hdfsEnvironment; private final Map<String, String> properties; private final boolean enableMetastoreCache; private final CachingHiveMetastoreConf hmsConf; private final boolean enableRemoteFileCache; private final CachingRemoteFileConf remoteFileConf; private ExecutorService refreshHiveMetastoreExecutor; private ExecutorService refreshRemoteFileExecutor; private ExecutorService pullRemoteFileExecutor; private ExecutorService updateRemoteFilesExecutor; private ExecutorService updateStatisticsExecutor; private final boolean isRecursive; private final int loadRemoteFileMetadataThreadNum; private final int updateRemoteFileMetadataThreadNum; private final boolean enableHmsEventsIncrementalSync; private final boolean enableBackgroundRefreshHiveMetadata; private final MetastoreType metastoreType; public HiveConnectorInternalMgr(String catalogName, Map<String, String> properties, HdfsEnvironment hdfsEnvironment) { this.catalogName = catalogName; this.properties = properties; this.hdfsEnvironment = hdfsEnvironment; this.enableMetastoreCache = Boolean.parseBoolean(properties.getOrDefault("enable_metastore_cache", "true")); this.hmsConf = new CachingHiveMetastoreConf(properties, "hive"); this.enableRemoteFileCache = Boolean.parseBoolean(properties.getOrDefault("enable_remote_file_cache", "true")); this.remoteFileConf = new CachingRemoteFileConf(properties); this.isRecursive = Boolean.parseBoolean(properties.getOrDefault("enable_recursive_listing", "true")); this.loadRemoteFileMetadataThreadNum = Integer.parseInt(properties.getOrDefault("remote_file_load_thread_num", String.valueOf(Config.remote_file_metadata_load_concurrency))); this.updateRemoteFileMetadataThreadNum = Integer.parseInt(properties.getOrDefault("remote_file_update_thread_num", 
String.valueOf(Config.remote_file_metadata_load_concurrency / 4))); this.enableHmsEventsIncrementalSync = Boolean.parseBoolean(properties.getOrDefault("enable_hms_events_incremental_sync", String.valueOf(Config.enable_hms_events_incremental_sync))); this.enableBackgroundRefreshHiveMetadata = Boolean.parseBoolean(properties.getOrDefault( "enable_background_refresh_connector_metadata", "true")); String hiveMetastoreType = properties.getOrDefault(HIVE_METASTORE_TYPE, "hive").toLowerCase(); if (!SUPPORTED_METASTORE_TYPE.contains(hiveMetastoreType)) { throw new SemanticException("hive metastore type [%s] is not supported", hiveMetastoreType); } if (hiveMetastoreType.equals("hive")) { String hiveMetastoreUris = Preconditions.checkNotNull(properties.get(HIVE_METASTORE_URIS), "%s must be set in properties when creating hive catalog", HIVE_METASTORE_URIS); Util.validateMetastoreUris(hiveMetastoreUris); } this.metastoreType = MetastoreType.get(hiveMetastoreType); } public void shutdown() { if (enableMetastoreCache && refreshHiveMetastoreExecutor != null) { refreshHiveMetastoreExecutor.shutdown(); } if (enableRemoteFileCache && refreshRemoteFileExecutor != null) { refreshRemoteFileExecutor.shutdown(); } if (pullRemoteFileExecutor != null) { pullRemoteFileExecutor.shutdown(); } } public RemoteFileIO createRemoteFileIO() { RemoteFileIO remoteFileIO = new HiveRemoteFileIO(hdfsEnvironment.getConfiguration()); RemoteFileIO baseRemoteFileIO; if (!enableRemoteFileCache) { baseRemoteFileIO = remoteFileIO; } else { refreshRemoteFileExecutor = Executors.newCachedThreadPool( new ThreadFactoryBuilder().setNameFormat("hive-remote-files-refresh-%d").build()); baseRemoteFileIO = CachingRemoteFileIO.createCatalogLevelInstance( remoteFileIO, new ReentrantExecutor(refreshRemoteFileExecutor, remoteFileConf.getRefreshMaxThreadNum()), remoteFileConf.getCacheTtlSec(), enableHmsEventsIncrementalSync ? 
NEVER_REFRESH : remoteFileConf.getCacheRefreshIntervalSec(), remoteFileConf.getCacheMaxSize()); } return baseRemoteFileIO; } public ExecutorService getPullRemoteFileExecutor() { if (pullRemoteFileExecutor == null) { pullRemoteFileExecutor = Executors.newFixedThreadPool(loadRemoteFileMetadataThreadNum, new ThreadFactoryBuilder().setNameFormat("pull-hive-remote-files-%d").build()); } return pullRemoteFileExecutor; } public ExecutorService getupdateRemoteFilesExecutor() { if (updateRemoteFilesExecutor == null) { updateRemoteFilesExecutor = Executors.newFixedThreadPool(updateRemoteFileMetadataThreadNum, new ThreadFactoryBuilder().setNameFormat("update-hive-remote-files-%d").build()); } return updateRemoteFilesExecutor; } public Executor getUpdateStatisticsExecutor() { Executor baseExecutor = Executors.newCachedThreadPool( new ThreadFactoryBuilder().setNameFormat("hive-metastore-update-%d").build()); return new ReentrantExecutor(baseExecutor, remoteFileConf.getRefreshMaxThreadNum()); } public Executor getRefreshOthersFeExecutor() { Executor baseExecutor = Executors.newCachedThreadPool( new ThreadFactoryBuilder().setNameFormat("refresh-others-fe-hive-metadata-cache-%d").build()); return new ReentrantExecutor(baseExecutor, remoteFileConf.getRefreshMaxThreadNum()); } public boolean isSearchRecursive() { return isRecursive; } public CachingHiveMetastoreConf getHiveMetastoreConf() { return hmsConf; } public CachingRemoteFileConf getRemoteFileConf() { return remoteFileConf; } public boolean enableHmsEventsIncrementalSync() { return enableHmsEventsIncrementalSync; } public HdfsEnvironment getHdfsEnvironment() { return hdfsEnvironment; } public boolean isEnableBackgroundRefreshHiveMetadata() { return enableBackgroundRefreshHiveMetadata; } public MetastoreType getMetastoreType() { return metastoreType; } }
class HiveConnectorInternalMgr { public static final List<String> SUPPORTED_METASTORE_TYPE = Lists.newArrayList("hive", "glue", "dlf"); private final String catalogName; private final HdfsEnvironment hdfsEnvironment; private final Map<String, String> properties; private final boolean enableMetastoreCache; private final CachingHiveMetastoreConf hmsConf; private final boolean enableRemoteFileCache; private final CachingRemoteFileConf remoteFileConf; private ExecutorService refreshHiveMetastoreExecutor; private ExecutorService refreshRemoteFileExecutor; private ExecutorService pullRemoteFileExecutor; private ExecutorService updateRemoteFilesExecutor; private ExecutorService updateStatisticsExecutor; private final boolean isRecursive; private final int loadRemoteFileMetadataThreadNum; private final int updateRemoteFileMetadataThreadNum; private final boolean enableHmsEventsIncrementalSync; private final boolean enableBackgroundRefreshHiveMetadata; private final MetastoreType metastoreType; public HiveConnectorInternalMgr(String catalogName, Map<String, String> properties, HdfsEnvironment hdfsEnvironment) { this.catalogName = catalogName; this.properties = properties; this.hdfsEnvironment = hdfsEnvironment; this.enableMetastoreCache = Boolean.parseBoolean(properties.getOrDefault("enable_metastore_cache", "true")); this.hmsConf = new CachingHiveMetastoreConf(properties, "hive"); this.enableRemoteFileCache = Boolean.parseBoolean(properties.getOrDefault("enable_remote_file_cache", "true")); this.remoteFileConf = new CachingRemoteFileConf(properties); this.isRecursive = Boolean.parseBoolean(properties.getOrDefault("enable_recursive_listing", "true")); this.loadRemoteFileMetadataThreadNum = Integer.parseInt(properties.getOrDefault("remote_file_load_thread_num", String.valueOf(Config.remote_file_metadata_load_concurrency))); this.updateRemoteFileMetadataThreadNum = Integer.parseInt(properties.getOrDefault("remote_file_update_thread_num", 
String.valueOf(Config.remote_file_metadata_load_concurrency / 4))); this.enableHmsEventsIncrementalSync = Boolean.parseBoolean(properties.getOrDefault("enable_hms_events_incremental_sync", String.valueOf(Config.enable_hms_events_incremental_sync))); this.enableBackgroundRefreshHiveMetadata = Boolean.parseBoolean(properties.getOrDefault( "enable_background_refresh_connector_metadata", "true")); String hiveMetastoreType = properties.getOrDefault(HIVE_METASTORE_TYPE, "hive").toLowerCase(); if (!SUPPORTED_METASTORE_TYPE.contains(hiveMetastoreType)) { throw new SemanticException("hive metastore type [%s] is not supported", hiveMetastoreType); } if (hiveMetastoreType.equals("hive")) { String hiveMetastoreUris = Preconditions.checkNotNull(properties.get(HIVE_METASTORE_URIS), "%s must be set in properties when creating hive catalog", HIVE_METASTORE_URIS); Util.validateMetastoreUris(hiveMetastoreUris); } this.metastoreType = MetastoreType.get(hiveMetastoreType); } public void shutdown() { if (enableMetastoreCache && refreshHiveMetastoreExecutor != null) { refreshHiveMetastoreExecutor.shutdown(); } if (enableRemoteFileCache && refreshRemoteFileExecutor != null) { refreshRemoteFileExecutor.shutdown(); } if (pullRemoteFileExecutor != null) { pullRemoteFileExecutor.shutdown(); } } public RemoteFileIO createRemoteFileIO() { RemoteFileIO remoteFileIO = new HiveRemoteFileIO(hdfsEnvironment.getConfiguration()); RemoteFileIO baseRemoteFileIO; if (!enableRemoteFileCache) { baseRemoteFileIO = remoteFileIO; } else { refreshRemoteFileExecutor = Executors.newCachedThreadPool( new ThreadFactoryBuilder().setNameFormat("hive-remote-files-refresh-%d").build()); baseRemoteFileIO = CachingRemoteFileIO.createCatalogLevelInstance( remoteFileIO, new ReentrantExecutor(refreshRemoteFileExecutor, remoteFileConf.getRefreshMaxThreadNum()), remoteFileConf.getCacheTtlSec(), enableHmsEventsIncrementalSync ? 
NEVER_REFRESH : remoteFileConf.getCacheRefreshIntervalSec(), remoteFileConf.getCacheMaxSize()); } return baseRemoteFileIO; } public ExecutorService getPullRemoteFileExecutor() { if (pullRemoteFileExecutor == null) { pullRemoteFileExecutor = Executors.newFixedThreadPool(loadRemoteFileMetadataThreadNum, new ThreadFactoryBuilder().setNameFormat("pull-hive-remote-files-%d").build()); } return pullRemoteFileExecutor; } public ExecutorService getupdateRemoteFilesExecutor() { if (updateRemoteFilesExecutor == null) { updateRemoteFilesExecutor = Executors.newFixedThreadPool(updateRemoteFileMetadataThreadNum, new ThreadFactoryBuilder().setNameFormat("update-hive-remote-files-%d").build()); } return updateRemoteFilesExecutor; } public Executor getUpdateStatisticsExecutor() { Executor baseExecutor = Executors.newCachedThreadPool( new ThreadFactoryBuilder().setNameFormat("hive-metastore-update-%d").build()); return new ReentrantExecutor(baseExecutor, remoteFileConf.getRefreshMaxThreadNum()); } public Executor getRefreshOthersFeExecutor() { Executor baseExecutor = Executors.newCachedThreadPool( new ThreadFactoryBuilder().setNameFormat("refresh-others-fe-hive-metadata-cache-%d").build()); return new ReentrantExecutor(baseExecutor, remoteFileConf.getRefreshMaxThreadNum()); } public boolean isSearchRecursive() { return isRecursive; } public CachingHiveMetastoreConf getHiveMetastoreConf() { return hmsConf; } public CachingRemoteFileConf getRemoteFileConf() { return remoteFileConf; } public boolean enableHmsEventsIncrementalSync() { return enableHmsEventsIncrementalSync; } public HdfsEnvironment getHdfsEnvironment() { return hdfsEnvironment; } public boolean isEnableBackgroundRefreshHiveMetadata() { return enableBackgroundRefreshHiveMetadata; } public MetastoreType getMetastoreType() { return metastoreType; } }
done (I can refactor tests to share pairs of inputs later, but anyhow I added the reverse tests)
public static Row recordToRow(Schema schema, Record record) { Row.Builder rowBuilder = Row.withSchema(schema); for (Schema.Field field : schema.getFields()) { switch (field.getType().getTypeName()) { case BYTE: byte byteValue = (byte) record.getField(field.getName()); rowBuilder.addValue(byteValue); break; case INT16: short shortValue = (short) record.getField(field.getName()); rowBuilder.addValue(shortValue); break; case INT32: int intValue = (int) record.getField(field.getName()); rowBuilder.addValue(intValue); break; case INT64: long longValue = (long) record.getField(field.getName()); rowBuilder.addValue(longValue); break; case DECIMAL: rowBuilder.addValue(record.getField(field.getName())); break; case FLOAT: rowBuilder.addValue(record.getField(field.getName())); break; case DOUBLE: rowBuilder.addValue(record.getField(field.getName())); break; case STRING: rowBuilder.addValue(record.getField(field.getName())); break; case DATETIME: long millis = (long) record.getField(field.getName()); rowBuilder.addValue(new DateTime(millis, DateTimeZone.UTC)); break; case BOOLEAN: rowBuilder.addValue(record.getField(field.getName())); break; case BYTES: rowBuilder.addValue(((ByteBuffer) record.getField(field.getName())).array()); break; case ARRAY: throw new UnsupportedOperationException("Array fields are not yet supported."); case ITERABLE: throw new UnsupportedOperationException("Iterable fields are not yet supported."); case MAP: throw new UnsupportedOperationException("Map fields are not yet supported."); case ROW: Record nestedRecord = (Record) record.getField(field.getName()); Schema nestedSchema = checkArgumentNotNull( field.getType().getRowSchema(), "Corrupted schema: Row type did not have associated nested schema."); Row nestedRow = recordToRow(nestedSchema, nestedRecord); rowBuilder.addValue(nestedRow); break; case LOGICAL_TYPE: throw new UnsupportedOperationException( "Cannot convert iceberg field to Beam logical type"); } } return rowBuilder.build(); }
public static Row recordToRow(Schema schema, Record record) { Row.Builder rowBuilder = Row.withSchema(schema); for (Schema.Field field : schema.getFields()) { switch (field.getType().getTypeName()) { case BYTE: byte byteValue = (byte) record.getField(field.getName()); rowBuilder.addValue(byteValue); break; case INT16: short shortValue = (short) record.getField(field.getName()); rowBuilder.addValue(shortValue); break; case INT32: int intValue = (int) record.getField(field.getName()); rowBuilder.addValue(intValue); break; case INT64: long longValue = (long) record.getField(field.getName()); rowBuilder.addValue(longValue); break; case DECIMAL: rowBuilder.addValue(record.getField(field.getName())); break; case FLOAT: rowBuilder.addValue(record.getField(field.getName())); break; case DOUBLE: rowBuilder.addValue(record.getField(field.getName())); break; case STRING: rowBuilder.addValue(record.getField(field.getName())); break; case DATETIME: long millis = (long) record.getField(field.getName()); rowBuilder.addValue(new DateTime(millis, DateTimeZone.UTC)); break; case BOOLEAN: rowBuilder.addValue(record.getField(field.getName())); break; case BYTES: rowBuilder.addValue(((ByteBuffer) record.getField(field.getName())).array()); break; case ARRAY: throw new UnsupportedOperationException("Array fields are not yet supported."); case ITERABLE: throw new UnsupportedOperationException("Iterable fields are not yet supported."); case MAP: throw new UnsupportedOperationException("Map fields are not yet supported."); case ROW: Record nestedRecord = (Record) record.getField(field.getName()); Schema nestedSchema = checkArgumentNotNull( field.getType().getRowSchema(), "Corrupted schema: Row type did not have associated nested schema."); Row nestedRow = recordToRow(nestedSchema, nestedRecord); rowBuilder.addValue(nestedRow); break; case LOGICAL_TYPE: throw new UnsupportedOperationException( "Cannot convert iceberg field to Beam logical type"); } } return rowBuilder.build(); }
class SchemaAndRowConversions { private SchemaAndRowConversions() {} public static final String ICEBERG_TYPE_OPTION_NAME = "icebergTypeID"; public static Schema.FieldType icebergTypeToBeamFieldType(final Type type) { switch (type.typeId()) { case BOOLEAN: return Schema.FieldType.BOOLEAN; case INTEGER: return Schema.FieldType.INT32; case LONG: return Schema.FieldType.INT64; case FLOAT: return Schema.FieldType.FLOAT; case DOUBLE: return Schema.FieldType.DOUBLE; case DATE: case TIME: case TIMESTAMP: return Schema.FieldType.DATETIME; case STRING: return Schema.FieldType.STRING; case UUID: case BINARY: return Schema.FieldType.BYTES; case FIXED: case DECIMAL: return Schema.FieldType.DECIMAL; case STRUCT: return Schema.FieldType.row(icebergStructTypeToBeamSchema(type.asStructType())); case LIST: return Schema.FieldType.iterable( icebergTypeToBeamFieldType(type.asListType().elementType())); case MAP: return Schema.FieldType.map( icebergTypeToBeamFieldType(type.asMapType().keyType()), icebergTypeToBeamFieldType(type.asMapType().valueType())); } throw new RuntimeException("Unrecognized IcebergIO Type"); } public static Schema.Field icebergFieldToBeamField(final Types.NestedField field) { return Schema.Field.of(field.name(), icebergTypeToBeamFieldType(field.type())) .withOptions( Schema.Options.builder() .setOption( ICEBERG_TYPE_OPTION_NAME, Schema.FieldType.STRING, field.type().typeId().name()) .build()) .withNullable(field.isOptional()); } public static Schema icebergSchemaToBeamSchema(final org.apache.iceberg.Schema schema) { Schema.Builder builder = Schema.builder(); for (Types.NestedField f : schema.columns()) { builder.addField(icebergFieldToBeamField(f)); } return builder.build(); } public static Schema icebergStructTypeToBeamSchema(final Types.StructType struct) { Schema.Builder builder = Schema.builder(); for (Types.NestedField f : struct.fields()) { builder.addField(icebergFieldToBeamField(f)); } return builder.build(); } public static Types.NestedField 
beamFieldToIcebergField(int fieldId, final Schema.Field field) { String typeId = field.getOptions().getValue(ICEBERG_TYPE_OPTION_NAME, String.class); if (typeId != null) { return Types.NestedField.of( fieldId, field.getType().getNullable(), field.getName(), Types.fromPrimitiveString(typeId)); } else { return Types.NestedField.of( fieldId, field.getType().getNullable(), field.getName(), Types.StringType.get()); } } public static org.apache.iceberg.Schema beamSchemaToIcebergSchema(final Schema schema) { Types.NestedField[] fields = new Types.NestedField[schema.getFieldCount()]; int fieldId = 0; for (Schema.Field f : schema.getFields()) { fields[fieldId++] = beamFieldToIcebergField(fieldId, f); } return new org.apache.iceberg.Schema(fields); } public static Record rowToRecord(org.apache.iceberg.Schema schema, Row row) { return copyRowIntoRecord(GenericRecord.create(schema), row); } private static Record copyRowIntoRecord(Record baseRecord, Row value) { Record rec = baseRecord.copy(); for (Types.NestedField f : rec.struct().fields()) { copyFieldIntoRecord(rec, f, value); } return rec; } private static void copyFieldIntoRecord(Record rec, Types.NestedField field, Row value) { String name = field.name(); switch (field.type().typeId()) { case BOOLEAN: Optional.ofNullable(value.getBoolean(name)).ifPresent(v -> rec.setField(name, v)); break; case INTEGER: Optional.ofNullable(value.getInt32(name)).ifPresent(v -> rec.setField(name, v)); break; case LONG: Optional.ofNullable(value.getInt64(name)).ifPresent(v -> rec.setField(name, v)); break; case FLOAT: Optional.ofNullable(value.getFloat(name)).ifPresent(v -> rec.setField(name, v)); break; case DOUBLE: Optional.ofNullable(value.getDouble(name)).ifPresent(v -> rec.setField(name, v)); break; case DATE: throw new UnsupportedOperationException("Date fields not yet supported"); case TIME: throw new UnsupportedOperationException("Time fields not yet supported"); case TIMESTAMP: Optional.ofNullable(value.getDateTime(name)) 
.ifPresent(v -> rec.setField(name, v.getMillis())); break; case STRING: Optional.ofNullable(value.getString(name)).ifPresent(v -> rec.setField(name, v)); break; case UUID: Optional.ofNullable(value.getBytes(name)) .ifPresent(v -> rec.setField(name, UUID.nameUUIDFromBytes(v))); break; case FIXED: throw new UnsupportedOperationException("Fixed-precision fields are not yet supported."); case BINARY: Optional.ofNullable(value.getBytes(name)) .ifPresent(v -> rec.setField(name, ByteBuffer.wrap(v))); break; case DECIMAL: Optional.ofNullable(value.getDecimal(name)).ifPresent(v -> rec.setField(name, v)); break; case STRUCT: Optional.ofNullable(value.getRow(name)) .ifPresent( row -> rec.setField( name, copyRowIntoRecord(GenericRecord.create(field.type().asStructType()), row))); break; case LIST: throw new UnsupportedOperationException("List fields are not yet supported."); case MAP: throw new UnsupportedOperationException("Map fields are not yet supported."); } } }
class SchemaAndRowConversions { private SchemaAndRowConversions() {} public static final String ICEBERG_TYPE_OPTION_NAME = "icebergTypeID"; public static Schema.FieldType icebergTypeToBeamFieldType(final Type type) { switch (type.typeId()) { case BOOLEAN: return Schema.FieldType.BOOLEAN; case INTEGER: return Schema.FieldType.INT32; case LONG: return Schema.FieldType.INT64; case FLOAT: return Schema.FieldType.FLOAT; case DOUBLE: return Schema.FieldType.DOUBLE; case DATE: case TIME: case TIMESTAMP: return Schema.FieldType.DATETIME; case STRING: return Schema.FieldType.STRING; case UUID: case BINARY: return Schema.FieldType.BYTES; case FIXED: case DECIMAL: return Schema.FieldType.DECIMAL; case STRUCT: return Schema.FieldType.row(icebergStructTypeToBeamSchema(type.asStructType())); case LIST: return Schema.FieldType.iterable( icebergTypeToBeamFieldType(type.asListType().elementType())); case MAP: return Schema.FieldType.map( icebergTypeToBeamFieldType(type.asMapType().keyType()), icebergTypeToBeamFieldType(type.asMapType().valueType())); } throw new RuntimeException("Unrecognized IcebergIO Type"); } public static Schema.Field icebergFieldToBeamField(final Types.NestedField field) { return Schema.Field.of(field.name(), icebergTypeToBeamFieldType(field.type())) .withOptions( Schema.Options.builder() .setOption( ICEBERG_TYPE_OPTION_NAME, Schema.FieldType.STRING, field.type().typeId().name()) .build()) .withNullable(field.isOptional()); } public static Schema icebergSchemaToBeamSchema(final org.apache.iceberg.Schema schema) { Schema.Builder builder = Schema.builder(); for (Types.NestedField f : schema.columns()) { builder.addField(icebergFieldToBeamField(f)); } return builder.build(); } public static Schema icebergStructTypeToBeamSchema(final Types.StructType struct) { Schema.Builder builder = Schema.builder(); for (Types.NestedField f : struct.fields()) { builder.addField(icebergFieldToBeamField(f)); } return builder.build(); } public static Types.NestedField 
beamFieldToIcebergField(int fieldId, final Schema.Field field) { String typeId = field.getOptions().getValue(ICEBERG_TYPE_OPTION_NAME, String.class); if (typeId != null) { return Types.NestedField.of( fieldId, field.getType().getNullable(), field.getName(), Types.fromPrimitiveString(typeId)); } else { return Types.NestedField.of( fieldId, field.getType().getNullable(), field.getName(), Types.StringType.get()); } } public static org.apache.iceberg.Schema beamSchemaToIcebergSchema(final Schema schema) { Types.NestedField[] fields = new Types.NestedField[schema.getFieldCount()]; int fieldId = 0; for (Schema.Field f : schema.getFields()) { fields[fieldId++] = beamFieldToIcebergField(fieldId, f); } return new org.apache.iceberg.Schema(fields); } public static Record rowToRecord(org.apache.iceberg.Schema schema, Row row) { return copyRowIntoRecord(GenericRecord.create(schema), row); } private static Record copyRowIntoRecord(Record baseRecord, Row value) { Record rec = baseRecord.copy(); for (Types.NestedField f : rec.struct().fields()) { copyFieldIntoRecord(rec, f, value); } return rec; } private static void copyFieldIntoRecord(Record rec, Types.NestedField field, Row value) { String name = field.name(); switch (field.type().typeId()) { case BOOLEAN: Optional.ofNullable(value.getBoolean(name)).ifPresent(v -> rec.setField(name, v)); break; case INTEGER: Optional.ofNullable(value.getInt32(name)).ifPresent(v -> rec.setField(name, v)); break; case LONG: Optional.ofNullable(value.getInt64(name)).ifPresent(v -> rec.setField(name, v)); break; case FLOAT: Optional.ofNullable(value.getFloat(name)).ifPresent(v -> rec.setField(name, v)); break; case DOUBLE: Optional.ofNullable(value.getDouble(name)).ifPresent(v -> rec.setField(name, v)); break; case DATE: throw new UnsupportedOperationException("Date fields not yet supported"); case TIME: throw new UnsupportedOperationException("Time fields not yet supported"); case TIMESTAMP: Optional.ofNullable(value.getDateTime(name)) 
.ifPresent(v -> rec.setField(name, v.getMillis())); break; case STRING: Optional.ofNullable(value.getString(name)).ifPresent(v -> rec.setField(name, v)); break; case UUID: Optional.ofNullable(value.getBytes(name)) .ifPresent(v -> rec.setField(name, UUID.nameUUIDFromBytes(v))); break; case FIXED: throw new UnsupportedOperationException("Fixed-precision fields are not yet supported."); case BINARY: Optional.ofNullable(value.getBytes(name)) .ifPresent(v -> rec.setField(name, ByteBuffer.wrap(v))); break; case DECIMAL: Optional.ofNullable(value.getDecimal(name)).ifPresent(v -> rec.setField(name, v)); break; case STRUCT: Optional.ofNullable(value.getRow(name)) .ifPresent( row -> rec.setField( name, copyRowIntoRecord(GenericRecord.create(field.type().asStructType()), row))); break; case LIST: throw new UnsupportedOperationException("List fields are not yet supported."); case MAP: throw new UnsupportedOperationException("Map fields are not yet supported."); } } }
please add a unit test for the change. See `PartitionKeyInternalTest` for existing tests.
public void serialize(PartitionKeyInternal partitionKey, JsonGenerator writer, SerializerProvider serializerProvider) { try { if (partitionKey.equals(PartitionKeyInternal.getExclusiveMaximum())) { writer.writeString(INFINITY); return; } if (partitionKey.getComponents() != null) { writer.writeStartArray(); for (IPartitionKeyComponent componentValue : partitionKey.getComponents()) { componentValue.jsonEncode(writer); } writer.writeEndArray(); } } catch (IOException e) { throw new IllegalStateException(e); } }
writer.writeEndArray();
public void serialize(PartitionKeyInternal partitionKey, JsonGenerator writer, SerializerProvider serializerProvider) { try { if (partitionKey.equals(PartitionKeyInternal.getExclusiveMaximum())) { writer.writeString(INFINITY); return; } if (partitionKey.getComponents() != null) { writer.writeStartArray(); for (IPartitionKeyComponent componentValue : partitionKey.getComponents()) { componentValue.jsonEncode(writer); } writer.writeEndArray(); } } catch (IOException e) { throw new IllegalStateException(e); } }
class PartitionKeyInternalJsonSerializer extends StdSerializer<PartitionKeyInternal> { private static final long serialVersionUID = 2258093043805843865L; protected PartitionKeyInternalJsonSerializer() { this(null); } protected PartitionKeyInternalJsonSerializer(Class<PartitionKeyInternal> t) { super(t); } @Override static void jsonEncode(MinNumberPartitionKeyComponent component, JsonGenerator writer) { jsonEncodeLimit(writer, MIN_NUMBER); } static void jsonEncode(MaxNumberPartitionKeyComponent component, JsonGenerator writer) { jsonEncodeLimit(writer, MAX_NUMBER); } static void jsonEncode(MinStringPartitionKeyComponent component, JsonGenerator writer) { jsonEncodeLimit(writer, MIN_STRING); } static void jsonEncode(MaxStringPartitionKeyComponent component, JsonGenerator writer) { jsonEncodeLimit(writer, MAX_STRING); } private static void jsonEncodeLimit(JsonGenerator writer, String value) { try { writer.writeStartObject(); writer.writeFieldName(TYPE); writer.writeString(value); writer.writeEndObject(); } catch (IOException e) { throw new IllegalStateException(e); } } }
/**
 * Jackson serializer for {@code PartitionKeyInternal}; also hosts static helpers
 * that encode the min/max number and string range-limit components.
 */
class PartitionKeyInternalJsonSerializer extends StdSerializer<PartitionKeyInternal> {
    private static final long serialVersionUID = 2258093043805843865L;

    protected PartitionKeyInternalJsonSerializer() {
        this(null);
    }

    protected PartitionKeyInternalJsonSerializer(Class<PartitionKeyInternal> t) {
        super(t);
    }

    // NOTE(review): this @Override appears orphaned — the overridden serialize(...)
    // method seems to have been elided from this excerpt; confirm against the full source.
    @Override
    static void jsonEncode(MinNumberPartitionKeyComponent component, JsonGenerator writer) {
        jsonEncodeLimit(writer, MIN_NUMBER);
    }

    static void jsonEncode(MaxNumberPartitionKeyComponent component, JsonGenerator writer) {
        jsonEncodeLimit(writer, MAX_NUMBER);
    }

    static void jsonEncode(MinStringPartitionKeyComponent component, JsonGenerator writer) {
        jsonEncodeLimit(writer, MIN_STRING);
    }

    static void jsonEncode(MaxStringPartitionKeyComponent component, JsonGenerator writer) {
        jsonEncodeLimit(writer, MAX_STRING);
    }

    // Writes a range-limit sentinel as a one-field JSON object whose field name is
    // the TYPE constant and whose value is the given marker string.
    private static void jsonEncodeLimit(JsonGenerator writer, String value) {
        try {
            writer.writeStartObject();
            writer.writeFieldName(TYPE);
            writer.writeString(value);
            writer.writeEndObject();
        } catch (IOException e) {
            // JsonGenerator throws checked IOException; surface it unchecked.
            throw new IllegalStateException(e);
        }
    }
}
```suggestion
// if they are found before 'By'
```
Should this comment be reworded to "if they are found before 'By'"?
public Result parse(MethodInfo methodInfo) { String methodName = methodInfo.name(); ClassInfo repositoryClassInfo = methodInfo.declaringClass(); String repositoryMethodDescription = "'" + methodName + "' of repository '" + repositoryClassInfo + "'"; QueryType queryType = getType(methodName); if (queryType == null) { throw new UnableToParseMethodException("Method " + repositoryMethodDescription + " cannot be parsed. Did you forget to annotate the method with '@Query'?"); } int byIndex = methodName.indexOf("By"); if ((byIndex == -1) || (byIndex + 2 >= methodName.length())) { throw new UnableToParseMethodException("Method " + repositoryMethodDescription + " cannot be parsed as there is no proper 'By' clause in the name."); } Integer topCount = null; int firstIndex = methodName.indexOf("First"); int topIndex = methodName.indexOf("Top"); if ((firstIndex != -1) || (topIndex != -1)) { int maxFirstAndTopIndex = Math.max(firstIndex, topIndex); if (maxFirstAndTopIndex < byIndex) { try { String topCountStr = methodName.substring(maxFirstAndTopIndex, byIndex) .replace("Top", "").replace("First", ""); if (topCountStr.isEmpty()) { topCount = 1; } else { topCount = Integer.valueOf(topCountStr); } } catch (Exception e) { throw new UnableToParseMethodException( "Unable to parse query with limiting results clause. Offending method is " + repositoryMethodDescription + "."); } } } if ((topCount != null) && (queryType != QueryType.SELECT)) { throw new UnableToParseMethodException( "When 'Top' or 'First' is specified, the query must be a find query. Offending method is " + repositoryMethodDescription + "."); } if (methodName.substring(0, byIndex).contains("Distinct")) { throw new UnableToParseMethodException( "Distinct is not yet supported. 
Offending method is " + repositoryMethodDescription + "."); } String afterByPart = methodName.substring(byIndex + 2); boolean allIgnoreCase = false; if (afterByPart.contains(ALL_IGNORE_CASE)) { allIgnoreCase = true; afterByPart = afterByPart.replace(ALL_IGNORE_CASE, ""); } Sort sort = null; if (containsLogicOperator(afterByPart, ORDER_BY)) { int orderByIndex = afterByPart.indexOf(ORDER_BY); if (orderByIndex + ORDER_BY.length() == afterByPart.length()) { throw new UnableToParseMethodException( "A field must by supplied after 'OrderBy' . Offending method is " + repositoryMethodDescription + "."); } String afterOrderByPart = afterByPart.substring(orderByIndex + ORDER_BY.length()); afterByPart = afterByPart.substring(0, orderByIndex); boolean ascending = true; if (afterOrderByPart.endsWith("Asc")) { ascending = true; afterOrderByPart = afterOrderByPart.replace("Asc", ""); } else if (afterOrderByPart.endsWith("Desc")) { ascending = false; afterOrderByPart = afterOrderByPart.replace("Desc", ""); } String orderField = lowerFirstLetter(afterOrderByPart); if (!entityContainsField(orderField)) { throw new UnableToParseMethodException( "Field " + orderField + " which was configured as the order field does not exist in the entity. Offending method is " + repositoryMethodDescription + "."); } if (ascending) { sort = Sort.ascending(orderField); } else { sort = Sort.descending(orderField); } } List<String> parts = Collections.singletonList(afterByPart); boolean containsAnd = containsLogicOperator(afterByPart, "And"); boolean containsOr = containsLogicOperator(afterByPart, "Or"); if (containsAnd && containsOr) { throw new UnableToParseMethodException( "'And' and 'Or' clauses cannot be mixed in a method name - Try specifying the Query with the @Query annotation. 
Offending method is " + repositoryMethodDescription + "."); } if (containsAnd) { parts = Arrays.asList(afterByPart.split("And")); } else if (containsOr) { parts = Arrays.asList(afterByPart.split("Or")); } MutableReference<List<ClassInfo>> mappedSuperClassInfoRef = MutableReference.of(mappedSuperClassInfos); StringBuilder where = new StringBuilder(); int paramsCount = 0; for (String part : parts) { if (part.isEmpty()) { continue; } String fieldName; boolean ignoreCase = false; if (part.endsWith(IGNORE_CASE)) { ignoreCase = true; part = part.replace(IGNORE_CASE, ""); } String operation = getFieldOperation(part); if (operation == null) { fieldName = lowerFirstLetter(part); } else { fieldName = lowerFirstLetter(part.replaceAll(operation, "")); } FieldInfo fieldInfo = getFieldInfo(fieldName, entityClass, mappedSuperClassInfoRef); if (fieldInfo == null) { StringBuilder fieldPathBuilder = new StringBuilder(fieldName.length() + 5); fieldInfo = resolveNestedField(repositoryMethodDescription, fieldName, fieldPathBuilder); fieldName = fieldPathBuilder.toString(); } validateFieldWithOperation(operation, fieldInfo, fieldName, repositoryMethodDescription); if ((ignoreCase || allIgnoreCase) && !DotNames.STRING.equals(fieldInfo.type().name())) { throw new UnableToParseMethodException( "IgnoreCase cannot be specified for field" + fieldInfo.name() + " because it is not a String type. " + "Offending method is " + repositoryMethodDescription + "."); } if (where.length() > 0) { where.append(containsAnd ? " AND " : " OR "); } String upperPrefix = (ignoreCase || allIgnoreCase) ? "UPPER(" : ""; String upperSuffix = (ignoreCase || allIgnoreCase) ? 
")" : ""; where.append(upperPrefix).append(fieldName).append(upperSuffix); if ((operation == null) || "Equals".equals(operation) || "Is".equals(operation)) { paramsCount++; where.append(" = ").append(upperPrefix).append("?").append(paramsCount).append(upperSuffix); } else { switch (operation) { case "IsNot": case "Not": paramsCount++; where.append(" <> ?").append(paramsCount); break; case "IsNull": case "Null": where.append(" IS null "); break; case "IsNotNull": case "NotNull": where.append(" IS NOT null "); break; case "Between": case "IsBetween": where.append(" BETWEEN "); paramsCount++; where.append("?").append(paramsCount).append(" AND "); paramsCount++; where.append("?").append(paramsCount); break; case "LessThan": case "IsLessThan": case "Before": case "IsBefore": paramsCount++; where.append(" < ?").append(paramsCount); break; case "LessThanEqual": case "IsLessThanEqual": paramsCount++; where.append(" <= ?").append(paramsCount); break; case "GreaterThan": case "IsGreaterThan": case "After": case "IsAfter": paramsCount++; where.append(" > ?").append(paramsCount); break; case "GreaterThanEqual": case "IsGreaterThanEqual": paramsCount++; where.append(" >= ?").append(paramsCount); break; case "Like": case "IsLike": paramsCount++; where.append(" LIKE ?").append(paramsCount); break; case "NotLike": case "IsNotLike": paramsCount++; where.append(" NOT LIKE ?").append(paramsCount); break; case "IsStartingWith": case "StartingWith": case "StartsWith": paramsCount++; where.append(" LIKE CONCAT(").append(upperPrefix).append("?").append(paramsCount).append(upperSuffix) .append(", '%')"); break; case "IsEndingWith": case "EndingWith": case "EndsWith": paramsCount++; where.append(" LIKE CONCAT('%', ").append(upperPrefix).append("?").append(paramsCount) .append(upperSuffix).append(")"); break; case "IsContaining": case "Containing": case "Contains": paramsCount++; where.append(" LIKE CONCAT('%', ").append(upperPrefix).append("?").append(paramsCount) 
.append(upperSuffix).append(", '%')"); break; case "True": case "False": where.append(" = ").append(operation.toLowerCase()); break; case "IsIn": case "In": paramsCount++; where.append(" IN ?").append(paramsCount); break; case "IsNotIn": case "NotIn": paramsCount++; where.append(" NOT IN ?").append(paramsCount); break; case "IsEmpty": case "Empty": where.append(" IS EMPTY"); break; case "IsNotEmpty": case "NotEmpty": where.append(" IS NOT EMPTY"); break; } } } String whereQuery = where.toString().isEmpty() ? "" : " WHERE " + where.toString(); return new Result(entityClass, "FROM " + getEntityName() + whereQuery, queryType, paramsCount, sort, topCount); }
/**
 * Derives a query from a Spring-Data-style repository method name
 * (e.g. {@code findTop3ByNameOrderByAgeDesc}): determines the query type from the
 * prefix, the optional Top/First limit, the optional OrderBy sort, and builds a
 * WHERE clause from the And/Or-separated field/operation parts after 'By'.
 *
 * @param methodInfo the repository method to parse
 * @return the parsed query, its type, parameter count, optional sort and limit
 * @throws UnableToParseMethodException if the name cannot be turned into a query
 */
public Result parse(MethodInfo methodInfo) {
    String methodName = methodInfo.name();
    ClassInfo repositoryClassInfo = methodInfo.declaringClass();
    String repositoryMethodDescription = "'" + methodName + "' of repository '" + repositoryClassInfo + "'";
    QueryType queryType = getType(methodName);
    if (queryType == null) {
        throw new UnableToParseMethodException("Method " + repositoryMethodDescription
            + " cannot be parsed. Did you forget to annotate the method with '@Query'?");
    }

    int byIndex = methodName.indexOf("By");
    if ((byIndex == -1) || (byIndex + 2 >= methodName.length())) {
        throw new UnableToParseMethodException("Method " + repositoryMethodDescription
            + " cannot be parsed as there is no proper 'By' clause in the name.");
    }

    // A Top/First limit only counts if the keyword occurs before 'By'; the
    // earliest of the two positions wins (indexOfOrMaxValue maps -1 to MAX_VALUE).
    Integer topCount = null;
    int minFirstOrTopIndex = Math.min(indexOfOrMaxValue(methodName, "First"), indexOfOrMaxValue(methodName, "Top"));
    if (minFirstOrTopIndex < byIndex) {
        if (queryType != QueryType.SELECT) {
            throw new UnableToParseMethodException(
                "When 'Top' or 'First' is specified, the query must be a find query. Offending method is "
                    + repositoryMethodDescription + ".");
        }
        try {
            // Strip the keywords; an empty remainder means an implicit limit of 1.
            String topCountStr = methodName.substring(minFirstOrTopIndex, byIndex)
                .replace("Top", "").replace("First", "");
            if (topCountStr.isEmpty()) {
                topCount = 1;
            } else {
                topCount = Integer.valueOf(topCountStr);
            }
        } catch (Exception e) {
            throw new UnableToParseMethodException(
                "Unable to parse query with limiting results clause. Offending method is "
                    + repositoryMethodDescription + ".");
        }
    }

    if (methodName.substring(0, byIndex).contains("Distinct")) {
        throw new UnableToParseMethodException(
            "Distinct is not yet supported. Offending method is " + repositoryMethodDescription + ".");
    }

    // Everything after 'By' describes the predicate (and optionally the ordering).
    String afterByPart = methodName.substring(byIndex + 2);
    boolean allIgnoreCase = false;
    if (afterByPart.contains(ALL_IGNORE_CASE)) {
        allIgnoreCase = true;
        afterByPart = afterByPart.replace(ALL_IGNORE_CASE, "");
    }

    // Handle the trailing OrderBy<Field>[Asc|Desc] clause, if any.
    Sort sort = null;
    if (containsLogicOperator(afterByPart, ORDER_BY)) {
        int orderByIndex = afterByPart.indexOf(ORDER_BY);
        if (orderByIndex + ORDER_BY.length() == afterByPart.length()) {
            throw new UnableToParseMethodException(
                "A field must by supplied after 'OrderBy' . Offending method is "
                    + repositoryMethodDescription + ".");
        }
        String afterOrderByPart = afterByPart.substring(orderByIndex + ORDER_BY.length());
        afterByPart = afterByPart.substring(0, orderByIndex);
        boolean ascending = true; // ascending is the default ordering
        if (afterOrderByPart.endsWith("Asc")) {
            ascending = true;
            afterOrderByPart = afterOrderByPart.replace("Asc", "");
        } else if (afterOrderByPart.endsWith("Desc")) {
            ascending = false;
            afterOrderByPart = afterOrderByPart.replace("Desc", "");
        }
        String orderField = lowerFirstLetter(afterOrderByPart);
        if (!entityContainsField(orderField)) {
            throw new UnableToParseMethodException(
                "Field " + orderField
                    + " which was configured as the order field does not exist in the entity. Offending method is "
                    + repositoryMethodDescription + ".");
        }
        if (ascending) {
            sort = Sort.ascending(orderField);
        } else {
            sort = Sort.descending(orderField);
        }
    }

    // Split the predicate on And/Or; mixing both is unsupported.
    List<String> parts = Collections.singletonList(afterByPart);
    boolean containsAnd = containsLogicOperator(afterByPart, "And");
    boolean containsOr = containsLogicOperator(afterByPart, "Or");
    if (containsAnd && containsOr) {
        throw new UnableToParseMethodException(
            "'And' and 'Or' clauses cannot be mixed in a method name - Try specifying the Query with the @Query annotation. Offending method is "
                + repositoryMethodDescription + ".");
    }
    if (containsAnd) {
        parts = Arrays.asList(afterByPart.split("And"));
    } else if (containsOr) {
        parts = Arrays.asList(afterByPart.split("Or"));
    }

    MutableReference<List<ClassInfo>> mappedSuperClassInfoRef = MutableReference.of(mappedSuperClassInfos);
    StringBuilder where = new StringBuilder();
    int paramsCount = 0;
    for (String part : parts) {
        if (part.isEmpty()) {
            continue;
        }
        String fieldName;
        boolean ignoreCase = false;

        if (part.endsWith(IGNORE_CASE)) {
            ignoreCase = true;
            part = part.replace(IGNORE_CASE, "");
        }

        // Split "<Field><Operation>" into a field name and an optional operation.
        String operation = getFieldOperation(part);
        if (operation == null) { // no operation suffix => simple equality
            fieldName = lowerFirstLetter(part);
        } else {
            fieldName = lowerFirstLetter(part.replaceAll(operation, ""));
        }
        FieldInfo fieldInfo = getFieldInfo(fieldName, entityClass, mappedSuperClassInfoRef);
        if (fieldInfo == null) {
            // Not a direct field: try to resolve it as a nested property path.
            StringBuilder fieldPathBuilder = new StringBuilder(fieldName.length() + 5);
            fieldInfo = resolveNestedField(repositoryMethodDescription, fieldName, fieldPathBuilder);
            fieldName = fieldPathBuilder.toString();
        }
        validateFieldWithOperation(operation, fieldInfo, fieldName, repositoryMethodDescription);
        if ((ignoreCase || allIgnoreCase) && !DotNames.STRING.equals(fieldInfo.type().name())) {
            throw new UnableToParseMethodException(
                "IgnoreCase cannot be specified for field" + fieldInfo.name() + " because it is not a String type. "
                    + "Offending method is " + repositoryMethodDescription + ".");
        }

        if (where.length() > 0) {
            where.append(containsAnd ? " AND " : " OR ");
        }

        // Case-insensitive comparisons wrap both sides in UPPER(...).
        String upperPrefix = (ignoreCase || allIgnoreCase) ? "UPPER(" : "";
        String upperSuffix = (ignoreCase || allIgnoreCase) ? ")" : "";

        where.append(upperPrefix).append(fieldName).append(upperSuffix);
        if ((operation == null) || "Equals".equals(operation) || "Is".equals(operation)) {
            paramsCount++;
            where.append(" = ").append(upperPrefix).append("?").append(paramsCount).append(upperSuffix);
        } else {
            switch (operation) {
                case "IsNot":
                case "Not":
                    paramsCount++;
                    where.append(" <> ?").append(paramsCount);
                    break;
                case "IsNull":
                case "Null":
                    where.append(" IS null ");
                    break;
                case "IsNotNull":
                case "NotNull":
                    where.append(" IS NOT null ");
                    break;
                case "Between":
                case "IsBetween":
                    where.append(" BETWEEN ");
                    paramsCount++;
                    where.append("?").append(paramsCount).append(" AND ");
                    paramsCount++;
                    where.append("?").append(paramsCount);
                    break;
                case "LessThan":
                case "IsLessThan":
                case "Before":
                case "IsBefore":
                    paramsCount++;
                    where.append(" < ?").append(paramsCount);
                    break;
                case "LessThanEqual":
                case "IsLessThanEqual":
                    paramsCount++;
                    where.append(" <= ?").append(paramsCount);
                    break;
                case "GreaterThan":
                case "IsGreaterThan":
                case "After":
                case "IsAfter":
                    paramsCount++;
                    where.append(" > ?").append(paramsCount);
                    break;
                case "GreaterThanEqual":
                case "IsGreaterThanEqual":
                    paramsCount++;
                    where.append(" >= ?").append(paramsCount);
                    break;
                case "Like":
                case "IsLike":
                    paramsCount++;
                    where.append(" LIKE ?").append(paramsCount);
                    break;
                case "NotLike":
                case "IsNotLike":
                    paramsCount++;
                    where.append(" NOT LIKE ?").append(paramsCount);
                    break;
                case "IsStartingWith":
                case "StartingWith":
                case "StartsWith":
                    paramsCount++;
                    where.append(" LIKE CONCAT(").append(upperPrefix).append("?").append(paramsCount).append(upperSuffix)
                        .append(", '%')");
                    break;
                case "IsEndingWith":
                case "EndingWith":
                case "EndsWith":
                    paramsCount++;
                    where.append(" LIKE CONCAT('%', ").append(upperPrefix).append("?").append(paramsCount)
                        .append(upperSuffix).append(")");
                    break;
                case "IsContaining":
                case "Containing":
                case "Contains":
                    paramsCount++;
                    where.append(" LIKE CONCAT('%', ").append(upperPrefix).append("?").append(paramsCount)
                        .append(upperSuffix).append(", '%')");
                    break;
                case "True":
                case "False":
                    where.append(" = ").append(operation.toLowerCase());
                    break;
                case "IsIn":
                case "In":
                    paramsCount++;
                    where.append(" IN ?").append(paramsCount);
                    break;
                case "IsNotIn":
                case "NotIn":
                    paramsCount++;
                    where.append(" NOT IN ?").append(paramsCount);
                    break;
                case "IsEmpty":
                case "Empty":
                    where.append(" IS EMPTY");
                    break;
                case "IsNotEmpty":
                case "NotEmpty":
                    where.append(" IS NOT EMPTY");
                    break;
            }
        }
    }
    String whereQuery = where.toString().isEmpty() ? "" : " WHERE " + where.toString();
    return new Result(entityClass, "FROM " + getEntityName() + whereQuery, queryType, paramsCount, sort, topCount);
}
/**
 * Parses repository method names (Spring-Data style) into queries against a
 * Jandex-indexed entity class. Holds the recognized operation keywords and
 * helpers for resolving (possibly nested) entity fields.
 */
class MethodNameParser {

    private static final String ALL_IGNORE_CASE = "AllIgnoreCase";
    private static final String IGNORE_CASE = "IgnoreCase";
    private static final String ORDER_BY = "OrderBy";

    // All operation suffixes understood by getFieldOperation(), both the
    // "IsX" and bare "X" spellings.
    private static final List<String> HANDLED_PROPERTY_OPERATIONS = Arrays.asList(
            "Is", "Equals",
            "IsNot", "Not",
            "IsNull", "Null",
            "IsNotNull", "NotNull",
            "IsBetween", "Between",
            "IsLessThan", "LessThan",
            "IsLessThanEqual", "LessThanEqual",
            "IsGreaterThan", "GreaterThan",
            "IsGreaterThanEqual", "GreaterThanEqual",
            "IsLike", "Like",
            "IsNotLike", "NotLike",
            "IsStartingWith", "StartingWith", "StartsWith",
            "IsEndingWith", "EndingWith", "EndsWith",
            "IsContaining", "Containing", "Contains",
            "Before", "IsBefore",
            "After", "IsAfter",
            "True", "False",
            "IsIn", "In",
            "IsNotIn", "NotIn",
            "IsEmpty", "Empty",
            "IsNotEmpty", "NotEmpty");

    // Operations only valid on String-typed fields (see validateFieldWithOperation).
    private static final Set<String> STRING_LIKE_OPERATIONS = new HashSet<>(Arrays.asList(
            "IsLike", "Like", "IsNotLike", "NotLike",
            "IsStartingWith", "StartingWith", "StartsWith",
            "IsEndingWith", "EndingWith", "EndsWith",
            "IsContaining", "Containing", "Contains"));

    // Operations only valid on boolean-typed fields.
    private static final Set<String> BOOLEAN_OPERATIONS = new HashSet<>(Arrays.asList("True", "False"));

    private final ClassInfo entityClass;
    private final IndexView indexView;
    private final List<ClassInfo> mappedSuperClassInfos;

    public MethodNameParser(ClassInfo entityClass, IndexView indexView) {
        this.entityClass = entityClass;
        this.indexView = indexView;
        // Pre-compute the JPA mapped-superclass/inheritance ancestors once.
        this.mappedSuperClassInfos = getSuperClassInfos(indexView, entityClass);
    }

    public enum QueryType {
        SELECT,
        COUNT,
        EXISTS,
        DELETE;
    }

    /**
     * Resolves a possibly nested field path expression (segments optionally
     * separated by '_') against the entity, appending the resolved dotted path
     * to {@code fieldPathBuilder} and returning the final field.
     * See: (reference link truncated in this excerpt — https:...)
     */
    private FieldInfo resolveNestedField(String repositoryMethodDescription, String fieldPathExpression,
            StringBuilder fieldPathBuilder) {
        String fieldNotResolvableMessage = "Entity " + this.entityClass + " does not contain a field named: "
                + fieldPathExpression + ". ";
        String offendingMethodMessage = "Offending method is " + repositoryMethodDescription + ".";
        ClassInfo parentClassInfo = this.entityClass;
        FieldInfo fieldInfo = null;
        int fieldStartIndex = 0;
        while (fieldStartIndex < fieldPathExpression.length()) {
            // An explicit '_' separator starts a new segment; a trailing '_' is invalid.
            if (fieldPathExpression.charAt(fieldStartIndex) == '_') {
                fieldStartIndex++;
                if (fieldStartIndex >= fieldPathExpression.length()) {
                    throw new UnableToParseMethodException(fieldNotResolvableMessage + offendingMethodMessage);
                }
            }
            MutableReference<List<ClassInfo>> parentSuperClassInfos = new MutableReference<>();
            int firstSeparator = fieldPathExpression.indexOf('_', fieldStartIndex);
            int fieldEndIndex = firstSeparator == -1 ? fieldPathExpression.length() : firstSeparator;
            // Greedily try the longest candidate, shrinking at camel-case
            // boundaries until a field of the current class matches.
            while (fieldEndIndex >= fieldStartIndex) {
                String simpleFieldName = lowerFirstLetter(fieldPathExpression.substring(fieldStartIndex, fieldEndIndex));
                fieldInfo = getFieldInfo(simpleFieldName, parentClassInfo, parentSuperClassInfos);
                if (fieldInfo != null) {
                    break;
                }
                fieldEndIndex = previousPotentialFieldEnd(fieldPathExpression, fieldStartIndex, fieldEndIndex);
            }
            if (fieldInfo == null) {
                String detail = "";
                if (fieldStartIndex > 0) {
                    String notMatched = lowerFirstLetter(fieldPathExpression.substring(fieldStartIndex));
                    detail = "Can not resolve " + parentClassInfo + "." + notMatched + ". ";
                }
                throw new UnableToParseMethodException(
                        fieldNotResolvableMessage + detail + offendingMethodMessage);
            }
            if (fieldPathBuilder.length() > 0) {
                fieldPathBuilder.append('.');
            }
            fieldPathBuilder.append(fieldInfo.name());
            // Non-basic field types must themselves be indexed entities so the
            // next segment can be resolved against them.
            if (!isHibernateProvidedBasicType(fieldInfo.type().name())) {
                parentClassInfo = indexView.getClassByName(fieldInfo.type().name());
                if (parentClassInfo == null) {
                    throw new IllegalStateException(
                            "Entity class " + fieldInfo.type().name() + " referenced by " + this.entityClass + "."
                                    + fieldPathBuilder + " was not part of the Quarkus index. " + offendingMethodMessage);
                }
            }
            fieldStartIndex = fieldEndIndex;
        }
        return fieldInfo;
    }

    // Returns the index of the previous uppercase (camel-case) boundary strictly
    // inside (fieldStartIndex, fieldEndIndexExclusive), or -1 if none.
    private int previousPotentialFieldEnd(String fieldName, int fieldStartIndex, int fieldEndIndexExclusive) {
        for (int i = fieldEndIndexExclusive - 1; i > fieldStartIndex; i--) {
            char c = fieldName.charAt(i);
            if (c >= 'A' && c <= 'Z') {
                return i;
            }
        }
        return -1;
    }

    /**
     * Meant to be called with {@param operator} being {@code "And"} or {@code "Or"}
     * and returns {@code true} if the string contains the logical operator
     * and the next character is an uppercase character.
     * The reasoning is that if the next character is not uppercase,
     * then the operator string is just part of a word
     */
    private boolean containsLogicOperator(String str, String operatorStr) {
        int index = str.indexOf(operatorStr);
        if (index == -1) {
            return false;
        }
        if (str.length() < index + operatorStr.length() + 1) {
            return false;
        }
        return Character.isUpperCase(str.charAt(index + operatorStr.length()));
    }

    // Rejects string-only operations on non-String fields and True/False on
    // non-boolean fields.
    private void validateFieldWithOperation(String operation, FieldInfo fieldInfo, String fieldPath,
            String repositoryMethodDescription) {
        DotName fieldTypeDotName = fieldInfo.type().name();
        if (STRING_LIKE_OPERATIONS.contains(operation) && !DotNames.STRING.equals(fieldTypeDotName)) {
            throw new UnableToParseMethodException(
                    operation + " cannot be specified for field" + fieldPath + " because it is not a String type. "
                            + "Offending method is " + repositoryMethodDescription + ".");
        }
        if (BOOLEAN_OPERATIONS.contains(operation) && !DotNames.BOOLEAN.equals(fieldTypeDotName)
                && !DotNames.PRIMITIVE_BOOLEAN.equals(fieldTypeDotName)) {
            throw new UnableToParseMethodException(
                    operation + " cannot be specified for field" + fieldPath + " because it is not a boolean type. "
                            + "Offending method is " + repositoryMethodDescription + ".");
        }
    }

    // Maps the method-name prefix to a query type; null if unrecognized.
    private QueryType getType(String methodName) {
        if (methodName.startsWith("find") || methodName.startsWith("query") || methodName.startsWith("read")
                || methodName.startsWith("get")) {
            return QueryType.SELECT;
        }
        if (methodName.startsWith("count")) {
            return QueryType.COUNT;
        }
        if (methodName.startsWith("delete") || methodName.startsWith("remove")) {
            return QueryType.DELETE;
        }
        if (methodName.startsWith("exists")) {
            return QueryType.EXISTS;
        }
        return null;
    }

    // Returns the operation suffix of the part; when several suffixes match,
    // the longest wins (e.g. "IsNotNull" over "NotNull" over "Null").
    private String getFieldOperation(String part) {
        List<String> matches = new ArrayList<>();
        for (String handledPropertyOperation : HANDLED_PROPERTY_OPERATIONS) {
            if (part.endsWith(handledPropertyOperation)) {
                matches.add(handledPropertyOperation);
            }
        }
        if (matches.isEmpty()) {
            return null;
        }
        if (matches.size() == 1) {
            return matches.get(0);
        }
        matches.sort(Comparator.comparing(String::length).reversed());
        return matches.get(0);
    }

    // Lower-cases only the first character ("FooBar" -> "fooBar").
    private String lowerFirstLetter(String input) {
        if ((input == null) || input.isEmpty()) {
            return input;
        }
        if (input.length() == 1) {
            return input.toLowerCase();
        }
        return Character.toLowerCase(input.charAt(0)) + input.substring(1);
    }

    // Uses @Entity(name = ...) when present and non-empty, else the simple class name.
    private String getEntityName() {
        AnnotationInstance annotationInstance = entityClass.classAnnotation(DotNames.JPA_ENTITY);
        if (annotationInstance != null && annotationInstance.value("name") != null) {
            AnnotationValue annotationValue = annotationInstance.value("name");
            return annotationValue.asString().length() > 0 ? annotationValue.asString() : entityClass.simpleName();
        }
        return entityClass.simpleName();
    }

    // True if the entity or one of its pre-computed superclasses declares the field.
    private boolean entityContainsField(String fieldName) {
        if (entityClass.field(fieldName) != null) {
            return true;
        }
        for (ClassInfo superClass : mappedSuperClassInfos) {
            FieldInfo fieldInfo = superClass.field(fieldName);
            if (fieldInfo != null) {
                return true;
            }
        }
        return false;
    }

    // Looks up the field on the class, lazily filling and reusing the superclass
    // list via the mutable reference.
    private FieldInfo getFieldInfo(String fieldName, ClassInfo entityClass,
            MutableReference<List<ClassInfo>> superClassInfos) {
        FieldInfo fieldInfo = entityClass.field(fieldName);
        if (fieldInfo == null) {
            if (superClassInfos.isEmpty()) {
                superClassInfos.set(getSuperClassInfos(indexView, entityClass));
            }
            for (ClassInfo superClass : superClassInfos.get()) {
                fieldInfo = superClass.field(fieldName);
                if (fieldInfo != null) {
                    break;
                }
            }
        }
        return fieldInfo;
    }

    // Walks the superclass chain collecting @MappedSuperclass / @Inheritance classes.
    private List<ClassInfo> getSuperClassInfos(IndexView indexView, ClassInfo entityClass) {
        List<ClassInfo> mappedSuperClassInfoElements = new ArrayList<>(3);
        Type superClassType = entityClass.superClassType();
        while (superClassType != null && !superClassType.name().equals(DotNames.OBJECT)) {
            ClassInfo superClass = indexView.getClassByName(superClassType.name());
            if (superClass.classAnnotation(DotNames.JPA_MAPPED_SUPERCLASS) != null) {
                mappedSuperClassInfoElements.add(superClass);
            } else if (superClass.classAnnotation(DotNames.JPA_INHERITANCE) != null) {
                mappedSuperClassInfoElements.add(superClass);
            }
            if (superClassType.kind() == Kind.CLASS) {
                superClassType = superClass.superClassType();
            } else if (superClassType.kind() == Kind.PARAMETERIZED_TYPE) {
                ParameterizedType parameterizedType = superClassType.asParameterizedType();
                superClassType = parameterizedType.owner();
            }
        }
        return mappedSuperClassInfoElements;
    }

    private boolean isHibernateProvidedBasicType(DotName dotName) {
        return DotNames.HIBERNATE_PROVIDED_BASIC_TYPES.contains(dotName);
    }

    // Small holder allowing a lazily-computed value to be shared across calls.
    private static class MutableReference<T> {
        private T reference;

        public static <T> MutableReference<T> of(T reference) {
            return new MutableReference<>(reference);
        }

        public MutableReference() {
        }

        private MutableReference(T reference) {
            this.reference = reference;
        }

        public T get() {
            return reference;
        }

        public void set(T value) {
            this.reference = value;
        }

        public boolean isEmpty() {
            return reference == null;
        }
    }

    // Immutable result of parsing: the query string, its type, the number of
    // positional parameters, and the optional sort and Top/First limit.
    public static class Result {
        private final ClassInfo entityClass;
        private final String query;
        private final QueryType queryType;
        private final int paramCount;
        private final Sort sort;
        private final Integer topCount;

        public Result(ClassInfo entityClass, String query, QueryType queryType, int paramCount, Sort sort,
                Integer topCount) {
            this.entityClass = entityClass;
            this.query = query;
            this.queryType = queryType;
            this.paramCount = paramCount;
            this.sort = sort;
            this.topCount = topCount;
        }

        public ClassInfo getEntityClass() {
            return entityClass;
        }

        public String getQuery() {
            return query;
        }

        public QueryType getQueryType() {
            return queryType;
        }

        public int getParamCount() {
            return paramCount;
        }

        public Sort getSort() {
            return sort;
        }

        public Integer getTopCount() {
            return topCount;
        }
    }
}
/**
 * Parses repository method names (Spring-Data style) into queries against a
 * Jandex-indexed entity class. Holds the recognized operation keywords and
 * helpers for resolving (possibly nested) entity fields.
 */
class MethodNameParser {

    private static final String ALL_IGNORE_CASE = "AllIgnoreCase";
    private static final String IGNORE_CASE = "IgnoreCase";
    private static final String ORDER_BY = "OrderBy";

    // All operation suffixes understood by getFieldOperation(), both the
    // "IsX" and bare "X" spellings.
    private static final List<String> HANDLED_PROPERTY_OPERATIONS = Arrays.asList(
            "Is", "Equals",
            "IsNot", "Not",
            "IsNull", "Null",
            "IsNotNull", "NotNull",
            "IsBetween", "Between",
            "IsLessThan", "LessThan",
            "IsLessThanEqual", "LessThanEqual",
            "IsGreaterThan", "GreaterThan",
            "IsGreaterThanEqual", "GreaterThanEqual",
            "IsLike", "Like",
            "IsNotLike", "NotLike",
            "IsStartingWith", "StartingWith", "StartsWith",
            "IsEndingWith", "EndingWith", "EndsWith",
            "IsContaining", "Containing", "Contains",
            "Before", "IsBefore",
            "After", "IsAfter",
            "True", "False",
            "IsIn", "In",
            "IsNotIn", "NotIn",
            "IsEmpty", "Empty",
            "IsNotEmpty", "NotEmpty");

    // Operations only valid on String-typed fields (see validateFieldWithOperation).
    private static final Set<String> STRING_LIKE_OPERATIONS = new HashSet<>(Arrays.asList(
            "IsLike", "Like", "IsNotLike", "NotLike",
            "IsStartingWith", "StartingWith", "StartsWith",
            "IsEndingWith", "EndingWith", "EndsWith",
            "IsContaining", "Containing", "Contains"));

    // Operations only valid on boolean-typed fields.
    private static final Set<String> BOOLEAN_OPERATIONS = new HashSet<>(Arrays.asList("True", "False"));

    private final ClassInfo entityClass;
    private final IndexView indexView;
    private final List<ClassInfo> mappedSuperClassInfos;

    public MethodNameParser(ClassInfo entityClass, IndexView indexView) {
        this.entityClass = entityClass;
        this.indexView = indexView;
        // Pre-compute the JPA mapped-superclass/inheritance ancestors once.
        this.mappedSuperClassInfos = getSuperClassInfos(indexView, entityClass);
    }

    public enum QueryType {
        SELECT,
        COUNT,
        EXISTS,
        DELETE;
    }

    // indexOf wrapper that maps "not found" (-1) to Integer.MAX_VALUE so callers
    // can take the minimum of several candidate positions safely.
    private int indexOfOrMaxValue(String methodName, String term) {
        int index = methodName.indexOf(term);
        return index != -1 ? index : Integer.MAX_VALUE;
    }

    /**
     * Resolves a possibly nested field path expression (segments optionally
     * separated by '_') against the entity, appending the resolved dotted path
     * to {@code fieldPathBuilder} and returning the final field.
     * See: (reference link truncated in this excerpt — https:...)
     */
    private FieldInfo resolveNestedField(String repositoryMethodDescription, String fieldPathExpression,
            StringBuilder fieldPathBuilder) {
        String fieldNotResolvableMessage = "Entity " + this.entityClass + " does not contain a field named: "
                + fieldPathExpression + ". ";
        String offendingMethodMessage = "Offending method is " + repositoryMethodDescription + ".";
        ClassInfo parentClassInfo = this.entityClass;
        FieldInfo fieldInfo = null;
        int fieldStartIndex = 0;
        while (fieldStartIndex < fieldPathExpression.length()) {
            // An explicit '_' separator starts a new segment; a trailing '_' is invalid.
            if (fieldPathExpression.charAt(fieldStartIndex) == '_') {
                fieldStartIndex++;
                if (fieldStartIndex >= fieldPathExpression.length()) {
                    throw new UnableToParseMethodException(fieldNotResolvableMessage + offendingMethodMessage);
                }
            }
            MutableReference<List<ClassInfo>> parentSuperClassInfos = new MutableReference<>();
            int firstSeparator = fieldPathExpression.indexOf('_', fieldStartIndex);
            int fieldEndIndex = firstSeparator == -1 ? fieldPathExpression.length() : firstSeparator;
            // Greedily try the longest candidate, shrinking at camel-case
            // boundaries until a field of the current class matches.
            while (fieldEndIndex >= fieldStartIndex) {
                String simpleFieldName = lowerFirstLetter(fieldPathExpression.substring(fieldStartIndex, fieldEndIndex));
                fieldInfo = getFieldInfo(simpleFieldName, parentClassInfo, parentSuperClassInfos);
                if (fieldInfo != null) {
                    break;
                }
                fieldEndIndex = previousPotentialFieldEnd(fieldPathExpression, fieldStartIndex, fieldEndIndex);
            }
            if (fieldInfo == null) {
                String detail = "";
                if (fieldStartIndex > 0) {
                    String notMatched = lowerFirstLetter(fieldPathExpression.substring(fieldStartIndex));
                    detail = "Can not resolve " + parentClassInfo + "." + notMatched + ". ";
                }
                throw new UnableToParseMethodException(
                        fieldNotResolvableMessage + detail + offendingMethodMessage);
            }
            if (fieldPathBuilder.length() > 0) {
                fieldPathBuilder.append('.');
            }
            fieldPathBuilder.append(fieldInfo.name());
            // Non-basic field types must themselves be indexed entities so the
            // next segment can be resolved against them.
            if (!isHibernateProvidedBasicType(fieldInfo.type().name())) {
                parentClassInfo = indexView.getClassByName(fieldInfo.type().name());
                if (parentClassInfo == null) {
                    throw new IllegalStateException(
                            "Entity class " + fieldInfo.type().name() + " referenced by " + this.entityClass + "."
                                    + fieldPathBuilder + " was not part of the Quarkus index. " + offendingMethodMessage);
                }
            }
            fieldStartIndex = fieldEndIndex;
        }
        return fieldInfo;
    }

    // Returns the index of the previous uppercase (camel-case) boundary strictly
    // inside (fieldStartIndex, fieldEndIndexExclusive), or -1 if none.
    private int previousPotentialFieldEnd(String fieldName, int fieldStartIndex, int fieldEndIndexExclusive) {
        for (int i = fieldEndIndexExclusive - 1; i > fieldStartIndex; i--) {
            char c = fieldName.charAt(i);
            if (c >= 'A' && c <= 'Z') {
                return i;
            }
        }
        return -1;
    }

    /**
     * Meant to be called with {@param operator} being {@code "And"} or {@code "Or"}
     * and returns {@code true} if the string contains the logical operator
     * and the next character is an uppercase character.
     * The reasoning is that if the next character is not uppercase,
     * then the operator string is just part of a word
     */
    private boolean containsLogicOperator(String str, String operatorStr) {
        int index = str.indexOf(operatorStr);
        if (index == -1) {
            return false;
        }
        if (str.length() < index + operatorStr.length() + 1) {
            return false;
        }
        return Character.isUpperCase(str.charAt(index + operatorStr.length()));
    }

    // Rejects string-only operations on non-String fields and True/False on
    // non-boolean fields.
    private void validateFieldWithOperation(String operation, FieldInfo fieldInfo, String fieldPath,
            String repositoryMethodDescription) {
        DotName fieldTypeDotName = fieldInfo.type().name();
        if (STRING_LIKE_OPERATIONS.contains(operation) && !DotNames.STRING.equals(fieldTypeDotName)) {
            throw new UnableToParseMethodException(
                    operation + " cannot be specified for field" + fieldPath + " because it is not a String type. "
                            + "Offending method is " + repositoryMethodDescription + ".");
        }
        if (BOOLEAN_OPERATIONS.contains(operation) && !DotNames.BOOLEAN.equals(fieldTypeDotName)
                && !DotNames.PRIMITIVE_BOOLEAN.equals(fieldTypeDotName)) {
            throw new UnableToParseMethodException(
                    operation + " cannot be specified for field" + fieldPath + " because it is not a boolean type. "
                            + "Offending method is " + repositoryMethodDescription + ".");
        }
    }

    // Maps the method-name prefix to a query type; null if unrecognized.
    private QueryType getType(String methodName) {
        if (methodName.startsWith("find") || methodName.startsWith("query") || methodName.startsWith("read")
                || methodName.startsWith("get")) {
            return QueryType.SELECT;
        }
        if (methodName.startsWith("count")) {
            return QueryType.COUNT;
        }
        if (methodName.startsWith("delete") || methodName.startsWith("remove")) {
            return QueryType.DELETE;
        }
        if (methodName.startsWith("exists")) {
            return QueryType.EXISTS;
        }
        return null;
    }

    // Returns the operation suffix of the part; when several suffixes match,
    // the longest wins (e.g. "IsNotNull" over "NotNull" over "Null").
    private String getFieldOperation(String part) {
        List<String> matches = new ArrayList<>();
        for (String handledPropertyOperation : HANDLED_PROPERTY_OPERATIONS) {
            if (part.endsWith(handledPropertyOperation)) {
                matches.add(handledPropertyOperation);
            }
        }
        if (matches.isEmpty()) {
            return null;
        }
        if (matches.size() == 1) {
            return matches.get(0);
        }
        matches.sort(Comparator.comparing(String::length).reversed());
        return matches.get(0);
    }

    // Lower-cases only the first character ("FooBar" -> "fooBar").
    private String lowerFirstLetter(String input) {
        if ((input == null) || input.isEmpty()) {
            return input;
        }
        if (input.length() == 1) {
            return input.toLowerCase();
        }
        return Character.toLowerCase(input.charAt(0)) + input.substring(1);
    }

    // Uses @Entity(name = ...) when present and non-empty, else the simple class name.
    private String getEntityName() {
        AnnotationInstance annotationInstance = entityClass.classAnnotation(DotNames.JPA_ENTITY);
        if (annotationInstance != null && annotationInstance.value("name") != null) {
            AnnotationValue annotationValue = annotationInstance.value("name");
            return annotationValue.asString().length() > 0 ? annotationValue.asString() : entityClass.simpleName();
        }
        return entityClass.simpleName();
    }

    // True if the entity or one of its pre-computed superclasses declares the field.
    private boolean entityContainsField(String fieldName) {
        if (entityClass.field(fieldName) != null) {
            return true;
        }
        for (ClassInfo superClass : mappedSuperClassInfos) {
            FieldInfo fieldInfo = superClass.field(fieldName);
            if (fieldInfo != null) {
                return true;
            }
        }
        return false;
    }

    // Looks up the field on the class, lazily filling and reusing the superclass
    // list via the mutable reference.
    private FieldInfo getFieldInfo(String fieldName, ClassInfo entityClass,
            MutableReference<List<ClassInfo>> superClassInfos) {
        FieldInfo fieldInfo = entityClass.field(fieldName);
        if (fieldInfo == null) {
            if (superClassInfos.isEmpty()) {
                superClassInfos.set(getSuperClassInfos(indexView, entityClass));
            }
            for (ClassInfo superClass : superClassInfos.get()) {
                fieldInfo = superClass.field(fieldName);
                if (fieldInfo != null) {
                    break;
                }
            }
        }
        return fieldInfo;
    }

    // Walks the superclass chain collecting @MappedSuperclass / @Inheritance classes.
    private List<ClassInfo> getSuperClassInfos(IndexView indexView, ClassInfo entityClass) {
        List<ClassInfo> mappedSuperClassInfoElements = new ArrayList<>(3);
        Type superClassType = entityClass.superClassType();
        while (superClassType != null && !superClassType.name().equals(DotNames.OBJECT)) {
            ClassInfo superClass = indexView.getClassByName(superClassType.name());
            if (superClass.classAnnotation(DotNames.JPA_MAPPED_SUPERCLASS) != null) {
                mappedSuperClassInfoElements.add(superClass);
            } else if (superClass.classAnnotation(DotNames.JPA_INHERITANCE) != null) {
                mappedSuperClassInfoElements.add(superClass);
            }
            if (superClassType.kind() == Kind.CLASS) {
                superClassType = superClass.superClassType();
            } else if (superClassType.kind() == Kind.PARAMETERIZED_TYPE) {
                ParameterizedType parameterizedType = superClassType.asParameterizedType();
                superClassType = parameterizedType.owner();
            }
        }
        return mappedSuperClassInfoElements;
    }

    private boolean isHibernateProvidedBasicType(DotName dotName) {
        return DotNames.HIBERNATE_PROVIDED_BASIC_TYPES.contains(dotName);
    }

    // Small holder allowing a lazily-computed value to be shared across calls.
    private static class MutableReference<T> {
        private T reference;

        public static <T> MutableReference<T> of(T reference) {
            return new MutableReference<>(reference);
        }

        public MutableReference() {
        }

        private MutableReference(T reference) {
            this.reference = reference;
        }

        public T get() {
            return reference;
        }

        public void set(T value) {
            this.reference = value;
        }

        public boolean isEmpty() {
            return reference == null;
        }
    }

    // Immutable result of parsing: the query string, its type, the number of
    // positional parameters, and the optional sort and Top/First limit.
    public static class Result {
        private final ClassInfo entityClass;
        private final String query;
        private final QueryType queryType;
        private final int paramCount;
        private final Sort sort;
        private final Integer topCount;

        public Result(ClassInfo entityClass, String query, QueryType queryType, int paramCount, Sort sort,
                Integer topCount) {
            this.entityClass = entityClass;
            this.query = query;
            this.queryType = queryType;
            this.paramCount = paramCount;
            this.sort = sort;
            this.topCount = topCount;
        }

        public ClassInfo getEntityClass() {
            return entityClass;
        }

        public String getQuery() {
            return query;
        }

        public QueryType getQueryType() {
            return queryType;
        }

        public int getParamCount() {
            return paramCount;
        }

        public Sort getSort() {
            return sort;
        }

        public Integer getTopCount() {
            return topCount;
        }
    }
}
Also an exclusive curator framework per JobMaster will definitely harm the performance and add additional pressure to ZooKeeper.
public void close() throws Exception { if (!running) { return; } running = false; LOG.info("Closing {}.", this); client.getConnectionStateListenable().removeListener(connectionStateListener); cache.close(); try { if (client.getZookeeperClient().isConnected() && !connectionInformationPath.contains(RESOURCE_MANAGER_NODE)) { client.watchers() .removeAll() .ofType(Watcher.WatcherType.Any) .forPath(connectionInformationPath); } } catch (KeeperException.NoWatcherException e) { } }
&& !connectionInformationPath.contains(RESOURCE_MANAGER_NODE)) {
public void close() throws Exception { if (!running) { return; } running = false; LOG.info("Closing {}.", this); client.getConnectionStateListenable().removeListener(connectionStateListener); cache.close(); try { if (client.getZookeeperClient().isConnected() && !connectionInformationPath.contains(RESOURCE_MANAGER_NODE)) { client.watchers() .removeAll() .ofType(Watcher.WatcherType.Any) .forPath(connectionInformationPath); } } catch (KeeperException.NoWatcherException e) { } }
class ZooKeeperLeaderRetrievalDriver implements LeaderRetrievalDriver { private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperLeaderRetrievalDriver.class); /** Connection to the used ZooKeeper quorum. */ private final CuratorFramework client; /** Curator recipe to watch changes of a specific ZooKeeper node. */ private final TreeCache cache; private final String connectionInformationPath; private final ConnectionStateListener connectionStateListener = (client, newState) -> handleStateChange(newState); private final LeaderRetrievalEventHandler leaderRetrievalEventHandler; private final LeaderInformationClearancePolicy leaderInformationClearancePolicy; private final FatalErrorHandler fatalErrorHandler; private volatile boolean running; /** * Creates a leader retrieval service which uses ZooKeeper to retrieve the leader information. * * @param client Client which constitutes the connection to the ZooKeeper quorum * @param path Path of the ZooKeeper node which contains the leader information * @param leaderRetrievalEventHandler Handler to notify the leader changes. 
* @param leaderInformationClearancePolicy leaderInformationClearancePolicy controls when the * leader information is being cleared * @param fatalErrorHandler Fatal error handler */ public ZooKeeperLeaderRetrievalDriver( CuratorFramework client, String path, LeaderRetrievalEventHandler leaderRetrievalEventHandler, LeaderInformationClearancePolicy leaderInformationClearancePolicy, FatalErrorHandler fatalErrorHandler) throws Exception { this.client = checkNotNull(client, "CuratorFramework client"); this.connectionInformationPath = ZooKeeperUtils.generateConnectionInformationPath(path); this.cache = ZooKeeperUtils.createTreeCache( client, connectionInformationPath, this::retrieveLeaderInformationFromZooKeeper); this.leaderRetrievalEventHandler = checkNotNull(leaderRetrievalEventHandler); this.leaderInformationClearancePolicy = leaderInformationClearancePolicy; this.fatalErrorHandler = checkNotNull(fatalErrorHandler); cache.start(); client.getConnectionStateListenable().addListener(connectionStateListener); LOG.debug( "Monitoring data change in {}", ZooKeeperUtils.generateZookeeperPath( client.getNamespace(), connectionInformationPath)); running = true; } @Override private void retrieveLeaderInformationFromZooKeeper() { try { LOG.debug("Leader node has changed."); final ChildData childData = cache.getCurrentData(connectionInformationPath); if (childData != null) { final byte[] data = childData.getData(); if (data != null && data.length > 0) { ByteArrayInputStream bais = new ByteArrayInputStream(data); ObjectInputStream ois = new ObjectInputStream(bais); final String leaderAddress = ois.readUTF(); final UUID leaderSessionID = (UUID) ois.readObject(); leaderRetrievalEventHandler.notifyLeaderAddress( LeaderInformation.known(leaderSessionID, leaderAddress)); return; } } notifyNoLeader(); } catch (Exception e) { fatalErrorHandler.onFatalError( new LeaderRetrievalException("Could not handle node changed event.", e)); ExceptionUtils.checkInterrupted(e); } } private void 
handleStateChange(ConnectionState newState) { switch (newState) { case CONNECTED: LOG.debug("Connected to ZooKeeper quorum. Leader retrieval can start."); break; case SUSPENDED: LOG.warn("Connection to ZooKeeper suspended, waiting for reconnection."); if (leaderInformationClearancePolicy == LeaderInformationClearancePolicy.ON_SUSPENDED_CONNECTION) { notifyNoLeader(); } break; case RECONNECTED: LOG.info( "Connection to ZooKeeper was reconnected. Leader retrieval can be restarted."); onReconnectedConnectionState(); break; case LOST: LOG.warn( "Connection to ZooKeeper lost. Can no longer retrieve the leader from " + "ZooKeeper."); notifyNoLeader(); break; } } private void notifyNoLeader() { leaderRetrievalEventHandler.notifyLeaderAddress(LeaderInformation.empty()); } private void onReconnectedConnectionState() { retrieveLeaderInformationFromZooKeeper(); } @Override public String toString() { return "ZookeeperLeaderRetrievalDriver{" + "connectionInformationPath='" + connectionInformationPath + '\'' + '}'; } @VisibleForTesting public String getConnectionInformationPath() { return connectionInformationPath; } /** Policy when to clear the leader information and to notify the listener about it. */ public enum LeaderInformationClearancePolicy { ON_SUSPENDED_CONNECTION, ON_LOST_CONNECTION } }
class ZooKeeperLeaderRetrievalDriver implements LeaderRetrievalDriver { private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperLeaderRetrievalDriver.class); /** Connection to the used ZooKeeper quorum. */ private final CuratorFramework client; /** Curator recipe to watch changes of a specific ZooKeeper node. */ private final TreeCache cache; private final String connectionInformationPath; private final ConnectionStateListener connectionStateListener = (client, newState) -> handleStateChange(newState); private final LeaderRetrievalEventHandler leaderRetrievalEventHandler; private final LeaderInformationClearancePolicy leaderInformationClearancePolicy; private final FatalErrorHandler fatalErrorHandler; private volatile boolean running; /** * Creates a leader retrieval service which uses ZooKeeper to retrieve the leader information. * * @param client Client which constitutes the connection to the ZooKeeper quorum * @param path Path of the ZooKeeper node which contains the leader information * @param leaderRetrievalEventHandler Handler to notify the leader changes. 
* @param leaderInformationClearancePolicy leaderInformationClearancePolicy controls when the * leader information is being cleared * @param fatalErrorHandler Fatal error handler */ public ZooKeeperLeaderRetrievalDriver( CuratorFramework client, String path, LeaderRetrievalEventHandler leaderRetrievalEventHandler, LeaderInformationClearancePolicy leaderInformationClearancePolicy, FatalErrorHandler fatalErrorHandler) throws Exception { this.client = checkNotNull(client, "CuratorFramework client"); this.connectionInformationPath = ZooKeeperUtils.generateConnectionInformationPath(path); this.cache = ZooKeeperUtils.createTreeCache( client, connectionInformationPath, this::retrieveLeaderInformationFromZooKeeper); this.leaderRetrievalEventHandler = checkNotNull(leaderRetrievalEventHandler); this.leaderInformationClearancePolicy = leaderInformationClearancePolicy; this.fatalErrorHandler = checkNotNull(fatalErrorHandler); cache.start(); client.getConnectionStateListenable().addListener(connectionStateListener); LOG.debug( "Monitoring data change in {}", ZooKeeperUtils.generateZookeeperPath( client.getNamespace(), connectionInformationPath)); running = true; } @Override private void retrieveLeaderInformationFromZooKeeper() { try { LOG.debug("Leader node has changed."); final ChildData childData = cache.getCurrentData(connectionInformationPath); if (childData != null) { final byte[] data = childData.getData(); if (data != null && data.length > 0) { ByteArrayInputStream bais = new ByteArrayInputStream(data); ObjectInputStream ois = new ObjectInputStream(bais); final String leaderAddress = ois.readUTF(); final UUID leaderSessionID = (UUID) ois.readObject(); leaderRetrievalEventHandler.notifyLeaderAddress( LeaderInformation.known(leaderSessionID, leaderAddress)); return; } } notifyNoLeader(); } catch (Exception e) { fatalErrorHandler.onFatalError( new LeaderRetrievalException("Could not handle node changed event.", e)); ExceptionUtils.checkInterrupted(e); } } private void 
handleStateChange(ConnectionState newState) { switch (newState) { case CONNECTED: LOG.debug("Connected to ZooKeeper quorum. Leader retrieval can start."); break; case SUSPENDED: LOG.warn("Connection to ZooKeeper suspended, waiting for reconnection."); if (leaderInformationClearancePolicy == LeaderInformationClearancePolicy.ON_SUSPENDED_CONNECTION) { notifyNoLeader(); } break; case RECONNECTED: LOG.info( "Connection to ZooKeeper was reconnected. Leader retrieval can be restarted."); onReconnectedConnectionState(); break; case LOST: LOG.warn( "Connection to ZooKeeper lost. Can no longer retrieve the leader from " + "ZooKeeper."); notifyNoLeader(); break; } } private void notifyNoLeader() { leaderRetrievalEventHandler.notifyLeaderAddress(LeaderInformation.empty()); } private void onReconnectedConnectionState() { retrieveLeaderInformationFromZooKeeper(); } @Override public String toString() { return "ZookeeperLeaderRetrievalDriver{" + "connectionInformationPath='" + connectionInformationPath + '\'' + '}'; } @VisibleForTesting public String getConnectionInformationPath() { return connectionInformationPath; } /** Policy when to clear the leader information and to notify the listener about it. */ public enum LeaderInformationClearancePolicy { ON_SUSPENDED_CONNECTION, ON_LOST_CONNECTION } }
I think this looks like a bug, why do you increment the counter here?
public boolean triggerCheckpoint(long timestamp, boolean isPeriodic) { try { triggerCheckpoint(timestamp, checkpointProperties, null, isPeriodic, false); return true; } catch (CheckpointException e) { try { long latestGeneratedCheckpointId = getCheckpointIdCounter().getAndIncrement(); failureManager.handleCheckpointException(e, -1 * latestGeneratedCheckpointId); } catch (Exception e1) { LOG.warn("Get latest generated checkpoint id error : ", e1); } return false; } }
long latestGeneratedCheckpointId = getCheckpointIdCounter().getAndIncrement();
public boolean triggerCheckpoint(long timestamp, boolean isPeriodic) { try { triggerCheckpoint(timestamp, checkpointProperties, null, isPeriodic, false); return true; } catch (CheckpointException e) { long latestGeneratedCheckpointId = getCheckpointIdCounter().get(); failureManager.handleCheckpointException(e, -1 * latestGeneratedCheckpointId); return false; } }
class CheckpointCoordinator { private static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class); /** The number of recent checkpoints whose IDs are remembered. */ private static final int NUM_GHOST_CHECKPOINT_IDS = 16; /** Coordinator-wide lock to safeguard the checkpoint updates. */ private final Object lock = new Object(); /** Lock specially to make sure that trigger requests do not overtake each other. * This is not done with the coordinator-wide lock, because as part of triggering, * blocking operations may happen (distributed atomic counters). * Using a dedicated lock, we avoid blocking the processing of 'acknowledge/decline' * messages during that phase. */ private final Object triggerLock = new Object(); /** The job whose checkpoint this coordinator coordinates. */ private final JobID job; /** Default checkpoint properties. **/ private final CheckpointProperties checkpointProperties; /** The executor used for asynchronous calls, like potentially blocking I/O. */ private final Executor executor; /** Tasks who need to be sent a message when a checkpoint is started. */ private final ExecutionVertex[] tasksToTrigger; /** Tasks who need to acknowledge a checkpoint before it succeeds. */ private final ExecutionVertex[] tasksToWaitFor; /** Tasks who need to be sent a message when a checkpoint is confirmed. */ private final ExecutionVertex[] tasksToCommitTo; /** Map from checkpoint ID to the pending checkpoint. */ private final Map<Long, PendingCheckpoint> pendingCheckpoints; /** Completed checkpoints. Implementations can be blocking. Make sure calls to methods * accessing this don't block the job manager actor and run asynchronously. */ private final CompletedCheckpointStore completedCheckpointStore; /** The root checkpoint state backend, which is responsible for initializing the * checkpoint, storing the metadata, and cleaning up the checkpoint. 
*/ private final CheckpointStorageCoordinatorView checkpointStorage; /** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */ private final ArrayDeque<Long> recentPendingCheckpoints; /** Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these * need to be ascending across job managers. */ private final CheckpointIDCounter checkpointIdCounter; /** The base checkpoint interval. Actual trigger time may be affected by the * max concurrent checkpoints and minimum-pause values */ private final long baseInterval; /** The max time (in ms) that a checkpoint may take. */ private final long checkpointTimeout; /** The min time(in ns) to delay after a checkpoint could be triggered. Allows to * enforce minimum processing time between checkpoint attempts */ private final long minPauseBetweenCheckpointsNanos; /** The maximum number of checkpoints that may be in progress at the same time. */ private final int maxConcurrentCheckpointAttempts; /** The timer that handles the checkpoint timeouts and triggers periodic checkpoints. */ private final ScheduledThreadPoolExecutor timer; /** The master checkpoint hooks executed by this checkpoint coordinator. */ private final HashMap<String, MasterTriggerRestoreHook<?>> masterHooks; /** Actor that receives status updates from the execution graph this coordinator works for. */ private JobStatusListener jobStatusListener; /** The number of consecutive failed trigger attempts. */ private final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0); /** A handle to the current periodic trigger, to cancel it when necessary. */ private ScheduledFuture<?> currentPeriodicTrigger; /** The timestamp (via {@link System private long lastCheckpointCompletionNanos; /** Flag whether a triggered checkpoint should immediately schedule the next checkpoint. 
* Non-volatile, because only accessed in synchronized scope */ private boolean periodicScheduling; /** Flag whether a trigger request could not be handled immediately. Non-volatile, because only * accessed in synchronized scope */ private boolean triggerRequestQueued; /** Flag marking the coordinator as shut down (not accepting any messages any more). */ private volatile boolean shutdown; /** Optional tracker for checkpoint statistics. */ @Nullable private CheckpointStatsTracker statsTracker; /** A factory for SharedStateRegistry objects. */ private final SharedStateRegistryFactory sharedStateRegistryFactory; /** Registry that tracks state which is shared across (incremental) checkpoints. */ private SharedStateRegistry sharedStateRegistry; private final CheckpointFailureManager failureManager; public CheckpointCoordinator( JobID job, long baseInterval, long checkpointTimeout, long minPauseBetweenCheckpoints, int maxConcurrentCheckpointAttempts, CheckpointRetentionPolicy retentionPolicy, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, StateBackend checkpointStateBackend, Executor executor, SharedStateRegistryFactory sharedStateRegistryFactory, CheckpointFailureManager failureManager) { checkNotNull(checkpointStateBackend); checkArgument(baseInterval > 0, "Checkpoint base interval must be larger than zero"); checkArgument(checkpointTimeout >= 1, "Checkpoint timeout must be larger than zero"); checkArgument(minPauseBetweenCheckpoints >= 0, "minPauseBetweenCheckpoints must be >= 0"); checkArgument(maxConcurrentCheckpointAttempts >= 1, "maxConcurrentCheckpointAttempts must be >= 1"); if (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) { minPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000; } if (baseInterval < minPauseBetweenCheckpoints) { baseInterval = minPauseBetweenCheckpoints; } this.job = 
checkNotNull(job); this.baseInterval = baseInterval; this.checkpointTimeout = checkpointTimeout; this.minPauseBetweenCheckpointsNanos = minPauseBetweenCheckpoints * 1_000_000; this.maxConcurrentCheckpointAttempts = maxConcurrentCheckpointAttempts; this.tasksToTrigger = checkNotNull(tasksToTrigger); this.tasksToWaitFor = checkNotNull(tasksToWaitFor); this.tasksToCommitTo = checkNotNull(tasksToCommitTo); this.pendingCheckpoints = new LinkedHashMap<>(); this.checkpointIdCounter = checkNotNull(checkpointIDCounter); this.completedCheckpointStore = checkNotNull(completedCheckpointStore); this.executor = checkNotNull(executor); this.sharedStateRegistryFactory = checkNotNull(sharedStateRegistryFactory); this.sharedStateRegistry = sharedStateRegistryFactory.create(executor); this.failureManager = checkNotNull(failureManager); this.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS); this.masterHooks = new HashMap<>(); this.timer = new ScheduledThreadPoolExecutor(1, new DispatcherThreadFactory(Thread.currentThread().getThreadGroup(), "Checkpoint Timer")); this.timer.setRemoveOnCancelPolicy(true); this.timer.setContinueExistingPeriodicTasksAfterShutdownPolicy(false); this.timer.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); this.checkpointProperties = CheckpointProperties.forCheckpoint(retentionPolicy); try { this.checkpointStorage = checkpointStateBackend.createCheckpointStorage(job); } catch (IOException e) { throw new FlinkRuntimeException("Failed to create checkpoint storage at checkpoint coordinator side.", e); } try { checkpointIDCounter.start(); } catch (Throwable t) { throw new RuntimeException("Failed to start checkpoint ID counter: " + t.getMessage(), t); } } /** * Adds the given master hook to the checkpoint coordinator. This method does nothing, if * the checkpoint coordinator already contained a hook with the same ID (as defined via * {@link MasterTriggerRestoreHook * * @param hook The hook to add. 
* @return True, if the hook was added, false if the checkpoint coordinator already * contained a hook with the same ID. */ public boolean addMasterHook(MasterTriggerRestoreHook<?> hook) { checkNotNull(hook); final String id = hook.getIdentifier(); checkArgument(!StringUtils.isNullOrWhitespaceOnly(id), "The hook has a null or empty id"); synchronized (lock) { if (!masterHooks.containsKey(id)) { masterHooks.put(id, hook); return true; } else { return false; } } } /** * Gets the number of currently register master hooks. */ public int getNumberOfRegisteredMasterHooks() { synchronized (lock) { return masterHooks.size(); } } /** * Sets the checkpoint stats tracker. * * @param statsTracker The checkpoint stats tracker. */ public void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) { this.statsTracker = statsTracker; } /** * Shuts down the checkpoint coordinator. * * <p>After this method has been called, the coordinator does not accept * and further messages and cannot trigger any further checkpoints. */ public void shutdown(JobStatus jobStatus) throws Exception { synchronized (lock) { if (!shutdown) { shutdown = true; LOG.info("Stopping checkpoint coordinator for job {}.", job); periodicScheduling = false; triggerRequestQueued = false; MasterHooks.close(masterHooks.values(), LOG); masterHooks.clear(); timer.shutdownNow(); for (PendingCheckpoint pending : pendingCheckpoints.values()) { failPendingCheckpoint(pending, CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); } pendingCheckpoints.clear(); completedCheckpointStore.shutdown(jobStatus); checkpointIdCounter.shutdown(jobStatus); } } } public boolean isShutdown() { return shutdown; } /** * Triggers a savepoint with the given savepoint directory as a target. * * @param timestamp The timestamp for the savepoint. * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. 
* @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSavepoint( final long timestamp, @Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSavepoint(); return triggerSavepointInternal(timestamp, properties, false, targetLocation); } /** * Triggers a synchronous savepoint with the given savepoint directory as a target. * * @param timestamp The timestamp for the savepoint. * @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline * to fire any registered event-time timers. * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. * @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSynchronousSavepoint( final long timestamp, final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSyncSavepoint(); return triggerSavepointInternal(timestamp, properties, advanceToEndOfEventTime, targetLocation); } private CompletableFuture<CompletedCheckpoint> triggerSavepointInternal( final long timestamp, final CheckpointProperties checkpointProperties, final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { checkNotNull(checkpointProperties); try { PendingCheckpoint pendingCheckpoint = triggerCheckpoint( timestamp, checkpointProperties, targetLocation, false, advanceToEndOfEventTime); return pendingCheckpoint.getCompletionFuture(); } catch (CheckpointException e) { Throwable cause = new CheckpointException("Failed to trigger 
savepoint.", e.getCheckpointFailureReason()); return FutureUtils.completedExceptionally(cause); } } /** * Triggers a new standard checkpoint and uses the given timestamp as the checkpoint * timestamp. * * @param timestamp The timestamp for the checkpoint. * @param isPeriodic Flag indicating whether this triggered checkpoint is * periodic. If this flag is true, but the periodic scheduler is disabled, * the checkpoint will be declined. * @return <code>true</code> if triggering the checkpoint succeeded. */ @VisibleForTesting public PendingCheckpoint triggerCheckpoint( long timestamp, CheckpointProperties props, @Nullable String externalSavepointLocation, boolean isPeriodic, boolean advanceToEndOfTime) throws CheckpointException { if (advanceToEndOfTime && !(props.isSynchronous() && props.isSavepoint())) { throw new IllegalArgumentException("Only synchronous savepoints are allowed to advance the watermark to MAX."); } synchronized (lock) { if (shutdown) { throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); } if (isPeriodic && !periodicScheduling) { throw new CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN); } if (!props.forceCheckpoint()) { if (triggerRequestQueued) { LOG.warn("Trying to trigger another checkpoint for job {} while one was queued already.", job); throw new CheckpointException(CheckpointFailureReason.ALREADY_QUEUED); } if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) { triggerRequestQueued = true; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } throw new CheckpointException(CheckpointFailureReason.TOO_MANY_CONCURRENT_CHECKPOINTS); } final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos; final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000; if (durationTillNextMillis > 0) { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); 
currentPeriodicTrigger = null; } currentPeriodicTrigger = timer.scheduleAtFixedRate( new ScheduledTrigger(), durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS); throw new CheckpointException(CheckpointFailureReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS); } } } Execution[] executions = new Execution[tasksToTrigger.length]; for (int i = 0; i < tasksToTrigger.length; i++) { Execution ee = tasksToTrigger[i].getCurrentExecutionAttempt(); if (ee == null) { LOG.info("Checkpoint triggering task {} of job {} is not being executed at the moment. Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex(), job); throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } else if (ee.getState() == ExecutionState.RUNNING) { executions[i] = ee; } else { LOG.info("Checkpoint triggering task {} of job {} is not in state {} but {} instead. Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex(), job, ExecutionState.RUNNING, ee.getState()); throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } Map<ExecutionAttemptID, ExecutionVertex> ackTasks = new HashMap<>(tasksToWaitFor.length); for (ExecutionVertex ev : tasksToWaitFor) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ackTasks.put(ee.getAttemptId(), ev); } else { LOG.info("Checkpoint acknowledging task {} of job {} is not being executed at the moment. Aborting checkpoint.", ev.getTaskNameWithSubtaskIndex(), job); throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } synchronized (triggerLock) { final CheckpointStorageLocation checkpointStorageLocation; final long checkpointID; try { checkpointID = checkpointIdCounter.getAndIncrement(); checkpointStorageLocation = props.isSavepoint() ? 
checkpointStorage.initializeLocationForSavepoint(checkpointID, externalSavepointLocation) : checkpointStorage.initializeLocationForCheckpoint(checkpointID); } catch (Throwable t) { int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet(); LOG.warn("Failed to trigger checkpoint for job {} ({} consecutive failed attempts so far).", job, numUnsuccessful, t); throw new CheckpointException(CheckpointFailureReason.EXCEPTION, t); } final PendingCheckpoint checkpoint = new PendingCheckpoint( job, checkpointID, timestamp, ackTasks, props, checkpointStorageLocation, executor); if (statsTracker != null) { PendingCheckpointStats callback = statsTracker.reportPendingCheckpoint( checkpointID, timestamp, props); checkpoint.setStatsCallback(callback); } final Runnable canceller = () -> { synchronized (lock) { if (!checkpoint.isDiscarded()) { LOG.info("Checkpoint {} of job {} expired before completing.", checkpointID, job); failPendingCheckpoint(checkpoint, CheckpointFailureReason.CHECKPOINT_EXPIRED); pendingCheckpoints.remove(checkpointID); rememberRecentCheckpointId(checkpointID); triggerQueuedRequests(); } } }; try { synchronized (lock) { if (shutdown) { throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); } else if (!props.forceCheckpoint()) { if (triggerRequestQueued) { LOG.warn("Trying to trigger another checkpoint for job {} while one was queued already.", job); throw new CheckpointException(CheckpointFailureReason.ALREADY_QUEUED); } if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) { triggerRequestQueued = true; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } throw new CheckpointException(CheckpointFailureReason.TOO_MANY_CONCURRENT_CHECKPOINTS); } final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos; final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000; if (durationTillNextMillis 
> 0) { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } currentPeriodicTrigger = timer.scheduleAtFixedRate( new ScheduledTrigger(), durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS); throw new CheckpointException(CheckpointFailureReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS); } } LOG.info("Triggering checkpoint {} @ {} for job {}.", checkpointID, timestamp, job); pendingCheckpoints.put(checkpointID, checkpoint); ScheduledFuture<?> cancellerHandle = timer.schedule( canceller, checkpointTimeout, TimeUnit.MILLISECONDS); if (!checkpoint.setCancellerHandle(cancellerHandle)) { cancellerHandle.cancel(false); } final List<MasterState> masterStates = MasterHooks.triggerMasterHooks(masterHooks.values(), checkpointID, timestamp, executor, Time.milliseconds(checkpointTimeout)); for (MasterState s : masterStates) { checkpoint.addMasterState(s); } } final CheckpointOptions checkpointOptions = new CheckpointOptions( props.getCheckpointType(), checkpointStorageLocation.getLocationReference()); for (Execution execution: executions) { if (props.isSynchronous()) { execution.triggerSynchronousSavepoint(checkpointID, timestamp, checkpointOptions, advanceToEndOfTime); } else { execution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions); } } numUnsuccessfulCheckpointsTriggers.set(0); return checkpoint; } catch (Throwable t) { synchronized (lock) { pendingCheckpoints.remove(checkpointID); } int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet(); LOG.warn("Failed to trigger checkpoint {} for job {}. 
({} consecutive failed attempts so far)", checkpointID, job, numUnsuccessful, t); if (!checkpoint.isDiscarded()) { failPendingCheckpoint(checkpoint, CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, t); } try { checkpointStorageLocation.disposeOnFailure(); } catch (Throwable t2) { LOG.warn("Cannot dispose failed checkpoint storage location {}", checkpointStorageLocation, t2); } throw new CheckpointException(CheckpointFailureReason.EXCEPTION, t); } } } /** * Receives a {@link DeclineCheckpoint} message for a pending checkpoint. * * @param message Checkpoint decline from the task manager */ public void receiveDeclineMessage(DeclineCheckpoint message) { if (shutdown || message == null) { return; } if (!job.equals(message.getJob())) { throw new IllegalArgumentException("Received DeclineCheckpoint message for job " + message.getJob() + " while this coordinator handles job " + job); } final long checkpointId = message.getCheckpointId(); final String reason = (message.getReason() != null ? message.getReason().getMessage() : ""); PendingCheckpoint checkpoint; synchronized (lock) { if (shutdown) { return; } checkpoint = pendingCheckpoints.remove(checkpointId); if (checkpoint != null && !checkpoint.isDiscarded()) { LOG.info("Decline checkpoint {} by task {} of job {}.", checkpointId, message.getTaskExecutionId(), job); discardCheckpoint(checkpoint, message.getReason()); } else if (checkpoint != null) { throw new IllegalStateException( "Received message for discarded but non-removed checkpoint " + checkpointId); } else if (LOG.isDebugEnabled()) { if (recentPendingCheckpoints.contains(checkpointId)) { LOG.debug("Received another decline message for now expired checkpoint attempt {} of job {} : {}", checkpointId, job, reason); } else { LOG.debug("Received decline message for unknown (too old?) 
checkpoint attempt {} of job {} : {}", checkpointId, job, reason); } } } } /** * Receives an AcknowledgeCheckpoint message and returns whether the * message was associated with a pending checkpoint. * * @param message Checkpoint ack from the task manager * * @return Flag indicating whether the ack'd checkpoint was associated * with a pending checkpoint. * * @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store. */ public boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message) throws CheckpointException { if (shutdown || message == null) { return false; } if (!job.equals(message.getJob())) { LOG.error("Received wrong AcknowledgeCheckpoint message for job {}: {}", job, message); return false; } final long checkpointId = message.getCheckpointId(); synchronized (lock) { if (shutdown) { return false; } final PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null && !checkpoint.isDiscarded()) { switch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) { case SUCCESS: LOG.debug("Received acknowledge message for checkpoint {} from task {} of job {}.", checkpointId, message.getTaskExecutionId(), message.getJob()); if (checkpoint.isFullyAcknowledged()) { completePendingCheckpoint(checkpoint); } break; case DUPLICATE: LOG.debug("Received a duplicate acknowledge message for checkpoint {}, task {}, job {}.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob()); break; case UNKNOWN: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {}, " + "because the task's execution attempt id was unknown. 
Discarding " + "the state handle to avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob()); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); break; case DISCARDED: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {}, " + "because the pending checkpoint had been discarded. Discarding the " + "state handle tp avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob()); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); } return true; } else if (checkpoint != null) { throw new IllegalStateException( "Received message for discarded but non-removed checkpoint " + checkpointId); } else { boolean wasPendingCheckpoint; if (recentPendingCheckpoints.contains(checkpointId)) { wasPendingCheckpoint = true; LOG.warn("Received late message for now expired checkpoint attempt {} from " + "{} of job {}.", checkpointId, message.getTaskExecutionId(), message.getJob()); } else { LOG.debug("Received message for an unknown checkpoint {} from {} of job {}.", checkpointId, message.getTaskExecutionId(), message.getJob()); wasPendingCheckpoint = false; } discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); return wasPendingCheckpoint; } } } /** * Try to complete the given pending checkpoint. * * <p>Important: This method should only be called in the checkpoint lock scope. 
* * @param pendingCheckpoint to complete * @throws CheckpointException if the completion failed */ private void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException { final long checkpointId = pendingCheckpoint.getCheckpointId(); final CompletedCheckpoint completedCheckpoint; Map<OperatorID, OperatorState> operatorStates = pendingCheckpoint.getOperatorStates(); sharedStateRegistry.registerAll(operatorStates.values()); try { try { completedCheckpoint = pendingCheckpoint.finalizeCheckpoint(); failureManager.handleCheckpointSuccess(pendingCheckpoint.getCheckpointId()); } catch (Exception e1) { if (!pendingCheckpoint.isDiscarded()) { failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1); } throw new CheckpointException("Could not finalize the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1); } Preconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null); try { completedCheckpointStore.addCheckpoint(completedCheckpoint); } catch (Exception exception) { executor.execute(new Runnable() { @Override public void run() { try { completedCheckpoint.discardOnFailedStoring(); } catch (Throwable t) { LOG.warn("Could not properly discard completed checkpoint {}.", completedCheckpoint.getCheckpointID(), t); } } }); throw new CheckpointException("Could not complete the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, exception); } } finally { pendingCheckpoints.remove(checkpointId); triggerQueuedRequests(); } rememberRecentCheckpointId(checkpointId); dropSubsumedCheckpoints(checkpointId); lastCheckpointCompletionNanos = System.nanoTime(); LOG.info("Completed checkpoint {} for job {} ({} bytes in {} ms).", checkpointId, job, completedCheckpoint.getStateSize(), completedCheckpoint.getDuration()); if (LOG.isDebugEnabled()) { StringBuilder builder = new StringBuilder(); 
builder.append("Checkpoint state: "); for (OperatorState state : completedCheckpoint.getOperatorStates().values()) { builder.append(state); builder.append(", "); } builder.setLength(builder.length() - 2); LOG.debug(builder.toString()); } final long timestamp = completedCheckpoint.getTimestamp(); for (ExecutionVertex ev : tasksToCommitTo) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ee.notifyCheckpointComplete(checkpointId, timestamp); } } } /** * Fails all pending checkpoints which have not been acknowledged by the given execution * attempt id. * * @param executionAttemptId for which to discard unacknowledged pending checkpoints * @param cause of the failure */ public void failUnacknowledgedPendingCheckpointsFor(ExecutionAttemptID executionAttemptId, Throwable cause) { synchronized (lock) { Iterator<PendingCheckpoint> pendingCheckpointIterator = pendingCheckpoints.values().iterator(); while (pendingCheckpointIterator.hasNext()) { final PendingCheckpoint pendingCheckpoint = pendingCheckpointIterator.next(); if (!pendingCheckpoint.isAcknowledgedBy(executionAttemptId)) { pendingCheckpointIterator.remove(); discardCheckpoint(pendingCheckpoint, cause); } } } } private void rememberRecentCheckpointId(long id) { if (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) { recentPendingCheckpoints.removeFirst(); } recentPendingCheckpoints.addLast(id); } private void dropSubsumedCheckpoints(long checkpointId) { Iterator<Map.Entry<Long, PendingCheckpoint>> entries = pendingCheckpoints.entrySet().iterator(); while (entries.hasNext()) { PendingCheckpoint p = entries.next().getValue(); if (p.getCheckpointId() < checkpointId && p.canBeSubsumed()) { rememberRecentCheckpointId(p.getCheckpointId()); failPendingCheckpoint(p, CheckpointFailureReason.CHECKPOINT_SUBSUMED); entries.remove(); } } } /** * Triggers the queued request, if there is one. * * <p>NOTE: The caller of this method must hold the lock when invoking the method! 
*/ private void triggerQueuedRequests() { if (triggerRequestQueued) { triggerRequestQueued = false; if (periodicScheduling) { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); } currentPeriodicTrigger = timer.scheduleAtFixedRate( new ScheduledTrigger(), 0L, baseInterval, TimeUnit.MILLISECONDS); } else { timer.execute(new ScheduledTrigger()); } } } @VisibleForTesting int getNumScheduledTasks() { return timer.getQueue().size(); } /** * Restores the latest checkpointed state. * * @param tasks Map of job vertices to restore. State for these vertices is * restored via {@link Execution * @param errorIfNoCheckpoint Fail if no completed checkpoint is available to * restore from. * @param allowNonRestoredState Allow checkpoint state that cannot be mapped * to any job vertex in tasks. * @return <code>true</code> if state was restored, <code>false</code> otherwise. * @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. * @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. 
*/ public boolean restoreLatestCheckpointedState( Map<JobVertexID, ExecutionJobVertex> tasks, boolean errorIfNoCheckpoint, boolean allowNonRestoredState) throws Exception { synchronized (lock) { if (shutdown) { throw new IllegalStateException("CheckpointCoordinator is shut down"); } sharedStateRegistry.close(); sharedStateRegistry = sharedStateRegistryFactory.create(executor); completedCheckpointStore.recover(); for (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) { completedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry); } LOG.debug("Status of the shared state registry of job {} after restore: {}.", job, sharedStateRegistry); CompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint(); if (latest == null) { if (errorIfNoCheckpoint) { throw new IllegalStateException("No completed checkpoint available"); } else { LOG.debug("Resetting the master hooks."); MasterHooks.reset(masterHooks.values(), LOG); return false; } } LOG.info("Restoring job {} from latest valid checkpoint: {}.", job, latest); final Map<OperatorID, OperatorState> operatorStates = latest.getOperatorStates(); StateAssignmentOperation stateAssignmentOperation = new StateAssignmentOperation(latest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState); stateAssignmentOperation.assignStates(); MasterHooks.restoreMasterHooks( masterHooks, latest.getMasterHookStates(), latest.getCheckpointID(), allowNonRestoredState, LOG); if (statsTracker != null) { long restoreTimestamp = System.currentTimeMillis(); RestoredCheckpointStats restored = new RestoredCheckpointStats( latest.getCheckpointID(), latest.getProperties(), restoreTimestamp, latest.getExternalPointer()); statsTracker.reportRestoredCheckpoint(restored); } return true; } } /** * Restore the state with given savepoint. * * @param savepointPointer The pointer to the savepoint. 
* @param allowNonRestored True if allowing checkpoint state that cannot be * mapped to any job vertex in tasks. * @param tasks Map of job vertices to restore. State for these * vertices is restored via * {@link Execution * @param userClassLoader The class loader to resolve serialized classes in * legacy savepoint versions. */ public boolean restoreSavepoint( String savepointPointer, boolean allowNonRestored, Map<JobVertexID, ExecutionJobVertex> tasks, ClassLoader userClassLoader) throws Exception { Preconditions.checkNotNull(savepointPointer, "The savepoint path cannot be null."); LOG.info("Starting job {} from savepoint {} ({})", job, savepointPointer, (allowNonRestored ? "allowing non restored state" : "")); final CompletedCheckpointStorageLocation checkpointLocation = checkpointStorage.resolveCheckpoint(savepointPointer); CompletedCheckpoint savepoint = Checkpoints.loadAndValidateCheckpoint( job, tasks, checkpointLocation, userClassLoader, allowNonRestored); completedCheckpointStore.addCheckpoint(savepoint); long nextCheckpointId = savepoint.getCheckpointID() + 1; checkpointIdCounter.setCount(nextCheckpointId); LOG.info("Reset the checkpoint ID of job {} to {}.", job, nextCheckpointId); return restoreLatestCheckpointedState(tasks, true, allowNonRestored); } public int getNumberOfPendingCheckpoints() { return this.pendingCheckpoints.size(); } public int getNumberOfRetainedSuccessfulCheckpoints() { synchronized (lock) { return completedCheckpointStore.getNumberOfRetainedCheckpoints(); } } public Map<Long, PendingCheckpoint> getPendingCheckpoints() { synchronized (lock) { return new HashMap<>(this.pendingCheckpoints); } } public List<CompletedCheckpoint> getSuccessfulCheckpoints() throws Exception { synchronized (lock) { return completedCheckpointStore.getAllCheckpoints(); } } public CheckpointStorageCoordinatorView getCheckpointStorage() { return checkpointStorage; } public CompletedCheckpointStore getCheckpointStore() { return completedCheckpointStore; } public 
CheckpointIDCounter getCheckpointIdCounter() { return checkpointIdCounter; } public long getCheckpointTimeout() { return checkpointTimeout; } /** * Returns whether periodic checkpointing has been configured. * * @return <code>true</code> if periodic checkpoints have been configured. */ public boolean isPeriodicCheckpointingConfigured() { return baseInterval != Long.MAX_VALUE; } public void startCheckpointScheduler() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } stopCheckpointScheduler(); periodicScheduling = true; long initialDelay = ThreadLocalRandom.current().nextLong( minPauseBetweenCheckpointsNanos / 1_000_000L, baseInterval + 1L); currentPeriodicTrigger = timer.scheduleAtFixedRate( new ScheduledTrigger(), initialDelay, baseInterval, TimeUnit.MILLISECONDS); } } public void stopCheckpointScheduler() { synchronized (lock) { triggerRequestQueued = false; periodicScheduling = false; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } abortPendingCheckpoints(new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND)); numUnsuccessfulCheckpointsTriggers.set(0); } } /** * Aborts all the pending checkpoints due to en exception. * @param exception The exception. 
*/ public void abortPendingCheckpoints(CheckpointException exception) { synchronized (lock) { for (PendingCheckpoint p : pendingCheckpoints.values()) { failPendingCheckpoint(p, exception.getCheckpointFailureReason()); } pendingCheckpoints.clear(); } } public JobStatusListener createActivatorDeactivator() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } if (jobStatusListener == null) { jobStatusListener = new CheckpointCoordinatorDeActivator(this); } return jobStatusListener; } } private final class ScheduledTrigger implements Runnable { @Override public void run() { try { triggerCheckpoint(System.currentTimeMillis(), true); } catch (Exception e) { LOG.error("Exception while triggering checkpoint for job {}.", job, e); } } } /** * Discards the given pending checkpoint because of the given cause. * * @param pendingCheckpoint to discard * @param cause for discarding the checkpoint */ private void discardCheckpoint(PendingCheckpoint pendingCheckpoint, @Nullable Throwable cause) { assert(Thread.holdsLock(lock)); Preconditions.checkNotNull(pendingCheckpoint); final long checkpointId = pendingCheckpoint.getCheckpointId(); LOG.info("Discarding checkpoint {} of job {}.", checkpointId, job, cause); if (cause == null || cause instanceof CheckpointDeclineException) { failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.CHECKPOINT_DECLINED, cause); } else { failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.JOB_FAILURE, cause); } rememberRecentCheckpointId(checkpointId); boolean haveMoreRecentPending = false; for (PendingCheckpoint p : pendingCheckpoints.values()) { if (!p.isDiscarded() && p.getCheckpointId() >= pendingCheckpoint.getCheckpointId()) { haveMoreRecentPending = true; break; } } if (!haveMoreRecentPending) { triggerQueuedRequests(); } } /** * Discards the given state object asynchronously belonging to the given job, execution attempt * id and checkpoint id. 
* * @param jobId identifying the job to which the state object belongs * @param executionAttemptID identifying the task to which the state object belongs * @param checkpointId of the state object * @param subtaskState to discard asynchronously */ private void discardSubtaskState( final JobID jobId, final ExecutionAttemptID executionAttemptID, final long checkpointId, final TaskStateSnapshot subtaskState) { if (subtaskState != null) { executor.execute(new Runnable() { @Override public void run() { try { subtaskState.discardState(); } catch (Throwable t2) { LOG.warn("Could not properly discard state object of checkpoint {} " + "belonging to task {} of job {}.", checkpointId, executionAttemptID, jobId, t2); } } }); } } private void failPendingCheckpoint( final PendingCheckpoint pendingCheckpoint, final CheckpointFailureReason reason, final Throwable cause) { CheckpointException exception = new CheckpointException(reason, cause); if (cause != null) { pendingCheckpoint.abort(reason, cause); } else { pendingCheckpoint.abort(reason); } failureManager.handleCheckpointException(exception, pendingCheckpoint.getCheckpointId()); } private void failPendingCheckpoint( final PendingCheckpoint pendingCheckpoint, final CheckpointFailureReason reason) { failPendingCheckpoint(pendingCheckpoint, reason, null); } }
class CheckpointCoordinator { private static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class); /** The number of recent checkpoints whose IDs are remembered. */ private static final int NUM_GHOST_CHECKPOINT_IDS = 16; /** Coordinator-wide lock to safeguard the checkpoint updates. */ private final Object lock = new Object(); /** Lock specially to make sure that trigger requests do not overtake each other. * This is not done with the coordinator-wide lock, because as part of triggering, * blocking operations may happen (distributed atomic counters). * Using a dedicated lock, we avoid blocking the processing of 'acknowledge/decline' * messages during that phase. */ private final Object triggerLock = new Object(); /** The job whose checkpoint this coordinator coordinates. */ private final JobID job; /** Default checkpoint properties. **/ private final CheckpointProperties checkpointProperties; /** The executor used for asynchronous calls, like potentially blocking I/O. */ private final Executor executor; /** Tasks who need to be sent a message when a checkpoint is started. */ private final ExecutionVertex[] tasksToTrigger; /** Tasks who need to acknowledge a checkpoint before it succeeds. */ private final ExecutionVertex[] tasksToWaitFor; /** Tasks who need to be sent a message when a checkpoint is confirmed. */ private final ExecutionVertex[] tasksToCommitTo; /** Map from checkpoint ID to the pending checkpoint. */ private final Map<Long, PendingCheckpoint> pendingCheckpoints; /** Completed checkpoints. Implementations can be blocking. Make sure calls to methods * accessing this don't block the job manager actor and run asynchronously. */ private final CompletedCheckpointStore completedCheckpointStore; /** The root checkpoint state backend, which is responsible for initializing the * checkpoint, storing the metadata, and cleaning up the checkpoint. 
*/ private final CheckpointStorageCoordinatorView checkpointStorage; /** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */ private final ArrayDeque<Long> recentPendingCheckpoints; /** Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these * need to be ascending across job managers. */ private final CheckpointIDCounter checkpointIdCounter; /** The base checkpoint interval. Actual trigger time may be affected by the * max concurrent checkpoints and minimum-pause values */ private final long baseInterval; /** The max time (in ms) that a checkpoint may take. */ private final long checkpointTimeout; /** The min time(in ns) to delay after a checkpoint could be triggered. Allows to * enforce minimum processing time between checkpoint attempts */ private final long minPauseBetweenCheckpointsNanos; /** The maximum number of checkpoints that may be in progress at the same time. */ private final int maxConcurrentCheckpointAttempts; /** The timer that handles the checkpoint timeouts and triggers periodic checkpoints. */ private final ScheduledThreadPoolExecutor timer; /** The master checkpoint hooks executed by this checkpoint coordinator. */ private final HashMap<String, MasterTriggerRestoreHook<?>> masterHooks; /** Actor that receives status updates from the execution graph this coordinator works for. */ private JobStatusListener jobStatusListener; /** The number of consecutive failed trigger attempts. */ private final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0); /** A handle to the current periodic trigger, to cancel it when necessary. */ private ScheduledFuture<?> currentPeriodicTrigger; /** The timestamp (via {@link System private long lastCheckpointCompletionNanos; /** Flag whether a triggered checkpoint should immediately schedule the next checkpoint. 
* Non-volatile, because only accessed in synchronized scope */ private boolean periodicScheduling; /** Flag whether a trigger request could not be handled immediately. Non-volatile, because only * accessed in synchronized scope */ private boolean triggerRequestQueued; /** Flag marking the coordinator as shut down (not accepting any messages any more). */ private volatile boolean shutdown; /** Optional tracker for checkpoint statistics. */ @Nullable private CheckpointStatsTracker statsTracker; /** A factory for SharedStateRegistry objects. */ private final SharedStateRegistryFactory sharedStateRegistryFactory; /** Registry that tracks state which is shared across (incremental) checkpoints. */ private SharedStateRegistry sharedStateRegistry; private boolean isPreferCheckpointForRecovery; private final CheckpointFailureManager failureManager; public CheckpointCoordinator( JobID job, CheckpointCoordinatorConfiguration chkConfig, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, StateBackend checkpointStateBackend, Executor executor, SharedStateRegistryFactory sharedStateRegistryFactory, CheckpointFailureManager failureManager) { checkNotNull(checkpointStateBackend); long minPauseBetweenCheckpoints = chkConfig.getMinPauseBetweenCheckpoints(); if (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) { minPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000; } long baseInterval = chkConfig.getCheckpointInterval(); if (baseInterval < minPauseBetweenCheckpoints) { baseInterval = minPauseBetweenCheckpoints; } this.job = checkNotNull(job); this.baseInterval = baseInterval; this.checkpointTimeout = chkConfig.getCheckpointTimeout(); this.minPauseBetweenCheckpointsNanos = minPauseBetweenCheckpoints * 1_000_000; this.maxConcurrentCheckpointAttempts = chkConfig.getMaxConcurrentCheckpoints(); this.tasksToTrigger = 
checkNotNull(tasksToTrigger); this.tasksToWaitFor = checkNotNull(tasksToWaitFor); this.tasksToCommitTo = checkNotNull(tasksToCommitTo); this.pendingCheckpoints = new LinkedHashMap<>(); this.checkpointIdCounter = checkNotNull(checkpointIDCounter); this.completedCheckpointStore = checkNotNull(completedCheckpointStore); this.executor = checkNotNull(executor); this.sharedStateRegistryFactory = checkNotNull(sharedStateRegistryFactory); this.sharedStateRegistry = sharedStateRegistryFactory.create(executor); this.isPreferCheckpointForRecovery = chkConfig.isPreferCheckpointForRecovery(); this.failureManager = checkNotNull(failureManager); this.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS); this.masterHooks = new HashMap<>(); this.timer = new ScheduledThreadPoolExecutor(1, new DispatcherThreadFactory(Thread.currentThread().getThreadGroup(), "Checkpoint Timer")); this.timer.setRemoveOnCancelPolicy(true); this.timer.setContinueExistingPeriodicTasksAfterShutdownPolicy(false); this.timer.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); this.checkpointProperties = CheckpointProperties.forCheckpoint(chkConfig.getCheckpointRetentionPolicy()); try { this.checkpointStorage = checkpointStateBackend.createCheckpointStorage(job); } catch (IOException e) { throw new FlinkRuntimeException("Failed to create checkpoint storage at checkpoint coordinator side.", e); } try { checkpointIDCounter.start(); } catch (Throwable t) { throw new RuntimeException("Failed to start checkpoint ID counter: " + t.getMessage(), t); } } /** * Adds the given master hook to the checkpoint coordinator. This method does nothing, if * the checkpoint coordinator already contained a hook with the same ID (as defined via * {@link MasterTriggerRestoreHook * * @param hook The hook to add. * @return True, if the hook was added, false if the checkpoint coordinator already * contained a hook with the same ID. 
*/ public boolean addMasterHook(MasterTriggerRestoreHook<?> hook) { checkNotNull(hook); final String id = hook.getIdentifier(); checkArgument(!StringUtils.isNullOrWhitespaceOnly(id), "The hook has a null or empty id"); synchronized (lock) { if (!masterHooks.containsKey(id)) { masterHooks.put(id, hook); return true; } else { return false; } } } /** * Gets the number of currently register master hooks. */ public int getNumberOfRegisteredMasterHooks() { synchronized (lock) { return masterHooks.size(); } } /** * Sets the checkpoint stats tracker. * * @param statsTracker The checkpoint stats tracker. */ public void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) { this.statsTracker = statsTracker; } /** * Shuts down the checkpoint coordinator. * * <p>After this method has been called, the coordinator does not accept * and further messages and cannot trigger any further checkpoints. */ public void shutdown(JobStatus jobStatus) throws Exception { synchronized (lock) { if (!shutdown) { shutdown = true; LOG.info("Stopping checkpoint coordinator for job {}.", job); periodicScheduling = false; triggerRequestQueued = false; MasterHooks.close(masterHooks.values(), LOG); masterHooks.clear(); timer.shutdownNow(); for (PendingCheckpoint pending : pendingCheckpoints.values()) { failPendingCheckpoint(pending, CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); } pendingCheckpoints.clear(); completedCheckpointStore.shutdown(jobStatus); checkpointIdCounter.shutdown(jobStatus); } } } public boolean isShutdown() { return shutdown; } /** * Triggers a savepoint with the given savepoint directory as a target. * * @param timestamp The timestamp for the savepoint. * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. 
* @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSavepoint( final long timestamp, @Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSavepoint(); return triggerSavepointInternal(timestamp, properties, false, targetLocation); } /** * Triggers a synchronous savepoint with the given savepoint directory as a target. * * @param timestamp The timestamp for the savepoint. * @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline * to fire any registered event-time timers. * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. * @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSynchronousSavepoint( final long timestamp, final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSyncSavepoint(); return triggerSavepointInternal(timestamp, properties, advanceToEndOfEventTime, targetLocation); } private CompletableFuture<CompletedCheckpoint> triggerSavepointInternal( final long timestamp, final CheckpointProperties checkpointProperties, final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { checkNotNull(checkpointProperties); try { PendingCheckpoint pendingCheckpoint = triggerCheckpoint( timestamp, checkpointProperties, targetLocation, false, advanceToEndOfEventTime); return pendingCheckpoint.getCompletionFuture(); } catch (CheckpointException e) { Throwable cause = new CheckpointException("Failed to trigger 
savepoint.", e.getCheckpointFailureReason()); return FutureUtils.completedExceptionally(cause); } } /** * Triggers a new standard checkpoint and uses the given timestamp as the checkpoint * timestamp. * * @param timestamp The timestamp for the checkpoint. * @param isPeriodic Flag indicating whether this triggered checkpoint is * periodic. If this flag is true, but the periodic scheduler is disabled, * the checkpoint will be declined. * @return <code>true</code> if triggering the checkpoint succeeded. */ @VisibleForTesting public PendingCheckpoint triggerCheckpoint( long timestamp, CheckpointProperties props, @Nullable String externalSavepointLocation, boolean isPeriodic, boolean advanceToEndOfTime) throws CheckpointException { if (advanceToEndOfTime && !(props.isSynchronous() && props.isSavepoint())) { throw new IllegalArgumentException("Only synchronous savepoints are allowed to advance the watermark to MAX."); } synchronized (lock) { if (shutdown) { throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); } if (isPeriodic && !periodicScheduling) { throw new CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN); } if (!props.forceCheckpoint()) { if (triggerRequestQueued) { LOG.warn("Trying to trigger another checkpoint for job {} while one was queued already.", job); throw new CheckpointException(CheckpointFailureReason.ALREADY_QUEUED); } if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) { triggerRequestQueued = true; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } throw new CheckpointException(CheckpointFailureReason.TOO_MANY_CONCURRENT_CHECKPOINTS); } final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos; final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000; if (durationTillNextMillis > 0) { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); 
currentPeriodicTrigger = null; } currentPeriodicTrigger = timer.scheduleAtFixedRate( new ScheduledTrigger(), durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS); throw new CheckpointException(CheckpointFailureReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS); } } } Execution[] executions = new Execution[tasksToTrigger.length]; for (int i = 0; i < tasksToTrigger.length; i++) { Execution ee = tasksToTrigger[i].getCurrentExecutionAttempt(); if (ee == null) { LOG.info("Checkpoint triggering task {} of job {} is not being executed at the moment. Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex(), job); throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } else if (ee.getState() == ExecutionState.RUNNING) { executions[i] = ee; } else { LOG.info("Checkpoint triggering task {} of job {} is not in state {} but {} instead. Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex(), job, ExecutionState.RUNNING, ee.getState()); throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } Map<ExecutionAttemptID, ExecutionVertex> ackTasks = new HashMap<>(tasksToWaitFor.length); for (ExecutionVertex ev : tasksToWaitFor) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ackTasks.put(ee.getAttemptId(), ev); } else { LOG.info("Checkpoint acknowledging task {} of job {} is not being executed at the moment. Aborting checkpoint.", ev.getTaskNameWithSubtaskIndex(), job); throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } synchronized (triggerLock) { final CheckpointStorageLocation checkpointStorageLocation; final long checkpointID; try { checkpointID = checkpointIdCounter.getAndIncrement(); checkpointStorageLocation = props.isSavepoint() ? 
checkpointStorage.initializeLocationForSavepoint(checkpointID, externalSavepointLocation) : checkpointStorage.initializeLocationForCheckpoint(checkpointID); } catch (Throwable t) { int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet(); LOG.warn("Failed to trigger checkpoint for job {} ({} consecutive failed attempts so far).", job, numUnsuccessful, t); throw new CheckpointException(CheckpointFailureReason.EXCEPTION, t); } final PendingCheckpoint checkpoint = new PendingCheckpoint( job, checkpointID, timestamp, ackTasks, props, checkpointStorageLocation, executor); if (statsTracker != null) { PendingCheckpointStats callback = statsTracker.reportPendingCheckpoint( checkpointID, timestamp, props); checkpoint.setStatsCallback(callback); } final Runnable canceller = () -> { synchronized (lock) { if (!checkpoint.isDiscarded()) { LOG.info("Checkpoint {} of job {} expired before completing.", checkpointID, job); failPendingCheckpoint(checkpoint, CheckpointFailureReason.CHECKPOINT_EXPIRED); pendingCheckpoints.remove(checkpointID); rememberRecentCheckpointId(checkpointID); triggerQueuedRequests(); } } }; try { synchronized (lock) { if (shutdown) { throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); } else if (!props.forceCheckpoint()) { if (triggerRequestQueued) { LOG.warn("Trying to trigger another checkpoint for job {} while one was queued already.", job); throw new CheckpointException(CheckpointFailureReason.ALREADY_QUEUED); } if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) { triggerRequestQueued = true; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } throw new CheckpointException(CheckpointFailureReason.TOO_MANY_CONCURRENT_CHECKPOINTS); } final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos; final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000; if (durationTillNextMillis 
> 0) { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } currentPeriodicTrigger = timer.scheduleAtFixedRate( new ScheduledTrigger(), durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS); throw new CheckpointException(CheckpointFailureReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS); } } LOG.info("Triggering checkpoint {} @ {} for job {}.", checkpointID, timestamp, job); pendingCheckpoints.put(checkpointID, checkpoint); ScheduledFuture<?> cancellerHandle = timer.schedule( canceller, checkpointTimeout, TimeUnit.MILLISECONDS); if (!checkpoint.setCancellerHandle(cancellerHandle)) { cancellerHandle.cancel(false); } final List<MasterState> masterStates = MasterHooks.triggerMasterHooks(masterHooks.values(), checkpointID, timestamp, executor, Time.milliseconds(checkpointTimeout)); for (MasterState s : masterStates) { checkpoint.addMasterState(s); } } final CheckpointOptions checkpointOptions = new CheckpointOptions( props.getCheckpointType(), checkpointStorageLocation.getLocationReference()); for (Execution execution: executions) { if (props.isSynchronous()) { execution.triggerSynchronousSavepoint(checkpointID, timestamp, checkpointOptions, advanceToEndOfTime); } else { execution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions); } } numUnsuccessfulCheckpointsTriggers.set(0); return checkpoint; } catch (Throwable t) { synchronized (lock) { pendingCheckpoints.remove(checkpointID); } int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet(); LOG.warn("Failed to trigger checkpoint {} for job {}. 
({} consecutive failed attempts so far)", checkpointID, job, numUnsuccessful, t); if (!checkpoint.isDiscarded()) { failPendingCheckpoint(checkpoint, CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, t); } try { checkpointStorageLocation.disposeOnFailure(); } catch (Throwable t2) { LOG.warn("Cannot dispose failed checkpoint storage location {}", checkpointStorageLocation, t2); } throw new CheckpointException(CheckpointFailureReason.EXCEPTION, t); } } } /** * Receives a {@link DeclineCheckpoint} message for a pending checkpoint. * * @param message Checkpoint decline from the task manager * @param taskManagerLocationInfo The location info of the decline checkpoint message's sender */ public void receiveDeclineMessage(DeclineCheckpoint message, String taskManagerLocationInfo) { if (shutdown || message == null) { return; } if (!job.equals(message.getJob())) { throw new IllegalArgumentException("Received DeclineCheckpoint message for job " + message.getJob() + " from " + taskManagerLocationInfo + " while this coordinator handles job " + job); } final long checkpointId = message.getCheckpointId(); final String reason = (message.getReason() != null ? 
message.getReason().getMessage() : ""); PendingCheckpoint checkpoint; synchronized (lock) { if (shutdown) { return; } checkpoint = pendingCheckpoints.remove(checkpointId); if (checkpoint != null && !checkpoint.isDiscarded()) { LOG.info("Decline checkpoint {} by task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo); discardCheckpoint(checkpoint, message.getReason()); } else if (checkpoint != null) { throw new IllegalStateException( "Received message for discarded but non-removed checkpoint " + checkpointId); } else if (LOG.isDebugEnabled()) { if (recentPendingCheckpoints.contains(checkpointId)) { LOG.debug("Received another decline message for now expired checkpoint attempt {} from task {} of job {} at {} : {}", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason); } else { LOG.debug("Received decline message for unknown (too old?) checkpoint attempt {} from task {} of job {} at {} : {}", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason); } } } } /** * Receives an AcknowledgeCheckpoint message and returns whether the * message was associated with a pending checkpoint. * * @param message Checkpoint ack from the task manager * * @param taskManagerLocationInfo The location of the acknowledge checkpoint message's sender * @return Flag indicating whether the ack'd checkpoint was associated * with a pending checkpoint. * * @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store. 
*/ public boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message, String taskManagerLocationInfo) throws CheckpointException { if (shutdown || message == null) { return false; } if (!job.equals(message.getJob())) { LOG.error("Received wrong AcknowledgeCheckpoint message for job {} from {} : {}", job, taskManagerLocationInfo, message); return false; } final long checkpointId = message.getCheckpointId(); synchronized (lock) { if (shutdown) { return false; } final PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null && !checkpoint.isDiscarded()) { switch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) { case SUCCESS: LOG.debug("Received acknowledge message for checkpoint {} from task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); if (checkpoint.isFullyAcknowledged()) { completePendingCheckpoint(checkpoint); } break; case DUPLICATE: LOG.debug("Received a duplicate acknowledge message for checkpoint {}, task {}, job {}, location {}.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); break; case UNKNOWN: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " + "because the task's execution attempt id was unknown. Discarding " + "the state handle to avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); break; case DISCARDED: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " + "because the pending checkpoint had been discarded. 
Discarding the " + "state handle tp avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); } return true; } else if (checkpoint != null) { throw new IllegalStateException( "Received message for discarded but non-removed checkpoint " + checkpointId); } else { boolean wasPendingCheckpoint; if (recentPendingCheckpoints.contains(checkpointId)) { wasPendingCheckpoint = true; LOG.warn("Received late message for now expired checkpoint attempt {} from task " + "{} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); } else { LOG.debug("Received message for an unknown checkpoint {} from task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); wasPendingCheckpoint = false; } discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); return wasPendingCheckpoint; } } } /** * Try to complete the given pending checkpoint. * * <p>Important: This method should only be called in the checkpoint lock scope. 
* * @param pendingCheckpoint to complete * @throws CheckpointException if the completion failed */ private void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException { final long checkpointId = pendingCheckpoint.getCheckpointId(); final CompletedCheckpoint completedCheckpoint; Map<OperatorID, OperatorState> operatorStates = pendingCheckpoint.getOperatorStates(); sharedStateRegistry.registerAll(operatorStates.values()); try { try { completedCheckpoint = pendingCheckpoint.finalizeCheckpoint(); failureManager.handleCheckpointSuccess(pendingCheckpoint.getCheckpointId()); } catch (Exception e1) { if (!pendingCheckpoint.isDiscarded()) { failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1); } throw new CheckpointException("Could not finalize the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1); } Preconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null); try { completedCheckpointStore.addCheckpoint(completedCheckpoint); } catch (Exception exception) { executor.execute(new Runnable() { @Override public void run() { try { completedCheckpoint.discardOnFailedStoring(); } catch (Throwable t) { LOG.warn("Could not properly discard completed checkpoint {}.", completedCheckpoint.getCheckpointID(), t); } } }); throw new CheckpointException("Could not complete the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, exception); } } finally { pendingCheckpoints.remove(checkpointId); triggerQueuedRequests(); } rememberRecentCheckpointId(checkpointId); dropSubsumedCheckpoints(checkpointId); lastCheckpointCompletionNanos = System.nanoTime(); LOG.info("Completed checkpoint {} for job {} ({} bytes in {} ms).", checkpointId, job, completedCheckpoint.getStateSize(), completedCheckpoint.getDuration()); if (LOG.isDebugEnabled()) { StringBuilder builder = new StringBuilder(); 
builder.append("Checkpoint state: "); for (OperatorState state : completedCheckpoint.getOperatorStates().values()) { builder.append(state); builder.append(", "); } builder.setLength(builder.length() - 2); LOG.debug(builder.toString()); } final long timestamp = completedCheckpoint.getTimestamp(); for (ExecutionVertex ev : tasksToCommitTo) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ee.notifyCheckpointComplete(checkpointId, timestamp); } } } /** * Fails all pending checkpoints which have not been acknowledged by the given execution * attempt id. * * @param executionAttemptId for which to discard unacknowledged pending checkpoints * @param cause of the failure */ public void failUnacknowledgedPendingCheckpointsFor(ExecutionAttemptID executionAttemptId, Throwable cause) { synchronized (lock) { Iterator<PendingCheckpoint> pendingCheckpointIterator = pendingCheckpoints.values().iterator(); while (pendingCheckpointIterator.hasNext()) { final PendingCheckpoint pendingCheckpoint = pendingCheckpointIterator.next(); if (!pendingCheckpoint.isAcknowledgedBy(executionAttemptId)) { pendingCheckpointIterator.remove(); discardCheckpoint(pendingCheckpoint, cause); } } } } private void rememberRecentCheckpointId(long id) { if (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) { recentPendingCheckpoints.removeFirst(); } recentPendingCheckpoints.addLast(id); } private void dropSubsumedCheckpoints(long checkpointId) { Iterator<Map.Entry<Long, PendingCheckpoint>> entries = pendingCheckpoints.entrySet().iterator(); while (entries.hasNext()) { PendingCheckpoint p = entries.next().getValue(); if (p.getCheckpointId() < checkpointId && p.canBeSubsumed()) { rememberRecentCheckpointId(p.getCheckpointId()); failPendingCheckpoint(p, CheckpointFailureReason.CHECKPOINT_SUBSUMED); entries.remove(); } } } /** * Triggers the queued request, if there is one. * * <p>NOTE: The caller of this method must hold the lock when invoking the method! 
*/ private void triggerQueuedRequests() { if (triggerRequestQueued) { triggerRequestQueued = false; if (periodicScheduling) { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); } currentPeriodicTrigger = timer.scheduleAtFixedRate( new ScheduledTrigger(), 0L, baseInterval, TimeUnit.MILLISECONDS); } else { timer.execute(new ScheduledTrigger()); } } } @VisibleForTesting int getNumScheduledTasks() { return timer.getQueue().size(); } /** * Restores the latest checkpointed state. * * @param tasks Map of job vertices to restore. State for these vertices is * restored via {@link Execution * @param errorIfNoCheckpoint Fail if no completed checkpoint is available to * restore from. * @param allowNonRestoredState Allow checkpoint state that cannot be mapped * to any job vertex in tasks. * @return <code>true</code> if state was restored, <code>false</code> otherwise. * @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. * @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. 
*/ public boolean restoreLatestCheckpointedState( Map<JobVertexID, ExecutionJobVertex> tasks, boolean errorIfNoCheckpoint, boolean allowNonRestoredState) throws Exception { synchronized (lock) { if (shutdown) { throw new IllegalStateException("CheckpointCoordinator is shut down"); } sharedStateRegistry.close(); sharedStateRegistry = sharedStateRegistryFactory.create(executor); completedCheckpointStore.recover(); for (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) { completedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry); } LOG.debug("Status of the shared state registry of job {} after restore: {}.", job, sharedStateRegistry); CompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint(isPreferCheckpointForRecovery); if (latest == null) { if (errorIfNoCheckpoint) { throw new IllegalStateException("No completed checkpoint available"); } else { LOG.debug("Resetting the master hooks."); MasterHooks.reset(masterHooks.values(), LOG); return false; } } LOG.info("Restoring job {} from latest valid checkpoint: {}.", job, latest); final Map<OperatorID, OperatorState> operatorStates = latest.getOperatorStates(); StateAssignmentOperation stateAssignmentOperation = new StateAssignmentOperation(latest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState); stateAssignmentOperation.assignStates(); MasterHooks.restoreMasterHooks( masterHooks, latest.getMasterHookStates(), latest.getCheckpointID(), allowNonRestoredState, LOG); if (statsTracker != null) { long restoreTimestamp = System.currentTimeMillis(); RestoredCheckpointStats restored = new RestoredCheckpointStats( latest.getCheckpointID(), latest.getProperties(), restoreTimestamp, latest.getExternalPointer()); statsTracker.reportRestoredCheckpoint(restored); } return true; } } /** * Restore the state with given savepoint. * * @param savepointPointer The pointer to the savepoint. 
* @param allowNonRestored True if allowing checkpoint state that cannot be * mapped to any job vertex in tasks. * @param tasks Map of job vertices to restore. State for these * vertices is restored via * {@link Execution * @param userClassLoader The class loader to resolve serialized classes in * legacy savepoint versions. */ public boolean restoreSavepoint( String savepointPointer, boolean allowNonRestored, Map<JobVertexID, ExecutionJobVertex> tasks, ClassLoader userClassLoader) throws Exception { Preconditions.checkNotNull(savepointPointer, "The savepoint path cannot be null."); LOG.info("Starting job {} from savepoint {} ({})", job, savepointPointer, (allowNonRestored ? "allowing non restored state" : "")); final CompletedCheckpointStorageLocation checkpointLocation = checkpointStorage.resolveCheckpoint(savepointPointer); CompletedCheckpoint savepoint = Checkpoints.loadAndValidateCheckpoint( job, tasks, checkpointLocation, userClassLoader, allowNonRestored); completedCheckpointStore.addCheckpoint(savepoint); long nextCheckpointId = savepoint.getCheckpointID() + 1; checkpointIdCounter.setCount(nextCheckpointId); LOG.info("Reset the checkpoint ID of job {} to {}.", job, nextCheckpointId); return restoreLatestCheckpointedState(tasks, true, allowNonRestored); } public int getNumberOfPendingCheckpoints() { return this.pendingCheckpoints.size(); } public int getNumberOfRetainedSuccessfulCheckpoints() { synchronized (lock) { return completedCheckpointStore.getNumberOfRetainedCheckpoints(); } } public Map<Long, PendingCheckpoint> getPendingCheckpoints() { synchronized (lock) { return new HashMap<>(this.pendingCheckpoints); } } public List<CompletedCheckpoint> getSuccessfulCheckpoints() throws Exception { synchronized (lock) { return completedCheckpointStore.getAllCheckpoints(); } } public CheckpointStorageCoordinatorView getCheckpointStorage() { return checkpointStorage; } public CompletedCheckpointStore getCheckpointStore() { return completedCheckpointStore; } public 
CheckpointIDCounter getCheckpointIdCounter() { return checkpointIdCounter; } public long getCheckpointTimeout() { return checkpointTimeout; } /** * Returns whether periodic checkpointing has been configured. * * @return <code>true</code> if periodic checkpoints have been configured. */ public boolean isPeriodicCheckpointingConfigured() { return baseInterval != Long.MAX_VALUE; } public void startCheckpointScheduler() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } stopCheckpointScheduler(); periodicScheduling = true; long initialDelay = ThreadLocalRandom.current().nextLong( minPauseBetweenCheckpointsNanos / 1_000_000L, baseInterval + 1L); currentPeriodicTrigger = timer.scheduleAtFixedRate( new ScheduledTrigger(), initialDelay, baseInterval, TimeUnit.MILLISECONDS); } } public void stopCheckpointScheduler() { synchronized (lock) { triggerRequestQueued = false; periodicScheduling = false; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } abortPendingCheckpoints(new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND)); numUnsuccessfulCheckpointsTriggers.set(0); } } /** * Aborts all the pending checkpoints due to en exception. * @param exception The exception. 
*/ public void abortPendingCheckpoints(CheckpointException exception) { synchronized (lock) { for (PendingCheckpoint p : pendingCheckpoints.values()) { failPendingCheckpoint(p, exception.getCheckpointFailureReason()); } pendingCheckpoints.clear(); } } public JobStatusListener createActivatorDeactivator() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } if (jobStatusListener == null) { jobStatusListener = new CheckpointCoordinatorDeActivator(this); } return jobStatusListener; } } private final class ScheduledTrigger implements Runnable { @Override public void run() { try { triggerCheckpoint(System.currentTimeMillis(), true); } catch (Exception e) { LOG.error("Exception while triggering checkpoint for job {}.", job, e); } } } /** * Discards the given pending checkpoint because of the given cause. * * @param pendingCheckpoint to discard * @param cause for discarding the checkpoint */ private void discardCheckpoint(PendingCheckpoint pendingCheckpoint, @Nullable Throwable cause) { assert(Thread.holdsLock(lock)); Preconditions.checkNotNull(pendingCheckpoint); final long checkpointId = pendingCheckpoint.getCheckpointId(); LOG.info("Discarding checkpoint {} of job {}.", checkpointId, job, cause); if (cause == null) { failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.CHECKPOINT_DECLINED); } else if (cause instanceof CheckpointException) { CheckpointException exception = (CheckpointException) cause; failPendingCheckpoint(pendingCheckpoint, exception.getCheckpointFailureReason(), cause); } else { failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.JOB_FAILURE, cause); } rememberRecentCheckpointId(checkpointId); boolean haveMoreRecentPending = false; for (PendingCheckpoint p : pendingCheckpoints.values()) { if (!p.isDiscarded() && p.getCheckpointId() >= pendingCheckpoint.getCheckpointId()) { haveMoreRecentPending = true; break; } } if (!haveMoreRecentPending) { 
triggerQueuedRequests(); } } /** * Discards the given state object asynchronously belonging to the given job, execution attempt * id and checkpoint id. * * @param jobId identifying the job to which the state object belongs * @param executionAttemptID identifying the task to which the state object belongs * @param checkpointId of the state object * @param subtaskState to discard asynchronously */ private void discardSubtaskState( final JobID jobId, final ExecutionAttemptID executionAttemptID, final long checkpointId, final TaskStateSnapshot subtaskState) { if (subtaskState != null) { executor.execute(new Runnable() { @Override public void run() { try { subtaskState.discardState(); } catch (Throwable t2) { LOG.warn("Could not properly discard state object of checkpoint {} " + "belonging to task {} of job {}.", checkpointId, executionAttemptID, jobId, t2); } } }); } } private void failPendingCheckpoint( final PendingCheckpoint pendingCheckpoint, final CheckpointFailureReason reason, final Throwable cause) { CheckpointException exception = new CheckpointException(reason, cause); pendingCheckpoint.abort(reason, cause); failureManager.handleCheckpointException(exception, pendingCheckpoint.getCheckpointId()); } private void failPendingCheckpoint( final PendingCheckpoint pendingCheckpoint, final CheckpointFailureReason reason) { failPendingCheckpoint(pendingCheckpoint, reason, null); } }
But once the `CheckpointCoordinator` is shut down, these exceptions should no longer matter, at least not in terms of correctness of the job. I don't really understand why this was not a problem before and now we want to fine grained filter out exceptions, this does not seem like a straight forward solution to me.
private void startTriggeringCheckpoint(CheckpointTriggerRequest request) { try { synchronized (lock) { preCheckGlobalState(request.isPeriodic); } final Execution[] executions = getTriggerExecutions(); final Map<ExecutionAttemptID, ExecutionVertex> ackTasks = getAckTasks(); Preconditions.checkState(!isTriggering); isTriggering = true; final long timestamp = System.currentTimeMillis(); final CompletableFuture<PendingCheckpoint> pendingCheckpointCompletableFuture = initializeCheckpoint(request.props, request.externalSavepointLocation) .thenApplyAsync( (checkpointIdAndStorageLocation) -> createPendingCheckpoint( timestamp, request.props, ackTasks, request.isPeriodic, checkpointIdAndStorageLocation.checkpointId, checkpointIdAndStorageLocation.checkpointStorageLocation, request.getOnCompletionFuture()), timer); final CompletableFuture<?> masterStatesComplete = pendingCheckpointCompletableFuture .thenCompose(this::snapshotMasterState); final CompletableFuture<?> coordinatorCheckpointsComplete = pendingCheckpointCompletableFuture .thenComposeAsync((pendingCheckpoint) -> OperatorCoordinatorCheckpoints.triggerAndAcknowledgeAllCoordinatorCheckpointsWithCompletion( coordinatorsToCheckpoint, pendingCheckpoint, timer), timer); FutureUtils.waitForAll(asList(masterStatesComplete, coordinatorCheckpointsComplete)) .handleAsync( (ignored, throwable) -> { final PendingCheckpoint checkpoint = FutureUtils.getWithoutException(pendingCheckpointCompletableFuture); Preconditions.checkState( checkpoint != null || throwable != null, "Either the pending checkpoint needs to be created or an error must have been occurred."); if (throwable != null) { if (checkpoint == null) { onTriggerFailure(request, throwable); } else { onTriggerFailure(checkpoint, throwable); } } else { if (checkpoint.isDiscarded()) { onTriggerFailure( checkpoint, new CheckpointException( CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, checkpoint.getFailureCause())); } else { final long checkpointId = 
checkpoint.getCheckpointId(); snapshotTaskState( timestamp, checkpointId, checkpoint.getCheckpointStorageLocation(), request.props, executions, request.advanceToEndOfTime); coordinatorsToCheckpoint.forEach((ctx) -> ctx.afterSourceBarrierInjection(checkpointId)); onTriggerSuccess(); } } return null; }, timer) .whenComplete((unused, error) -> { if (error != null) { if (!isShutdown()) { failureManager.handleJobLevelCheckpointException(new CheckpointException(EXCEPTION, error), Optional.empty()); } else if (error instanceof RejectedExecutionException) { LOG.debug("Execution rejected during shutdown"); } else { LOG.warn("Error encountered during shutdown", error); } } }); } catch (Throwable throwable) { onTriggerFailure(request, throwable); } }
if (error != null) {
private void startTriggeringCheckpoint(CheckpointTriggerRequest request) { try { synchronized (lock) { preCheckGlobalState(request.isPeriodic); } final Execution[] executions = getTriggerExecutions(); final Map<ExecutionAttemptID, ExecutionVertex> ackTasks = getAckTasks(); Preconditions.checkState(!isTriggering); isTriggering = true; final long timestamp = System.currentTimeMillis(); final CompletableFuture<PendingCheckpoint> pendingCheckpointCompletableFuture = initializeCheckpoint(request.props, request.externalSavepointLocation) .thenApplyAsync( (checkpointIdAndStorageLocation) -> createPendingCheckpoint( timestamp, request.props, ackTasks, request.isPeriodic, checkpointIdAndStorageLocation.checkpointId, checkpointIdAndStorageLocation.checkpointStorageLocation, request.getOnCompletionFuture()), timer); final CompletableFuture<?> masterStatesComplete = pendingCheckpointCompletableFuture .thenCompose(this::snapshotMasterState); final CompletableFuture<?> coordinatorCheckpointsComplete = pendingCheckpointCompletableFuture .thenComposeAsync((pendingCheckpoint) -> OperatorCoordinatorCheckpoints.triggerAndAcknowledgeAllCoordinatorCheckpointsWithCompletion( coordinatorsToCheckpoint, pendingCheckpoint, timer), timer); FutureUtils.assertNoException( CompletableFuture.allOf(masterStatesComplete, coordinatorCheckpointsComplete) .handleAsync( (ignored, throwable) -> { final PendingCheckpoint checkpoint = FutureUtils.getWithoutException(pendingCheckpointCompletableFuture); Preconditions.checkState( checkpoint != null || throwable != null, "Either the pending checkpoint needs to be created or an error must have been occurred."); if (throwable != null) { if (checkpoint == null) { onTriggerFailure(request, throwable); } else { onTriggerFailure(checkpoint, throwable); } } else { if (checkpoint.isDiscarded()) { onTriggerFailure( checkpoint, new CheckpointException( CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, checkpoint.getFailureCause())); } else { final long checkpointId 
= checkpoint.getCheckpointId(); snapshotTaskState( timestamp, checkpointId, checkpoint.getCheckpointStorageLocation(), request.props, executions, request.advanceToEndOfTime); coordinatorsToCheckpoint.forEach((ctx) -> ctx.afterSourceBarrierInjection(checkpointId)); onTriggerSuccess(); } } return null; }, timer) .exceptionally(error -> { if (!isShutdown()) { throw new CompletionException(error); } else if (error instanceof RejectedExecutionException) { LOG.debug("Execution rejected during shutdown"); } else { LOG.warn("Error encountered during shutdown", error); } return null; })); } catch (Throwable throwable) { onTriggerFailure(request, throwable); } }
class CheckpointCoordinator { private static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class); /** The number of recent checkpoints whose IDs are remembered. */ private static final int NUM_GHOST_CHECKPOINT_IDS = 16; /** Coordinator-wide lock to safeguard the checkpoint updates. */ private final Object lock = new Object(); /** The job whose checkpoint this coordinator coordinates. */ private final JobID job; /** Default checkpoint properties. **/ private final CheckpointProperties checkpointProperties; /** The executor used for asynchronous calls, like potentially blocking I/O. */ private final Executor executor; /** Tasks who need to be sent a message when a checkpoint is started. */ private final ExecutionVertex[] tasksToTrigger; /** Tasks who need to acknowledge a checkpoint before it succeeds. */ private final ExecutionVertex[] tasksToWaitFor; /** Tasks who need to be sent a message when a checkpoint is confirmed. */ private final ExecutionVertex[] tasksToCommitTo; /** The operator coordinators that need to be checkpointed. */ private final Collection<OperatorCoordinatorCheckpointContext> coordinatorsToCheckpoint; /** Map from checkpoint ID to the pending checkpoint. */ @GuardedBy("lock") private final Map<Long, PendingCheckpoint> pendingCheckpoints; /** Completed checkpoints. Implementations can be blocking. Make sure calls to methods * accessing this don't block the job manager actor and run asynchronously. */ private final CompletedCheckpointStore completedCheckpointStore; /** The root checkpoint state backend, which is responsible for initializing the * checkpoint, storing the metadata, and cleaning up the checkpoint. */ private final CheckpointStorageCoordinatorView checkpointStorage; /** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */ private final ArrayDeque<Long> recentPendingCheckpoints; /** Checkpoint ID counter to ensure ascending IDs. 
In case of job manager failures, these * need to be ascending across job managers. */ private final CheckpointIDCounter checkpointIdCounter; /** The base checkpoint interval. Actual trigger time may be affected by the * max concurrent checkpoints and minimum-pause values */ private final long baseInterval; /** The max time (in ms) that a checkpoint may take. */ private final long checkpointTimeout; /** The min time(in ms) to delay after a checkpoint could be triggered. Allows to * enforce minimum processing time between checkpoint attempts */ private final long minPauseBetweenCheckpoints; /** The timer that handles the checkpoint timeouts and triggers periodic checkpoints. * It must be single-threaded. Eventually it will be replaced by main thread executor. */ private final ScheduledExecutor timer; /** The master checkpoint hooks executed by this checkpoint coordinator. */ private final HashMap<String, MasterTriggerRestoreHook<?>> masterHooks; private final boolean unalignedCheckpointsEnabled; /** Actor that receives status updates from the execution graph this coordinator works for. */ private JobStatusListener jobStatusListener; /** The number of consecutive failed trigger attempts. */ private final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0); /** A handle to the current periodic trigger, to cancel it when necessary. */ private ScheduledFuture<?> currentPeriodicTrigger; /** The timestamp (via {@link Clock * completed. */ private long lastCheckpointCompletionRelativeTime; /** Flag whether a triggered checkpoint should immediately schedule the next checkpoint. * Non-volatile, because only accessed in synchronized scope */ private boolean periodicScheduling; /** Flag marking the coordinator as shut down (not accepting any messages any more). */ private volatile boolean shutdown; /** Optional tracker for checkpoint statistics. */ @Nullable private CheckpointStatsTracker statsTracker; /** A factory for SharedStateRegistry objects. 
*/ private final SharedStateRegistryFactory sharedStateRegistryFactory; /** Registry that tracks state which is shared across (incremental) checkpoints. */ private SharedStateRegistry sharedStateRegistry; private boolean isPreferCheckpointForRecovery; private final CheckpointFailureManager failureManager; private final Clock clock; private final boolean isExactlyOnceMode; /** Flag represents there is an in-flight trigger request. */ private boolean isTriggering = false; private final CheckpointRequestDecider requestDecider; public CheckpointCoordinator( JobID job, CheckpointCoordinatorConfiguration chkConfig, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, Collection<OperatorCoordinatorCheckpointContext> coordinatorsToCheckpoint, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, StateBackend checkpointStateBackend, Executor executor, ScheduledExecutor timer, SharedStateRegistryFactory sharedStateRegistryFactory, CheckpointFailureManager failureManager) { this( job, chkConfig, tasksToTrigger, tasksToWaitFor, tasksToCommitTo, coordinatorsToCheckpoint, checkpointIDCounter, completedCheckpointStore, checkpointStateBackend, executor, timer, sharedStateRegistryFactory, failureManager, SystemClock.getInstance()); } @VisibleForTesting public CheckpointCoordinator( JobID job, CheckpointCoordinatorConfiguration chkConfig, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, Collection<OperatorCoordinatorCheckpointContext> coordinatorsToCheckpoint, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, StateBackend checkpointStateBackend, Executor executor, ScheduledExecutor timer, SharedStateRegistryFactory sharedStateRegistryFactory, CheckpointFailureManager failureManager, Clock clock) { checkNotNull(checkpointStateBackend); long minPauseBetweenCheckpoints = 
chkConfig.getMinPauseBetweenCheckpoints(); if (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) { minPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000; } long baseInterval = chkConfig.getCheckpointInterval(); if (baseInterval < minPauseBetweenCheckpoints) { baseInterval = minPauseBetweenCheckpoints; } this.job = checkNotNull(job); this.baseInterval = baseInterval; this.checkpointTimeout = chkConfig.getCheckpointTimeout(); this.minPauseBetweenCheckpoints = minPauseBetweenCheckpoints; this.tasksToTrigger = checkNotNull(tasksToTrigger); this.tasksToWaitFor = checkNotNull(tasksToWaitFor); this.tasksToCommitTo = checkNotNull(tasksToCommitTo); this.coordinatorsToCheckpoint = Collections.unmodifiableCollection(coordinatorsToCheckpoint); this.pendingCheckpoints = new LinkedHashMap<>(); this.checkpointIdCounter = checkNotNull(checkpointIDCounter); this.completedCheckpointStore = checkNotNull(completedCheckpointStore); this.executor = checkNotNull(executor); this.sharedStateRegistryFactory = checkNotNull(sharedStateRegistryFactory); this.sharedStateRegistry = sharedStateRegistryFactory.create(executor); this.isPreferCheckpointForRecovery = chkConfig.isPreferCheckpointForRecovery(); this.failureManager = checkNotNull(failureManager); this.clock = checkNotNull(clock); this.isExactlyOnceMode = chkConfig.isExactlyOnce(); this.unalignedCheckpointsEnabled = chkConfig.isUnalignedCheckpointsEnabled(); this.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS); this.masterHooks = new HashMap<>(); this.timer = timer; this.checkpointProperties = CheckpointProperties.forCheckpoint(chkConfig.getCheckpointRetentionPolicy()); try { this.checkpointStorage = checkpointStateBackend.createCheckpointStorage(job); checkpointStorage.initializeBaseLocations(); } catch (IOException e) { throw new FlinkRuntimeException("Failed to create checkpoint storage at checkpoint coordinator side.", e); } try { checkpointIDCounter.start(); } catch (Throwable t) { throw new 
RuntimeException("Failed to start checkpoint ID counter: " + t.getMessage(), t); } this.requestDecider = new CheckpointRequestDecider( chkConfig.getMaxConcurrentCheckpoints(), this::rescheduleTrigger, this.clock, this.minPauseBetweenCheckpoints, this.pendingCheckpoints::size, this.lock); } /** * Adds the given master hook to the checkpoint coordinator. This method does nothing, if * the checkpoint coordinator already contained a hook with the same ID (as defined via * {@link MasterTriggerRestoreHook * * @param hook The hook to add. * @return True, if the hook was added, false if the checkpoint coordinator already * contained a hook with the same ID. */ public boolean addMasterHook(MasterTriggerRestoreHook<?> hook) { checkNotNull(hook); final String id = hook.getIdentifier(); checkArgument(!StringUtils.isNullOrWhitespaceOnly(id), "The hook has a null or empty id"); synchronized (lock) { if (!masterHooks.containsKey(id)) { masterHooks.put(id, hook); return true; } else { return false; } } } /** * Gets the number of currently register master hooks. */ public int getNumberOfRegisteredMasterHooks() { synchronized (lock) { return masterHooks.size(); } } /** * Sets the checkpoint stats tracker. * * @param statsTracker The checkpoint stats tracker. */ public void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) { this.statsTracker = statsTracker; } /** * Shuts down the checkpoint coordinator. * * <p>After this method has been called, the coordinator does not accept * and further messages and cannot trigger any further checkpoints. 
*/ public void shutdown(JobStatus jobStatus) throws Exception { synchronized (lock) { if (!shutdown) { shutdown = true; LOG.info("Stopping checkpoint coordinator for job {}.", job); periodicScheduling = false; MasterHooks.close(masterHooks.values(), LOG); masterHooks.clear(); final CheckpointException reason = new CheckpointException( CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); abortPendingAndQueuedCheckpoints(reason); completedCheckpointStore.shutdown(jobStatus); checkpointIdCounter.shutdown(jobStatus); } } } public boolean isShutdown() { return shutdown; } /** * Triggers a savepoint with the given savepoint directory as a target. * * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. * @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSavepoint(@Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSavepoint(!unalignedCheckpointsEnabled); return triggerSavepointInternal(properties, false, targetLocation); } /** * Triggers a synchronous savepoint with the given savepoint directory as a target. * * @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline * to fire any registered event-time timers. * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. 
* @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSynchronousSavepoint( final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSyncSavepoint(!unalignedCheckpointsEnabled); return triggerSavepointInternal(properties, advanceToEndOfEventTime, targetLocation); } private CompletableFuture<CompletedCheckpoint> triggerSavepointInternal( final CheckpointProperties checkpointProperties, final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { checkNotNull(checkpointProperties); final CompletableFuture<CompletedCheckpoint> resultFuture = new CompletableFuture<>(); timer.execute(() -> triggerCheckpoint( checkpointProperties, targetLocation, false, advanceToEndOfEventTime) .whenComplete((completedCheckpoint, throwable) -> { if (throwable == null) { resultFuture.complete(completedCheckpoint); } else { resultFuture.completeExceptionally(throwable); } })); return resultFuture; } /** * Triggers a new standard checkpoint and uses the given timestamp as the checkpoint * timestamp. The return value is a future. It completes when the checkpoint triggered finishes * or an error occurred. * * @param isPeriodic Flag indicating whether this triggered checkpoint is * periodic. If this flag is true, but the periodic scheduler is disabled, * the checkpoint will be declined. * @return a future to the completed checkpoint. 
*/ public CompletableFuture<CompletedCheckpoint> triggerCheckpoint(boolean isPeriodic) { return triggerCheckpoint(checkpointProperties, null, isPeriodic, false); } @VisibleForTesting public CompletableFuture<CompletedCheckpoint> triggerCheckpoint( CheckpointProperties props, @Nullable String externalSavepointLocation, boolean isPeriodic, boolean advanceToEndOfTime) { if (advanceToEndOfTime && !(props.isSynchronous() && props.isSavepoint())) { return FutureUtils.completedExceptionally(new IllegalArgumentException( "Only synchronous savepoints are allowed to advance the watermark to MAX.")); } CheckpointTriggerRequest request = new CheckpointTriggerRequest(props, externalSavepointLocation, isPeriodic, advanceToEndOfTime); requestDecider .chooseRequestToExecute(request, isTriggering, lastCheckpointCompletionRelativeTime) .ifPresent(this::startTriggeringCheckpoint); return request.onCompletionPromise; } /** * Initialize the checkpoint trigger asynchronously. It will be executed in io thread due to * it might be time-consuming. * * @param props checkpoint properties * @param externalSavepointLocation the external savepoint location, it might be null * @return the future of initialized result, checkpoint id and checkpoint location */ private CompletableFuture<CheckpointIdAndStorageLocation> initializeCheckpoint( CheckpointProperties props, @Nullable String externalSavepointLocation) { return CompletableFuture.supplyAsync(() -> { try { long checkpointID = checkpointIdCounter.getAndIncrement(); CheckpointStorageLocation checkpointStorageLocation = props.isSavepoint() ? 
checkpointStorage .initializeLocationForSavepoint(checkpointID, externalSavepointLocation) : checkpointStorage.initializeLocationForCheckpoint(checkpointID); return new CheckpointIdAndStorageLocation(checkpointID, checkpointStorageLocation); } catch (Throwable throwable) { throw new CompletionException(throwable); } }, executor); } private PendingCheckpoint createPendingCheckpoint( long timestamp, CheckpointProperties props, Map<ExecutionAttemptID, ExecutionVertex> ackTasks, boolean isPeriodic, long checkpointID, CheckpointStorageLocation checkpointStorageLocation, CompletableFuture<CompletedCheckpoint> onCompletionPromise) { synchronized (lock) { try { preCheckGlobalState(isPeriodic); } catch (Throwable t) { throw new CompletionException(t); } } final PendingCheckpoint checkpoint = new PendingCheckpoint( job, checkpointID, timestamp, ackTasks, OperatorInfo.getIds(coordinatorsToCheckpoint), masterHooks.keySet(), props, checkpointStorageLocation, executor, onCompletionPromise); if (statsTracker != null) { PendingCheckpointStats callback = statsTracker.reportPendingCheckpoint( checkpointID, timestamp, props); checkpoint.setStatsCallback(callback); } synchronized (lock) { pendingCheckpoints.put(checkpointID, checkpoint); ScheduledFuture<?> cancellerHandle = timer.schedule( new CheckpointCanceller(checkpoint), checkpointTimeout, TimeUnit.MILLISECONDS); if (!checkpoint.setCancellerHandle(cancellerHandle)) { cancellerHandle.cancel(false); } } LOG.info("Triggering checkpoint {} (type={}) @ {} for job {}.", checkpointID, checkpoint.getProps().getCheckpointType(), timestamp, job); return checkpoint; } /** * Snapshot master hook states asynchronously. 
* * @param checkpoint the pending checkpoint * @return the future represents master hook states are finished or not */ private CompletableFuture<Void> snapshotMasterState(PendingCheckpoint checkpoint) { if (masterHooks.isEmpty()) { return CompletableFuture.completedFuture(null); } final long checkpointID = checkpoint.getCheckpointId(); final long timestamp = checkpoint.getCheckpointTimestamp(); final CompletableFuture<Void> masterStateCompletableFuture = new CompletableFuture<>(); for (MasterTriggerRestoreHook<?> masterHook : masterHooks.values()) { MasterHooks .triggerHook(masterHook, checkpointID, timestamp, executor) .whenCompleteAsync( (masterState, throwable) -> { try { synchronized (lock) { if (masterStateCompletableFuture.isDone()) { return; } if (checkpoint.isDiscarded()) { throw new IllegalStateException( "Checkpoint " + checkpointID + " has been discarded"); } if (throwable == null) { checkpoint.acknowledgeMasterState( masterHook.getIdentifier(), masterState); if (checkpoint.areMasterStatesFullyAcknowledged()) { masterStateCompletableFuture.complete(null); } } else { masterStateCompletableFuture.completeExceptionally(throwable); } } } catch (Throwable t) { masterStateCompletableFuture.completeExceptionally(t); } }, timer); } return masterStateCompletableFuture; } /** * Snapshot task state. * * @param timestamp the timestamp of this checkpoint reques * @param checkpointID the checkpoint id * @param checkpointStorageLocation the checkpoint location * @param props the checkpoint properties * @param executions the executions which should be triggered * @param advanceToEndOfTime Flag indicating if the source should inject a {@code MAX_WATERMARK} * in the pipeline to fire any registered event-time timers. 
*/ private void snapshotTaskState( long timestamp, long checkpointID, CheckpointStorageLocation checkpointStorageLocation, CheckpointProperties props, Execution[] executions, boolean advanceToEndOfTime) { final CheckpointOptions checkpointOptions = new CheckpointOptions( props.getCheckpointType(), checkpointStorageLocation.getLocationReference(), isExactlyOnceMode, props.getCheckpointType() == CheckpointType.CHECKPOINT && unalignedCheckpointsEnabled); for (Execution execution: executions) { if (props.isSynchronous()) { execution.triggerSynchronousSavepoint(checkpointID, timestamp, checkpointOptions, advanceToEndOfTime); } else { execution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions); } } } /** * Trigger request is successful. * NOTE, it must be invoked if trigger request is successful. */ private void onTriggerSuccess() { isTriggering = false; numUnsuccessfulCheckpointsTriggers.set(0); executeQueuedRequest(); } /** * The trigger request is failed prematurely without a proper initialization. * There is no resource to release, but the completion promise needs to fail manually here. * * @param onCompletionPromise the completion promise of the checkpoint/savepoint * @param throwable the reason of trigger failure */ private void onTriggerFailure( CheckpointTriggerRequest onCompletionPromise, Throwable throwable) { final CheckpointException checkpointException = getCheckpointException(CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable); onCompletionPromise.completeExceptionally(checkpointException); onTriggerFailure((PendingCheckpoint) null, checkpointException); } /** * The trigger request is failed. * NOTE, it must be invoked if trigger request is failed. * * @param checkpoint the pending checkpoint which is failed. It could be null if it's failed * prematurely without a proper initialization. 
* @param throwable the reason of trigger failure */ private void onTriggerFailure(@Nullable PendingCheckpoint checkpoint, Throwable throwable) { throwable = ExceptionUtils.stripCompletionException(throwable); try { coordinatorsToCheckpoint.forEach(OperatorCoordinatorCheckpointContext::abortCurrentTriggering); if (checkpoint != null && !checkpoint.isDiscarded()) { int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet(); LOG.warn( "Failed to trigger checkpoint {} for job {}. ({} consecutive failed attempts so far)", checkpoint.getCheckpointId(), job, numUnsuccessful, throwable); final CheckpointException cause = getCheckpointException( CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable); synchronized (lock) { abortPendingCheckpoint(checkpoint, cause); } } } finally { isTriggering = false; executeQueuedRequest(); } } private void executeQueuedRequest() { requestDecider.chooseQueuedRequestToExecute(isTriggering, lastCheckpointCompletionRelativeTime).ifPresent(this::startTriggeringCheckpoint); } /** * Receives a {@link DeclineCheckpoint} message for a pending checkpoint. * * @param message Checkpoint decline from the task manager * @param taskManagerLocationInfo The location info of the decline checkpoint message's sender */ public void receiveDeclineMessage(DeclineCheckpoint message, String taskManagerLocationInfo) { if (shutdown || message == null) { return; } if (!job.equals(message.getJob())) { throw new IllegalArgumentException("Received DeclineCheckpoint message for job " + message.getJob() + " from " + taskManagerLocationInfo + " while this coordinator handles job " + job); } final long checkpointId = message.getCheckpointId(); final String reason = (message.getReason() != null ? 
message.getReason().getMessage() : ""); PendingCheckpoint checkpoint; synchronized (lock) { if (shutdown) { return; } checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null) { Preconditions.checkState( !checkpoint.isDiscarded(), "Received message for discarded but non-removed checkpoint " + checkpointId); LOG.info("Decline checkpoint {} by task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo); final CheckpointException checkpointException; if (message.getReason() == null) { checkpointException = new CheckpointException(CheckpointFailureReason.CHECKPOINT_DECLINED); } else { checkpointException = getCheckpointException( CheckpointFailureReason.JOB_FAILURE, message.getReason()); } abortPendingCheckpoint( checkpoint, checkpointException, message.getTaskExecutionId()); } else if (LOG.isDebugEnabled()) { if (recentPendingCheckpoints.contains(checkpointId)) { LOG.debug("Received another decline message for now expired checkpoint attempt {} from task {} of job {} at {} : {}", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason); } else { LOG.debug("Received decline message for unknown (too old?) checkpoint attempt {} from task {} of job {} at {} : {}", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason); } } } } /** * Receives an AcknowledgeCheckpoint message and returns whether the * message was associated with a pending checkpoint. * * @param message Checkpoint ack from the task manager * * @param taskManagerLocationInfo The location of the acknowledge checkpoint message's sender * @return Flag indicating whether the ack'd checkpoint was associated * with a pending checkpoint. * * @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store. 
*/ public boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message, String taskManagerLocationInfo) throws CheckpointException { if (shutdown || message == null) { return false; } if (!job.equals(message.getJob())) { LOG.error("Received wrong AcknowledgeCheckpoint message for job {} from {} : {}", job, taskManagerLocationInfo, message); return false; } final long checkpointId = message.getCheckpointId(); synchronized (lock) { if (shutdown) { return false; } final PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null && !checkpoint.isDiscarded()) { switch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) { case SUCCESS: LOG.debug("Received acknowledge message for checkpoint {} from task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); if (checkpoint.areTasksFullyAcknowledged()) { completePendingCheckpoint(checkpoint); } break; case DUPLICATE: LOG.debug("Received a duplicate acknowledge message for checkpoint {}, task {}, job {}, location {}.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); break; case UNKNOWN: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " + "because the task's execution attempt id was unknown. Discarding " + "the state handle to avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); break; case DISCARDED: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " + "because the pending checkpoint had been discarded. 
Discarding the " + "state handle tp avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); } return true; } else if (checkpoint != null) { throw new IllegalStateException( "Received message for discarded but non-removed checkpoint " + checkpointId); } else { boolean wasPendingCheckpoint; if (recentPendingCheckpoints.contains(checkpointId)) { wasPendingCheckpoint = true; LOG.warn("Received late message for now expired checkpoint attempt {} from task " + "{} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); } else { LOG.debug("Received message for an unknown checkpoint {} from task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); wasPendingCheckpoint = false; } discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); return wasPendingCheckpoint; } } } /** * Try to complete the given pending checkpoint. * * <p>Important: This method should only be called in the checkpoint lock scope. 
* * @param pendingCheckpoint to complete * @throws CheckpointException if the completion failed */ private void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException { final long checkpointId = pendingCheckpoint.getCheckpointId(); final CompletedCheckpoint completedCheckpoint; Map<OperatorID, OperatorState> operatorStates = pendingCheckpoint.getOperatorStates(); sharedStateRegistry.registerAll(operatorStates.values()); try { try { completedCheckpoint = pendingCheckpoint.finalizeCheckpoint(); failureManager.handleCheckpointSuccess(pendingCheckpoint.getCheckpointId()); } catch (Exception e1) { if (!pendingCheckpoint.isDiscarded()) { abortPendingCheckpoint( pendingCheckpoint, new CheckpointException( CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1)); } throw new CheckpointException("Could not finalize the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1); } Preconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null); try { completedCheckpointStore.addCheckpoint(completedCheckpoint); } catch (Exception exception) { executor.execute(new Runnable() { @Override public void run() { try { completedCheckpoint.discardOnFailedStoring(); } catch (Throwable t) { LOG.warn("Could not properly discard completed checkpoint {}.", completedCheckpoint.getCheckpointID(), t); } } }); sendAbortedMessages(checkpointId, pendingCheckpoint.getCheckpointTimestamp()); throw new CheckpointException("Could not complete the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, exception); } } finally { pendingCheckpoints.remove(checkpointId); timer.execute(this::executeQueuedRequest); } rememberRecentCheckpointId(checkpointId); dropSubsumedCheckpoints(checkpointId); lastCheckpointCompletionRelativeTime = clock.relativeTimeMillis(); LOG.info("Completed checkpoint {} for job {} ({} bytes in {} ms).", checkpointId, job, 
completedCheckpoint.getStateSize(), completedCheckpoint.getDuration()); if (LOG.isDebugEnabled()) { StringBuilder builder = new StringBuilder(); builder.append("Checkpoint state: "); for (OperatorState state : completedCheckpoint.getOperatorStates().values()) { builder.append(state); builder.append(", "); } builder.setLength(builder.length() - 2); LOG.debug(builder.toString()); } sendAcknowledgeMessages(checkpointId, completedCheckpoint.getTimestamp()); } private void sendAcknowledgeMessages(long checkpointId, long timestamp) { for (ExecutionVertex ev : tasksToCommitTo) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ee.notifyCheckpointComplete(checkpointId, timestamp); } } for (OperatorCoordinatorCheckpointContext coordinatorContext : coordinatorsToCheckpoint) { coordinatorContext.checkpointComplete(checkpointId); } } private void sendAbortedMessages(long checkpointId, long timeStamp) { executor.execute(() -> { for (ExecutionVertex ev : tasksToCommitTo) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ee.notifyCheckpointAborted(checkpointId, timeStamp); } } }); } /** * Fails all pending checkpoints which have not been acknowledged by the given execution * attempt id. 
* * @param executionAttemptId for which to discard unacknowledged pending checkpoints * @param cause of the failure */ public void failUnacknowledgedPendingCheckpointsFor(ExecutionAttemptID executionAttemptId, Throwable cause) { synchronized (lock) { abortPendingCheckpoints( checkpoint -> !checkpoint.isAcknowledgedBy(executionAttemptId), new CheckpointException(CheckpointFailureReason.TASK_FAILURE, cause)); } } private void rememberRecentCheckpointId(long id) { if (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) { recentPendingCheckpoints.removeFirst(); } recentPendingCheckpoints.addLast(id); } private void dropSubsumedCheckpoints(long checkpointId) { abortPendingCheckpoints( checkpoint -> checkpoint.getCheckpointId() < checkpointId && checkpoint.canBeSubsumed(), new CheckpointException(CheckpointFailureReason.CHECKPOINT_SUBSUMED)); } /** * Restores the latest checkpointed state. * * @param tasks Map of job vertices to restore. State for these vertices is * restored via {@link Execution * @param errorIfNoCheckpoint Fail if no completed checkpoint is available to * restore from. * @param allowNonRestoredState Allow checkpoint state that cannot be mapped * to any job vertex in tasks. * @return <code>true</code> if state was restored, <code>false</code> otherwise. * @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. * @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. 
*/ @Deprecated public boolean restoreLatestCheckpointedState( Map<JobVertexID, ExecutionJobVertex> tasks, boolean errorIfNoCheckpoint, boolean allowNonRestoredState) throws Exception { return restoreLatestCheckpointedStateInternal(new HashSet<>(tasks.values()), true, errorIfNoCheckpoint, allowNonRestoredState); } /** * Restores the latest checkpointed state to a set of subtasks. This method represents a "local" * or "regional" failover and does restore states to coordinators. Note that a regional failover * might still include all tasks. * * @param tasks Set of job vertices to restore. State for these vertices is * restored via {@link Execution * @return <code>true</code> if state was restored, <code>false</code> otherwise. * @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. * @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. */ public boolean restoreLatestCheckpointedStateToSubtasks(final Set<ExecutionJobVertex> tasks) throws Exception { return restoreLatestCheckpointedStateInternal(tasks, false, false, true); } /** * Restores the latest checkpointed state to all tasks and all coordinators. * This method represents a "global restore"-style operation where all stateful tasks * and coordinators from the given set of Job Vertices are restored. * are restored to their latest checkpointed state. * * @param tasks Set of job vertices to restore. 
State for these vertices is * restored via {@link Execution * @param allowNonRestoredState Allow checkpoint state that cannot be mapped * to any job vertex in tasks. * @return <code>true</code> if state was restored, <code>false</code> otherwise. * @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. * @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. */ public boolean restoreLatestCheckpointedStateToAll( final Set<ExecutionJobVertex> tasks, final boolean allowNonRestoredState) throws Exception { return restoreLatestCheckpointedStateInternal(tasks, true, false, allowNonRestoredState); } private boolean restoreLatestCheckpointedStateInternal( final Set<ExecutionJobVertex> tasks, final boolean restoreCoordinators, final boolean errorIfNoCheckpoint, final boolean allowNonRestoredState) throws Exception { synchronized (lock) { if (shutdown) { throw new IllegalStateException("CheckpointCoordinator is shut down"); } sharedStateRegistry.close(); sharedStateRegistry = sharedStateRegistryFactory.create(executor); completedCheckpointStore.recover(); for (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) { completedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry); } LOG.debug("Status of the shared state registry of job {} after restore: {}.", job, sharedStateRegistry); CompletedCheckpoint latest = 
completedCheckpointStore.getLatestCheckpoint(isPreferCheckpointForRecovery); if (latest == null) { if (errorIfNoCheckpoint) { throw new IllegalStateException("No completed checkpoint available"); } else { LOG.debug("Resetting the master hooks."); MasterHooks.reset(masterHooks.values(), LOG); return false; } } LOG.info("Restoring job {} from latest valid checkpoint: {}.", job, latest); final Map<OperatorID, OperatorState> operatorStates = latest.getOperatorStates(); StateAssignmentOperation stateAssignmentOperation = new StateAssignmentOperation(latest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState); stateAssignmentOperation.assignStates(); MasterHooks.restoreMasterHooks( masterHooks, latest.getMasterHookStates(), latest.getCheckpointID(), allowNonRestoredState, LOG); if (restoreCoordinators) { restoreStateToCoordinators(operatorStates); } if (statsTracker != null) { long restoreTimestamp = System.currentTimeMillis(); RestoredCheckpointStats restored = new RestoredCheckpointStats( latest.getCheckpointID(), latest.getProperties(), restoreTimestamp, latest.getExternalPointer()); statsTracker.reportRestoredCheckpoint(restored); } return true; } } /** * Restore the state with given savepoint. * * @param savepointPointer The pointer to the savepoint. * @param allowNonRestored True if allowing checkpoint state that cannot be * mapped to any job vertex in tasks. * @param tasks Map of job vertices to restore. State for these * vertices is restored via * {@link Execution * @param userClassLoader The class loader to resolve serialized classes in * legacy savepoint versions. */ public boolean restoreSavepoint( String savepointPointer, boolean allowNonRestored, Map<JobVertexID, ExecutionJobVertex> tasks, ClassLoader userClassLoader) throws Exception { Preconditions.checkNotNull(savepointPointer, "The savepoint path cannot be null."); LOG.info("Starting job {} from savepoint {} ({})", job, savepointPointer, (allowNonRestored ? 
"allowing non restored state" : "")); final CompletedCheckpointStorageLocation checkpointLocation = checkpointStorage.resolveCheckpoint(savepointPointer); CompletedCheckpoint savepoint = Checkpoints.loadAndValidateCheckpoint( job, tasks, checkpointLocation, userClassLoader, allowNonRestored); completedCheckpointStore.addCheckpoint(savepoint); long nextCheckpointId = savepoint.getCheckpointID() + 1; checkpointIdCounter.setCount(nextCheckpointId); LOG.info("Reset the checkpoint ID of job {} to {}.", job, nextCheckpointId); return restoreLatestCheckpointedStateInternal(new HashSet<>(tasks.values()), true, true, allowNonRestored); } public int getNumberOfPendingCheckpoints() { synchronized (lock) { return this.pendingCheckpoints.size(); } } public int getNumberOfRetainedSuccessfulCheckpoints() { synchronized (lock) { return completedCheckpointStore.getNumberOfRetainedCheckpoints(); } } public Map<Long, PendingCheckpoint> getPendingCheckpoints() { synchronized (lock) { return new HashMap<>(this.pendingCheckpoints); } } public List<CompletedCheckpoint> getSuccessfulCheckpoints() throws Exception { synchronized (lock) { return completedCheckpointStore.getAllCheckpoints(); } } public CheckpointStorageCoordinatorView getCheckpointStorage() { return checkpointStorage; } public CompletedCheckpointStore getCheckpointStore() { return completedCheckpointStore; } public long getCheckpointTimeout() { return checkpointTimeout; } /** * @deprecated use {@link */ @Deprecated @VisibleForTesting PriorityQueue<CheckpointTriggerRequest> getTriggerRequestQueue() { return requestDecider.getTriggerRequestQueue(); } public boolean isTriggering() { return isTriggering; } @VisibleForTesting boolean isCurrentPeriodicTriggerAvailable() { return currentPeriodicTrigger != null; } /** * Returns whether periodic checkpointing has been configured. * * @return <code>true</code> if periodic checkpoints have been configured. 
*/ public boolean isPeriodicCheckpointingConfigured() { return baseInterval != Long.MAX_VALUE; } public void startCheckpointScheduler() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } stopCheckpointScheduler(); periodicScheduling = true; currentPeriodicTrigger = scheduleTriggerWithDelay(getRandomInitDelay()); } } public void stopCheckpointScheduler() { synchronized (lock) { periodicScheduling = false; cancelPeriodicTrigger(); final CheckpointException reason = new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND); abortPendingAndQueuedCheckpoints(reason); numUnsuccessfulCheckpointsTriggers.set(0); } } /** * Aborts all the pending checkpoints due to en exception. * @param exception The exception. */ public void abortPendingCheckpoints(CheckpointException exception) { synchronized (lock) { abortPendingCheckpoints(ignored -> true, exception); } } private void abortPendingCheckpoints( Predicate<PendingCheckpoint> checkpointToFailPredicate, CheckpointException exception) { assert Thread.holdsLock(lock); final PendingCheckpoint[] pendingCheckpointsToFail = pendingCheckpoints .values() .stream() .filter(checkpointToFailPredicate) .toArray(PendingCheckpoint[]::new); for (PendingCheckpoint pendingCheckpoint : pendingCheckpointsToFail) { abortPendingCheckpoint(pendingCheckpoint, exception); } } private void rescheduleTrigger(long tillNextMillis) { cancelPeriodicTrigger(); currentPeriodicTrigger = scheduleTriggerWithDelay(tillNextMillis); } private void cancelPeriodicTrigger() { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } } private long getRandomInitDelay() { return ThreadLocalRandom.current().nextLong(minPauseBetweenCheckpoints, baseInterval + 1L); } private ScheduledFuture<?> scheduleTriggerWithDelay(long initDelay) { return timer.scheduleAtFixedRate( new ScheduledTrigger(), initDelay, baseInterval, 
TimeUnit.MILLISECONDS); } private void restoreStateToCoordinators(final Map<OperatorID, OperatorState> operatorStates) throws Exception { for (OperatorCoordinatorCheckpointContext coordContext : coordinatorsToCheckpoint) { final OperatorState state = operatorStates.get(coordContext.operatorId()); if (state == null) { continue; } final ByteStreamStateHandle coordinatorState = state.getCoordinatorState(); if (coordinatorState != null) { coordContext.resetToCheckpoint(coordinatorState.getData()); } } } public JobStatusListener createActivatorDeactivator() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } if (jobStatusListener == null) { jobStatusListener = new CheckpointCoordinatorDeActivator(this); } return jobStatusListener; } } int getNumQueuedRequests() { return requestDecider.getNumQueuedRequests(); } private final class ScheduledTrigger implements Runnable { @Override public void run() { try { triggerCheckpoint(true); } catch (Exception e) { LOG.error("Exception while triggering checkpoint for job {}.", job, e); } } } /** * Discards the given state object asynchronously belonging to the given job, execution attempt * id and checkpoint id. 
* * @param jobId identifying the job to which the state object belongs * @param executionAttemptID identifying the task to which the state object belongs * @param checkpointId of the state object * @param subtaskState to discard asynchronously */ private void discardSubtaskState( final JobID jobId, final ExecutionAttemptID executionAttemptID, final long checkpointId, final TaskStateSnapshot subtaskState) { if (subtaskState != null) { executor.execute(new Runnable() { @Override public void run() { try { subtaskState.discardState(); } catch (Throwable t2) { LOG.warn("Could not properly discard state object of checkpoint {} " + "belonging to task {} of job {}.", checkpointId, executionAttemptID, jobId, t2); } } }); } } private void abortPendingCheckpoint( PendingCheckpoint pendingCheckpoint, CheckpointException exception) { abortPendingCheckpoint(pendingCheckpoint, exception, null); } private void abortPendingCheckpoint( PendingCheckpoint pendingCheckpoint, CheckpointException exception, @Nullable final ExecutionAttemptID executionAttemptID) { assert(Thread.holdsLock(lock)); if (!pendingCheckpoint.isDiscarded()) { try { pendingCheckpoint.abort( exception.getCheckpointFailureReason(), exception.getCause()); if (pendingCheckpoint.getProps().isSavepoint() && pendingCheckpoint.getProps().isSynchronous()) { failureManager.handleSynchronousSavepointFailure(exception); } else if (executionAttemptID != null) { failureManager.handleTaskLevelCheckpointException( exception, pendingCheckpoint.getCheckpointId(), executionAttemptID); } else { failureManager.handleJobLevelCheckpointException( exception, of(pendingCheckpoint.getCheckpointId())); } } finally { sendAbortedMessages(pendingCheckpoint.getCheckpointId(), pendingCheckpoint.getCheckpointTimestamp()); pendingCheckpoints.remove(pendingCheckpoint.getCheckpointId()); rememberRecentCheckpointId(pendingCheckpoint.getCheckpointId()); timer.execute(this::executeQueuedRequest); } } } private void preCheckGlobalState(boolean 
isPeriodic) throws CheckpointException { if (shutdown) { throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); } if (isPeriodic && !periodicScheduling) { throw new CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN); } } /** * Check if all tasks that we need to trigger are running. If not, abort the checkpoint. * * @return the executions need to be triggered. * @throws CheckpointException the exception fails checking */ private Execution[] getTriggerExecutions() throws CheckpointException { Execution[] executions = new Execution[tasksToTrigger.length]; for (int i = 0; i < tasksToTrigger.length; i++) { Execution ee = tasksToTrigger[i].getCurrentExecutionAttempt(); if (ee == null) { LOG.info( "Checkpoint triggering task {} of job {} is not being executed at the moment. Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex(), job); throw new CheckpointException( CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } else if (ee.getState() == ExecutionState.RUNNING) { executions[i] = ee; } else { LOG.info( "Checkpoint triggering task {} of job {} is not in state {} but {} instead. Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex(), job, ExecutionState.RUNNING, ee.getState()); throw new CheckpointException( CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } return executions; } /** * Check if all tasks that need to acknowledge the checkpoint are running. 
* If not, abort the checkpoint * * @return the execution vertices which should give an ack response * @throws CheckpointException the exception fails checking */ private Map<ExecutionAttemptID, ExecutionVertex> getAckTasks() throws CheckpointException { Map<ExecutionAttemptID, ExecutionVertex> ackTasks = new HashMap<>(tasksToWaitFor.length); for (ExecutionVertex ev : tasksToWaitFor) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ackTasks.put(ee.getAttemptId(), ev); } else { LOG.info( "Checkpoint acknowledging task {} of job {} is not being executed at the moment. Aborting checkpoint.", ev.getTaskNameWithSubtaskIndex(), job); throw new CheckpointException( CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } return ackTasks; } private void abortPendingAndQueuedCheckpoints(CheckpointException exception) { assert(Thread.holdsLock(lock)); requestDecider.abortAll(exception); abortPendingCheckpoints(exception); } /** * The canceller of checkpoint. The checkpoint might be cancelled if it doesn't finish in a * configured period. 
*/ private class CheckpointCanceller implements Runnable { private final PendingCheckpoint pendingCheckpoint; private CheckpointCanceller(PendingCheckpoint pendingCheckpoint) { this.pendingCheckpoint = checkNotNull(pendingCheckpoint); } @Override public void run() { synchronized (lock) { if (!pendingCheckpoint.isDiscarded()) { LOG.info("Checkpoint {} of job {} expired before completing.", pendingCheckpoint.getCheckpointId(), job); abortPendingCheckpoint( pendingCheckpoint, new CheckpointException(CheckpointFailureReason.CHECKPOINT_EXPIRED)); } } } } private static CheckpointException getCheckpointException( CheckpointFailureReason defaultReason, Throwable throwable) { final Optional<CheckpointException> checkpointExceptionOptional = ExceptionUtils.findThrowable(throwable, CheckpointException.class); return checkpointExceptionOptional .orElseGet(() -> new CheckpointException(defaultReason, throwable)); } private static class CheckpointIdAndStorageLocation { private final long checkpointId; private final CheckpointStorageLocation checkpointStorageLocation; CheckpointIdAndStorageLocation( long checkpointId, CheckpointStorageLocation checkpointStorageLocation) { this.checkpointId = checkpointId; this.checkpointStorageLocation = checkNotNull(checkpointStorageLocation); } } static class CheckpointTriggerRequest { final long timestamp; final CheckpointProperties props; final @Nullable String externalSavepointLocation; final boolean isPeriodic; final boolean advanceToEndOfTime; private final CompletableFuture<CompletedCheckpoint> onCompletionPromise = new CompletableFuture<>(); CheckpointTriggerRequest( CheckpointProperties props, @Nullable String externalSavepointLocation, boolean isPeriodic, boolean advanceToEndOfTime) { this.timestamp = System.currentTimeMillis(); this.props = checkNotNull(props); this.externalSavepointLocation = externalSavepointLocation; this.isPeriodic = isPeriodic; this.advanceToEndOfTime = advanceToEndOfTime; } 
CompletableFuture<CompletedCheckpoint> getOnCompletionFuture() { return onCompletionPromise; } public void completeExceptionally(CheckpointException exception) { onCompletionPromise.completeExceptionally(exception); } public boolean isForce() { return props.forceCheckpoint(); } } }
class CheckpointCoordinator { private static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class); /** The number of recent checkpoints whose IDs are remembered. */ private static final int NUM_GHOST_CHECKPOINT_IDS = 16; /** Coordinator-wide lock to safeguard the checkpoint updates. */ private final Object lock = new Object(); /** The job whose checkpoint this coordinator coordinates. */ private final JobID job; /** Default checkpoint properties. **/ private final CheckpointProperties checkpointProperties; /** The executor used for asynchronous calls, like potentially blocking I/O. */ private final Executor executor; /** Tasks who need to be sent a message when a checkpoint is started. */ private final ExecutionVertex[] tasksToTrigger; /** Tasks who need to acknowledge a checkpoint before it succeeds. */ private final ExecutionVertex[] tasksToWaitFor; /** Tasks who need to be sent a message when a checkpoint is confirmed. */ private final ExecutionVertex[] tasksToCommitTo; /** The operator coordinators that need to be checkpointed. */ private final Collection<OperatorCoordinatorCheckpointContext> coordinatorsToCheckpoint; /** Map from checkpoint ID to the pending checkpoint. */ @GuardedBy("lock") private final Map<Long, PendingCheckpoint> pendingCheckpoints; /** Completed checkpoints. Implementations can be blocking. Make sure calls to methods * accessing this don't block the job manager actor and run asynchronously. */ private final CompletedCheckpointStore completedCheckpointStore; /** The root checkpoint state backend, which is responsible for initializing the * checkpoint, storing the metadata, and cleaning up the checkpoint. */ private final CheckpointStorageCoordinatorView checkpointStorage; /** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */ private final ArrayDeque<Long> recentPendingCheckpoints; /** Checkpoint ID counter to ensure ascending IDs. 
In case of job manager failures, these * need to be ascending across job managers. */ private final CheckpointIDCounter checkpointIdCounter; /** The base checkpoint interval. Actual trigger time may be affected by the * max concurrent checkpoints and minimum-pause values */ private final long baseInterval; /** The max time (in ms) that a checkpoint may take. */ private final long checkpointTimeout; /** The min time(in ms) to delay after a checkpoint could be triggered. Allows to * enforce minimum processing time between checkpoint attempts */ private final long minPauseBetweenCheckpoints; /** The timer that handles the checkpoint timeouts and triggers periodic checkpoints. * It must be single-threaded. Eventually it will be replaced by main thread executor. */ private final ScheduledExecutor timer; /** The master checkpoint hooks executed by this checkpoint coordinator. */ private final HashMap<String, MasterTriggerRestoreHook<?>> masterHooks; private final boolean unalignedCheckpointsEnabled; /** Actor that receives status updates from the execution graph this coordinator works for. */ private JobStatusListener jobStatusListener; /** The number of consecutive failed trigger attempts. */ private final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0); /** A handle to the current periodic trigger, to cancel it when necessary. */ private ScheduledFuture<?> currentPeriodicTrigger; /** The timestamp (via {@link Clock * completed. */ private long lastCheckpointCompletionRelativeTime; /** Flag whether a triggered checkpoint should immediately schedule the next checkpoint. * Non-volatile, because only accessed in synchronized scope */ private boolean periodicScheduling; /** Flag marking the coordinator as shut down (not accepting any messages any more). */ private volatile boolean shutdown; /** Optional tracker for checkpoint statistics. */ @Nullable private CheckpointStatsTracker statsTracker; /** A factory for SharedStateRegistry objects. 
*/ private final SharedStateRegistryFactory sharedStateRegistryFactory; /** Registry that tracks state which is shared across (incremental) checkpoints. */ private SharedStateRegistry sharedStateRegistry; private boolean isPreferCheckpointForRecovery; private final CheckpointFailureManager failureManager; private final Clock clock; private final boolean isExactlyOnceMode; /** Flag represents there is an in-flight trigger request. */ private boolean isTriggering = false; private final CheckpointRequestDecider requestDecider; public CheckpointCoordinator( JobID job, CheckpointCoordinatorConfiguration chkConfig, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, Collection<OperatorCoordinatorCheckpointContext> coordinatorsToCheckpoint, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, StateBackend checkpointStateBackend, Executor executor, ScheduledExecutor timer, SharedStateRegistryFactory sharedStateRegistryFactory, CheckpointFailureManager failureManager) { this( job, chkConfig, tasksToTrigger, tasksToWaitFor, tasksToCommitTo, coordinatorsToCheckpoint, checkpointIDCounter, completedCheckpointStore, checkpointStateBackend, executor, timer, sharedStateRegistryFactory, failureManager, SystemClock.getInstance()); } @VisibleForTesting public CheckpointCoordinator( JobID job, CheckpointCoordinatorConfiguration chkConfig, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, Collection<OperatorCoordinatorCheckpointContext> coordinatorsToCheckpoint, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, StateBackend checkpointStateBackend, Executor executor, ScheduledExecutor timer, SharedStateRegistryFactory sharedStateRegistryFactory, CheckpointFailureManager failureManager, Clock clock) { checkNotNull(checkpointStateBackend); long minPauseBetweenCheckpoints = 
chkConfig.getMinPauseBetweenCheckpoints(); if (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) { minPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000; } long baseInterval = chkConfig.getCheckpointInterval(); if (baseInterval < minPauseBetweenCheckpoints) { baseInterval = minPauseBetweenCheckpoints; } this.job = checkNotNull(job); this.baseInterval = baseInterval; this.checkpointTimeout = chkConfig.getCheckpointTimeout(); this.minPauseBetweenCheckpoints = minPauseBetweenCheckpoints; this.tasksToTrigger = checkNotNull(tasksToTrigger); this.tasksToWaitFor = checkNotNull(tasksToWaitFor); this.tasksToCommitTo = checkNotNull(tasksToCommitTo); this.coordinatorsToCheckpoint = Collections.unmodifiableCollection(coordinatorsToCheckpoint); this.pendingCheckpoints = new LinkedHashMap<>(); this.checkpointIdCounter = checkNotNull(checkpointIDCounter); this.completedCheckpointStore = checkNotNull(completedCheckpointStore); this.executor = checkNotNull(executor); this.sharedStateRegistryFactory = checkNotNull(sharedStateRegistryFactory); this.sharedStateRegistry = sharedStateRegistryFactory.create(executor); this.isPreferCheckpointForRecovery = chkConfig.isPreferCheckpointForRecovery(); this.failureManager = checkNotNull(failureManager); this.clock = checkNotNull(clock); this.isExactlyOnceMode = chkConfig.isExactlyOnce(); this.unalignedCheckpointsEnabled = chkConfig.isUnalignedCheckpointsEnabled(); this.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS); this.masterHooks = new HashMap<>(); this.timer = timer; this.checkpointProperties = CheckpointProperties.forCheckpoint(chkConfig.getCheckpointRetentionPolicy()); try { this.checkpointStorage = checkpointStateBackend.createCheckpointStorage(job); checkpointStorage.initializeBaseLocations(); } catch (IOException e) { throw new FlinkRuntimeException("Failed to create checkpoint storage at checkpoint coordinator side.", e); } try { checkpointIDCounter.start(); } catch (Throwable t) { throw new 
RuntimeException("Failed to start checkpoint ID counter: " + t.getMessage(), t); } this.requestDecider = new CheckpointRequestDecider( chkConfig.getMaxConcurrentCheckpoints(), this::rescheduleTrigger, this.clock, this.minPauseBetweenCheckpoints, this.pendingCheckpoints::size, this.lock); } /** * Adds the given master hook to the checkpoint coordinator. This method does nothing, if * the checkpoint coordinator already contained a hook with the same ID (as defined via * {@link MasterTriggerRestoreHook * * @param hook The hook to add. * @return True, if the hook was added, false if the checkpoint coordinator already * contained a hook with the same ID. */ public boolean addMasterHook(MasterTriggerRestoreHook<?> hook) { checkNotNull(hook); final String id = hook.getIdentifier(); checkArgument(!StringUtils.isNullOrWhitespaceOnly(id), "The hook has a null or empty id"); synchronized (lock) { if (!masterHooks.containsKey(id)) { masterHooks.put(id, hook); return true; } else { return false; } } } /** * Gets the number of currently register master hooks. */ public int getNumberOfRegisteredMasterHooks() { synchronized (lock) { return masterHooks.size(); } } /** * Sets the checkpoint stats tracker. * * @param statsTracker The checkpoint stats tracker. */ public void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) { this.statsTracker = statsTracker; } /** * Shuts down the checkpoint coordinator. * * <p>After this method has been called, the coordinator does not accept * and further messages and cannot trigger any further checkpoints. 
*/ public void shutdown(JobStatus jobStatus) throws Exception { synchronized (lock) { if (!shutdown) { shutdown = true; LOG.info("Stopping checkpoint coordinator for job {}.", job); periodicScheduling = false; MasterHooks.close(masterHooks.values(), LOG); masterHooks.clear(); final CheckpointException reason = new CheckpointException( CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN); abortPendingAndQueuedCheckpoints(reason); completedCheckpointStore.shutdown(jobStatus); checkpointIdCounter.shutdown(jobStatus); } } } public boolean isShutdown() { return shutdown; } /** * Triggers a savepoint with the given savepoint directory as a target. * * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. * @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSavepoint(@Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSavepoint(!unalignedCheckpointsEnabled); return triggerSavepointInternal(properties, false, targetLocation); } /** * Triggers a synchronous savepoint with the given savepoint directory as a target. * * @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline * to fire any registered event-time timers. * @param targetLocation Target location for the savepoint, optional. If null, the * state backend's configured default will be used. 
* @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured */ public CompletableFuture<CompletedCheckpoint> triggerSynchronousSavepoint( final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { final CheckpointProperties properties = CheckpointProperties.forSyncSavepoint(!unalignedCheckpointsEnabled); return triggerSavepointInternal(properties, advanceToEndOfEventTime, targetLocation); } private CompletableFuture<CompletedCheckpoint> triggerSavepointInternal( final CheckpointProperties checkpointProperties, final boolean advanceToEndOfEventTime, @Nullable final String targetLocation) { checkNotNull(checkpointProperties); final CompletableFuture<CompletedCheckpoint> resultFuture = new CompletableFuture<>(); timer.execute(() -> triggerCheckpoint( checkpointProperties, targetLocation, false, advanceToEndOfEventTime) .whenComplete((completedCheckpoint, throwable) -> { if (throwable == null) { resultFuture.complete(completedCheckpoint); } else { resultFuture.completeExceptionally(throwable); } })); return resultFuture; } /** * Triggers a new standard checkpoint and uses the given timestamp as the checkpoint * timestamp. The return value is a future. It completes when the checkpoint triggered finishes * or an error occurred. * * @param isPeriodic Flag indicating whether this triggered checkpoint is * periodic. If this flag is true, but the periodic scheduler is disabled, * the checkpoint will be declined. * @return a future to the completed checkpoint. 
*/ public CompletableFuture<CompletedCheckpoint> triggerCheckpoint(boolean isPeriodic) { return triggerCheckpoint(checkpointProperties, null, isPeriodic, false); } @VisibleForTesting public CompletableFuture<CompletedCheckpoint> triggerCheckpoint( CheckpointProperties props, @Nullable String externalSavepointLocation, boolean isPeriodic, boolean advanceToEndOfTime) { if (advanceToEndOfTime && !(props.isSynchronous() && props.isSavepoint())) { return FutureUtils.completedExceptionally(new IllegalArgumentException( "Only synchronous savepoints are allowed to advance the watermark to MAX.")); } CheckpointTriggerRequest request = new CheckpointTriggerRequest(props, externalSavepointLocation, isPeriodic, advanceToEndOfTime); requestDecider .chooseRequestToExecute(request, isTriggering, lastCheckpointCompletionRelativeTime) .ifPresent(this::startTriggeringCheckpoint); return request.onCompletionPromise; } /** * Initialize the checkpoint trigger asynchronously. It will be executed in io thread due to * it might be time-consuming. * * @param props checkpoint properties * @param externalSavepointLocation the external savepoint location, it might be null * @return the future of initialized result, checkpoint id and checkpoint location */ private CompletableFuture<CheckpointIdAndStorageLocation> initializeCheckpoint( CheckpointProperties props, @Nullable String externalSavepointLocation) { return CompletableFuture.supplyAsync(() -> { try { long checkpointID = checkpointIdCounter.getAndIncrement(); CheckpointStorageLocation checkpointStorageLocation = props.isSavepoint() ? 
checkpointStorage .initializeLocationForSavepoint(checkpointID, externalSavepointLocation) : checkpointStorage.initializeLocationForCheckpoint(checkpointID); return new CheckpointIdAndStorageLocation(checkpointID, checkpointStorageLocation); } catch (Throwable throwable) { throw new CompletionException(throwable); } }, executor); } private PendingCheckpoint createPendingCheckpoint( long timestamp, CheckpointProperties props, Map<ExecutionAttemptID, ExecutionVertex> ackTasks, boolean isPeriodic, long checkpointID, CheckpointStorageLocation checkpointStorageLocation, CompletableFuture<CompletedCheckpoint> onCompletionPromise) { synchronized (lock) { try { preCheckGlobalState(isPeriodic); } catch (Throwable t) { throw new CompletionException(t); } } final PendingCheckpoint checkpoint = new PendingCheckpoint( job, checkpointID, timestamp, ackTasks, OperatorInfo.getIds(coordinatorsToCheckpoint), masterHooks.keySet(), props, checkpointStorageLocation, executor, onCompletionPromise); if (statsTracker != null) { PendingCheckpointStats callback = statsTracker.reportPendingCheckpoint( checkpointID, timestamp, props); checkpoint.setStatsCallback(callback); } synchronized (lock) { pendingCheckpoints.put(checkpointID, checkpoint); ScheduledFuture<?> cancellerHandle = timer.schedule( new CheckpointCanceller(checkpoint), checkpointTimeout, TimeUnit.MILLISECONDS); if (!checkpoint.setCancellerHandle(cancellerHandle)) { cancellerHandle.cancel(false); } } LOG.info("Triggering checkpoint {} (type={}) @ {} for job {}.", checkpointID, checkpoint.getProps().getCheckpointType(), timestamp, job); return checkpoint; } /** * Snapshot master hook states asynchronously. 
* * @param checkpoint the pending checkpoint * @return the future represents master hook states are finished or not */ private CompletableFuture<Void> snapshotMasterState(PendingCheckpoint checkpoint) { if (masterHooks.isEmpty()) { return CompletableFuture.completedFuture(null); } final long checkpointID = checkpoint.getCheckpointId(); final long timestamp = checkpoint.getCheckpointTimestamp(); final CompletableFuture<Void> masterStateCompletableFuture = new CompletableFuture<>(); for (MasterTriggerRestoreHook<?> masterHook : masterHooks.values()) { MasterHooks .triggerHook(masterHook, checkpointID, timestamp, executor) .whenCompleteAsync( (masterState, throwable) -> { try { synchronized (lock) { if (masterStateCompletableFuture.isDone()) { return; } if (checkpoint.isDiscarded()) { throw new IllegalStateException( "Checkpoint " + checkpointID + " has been discarded"); } if (throwable == null) { checkpoint.acknowledgeMasterState( masterHook.getIdentifier(), masterState); if (checkpoint.areMasterStatesFullyAcknowledged()) { masterStateCompletableFuture.complete(null); } } else { masterStateCompletableFuture.completeExceptionally(throwable); } } } catch (Throwable t) { masterStateCompletableFuture.completeExceptionally(t); } }, timer); } return masterStateCompletableFuture; } /** * Snapshot task state. * * @param timestamp the timestamp of this checkpoint reques * @param checkpointID the checkpoint id * @param checkpointStorageLocation the checkpoint location * @param props the checkpoint properties * @param executions the executions which should be triggered * @param advanceToEndOfTime Flag indicating if the source should inject a {@code MAX_WATERMARK} * in the pipeline to fire any registered event-time timers. 
 */
private void snapshotTaskState(
        long timestamp,
        long checkpointID,
        CheckpointStorageLocation checkpointStorageLocation,
        CheckpointProperties props,
        Execution[] executions,
        boolean advanceToEndOfTime) {

    // Build the trigger options once for all executions. Unaligned checkpoints
    // only apply to regular CHECKPOINT-type snapshots, never to savepoints.
    final CheckpointOptions checkpointOptions = new CheckpointOptions(
        props.getCheckpointType(),
        checkpointStorageLocation.getLocationReference(),
        isExactlyOnceMode,
        props.getCheckpointType() == CheckpointType.CHECKPOINT && unalignedCheckpointsEnabled);

    // Send the trigger RPC to every triggering execution. Synchronous savepoints
    // take the dedicated path so sources can optionally advance to end-of-time
    // (MAX_WATERMARK) before snapshotting.
    for (Execution execution: executions) {
        if (props.isSynchronous()) {
            execution.triggerSynchronousSavepoint(checkpointID, timestamp, checkpointOptions, advanceToEndOfTime);
        } else {
            execution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions);
        }
    }
}

/**
 * Trigger request is successful.
 * NOTE, it must be invoked if trigger request is successful.
 */
private void onTriggerSuccess() {
    isTriggering = false; // release the single in-flight trigger slot
    numUnsuccessfulCheckpointsTriggers.set(0); // reset consecutive-failure counter
    executeQueuedRequest(); // allow a queued trigger request to proceed
}

/**
 * The trigger request is failed prematurely without a proper initialization.
 * There is no resource to release, but the completion promise needs to fail manually here.
 *
 * @param onCompletionPromise the completion promise of the checkpoint/savepoint
 * @param throwable the reason of trigger failure
 */
private void onTriggerFailure(
        CheckpointTriggerRequest onCompletionPromise, Throwable throwable) {
    final CheckpointException checkpointException =
        getCheckpointException(CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable);
    // Fail the caller-facing future first, then run the common failure path.
    // Passing a null checkpoint signals that no PendingCheckpoint was created yet,
    // so there is nothing to discard there.
    onCompletionPromise.completeExceptionally(checkpointException);
    onTriggerFailure((PendingCheckpoint) null, checkpointException);
}

/**
 * The trigger request is failed.
 * NOTE, it must be invoked if trigger request is failed.
 *
 * @param checkpoint the pending checkpoint which is failed. It could be null if it's failed
 *                   prematurely without a proper initialization.
* @param throwable the reason of trigger failure */ private void onTriggerFailure(@Nullable PendingCheckpoint checkpoint, Throwable throwable) { throwable = ExceptionUtils.stripCompletionException(throwable); try { coordinatorsToCheckpoint.forEach(OperatorCoordinatorCheckpointContext::abortCurrentTriggering); if (checkpoint != null && !checkpoint.isDiscarded()) { int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet(); LOG.warn( "Failed to trigger checkpoint {} for job {}. ({} consecutive failed attempts so far)", checkpoint.getCheckpointId(), job, numUnsuccessful, throwable); final CheckpointException cause = getCheckpointException( CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, throwable); synchronized (lock) { abortPendingCheckpoint(checkpoint, cause); } } } finally { isTriggering = false; executeQueuedRequest(); } } private void executeQueuedRequest() { requestDecider.chooseQueuedRequestToExecute(isTriggering, lastCheckpointCompletionRelativeTime).ifPresent(this::startTriggeringCheckpoint); } /** * Receives a {@link DeclineCheckpoint} message for a pending checkpoint. * * @param message Checkpoint decline from the task manager * @param taskManagerLocationInfo The location info of the decline checkpoint message's sender */ public void receiveDeclineMessage(DeclineCheckpoint message, String taskManagerLocationInfo) { if (shutdown || message == null) { return; } if (!job.equals(message.getJob())) { throw new IllegalArgumentException("Received DeclineCheckpoint message for job " + message.getJob() + " from " + taskManagerLocationInfo + " while this coordinator handles job " + job); } final long checkpointId = message.getCheckpointId(); final String reason = (message.getReason() != null ? 
message.getReason().getMessage() : ""); PendingCheckpoint checkpoint; synchronized (lock) { if (shutdown) { return; } checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null) { Preconditions.checkState( !checkpoint.isDiscarded(), "Received message for discarded but non-removed checkpoint " + checkpointId); LOG.info("Decline checkpoint {} by task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo); final CheckpointException checkpointException; if (message.getReason() == null) { checkpointException = new CheckpointException(CheckpointFailureReason.CHECKPOINT_DECLINED); } else { checkpointException = getCheckpointException( CheckpointFailureReason.JOB_FAILURE, message.getReason()); } abortPendingCheckpoint( checkpoint, checkpointException, message.getTaskExecutionId()); } else if (LOG.isDebugEnabled()) { if (recentPendingCheckpoints.contains(checkpointId)) { LOG.debug("Received another decline message for now expired checkpoint attempt {} from task {} of job {} at {} : {}", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason); } else { LOG.debug("Received decline message for unknown (too old?) checkpoint attempt {} from task {} of job {} at {} : {}", checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason); } } } } /** * Receives an AcknowledgeCheckpoint message and returns whether the * message was associated with a pending checkpoint. * * @param message Checkpoint ack from the task manager * * @param taskManagerLocationInfo The location of the acknowledge checkpoint message's sender * @return Flag indicating whether the ack'd checkpoint was associated * with a pending checkpoint. * * @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store. 
*/ public boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message, String taskManagerLocationInfo) throws CheckpointException { if (shutdown || message == null) { return false; } if (!job.equals(message.getJob())) { LOG.error("Received wrong AcknowledgeCheckpoint message for job {} from {} : {}", job, taskManagerLocationInfo, message); return false; } final long checkpointId = message.getCheckpointId(); synchronized (lock) { if (shutdown) { return false; } final PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null && !checkpoint.isDiscarded()) { switch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) { case SUCCESS: LOG.debug("Received acknowledge message for checkpoint {} from task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); if (checkpoint.areTasksFullyAcknowledged()) { completePendingCheckpoint(checkpoint); } break; case DUPLICATE: LOG.debug("Received a duplicate acknowledge message for checkpoint {}, task {}, job {}, location {}.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); break; case UNKNOWN: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " + "because the task's execution attempt id was unknown. Discarding " + "the state handle to avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); break; case DISCARDED: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " + "because the pending checkpoint had been discarded. 
Discarding the " + "state handle tp avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); } return true; } else if (checkpoint != null) { throw new IllegalStateException( "Received message for discarded but non-removed checkpoint " + checkpointId); } else { boolean wasPendingCheckpoint; if (recentPendingCheckpoints.contains(checkpointId)) { wasPendingCheckpoint = true; LOG.warn("Received late message for now expired checkpoint attempt {} from task " + "{} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); } else { LOG.debug("Received message for an unknown checkpoint {} from task {} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo); wasPendingCheckpoint = false; } discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); return wasPendingCheckpoint; } } } /** * Try to complete the given pending checkpoint. * * <p>Important: This method should only be called in the checkpoint lock scope. 
* * @param pendingCheckpoint to complete * @throws CheckpointException if the completion failed */ private void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException { final long checkpointId = pendingCheckpoint.getCheckpointId(); final CompletedCheckpoint completedCheckpoint; Map<OperatorID, OperatorState> operatorStates = pendingCheckpoint.getOperatorStates(); sharedStateRegistry.registerAll(operatorStates.values()); try { try { completedCheckpoint = pendingCheckpoint.finalizeCheckpoint(); failureManager.handleCheckpointSuccess(pendingCheckpoint.getCheckpointId()); } catch (Exception e1) { if (!pendingCheckpoint.isDiscarded()) { abortPendingCheckpoint( pendingCheckpoint, new CheckpointException( CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1)); } throw new CheckpointException("Could not finalize the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1); } Preconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null); try { completedCheckpointStore.addCheckpoint(completedCheckpoint); } catch (Exception exception) { executor.execute(new Runnable() { @Override public void run() { try { completedCheckpoint.discardOnFailedStoring(); } catch (Throwable t) { LOG.warn("Could not properly discard completed checkpoint {}.", completedCheckpoint.getCheckpointID(), t); } } }); sendAbortedMessages(checkpointId, pendingCheckpoint.getCheckpointTimestamp()); throw new CheckpointException("Could not complete the pending checkpoint " + checkpointId + '.', CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, exception); } } finally { pendingCheckpoints.remove(checkpointId); timer.execute(this::executeQueuedRequest); } rememberRecentCheckpointId(checkpointId); dropSubsumedCheckpoints(checkpointId); lastCheckpointCompletionRelativeTime = clock.relativeTimeMillis(); LOG.info("Completed checkpoint {} for job {} ({} bytes in {} ms).", checkpointId, job, 
completedCheckpoint.getStateSize(), completedCheckpoint.getDuration()); if (LOG.isDebugEnabled()) { StringBuilder builder = new StringBuilder(); builder.append("Checkpoint state: "); for (OperatorState state : completedCheckpoint.getOperatorStates().values()) { builder.append(state); builder.append(", "); } builder.setLength(builder.length() - 2); LOG.debug(builder.toString()); } sendAcknowledgeMessages(checkpointId, completedCheckpoint.getTimestamp()); } private void sendAcknowledgeMessages(long checkpointId, long timestamp) { for (ExecutionVertex ev : tasksToCommitTo) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ee.notifyCheckpointComplete(checkpointId, timestamp); } } for (OperatorCoordinatorCheckpointContext coordinatorContext : coordinatorsToCheckpoint) { coordinatorContext.checkpointComplete(checkpointId); } } private void sendAbortedMessages(long checkpointId, long timeStamp) { executor.execute(() -> { for (ExecutionVertex ev : tasksToCommitTo) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ee.notifyCheckpointAborted(checkpointId, timeStamp); } } }); } /** * Fails all pending checkpoints which have not been acknowledged by the given execution * attempt id. 
* * @param executionAttemptId for which to discard unacknowledged pending checkpoints * @param cause of the failure */ public void failUnacknowledgedPendingCheckpointsFor(ExecutionAttemptID executionAttemptId, Throwable cause) { synchronized (lock) { abortPendingCheckpoints( checkpoint -> !checkpoint.isAcknowledgedBy(executionAttemptId), new CheckpointException(CheckpointFailureReason.TASK_FAILURE, cause)); } } private void rememberRecentCheckpointId(long id) { if (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) { recentPendingCheckpoints.removeFirst(); } recentPendingCheckpoints.addLast(id); } private void dropSubsumedCheckpoints(long checkpointId) { abortPendingCheckpoints( checkpoint -> checkpoint.getCheckpointId() < checkpointId && checkpoint.canBeSubsumed(), new CheckpointException(CheckpointFailureReason.CHECKPOINT_SUBSUMED)); } /** * Restores the latest checkpointed state. * * @param tasks Map of job vertices to restore. State for these vertices is * restored via {@link Execution * @param errorIfNoCheckpoint Fail if no completed checkpoint is available to * restore from. * @param allowNonRestoredState Allow checkpoint state that cannot be mapped * to any job vertex in tasks. * @return <code>true</code> if state was restored, <code>false</code> otherwise. * @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. * @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. 
*/ @Deprecated public boolean restoreLatestCheckpointedState( Map<JobVertexID, ExecutionJobVertex> tasks, boolean errorIfNoCheckpoint, boolean allowNonRestoredState) throws Exception { return restoreLatestCheckpointedStateInternal(new HashSet<>(tasks.values()), true, errorIfNoCheckpoint, allowNonRestoredState); } /** * Restores the latest checkpointed state to a set of subtasks. This method represents a "local" * or "regional" failover and does restore states to coordinators. Note that a regional failover * might still include all tasks. * * @param tasks Set of job vertices to restore. State for these vertices is * restored via {@link Execution * @return <code>true</code> if state was restored, <code>false</code> otherwise. * @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. * @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. */ public boolean restoreLatestCheckpointedStateToSubtasks(final Set<ExecutionJobVertex> tasks) throws Exception { return restoreLatestCheckpointedStateInternal(tasks, false, false, true); } /** * Restores the latest checkpointed state to all tasks and all coordinators. * This method represents a "global restore"-style operation where all stateful tasks * and coordinators from the given set of Job Vertices are restored. * are restored to their latest checkpointed state. * * @param tasks Set of job vertices to restore. 
State for these vertices is * restored via {@link Execution * @param allowNonRestoredState Allow checkpoint state that cannot be mapped * to any job vertex in tasks. * @return <code>true</code> if state was restored, <code>false</code> otherwise. * @throws IllegalStateException If the CheckpointCoordinator is shut down. * @throws IllegalStateException If no completed checkpoint is available and * the <code>failIfNoCheckpoint</code> flag has been set. * @throws IllegalStateException If the checkpoint contains state that cannot be * mapped to any job vertex in <code>tasks</code> and the * <code>allowNonRestoredState</code> flag has not been set. * @throws IllegalStateException If the max parallelism changed for an operator * that restores state from this checkpoint. * @throws IllegalStateException If the parallelism changed for an operator * that restores <i>non-partitioned</i> state from this * checkpoint. */ public boolean restoreLatestCheckpointedStateToAll( final Set<ExecutionJobVertex> tasks, final boolean allowNonRestoredState) throws Exception { return restoreLatestCheckpointedStateInternal(tasks, true, false, allowNonRestoredState); } private boolean restoreLatestCheckpointedStateInternal( final Set<ExecutionJobVertex> tasks, final boolean restoreCoordinators, final boolean errorIfNoCheckpoint, final boolean allowNonRestoredState) throws Exception { synchronized (lock) { if (shutdown) { throw new IllegalStateException("CheckpointCoordinator is shut down"); } sharedStateRegistry.close(); sharedStateRegistry = sharedStateRegistryFactory.create(executor); completedCheckpointStore.recover(); for (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) { completedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry); } LOG.debug("Status of the shared state registry of job {} after restore: {}.", job, sharedStateRegistry); CompletedCheckpoint latest = 
completedCheckpointStore.getLatestCheckpoint(isPreferCheckpointForRecovery); if (latest == null) { if (errorIfNoCheckpoint) { throw new IllegalStateException("No completed checkpoint available"); } else { LOG.debug("Resetting the master hooks."); MasterHooks.reset(masterHooks.values(), LOG); return false; } } LOG.info("Restoring job {} from latest valid checkpoint: {}.", job, latest); final Map<OperatorID, OperatorState> operatorStates = latest.getOperatorStates(); StateAssignmentOperation stateAssignmentOperation = new StateAssignmentOperation(latest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState); stateAssignmentOperation.assignStates(); MasterHooks.restoreMasterHooks( masterHooks, latest.getMasterHookStates(), latest.getCheckpointID(), allowNonRestoredState, LOG); if (restoreCoordinators) { restoreStateToCoordinators(operatorStates); } if (statsTracker != null) { long restoreTimestamp = System.currentTimeMillis(); RestoredCheckpointStats restored = new RestoredCheckpointStats( latest.getCheckpointID(), latest.getProperties(), restoreTimestamp, latest.getExternalPointer()); statsTracker.reportRestoredCheckpoint(restored); } return true; } } /** * Restore the state with given savepoint. * * @param savepointPointer The pointer to the savepoint. * @param allowNonRestored True if allowing checkpoint state that cannot be * mapped to any job vertex in tasks. * @param tasks Map of job vertices to restore. State for these * vertices is restored via * {@link Execution * @param userClassLoader The class loader to resolve serialized classes in * legacy savepoint versions. */ public boolean restoreSavepoint( String savepointPointer, boolean allowNonRestored, Map<JobVertexID, ExecutionJobVertex> tasks, ClassLoader userClassLoader) throws Exception { Preconditions.checkNotNull(savepointPointer, "The savepoint path cannot be null."); LOG.info("Starting job {} from savepoint {} ({})", job, savepointPointer, (allowNonRestored ? 
"allowing non restored state" : "")); final CompletedCheckpointStorageLocation checkpointLocation = checkpointStorage.resolveCheckpoint(savepointPointer); CompletedCheckpoint savepoint = Checkpoints.loadAndValidateCheckpoint( job, tasks, checkpointLocation, userClassLoader, allowNonRestored); completedCheckpointStore.addCheckpoint(savepoint); long nextCheckpointId = savepoint.getCheckpointID() + 1; checkpointIdCounter.setCount(nextCheckpointId); LOG.info("Reset the checkpoint ID of job {} to {}.", job, nextCheckpointId); return restoreLatestCheckpointedStateInternal(new HashSet<>(tasks.values()), true, true, allowNonRestored); } public int getNumberOfPendingCheckpoints() { synchronized (lock) { return this.pendingCheckpoints.size(); } } public int getNumberOfRetainedSuccessfulCheckpoints() { synchronized (lock) { return completedCheckpointStore.getNumberOfRetainedCheckpoints(); } } public Map<Long, PendingCheckpoint> getPendingCheckpoints() { synchronized (lock) { return new HashMap<>(this.pendingCheckpoints); } } public List<CompletedCheckpoint> getSuccessfulCheckpoints() throws Exception { synchronized (lock) { return completedCheckpointStore.getAllCheckpoints(); } } public CheckpointStorageCoordinatorView getCheckpointStorage() { return checkpointStorage; } public CompletedCheckpointStore getCheckpointStore() { return completedCheckpointStore; } public long getCheckpointTimeout() { return checkpointTimeout; } /** * @deprecated use {@link */ @Deprecated @VisibleForTesting PriorityQueue<CheckpointTriggerRequest> getTriggerRequestQueue() { return requestDecider.getTriggerRequestQueue(); } public boolean isTriggering() { return isTriggering; } @VisibleForTesting boolean isCurrentPeriodicTriggerAvailable() { return currentPeriodicTrigger != null; } /** * Returns whether periodic checkpointing has been configured. * * @return <code>true</code> if periodic checkpoints have been configured. 
*/ public boolean isPeriodicCheckpointingConfigured() { return baseInterval != Long.MAX_VALUE; } public void startCheckpointScheduler() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } stopCheckpointScheduler(); periodicScheduling = true; currentPeriodicTrigger = scheduleTriggerWithDelay(getRandomInitDelay()); } } public void stopCheckpointScheduler() { synchronized (lock) { periodicScheduling = false; cancelPeriodicTrigger(); final CheckpointException reason = new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND); abortPendingAndQueuedCheckpoints(reason); numUnsuccessfulCheckpointsTriggers.set(0); } } /** * Aborts all the pending checkpoints due to en exception. * @param exception The exception. */ public void abortPendingCheckpoints(CheckpointException exception) { synchronized (lock) { abortPendingCheckpoints(ignored -> true, exception); } } private void abortPendingCheckpoints( Predicate<PendingCheckpoint> checkpointToFailPredicate, CheckpointException exception) { assert Thread.holdsLock(lock); final PendingCheckpoint[] pendingCheckpointsToFail = pendingCheckpoints .values() .stream() .filter(checkpointToFailPredicate) .toArray(PendingCheckpoint[]::new); for (PendingCheckpoint pendingCheckpoint : pendingCheckpointsToFail) { abortPendingCheckpoint(pendingCheckpoint, exception); } } private void rescheduleTrigger(long tillNextMillis) { cancelPeriodicTrigger(); currentPeriodicTrigger = scheduleTriggerWithDelay(tillNextMillis); } private void cancelPeriodicTrigger() { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } } private long getRandomInitDelay() { return ThreadLocalRandom.current().nextLong(minPauseBetweenCheckpoints, baseInterval + 1L); } private ScheduledFuture<?> scheduleTriggerWithDelay(long initDelay) { return timer.scheduleAtFixedRate( new ScheduledTrigger(), initDelay, baseInterval, 
TimeUnit.MILLISECONDS); } private void restoreStateToCoordinators(final Map<OperatorID, OperatorState> operatorStates) throws Exception { for (OperatorCoordinatorCheckpointContext coordContext : coordinatorsToCheckpoint) { final OperatorState state = operatorStates.get(coordContext.operatorId()); if (state == null) { continue; } final ByteStreamStateHandle coordinatorState = state.getCoordinatorState(); if (coordinatorState != null) { coordContext.resetToCheckpoint(coordinatorState.getData()); } } } public JobStatusListener createActivatorDeactivator() { synchronized (lock) { if (shutdown) { throw new IllegalArgumentException("Checkpoint coordinator is shut down"); } if (jobStatusListener == null) { jobStatusListener = new CheckpointCoordinatorDeActivator(this); } return jobStatusListener; } } int getNumQueuedRequests() { return requestDecider.getNumQueuedRequests(); } private final class ScheduledTrigger implements Runnable { @Override public void run() { try { triggerCheckpoint(true); } catch (Exception e) { LOG.error("Exception while triggering checkpoint for job {}.", job, e); } } } /** * Discards the given state object asynchronously belonging to the given job, execution attempt * id and checkpoint id. 
 * @param jobId identifying the job to which the state object belongs
 * @param executionAttemptID identifying the task to which the state object belongs
 * @param checkpointId of the state object
 * @param subtaskState to discard asynchronously
 */
private void discardSubtaskState(
        final JobID jobId,
        final ExecutionAttemptID executionAttemptID,
        final long checkpointId,
        final TaskStateSnapshot subtaskState) {

    if (subtaskState != null) {
        // Discarding may touch external storage, so run it on the I/O executor
        // rather than the caller's thread; failures are only logged, never rethrown.
        executor.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    subtaskState.discardState();
                } catch (Throwable t2) {
                    LOG.warn("Could not properly discard state object of checkpoint {} " +
                        "belonging to task {} of job {}.", checkpointId, executionAttemptID, jobId, t2);
                }
            }
        });
    }
}

// Convenience overload: abort without attributing the failure to a specific task.
private void abortPendingCheckpoint(
        PendingCheckpoint pendingCheckpoint,
        CheckpointException exception) {
    abortPendingCheckpoint(pendingCheckpoint, exception, null);
}

// Aborts the given pending checkpoint and routes the failure to the failure
// manager. Must only be called while holding {@code lock} (see the assert).
private void abortPendingCheckpoint(
        PendingCheckpoint pendingCheckpoint,
        CheckpointException exception,
        @Nullable final ExecutionAttemptID executionAttemptID) {

    assert(Thread.holdsLock(lock));

    if (!pendingCheckpoint.isDiscarded()) {
        try {
            pendingCheckpoint.abort(
                exception.getCheckpointFailureReason(), exception.getCause());

            // Route to the matching failure-manager hook: synchronous savepoints
            // have dedicated handling; otherwise distinguish task-level from
            // job-level failures by whether an execution attempt id is known.
            if (pendingCheckpoint.getProps().isSavepoint() &&
                pendingCheckpoint.getProps().isSynchronous()) {
                failureManager.handleSynchronousSavepointFailure(exception);
            } else if (executionAttemptID != null) {
                failureManager.handleTaskLevelCheckpointException(
                    exception, pendingCheckpoint.getCheckpointId(), executionAttemptID);
            } else {
                failureManager.handleJobLevelCheckpointException(
                    exception, pendingCheckpoint.getCheckpointId());
            }
        } finally {
            // Cleanup always runs: notify tasks of the abort, drop bookkeeping,
            // and let a queued trigger request proceed (on the timer thread).
            sendAbortedMessages(pendingCheckpoint.getCheckpointId(), pendingCheckpoint.getCheckpointTimestamp());
            pendingCheckpoints.remove(pendingCheckpoint.getCheckpointId());
            rememberRecentCheckpointId(pendingCheckpoint.getCheckpointId());
            timer.execute(this::executeQueuedRequest);
        }
    }
}

// Rejects trigger requests while the coordinator is shut down, or (for
// periodic triggers only) while periodic scheduling is stopped.
private void preCheckGlobalState(boolean isPeriodic)
    throws CheckpointException {
    if (shutdown) {
        throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);
    }
    if (isPeriodic && !periodicScheduling) {
        throw new CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN);
    }
}

/**
 * Check if all tasks that we need to trigger are running. If not, abort the checkpoint.
 *
 * @return the executions need to be triggered.
 * @throws CheckpointException the exception fails checking
 */
private Execution[] getTriggerExecutions() throws CheckpointException {
    Execution[] executions = new Execution[tasksToTrigger.length];
    for (int i = 0; i < tasksToTrigger.length; i++) {
        Execution ee = tasksToTrigger[i].getCurrentExecutionAttempt();
        if (ee == null) {
            // Task is not deployed at all -> the checkpoint could never complete.
            LOG.info(
                "Checkpoint triggering task {} of job {} is not being executed at the moment. Aborting checkpoint.",
                tasksToTrigger[i].getTaskNameWithSubtaskIndex(),
                job);
            throw new CheckpointException(
                CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);
        } else if (ee.getState() == ExecutionState.RUNNING) {
            executions[i] = ee;
        } else {
            // Deployed but not yet (or no longer) RUNNING.
            LOG.info(
                "Checkpoint triggering task {} of job {} is not in state {} but {} instead. Aborting checkpoint.",
                tasksToTrigger[i].getTaskNameWithSubtaskIndex(),
                job,
                ExecutionState.RUNNING,
                ee.getState());
            throw new CheckpointException(
                CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);
        }
    }
    return executions;
}

/**
 * Check if all tasks that need to acknowledge the checkpoint are running.
* If not, abort the checkpoint * * @return the execution vertices which should give an ack response * @throws CheckpointException the exception fails checking */ private Map<ExecutionAttemptID, ExecutionVertex> getAckTasks() throws CheckpointException { Map<ExecutionAttemptID, ExecutionVertex> ackTasks = new HashMap<>(tasksToWaitFor.length); for (ExecutionVertex ev : tasksToWaitFor) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ackTasks.put(ee.getAttemptId(), ev); } else { LOG.info( "Checkpoint acknowledging task {} of job {} is not being executed at the moment. Aborting checkpoint.", ev.getTaskNameWithSubtaskIndex(), job); throw new CheckpointException( CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } return ackTasks; } private void abortPendingAndQueuedCheckpoints(CheckpointException exception) { assert(Thread.holdsLock(lock)); requestDecider.abortAll(exception); abortPendingCheckpoints(exception); } /** * The canceller of checkpoint. The checkpoint might be cancelled if it doesn't finish in a * configured period. 
 */
private class CheckpointCanceller implements Runnable {

    // the checkpoint to expire if it has not completed when this task fires
    private final PendingCheckpoint pendingCheckpoint;

    private CheckpointCanceller(PendingCheckpoint pendingCheckpoint) {
        this.pendingCheckpoint = checkNotNull(pendingCheckpoint);
    }

    @Override
    public void run() {
        synchronized (lock) {
            // Only act if the checkpoint is still in flight; a completed or
            // already-aborted checkpoint is discarded and needs no action.
            if (!pendingCheckpoint.isDiscarded()) {
                LOG.info("Checkpoint {} of job {} expired before completing.",
                    pendingCheckpoint.getCheckpointId(), job);

                abortPendingCheckpoint(
                    pendingCheckpoint,
                    new CheckpointException(CheckpointFailureReason.CHECKPOINT_EXPIRED));
            }
        }
    }
}

// Unwraps a CheckpointException from the given throwable's cause chain if one
// is present; otherwise wraps the throwable with the supplied default reason.
private static CheckpointException getCheckpointException(
        CheckpointFailureReason defaultReason, Throwable throwable) {

    final Optional<CheckpointException> checkpointExceptionOptional =
        ExceptionUtils.findThrowable(throwable, CheckpointException.class);
    return checkpointExceptionOptional
        .orElseGet(() -> new CheckpointException(defaultReason, throwable));
}

// Simple value pair: a newly assigned checkpoint id together with the
// storage location initialized for that checkpoint.
private static class CheckpointIdAndStorageLocation {
    private final long checkpointId;
    private final CheckpointStorageLocation checkpointStorageLocation;

    CheckpointIdAndStorageLocation(
            long checkpointId,
            CheckpointStorageLocation checkpointStorageLocation) {
        this.checkpointId = checkpointId;
        this.checkpointStorageLocation = checkNotNull(checkpointStorageLocation);
    }
}

// A queued request to trigger a checkpoint/savepoint: carries the trigger
// parameters plus the future that was handed back to the caller.
static class CheckpointTriggerRequest {
    final long timestamp; // wall-clock creation time of the request
    final CheckpointProperties props;
    final @Nullable String externalSavepointLocation;
    final boolean isPeriodic;
    final boolean advanceToEndOfTime;
    private final CompletableFuture<CompletedCheckpoint> onCompletionPromise =
        new CompletableFuture<>();

    CheckpointTriggerRequest(
            CheckpointProperties props,
            @Nullable String externalSavepointLocation,
            boolean isPeriodic,
            boolean advanceToEndOfTime) {

        this.timestamp = System.currentTimeMillis();
        this.props = checkNotNull(props);
        this.externalSavepointLocation = externalSavepointLocation;
        this.isPeriodic = isPeriodic;
        this.advanceToEndOfTime = advanceToEndOfTime;
    }

    CompletableFuture<CompletedCheckpoint> getOnCompletionFuture() {
        return onCompletionPromise;
    }

    public void completeExceptionally(CheckpointException exception) {
        onCompletionPromise.completeExceptionally(exception);
    }

    // whether this request must run even when regular checkpointing is paused
    public boolean isForce() {
        return props.forceCheckpoint();
    }
}
}
Calling this method unconditionally — whether or not empty-page diagnostics logging is enabled — is wasteful. The enabled-flag check should happen before (or at the very top of) this method, so that when the flag is disabled we never enter the method at all; that avoids an unnecessary stack frame and saves a small amount of computation.
/**
 * Transforms the raw per-partition page stream into the client-visible page stream:
 * empty pages are dropped (unless the request options allow them), their request
 * charges and query metrics are folded into the next non-empty page, and each
 * emitted page's continuation header is rewritten into a composite continuation
 * token covering the page's feed range.
 */
public Flux<FeedResponse<T>> apply(Flux<DocumentProducer<T>.DocumentProducerFeedResponse> source) {
    return source.filter(documentProducerFeedResponse -> {
        // Stage 1: drop empty pages unless explicitly allowed. The dropped page's
        // charge and query metrics are remembered so they can be attached to the
        // next page that does get emitted.
        if (documentProducerFeedResponse.pageResult.getResults().isEmpty()
                && !ModelBridgeInternal
                .getEmptyPagesAllowedFromQueryRequestOptions(this.cosmosQueryRequestOptions)) {
            tracker.addCharge(documentProducerFeedResponse.pageResult.getRequestCharge());
            ConcurrentMap<String, QueryMetrics> currentQueryMetrics =
                BridgeInternal.queryMetricsFromFeedResponse(documentProducerFeedResponse.pageResult);
            QueryMetrics.mergeQueryMetricsMap(emptyPageQueryMetricsMap, currentQueryMetrics);
            // Keep the latest diagnostics so the all-pages-empty fallback below can
            // still report something useful.
            cosmosDiagnostics = documentProducerFeedResponse.pageResult.getCosmosDiagnostics();
            logEmptyPageDiagnostics(
                cosmosDiagnostics,
                this.cosmosQueryRequestOptions,
                this.correlatedActivityId,
                documentProducerFeedResponse.pageResult.getActivityId());
            return false;
        }
        return true;
    }).map(documentProducerFeedResponse -> {
        // Stage 2a: merge metrics saved from previously dropped empty pages into
        // this (non-empty) page, then reset the buffer.
        if (!emptyPageQueryMetricsMap.isEmpty()) {
            ConcurrentMap<String, QueryMetrics> currentQueryMetrics =
                BridgeInternal.queryMetricsFromFeedResponse(documentProducerFeedResponse.pageResult);
            QueryMetrics.mergeQueryMetricsMap(currentQueryMetrics, emptyPageQueryMetricsMap);
            emptyPageQueryMetricsMap.clear();
        }
        // Stage 2b: add the charge accumulated from dropped pages onto this page.
        double charge = tracker.getAndResetCharge();
        if (charge > 0) {
            return new ValueHolder<>(plusCharge(documentProducerFeedResponse, charge));
        } else {
            return new ValueHolder<>(documentProducerFeedResponse);
        }
    }).concatWith(Flux.just(new ValueHolder<>(null))).map(heldValue -> {
        // Stage 3: pair each page with its successor; the trailing null marks the
        // end of the stream. NOTE(review): mutates this.previousPage, so the
        // transformer appears to be single-use per query execution — verify.
        DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse = heldValue.v;
        ImmutablePair<DocumentProducer<T>.DocumentProducerFeedResponse, DocumentProducer<T>.DocumentProducerFeedResponse> previousCurrent =
            new ImmutablePair<>(
                this.previousPage,
                documentProducerFeedResponse);
        this.previousPage = documentProducerFeedResponse;
        return previousCurrent;
    }).skip(1).map(currentNext -> {
        // skip(1) discards the initial (null, firstPage) pair.
        DocumentProducer<T>.DocumentProducerFeedResponse current = currentNext.left;
        DocumentProducer<T>.DocumentProducerFeedResponse next = currentNext.right;

        // Stage 4: build the composite continuation token. If the backend returned
        // no token for the current page, continue from the next page's feed range
        // (or end pagination when there is no next page); otherwise resume the
        // current range at the backend token.
        String compositeContinuationToken;
        String backendContinuationToken = current.pageResult.getContinuationToken();
        if (backendContinuationToken == null) {
            if (next == null) {
                compositeContinuationToken = null;
            } else {
                CompositeContinuationToken compositeContinuationTokenDom =
                    new CompositeContinuationToken(null, next.sourceFeedRange.getRange());
                compositeContinuationToken = compositeContinuationTokenDom.toJson();
            }
        } else {
            CompositeContinuationToken compositeContinuationTokenDom =
                new CompositeContinuationToken(
                    backendContinuationToken,
                    current.sourceFeedRange.getRange());
            compositeContinuationToken = compositeContinuationTokenDom.toJson();
        }
        DocumentProducer<T>.DocumentProducerFeedResponse page;
        page = current;
        page = this.addCompositeContinuationToken(page, compositeContinuationToken);
        return page;
    }).map(documentProducerFeedResponse -> {
        return documentProducerFeedResponse.pageResult;
    }).switchIfEmpty(Flux.defer(() -> {
        // Stage 5: if every page was empty, emit a single empty response carrying
        // the total accumulated charge, the merged metrics, and the diagnostics of
        // the last empty page seen.
        return Flux.just(BridgeInternal.createFeedResponseWithQueryMetrics(Utils.immutableListOf(),
            headerResponse(tracker.getAndResetCharge()), emptyPageQueryMetricsMap, null, false,
            false, cosmosDiagnostics));
    }));
}
logEmptyPageDiagnostics(
Combine the query metrics saved from previously dropped empty pages with the current non-empty page's query metrics: if (!emptyPageQueryMetricsMap.isEmpty()) { ConcurrentMap<String, QueryMetrics> currentQueryMetrics = BridgeInternal.queryMetricsFromFeedResponse(documentProducerFeedResponse.pageResult); QueryMetrics.mergeQueryMetricsMap(currentQueryMetrics, emptyPageQueryMetricsMap); emptyPageQueryMetricsMap.clear(); }
/**
 * Reactive transformer turning the merged per-partition
 * {@code DocumentProducerFeedResponse} stream into the client-facing
 * {@code FeedResponse} stream. Empty pages are filtered out (unless the request
 * options allow them), their request charges are carried over to the next emitted
 * page, and each page's continuation header is replaced with a composite
 * continuation token that encodes the source feed range.
 */
class EmptyPagesFilterTransformer<T extends Resource>
        implements Function<Flux<DocumentProducer<T>.DocumentProducerFeedResponse>, Flux<FeedResponse<T>>> {

    // Accumulates the request charges of dropped empty pages until a non-empty
    // page (or the final fallback response) can absorb them.
    private final RequestChargeTracker tracker;
    // Previously emitted page; used to pair each page with its successor when
    // building continuation tokens.
    private DocumentProducer<T>.DocumentProducerFeedResponse previousPage;
    private final CosmosQueryRequestOptions cosmosQueryRequestOptions;
    private final UUID correlatedActivityId;
    // Query metrics collected from dropped empty pages; surfaced on the final
    // fallback response when all pages were empty.
    private ConcurrentMap<String, QueryMetrics> emptyPageQueryMetricsMap = new ConcurrentHashMap<>();
    // Diagnostics from the most recent empty page seen.
    private CosmosDiagnostics cosmosDiagnostics;

    public EmptyPagesFilterTransformer(RequestChargeTracker tracker,
                                       CosmosQueryRequestOptions options,
                                       UUID correlatedActivityId) {
        if (tracker == null) {
            throw new IllegalArgumentException("Request Charge Tracker must not be null.");
        }
        this.tracker = tracker;
        this.previousPage = null;
        this.cosmosQueryRequestOptions = options;
        this.correlatedActivityId = correlatedActivityId;
    }

    // Returns the page with {@code charge} added onto its request-charge header;
    // results, metrics, and diagnostics are preserved.
    private DocumentProducer<T>.DocumentProducerFeedResponse plusCharge(
        DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse,
        double charge) {
        FeedResponse<T> page = documentProducerFeedResponse.pageResult;
        Map<String, String> headers = new HashMap<>(page.getResponseHeaders());
        double pageCharge = page.getRequestCharge();
        pageCharge += charge;
        headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE,
            String.valueOf(pageCharge));
        FeedResponse<T> newPage = BridgeInternal.createFeedResponseWithQueryMetrics(page.getResults(),
            headers,
            BridgeInternal.queryMetricsFromFeedResponse(page),
            ModelBridgeInternal.getQueryPlanDiagnosticsContext(page),
            false,
            false,
            page.getCosmosDiagnostics());
        documentProducerFeedResponse.pageResult = newPage;
        return documentProducerFeedResponse;
    }

    // Returns the page with its continuation header replaced by the given
    // composite continuation token.
    private DocumentProducer<T>.DocumentProducerFeedResponse addCompositeContinuationToken(
        DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse,
        String compositeContinuationToken) {
        FeedResponse<T> page = documentProducerFeedResponse.pageResult;
        Map<String, String> headers = new HashMap<>(page.getResponseHeaders());
        headers.put(HttpConstants.HttpHeaders.CONTINUATION,
            compositeContinuationToken);
        FeedResponse<T> newPage = BridgeInternal.createFeedResponseWithQueryMetrics(page.getResults(),
            headers,
            BridgeInternal.queryMetricsFromFeedResponse(page),
            ModelBridgeInternal.getQueryPlanDiagnosticsContext(page),
            false,
            false,
            page.getCosmosDiagnostics()
        );
        documentProducerFeedResponse.pageResult = newPage;
        return documentProducerFeedResponse;
    }

    // Builds the header map for a synthesized empty response that carries only the
    // accumulated request charge.
    private static Map<String, String> headerResponse(
        double requestCharge) {
        return Utils.immutableMapOf(HttpConstants.HttpHeaders.REQUEST_CHARGE,
            String.valueOf(requestCharge));
    }

    @Override
    public Flux<FeedResponse<T>> apply(Flux<DocumentProducer<T>.DocumentProducerFeedResponse> source) {
        return source.filter(documentProducerFeedResponse -> {
            // Drop empty pages unless explicitly allowed, remembering their charge,
            // metrics, and diagnostics for later pages / the fallback response.
            if (documentProducerFeedResponse.pageResult.getResults().isEmpty()
                    && !ModelBridgeInternal
                    .getEmptyPagesAllowedFromQueryRequestOptions(this.cosmosQueryRequestOptions)) {
                tracker.addCharge(documentProducerFeedResponse.pageResult.getRequestCharge());
                ConcurrentMap<String, QueryMetrics> currentQueryMetrics =
                    BridgeInternal.queryMetricsFromFeedResponse(documentProducerFeedResponse.pageResult);
                QueryMetrics.mergeQueryMetricsMap(emptyPageQueryMetricsMap, currentQueryMetrics);
                cosmosDiagnostics = documentProducerFeedResponse.pageResult.getCosmosDiagnostics();
                // NOTE(review): this logging call is unconditional here; presumably the
                // enabled/disabled check happens inside logEmptyPageDiagnostics — verify.
                logEmptyPageDiagnostics(
                    cosmosDiagnostics,
                    this.cosmosQueryRequestOptions,
                    this.correlatedActivityId,
                    documentProducerFeedResponse.pageResult.getActivityId());
                return false;
            }
            return true;
        }).map(documentProducerFeedResponse -> {
            // Add any charge accumulated from dropped pages onto this page.
            double charge = tracker.getAndResetCharge();
            if (charge > 0) {
                return new ValueHolder<>(plusCharge(documentProducerFeedResponse, charge));
            } else {
                return new ValueHolder<>(documentProducerFeedResponse);
            }
        }).concatWith(Flux.just(new ValueHolder<>(null))).map(heldValue -> {
            // Pair each page with its successor (trailing null marks end-of-stream).
            DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse = heldValue.v;
            ImmutablePair<DocumentProducer<T>.DocumentProducerFeedResponse, DocumentProducer<T>.DocumentProducerFeedResponse> previousCurrent =
                new ImmutablePair<>(
                    this.previousPage,
                    documentProducerFeedResponse);
            this.previousPage = documentProducerFeedResponse;
            return previousCurrent;
        }).skip(1).map(currentNext -> {
            // skip(1) discards the initial (null, firstPage) pair.
            DocumentProducer<T>.DocumentProducerFeedResponse current = currentNext.left;
            DocumentProducer<T>.DocumentProducerFeedResponse next = currentNext.right;

            // Build the composite continuation token: without a backend token,
            // continue from the next page's range (or end pagination when there is
            // no next page); otherwise resume the current range at the backend token.
            String compositeContinuationToken;
            String backendContinuationToken = current.pageResult.getContinuationToken();
            if (backendContinuationToken == null) {
                if (next == null) {
                    compositeContinuationToken = null;
                } else {
                    CompositeContinuationToken compositeContinuationTokenDom =
                        new CompositeContinuationToken(null, next.sourceFeedRange.getRange());
                    compositeContinuationToken = compositeContinuationTokenDom.toJson();
                }
            } else {
                CompositeContinuationToken compositeContinuationTokenDom =
                    new CompositeContinuationToken(
                        backendContinuationToken,
                        current.sourceFeedRange.getRange());
                compositeContinuationToken = compositeContinuationTokenDom.toJson();
            }
            DocumentProducer<T>.DocumentProducerFeedResponse page;
            page = current;
            page = this.addCompositeContinuationToken(page, compositeContinuationToken);
            return page;
        }).map(documentProducerFeedResponse -> {
            return documentProducerFeedResponse.pageResult;
        }).switchIfEmpty(Flux.defer(() -> {
            // All pages were empty: emit one empty response with the total charge,
            // the merged empty-page metrics, and the last empty page's diagnostics.
            return Flux.just(BridgeInternal.createFeedResponseWithQueryMetrics(Utils.immutableListOf(),
                headerResponse(tracker.getAndResetCharge()), emptyPageQueryMetricsMap, null, false,
                false, cosmosDiagnostics));
        }));
    }
}
/**
 * Reactive transformer turning the merged per-partition
 * {@code DocumentProducerFeedResponse} stream into the client-facing
 * {@code FeedResponse} stream. Empty pages are filtered out (unless the request
 * options allow them), their request charges are carried over to the next emitted
 * page, and each page's continuation header is replaced with a composite
 * continuation token that encodes the source feed range. Empty-page diagnostics
 * are only logged when explicitly enabled on the request options, so the logging
 * call is skipped entirely when the flag is off.
 */
class EmptyPagesFilterTransformer<T extends Resource>
        implements Function<Flux<DocumentProducer<T>.DocumentProducerFeedResponse>, Flux<FeedResponse<T>>> {

    // Accumulates the request charges of dropped empty pages until a non-empty
    // page (or the final fallback response) can absorb them.
    private final RequestChargeTracker tracker;
    // Previously emitted page; used to pair each page with its successor when
    // building continuation tokens.
    private DocumentProducer<T>.DocumentProducerFeedResponse previousPage;
    private final CosmosQueryRequestOptions cosmosQueryRequestOptions;
    private final UUID correlatedActivityId;
    // Query metrics collected from dropped empty pages; surfaced on the final
    // fallback response when all pages were empty.
    private ConcurrentMap<String, QueryMetrics> emptyPageQueryMetricsMap = new ConcurrentHashMap<>();
    // Diagnostics from the most recent empty page seen.
    private CosmosDiagnostics cosmosDiagnostics;

    public EmptyPagesFilterTransformer(RequestChargeTracker tracker,
                                       CosmosQueryRequestOptions options,
                                       UUID correlatedActivityId) {
        if (tracker == null) {
            throw new IllegalArgumentException("Request Charge Tracker must not be null.");
        }
        this.tracker = tracker;
        this.previousPage = null;
        this.cosmosQueryRequestOptions = options;
        this.correlatedActivityId = correlatedActivityId;
    }

    // Returns the page with {@code charge} added onto its request-charge header;
    // results, metrics, and diagnostics are preserved.
    private DocumentProducer<T>.DocumentProducerFeedResponse plusCharge(
        DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse,
        double charge) {
        FeedResponse<T> page = documentProducerFeedResponse.pageResult;
        Map<String, String> headers = new HashMap<>(page.getResponseHeaders());
        double pageCharge = page.getRequestCharge();
        pageCharge += charge;
        headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE,
            String.valueOf(pageCharge));
        FeedResponse<T> newPage = BridgeInternal.createFeedResponseWithQueryMetrics(page.getResults(),
            headers,
            BridgeInternal.queryMetricsFromFeedResponse(page),
            ModelBridgeInternal.getQueryPlanDiagnosticsContext(page),
            false,
            false,
            page.getCosmosDiagnostics());
        documentProducerFeedResponse.pageResult = newPage;
        return documentProducerFeedResponse;
    }

    // Returns the page with its continuation header replaced by the given
    // composite continuation token.
    private DocumentProducer<T>.DocumentProducerFeedResponse addCompositeContinuationToken(
        DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse,
        String compositeContinuationToken) {
        FeedResponse<T> page = documentProducerFeedResponse.pageResult;
        Map<String, String> headers = new HashMap<>(page.getResponseHeaders());
        headers.put(HttpConstants.HttpHeaders.CONTINUATION,
            compositeContinuationToken);
        FeedResponse<T> newPage = BridgeInternal.createFeedResponseWithQueryMetrics(page.getResults(),
            headers,
            BridgeInternal.queryMetricsFromFeedResponse(page),
            ModelBridgeInternal.getQueryPlanDiagnosticsContext(page),
            false,
            false,
            page.getCosmosDiagnostics()
        );
        documentProducerFeedResponse.pageResult = newPage;
        return documentProducerFeedResponse;
    }

    // Builds the header map for a synthesized empty response that carries only the
    // accumulated request charge.
    private static Map<String, String> headerResponse(
        double requestCharge) {
        return Utils.immutableMapOf(HttpConstants.HttpHeaders.REQUEST_CHARGE,
            String.valueOf(requestCharge));
    }

    @Override
    public Flux<FeedResponse<T>> apply(Flux<DocumentProducer<T>.DocumentProducerFeedResponse> source) {
        return source.filter(documentProducerFeedResponse -> {
            // Drop empty pages unless explicitly allowed, remembering their charge,
            // metrics, and diagnostics for later pages / the fallback response.
            if (documentProducerFeedResponse.pageResult.getResults().isEmpty()
                    && !ModelBridgeInternal
                    .getEmptyPagesAllowedFromQueryRequestOptions(this.cosmosQueryRequestOptions)) {
                tracker.addCharge(documentProducerFeedResponse.pageResult.getRequestCharge());
                ConcurrentMap<String, QueryMetrics> currentQueryMetrics =
                    BridgeInternal.queryMetricsFromFeedResponse(documentProducerFeedResponse.pageResult);
                QueryMetrics.mergeQueryMetricsMap(emptyPageQueryMetricsMap, currentQueryMetrics);
                cosmosDiagnostics = documentProducerFeedResponse.pageResult.getCosmosDiagnostics();
                // Guard: only pay for the diagnostics-logging call when the feature
                // is enabled on the request options.
                if (ImplementationBridgeHelpers
                        .CosmosQueryRequestOptionsHelper
                        .getCosmosQueryRequestOptionsAccessor()
                        .isEmptyPageDiagnosticsEnabled(cosmosQueryRequestOptions)) {
                    logEmptyPageDiagnostics(
                        cosmosDiagnostics,
                        this.correlatedActivityId,
                        documentProducerFeedResponse.pageResult.getActivityId());
                }
                return false;
            }
            return true;
        }).map(documentProducerFeedResponse -> {
            // Add any charge accumulated from dropped pages onto this page.
            double charge = tracker.getAndResetCharge();
            if (charge > 0) {
                return new ValueHolder<>(plusCharge(documentProducerFeedResponse, charge));
            } else {
                return new ValueHolder<>(documentProducerFeedResponse);
            }
        }).concatWith(Flux.just(new ValueHolder<>(null))).map(heldValue -> {
            // Pair each page with its successor (trailing null marks end-of-stream).
            DocumentProducer<T>.DocumentProducerFeedResponse documentProducerFeedResponse = heldValue.v;
            ImmutablePair<DocumentProducer<T>.DocumentProducerFeedResponse, DocumentProducer<T>.DocumentProducerFeedResponse> previousCurrent =
                new ImmutablePair<>(
                    this.previousPage,
                    documentProducerFeedResponse);
            this.previousPage = documentProducerFeedResponse;
            return previousCurrent;
        }).skip(1).map(currentNext -> {
            // skip(1) discards the initial (null, firstPage) pair.
            DocumentProducer<T>.DocumentProducerFeedResponse current = currentNext.left;
            DocumentProducer<T>.DocumentProducerFeedResponse next = currentNext.right;

            // Build the composite continuation token: without a backend token,
            // continue from the next page's range (or end pagination when there is
            // no next page); otherwise resume the current range at the backend token.
            String compositeContinuationToken;
            String backendContinuationToken = current.pageResult.getContinuationToken();
            if (backendContinuationToken == null) {
                if (next == null) {
                    compositeContinuationToken = null;
                } else {
                    CompositeContinuationToken compositeContinuationTokenDom =
                        new CompositeContinuationToken(null, next.sourceFeedRange.getRange());
                    compositeContinuationToken = compositeContinuationTokenDom.toJson();
                }
            } else {
                CompositeContinuationToken compositeContinuationTokenDom =
                    new CompositeContinuationToken(
                        backendContinuationToken,
                        current.sourceFeedRange.getRange());
                compositeContinuationToken = compositeContinuationTokenDom.toJson();
            }
            DocumentProducer<T>.DocumentProducerFeedResponse page;
            page = current;
            page = this.addCompositeContinuationToken(page, compositeContinuationToken);
            return page;
        }).map(documentProducerFeedResponse -> {
            return documentProducerFeedResponse.pageResult;
        }).switchIfEmpty(Flux.defer(() -> {
            // All pages were empty: emit one empty response with the total charge,
            // the merged empty-page metrics, and the last empty page's diagnostics.
            return Flux.just(BridgeInternal.createFeedResponseWithQueryMetrics(Utils.immutableListOf(),
                headerResponse(tracker.getAndResetCharge()), emptyPageQueryMetricsMap, null, false,
                false, cosmosDiagnostics));
        }));
    }
}
You could look into removing the nullness-warning suppression at the top of the class so the compiler automates this check, if needed. However, that might require more fixes than we want to make in this CL.
/**
 * Folds the timing events of one received chunk into this tracker's state:
 * records the work-item creation latency (once), tracks the latest creation-end
 * time, and accumulates the per-state transit durations.
 */
public void addTimingInfo(Collection<GetWorkStreamTimingInfo> infos) {
  // Index the first-seen timestamp per event; timestamps arrive in microseconds
  // and are converted to milliseconds here.
  Map<Event, Instant> eventTimes = new HashMap<>();
  for (GetWorkStreamTimingInfo timingInfo : infos) {
    eventTimes.putIfAbsent(
        timingInfo.getEvent(), Instant.ofEpochMilli(timingInfo.getTimestampUsec() / 1000));
  }

  Instant creationStart = eventTimes.get(Event.GET_WORK_CREATION_START);
  Instant creationEnd = eventTimes.get(Event.GET_WORK_CREATION_END);

  // The creation latency is recorded only once, from the first chunk carrying
  // both creation timestamps.
  if (creationStart != null && creationEnd != null && workItemCreationLatency == null) {
    workItemCreationLatency =
        LatencyAttribution.newBuilder()
            .setState(State.GET_WORK_IN_WINDMILL_WORKER)
            .setTotalDurationMillis(new Duration(creationStart, creationEnd).getMillis())
            .build();
  }
  if (creationEnd != null && creationEnd.isAfter(workItemCreationEndTime)) {
    workItemCreationEndTime = creationEnd;
  }

  // Sum the creation-end -> dispatcher-received transit time for this chunk.
  Instant receivedByDispatcher = eventTimes.get(Event.GET_WORK_RECEIVED_BY_DISPATCHER);
  if (creationEnd != null && receivedByDispatcher != null) {
    aggregatedGetWorkStreamLatencies.merge(
        State.GET_WORK_IN_TRANSIT_TO_DISPATCHER,
        new Duration(creationEnd, receivedByDispatcher),
        (existing, added) -> existing.plus(added));
  }

  // Sum the dispatcher-forwarded -> now transit time ("now" because this chunk
  // has just been received by the user worker).
  Instant forwardedByDispatcher = eventTimes.get(Event.GET_WORK_FORWARDED_BY_DISPATCHER);
  Instant now = Instant.ofEpochMilli(clock.getMillis());
  if (forwardedByDispatcher != null) {
    aggregatedGetWorkStreamLatencies.merge(
        State.GET_WORK_IN_TRANSIT_TO_USER_WORKER,
        new Duration(forwardedByDispatcher, now),
        (existing, added) -> existing.plus(added));
  }
  workItemLastChunkReceivedByWorkerTime = now;
}
return duration.plus(newDuration);
/**
 * Folds the timing events of one received chunk into this tracker's state:
 * records the work-item creation latency (once), tracks the latest creation-end
 * time, and accumulates both the sum and the maximum of the per-state transit
 * durations.
 */
public void addTimingInfo(Collection<GetWorkStreamTimingInfo> infos) {
  // Index the first-seen timestamp per event; timestamps arrive in microseconds
  // and are converted to milliseconds here.
  Map<Event, Instant> getWorkStreamTimings = new HashMap<>();
  for (GetWorkStreamTimingInfo info : infos) {
    getWorkStreamTimings.putIfAbsent(
        info.getEvent(), Instant.ofEpochMilli(info.getTimestampUsec() / 1000));
  }

  // Record the creation latency only once, from the first chunk that carries
  // both the creation-start and creation-end timestamps.
  Instant workItemCreationStart = getWorkStreamTimings.get(Event.GET_WORK_CREATION_START);
  Instant workItemCreationEnd = getWorkStreamTimings.get(Event.GET_WORK_CREATION_END);
  if (workItemCreationStart != null
      && workItemCreationEnd != null
      && workItemCreationLatency == null) {
    workItemCreationLatency =
        LatencyAttribution.newBuilder()
            .setState(State.GET_WORK_IN_WINDMILL_WORKER)
            .setTotalDurationMillis(
                new Duration(workItemCreationStart, workItemCreationEnd).getMillis())
            .build();
  }
  // Keep the latest creation-end time observed across chunks.
  if (workItemCreationEnd != null && workItemCreationEnd.isAfter(workItemCreationEndTime)) {
    workItemCreationEndTime = workItemCreationEnd;
  }

  // Accumulate the creation-end -> dispatcher-received transit time for this
  // chunk, extending both the running sum and the per-chunk maximum in place.
  Instant receivedByDispatcherTiming =
      getWorkStreamTimings.get(Event.GET_WORK_RECEIVED_BY_DISPATCHER);
  if (workItemCreationEnd != null && receivedByDispatcherTiming != null) {
    Duration newDuration = new Duration(workItemCreationEnd, receivedByDispatcherTiming);
    aggregatedGetWorkStreamLatencies.compute(
        State.GET_WORK_IN_TRANSIT_TO_DISPATCHER,
        (stateKey, duration) -> {
          if (duration == null) {
            return new SumAndMaxDurations(newDuration, newDuration);
          }
          // Mutates the existing holder rather than allocating a new one.
          duration.max = newDuration.isLongerThan(duration.max) ? newDuration : duration.max;
          duration.sum = duration.sum.plus(newDuration);
          return duration;
        });
  }

  // Accumulate the dispatcher-forwarded -> now transit time ("now" because this
  // chunk has just been received by the user worker).
  Instant forwardedByDispatcherTiming =
      getWorkStreamTimings.get(Event.GET_WORK_FORWARDED_BY_DISPATCHER);
  Instant now = Instant.ofEpochMilli(clock.getMillis());
  if (forwardedByDispatcherTiming != null) {
    Duration newDuration = new Duration(forwardedByDispatcherTiming, now);
    aggregatedGetWorkStreamLatencies.compute(
        State.GET_WORK_IN_TRANSIT_TO_USER_WORKER,
        (stateKey, duration) -> {
          if (duration == null) {
            return new SumAndMaxDurations(newDuration, newDuration);
          }
          duration.max = newDuration.isLongerThan(duration.max) ? newDuration : duration.max;
          duration.sum = duration.sum.plus(newDuration);
          return duration;
        });
  }
  workItemLastChunkReceivedByWorkerTime = now;
}
/**
 * Tracks GetWorkStream timing information across the chunks of a work item and
 * converts it into {@code LatencyAttribution}s: the creation latency inside the
 * Windmill worker plus per-state transit durations, rescaled so their sum matches
 * the wall-clock span between creation end and receipt of the last chunk.
 */
class GetWorkTimingInfosTracker {

  // Latest observed end-of-creation time across all chunks of the work item.
  private Instant workItemCreationEndTime = Instant.EPOCH;
  // Arrival time of the most recent chunk at this user worker.
  private Instant workItemLastChunkReceivedByWorkerTime = Instant.EPOCH;
  // Latency attributed to work-item creation; set at most once.
  private LatencyAttribution workItemCreationLatency = null;
  // Per-state sums of observed transit durations, aggregated across chunks.
  private final Map<State, Duration> aggregatedGetWorkStreamLatencies;
  private final MillisProvider clock;

  public GetWorkTimingInfosTracker(MillisProvider clock) {
    this.aggregatedGetWorkStreamLatencies = new EnumMap<>(State.class);
    this.clock = clock;
  }

  /**
   * Returns the latency attributions collected so far: the creation latency (if
   * recorded) followed by the per-state transit durations, each scaled
   * proportionally to the total elapsed transmission time. Returns an empty list
   * when nothing has been recorded, and only the creation latency when the
   * timings are inconsistent (creation ended after the last chunk arrived).
   */
  List<LatencyAttribution> getLatencyAttributions() {
    if (workItemCreationLatency == null && aggregatedGetWorkStreamLatencies.isEmpty()) {
      return Collections.emptyList();
    }
    List<LatencyAttribution> latencyAttributions =
        new ArrayList<>(aggregatedGetWorkStreamLatencies.size() + 1);
    if (workItemCreationLatency != null) {
      latencyAttributions.add(workItemCreationLatency);
    }
    if (workItemCreationEndTime.isAfter(workItemLastChunkReceivedByWorkerTime)) {
      LOG.warn(
          "Work item creation time {} is after the work received time {}, "
              + "one or more GetWorkStream timing infos are missing.",
          workItemCreationEndTime,
          workItemLastChunkReceivedByWorkerTime);
      return latencyAttributions;
    }
    long totalTransmissionDurationElapsedTime =
        new Duration(workItemCreationEndTime, workItemLastChunkReceivedByWorkerTime).getMillis();
    long totalSumDurationTimeMills = 0;
    for (Duration duration : aggregatedGetWorkStreamLatencies.values()) {
      totalSumDurationTimeMills += duration.getMillis();
    }
    final double totalSum = totalSumDurationTimeMills;
    for (Map.Entry<State, Duration> stateAndDuration :
        aggregatedGetWorkStreamLatencies.entrySet()) {
      // Guard against 0/0 when every recorded duration is zero; the previous code
      // relied on (long) NaN evaluating to 0 — the attributed value is the same.
      long scaledDurationMillis =
          totalSum <= 0
              ? 0L
              : (long)
                  ((stateAndDuration.getValue().getMillis() / totalSum)
                      * totalTransmissionDurationElapsedTime);
      latencyAttributions.add(
          LatencyAttribution.newBuilder()
              .setState(stateAndDuration.getKey())
              .setTotalDurationMillis(scaledDurationMillis)
              .build());
    }
    return latencyAttributions;
  }

  /** Clears all accumulated state so the tracker can be reused for the next work item. */
  public void reset() {
    this.aggregatedGetWorkStreamLatencies.clear();
    this.workItemCreationEndTime = Instant.EPOCH;
    this.workItemLastChunkReceivedByWorkerTime = Instant.EPOCH;
    this.workItemCreationLatency = null;
  }
}
// Mutable accumulator pairing the running sum and the running maximum of a set of
// durations. NOTE(review): the fields are declared private but appear to be
// mutated directly by sibling code in the enclosing class (legal for nested
// classes) — both values are expected to be updated together when a new duration
// is folded in; verify nesting.
class SumAndMaxDurations {
  // Running total of all durations accumulated so far.
  private Duration sum;
  // Largest single duration accumulated so far.
  private Duration max;

  public SumAndMaxDurations(Duration sum, Duration max) {
    this.sum = sum;
    this.max = max;
  }
}