[ { "comment": "Just to confirm, this will include compile-only, runtime and quarkusDev scopes?", "method_body": "private static Configuration classpathConfig(Project project, LaunchMode mode) {\n if (LaunchMode.TEST.equals(mode)) {\n return project.getConfigurations().getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME);\n }\n if (LaunchMode.DEVELOPMENT.equals(mode)) {\n Configuration classpathConfiguration = project.getConfigurations().findByName(CLASSPATH_CONFIGURATION);\n if (classpathConfiguration != null) {\n project.getConfigurations().remove(classpathConfiguration);\n }\n\n return project.getConfigurations().create(CLASSPATH_CONFIGURATION).extendsFrom(\n project.getConfigurations().getByName(QuarkusPlugin.DEV_MODE_CONFIGURATION_NAME),\n project.getConfigurations().getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME));\n }\n return project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME);\n }", "target_code": "project.getConfigurations().getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME));", "method_body_after": "private static Configuration classpathConfig(Project project, LaunchMode mode) {\n if (LaunchMode.TEST.equals(mode)) {\n return project.getConfigurations().getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME);\n }\n if (LaunchMode.DEVELOPMENT.equals(mode)) {\n Configuration classpathConfiguration = project.getConfigurations().findByName(CLASSPATH_CONFIGURATION);\n if (classpathConfiguration != null) {\n project.getConfigurations().remove(classpathConfiguration);\n }\n\n return project.getConfigurations().create(CLASSPATH_CONFIGURATION).extendsFrom(\n project.getConfigurations().getByName(QuarkusPlugin.DEV_MODE_CONFIGURATION_NAME),\n project.getConfigurations().getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME),\n project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));\n }\n return 
project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME);\n }", "context_before": "class QuarkusModelBuilder implements ParameterizedToolingModelBuilder {\n\n private static final String MAIN_RESOURCES_OUTPUT = \"build/resources/main\";\n private static final String CLASSES_OUTPUT = \"build/classes\";\n private static final String DEPLOYMENT_CONFIGURATION = \"deploymentConfiguration\";\n private static final String CLASSPATH_CONFIGURATION = \"classpathConfiguration\";\n\n \n\n private static Configuration deploymentClasspathConfig(Project project, LaunchMode mode,\n Collection platforms) {\n\n Configuration deploymentConfiguration = project.getConfigurations().findByName(DEPLOYMENT_CONFIGURATION);\n if (deploymentConfiguration != null) {\n project.getConfigurations().remove(deploymentConfiguration);\n }\n\n deploymentConfiguration = project.getConfigurations().create(DEPLOYMENT_CONFIGURATION)\n .withDependencies(ds -> ds.addAll(platforms));\n Configuration implementationDeployment = project.getConfigurations().findByName(ApplicationDeploymentClasspathBuilder\n .toDeploymentConfigurationName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME));\n if (implementationDeployment != null) {\n deploymentConfiguration.extendsFrom(implementationDeployment);\n }\n\n if (LaunchMode.TEST.equals(mode)) {\n Configuration testDeploymentConfiguration = project.getConfigurations()\n .findByName(ApplicationDeploymentClasspathBuilder\n .toDeploymentConfigurationName(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME));\n if (testDeploymentConfiguration != null) {\n deploymentConfiguration.extendsFrom(testDeploymentConfiguration);\n }\n }\n if (LaunchMode.DEVELOPMENT.equals(mode)) {\n Configuration devDeploymentConfiguration = project.getConfigurations()\n .findByName(ApplicationDeploymentClasspathBuilder\n .toDeploymentConfigurationName(QuarkusPlugin.DEV_MODE_CONFIGURATION_NAME));\n if (devDeploymentConfiguration != null) {\n 
deploymentConfiguration.extendsFrom(devDeploymentConfiguration);\n }\n\n }\n return deploymentConfiguration;\n }\n\n @Override\n public boolean canBuild(String modelName) {\n return modelName.equals(QuarkusModel.class.getName());\n }\n\n @Override\n public Class getParameterType() {\n return ModelParameter.class;\n }\n\n @Override\n public Object buildAll(String modelName, Project project) {\n final ModelParameterImpl modelParameter = new ModelParameterImpl();\n modelParameter.setMode(LaunchMode.DEVELOPMENT.toString());\n return buildAll(modelName, modelParameter, project);\n }\n\n @Override\n public Object buildAll(String modelName, ModelParameter parameter, Project project) {\n LaunchMode mode = LaunchMode.valueOf(parameter.getMode());\n\n final List deploymentDeps = DependencyUtils.getEnforcedPlatforms(project);\n final PlatformImports platformImports = resolvePlatformImports(project, deploymentDeps);\n\n final Map appDependencies = new LinkedHashMap<>();\n Configuration classpathConfig = classpathConfig(project, mode);\n final ResolvedConfiguration resolvedConfiguration = classpathConfig.getResolvedConfiguration();\n collectDependencies(resolvedConfiguration, mode, project, appDependencies);\n\n Configuration deploymentConfig = deploymentClasspathConfig(project, mode, deploymentDeps);\n collectExtensionDependencies(deploymentConfig, appDependencies);\n\n ArtifactCoords appArtifactCoords = new ArtifactCoordsImpl(project.getGroup().toString(), project.getName(),\n project.getVersion().toString());\n\n return new QuarkusModelImpl(\n new WorkspaceImpl(appArtifactCoords, getWorkspace(project.getRootProject(), mode, appArtifactCoords)),\n new ArrayList<>(appDependencies.values()),\n platformImports);\n }\n\n private PlatformImports resolvePlatformImports(Project project,\n List deploymentDeps) {\n final Configuration boms = project.getConfigurations()\n .detachedConfiguration(deploymentDeps.toArray(new org.gradle.api.artifacts.Dependency[0]));\n final 
PlatformImportsImpl platformImports = new PlatformImportsImpl();\n boms.getResolutionStrategy().eachDependency(d -> {\n final String group = d.getTarget().getGroup();\n final String name = d.getTarget().getName();\n if (name.endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX)) {\n platformImports.addPlatformDescriptor(group, name, d.getTarget().getVersion(), \"json\",\n d.getTarget().getVersion());\n } else if (name.endsWith(BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX)) {\n final DefaultDependencyArtifact dep = new DefaultDependencyArtifact();\n dep.setExtension(\"properties\");\n dep.setType(\"properties\");\n dep.setName(name);\n\n final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency(\n group, name, d.getTarget().getVersion(), null);\n gradleDep.addArtifact(dep);\n\n for (ResolvedArtifact a : project.getConfigurations().detachedConfiguration(gradleDep)\n .getResolvedConfiguration().getResolvedArtifacts()) {\n if (a.getName().equals(name)) {\n try {\n platformImports.addPlatformProperties(group, name, null, \"properties\", d.getTarget().getVersion(),\n a.getFile().toPath());\n } catch (AppModelResolverException e) {\n throw new GradleException(\"Failed to import platform properties \" + a.getFile(), e);\n }\n break;\n }\n }\n }\n\n });\n boms.getResolvedConfiguration();\n return platformImports;\n }\n\n public Set getWorkspace(Project project, LaunchMode mode, ArtifactCoords mainModuleCoord) {\n Set modules = new HashSet<>();\n for (Project subproject : project.getAllprojects()) {\n final Convention convention = subproject.getConvention();\n JavaPluginConvention javaConvention = convention.findPlugin(JavaPluginConvention.class);\n if (javaConvention == null || !javaConvention.getSourceSets().getNames().contains(SourceSet.MAIN_SOURCE_SET_NAME)) {\n continue;\n }\n if (subproject.getName().equals(mainModuleCoord.getArtifactId())\n && subproject.getGroup().equals(mainModuleCoord.getGroupId())) {\n 
modules.add(getWorkspaceModule(subproject, mode, true));\n } else {\n modules.add(getWorkspaceModule(subproject, mode, false));\n }\n\n }\n return modules;\n }\n\n private WorkspaceModule getWorkspaceModule(Project project, LaunchMode mode, boolean isMainModule) {\n ArtifactCoords appArtifactCoords = new ArtifactCoordsImpl(project.getGroup().toString(), project.getName(),\n project.getVersion().toString());\n final SourceSet mainSourceSet = QuarkusGradleUtils.getSourceSet(project, SourceSet.MAIN_SOURCE_SET_NAME);\n final SourceSetImpl modelSourceSet;\n if (isMainModule && mode == LaunchMode.TEST) {\n final SourceSet testSourceSet = QuarkusGradleUtils.getSourceSet(project, SourceSet.TEST_SOURCE_SET_NAME);\n modelSourceSet = convert(mainSourceSet, testSourceSet.getOutput().getClassesDirs().getFiles());\n } else {\n modelSourceSet = convert(mainSourceSet, Collections.emptySet());\n }\n return new WorkspaceModuleImpl(appArtifactCoords,\n project.getProjectDir().getAbsoluteFile(),\n project.getBuildDir().getAbsoluteFile(), getSourceSourceSet(mainSourceSet), modelSourceSet);\n }\n\n private void collectExtensionDependencies(Configuration deploymentConfiguration,\n Map appDependencies) {\n final ResolvedConfiguration rc = deploymentConfiguration.getResolvedConfiguration();\n for (ResolvedArtifact a : rc.getResolvedArtifacts()) {\n if (isDependency(a)) {\n DependencyImpl dep = appDependencies.computeIfAbsent(\n toAppDependenciesKey(a.getModuleVersion().getId().getGroup(), a.getName(), a.getClassifier()),\n k -> toDependency(a, AppDependency.DEPLOYMENT_CP_FLAG));\n dep.setFlag(AppDependency.DEPLOYMENT_CP_FLAG);\n }\n }\n }\n\n private void collectDependencies(ResolvedConfiguration configuration,\n LaunchMode mode, Project project, Map appDependencies) {\n\n final Set resolvedArtifacts = configuration.getResolvedArtifacts();\n \n \n final Set artifactFiles = resolvedArtifacts.size() < configuration.getFiles().size()\n ? 
new HashSet<>(resolvedArtifacts.size())\n : null;\n\n configuration.getFirstLevelModuleDependencies()\n .forEach(d -> collectDependencies(d, mode, project, appDependencies, artifactFiles, new HashSet<>()));\n\n if (artifactFiles != null) {\n \n for (File f : configuration.getFiles()) {\n if (artifactFiles.contains(f)) {\n continue;\n }\n \n \n \n final String parentPath = f.getParent();\n final String group = HashUtil.sha1(parentPath == null ? f.getName() : parentPath);\n String name = f.getName();\n String type = \"jar\";\n if (!f.isDirectory()) {\n final int dot = f.getName().lastIndexOf('.');\n if (dot > 0) {\n name = f.getName().substring(0, dot);\n type = f.getName().substring(dot + 1);\n }\n }\n \n final String version = String.valueOf(f.lastModified());\n final DependencyImpl dep = new DependencyImpl(name, group, version, \"compile\", type, null,\n AppDependency.DIRECT_FLAG, AppDependency.RUNTIME_CP_FLAG);\n dep.addPath(f);\n final ArtifactCoords key = toAppDependenciesKey(group, name, \"\");\n appDependencies.put(key, dep);\n }\n }\n }\n\n private void collectDependencies(ResolvedDependency resolvedDep, LaunchMode mode, Project project,\n Map appDependencies, Set artifactFiles,\n Set processedModules) {\n\n for (ResolvedArtifact a : resolvedDep.getModuleArtifacts()) {\n final ArtifactCoords artifactKey = toAppDependenciesKey(a.getModuleVersion().getId().getGroup(), a.getName(),\n a.getClassifier());\n if (!isDependency(a) || appDependencies.containsKey(artifactKey)) {\n continue;\n }\n final DependencyImpl dep = initDependency(a, processedModules.isEmpty() ? 
AppDependency.DIRECT_FLAG : 0,\n AppDependency.RUNTIME_CP_FLAG);\n if ((LaunchMode.DEVELOPMENT.equals(mode) || LaunchMode.TEST.equals(mode)) &&\n a.getId().getComponentIdentifier() instanceof ProjectComponentIdentifier) {\n if (\"test-fixtures\".equals(a.getClassifier()) || \"test\".equals(a.getClassifier())) {\n \n dep.addPath(a.getFile());\n } else {\n IncludedBuild includedBuild = DependencyUtils.includedBuild(project.getRootProject(), a.getName());\n if (includedBuild != null) {\n addSubstitutedProject(dep, includedBuild.getProjectDir());\n } else {\n Project projectDep = project.getRootProject()\n .findProject(\n ((ProjectComponentIdentifier) a.getId().getComponentIdentifier()).getProjectPath());\n if (projectDep != null) {\n addDevModePaths(dep, a, projectDep);\n } else {\n dep.addPath(a.getFile());\n }\n }\n }\n } else {\n dep.addPath(a.getFile());\n }\n appDependencies.put(artifactKey, dep);\n if (artifactFiles != null) {\n artifactFiles.add(a.getFile());\n }\n }\n\n processedModules.add(new AppArtifactKey(resolvedDep.getModuleGroup(), resolvedDep.getModuleName()));\n for (ResolvedDependency child : resolvedDep.getChildren()) {\n if (!processedModules.contains(new AppArtifactKey(child.getModuleGroup(), child.getModuleName()))) {\n collectDependencies(child, mode, project, appDependencies, artifactFiles, processedModules);\n }\n }\n }\n\n private void addDevModePaths(final DependencyImpl dep, ResolvedArtifact a, Project project) {\n final JavaPluginConvention javaConvention = project.getConvention().findPlugin(JavaPluginConvention.class);\n if (javaConvention == null) {\n dep.addPath(a.getFile());\n return;\n }\n final SourceSet mainSourceSet = javaConvention.getSourceSets().findByName(SourceSet.MAIN_SOURCE_SET_NAME);\n if (mainSourceSet == null) {\n dep.addPath(a.getFile());\n return;\n }\n final String classes = QuarkusGradleUtils.getClassesDir(mainSourceSet, project.getBuildDir(), false);\n if (classes == null) {\n dep.addPath(a.getFile());\n } else {\n 
final File classesDir = new File(classes);\n if (classesDir.exists()) {\n dep.addPath(classesDir);\n } else {\n dep.addPath(a.getFile());\n }\n }\n for (File resourcesDir : mainSourceSet.getResources().getSourceDirectories()) {\n if (resourcesDir.exists()) {\n dep.addPath(resourcesDir);\n }\n }\n final Task resourcesTask = project.getTasks().findByName(JavaPlugin.PROCESS_RESOURCES_TASK_NAME);\n for (File outputDir : resourcesTask.getOutputs().getFiles()) {\n if (outputDir.exists()) {\n dep.addPath(outputDir);\n }\n }\n }\n\n private void addSubstitutedProject(final DependencyImpl dep, File projectFile) {\n File mainResourceDirectory = new File(projectFile, MAIN_RESOURCES_OUTPUT);\n if (mainResourceDirectory.exists()) {\n dep.addPath(mainResourceDirectory);\n }\n File classesOutput = new File(projectFile, CLASSES_OUTPUT);\n File[] languageDirectories = classesOutput.listFiles();\n if (languageDirectories == null) {\n throw new GradleException(\n \"The project does not contain a class output directory. 
\" + classesOutput.getPath() + \" must exist.\");\n }\n for (File languageDirectory : languageDirectories) {\n if (languageDirectory.isDirectory()) {\n for (File sourceSet : languageDirectory.listFiles()) {\n if (sourceSet.isDirectory() && sourceSet.getName().equals(SourceSet.MAIN_SOURCE_SET_NAME)) {\n dep.addPath(sourceSet);\n }\n }\n }\n }\n }\n\n private SourceSetImpl convert(SourceSet sourceSet, Set additionalSourceDirs) {\n Set existingSrcDirs = new HashSet<>();\n for (File srcDir : sourceSet.getOutput().getClassesDirs().getFiles()) {\n if (srcDir.exists()) {\n existingSrcDirs.add(srcDir);\n }\n }\n existingSrcDirs.addAll(additionalSourceDirs);\n if (sourceSet.getOutput().getResourcesDir().exists()) {\n return new SourceSetImpl(\n existingSrcDirs,\n Collections.singleton(sourceSet.getOutput().getResourcesDir()));\n }\n return new SourceSetImpl(existingSrcDirs);\n }\n\n private io.quarkus.bootstrap.model.gradle.SourceSet getSourceSourceSet(SourceSet sourceSet) {\n return new SourceSetImpl(sourceSet.getAllJava().getSrcDirs(),\n sourceSet.getResources().getSourceDirectories().getFiles());\n }\n\n private static boolean isDependency(ResolvedArtifact a) {\n return BootstrapConstants.JAR.equalsIgnoreCase(a.getExtension()) || \"exe\".equalsIgnoreCase(a.getExtension()) ||\n a.getFile().isDirectory();\n }\n\n /**\n * Creates an instance of Dependency and associates it with the ResolvedArtifact's path\n */\n static DependencyImpl toDependency(ResolvedArtifact a, int... flags) {\n final DependencyImpl dependency = initDependency(a, flags);\n dependency.addPath(a.getFile());\n return dependency;\n }\n\n /**\n * Creates an instance of DependencyImpl but does not associates it with a path\n */\n private static DependencyImpl initDependency(ResolvedArtifact a, int... flags) {\n final String[] split = a.getModuleVersion().toString().split(\":\");\n return new DependencyImpl(split[1], split[0], split.length > 2 ? 
split[2] : null,\n \"compile\", a.getType(), a.getClassifier(), flags);\n }\n\n private static ArtifactCoords toAppDependenciesKey(String groupId, String artifactId, String classifier) {\n \n classifier = classifier == null ? \"\" : classifier;\n return new ArtifactCoordsImpl(groupId, artifactId, classifier, \"\", ArtifactCoordsImpl.TYPE_JAR);\n }\n}", "context_after": "class QuarkusModelBuilder implements ParameterizedToolingModelBuilder {\n\n private static final String MAIN_RESOURCES_OUTPUT = \"build/resources/main\";\n private static final String CLASSES_OUTPUT = \"build/classes\";\n private static final String DEPLOYMENT_CONFIGURATION = \"quarkusDeploymentConfiguration\";\n private static final String CLASSPATH_CONFIGURATION = \"quarkusClasspathConfiguration\";\n\n \n\n private static Configuration deploymentClasspathConfig(Project project, LaunchMode mode,\n Collection platforms) {\n\n Configuration deploymentConfiguration = project.getConfigurations().findByName(DEPLOYMENT_CONFIGURATION);\n if (deploymentConfiguration != null) {\n project.getConfigurations().remove(deploymentConfiguration);\n }\n\n deploymentConfiguration = project.getConfigurations().create(DEPLOYMENT_CONFIGURATION)\n .withDependencies(ds -> ds.addAll(platforms));\n Configuration implementationDeployment = project.getConfigurations().findByName(ApplicationDeploymentClasspathBuilder\n .toDeploymentConfigurationName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME));\n if (implementationDeployment != null) {\n deploymentConfiguration.extendsFrom(implementationDeployment);\n }\n\n if (LaunchMode.TEST.equals(mode)) {\n Configuration testDeploymentConfiguration = project.getConfigurations()\n .findByName(ApplicationDeploymentClasspathBuilder\n .toDeploymentConfigurationName(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME));\n if (testDeploymentConfiguration != null) {\n deploymentConfiguration.extendsFrom(testDeploymentConfiguration);\n }\n }\n if (LaunchMode.DEVELOPMENT.equals(mode)) {\n 
Configuration devDeploymentConfiguration = project.getConfigurations()\n .findByName(ApplicationDeploymentClasspathBuilder\n .toDeploymentConfigurationName(QuarkusPlugin.DEV_MODE_CONFIGURATION_NAME));\n if (devDeploymentConfiguration != null) {\n deploymentConfiguration.extendsFrom(devDeploymentConfiguration);\n }\n\n }\n return deploymentConfiguration;\n }\n\n @Override\n public boolean canBuild(String modelName) {\n return modelName.equals(QuarkusModel.class.getName());\n }\n\n @Override\n public Class getParameterType() {\n return ModelParameter.class;\n }\n\n @Override\n public Object buildAll(String modelName, Project project) {\n final ModelParameterImpl modelParameter = new ModelParameterImpl();\n modelParameter.setMode(LaunchMode.DEVELOPMENT.toString());\n return buildAll(modelName, modelParameter, project);\n }\n\n @Override\n public Object buildAll(String modelName, ModelParameter parameter, Project project) {\n LaunchMode mode = LaunchMode.valueOf(parameter.getMode());\n\n final List deploymentDeps = DependencyUtils.getEnforcedPlatforms(project);\n final PlatformImports platformImports = resolvePlatformImports(project, deploymentDeps);\n\n final Map appDependencies = new LinkedHashMap<>();\n Configuration classpathConfig = classpathConfig(project, mode);\n final ResolvedConfiguration resolvedConfiguration = classpathConfig.getResolvedConfiguration();\n collectDependencies(resolvedConfiguration, mode, project, appDependencies);\n\n Configuration deploymentConfig = deploymentClasspathConfig(project, mode, deploymentDeps);\n collectExtensionDependencies(deploymentConfig, appDependencies);\n\n ArtifactCoords appArtifactCoords = new ArtifactCoordsImpl(project.getGroup().toString(), project.getName(),\n project.getVersion().toString());\n\n return new QuarkusModelImpl(\n new WorkspaceImpl(appArtifactCoords, getWorkspace(project.getRootProject(), mode, appArtifactCoords)),\n new ArrayList<>(appDependencies.values()),\n platformImports);\n }\n\n private 
PlatformImports resolvePlatformImports(Project project,\n List deploymentDeps) {\n final Configuration boms = project.getConfigurations()\n .detachedConfiguration(deploymentDeps.toArray(new org.gradle.api.artifacts.Dependency[0]));\n final PlatformImportsImpl platformImports = new PlatformImportsImpl();\n boms.getResolutionStrategy().eachDependency(d -> {\n final String group = d.getTarget().getGroup();\n final String name = d.getTarget().getName();\n if (name.endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX)) {\n platformImports.addPlatformDescriptor(group, name, d.getTarget().getVersion(), \"json\",\n d.getTarget().getVersion());\n } else if (name.endsWith(BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX)) {\n final DefaultDependencyArtifact dep = new DefaultDependencyArtifact();\n dep.setExtension(\"properties\");\n dep.setType(\"properties\");\n dep.setName(name);\n\n final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency(\n group, name, d.getTarget().getVersion(), null);\n gradleDep.addArtifact(dep);\n\n for (ResolvedArtifact a : project.getConfigurations().detachedConfiguration(gradleDep)\n .getResolvedConfiguration().getResolvedArtifacts()) {\n if (a.getName().equals(name)) {\n try {\n platformImports.addPlatformProperties(group, name, null, \"properties\", d.getTarget().getVersion(),\n a.getFile().toPath());\n } catch (AppModelResolverException e) {\n throw new GradleException(\"Failed to import platform properties \" + a.getFile(), e);\n }\n break;\n }\n }\n }\n\n });\n boms.getResolvedConfiguration();\n return platformImports;\n }\n\n public Set getWorkspace(Project project, LaunchMode mode, ArtifactCoords mainModuleCoord) {\n Set modules = new HashSet<>();\n for (Project subproject : project.getAllprojects()) {\n final Convention convention = subproject.getConvention();\n JavaPluginConvention javaConvention = convention.findPlugin(JavaPluginConvention.class);\n if (javaConvention == null || 
!javaConvention.getSourceSets().getNames().contains(SourceSet.MAIN_SOURCE_SET_NAME)) {\n continue;\n }\n if (subproject.getName().equals(mainModuleCoord.getArtifactId())\n && subproject.getGroup().equals(mainModuleCoord.getGroupId())) {\n modules.add(getWorkspaceModule(subproject, mode, true));\n } else {\n modules.add(getWorkspaceModule(subproject, mode, false));\n }\n\n }\n return modules;\n }\n\n private WorkspaceModule getWorkspaceModule(Project project, LaunchMode mode, boolean isMainModule) {\n ArtifactCoords appArtifactCoords = new ArtifactCoordsImpl(project.getGroup().toString(), project.getName(),\n project.getVersion().toString());\n final SourceSet mainSourceSet = QuarkusGradleUtils.getSourceSet(project, SourceSet.MAIN_SOURCE_SET_NAME);\n final SourceSetImpl modelSourceSet;\n if (isMainModule && mode == LaunchMode.TEST) {\n final SourceSet testSourceSet = QuarkusGradleUtils.getSourceSet(project, SourceSet.TEST_SOURCE_SET_NAME);\n modelSourceSet = convert(mainSourceSet, testSourceSet.getOutput().getClassesDirs().getFiles());\n } else {\n modelSourceSet = convert(mainSourceSet, Collections.emptySet());\n }\n return new WorkspaceModuleImpl(appArtifactCoords,\n project.getProjectDir().getAbsoluteFile(),\n project.getBuildDir().getAbsoluteFile(), getSourceSourceSet(mainSourceSet), modelSourceSet);\n }\n\n private void collectExtensionDependencies(Configuration deploymentConfiguration,\n Map appDependencies) {\n final ResolvedConfiguration rc = deploymentConfiguration.getResolvedConfiguration();\n for (ResolvedArtifact a : rc.getResolvedArtifacts()) {\n if (isDependency(a)) {\n DependencyImpl dep = appDependencies.computeIfAbsent(\n toAppDependenciesKey(a.getModuleVersion().getId().getGroup(), a.getName(), a.getClassifier()),\n k -> toDependency(a, AppDependency.DEPLOYMENT_CP_FLAG));\n dep.setFlag(AppDependency.DEPLOYMENT_CP_FLAG);\n }\n }\n }\n\n private void collectDependencies(ResolvedConfiguration configuration,\n LaunchMode mode, Project project, Map 
appDependencies) {\n\n final Set resolvedArtifacts = configuration.getResolvedArtifacts();\n \n \n final Set artifactFiles = resolvedArtifacts.size() < configuration.getFiles().size()\n ? new HashSet<>(resolvedArtifacts.size())\n : null;\n\n configuration.getFirstLevelModuleDependencies()\n .forEach(d -> collectDependencies(d, mode, project, appDependencies, artifactFiles, new HashSet<>()));\n\n if (artifactFiles != null) {\n \n for (File f : configuration.getFiles()) {\n if (artifactFiles.contains(f)) {\n continue;\n }\n \n \n \n final String parentPath = f.getParent();\n final String group = HashUtil.sha1(parentPath == null ? f.getName() : parentPath);\n String name = f.getName();\n String type = \"jar\";\n if (!f.isDirectory()) {\n final int dot = f.getName().lastIndexOf('.');\n if (dot > 0) {\n name = f.getName().substring(0, dot);\n type = f.getName().substring(dot + 1);\n }\n }\n \n final String version = String.valueOf(f.lastModified());\n final DependencyImpl dep = new DependencyImpl(name, group, version, \"compile\", type, null,\n AppDependency.DIRECT_FLAG, AppDependency.RUNTIME_CP_FLAG);\n dep.addPath(f);\n final ArtifactCoords key = toAppDependenciesKey(group, name, \"\");\n appDependencies.put(key, dep);\n }\n }\n }\n\n private void collectDependencies(ResolvedDependency resolvedDep, LaunchMode mode, Project project,\n Map appDependencies, Set artifactFiles,\n Set processedModules) {\n\n for (ResolvedArtifact a : resolvedDep.getModuleArtifacts()) {\n final ArtifactCoords artifactKey = toAppDependenciesKey(a.getModuleVersion().getId().getGroup(), a.getName(),\n a.getClassifier());\n if (!isDependency(a) || appDependencies.containsKey(artifactKey)) {\n continue;\n }\n final DependencyImpl dep = initDependency(a, processedModules.isEmpty() ? 
AppDependency.DIRECT_FLAG : 0,\n AppDependency.RUNTIME_CP_FLAG);\n if ((LaunchMode.DEVELOPMENT.equals(mode) || LaunchMode.TEST.equals(mode)) &&\n a.getId().getComponentIdentifier() instanceof ProjectComponentIdentifier) {\n if (\"test-fixtures\".equals(a.getClassifier()) || \"test\".equals(a.getClassifier())) {\n \n dep.addPath(a.getFile());\n } else {\n IncludedBuild includedBuild = DependencyUtils.includedBuild(project.getRootProject(), a.getName());\n if (includedBuild != null) {\n addSubstitutedProject(dep, includedBuild.getProjectDir());\n } else {\n Project projectDep = project.getRootProject()\n .findProject(\n ((ProjectComponentIdentifier) a.getId().getComponentIdentifier()).getProjectPath());\n if (projectDep != null) {\n addDevModePaths(dep, a, projectDep);\n } else {\n dep.addPath(a.getFile());\n }\n }\n }\n } else {\n dep.addPath(a.getFile());\n }\n appDependencies.put(artifactKey, dep);\n if (artifactFiles != null) {\n artifactFiles.add(a.getFile());\n }\n }\n\n processedModules.add(new AppArtifactKey(resolvedDep.getModuleGroup(), resolvedDep.getModuleName()));\n for (ResolvedDependency child : resolvedDep.getChildren()) {\n if (!processedModules.contains(new AppArtifactKey(child.getModuleGroup(), child.getModuleName()))) {\n collectDependencies(child, mode, project, appDependencies, artifactFiles, processedModules);\n }\n }\n }\n\n private void addDevModePaths(final DependencyImpl dep, ResolvedArtifact a, Project project) {\n final JavaPluginConvention javaConvention = project.getConvention().findPlugin(JavaPluginConvention.class);\n if (javaConvention == null) {\n dep.addPath(a.getFile());\n return;\n }\n final SourceSet mainSourceSet = javaConvention.getSourceSets().findByName(SourceSet.MAIN_SOURCE_SET_NAME);\n if (mainSourceSet == null) {\n dep.addPath(a.getFile());\n return;\n }\n final String classes = QuarkusGradleUtils.getClassesDir(mainSourceSet, project.getBuildDir(), false);\n if (classes == null) {\n dep.addPath(a.getFile());\n } else {\n 
final File classesDir = new File(classes);\n if (classesDir.exists()) {\n dep.addPath(classesDir);\n } else {\n dep.addPath(a.getFile());\n }\n }\n for (File resourcesDir : mainSourceSet.getResources().getSourceDirectories()) {\n if (resourcesDir.exists()) {\n dep.addPath(resourcesDir);\n }\n }\n final Task resourcesTask = project.getTasks().findByName(JavaPlugin.PROCESS_RESOURCES_TASK_NAME);\n for (File outputDir : resourcesTask.getOutputs().getFiles()) {\n if (outputDir.exists()) {\n dep.addPath(outputDir);\n }\n }\n }\n\n private void addSubstitutedProject(final DependencyImpl dep, File projectFile) {\n File mainResourceDirectory = new File(projectFile, MAIN_RESOURCES_OUTPUT);\n if (mainResourceDirectory.exists()) {\n dep.addPath(mainResourceDirectory);\n }\n File classesOutput = new File(projectFile, CLASSES_OUTPUT);\n File[] languageDirectories = classesOutput.listFiles();\n if (languageDirectories == null) {\n throw new GradleException(\n \"The project does not contain a class output directory. 
\" + classesOutput.getPath() + \" must exist.\");\n }\n for (File languageDirectory : languageDirectories) {\n if (languageDirectory.isDirectory()) {\n for (File sourceSet : languageDirectory.listFiles()) {\n if (sourceSet.isDirectory() && sourceSet.getName().equals(SourceSet.MAIN_SOURCE_SET_NAME)) {\n dep.addPath(sourceSet);\n }\n }\n }\n }\n }\n\n private SourceSetImpl convert(SourceSet sourceSet, Set additionalSourceDirs) {\n Set existingSrcDirs = new HashSet<>();\n for (File srcDir : sourceSet.getOutput().getClassesDirs().getFiles()) {\n if (srcDir.exists()) {\n existingSrcDirs.add(srcDir);\n }\n }\n existingSrcDirs.addAll(additionalSourceDirs);\n if (sourceSet.getOutput().getResourcesDir().exists()) {\n return new SourceSetImpl(\n existingSrcDirs,\n Collections.singleton(sourceSet.getOutput().getResourcesDir()));\n }\n return new SourceSetImpl(existingSrcDirs);\n }\n\n private io.quarkus.bootstrap.model.gradle.SourceSet getSourceSourceSet(SourceSet sourceSet) {\n return new SourceSetImpl(sourceSet.getAllJava().getSrcDirs(),\n sourceSet.getResources().getSourceDirectories().getFiles());\n }\n\n private static boolean isDependency(ResolvedArtifact a) {\n return BootstrapConstants.JAR.equalsIgnoreCase(a.getExtension()) || \"exe\".equalsIgnoreCase(a.getExtension()) ||\n a.getFile().isDirectory();\n }\n\n /**\n * Creates an instance of Dependency and associates it with the ResolvedArtifact's path\n */\n static DependencyImpl toDependency(ResolvedArtifact a, int... flags) {\n final DependencyImpl dependency = initDependency(a, flags);\n dependency.addPath(a.getFile());\n return dependency;\n }\n\n /**\n * Creates an instance of DependencyImpl but does not associates it with a path\n */\n private static DependencyImpl initDependency(ResolvedArtifact a, int... flags) {\n final String[] split = a.getModuleVersion().toString().split(\":\");\n return new DependencyImpl(split[1], split[0], split.length > 2 ? 
split[2] : null,\n \"compile\", a.getType(), a.getClassifier(), flags);\n }\n\n private static ArtifactCoords toAppDependenciesKey(String groupId, String artifactId, String classifier) {\n \n classifier = classifier == null ? \"\" : classifier;\n return new ArtifactCoordsImpl(groupId, artifactId, classifier, \"\", ArtifactCoordsImpl.TYPE_JAR);\n }\n}" }, { "comment": "I would prefer hamcrest for a proper failure message: ``` import static org.hamcrest.Matchers.containsString; import static org.junit.Assert.assertThat; ... assertThat(expected.get().getMessage(), containsString(\"ZipException\")) ```", "method_body": "public void testRunJar() throws Exception {\n\t\tPath uploadDir = TMP.newFolder().toPath();\n\n\t\tPath actualUploadDir = uploadDir.resolve(\"flink-web-upload\");\n\t\tFiles.createDirectory(actualUploadDir);\n\n\t\tPath emptyJar = actualUploadDir.resolve(\"empty.jar\");\n\t\tFiles.createFile(emptyJar);\n\n\t\tConfiguration config = new Configuration();\n\t\tconfig.setString(WebOptions.UPLOAD_DIR, uploadDir.toString());\n\n\t\tMiniClusterResource clusterResource = new MiniClusterResource(\n\t\t\tnew MiniClusterResourceConfiguration.Builder()\n\t\t\t\t.setConfiguration(config)\n\t\t\t\t.setNumberTaskManagers(1)\n\t\t\t\t.setNumberSlotsPerTaskManager(1)\n\t\t\t\t.build());\n\t\tclusterResource.before();\n\n\t\ttry {\n\t\t\tConfiguration clientConfig = clusterResource.getClientConfiguration();\n\t\t\tRestClient client = new RestClient(RestClientConfiguration.fromConfiguration(clientConfig), TestingUtils.defaultExecutor());\n\n\t\t\ttry {\n\t\t\t\tJarRunHeaders headers = JarRunHeaders.getInstance();\n\t\t\t\tJarRunMessageParameters parameters = headers.getUnresolvedMessageParameters();\n\t\t\t\tparameters.jarIdPathParameter.resolve(emptyJar.getFileName().toString());\n\n\t\t\t\tString host = clientConfig.getString(RestOptions.ADDRESS);\n\t\t\t\tint port = clientConfig.getInteger(RestOptions.PORT);\n\n\t\t\t\ttry {\n\t\t\t\t\tclient.sendRequest(host, port, 
headers, parameters, new JarRunRequestBody())\n\t\t\t\t\t\t.get();\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tOptional expected = ExceptionUtils.findThrowable(e, RestClientException.class);\n\t\t\t\t\tif (expected.isPresent()) {\n\t\t\t\t\t\t\n\t\t\t\t\t\tassertTrue(expected.get().getMessage().contains(\"ProgramInvocationException\"));\n\t\t\t\t\t\t\n\t\t\t\t\t\tassertTrue(expected.get().getMessage().contains(\"ZipException\"));\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\tassertTrue(expected.get().getMessage().contains(\"empty.jar'. zip file is empty\"));\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthrow e;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} finally {\n\t\t\t\tclient.shutdown(Time.milliseconds(10));\n\t\t\t}\n\t\t} finally {\n\t\t\tclusterResource.after();\n\t\t}\n\t}", "target_code": "assertTrue(expected.get().getMessage().contains(\"ZipException\"));", "method_body_after": "public void testRunJar() throws Exception {\n\t\tPath uploadDir = TMP.newFolder().toPath();\n\n\t\tPath actualUploadDir = uploadDir.resolve(\"flink-web-upload\");\n\t\tFiles.createDirectory(actualUploadDir);\n\n\t\tPath emptyJar = actualUploadDir.resolve(\"empty.jar\");\n\t\tFiles.createFile(emptyJar);\n\n\t\tConfiguration config = new Configuration();\n\t\tconfig.setString(WebOptions.UPLOAD_DIR, uploadDir.toString());\n\n\t\tMiniClusterResource clusterResource = new MiniClusterResource(\n\t\t\tnew MiniClusterResourceConfiguration.Builder()\n\t\t\t\t.setConfiguration(config)\n\t\t\t\t.setNumberTaskManagers(1)\n\t\t\t\t.setNumberSlotsPerTaskManager(1)\n\t\t\t\t.build());\n\t\tclusterResource.before();\n\n\t\ttry {\n\t\t\tConfiguration clientConfig = clusterResource.getClientConfiguration();\n\t\t\tRestClient client = new RestClient(RestClientConfiguration.fromConfiguration(clientConfig), TestingUtils.defaultExecutor());\n\n\t\t\ttry {\n\t\t\t\tJarRunHeaders headers = JarRunHeaders.getInstance();\n\t\t\t\tJarRunMessageParameters parameters = 
headers.getUnresolvedMessageParameters();\n\t\t\t\tparameters.jarIdPathParameter.resolve(emptyJar.getFileName().toString());\n\n\t\t\t\tString host = clientConfig.getString(RestOptions.ADDRESS);\n\t\t\t\tint port = clientConfig.getInteger(RestOptions.PORT);\n\n\t\t\t\ttry {\n\t\t\t\t\tclient.sendRequest(host, port, headers, parameters, new JarRunRequestBody())\n\t\t\t\t\t\t.get();\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tOptional expected = ExceptionUtils.findThrowable(e, RestClientException.class);\n\t\t\t\t\tif (expected.isPresent()) {\n\t\t\t\t\t\t\n\t\t\t\t\t\tassertTrue(expected.get().getMessage().contains(\"ProgramInvocationException\"));\n\t\t\t\t\t\t\n\t\t\t\t\t\tassertThat(expected.get().getMessage(), containsString(\"ZipException\"));\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\tassertTrue(expected.get().getMessage().contains(\"empty.jar'. zip file is empty\"));\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthrow e;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} finally {\n\t\t\t\tclient.shutdown(Time.milliseconds(10));\n\t\t\t}\n\t\t} finally {\n\t\t\tclusterResource.after();\n\t\t}\n\t}", "context_before": "class JarRunHandlerTest extends TestLogger {\n\n\t@ClassRule\n\tpublic static final TemporaryFolder TMP = new TemporaryFolder();\n\n\t@Test\n\t\n}", "context_after": "class JarRunHandlerTest extends TestLogger {\n\n\t@ClassRule\n\tpublic static final TemporaryFolder TMP = new TemporaryFolder();\n\n\t@Test\n\t\n}" }, { "comment": "I'm not familiar with the logic here, but it looks like you're retrying up to the maximum number of times, even if it was successful before then.", "method_body": "private void reSetupBdbEnvironment(InsufficientLogException insufficientLogEx) {\n LOG.warn(\"catch insufficient log exception. 
will recover and try again.\", insufficientLogEx);\n \n \n \n \n HostInfo helperNode = Env.getServingEnv().getHelperNode();\n\n for (int i = 0; i < RETRY_TIME; i++) {\n try {\n NetworkRestore restore = new NetworkRestore();\n NetworkRestoreConfig config = new NetworkRestoreConfig();\n config.setRetainLogFiles(false);\n restore.execute(insufficientLogEx, config);\n } catch (Exception e) {\n LOG.warn(\"retry={}, reSetupBdbEnvironment exception:\", i, e);\n try {\n Thread.sleep(5 * 1000);\n LOG.warn(\"after sleep insufficientLogEx:\", insufficientLogEx);\n } catch (InterruptedException e1) {\n LOG.warn(\"InterruptedException\", e1);\n }\n }\n }\n\n bdbEnvironment.close();\n bdbEnvironment.setup(new File(environmentPath), selfNodeName, selfNodeHostPort,\n helperNode.getIp() + \":\" + helperNode.getPort(), Env.getServingEnv().isElectable());\n }", "target_code": "restore.execute(insufficientLogEx, config);", "method_body_after": "private void reSetupBdbEnvironment(InsufficientLogException insufficientLogEx) {\n LOG.warn(\"catch insufficient log exception. 
will recover and try again.\", insufficientLogEx);\n \n \n \n \n HostInfo helperNode = Env.getServingEnv().getHelperNode();\n\n for (int i = 0; i < RETRY_TIME; i++) {\n try {\n NetworkRestore restore = new NetworkRestore();\n NetworkRestoreConfig config = new NetworkRestoreConfig();\n config.setRetainLogFiles(false);\n restore.execute(insufficientLogEx, config);\n break;\n } catch (Exception e) {\n LOG.warn(\"retry={}, reSetupBdbEnvironment exception:\", i, e);\n try {\n Thread.sleep(5 * 1000);\n LOG.warn(\"after sleep insufficientLogEx:\", insufficientLogEx);\n } catch (InterruptedException e1) {\n LOG.warn(\"InterruptedException\", e1);\n }\n }\n }\n\n bdbEnvironment.close();\n bdbEnvironment.setup(new File(environmentPath), selfNodeName, selfNodeHostPort,\n helperNode.getIp() + \":\" + helperNode.getPort(), Env.getServingEnv().isElectable());\n }", "context_before": "class BDBJEJournal implements Journal { \n public static final Logger LOG = LogManager.getLogger(BDBJEJournal.class);\n private static final int OUTPUT_BUFFER_INIT_SIZE = 128;\n private static final int RETRY_TIME = 3;\n\n private String environmentPath = null;\n private String selfNodeName;\n private String selfNodeHostPort;\n\n private BDBEnvironment bdbEnvironment = null;\n private Database currentJournalDB;\n \n private AtomicLong nextJournalId = new AtomicLong(1);\n\n public BDBJEJournal(String nodeName) {\n initBDBEnv(nodeName);\n }\n\n /*\n * Initialize bdb environment.\n * node name is ip_port (the port is edit_log_port)\n */\n private void initBDBEnv(String nodeName) {\n environmentPath = Env.getServingEnv().getBdbDir();\n HostInfo selfNode = Env.getServingEnv().getSelfNode();\n selfNodeName = nodeName;\n if (Config.enable_fqdn_mode) {\n \n \n \n \n selfNodeHostPort = selfNode.getHostName() + \":\" + selfNode.getPort();\n } else {\n selfNodeHostPort = selfNode.getIp() + \":\" + selfNode.getPort();\n }\n }\n\n /*\n * Database is named by its minimum journal id.\n * For example:\n * One 
database contains journal 100 to journal 200, its name is 100.\n * The next database's name is 201\n */\n @Override\n public synchronized void rollJournal() {\n \n if (currentJournalDB.count() == 0) {\n return;\n }\n\n long newName = nextJournalId.get();\n String currentDbName = currentJournalDB.getDatabaseName();\n long currentName = Long.parseLong(currentDbName);\n long newNameVerify = currentName + currentJournalDB.count();\n if (newName == newNameVerify) {\n LOG.info(\"roll edit log. new db name is {}\", newName);\n currentJournalDB = bdbEnvironment.openDatabase(Long.toString(newName));\n } else {\n String msg = String.format(\"roll journal error! journalId and db journal numbers is not match. \"\n + \"journal id: %d, current db: %s, expected db count: %d\",\n newName, currentDbName, newNameVerify);\n LOG.error(msg);\n Util.stdoutWithTime(msg);\n System.exit(-1);\n }\n }\n\n @Override\n public synchronized void write(short op, Writable writable) throws IOException {\n JournalEntity entity = new JournalEntity();\n entity.setOpCode(op);\n entity.setData(writable);\n\n \n long id = nextJournalId.getAndIncrement();\n Long idLong = id;\n DatabaseEntry theKey = new DatabaseEntry();\n TupleBinding idBinding = TupleBinding.getPrimitiveBinding(Long.class);\n idBinding.objectToEntry(idLong, theKey);\n\n \n DataOutputBuffer buffer = new DataOutputBuffer(OUTPUT_BUFFER_INIT_SIZE);\n entity.write(buffer);\n\n DatabaseEntry theData = new DatabaseEntry(buffer.getData());\n if (MetricRepo.isInit) {\n MetricRepo.COUNTER_EDIT_LOG_SIZE_BYTES.increase((long) theData.getSize());\n MetricRepo.COUNTER_CURRENT_EDIT_LOG_SIZE_BYTES.increase((long) theData.getSize());\n }\n LOG.debug(\"opCode = {}, journal size = {}\", op, theData.getSize());\n \n boolean writeSucceed = false;\n for (int i = 0; i < RETRY_TIME; i++) {\n try {\n \n if (currentJournalDB.put(null, theKey, theData) == OperationStatus.SUCCESS) {\n writeSucceed = true;\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"master write 
journal {} finished. db name {}, current time {}\",\n id, currentJournalDB.getDatabaseName(), System.currentTimeMillis());\n }\n break;\n }\n } catch (DatabaseException e) {\n LOG.error(\"catch an exception when writing to database. sleep and retry. journal id {}\", id, e);\n try {\n Thread.sleep(5 * 1000);\n } catch (InterruptedException e1) {\n LOG.warn(\"\", e1);\n }\n }\n }\n\n if (!writeSucceed) {\n if (op == OperationType.OP_TIMESTAMP) {\n /*\n * Do not exit if the write operation is OP_TIMESTAMP.\n * If all the followers exit except master, master should continue provide query\n * service.\n * To prevent master exit, we should exempt OP_TIMESTAMP write\n */\n nextJournalId.set(id);\n LOG.warn(\"master can not achieve quorum. write timestamp fail. but will not exit.\");\n return;\n }\n String msg = \"write bdb failed. will exit. journalId: \" + id + \", bdb database Name: \"\n + currentJournalDB.getDatabaseName();\n LOG.error(msg);\n Util.stdoutWithTime(msg);\n System.exit(-1);\n }\n }\n\n @Override\n public JournalEntity read(long journalId) {\n List dbNames = getDatabaseNames();\n if (dbNames == null) {\n return null;\n }\n String dbName = null;\n for (long db : dbNames) {\n if (journalId >= db) {\n dbName = Long.toString(db);\n continue;\n } else {\n break;\n }\n }\n\n if (dbName == null) {\n return null;\n }\n\n JournalEntity ret = null;\n Long key = new Long(journalId);\n DatabaseEntry theKey = new DatabaseEntry();\n TupleBinding myBinding = TupleBinding.getPrimitiveBinding(Long.class);\n myBinding.objectToEntry(key, theKey);\n\n DatabaseEntry theData = new DatabaseEntry();\n\n Database database = bdbEnvironment.openDatabase(dbName);\n try {\n \n \n if (database.get(null, theKey, theData, LockMode.READ_COMMITTED) == OperationStatus.SUCCESS) {\n \n byte[] retData = theData.getData();\n DataInputStream in = new DataInputStream(new ByteArrayInputStream(retData));\n ret = new JournalEntity();\n try {\n ret.readFields(in);\n } catch (IOException e) {\n 
LOG.warn(\"\", e);\n }\n } else {\n System.out.println(\"No record found for key '\" + journalId + \"'.\");\n }\n } catch (Exception e) {\n LOG.warn(\"catch an exception when get JournalEntity. key:{}\", journalId, e);\n return null;\n }\n return ret;\n }\n\n @Override\n public JournalCursor read(long fromKey, long toKey) {\n return BDBJournalCursor.getJournalCursor(bdbEnvironment, fromKey, toKey);\n }\n\n @Override\n public long getMaxJournalId() {\n long ret = -1;\n if (bdbEnvironment == null) {\n return ret;\n }\n List dbNames = getDatabaseNames();\n if (dbNames == null) {\n return ret;\n }\n if (dbNames.size() == 0) {\n return ret;\n }\n\n int index = dbNames.size() - 1;\n String dbName = dbNames.get(index).toString();\n long dbNumberName = dbNames.get(index);\n Database database = bdbEnvironment.openDatabase(dbName);\n ret = dbNumberName + database.count() - 1;\n\n return ret;\n }\n\n @Override\n public long getMinJournalId() {\n long ret = -1;\n if (bdbEnvironment == null) {\n return ret;\n }\n List dbNames = getDatabaseNames();\n if (dbNames == null) {\n return ret;\n }\n if (dbNames.size() == 0) {\n return ret;\n }\n\n String dbName = dbNames.get(0).toString();\n Database database = bdbEnvironment.openDatabase(dbName);\n \n if (database.count() == 0) {\n return ret;\n }\n\n return dbNames.get(0);\n }\n\n @Override\n public void close() {\n bdbEnvironment.close();\n bdbEnvironment = null;\n }\n\n /*\n * open the bdbje environment, and get the current journal database\n */\n @Override\n public synchronized void open() {\n if (bdbEnvironment == null) {\n File dbEnv = new File(environmentPath);\n bdbEnvironment = new BDBEnvironment();\n HostInfo helperNode = Env.getServingEnv().getHelperNode();\n String helperHostPort = helperNode.getIp() + \":\" + helperNode.getPort();\n if (Config.enable_fqdn_mode) {\n helperHostPort = helperNode.getHostName() + \":\" + helperNode.getPort();\n }\n try {\n bdbEnvironment.setup(dbEnv, selfNodeName, selfNodeHostPort, 
helperHostPort,\n Env.getServingEnv().isElectable());\n } catch (Exception e) {\n if (e instanceof DatabaseNotFoundException) {\n LOG.error(\"It is not allowed to set metadata_failure_recovery to true \"\n + \"when meta dir or bdbje dir is empty\uff0c which may mean it is \"\n + \"the first time to start this node\");\n }\n LOG.error(\"catch an exception when setup bdb environment. will exit.\", e);\n System.exit(-1);\n }\n }\n\n \n \n List dbNames = null;\n for (int i = 0; i < RETRY_TIME; i++) {\n try {\n dbNames = getDatabaseNames();\n\n if (dbNames == null) {\n LOG.error(\"fail to get dbNames while open bdbje journal. will exit\");\n System.exit(-1);\n }\n if (dbNames.size() == 0) {\n /*\n * This is the very first time to open. Usually, we will open a new database\n * named \"1\".\n * But when we start cluster with an image file copied from other cluster,\n * here we should open database with name image max journal id + 1.\n * (default Catalog.getServingEnv().getReplayedJournalId() is 0)\n */\n String dbName = Long.toString(Env.getServingEnv().getReplayedJournalId() + 1);\n LOG.info(\"the very first time to open bdb, dbname is {}\", dbName);\n currentJournalDB = bdbEnvironment.openDatabase(dbName);\n } else {\n \n currentJournalDB = bdbEnvironment.openDatabase(dbNames.get(dbNames.size() - 1).toString());\n }\n\n \n nextJournalId.set(getMaxJournalId() + 1);\n\n break;\n } catch (InsufficientLogException insufficientLogEx) {\n reSetupBdbEnvironment(insufficientLogEx);\n } catch (RollbackException rollbackEx) {\n LOG.warn(\"catch rollback log exception. 
will reopen the ReplicatedEnvironment.\", rollbackEx);\n bdbEnvironment.closeReplicatedEnvironment();\n bdbEnvironment.openReplicatedEnvironment(new File(environmentPath));\n }\n }\n }\n\n \n\n @Override\n public long getJournalNum() {\n return currentJournalDB.count();\n }\n\n @Override\n public void deleteJournals(long deleteToJournalId) {\n List dbNames = getDatabaseNames();\n if (dbNames == null) {\n LOG.info(\"delete database names is null.\");\n return;\n }\n\n String msg = \"existing database names: \";\n for (long name : dbNames) {\n msg += name + \" \";\n }\n msg += \", deleteToJournalId is \" + deleteToJournalId;\n LOG.info(msg);\n\n for (int i = 1; i < dbNames.size(); i++) {\n if (deleteToJournalId >= dbNames.get(i)) {\n long name = dbNames.get(i - 1);\n String stringName = Long.toString(name);\n LOG.info(\"delete database name {}\", stringName);\n bdbEnvironment.removeDatabase(stringName);\n } else {\n LOG.info(\"database name {} is larger than deleteToJournalId {}, not delete\",\n dbNames.get(i), deleteToJournalId);\n break;\n }\n }\n }\n\n @Override\n public long getFinalizedJournalId() {\n List dbNames = getDatabaseNames();\n if (dbNames == null) {\n LOG.error(\"database name is null.\");\n return 0;\n }\n\n String msg = \"database names: \";\n for (long name : dbNames) {\n msg += name + \" \";\n }\n LOG.info(msg);\n\n if (dbNames.size() < 2) {\n return 0;\n }\n\n return dbNames.get(dbNames.size() - 1) - 1;\n }\n\n @Override\n public List getDatabaseNames() {\n if (bdbEnvironment == null) {\n return null;\n }\n\n \n \n List dbNames = null;\n for (int i = 0; i < RETRY_TIME; i++) {\n try {\n dbNames = bdbEnvironment.getDatabaseNames();\n break;\n } catch (InsufficientLogException insufficientLogEx) {\n /*\n * If this is not a checkpoint thread, which means this maybe the FE startup\n * thread,\n * or a replay thread. 
We will reopen bdbEnvironment for these 2 cases to get\n * valid log\n * from helper nodes.\n *\n * The checkpoint thread will only run on Master FE. And Master FE should not\n * encounter\n * these exception. So if it happens, throw exception out.\n */\n if (!Env.isCheckpointThread()) {\n reSetupBdbEnvironment(insufficientLogEx);\n } else {\n throw insufficientLogEx;\n }\n } catch (RollbackException rollbackEx) {\n if (!Env.isCheckpointThread()) {\n LOG.warn(\"catch rollback log exception. will reopen the ReplicatedEnvironment.\", rollbackEx);\n bdbEnvironment.closeReplicatedEnvironment();\n bdbEnvironment.openReplicatedEnvironment(new File(environmentPath));\n } else {\n throw rollbackEx;\n }\n }\n }\n\n return dbNames;\n }\n\n public BDBEnvironment getBDBEnvironment() {\n return this.bdbEnvironment;\n }\n}", "context_after": "class BDBJEJournal implements Journal { \n public static final Logger LOG = LogManager.getLogger(BDBJEJournal.class);\n private static final int OUTPUT_BUFFER_INIT_SIZE = 128;\n private static final int RETRY_TIME = 3;\n\n private String environmentPath = null;\n private String selfNodeName;\n private String selfNodeHostPort;\n\n private BDBEnvironment bdbEnvironment = null;\n private Database currentJournalDB;\n \n private AtomicLong nextJournalId = new AtomicLong(1);\n\n public BDBJEJournal(String nodeName) {\n initBDBEnv(nodeName);\n }\n\n /*\n * Initialize bdb environment.\n * node name is ip_port (the port is edit_log_port)\n */\n private void initBDBEnv(String nodeName) {\n environmentPath = Env.getServingEnv().getBdbDir();\n HostInfo selfNode = Env.getServingEnv().getSelfNode();\n selfNodeName = nodeName;\n if (Config.enable_fqdn_mode) {\n \n \n \n \n selfNodeHostPort = selfNode.getHostName() + \":\" + selfNode.getPort();\n } else {\n selfNodeHostPort = selfNode.getIp() + \":\" + selfNode.getPort();\n }\n }\n\n /*\n * Database is named by its minimum journal id.\n * For example:\n * One database contains journal 100 to journal 200, 
its name is 100.\n * The next database's name is 201\n */\n @Override\n public synchronized void rollJournal() {\n \n if (currentJournalDB.count() == 0) {\n return;\n }\n\n long newName = nextJournalId.get();\n String currentDbName = currentJournalDB.getDatabaseName();\n long currentName = Long.parseLong(currentDbName);\n long newNameVerify = currentName + currentJournalDB.count();\n if (newName == newNameVerify) {\n LOG.info(\"roll edit log. new db name is {}\", newName);\n currentJournalDB = bdbEnvironment.openDatabase(Long.toString(newName));\n } else {\n String msg = String.format(\"roll journal error! journalId and db journal numbers is not match. \"\n + \"journal id: %d, current db: %s, expected db count: %d\",\n newName, currentDbName, newNameVerify);\n LOG.error(msg);\n Util.stdoutWithTime(msg);\n System.exit(-1);\n }\n }\n\n @Override\n public synchronized void write(short op, Writable writable) throws IOException {\n JournalEntity entity = new JournalEntity();\n entity.setOpCode(op);\n entity.setData(writable);\n\n \n long id = nextJournalId.getAndIncrement();\n Long idLong = id;\n DatabaseEntry theKey = new DatabaseEntry();\n TupleBinding idBinding = TupleBinding.getPrimitiveBinding(Long.class);\n idBinding.objectToEntry(idLong, theKey);\n\n \n DataOutputBuffer buffer = new DataOutputBuffer(OUTPUT_BUFFER_INIT_SIZE);\n entity.write(buffer);\n\n DatabaseEntry theData = new DatabaseEntry(buffer.getData());\n if (MetricRepo.isInit) {\n MetricRepo.COUNTER_EDIT_LOG_SIZE_BYTES.increase((long) theData.getSize());\n MetricRepo.COUNTER_CURRENT_EDIT_LOG_SIZE_BYTES.increase((long) theData.getSize());\n }\n LOG.debug(\"opCode = {}, journal size = {}\", op, theData.getSize());\n \n boolean writeSucceed = false;\n for (int i = 0; i < RETRY_TIME; i++) {\n try {\n \n if (currentJournalDB.put(null, theKey, theData) == OperationStatus.SUCCESS) {\n writeSucceed = true;\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"master write journal {} finished. 
db name {}, current time {}\",\n id, currentJournalDB.getDatabaseName(), System.currentTimeMillis());\n }\n break;\n }\n } catch (DatabaseException e) {\n LOG.error(\"catch an exception when writing to database. sleep and retry. journal id {}\", id, e);\n try {\n Thread.sleep(5 * 1000);\n } catch (InterruptedException e1) {\n LOG.warn(\"\", e1);\n }\n }\n }\n\n if (!writeSucceed) {\n if (op == OperationType.OP_TIMESTAMP) {\n /*\n * Do not exit if the write operation is OP_TIMESTAMP.\n * If all the followers exit except master, master should continue provide query\n * service.\n * To prevent master exit, we should exempt OP_TIMESTAMP write\n */\n nextJournalId.set(id);\n LOG.warn(\"master can not achieve quorum. write timestamp fail. but will not exit.\");\n return;\n }\n String msg = \"write bdb failed. will exit. journalId: \" + id + \", bdb database Name: \"\n + currentJournalDB.getDatabaseName();\n LOG.error(msg);\n Util.stdoutWithTime(msg);\n System.exit(-1);\n }\n }\n\n @Override\n public JournalEntity read(long journalId) {\n List dbNames = getDatabaseNames();\n if (dbNames == null) {\n return null;\n }\n String dbName = null;\n for (long db : dbNames) {\n if (journalId >= db) {\n dbName = Long.toString(db);\n continue;\n } else {\n break;\n }\n }\n\n if (dbName == null) {\n return null;\n }\n\n JournalEntity ret = null;\n Long key = new Long(journalId);\n DatabaseEntry theKey = new DatabaseEntry();\n TupleBinding myBinding = TupleBinding.getPrimitiveBinding(Long.class);\n myBinding.objectToEntry(key, theKey);\n\n DatabaseEntry theData = new DatabaseEntry();\n\n Database database = bdbEnvironment.openDatabase(dbName);\n try {\n \n \n if (database.get(null, theKey, theData, LockMode.READ_COMMITTED) == OperationStatus.SUCCESS) {\n \n byte[] retData = theData.getData();\n DataInputStream in = new DataInputStream(new ByteArrayInputStream(retData));\n ret = new JournalEntity();\n try {\n ret.readFields(in);\n } catch (IOException e) {\n LOG.warn(\"\", e);\n }\n } 
else {\n System.out.println(\"No record found for key '\" + journalId + \"'.\");\n }\n } catch (Exception e) {\n LOG.warn(\"catch an exception when get JournalEntity. key:{}\", journalId, e);\n return null;\n }\n return ret;\n }\n\n @Override\n public JournalCursor read(long fromKey, long toKey) {\n return BDBJournalCursor.getJournalCursor(bdbEnvironment, fromKey, toKey);\n }\n\n @Override\n public long getMaxJournalId() {\n long ret = -1;\n if (bdbEnvironment == null) {\n return ret;\n }\n List dbNames = getDatabaseNames();\n if (dbNames == null) {\n return ret;\n }\n if (dbNames.size() == 0) {\n return ret;\n }\n\n int index = dbNames.size() - 1;\n String dbName = dbNames.get(index).toString();\n long dbNumberName = dbNames.get(index);\n Database database = bdbEnvironment.openDatabase(dbName);\n ret = dbNumberName + database.count() - 1;\n\n return ret;\n }\n\n @Override\n public long getMinJournalId() {\n long ret = -1;\n if (bdbEnvironment == null) {\n return ret;\n }\n List dbNames = getDatabaseNames();\n if (dbNames == null) {\n return ret;\n }\n if (dbNames.size() == 0) {\n return ret;\n }\n\n String dbName = dbNames.get(0).toString();\n Database database = bdbEnvironment.openDatabase(dbName);\n \n if (database.count() == 0) {\n return ret;\n }\n\n return dbNames.get(0);\n }\n\n @Override\n public void close() {\n bdbEnvironment.close();\n bdbEnvironment = null;\n }\n\n /*\n * open the bdbje environment, and get the current journal database\n */\n @Override\n public synchronized void open() {\n if (bdbEnvironment == null) {\n File dbEnv = new File(environmentPath);\n bdbEnvironment = new BDBEnvironment();\n HostInfo helperNode = Env.getServingEnv().getHelperNode();\n String helperHostPort = helperNode.getIp() + \":\" + helperNode.getPort();\n if (Config.enable_fqdn_mode) {\n helperHostPort = helperNode.getHostName() + \":\" + helperNode.getPort();\n }\n try {\n bdbEnvironment.setup(dbEnv, selfNodeName, selfNodeHostPort, helperHostPort,\n 
Env.getServingEnv().isElectable());\n } catch (Exception e) {\n if (e instanceof DatabaseNotFoundException) {\n LOG.error(\"It is not allowed to set metadata_failure_recovery to true \"\n + \"when meta dir or bdbje dir is empty\uff0c which may mean it is \"\n + \"the first time to start this node\");\n }\n LOG.error(\"catch an exception when setup bdb environment. will exit.\", e);\n System.exit(-1);\n }\n }\n\n \n \n List dbNames = null;\n for (int i = 0; i < RETRY_TIME; i++) {\n try {\n dbNames = getDatabaseNames();\n\n if (dbNames == null) {\n LOG.error(\"fail to get dbNames while open bdbje journal. will exit\");\n System.exit(-1);\n }\n if (dbNames.size() == 0) {\n /*\n * This is the very first time to open. Usually, we will open a new database\n * named \"1\".\n * But when we start cluster with an image file copied from other cluster,\n * here we should open database with name image max journal id + 1.\n * (default Catalog.getServingEnv().getReplayedJournalId() is 0)\n */\n String dbName = Long.toString(Env.getServingEnv().getReplayedJournalId() + 1);\n LOG.info(\"the very first time to open bdb, dbname is {}\", dbName);\n currentJournalDB = bdbEnvironment.openDatabase(dbName);\n } else {\n \n currentJournalDB = bdbEnvironment.openDatabase(dbNames.get(dbNames.size() - 1).toString());\n }\n\n \n nextJournalId.set(getMaxJournalId() + 1);\n\n break;\n } catch (InsufficientLogException insufficientLogEx) {\n reSetupBdbEnvironment(insufficientLogEx);\n } catch (RollbackException rollbackEx) {\n LOG.warn(\"catch rollback log exception. 
will reopen the ReplicatedEnvironment.\", rollbackEx);\n bdbEnvironment.closeReplicatedEnvironment();\n bdbEnvironment.openReplicatedEnvironment(new File(environmentPath));\n }\n }\n }\n\n \n\n @Override\n public long getJournalNum() {\n return currentJournalDB.count();\n }\n\n @Override\n public void deleteJournals(long deleteToJournalId) {\n List dbNames = getDatabaseNames();\n if (dbNames == null) {\n LOG.info(\"delete database names is null.\");\n return;\n }\n\n String msg = \"existing database names: \";\n for (long name : dbNames) {\n msg += name + \" \";\n }\n msg += \", deleteToJournalId is \" + deleteToJournalId;\n LOG.info(msg);\n\n for (int i = 1; i < dbNames.size(); i++) {\n if (deleteToJournalId >= dbNames.get(i)) {\n long name = dbNames.get(i - 1);\n String stringName = Long.toString(name);\n LOG.info(\"delete database name {}\", stringName);\n bdbEnvironment.removeDatabase(stringName);\n } else {\n LOG.info(\"database name {} is larger than deleteToJournalId {}, not delete\",\n dbNames.get(i), deleteToJournalId);\n break;\n }\n }\n }\n\n @Override\n public long getFinalizedJournalId() {\n List dbNames = getDatabaseNames();\n if (dbNames == null) {\n LOG.error(\"database name is null.\");\n return 0;\n }\n\n String msg = \"database names: \";\n for (long name : dbNames) {\n msg += name + \" \";\n }\n LOG.info(msg);\n\n if (dbNames.size() < 2) {\n return 0;\n }\n\n return dbNames.get(dbNames.size() - 1) - 1;\n }\n\n @Override\n public List getDatabaseNames() {\n if (bdbEnvironment == null) {\n return null;\n }\n\n \n \n List dbNames = null;\n for (int i = 0; i < RETRY_TIME; i++) {\n try {\n dbNames = bdbEnvironment.getDatabaseNames();\n break;\n } catch (InsufficientLogException insufficientLogEx) {\n /*\n * If this is not a checkpoint thread, which means this maybe the FE startup\n * thread,\n * or a replay thread. 
We will reopen bdbEnvironment for these 2 cases to get\n * valid log\n * from helper nodes.\n *\n * The checkpoint thread will only run on Master FE. And Master FE should not\n * encounter\n * these exception. So if it happens, throw exception out.\n */\n if (!Env.isCheckpointThread()) {\n reSetupBdbEnvironment(insufficientLogEx);\n } else {\n throw insufficientLogEx;\n }\n } catch (RollbackException rollbackEx) {\n if (!Env.isCheckpointThread()) {\n LOG.warn(\"catch rollback log exception. will reopen the ReplicatedEnvironment.\", rollbackEx);\n bdbEnvironment.closeReplicatedEnvironment();\n bdbEnvironment.openReplicatedEnvironment(new File(environmentPath));\n } else {\n throw rollbackEx;\n }\n }\n }\n\n return dbNames;\n }\n\n public BDBEnvironment getBDBEnvironment() {\n return this.bdbEnvironment;\n }\n}" }, { "comment": "You either filter it or you attempt to re-add those that you already went over, which is harmless anyway.", "method_body": "private static ScopeInfo initStereotypeScope(List stereotypes, AnnotationTarget target) {\n if (stereotypes.isEmpty()) {\n return null;\n }\n \n Set stereotypeScopes = stereotypes.stream().filter(s -> !s.isAdditionalBeanDefiningAnnotation())\n .map(stereotypeInfo -> stereotypeInfo.getDefaultScope())\n .collect(Collectors.toSet());\n \n if (stereotypeScopes.isEmpty()) {\n stereotypeScopes.addAll(stereotypes.stream().filter(s -> s.isAdditionalBeanDefiningAnnotation())\n .map(si -> si.getDefaultScope())\n .collect(Collectors.toSet()));\n }\n return BeanDeployment.getValidScope(stereotypeScopes, target);\n }", "target_code": "stereotypeScopes.addAll(stereotypes.stream().filter(s -> s.isAdditionalBeanDefiningAnnotation())", "method_body_after": "private static ScopeInfo initStereotypeScope(List stereotypes, AnnotationTarget target) {\n if (stereotypes.isEmpty()) {\n return null;\n }\n final Set stereotypeScopes = new HashSet<>();\n final Set additionalBDAScopes = new HashSet<>();\n for (StereotypeInfo stereotype : 
stereotypes) {\n if (!stereotype.isAdditionalBeanDefiningAnnotation()) {\n stereotypeScopes.add(stereotype.getDefaultScope());\n } else {\n additionalBDAScopes.add(stereotype.getDefaultScope());\n }\n }\n \n return BeanDeployment.getValidScope(stereotypeScopes.isEmpty() ? additionalBDAScopes : stereotypeScopes, target);\n }", "context_before": "class \" +\n \"%s is not part of Jandex index. Dependent scope will be used instead.\", beanClass, superClassName);\n return null;\n }\n for (AnnotationInstance annotation : beanDeployment.getAnnotationStore().getAnnotations(classFromIndex)) {\n ScopeInfo scopeAnnotation = beanDeployment.getScope(annotation.name());\n if (scopeAnnotation != null && scopeAnnotation.declaresInherited()) {\n \n return scopeAnnotation;\n }\n }", "context_after": "class \" +\n \"%s is not part of Jandex index. Dependent scope will be used instead.\", beanClass, superClassName);\n return null;\n }\n for (AnnotationInstance annotation : beanDeployment.getAnnotationStore().getAnnotations(classFromIndex)) {\n ScopeInfo scopeAnnotation = beanDeployment.getScope(annotation.name());\n if (scopeAnnotation != null && scopeAnnotation.declaresInherited()) {\n \n return scopeAnnotation;\n }\n }" }, { "comment": "Same validation here as well. 
https://github.com/ballerina-platform/ballerina-lang/pull/18233/files#r318003438", "method_body": "public boolean equals(Object obj) {\n if (!(obj instanceof BDiagnosticSource)) {\n return false;\n }\n BDiagnosticSource diagnosticSource = (BDiagnosticSource) obj;\n return pkgID.equals(diagnosticSource.pkgID) && cUnitName.equals(diagnosticSource.cUnitName);\n }", "target_code": "if (!(obj instanceof BDiagnosticSource)) {", "method_body_after": "public boolean equals(Object obj) {\n if (this == obj) {\n return true;\n }\n if (!(obj instanceof BDiagnosticSource)) {\n return false;\n }\n BDiagnosticSource diagnosticSource = (BDiagnosticSource) obj;\n return pkgID.equals(diagnosticSource.pkgID) && cUnitName.equals(diagnosticSource.cUnitName);\n }", "context_before": "class BDiagnosticSource implements Diagnostic.DiagnosticSource {\n\n public PackageID pkgID;\n public String cUnitName;\n\n public BDiagnosticSource(PackageID packageID, String compUnitName) {\n this.pkgID = packageID;\n this.cUnitName = compUnitName;\n }\n\n @Override\n public String getPackageName() {\n return pkgID.name.value;\n }\n\n @Override\n public String getPackageVersion() {\n return pkgID.version.value;\n }\n\n @Override\n public String getCompilationUnitName() {\n return cUnitName;\n }\n\n @Override\n \n\n @Override\n public int hashCode() {\n return pkgID.hashCode() + cUnitName.hashCode();\n }\n\n @Override\n public int compareTo(Diagnostic.DiagnosticSource diagnosticSource) {\n String thisDiagnosticSourceString = getPackageName() + getPackageVersion() + getCompilationUnitName();\n String otherDiagnosticSourceString = diagnosticSource.getPackageName() + diagnosticSource.getPackageVersion() +\n diagnosticSource.getCompilationUnitName();\n return thisDiagnosticSourceString.compareTo(otherDiagnosticSourceString);\n }\n}", "context_after": "class BDiagnosticSource implements Diagnostic.DiagnosticSource {\n\n public PackageID pkgID;\n public String cUnitName;\n\n public BDiagnosticSource(PackageID 
packageID, String compUnitName) {\n this.pkgID = packageID;\n this.cUnitName = compUnitName;\n }\n\n @Override\n public String getPackageName() {\n return pkgID.name.value;\n }\n\n @Override\n public String getPackageVersion() {\n return pkgID.version.value;\n }\n\n @Override\n public String getCompilationUnitName() {\n return cUnitName;\n }\n\n @Override\n \n\n @Override\n public int hashCode() {\n return pkgID.hashCode() + cUnitName.hashCode();\n }\n\n @Override\n public int compareTo(Diagnostic.DiagnosticSource diagnosticSource) {\n String thisDiagnosticSourceString = getPackageName() + getPackageVersion() + getCompilationUnitName();\n String otherDiagnosticSourceString = diagnosticSource.getPackageName() + diagnosticSource.getPackageVersion() +\n diagnosticSource.getCompilationUnitName();\n return thisDiagnosticSourceString.compareTo(otherDiagnosticSourceString);\n }\n}" }, { "comment": "It looks like the new IT doesn't pass: https://builds.apache.org/job/beam_PostCommit_Java_PR/169/testReport/junit/org.apache.beam.sdk.io.gcp.pubsub/PubsubReadIT/testReadPubsubMessageId/", "method_body": "public void testReadPubsubMessageId() throws Exception {\n \n pipeline.getOptions().as(DirectOptions.class).setBlockOnRun(false);\n\n PCollection messages =\n pipeline.apply(\n PubsubIO.readMessagesWithMessageId()\n .fromTopic(\"projects/pubsub-public-data/topics/taxirides-realtime\"));\n\n messages.apply(\n \"isMessageIdNonNull\",\n signal.signalSuccessWhen(\n messages.getCoder(),\n pubsubMessages ->\n pubsubMessages.stream().noneMatch(m -> Strings.isNullOrEmpty(m.getMessageId()))));\n\n Supplier start = signal.waitForStart(Duration.standardMinutes(5));\n pipeline.apply(signal.signalStart());\n PipelineResult job = pipeline.run();\n start.get();\n\n signal.waitForSuccess(Duration.standardSeconds(30));\n \n try {\n job.cancel();\n } catch (UnsupportedOperationException exc) {\n \n }\n }", "target_code": "pubsubMessages.stream().noneMatch(m -> 
Strings.isNullOrEmpty(m.getMessageId()))));", "method_body_after": "public void testReadPubsubMessageId() throws Exception {\n \n pipeline.getOptions().as(DirectOptions.class).setBlockOnRun(false);\n\n PCollection messages =\n pipeline.apply(\n PubsubIO.readMessagesWithAttributesAndMessageId()\n .fromTopic(\"projects/pubsub-public-data/topics/taxirides-realtime\"));\n\n messages.apply(\n \"isMessageIdNonNull\",\n signal.signalSuccessWhen(messages.getCoder(), new NonEmptyMessageIdCheck()));\n\n Supplier start = signal.waitForStart(Duration.standardMinutes(5));\n pipeline.apply(signal.signalStart());\n PipelineResult job = pipeline.run();\n start.get();\n\n signal.waitForSuccess(Duration.standardMinutes(1));\n \n try {\n job.cancel();\n } catch (UnsupportedOperationException exc) {\n \n }\n }", "context_before": "class PubsubReadIT {\n\n @Rule public transient TestPubsubSignal signal = TestPubsubSignal.create();\n @Rule public transient TestPipeline pipeline = TestPipeline.create();\n\n @Test\n public void testReadPublicData() throws Exception {\n \n pipeline.getOptions().as(DirectOptions.class).setBlockOnRun(false);\n\n PCollection messages =\n pipeline.apply(\n PubsubIO.readStrings()\n .fromTopic(\"projects/pubsub-public-data/topics/taxirides-realtime\"));\n\n messages.apply(\n \"waitForAnyMessage\", signal.signalSuccessWhen(messages.getCoder(), anyMessages -> true));\n\n Supplier start = signal.waitForStart(Duration.standardMinutes(5));\n pipeline.apply(signal.signalStart());\n PipelineResult job = pipeline.run();\n start.get();\n\n signal.waitForSuccess(Duration.standardSeconds(30));\n \n try {\n job.cancel();\n } catch (UnsupportedOperationException exc) {\n \n }\n }\n\n @Test\n \n}", "context_after": "class PubsubReadIT {\n private static final Logger LOG = LoggerFactory.getLogger(PubsubReadIT.class);\n\n @Rule public transient TestPubsubSignal signal = TestPubsubSignal.create();\n @Rule public transient TestPipeline pipeline = TestPipeline.create();\n\n 
@Test\n public void testReadPublicData() throws Exception {\n \n pipeline.getOptions().as(DirectOptions.class).setBlockOnRun(false);\n\n PCollection messages =\n pipeline.apply(\n PubsubIO.readStrings()\n .fromTopic(\"projects/pubsub-public-data/topics/taxirides-realtime\"));\n\n messages.apply(\n \"waitForAnyMessage\", signal.signalSuccessWhen(messages.getCoder(), anyMessages -> true));\n\n Supplier start = signal.waitForStart(Duration.standardMinutes(5));\n pipeline.apply(signal.signalStart());\n PipelineResult job = pipeline.run();\n start.get();\n\n signal.waitForSuccess(Duration.standardSeconds(30));\n \n try {\n job.cancel();\n } catch (UnsupportedOperationException exc) {\n \n }\n }\n\n @Test\n \n\n private static class NonEmptyMessageIdCheck\n implements SerializableFunction, Boolean> {\n @Override\n public Boolean apply(Set input) {\n for (PubsubMessage message : input) {\n if (Strings.isNullOrEmpty(message.getMessageId())) {\n return false;\n }\n }\n return true;\n }\n }\n}" }, { "comment": "This will lead to flaky tests. 
You should create a thread that monitors for a subscription on the topic and when it sees one publishes the results.", "method_body": "public void testSelectFromPubsub() throws Exception {\n Future>> expectedResult =\n pool.submit(\n (Callable)\n () -> {\n String[] args =\n buildArgs(\n String.format(createPubsubTableStatement, eventsTopic.topicPath()),\n readFromPubsub);\n\n ByteArrayOutputStream outputStream = new ByteArrayOutputStream();\n BeamSqlLine.runSqlLine(args, null, outputStream, null);\n return toLines(outputStream);\n });\n\n \n Thread.sleep(10 * 1000);\n List messages =\n ImmutableList.of(\n message(\n convertTimestampToMillis(\"2018-07-01 21:25:20\"),\n constructor.construct(\"id1\", 1, 40.702, -74.001, 1000, 10, \"enroute\", 2)),\n message(\n convertTimestampToMillis(\"2018-07-01 21:26:06\"),\n constructor.construct(\"id2\", 2, 40.703, -74.002, 1000, 10, \"enroute\", 4)),\n message(\n convertTimestampToMillis(\"2018-07-02 13:26:06\"),\n constructor.construct(\"id3\", 3, 30.0, -72.32324, 2000, 20, \"enroute\", 7)));\n\n eventsTopic.publish(messages);\n\n assertThat(\n Arrays.asList(\n Arrays.asList(\"2018-07-01 21:25:20\", \"enroute\", \"40.702\", \"-74.001\"),\n Arrays.asList(\"2018-07-01 21:26:06\", \"enroute\", \"40.703\", \"-74.002\"),\n Arrays.asList(\"2018-07-02 13:26:06\", \"enroute\", \"30.0\", \"-72.32324\")),\n everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray())));\n }", "target_code": "convertTimestampToMillis(\"2018-07-01 21:25:20\"),", "method_body_after": "public void testSelectFromPubsub() throws Exception {\n String[] args =\n buildArgs(\n String.format(createPubsubTableStatement, eventsTopic.topicPath()),\n setProject,\n \"SELECT event_timestamp, taxi_rides.payload.ride_status, taxi_rides.payload.latitude, \"\n + \"taxi_rides.payload.longitude from taxi_rides LIMIT 3;\");\n\n Future>> expectedResult = runQueryInBackground(args);\n eventsTopic.checkIfAnySubscriptionExists(project, 
Duration.standardMinutes(1));\n\n List messages =\n ImmutableList.of(\n message(\n convertTimestampToMillis(\"2018-07-01 21:25:20\"),\n taxiRideJSON(\"id1\", 1, 40.702, -74.001, 1000, 10, \"enroute\", 2)),\n message(\n convertTimestampToMillis(\"2018-07-01 21:26:06\"),\n taxiRideJSON(\"id2\", 2, 40.703, -74.002, 1000, 10, \"enroute\", 4)),\n message(\n convertTimestampToMillis(\"2018-07-02 13:26:06\"),\n taxiRideJSON(\"id3\", 3, 30.0, -72.32324, 2000, 20, \"enroute\", 7)));\n\n eventsTopic.publish(messages);\n\n assertThat(\n Arrays.asList(\n Arrays.asList(\"2018-07-01 21:25:20\", \"enroute\", \"40.702\", \"-74.001\"),\n Arrays.asList(\"2018-07-01 21:26:06\", \"enroute\", \"40.703\", \"-74.002\"),\n Arrays.asList(\"2018-07-02 13:26:06\", \"enroute\", \"30.0\", \"-72.32324\")),\n everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray())));\n }", "context_before": "class BeamSqlLineIT implements Serializable {\n\n @Rule public transient TestPubsub eventsTopic = TestPubsub.create();\n\n private static final Integer numberOfThreads = 4;\n private static String createPubsubTableStatement;\n private static String readFromPubsub;\n private static String filterForSouthManhattan;\n private static String slidingWindowQuery;\n private static String fixedWindowQuery;\n private static PubsubMessageJSONStringConstructor constructor;\n private static ExecutorService pool;\n private static final String publicTopic = \"projects/pubsub-public-data/topics/taxirides-realtime\";\n\n @BeforeClass\n public static void setUp() {\n pool = Executors.newFixedThreadPool(numberOfThreads);\n\n createPubsubTableStatement =\n \"CREATE TABLE taxi_rides (\\n\"\n + \" event_timestamp TIMESTAMP,\\n\"\n + \" attributes MAP,\\n\"\n + \" payload ROW<\\n\"\n + \" ride_id VARCHAR,\\n\"\n + \" point_idx INT,\\n\"\n + \" latitude DOUBLE,\\n\"\n + \" longitude DOUBLE,\\n\"\n + \" meter_reading DOUBLE,\\n\"\n + \" meter_increment DOUBLE,\\n\"\n + \" ride_status VARCHAR,\\n\"\n + \" 
passenger_count TINYINT>)\\n\"\n + \" TYPE pubsub \\n\"\n + \" LOCATION '%s'\\n\"\n + \" TBLPROPERTIES '{\\\"timestampAttributeKey\\\": \\\"ts\\\"}';\";\n\n readFromPubsub =\n \"SELECT event_timestamp, taxi_rides.payload.ride_status, taxi_rides.payload.latitude, \"\n + \"taxi_rides.payload.longitude from taxi_rides LIMIT 3;\";\n\n filterForSouthManhattan =\n \"SELECT event_timestamp, taxi_rides.payload.ride_status, \\n\"\n + \"taxi_rides.payload.latitude, taxi_rides.payload.longitude from taxi_rides\\n\"\n + \" WHERE taxi_rides.payload.longitude > -74.747\\n\"\n + \" AND taxi_rides.payload.longitude < -73.969\\n\"\n + \" AND taxi_rides.payload.latitude > 40.699\\n\"\n + \" AND taxi_rides.payload.latitude < 40.720 LIMIT 2;\";\n\n fixedWindowQuery =\n \"WITH geo_cells AS (\\n\"\n + \" SELECT FLOOR(taxi_rides.payload.latitude / 0.05) * 0.05 AS reduced_lat,\\n\"\n + \" FLOOR(taxi_rides.payload.longitude / 0.05) * 0.05 AS reduced_lon,\\n\"\n + \" taxi_rides.event_timestamp\\n\"\n + \" FROM taxi_rides)\\n\"\n + \" SELECT COUNT(*) as num_events,\\n\"\n + \" geo_cells.reduced_lat,\\n\"\n + \" geo_cells.reduced_lon, \\n\"\n + \" TUMBLE_START(geo_cells.event_timestamp, INTERVAL '1' SECOND)\\n\"\n + \" FROM geo_cells \\n\"\n + \" GROUP BY geo_cells.reduced_lat,\\n\"\n + \" geo_cells.reduced_lon,\\n\"\n + \" TUMBLE(geo_cells.event_timestamp, INTERVAL '1' SECOND)\\n\"\n + \" LIMIT 2;\";\n\n slidingWindowQuery =\n \"SELECT COUNT(*) AS num_events,\\n\"\n + \" SUM(taxi_rides.payload.meter_increment) as revenue,\\n\"\n + \" HOP_END(\\n\"\n + \" taxi_rides.event_timestamp, \\n\"\n + \" INTERVAL '1' SECOND, \\n\"\n + \" INTERVAL '2' SECOND) as minute_end\\n\"\n + \" FROM taxi_rides\\n\"\n + \" GROUP BY HOP(\\n\"\n + \" taxi_rides.event_timestamp,\\n\"\n + \" INTERVAL '1' SECOND, \\n\"\n + \" INTERVAL '2' SECOND) LIMIT 2\";\n\n constructor =\n new PubsubMessageJSONStringConstructor(\n \"ride_id\",\n \"point_idx\",\n \"latitude\",\n \"longitude\",\n \"meter_reading\",\n 
\"meter_increment\",\n \"ride_status\",\n \"passenger_count\");\n }\n\n @Test\n \n\n @Test\n public void testFilterForSouthManhattan() throws Exception {\n Future>> expectedResult =\n pool.submit(\n (Callable)\n () -> {\n String[] args =\n buildArgs(\n String.format(createPubsubTableStatement, eventsTopic.topicPath()),\n filterForSouthManhattan);\n\n ByteArrayOutputStream outputStream = new ByteArrayOutputStream();\n BeamSqlLine.runSqlLine(args, null, outputStream, null);\n return toLines(outputStream);\n });\n\n \n Thread.sleep(10 * 1000);\n List messages =\n ImmutableList.of(\n message(\n convertTimestampToMillis(\"2018-07-01 21:25:20\"),\n constructor.construct(\"id1\", 1, 40.701, -74.001, 1000, 10, \"enroute\", 2)),\n message(\n convertTimestampToMillis(\"2018-07-01 21:26:06\"),\n constructor.construct(\"id2\", 2, 40.702, -74.002, 1000, 10, \"enroute\", 4)),\n message(\n convertTimestampToMillis(\"2018-07-02 13:26:06\"),\n constructor.construct(\"id3\", 3, 30, -72.32324, 2000, 20, \"enroute\", 7)),\n message(\n convertTimestampToMillis(\"2018-07-02 14:28:22\"),\n constructor.construct(\"id4\", 4, 34, -73.32324, 2000, 20, \"enroute\", 8)));\n\n eventsTopic.publish(messages);\n\n assertThat(\n Arrays.asList(\n Arrays.asList(\"2018-07-01 21:25:20\", \"enroute\", \"40.701\", \"-74.001\"),\n Arrays.asList(\"2018-07-01 21:26:06\", \"enroute\", \"40.702\", \"-74.002\")),\n everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray())));\n }\n\n @Test\n public void testFixedWindow() throws Exception {\n Future>> expectedResult =\n pool.submit(\n (Callable)\n () -> {\n String[] args =\n buildArgs(\n String.format(createPubsubTableStatement, publicTopic), fixedWindowQuery);\n ByteArrayOutputStream outputStream = new ByteArrayOutputStream();\n BeamSqlLine.runSqlLine(args, null, outputStream, null);\n return toLines(outputStream);\n });\n\n \n Thread.sleep(10 * 1000);\n\n \n assertTrue(expectedResult.get().size() == 6);\n }\n\n @Test\n public void 
testSlidingWindow() throws Exception {\n Future>> expectedResult =\n pool.submit(\n (Callable)\n () -> {\n String[] args =\n buildArgs(\n String.format(createPubsubTableStatement, publicTopic),\n slidingWindowQuery);\n ByteArrayOutputStream outputStream = new ByteArrayOutputStream();\n BeamSqlLine.runSqlLine(args, null, outputStream, null);\n return toLines(outputStream);\n });\n\n \n Thread.sleep(10 * 1000);\n\n \n assertTrue(expectedResult.get().size() == 6);\n }\n\n private long convertTimestampToMillis(String timestamp) throws ParseException {\n SimpleDateFormat dateFormat = new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss\");\n dateFormat.setTimeZone(TimeZone.getTimeZone(\"UTC\"));\n return dateFormat.parse(timestamp).getTime();\n }\n\n private PubsubMessage message(long timestampInMillis, String jsonPayload) {\n return new PubsubMessage(\n jsonPayload.getBytes(UTF_8), ImmutableMap.of(\"ts\", String.valueOf(timestampInMillis)));\n }\n\n private static class PubsubMessageJSONStringConstructor {\n private List messageSchema;\n\n public PubsubMessageJSONStringConstructor(String... schemas) {\n ImmutableList.Builder builder = ImmutableList.builder();\n for (String schema : schemas) {\n builder.add(schema);\n }\n\n messageSchema = builder.build();\n }\n\n public String construct(Object... 
values) throws IllegalArgumentException {\n if (values.length != messageSchema.size()) {\n throw new IllegalArgumentException(\n String.format(\n \"length of values %d does not match \" + \"with size of schema %d\",\n values.length, messageSchema.size()));\n }\n\n JSONObject jsonObject = new JSONObject();\n for (int i = 0; i < values.length; i++) {\n jsonObject.put(messageSchema.get(i), values[i]);\n }\n\n return jsonObject.toString();\n }\n }\n}", "context_after": "class BeamSqlLineIT implements Serializable {\n\n @Rule public transient TestPubsub eventsTopic = TestPubsub.create();\n\n private static String project;\n private static String createPubsubTableStatement;\n private static String setProject;\n private static final SimpleDateFormat dateFormat = new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss\");\n\n private ExecutorService pool;\n\n @BeforeClass\n public static void setUpClass() {\n project = TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject();\n\n setProject = String.format(\"SET project = '%s';\", project);\n\n createPubsubTableStatement =\n \"CREATE TABLE taxi_rides (\\n\"\n + \" event_timestamp TIMESTAMP,\\n\"\n + \" attributes MAP,\\n\"\n + \" payload ROW<\\n\"\n + \" ride_id VARCHAR,\\n\"\n + \" point_idx INT,\\n\"\n + \" latitude DOUBLE,\\n\"\n + \" longitude DOUBLE,\\n\"\n + \" meter_reading DOUBLE,\\n\"\n + \" meter_increment DOUBLE,\\n\"\n + \" ride_status VARCHAR,\\n\"\n + \" passenger_count TINYINT>)\\n\"\n + \" TYPE pubsub \\n\"\n + \" LOCATION '%s'\\n\"\n + \" TBLPROPERTIES '{\\\"timestampAttributeKey\\\": \\\"ts\\\"}';\";\n\n dateFormat.setTimeZone(TimeZone.getTimeZone(\"UTC\"));\n }\n\n @Before\n public void setUp() {\n pool = Executors.newFixedThreadPool(1);\n }\n\n @After\n public void tearDown() {\n pool.shutdown();\n }\n\n @Test\n \n\n @Test\n public void testFilterForSouthManhattan() throws Exception {\n String[] args =\n buildArgs(\n String.format(createPubsubTableStatement, eventsTopic.topicPath()),\n setProject,\n 
\"SELECT event_timestamp, taxi_rides.payload.ride_status, \\n\"\n + \"taxi_rides.payload.latitude, taxi_rides.payload.longitude from taxi_rides\\n\"\n + \" WHERE taxi_rides.payload.longitude > -74.747\\n\"\n + \" AND taxi_rides.payload.longitude < -73.969\\n\"\n + \" AND taxi_rides.payload.latitude > 40.699\\n\"\n + \" AND taxi_rides.payload.latitude < 40.720 LIMIT 2;\");\n\n Future>> expectedResult = runQueryInBackground(args);\n eventsTopic.checkIfAnySubscriptionExists(project, Duration.standardMinutes(1));\n\n List messages =\n ImmutableList.of(\n message(\n convertTimestampToMillis(\"2018-07-01 21:25:20\"),\n taxiRideJSON(\"id1\", 1, 40.701, -74.001, 1000, 10, \"enroute\", 2)),\n message(\n convertTimestampToMillis(\"2018-07-01 21:26:06\"),\n taxiRideJSON(\"id2\", 2, 40.702, -74.002, 1000, 10, \"enroute\", 4)),\n message(\n convertTimestampToMillis(\"2018-07-02 13:26:06\"),\n taxiRideJSON(\"id3\", 3, 30, -72.32324, 2000, 20, \"enroute\", 7)),\n message(\n convertTimestampToMillis(\"2018-07-02 14:28:22\"),\n taxiRideJSON(\"id4\", 4, 34, -73.32324, 2000, 20, \"enroute\", 8)));\n\n eventsTopic.publish(messages);\n\n assertThat(\n Arrays.asList(\n Arrays.asList(\"2018-07-01 21:25:20\", \"enroute\", \"40.701\", \"-74.001\"),\n Arrays.asList(\"2018-07-01 21:26:06\", \"enroute\", \"40.702\", \"-74.002\")),\n everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray())));\n }\n\n private String taxiRideJSON(\n String rideId,\n int pointIdex,\n double latitude,\n double longitude,\n int meterReading,\n int meterIncrement,\n String rideStatus,\n int passengerCount) {\n ObjectMapper mapper = new ObjectMapper();\n ObjectNode objectNode = mapper.createObjectNode();\n objectNode.put(\"ride_id\", rideId);\n objectNode.put(\"point_idx\", pointIdex);\n objectNode.put(\"latitude\", latitude);\n objectNode.put(\"longitude\", longitude);\n objectNode.put(\"meter_reading\", meterReading);\n objectNode.put(\"meter_increment\", meterIncrement);\n 
objectNode.put(\"ride_status\", rideStatus);\n objectNode.put(\"passenger_count\", passengerCount);\n return objectNode.toString();\n }\n\n private Future>> runQueryInBackground(String[] args) {\n return pool.submit(\n (Callable)\n () -> {\n ByteArrayOutputStream outputStream = new ByteArrayOutputStream();\n BeamSqlLine.runSqlLine(args, null, outputStream, null);\n return toLines(outputStream);\n });\n }\n\n private long convertTimestampToMillis(String timestamp) throws ParseException {\n return dateFormat.parse(timestamp).getTime();\n }\n\n private PubsubMessage message(long timestampInMillis, String jsonPayload) {\n return new PubsubMessage(\n jsonPayload.getBytes(UTF_8), ImmutableMap.of(\"ts\", String.valueOf(timestampInMillis)));\n }\n}" }, { "comment": "Yes, this was specifically asked by the service. They want to ensure every docker consumer validates this but they have found customer's not necessarily doing it so they wanted us to put it in the SDK.", "method_body": "Mono> downloadManifestWithResponse(String tagOrDigest, Context context) {\n if (tagOrDigest == null) {\n return monoError(logger, new NullPointerException(\"'tagOrDigest' can't be null.\"));\n }\n\n return this.registriesImpl.getManifestWithResponseAsync(repositoryName, tagOrDigest, UtilsImpl.OCI_MANIFEST_MEDIA_TYPE, context)\n .flatMap(response -> {\n String digest = response.getHeaders().getValue(UtilsImpl.DOCKER_DIGEST_HEADER_NAME);\n ManifestWrapper wrapper = response.getValue();\n\n \n \n if (Objects.equals(digest, tagOrDigest) || Objects.equals(response.getValue().getTag(), tagOrDigest)) {\n OciManifest ociManifest = new OciManifest()\n .setAnnotations(wrapper.getAnnotations())\n .setConfig(wrapper.getConfig())\n .setLayers(wrapper.getLayers())\n .setSchemaVersion(wrapper.getSchemaVersion());\n\n Response res = new ResponseBase(\n response.getRequest(),\n response.getStatusCode(),\n response.getHeaders(),\n ociManifest,\n null);\n\n return Mono.just(res);\n } else {\n return 
monoError(logger, new ServiceResponseException(\"The digest in the response does not match the expected digest.\"));\n }\n }).onErrorMap(UtilsImpl::mapException);\n }", "target_code": "if (Objects.equals(digest, tagOrDigest) || Objects.equals(response.getValue().getTag(), tagOrDigest)) {", "method_body_after": "return monoError(logger, new NullPointerException(\"'tagOrDigest' can't be null.\"));\n }\n\n return this.registriesImpl.getManifestWithResponseAsync(repositoryName, tagOrDigest, UtilsImpl.OCI_MANIFEST_MEDIA_TYPE, context)\n .flatMap(response -> {\n String digest = response.getHeaders().getValue(UtilsImpl.DOCKER_DIGEST_HEADER_NAME);\n ManifestWrapper wrapper = response.getValue();\n\n \n \n if (Objects.equals(digest, tagOrDigest) || Objects.equals(response.getValue().getTag(), tagOrDigest)) {\n OciManifest ociManifest = new OciManifest()\n .setAnnotations(wrapper.getAnnotations())\n .setConfig(wrapper.getConfig())\n .setLayers(wrapper.getLayers())\n .setSchemaVersion(wrapper.getSchemaVersion());\n\n Response res = new ResponseBase(\n response.getRequest(),\n response.getStatusCode(),\n response.getHeaders(),\n ociManifest,\n null);\n\n return Mono.just(res);\n } else {\n return monoError(logger, new ServiceResponseException(\"The digest in the response does not match the expected digest.\"));\n }\n }", "context_before": "class ContainerRegistryBlobAsyncClient {\n\n private final AzureContainerRegistryImpl registryImplClient;\n private final ContainerRegistryBlobsImpl blobsImpl;\n private final ContainerRegistriesImpl registriesImpl;\n private final String endpoint;\n private final String repositoryName;\n\n private final ClientLogger logger = new ClientLogger(ContainerRegistryBlobAsyncClient.class);\n\n ContainerRegistryBlobAsyncClient(String repositoryName, HttpPipeline httpPipeline, String endpoint, String version) {\n this.repositoryName = repositoryName;\n this.endpoint = endpoint;\n this.registryImplClient = new AzureContainerRegistryImplBuilder()\n 
.url(endpoint)\n .pipeline(httpPipeline)\n .apiVersion(version)\n .buildClient();\n this.blobsImpl = this.registryImplClient.getContainerRegistryBlobs();\n this.registriesImpl = this.registryImplClient.getContainerRegistries();\n }\n\n /**\n * This method returns the registry's repository on which operations are being performed.\n *\n * @return The name of the repository\n */\n public String getRepositoryName() {\n return this.repositoryName;\n }\n\n /**\n * This method returns the complete registry endpoint.\n *\n * @return The registry endpoint including the authority.\n */\n public String getEndpoint() {\n return this.endpoint;\n }\n\n /**\n * Upload the Oci manifest to the repository.\n * The upload is done as a single operation.\n * @see uploadManifest(OciManifest manifest) {\n if (manifest == null) {\n return monoError(logger, new NullPointerException(\"'manifest' can't be null.\"));\n }\n\n try {\n byte[] bytes = this.registryImplClient.getSerializerAdapter().serializeToBytes(manifest, SerializerEncoding.JSON);\n return withContext(context -> this.uploadManifestWithResponse(ByteBuffer.wrap(bytes), context)).flatMap(FluxUtil::toMono);\n } catch (IOException exception) {\n return monoError(logger, new UncheckedIOException(exception));\n }\n }\n\n /**\n * Uploads a manifest to the repository.\n * The client currently only supports uploading OciManifests to the repository.\n * And this operation makes the assumption that the data provided is a valid OCI manifest.\n *

\n * Also, the data is read into memory and then an upload operation is performed as a single operation.\n * @see uploadManifest(BinaryData data) {\n if (data == null) {\n return monoError(logger, new NullPointerException(\"'data' can't be null.\"));\n }\n\n return withContext(context -> this.uploadManifestWithResponse(data.toByteBuffer(), context)).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Uploads a manifest to the repository.\n * The client currently only supports uploading OciManifests to the repository.\n * And this operation makes the assumption that the data provided is a valid OCI manifest.\n *

\n * Also, the data is read into memory and then an upload operation is performed as a single operation.\n * @see > uploadManifestWithResponse(BinaryData data) {\n if (data == null) {\n return monoError(logger, new NullPointerException(\"'data' can't be null.\"));\n }\n\n return withContext(context -> this.uploadManifestWithResponse(data.toByteBuffer(), context));\n }\n\n Mono> uploadManifestWithResponse(ByteBuffer data, Context context) {\n if (data == null) {\n return monoError(logger, new NullPointerException(\"'data' can't be null.\"));\n }\n\n String digest = UtilsImpl.computeDigest(data);\n return this.registriesImpl.createManifestWithResponseAsync(\n repositoryName,\n digest,\n Flux.just(data),\n data.remaining(),\n UtilsImpl.OCI_MANIFEST_MEDIA_TYPE,\n context).map(response -> {\n Response res = new ResponseBase(\n response.getRequest(),\n response.getStatusCode(),\n response.getHeaders(),\n new UploadManifestResult(response.getDeserializedHeaders().getDockerContentDigest()),\n response.getDeserializedHeaders());\n return res;\n }).onErrorMap(UtilsImpl::mapException);\n }\n\n /**\n * Uploads a blob to the repository.\n * The client currently uploads the entire blob\\layer as a single unit.\n *

\n * The blob is read into memory and then an upload operation is performed as a single operation.\n * We currently do not support breaking the layer into multiple chunks and uploading them one at a time\n *\n * @param data The blob\\image content that needs to be uploaded.\n * @return The operation result.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code data} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono uploadBlob(BinaryData data) {\n if (data == null) {\n return monoError(logger, new NullPointerException(\"'data' can't be null.\"));\n }\n\n return withContext(context -> this.uploadBlobWithResponse(data.toByteBuffer(), context)).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Uploads a blob to the repository.\n * The client currently uploads the entire blob\\layer as a single unit.\n *

\n * The blob is read into memory and then an upload operation is performed as a single operation.\n * We currently do not support breaking the layer into multiple chunks and uploading them one at a time\n *\n * @param data The blob\\image content that needs to be uploaded.\n * @return The rest response containing the operation result.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code data} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono> uploadBlobWithResponse(BinaryData data) {\n return withContext(context -> this.uploadBlobWithResponse(data.toByteBuffer(), context));\n }\n\n Mono> uploadBlobWithResponse(ByteBuffer data, Context context) {\n if (data == null) {\n return monoError(logger, new NullPointerException(\"'data' can't be null.\"));\n }\n\n String digest = UtilsImpl.computeDigest(data);\n return this.blobsImpl.startUploadWithResponseAsync(repositoryName, context)\n .flatMap(startUploadResponse -> this.blobsImpl.uploadChunkWithResponseAsync(trimNextLink(startUploadResponse.getDeserializedHeaders().getLocation()), Flux.just(data), data.remaining(), context))\n .flatMap(uploadChunkResponse -> this.blobsImpl.completeUploadWithResponseAsync(digest, trimNextLink(uploadChunkResponse.getDeserializedHeaders().getLocation()), null, 0L, context))\n .flatMap(completeUploadResponse -> {\n Response res = new ResponseBase(completeUploadResponse.getRequest(),\n completeUploadResponse.getStatusCode(),\n completeUploadResponse.getHeaders(),\n new UploadBlobResult(completeUploadResponse.getDeserializedHeaders().getDockerContentDigest()),\n completeUploadResponse.getDeserializedHeaders());\n\n return Mono.just(res);\n }).onErrorMap(UtilsImpl::mapException);\n }\n\n private String trimNextLink(String locationHeader) {\n \n \n if (locationHeader.startsWith(\"/\")) {\n return locationHeader.substring(1);\n }\n\n return 
locationHeader;\n }\n\n /**\n * Download the manifest associated with the given tag or digest.\n * We currently only support downloading OCI manifests.\n *\n * @see downloadManifest(String tagOrDigest) {\n return this.downloadManifestWithResponse(tagOrDigest).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Download the manifest associated with the given tag or digest.\n * We currently only support downloading OCI manifests.\n *\n * @see > downloadManifestWithResponse(String tagOrDigest) {\n return withContext(context -> this.downloadManifestWithResponse(tagOrDigest, context));\n }\n\n Mono> downloadManifestWithResponse(String tagOrDigest, Context context) {\n if (tagOrDigest == null) {\n ).onErrorMap(UtilsImpl::mapException);\n }\n\n /**\n * Download the blob associated with the given digest.\n *\n * @param digest The digest for the given image layer.\n * @return The image associated with the given digest.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code digest} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono downloadBlob(String digest) {\n return this.downloadBlobWithResponse(digest).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Download the blob\\layer associated with the given digest.\n *\n * @param digest The digest for the given image layer.\n * @return The image associated with the given digest.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code digest} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono> downloadBlobWithResponse(String digest) {\n return withContext(context -> this.downloadBlobWithResponse(digest, context));\n }\n\n Mono> downloadBlobWithResponse(String digest, Context context) {\n if (digest == null) {\n return monoError(logger, new 
NullPointerException(\"'digest' can't be null.\"));\n }\n\n return this.blobsImpl.getBlobWithResponseAsync(repositoryName, digest, context).flatMap(streamResponse -> {\n String resDigest = streamResponse.getHeaders().getValue(UtilsImpl.DOCKER_DIGEST_HEADER_NAME);\n\n return BinaryData.fromFlux(streamResponse.getValue())\n .flatMap(binaryData -> {\n Response response = new ResponseBase(\n streamResponse.getRequest(),\n streamResponse.getStatusCode(),\n streamResponse.getHeaders(),\n new DownloadBlobResult().setContent(binaryData).setDigest(resDigest),\n null);\n\n return Mono.just(response);\n });\n }).onErrorMap(UtilsImpl::mapException);\n }\n\n /**\n * Delete the image associated with the given digest\n *\n * @param digest The digest for the given image layer.\n * @return The completion signal.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code digest} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono deleteBlob(String digest) {\n return this.deleteBlobWithResponse(digest).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Delete the image associated with the given digest\n *\n * @param digest The digest for the given image layer.\n * @return The REST response for the completion.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code digest} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono> deleteBlobWithResponse(String digest) {\n return withContext(context -> deleteBlobWithResponse(digest, context));\n }\n\n Mono> deleteBlobWithResponse(String digest, Context context) {\n if (digest == null) {\n return monoError(logger, new NullPointerException(\"'digest' can't be null.\"));\n }\n\n return this.blobsImpl.deleteBlobWithResponseAsync(repositoryName, digest, context)\n 
.flatMap(UtilsImpl::deleteResponseToSuccess)\n .onErrorMap(UtilsImpl::mapException);\n }\n\n /**\n * Delete the manifest associated with the given digest.\n * We currently only support downloading OCI manifests.\n *\n * @see deleteManifest(String digest) {\n return this.deleteManifestWithResponse(digest).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Delete the manifest associated with the given digest.\n * We currently only support downloading OCI manifests.\n *\n * @see > deleteManifestWithResponse(String digest) {\n return withContext(context -> deleteManifestWithResponse(digest, context));\n }\n\n Mono> deleteManifestWithResponse(String digest, Context context) {\n return this.registriesImpl.deleteManifestWithResponseAsync(repositoryName, digest, context)\n .flatMap(UtilsImpl::deleteResponseToSuccess)\n .onErrorMap(UtilsImpl::mapException);\n }\n}", "context_after": "class ContainerRegistryBlobAsyncClient {\n\n private final AzureContainerRegistryImpl registryImplClient;\n private final ContainerRegistryBlobsImpl blobsImpl;\n private final ContainerRegistriesImpl registriesImpl;\n private final String endpoint;\n private final String repositoryName;\n\n private final ClientLogger logger = new ClientLogger(ContainerRegistryBlobAsyncClient.class);\n\n ContainerRegistryBlobAsyncClient(String repositoryName, HttpPipeline httpPipeline, String endpoint, String version) {\n this.repositoryName = repositoryName;\n this.endpoint = endpoint;\n this.registryImplClient = new AzureContainerRegistryImplBuilder()\n .url(endpoint)\n .pipeline(httpPipeline)\n .apiVersion(version)\n .buildClient();\n this.blobsImpl = this.registryImplClient.getContainerRegistryBlobs();\n this.registriesImpl = this.registryImplClient.getContainerRegistries();\n }\n\n /**\n * This method returns the registry's repository on which operations are being performed.\n *\n * @return The name of the repository\n */\n public String getRepositoryName() {\n return this.repositoryName;\n }\n\n /**\n * This 
method returns the complete registry endpoint.\n *\n * @return The registry endpoint including the authority.\n */\n public String getEndpoint() {\n return this.endpoint;\n }\n\n /**\n * Upload the Oci manifest to the repository.\n * The upload is done as a single operation.\n * @see uploadManifest(OciManifest manifest) {\n if (manifest == null) {\n return monoError(logger, new NullPointerException(\"'manifest' can't be null.\"));\n }\n\n return uploadManifest(BinaryData.fromObject(manifest));\n }\n\n /**\n * Uploads a manifest to the repository.\n * The client currently only supports uploading OciManifests to the repository.\n * And this operation makes the assumption that the data provided is a valid OCI manifest.\n *

\n * Also, the data is read into memory and then an upload operation is performed as a single operation.\n * @see uploadManifest(BinaryData data) {\n if (data == null) {\n return monoError(logger, new NullPointerException(\"'data' can't be null.\"));\n }\n\n return withContext(context -> this.uploadManifestWithResponse(data.toByteBuffer(), context)).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Uploads a manifest to the repository.\n * The client currently only supports uploading OciManifests to the repository.\n * And this operation makes the assumption that the data provided is a valid OCI manifest.\n *

\n * Also, the data is read into memory and then an upload operation is performed as a single operation.\n * @see > uploadManifestWithResponse(BinaryData data) {\n if (data == null) {\n return monoError(logger, new NullPointerException(\"'data' can't be null.\"));\n }\n\n return withContext(context -> this.uploadManifestWithResponse(data.toByteBuffer(), context));\n }\n\n Mono> uploadManifestWithResponse(ByteBuffer data, Context context) {\n if (data == null) {\n return monoError(logger, new NullPointerException(\"'data' can't be null.\"));\n }\n\n String digest = UtilsImpl.computeDigest(data);\n return this.registriesImpl.createManifestWithResponseAsync(\n repositoryName,\n digest,\n Flux.just(data),\n data.remaining(),\n UtilsImpl.OCI_MANIFEST_MEDIA_TYPE,\n context).map(response -> {\n Response res = new ResponseBase(\n response.getRequest(),\n response.getStatusCode(),\n response.getHeaders(),\n new UploadManifestResult(response.getDeserializedHeaders().getDockerContentDigest()),\n response.getDeserializedHeaders());\n return res;\n }).onErrorMap(UtilsImpl::mapException);\n }\n\n /**\n * Uploads a blob to the repository.\n * The client currently uploads the entire blob\\layer as a single unit.\n *

\n * The blob is read into memory and then an upload operation is performed as a single operation.\n * We currently do not support breaking the layer into multiple chunks and uploading them one at a time\n *\n * @param data The blob\\image content that needs to be uploaded.\n * @return The operation result.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code data} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono uploadBlob(BinaryData data) {\n if (data == null) {\n return monoError(logger, new NullPointerException(\"'data' can't be null.\"));\n }\n\n return withContext(context -> this.uploadBlobWithResponse(data.toByteBuffer(), context)).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Uploads a blob to the repository.\n * The client currently uploads the entire blob\\layer as a single unit.\n *

\n * The blob is read into memory and then an upload operation is performed as a single operation.\n * We currently do not support breaking the layer into multiple chunks and uploading them one at a time\n *\n * @param data The blob\\image content that needs to be uploaded.\n * @return The rest response containing the operation result.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code data} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono> uploadBlobWithResponse(BinaryData data) {\n return withContext(context -> this.uploadBlobWithResponse(data.toByteBuffer(), context));\n }\n\n Mono> uploadBlobWithResponse(ByteBuffer data, Context context) {\n if (data == null) {\n return monoError(logger, new NullPointerException(\"'data' can't be null.\"));\n }\n\n String digest = UtilsImpl.computeDigest(data);\n return this.blobsImpl.startUploadWithResponseAsync(repositoryName, context)\n .flatMap(startUploadResponse -> this.blobsImpl.uploadChunkWithResponseAsync(trimNextLink(startUploadResponse.getDeserializedHeaders().getLocation()), Flux.just(data), data.remaining(), context))\n .flatMap(uploadChunkResponse -> this.blobsImpl.completeUploadWithResponseAsync(digest, trimNextLink(uploadChunkResponse.getDeserializedHeaders().getLocation()), null, 0L, context))\n .flatMap(completeUploadResponse -> {\n Response res = new ResponseBase(completeUploadResponse.getRequest(),\n completeUploadResponse.getStatusCode(),\n completeUploadResponse.getHeaders(),\n new UploadBlobResult(completeUploadResponse.getDeserializedHeaders().getDockerContentDigest()),\n completeUploadResponse.getDeserializedHeaders());\n\n return Mono.just(res);\n }).onErrorMap(UtilsImpl::mapException);\n }\n\n private String trimNextLink(String locationHeader) {\n \n \n if (locationHeader.startsWith(\"/\")) {\n return locationHeader.substring(1);\n }\n\n return 
locationHeader;\n }\n\n /**\n * Download the manifest associated with the given tag or digest.\n * We currently only support downloading OCI manifests.\n *\n * @see downloadManifest(String tagOrDigest) {\n return this.downloadManifestWithResponse(tagOrDigest).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Download the manifest associated with the given tag or digest.\n * We currently only support downloading OCI manifests.\n *\n * @see > downloadManifestWithResponse(String tagOrDigest) {\n return withContext(context -> this.downloadManifestWithResponse(tagOrDigest, context));\n }\n\n Mono> downloadManifestWithResponse(String tagOrDigest, Context context) {\n if (tagOrDigest == null) {\n ).onErrorMap(UtilsImpl::mapException);\n }\n\n /**\n * Download the blob associated with the given digest.\n *\n * @param digest The digest for the given image layer.\n * @return The image associated with the given digest.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code digest} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono downloadBlob(String digest) {\n return this.downloadBlobWithResponse(digest).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Download the blob\\layer associated with the given digest.\n *\n * @param digest The digest for the given image layer.\n * @return The image associated with the given digest.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code digest} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono> downloadBlobWithResponse(String digest) {\n return withContext(context -> this.downloadBlobWithResponse(digest, context));\n }\n\n Mono> downloadBlobWithResponse(String digest, Context context) {\n if (digest == null) {\n return monoError(logger, new 
NullPointerException(\"'digest' can't be null.\"));\n }\n\n return this.blobsImpl.getBlobWithResponseAsync(repositoryName, digest, context).flatMap(streamResponse -> {\n String resDigest = streamResponse.getHeaders().getValue(UtilsImpl.DOCKER_DIGEST_HEADER_NAME);\n\n return BinaryData.fromFlux(streamResponse.getValue())\n .flatMap(binaryData -> {\n Response response = new ResponseBase(\n streamResponse.getRequest(),\n streamResponse.getStatusCode(),\n streamResponse.getHeaders(),\n new DownloadBlobResult(resDigest, binaryData),\n null);\n\n return Mono.just(response);\n });\n }).onErrorMap(UtilsImpl::mapException);\n }\n\n /**\n * Delete the image associated with the given digest\n *\n * @param digest The digest for the given image layer.\n * @return The completion signal.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code digest} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono deleteBlob(String digest) {\n return this.deleteBlobWithResponse(digest).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Delete the image associated with the given digest\n *\n * @param digest The digest for the given image layer.\n * @return The REST response for the completion.\n * @throws ClientAuthenticationException thrown if the client's credentials do not have access to modify the namespace.\n * @throws NullPointerException thrown if the {@code digest} is null.\n */\n @ServiceMethod(returns = ReturnType.SINGLE)\n public Mono> deleteBlobWithResponse(String digest) {\n return withContext(context -> deleteBlobWithResponse(digest, context));\n }\n\n Mono> deleteBlobWithResponse(String digest, Context context) {\n if (digest == null) {\n return monoError(logger, new NullPointerException(\"'digest' can't be null.\"));\n }\n\n return this.blobsImpl.deleteBlobWithResponseAsync(repositoryName, digest, context)\n 
.flatMap(UtilsImpl::deleteResponseToSuccess)\n .onErrorMap(UtilsImpl::mapException);\n }\n\n /**\n * Delete the manifest associated with the given digest.\n * We currently only support downloading OCI manifests.\n *\n * @see deleteManifest(String digest) {\n return this.deleteManifestWithResponse(digest).flatMap(FluxUtil::toMono);\n }\n\n /**\n * Delete the manifest associated with the given digest.\n * We currently only support downloading OCI manifests.\n *\n * @see > deleteManifestWithResponse(String digest) {\n return withContext(context -> deleteManifestWithResponse(digest, context));\n }\n\n Mono> deleteManifestWithResponse(String digest, Context context) {\n return this.registriesImpl.deleteManifestWithResponseAsync(repositoryName, digest, context)\n .flatMap(UtilsImpl::deleteResponseToSuccess)\n .onErrorMap(UtilsImpl::mapException);\n }\n}" }, { "comment": "Yeah, `convertibleTypes.size() < 2` is kind of repeated check, that is not needed here. But we need to preserve the `convertibleTypes.size() == 0` check for other places. Moved that check to outside of this method. 
Addressed with https://github.com/ballerina-platform/ballerina-lang/pull/30710/commits/688ab907b7244cd39e24c251849c329e7f7fbe24", "method_body": "public static boolean hasIntegerSubTypes(Set convertibleTypes) {\n if (convertibleTypes.size() < 2) {\n return false;\n }\n for (Type type : convertibleTypes) {\n if (!TypeTags.isIntegerTypeTag(type.getTag()) && type.getTag() != TypeTags.BYTE_TAG) {\n return false;\n }\n }\n return true;\n }", "target_code": "if (convertibleTypes.size() < 2) {", "method_body_after": "public static boolean hasIntegerSubTypes(Set convertibleTypes) {\n for (Type type : convertibleTypes) {\n if (!TypeTags.isIntegerTypeTag(type.getTag()) && type.getTag() != TypeTags.BYTE_TAG) {\n return false;\n }\n }\n return true;\n }", "context_before": "class TypeConverter {\n\n private static final String NaN = \"NaN\";\n private static final String POSITIVE_INFINITY = \"Infinity\";\n private static final String NEGATIVE_INFINITY = \"-Infinity\";\n\n public static Object convertValues(Type targetType, Object inputValue) {\n Type inputType = TypeChecker.getType(inputValue);\n switch (targetType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n return anyToInt(inputValue, () ->\n ErrorUtils.createNumericConversionError(inputValue, PredefinedTypes.TYPE_INT));\n case TypeTags.DECIMAL_TAG:\n return anyToDecimal(inputValue, () ->\n ErrorUtils.createNumericConversionError(inputValue, PredefinedTypes.TYPE_DECIMAL));\n case TypeTags.FLOAT_TAG:\n return anyToFloat(inputValue, () ->\n ErrorUtils.createNumericConversionError(inputValue, PredefinedTypes.TYPE_FLOAT));\n case TypeTags.STRING_TAG:\n return StringUtils.fromString(anyToString(inputValue));\n case TypeTags.BOOLEAN_TAG:\n return anyToBoolean(inputValue, () ->\n ErrorUtils.createNumericConversionError(inputValue, 
PredefinedTypes.TYPE_BOOLEAN));\n case TypeTags.BYTE_TAG:\n return anyToByte(inputValue, () ->\n ErrorUtils.createNumericConversionError(inputValue, PredefinedTypes.TYPE_BYTE));\n default:\n throw ErrorCreator.createError(BallerinaErrorReasons.NUMBER_CONVERSION_ERROR,\n BLangExceptionHelper.getErrorMessage(\n RuntimeErrors.INCOMPATIBLE_SIMPLE_TYPE_CONVERT_OPERATION,\n inputType, inputValue, targetType));\n }\n }\n\n public static Object castValues(Type targetType, Object inputValue) {\n switch (targetType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n return anyToIntCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_INT));\n case TypeTags.DECIMAL_TAG:\n return anyToDecimalCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_DECIMAL));\n case TypeTags.FLOAT_TAG:\n return anyToFloatCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_FLOAT));\n case TypeTags.STRING_TAG:\n return anyToStringCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_STRING));\n case TypeTags.BOOLEAN_TAG:\n return anyToBooleanCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_BOOLEAN));\n case TypeTags.BYTE_TAG:\n return anyToByteCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_BYTE));\n default:\n throw ErrorUtils.createTypeCastError(inputValue, targetType);\n }\n }\n\n static boolean isConvertibleToByte(Object value) {\n Type inputType = TypeChecker.getType(value);\n switch (inputType.getTag()) {\n case TypeTags.BYTE_TAG:\n return true;\n case TypeTags.INT_TAG:\n return TypeChecker.isByteLiteral((long) value);\n case TypeTags.FLOAT_TAG:\n Double doubleValue = (Double) 
value;\n return isFloatWithinIntRange(doubleValue) && TypeChecker.isByteLiteral(doubleValue.longValue());\n case TypeTags.DECIMAL_TAG:\n return isDecimalWithinIntRange((DecimalValue) value)\n && TypeChecker.isByteLiteral(((DecimalValue) value).value().longValue());\n default:\n return false;\n }\n }\n\n static boolean isConvertibleToInt(Object value) {\n Type inputType = TypeChecker.getType(value);\n switch (inputType.getTag()) {\n case TypeTags.BYTE_TAG:\n case TypeTags.INT_TAG:\n return true;\n case TypeTags.FLOAT_TAG:\n return isFloatWithinIntRange((double) value);\n case TypeTags.DECIMAL_TAG:\n return isDecimalWithinIntRange((DecimalValue) value);\n default:\n return false;\n }\n }\n\n static boolean isConvertibleToIntSubType(Object value, Type targetType) {\n Type inputType = TypeChecker.getType(value);\n long val;\n switch (inputType.getTag()) {\n case TypeTags.BYTE_TAG:\n case TypeTags.INT_TAG:\n val = ((Number) value).longValue();\n break;\n case TypeTags.FLOAT_TAG:\n if (!isFloatWithinIntRange((Double) value)) {\n return false;\n }\n val = floatToInt((Double) value);\n break;\n case TypeTags.DECIMAL_TAG:\n if (!isDecimalWithinIntRange((DecimalValue) value)) {\n return false;\n }\n val = ((DecimalValue) value).value().intValue();\n break;\n default:\n return false;\n }\n switch (targetType.getTag()) {\n case TypeTags.SIGNED32_INT_TAG:\n return TypeChecker.isSigned32LiteralValue(val);\n case TypeTags.SIGNED16_INT_TAG:\n return TypeChecker.isSigned16LiteralValue(val);\n case TypeTags.SIGNED8_INT_TAG:\n return TypeChecker.isSigned8LiteralValue(val);\n case TypeTags.UNSIGNED32_INT_TAG:\n return TypeChecker.isUnsigned32LiteralValue(val);\n case TypeTags.UNSIGNED16_INT_TAG:\n return TypeChecker.isUnsigned16LiteralValue(val);\n case TypeTags.UNSIGNED8_INT_TAG:\n return TypeChecker.isUnsigned8LiteralValue(val);\n }\n return false;\n }\n\n static boolean isConvertibleToChar(Object value) {\n Type inputType = TypeChecker.getType(value);\n if (inputType.getTag() == 
TypeTags.STRING_TAG) {\n return isCharLiteralValue(value);\n }\n return false;\n }\n\n static boolean isConvertibleToFloatingPointTypes(Object value) {\n Type inputType = TypeChecker.getType(value);\n switch (inputType.getTag()) {\n case TypeTags.BYTE_TAG:\n case TypeTags.INT_TAG:\n case TypeTags.FLOAT_TAG:\n case TypeTags.DECIMAL_TAG:\n return true;\n default:\n return false;\n }\n }\n\n \n public static Set getConvertibleTypes(Object inputValue, Type targetType) {\n return getConvertibleTypes(inputValue, targetType, new ArrayList<>());\n }\n\n public static Set getConvertibleTypes(Object inputValue, Type targetType,\n List unresolvedValues) {\n Set convertibleTypes = new LinkedHashSet<>();\n\n int targetTypeTag = targetType.getTag();\n\n switch (targetTypeTag) {\n case TypeTags.UNION_TAG:\n for (Type memType : ((BUnionType) targetType).getMemberTypes()) {\n if (TypeChecker.getType(inputValue) == memType) {\n return Set.of(memType);\n }\n convertibleTypes.addAll(getConvertibleTypes(inputValue, memType, unresolvedValues));\n }\n break;\n case TypeTags.ARRAY_TAG:\n if (isConvertibleToArrayType(inputValue, (BArrayType) targetType, unresolvedValues)) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.RECORD_TYPE_TAG:\n if (isConvertibleToRecordType(inputValue, (BRecordType) targetType, false, unresolvedValues)) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.ANYDATA_TAG:\n Type matchingType = TypeConverter.resolveMatchingTypeForUnion(inputValue, targetType);\n if (matchingType != null) {\n convertibleTypes.add(matchingType);\n }\n break;\n case TypeTags.MAP_TAG:\n if (isConvertibleToMapType(inputValue, (BMapType) targetType, unresolvedValues)) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.TABLE_TAG:\n if (isConvertibleToTableType(((BTableType) targetType).getConstrainedType())) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.INTERSECTION_TAG:\n Type effectiveType = ((BIntersectionType) 
targetType).getEffectiveType();\n convertibleTypes.addAll(getConvertibleTypes(inputValue, effectiveType, unresolvedValues));\n break;\n case TypeTags.FINITE_TYPE_TAG:\n for (Object valueSpaceItem : ((BFiniteType) targetType).valueSpace) {\n Type inputValueType = TypeChecker.getType(inputValue);\n if (inputValue == valueSpaceItem) {\n return Set.of(inputValueType);\n }\n if (TypeChecker.isFiniteTypeValue(inputValue, inputValueType, valueSpaceItem)) {\n convertibleTypes.add(TypeChecker.getType(valueSpaceItem));\n }\n }\n break;\n default:\n if (TypeChecker.checkIsLikeType(inputValue, targetType, true)) {\n convertibleTypes.add(targetType);\n }\n }\n return convertibleTypes;\n }\n\n public static List getConvertibleTypesFromJson(Object value, Type targetType,\n List unresolvedValues) {\n\n int targetTypeTag = targetType.getTag();\n\n List convertibleTypes = new ArrayList<>(TypeConverter.getConvertibleTypes(value, targetType));\n\n if (convertibleTypes.size() == 0) {\n switch (targetTypeTag) {\n case TypeTags.RECORD_TYPE_TAG:\n if (isConvertibleToRecordType(value, (BRecordType) targetType, true, unresolvedValues)) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.TABLE_TAG:\n if (isConvertibleToTableType(((BTableType) targetType).getConstrainedType())) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.XML_TAG:\n case TypeTags.XML_ELEMENT_TAG:\n case TypeTags.XML_COMMENT_TAG:\n case TypeTags.XML_PI_TAG:\n case TypeTags.XML_TEXT_TAG:\n if (TypeChecker.getType(value).getTag() == TypeTags.STRING_TAG) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.INTERSECTION_TAG:\n return getConvertibleTypesFromJson(value, ((BIntersectionType) targetType).getEffectiveType(),\n unresolvedValues);\n }\n }\n return convertibleTypes;\n }\n\n private static boolean isConvertibleToRecordType(Object sourceValue, BRecordType targetType, boolean isFromJson,\n List unresolvedValues) {\n if (!(sourceValue instanceof MapValueImpl)) {\n return 
false;\n }\n\n TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);\n if (unresolvedValues.contains(typeValuePair)) {\n return true;\n }\n unresolvedValues.add(typeValuePair);\n\n Map targetFieldTypes = new HashMap<>();\n Type restFieldType = targetType.restFieldType;\n\n for (Map.Entry field : targetType.getFields().entrySet()) {\n targetFieldTypes.put(field.getKey(), field.getValue().getFieldType());\n }\n\n MapValueImpl sourceMapValueImpl = (MapValueImpl) sourceValue;\n for (Map.Entry targetTypeEntry : targetFieldTypes.entrySet()) {\n String fieldName = targetTypeEntry.getKey().toString();\n\n if (sourceMapValueImpl.containsKey(StringUtils.fromString(fieldName))) {\n continue;\n }\n Field targetField = targetType.getFields().get(fieldName);\n if (SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.REQUIRED)) {\n return false;\n }\n }\n\n for (Object object : sourceMapValueImpl.entrySet()) {\n Map.Entry valueEntry = (Map.Entry) object;\n String fieldName = valueEntry.getKey().toString();\n\n if (isFromJson) {\n if (targetFieldTypes.containsKey(fieldName)) {\n if (getConvertibleTypesFromJson(valueEntry.getValue(), targetFieldTypes.get(fieldName),\n unresolvedValues).size() != 1) {\n return false;\n }\n } else if (!targetType.sealed) {\n if (getConvertibleTypesFromJson(valueEntry.getValue(), restFieldType,\n unresolvedValues).size() != 1) {\n return false;\n }\n } else {\n return false;\n }\n } else {\n if (targetFieldTypes.containsKey(fieldName)) {\n if (getConvertibleTypes(valueEntry.getValue(), targetFieldTypes.get(fieldName),\n unresolvedValues).size() != 1) {\n return false;\n }\n } else if (!targetType.sealed) {\n if (getConvertibleTypes(valueEntry.getValue(), restFieldType, unresolvedValues).size() != 1) {\n return false;\n }\n } else {\n return false;\n }\n }\n }\n return true;\n }\n\n private static boolean isConvertibleToTableType(Type tableConstrainedType) {\n switch (tableConstrainedType.getTag()) {\n case 
TypeTags.RECORD_TYPE_TAG:\n case TypeTags.MAP_TAG:\n return true;\n case TypeTags.INTERSECTION_TAG:\n return isConvertibleToTableType(((BIntersectionType) tableConstrainedType).getEffectiveType());\n }\n return false;\n }\n\n private static boolean isConvertibleToMapType(Object sourceValue, BMapType targetType,\n List unresolvedValues) {\n if (!(sourceValue instanceof MapValueImpl)) {\n return false;\n }\n for (Object mapEntry : ((MapValueImpl) sourceValue).values()) {\n if (getConvertibleTypes(mapEntry, targetType.getConstrainedType(), unresolvedValues).size() != 1) {\n return false;\n }\n }\n return true;\n }\n\n private static boolean isConvertibleToArrayType(Object sourceValue, BArrayType targetType,\n List unresolvedValues) {\n if (!(sourceValue instanceof ArrayValue)) {\n return false;\n }\n ArrayValue source = (ArrayValue) sourceValue;\n Type targetTypeElementType = targetType.getElementType();\n if (source.getType().getTag() == TypeTags.ARRAY_TAG) {\n Type sourceElementType = ((BArrayType) source.getType()).getElementType();\n if (isNumericType(sourceElementType) && isNumericType(targetTypeElementType)) {\n return true;\n }\n }\n Set convertibleTypes;\n for (int i = 0; i < source.size(); i++) {\n convertibleTypes = getConvertibleTypes(source.get(i), targetTypeElementType, unresolvedValues);\n if (convertibleTypes.size() != 1 && !convertibleTypes.contains(TypeChecker.getType(source.get(i)))\n && !hasIntegerSubTypes(convertibleTypes)) {\n return false;\n }\n }\n return true;\n }\n\n \n\n static long anyToInt(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return (Long) sourceVal;\n } else if (sourceVal instanceof Double) {\n return floatToInt((double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return ((Integer) sourceVal).longValue();\n } else if (sourceVal instanceof Boolean) {\n return (Boolean) sourceVal ? 
1 : 0;\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).intValue();\n } else if (sourceVal instanceof String) {\n try {\n return Long.parseLong((String) sourceVal);\n } catch (NumberFormatException e) {\n throw errorFunc.get();\n }\n }\n\n throw errorFunc.get();\n }\n\n static long anyToIntCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return (Long) sourceVal;\n } else if (sourceVal instanceof Double) {\n return floatToInt((double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return ((Integer) sourceVal).longValue();\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).intValue();\n } else {\n throw errorFunc.get();\n }\n }\n\n static long anyToIntSubTypeCast(Object sourceVal, Type type, Supplier errorFunc) {\n long value = anyToIntCast(sourceVal, errorFunc);\n if (type == PredefinedTypes.TYPE_INT_SIGNED_32 && isSigned32LiteralValue(value)) {\n return value;\n } else if (type == PredefinedTypes.TYPE_INT_SIGNED_16 && isSigned16LiteralValue(value)) {\n return value;\n } else if (type == PredefinedTypes.TYPE_INT_SIGNED_8 && isSigned8LiteralValue(value)) {\n return value;\n } else if (type == PredefinedTypes.TYPE_INT_UNSIGNED_32 && isUnsigned32LiteralValue(value)) {\n return value;\n } else if (type == PredefinedTypes.TYPE_INT_UNSIGNED_16 && isUnsigned16LiteralValue(value)) {\n return value;\n } else if (type == PredefinedTypes.TYPE_INT_UNSIGNED_8 && isUnsigned8LiteralValue(value)) {\n return value;\n }\n throw errorFunc.get();\n }\n\n static double anyToFloat(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return ((Long) sourceVal).doubleValue();\n } else if (sourceVal instanceof Double) {\n return (Double) sourceVal;\n } else if (sourceVal instanceof Integer) {\n return ((Integer) sourceVal).floatValue();\n } else if (sourceVal instanceof Boolean) {\n return (Boolean) sourceVal ? 
1.0 : 0.0;\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).floatValue();\n } else if (sourceVal instanceof String) {\n try {\n return Double.parseDouble((String) sourceVal);\n } catch (NumberFormatException e) {\n throw errorFunc.get();\n }\n }\n\n throw errorFunc.get();\n }\n\n static double anyToFloatCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return ((Long) sourceVal).doubleValue();\n } else if (sourceVal instanceof Double) {\n return (Double) sourceVal;\n } else if (sourceVal instanceof Integer) {\n return ((Integer) sourceVal).floatValue();\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).floatValue();\n } else {\n throw errorFunc.get();\n }\n }\n\n static boolean anyToBoolean(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return (long) sourceVal != 0;\n } else if (sourceVal instanceof Double) {\n return (Double) sourceVal != 0.0;\n } else if (sourceVal instanceof Integer) {\n return (int) sourceVal != 0;\n } else if (sourceVal instanceof Boolean) {\n return (boolean) sourceVal;\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).booleanValue();\n } else if (sourceVal instanceof String) {\n try {\n return Boolean.parseBoolean((String) sourceVal);\n } catch (NumberFormatException e) {\n throw errorFunc.get();\n }\n }\n\n throw errorFunc.get();\n }\n\n static boolean anyToBooleanCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Boolean) {\n return (boolean) sourceVal;\n }\n\n throw errorFunc.get();\n }\n\n public static int intToByte(long sourceVal) {\n if (!TypeChecker.isByteLiteral(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_BYTE);\n }\n return ((Long) sourceVal).intValue();\n }\n\n public static long intToSigned32(long sourceVal) {\n if (!TypeChecker.isSigned32LiteralValue(sourceVal)) {\n throw 
ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_SIGNED_32);\n }\n return sourceVal;\n }\n\n public static long intToSigned16(long sourceVal) {\n if (!isSigned16LiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_SIGNED_16);\n }\n return sourceVal;\n }\n\n public static long intToSigned8(long sourceVal) {\n if (!isSigned8LiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_SIGNED_8);\n }\n return sourceVal;\n }\n\n public static long intToUnsigned32(long sourceVal) {\n if (!isUnsigned32LiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_UNSIGNED_32);\n }\n return sourceVal;\n }\n\n public static long intToUnsigned16(long sourceVal) {\n if (!isUnsigned16LiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_UNSIGNED_16);\n }\n return sourceVal;\n }\n\n public static long intToUnsigned8(long sourceVal) {\n if (!isUnsigned8LiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_UNSIGNED_8);\n }\n return sourceVal;\n }\n\n public static long floatToSigned32(double sourceVal) {\n return intToSigned32(floatToInt(sourceVal));\n }\n\n public static long floatToSigned16(double sourceVal) {\n return intToSigned16(floatToInt(sourceVal));\n }\n\n public static long floatToSigned8(double sourceVal) {\n return intToSigned8(floatToInt(sourceVal));\n }\n\n public static long floatToUnsigned32(double sourceVal) {\n return intToUnsigned32(floatToInt(sourceVal));\n }\n\n public static long floatToUnsigned16(double sourceVal) {\n return intToUnsigned16(floatToInt(sourceVal));\n }\n\n public static long floatToUnsigned8(double sourceVal) {\n return intToUnsigned8(floatToInt(sourceVal));\n }\n\n public static BString stringToChar(Object sourceVal) {\n if 
(!isCharLiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_STRING_CHAR);\n }\n return StringUtils.fromString(Objects.toString(sourceVal));\n }\n\n public static Long stringToInt(String value) throws NumberFormatException {\n return Long.parseLong(value);\n }\n\n public static int stringToByte(String value) throws NumberFormatException, BError {\n int byteValue = Integer.parseInt(value);\n return intToByte(byteValue);\n }\n\n public static Double stringToFloat(String value) throws NumberFormatException {\n return Double.parseDouble(value);\n }\n\n public static Boolean stringToBoolean(String value) throws NumberFormatException {\n if (\"true\".equalsIgnoreCase(value) || \"1\".equalsIgnoreCase(value)) {\n return true;\n } else if (\"false\".equalsIgnoreCase(value) || \"0\".equalsIgnoreCase(value)) {\n return false;\n }\n throw new NumberFormatException();\n }\n\n public static BDecimal stringToDecimal(String value) throws NumberFormatException {\n return new DecimalValue(value);\n }\n\n public static BXml stringToXml(String value) throws BError {\n StringBuilder sb = new StringBuilder();\n sb.append(\"\").append(value).append(\"\");\n BXml item = XmlUtils.parse(sb.toString());\n return item.children();\n }\n\n public static BString anyToChar(Object sourceVal) {\n String value = Objects.toString(sourceVal);\n return stringToChar(value);\n }\n\n\n public static int floatToByte(double sourceVal) {\n checkIsValidFloat(sourceVal, PredefinedTypes.TYPE_BYTE);\n\n long intVal = Math.round(sourceVal);\n if (!TypeChecker.isByteLiteral(intVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_BYTE);\n }\n\n return (int) intVal;\n }\n\n public static long floatToInt(double sourceVal) {\n checkIsValidFloat(sourceVal, PredefinedTypes.TYPE_INT);\n\n if (!isFloatWithinIntRange(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT);\n }\n\n return 
(long) Math.rint(sourceVal);\n }\n\n private static void checkIsValidFloat(double sourceVal, Type targetType) {\n if (Double.isNaN(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(NaN, PredefinedTypes.TYPE_FLOAT, targetType);\n }\n\n if (Double.isInfinite(sourceVal)) {\n String value = sourceVal > 0 ? POSITIVE_INFINITY : NEGATIVE_INFINITY;\n throw ErrorUtils.createNumericConversionError(value, PredefinedTypes.TYPE_FLOAT, targetType);\n }\n }\n\n static int anyToByte(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return intToByte((Long) sourceVal);\n } else if (sourceVal instanceof Double) {\n return floatToByte((Double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return (int) sourceVal;\n } else if (sourceVal instanceof Boolean) {\n return ((Boolean) sourceVal ? 1 : 0);\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).byteValue();\n } else if (sourceVal instanceof String) {\n try {\n return Integer.parseInt((String) sourceVal);\n } catch (NumberFormatException e) {\n throw errorFunc.get();\n }\n }\n\n throw errorFunc.get();\n }\n\n static int anyToByteCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return intToByte((Long) sourceVal);\n } else if (sourceVal instanceof Byte) {\n return ((Byte) sourceVal).intValue();\n } else if (sourceVal instanceof Double) {\n return floatToByte((Double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return (int) sourceVal;\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).byteValue();\n } else {\n throw errorFunc.get();\n }\n\n }\n\n private static String anyToString(Object sourceVal) {\n if (sourceVal instanceof Long) {\n return Long.toString((Long) sourceVal);\n } else if (sourceVal instanceof Double) {\n return Double.toString((Double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return Long.toString((Integer) sourceVal);\n } else if 
(sourceVal instanceof Boolean) {\n return Boolean.toString((Boolean) sourceVal);\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).stringValue(null);\n } else if (sourceVal instanceof String) {\n return (String) sourceVal;\n } else if (sourceVal == null) {\n return \"()\";\n }\n\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_STRING);\n }\n\n private static String anyToStringCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof String) {\n return (String) sourceVal;\n }\n\n throw errorFunc.get();\n }\n\n static DecimalValue anyToDecimal(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return DecimalValue.valueOf((Long) sourceVal);\n } else if (sourceVal instanceof Double) {\n return DecimalValue.valueOf((Double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return DecimalValue.valueOf((Integer) sourceVal);\n } else if (sourceVal instanceof Boolean) {\n return DecimalValue.valueOf((Boolean) sourceVal);\n } else if (sourceVal instanceof DecimalValue) {\n return (DecimalValue) sourceVal;\n }\n throw errorFunc.get();\n }\n\n static DecimalValue anyToDecimalCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return DecimalValue.valueOf((Long) sourceVal);\n } else if (sourceVal instanceof Double) {\n return DecimalValue.valueOf((Double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return DecimalValue.valueOf((Integer) sourceVal);\n } else if (sourceVal instanceof DecimalValue) {\n return (DecimalValue) sourceVal;\n } else if (sourceVal instanceof String) {\n return new DecimalValue((String) sourceVal);\n }\n throw errorFunc.get();\n }\n\n \n\n static byte anyToJByteCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Byte) {\n return (Byte) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static char anyToJCharCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal 
instanceof Character) {\n return (Character) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static short anyToJShortCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Short) {\n return (Short) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static int anyToJIntCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Integer) {\n return (Integer) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static long anyToJLongCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return (Long) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static float anyToJFloatCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Float) {\n return (Float) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static double anyToJDoubleCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Double) {\n return (Double) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static boolean anyToJBooleanCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Boolean) {\n return (Boolean) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n public static long jFloatToBInt(float sourceVal) {\n checkIsValidFloat(sourceVal, PredefinedTypes.TYPE_INT);\n\n if (!isFloatWithinIntRange(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT);\n }\n\n return (long) Math.rint(sourceVal);\n }\n\n public static long jDoubleToBInt(double sourceVal) {\n checkIsValidFloat(sourceVal, PredefinedTypes.TYPE_INT);\n\n if (!isFloatWithinIntRange(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT);\n }\n\n return (long) Math.rint(sourceVal);\n }\n\n\n private static boolean isFloatWithinIntRange(double doubleValue) {\n return doubleValue < BINT_MAX_VALUE_DOUBLE_RANGE_MAX && doubleValue > BINT_MIN_VALUE_DOUBLE_RANGE_MIN;\n }\n\n public static Type 
resolveMatchingTypeForUnion(Object value, Type type) {\n if (value instanceof ArrayValue && ((ArrayValue) value).getType().getTag() == TypeTags.ARRAY_TAG &&\n !isDeepConversionRequiredForArray(((ArrayValue) value).getType())) {\n return TypeCreator.createArrayType(type);\n }\n\n if (value instanceof MapValue && ((MapValue) value).getType().getTag() == TypeTags.MAP_TAG &&\n !isDeepConversionRequiredForMap(((MapValue) value).getType())) {\n return TypeCreator.createMapType(type);\n }\n\n if (value == null && type.isNilable()) {\n return PredefinedTypes.TYPE_NULL;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_INT)) {\n return PredefinedTypes.TYPE_INT;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_FLOAT)) {\n return PredefinedTypes.TYPE_FLOAT;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_STRING)) {\n return PredefinedTypes.TYPE_STRING;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_BOOLEAN)) {\n return PredefinedTypes.TYPE_BOOLEAN;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_BYTE)) {\n return PredefinedTypes.TYPE_BYTE;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_DECIMAL)) {\n return PredefinedTypes.TYPE_DECIMAL;\n }\n\n Type anydataArrayType = new BArrayType(type);\n if (checkIsLikeType(value, anydataArrayType)) {\n return anydataArrayType;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_XML)) {\n return PredefinedTypes.TYPE_XML;\n }\n\n Type anydataMapType = new BMapType(type);\n if (checkIsLikeType(value, anydataMapType)) {\n return anydataMapType;\n }\n\n \n return null;\n }\n\n private static boolean isDeepConversionRequiredForArray(Type sourceType) {\n Type elementType = ((BArrayType) sourceType).getElementType();\n\n if (elementType != null) {\n if (TypeUtils.isValueType(elementType)) {\n return false;\n } else if (elementType instanceof BArrayType) {\n return isDeepConversionRequiredForArray(elementType);\n }\n return true;\n }\n return true;\n }\n\n private static boolean 
isDeepConversionRequiredForMap(Type sourceType) {\n Type constrainedType = ((BMapType) sourceType).getConstrainedType();\n\n if (constrainedType != null) {\n if (TypeUtils.isValueType(constrainedType)) {\n return false;\n } else if (constrainedType instanceof BMapType) {\n return isDeepConversionRequiredForMap(constrainedType);\n }\n return true;\n }\n return true;\n }\n\n}", "context_after": "class TypeConverter {\n\n private static final String NaN = \"NaN\";\n private static final String POSITIVE_INFINITY = \"Infinity\";\n private static final String NEGATIVE_INFINITY = \"-Infinity\";\n\n public static Object convertValues(Type targetType, Object inputValue) {\n Type inputType = TypeChecker.getType(inputValue);\n switch (targetType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n return anyToInt(inputValue, () ->\n ErrorUtils.createNumericConversionError(inputValue, PredefinedTypes.TYPE_INT));\n case TypeTags.DECIMAL_TAG:\n return anyToDecimal(inputValue, () ->\n ErrorUtils.createNumericConversionError(inputValue, PredefinedTypes.TYPE_DECIMAL));\n case TypeTags.FLOAT_TAG:\n return anyToFloat(inputValue, () ->\n ErrorUtils.createNumericConversionError(inputValue, PredefinedTypes.TYPE_FLOAT));\n case TypeTags.STRING_TAG:\n return StringUtils.fromString(anyToString(inputValue));\n case TypeTags.BOOLEAN_TAG:\n return anyToBoolean(inputValue, () ->\n ErrorUtils.createNumericConversionError(inputValue, PredefinedTypes.TYPE_BOOLEAN));\n case TypeTags.BYTE_TAG:\n return anyToByte(inputValue, () ->\n ErrorUtils.createNumericConversionError(inputValue, PredefinedTypes.TYPE_BYTE));\n default:\n throw ErrorCreator.createError(BallerinaErrorReasons.NUMBER_CONVERSION_ERROR,\n BLangExceptionHelper.getErrorMessage(\n RuntimeErrors.INCOMPATIBLE_SIMPLE_TYPE_CONVERT_OPERATION,\n 
inputType, inputValue, targetType));\n }\n }\n\n public static Object castValues(Type targetType, Object inputValue) {\n switch (targetType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n return anyToIntCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_INT));\n case TypeTags.DECIMAL_TAG:\n return anyToDecimalCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_DECIMAL));\n case TypeTags.FLOAT_TAG:\n return anyToFloatCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_FLOAT));\n case TypeTags.STRING_TAG:\n return anyToStringCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_STRING));\n case TypeTags.BOOLEAN_TAG:\n return anyToBooleanCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_BOOLEAN));\n case TypeTags.BYTE_TAG:\n return anyToByteCast(inputValue, () ->\n ErrorUtils.createTypeCastError(inputValue, PredefinedTypes.TYPE_BYTE));\n default:\n throw ErrorUtils.createTypeCastError(inputValue, targetType);\n }\n }\n\n static boolean isConvertibleToByte(Object value) {\n Type inputType = TypeChecker.getType(value);\n switch (inputType.getTag()) {\n case TypeTags.BYTE_TAG:\n return true;\n case TypeTags.INT_TAG:\n return TypeChecker.isByteLiteral((long) value);\n case TypeTags.FLOAT_TAG:\n Double doubleValue = (Double) value;\n return isFloatWithinIntRange(doubleValue) && TypeChecker.isByteLiteral(doubleValue.longValue());\n case TypeTags.DECIMAL_TAG:\n return isDecimalWithinIntRange((DecimalValue) value)\n && TypeChecker.isByteLiteral(((DecimalValue) value).value().longValue());\n default:\n return false;\n }\n }\n\n static boolean isConvertibleToInt(Object value) {\n Type inputType = 
TypeChecker.getType(value);\n switch (inputType.getTag()) {\n case TypeTags.BYTE_TAG:\n case TypeTags.INT_TAG:\n return true;\n case TypeTags.FLOAT_TAG:\n return isFloatWithinIntRange((double) value);\n case TypeTags.DECIMAL_TAG:\n return isDecimalWithinIntRange((DecimalValue) value);\n default:\n return false;\n }\n }\n\n static boolean isConvertibleToIntSubType(Object value, Type targetType) {\n Type inputType = TypeChecker.getType(value);\n long val;\n switch (inputType.getTag()) {\n case TypeTags.BYTE_TAG:\n case TypeTags.INT_TAG:\n val = ((Number) value).longValue();\n break;\n case TypeTags.FLOAT_TAG:\n if (!isFloatWithinIntRange((Double) value)) {\n return false;\n }\n val = floatToInt((Double) value);\n break;\n case TypeTags.DECIMAL_TAG:\n if (!isDecimalWithinIntRange((DecimalValue) value)) {\n return false;\n }\n val = ((DecimalValue) value).value().intValue();\n break;\n default:\n return false;\n }\n switch (targetType.getTag()) {\n case TypeTags.SIGNED32_INT_TAG:\n return TypeChecker.isSigned32LiteralValue(val);\n case TypeTags.SIGNED16_INT_TAG:\n return TypeChecker.isSigned16LiteralValue(val);\n case TypeTags.SIGNED8_INT_TAG:\n return TypeChecker.isSigned8LiteralValue(val);\n case TypeTags.UNSIGNED32_INT_TAG:\n return TypeChecker.isUnsigned32LiteralValue(val);\n case TypeTags.UNSIGNED16_INT_TAG:\n return TypeChecker.isUnsigned16LiteralValue(val);\n case TypeTags.UNSIGNED8_INT_TAG:\n return TypeChecker.isUnsigned8LiteralValue(val);\n }\n return false;\n }\n\n static boolean isConvertibleToChar(Object value) {\n Type inputType = TypeChecker.getType(value);\n if (inputType.getTag() == TypeTags.STRING_TAG) {\n return isCharLiteralValue(value);\n }\n return false;\n }\n\n static boolean isConvertibleToFloatingPointTypes(Object value) {\n Type inputType = TypeChecker.getType(value);\n switch (inputType.getTag()) {\n case TypeTags.BYTE_TAG:\n case TypeTags.INT_TAG:\n case TypeTags.FLOAT_TAG:\n case TypeTags.DECIMAL_TAG:\n return true;\n default:\n return 
false;\n }\n }\n\n \n public static Set getConvertibleTypes(Object inputValue, Type targetType) {\n return getConvertibleTypes(inputValue, targetType, new ArrayList<>());\n }\n\n public static Set getConvertibleTypes(Object inputValue, Type targetType,\n List unresolvedValues) {\n Set convertibleTypes = new LinkedHashSet<>();\n\n int targetTypeTag = targetType.getTag();\n\n switch (targetTypeTag) {\n case TypeTags.UNION_TAG:\n for (Type memType : ((BUnionType) targetType).getMemberTypes()) {\n if (TypeChecker.getType(inputValue) == memType) {\n return Set.of(memType);\n }\n convertibleTypes.addAll(getConvertibleTypes(inputValue, memType, unresolvedValues));\n }\n break;\n case TypeTags.ARRAY_TAG:\n if (isConvertibleToArrayType(inputValue, (BArrayType) targetType, unresolvedValues)) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.RECORD_TYPE_TAG:\n if (isConvertibleToRecordType(inputValue, (BRecordType) targetType, false, unresolvedValues)) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.ANYDATA_TAG:\n Type matchingType = TypeConverter.resolveMatchingTypeForUnion(inputValue, targetType);\n if (matchingType != null) {\n convertibleTypes.add(matchingType);\n }\n break;\n case TypeTags.MAP_TAG:\n if (isConvertibleToMapType(inputValue, (BMapType) targetType, unresolvedValues)) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.TABLE_TAG:\n if (isConvertibleToTableType(((BTableType) targetType).getConstrainedType())) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.INTERSECTION_TAG:\n Type effectiveType = ((BIntersectionType) targetType).getEffectiveType();\n convertibleTypes.addAll(getConvertibleTypes(inputValue, effectiveType, unresolvedValues));\n break;\n case TypeTags.FINITE_TYPE_TAG:\n for (Object valueSpaceItem : ((BFiniteType) targetType).valueSpace) {\n Type inputValueType = TypeChecker.getType(inputValue);\n if (inputValue == valueSpaceItem) {\n return Set.of(inputValueType);\n }\n if 
(TypeChecker.isFiniteTypeValue(inputValue, inputValueType, valueSpaceItem)) {\n convertibleTypes.add(TypeChecker.getType(valueSpaceItem));\n }\n }\n break;\n default:\n if (TypeChecker.checkIsLikeType(inputValue, targetType, true)) {\n convertibleTypes.add(targetType);\n }\n }\n return convertibleTypes;\n }\n\n public static List getConvertibleTypesFromJson(Object value, Type targetType,\n List unresolvedValues) {\n\n int targetTypeTag = targetType.getTag();\n\n List convertibleTypes = new ArrayList<>(TypeConverter.getConvertibleTypes(value, targetType));\n\n if (convertibleTypes.size() == 0) {\n switch (targetTypeTag) {\n case TypeTags.RECORD_TYPE_TAG:\n if (isConvertibleToRecordType(value, (BRecordType) targetType, true, unresolvedValues)) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.TABLE_TAG:\n if (isConvertibleToTableType(((BTableType) targetType).getConstrainedType())) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.XML_TAG:\n case TypeTags.XML_ELEMENT_TAG:\n case TypeTags.XML_COMMENT_TAG:\n case TypeTags.XML_PI_TAG:\n case TypeTags.XML_TEXT_TAG:\n if (TypeChecker.getType(value).getTag() == TypeTags.STRING_TAG) {\n convertibleTypes.add(targetType);\n }\n break;\n case TypeTags.INTERSECTION_TAG:\n return getConvertibleTypesFromJson(value, ((BIntersectionType) targetType).getEffectiveType(),\n unresolvedValues);\n }\n }\n return convertibleTypes;\n }\n\n private static boolean isConvertibleToRecordType(Object sourceValue, BRecordType targetType, boolean isFromJson,\n List unresolvedValues) {\n if (!(sourceValue instanceof MapValueImpl)) {\n return false;\n }\n\n TypeValuePair typeValuePair = new TypeValuePair(sourceValue, targetType);\n if (unresolvedValues.contains(typeValuePair)) {\n return true;\n }\n unresolvedValues.add(typeValuePair);\n\n Map targetFieldTypes = new HashMap<>();\n Type restFieldType = targetType.restFieldType;\n\n for (Map.Entry field : targetType.getFields().entrySet()) {\n 
targetFieldTypes.put(field.getKey(), field.getValue().getFieldType());\n }\n\n MapValueImpl sourceMapValueImpl = (MapValueImpl) sourceValue;\n for (Map.Entry targetTypeEntry : targetFieldTypes.entrySet()) {\n String fieldName = targetTypeEntry.getKey().toString();\n\n if (sourceMapValueImpl.containsKey(StringUtils.fromString(fieldName))) {\n continue;\n }\n Field targetField = targetType.getFields().get(fieldName);\n if (SymbolFlags.isFlagOn(targetField.getFlags(), SymbolFlags.REQUIRED)) {\n return false;\n }\n }\n\n for (Object object : sourceMapValueImpl.entrySet()) {\n Map.Entry valueEntry = (Map.Entry) object;\n String fieldName = valueEntry.getKey().toString();\n\n if (isFromJson) {\n if (targetFieldTypes.containsKey(fieldName)) {\n if (getConvertibleTypesFromJson(valueEntry.getValue(), targetFieldTypes.get(fieldName),\n unresolvedValues).size() != 1) {\n return false;\n }\n } else if (!targetType.sealed) {\n if (getConvertibleTypesFromJson(valueEntry.getValue(), restFieldType,\n unresolvedValues).size() != 1) {\n return false;\n }\n } else {\n return false;\n }\n } else {\n if (targetFieldTypes.containsKey(fieldName)) {\n if (getConvertibleTypes(valueEntry.getValue(), targetFieldTypes.get(fieldName),\n unresolvedValues).size() != 1) {\n return false;\n }\n } else if (!targetType.sealed) {\n if (getConvertibleTypes(valueEntry.getValue(), restFieldType, unresolvedValues).size() != 1) {\n return false;\n }\n } else {\n return false;\n }\n }\n }\n return true;\n }\n\n private static boolean isConvertibleToTableType(Type tableConstrainedType) {\n switch (tableConstrainedType.getTag()) {\n case TypeTags.RECORD_TYPE_TAG:\n case TypeTags.MAP_TAG:\n return true;\n case TypeTags.INTERSECTION_TAG:\n return isConvertibleToTableType(((BIntersectionType) tableConstrainedType).getEffectiveType());\n }\n return false;\n }\n\n private static boolean isConvertibleToMapType(Object sourceValue, BMapType targetType,\n List unresolvedValues) {\n if (!(sourceValue instanceof 
MapValueImpl)) {\n return false;\n }\n for (Object mapEntry : ((MapValueImpl) sourceValue).values()) {\n if (getConvertibleTypes(mapEntry, targetType.getConstrainedType(), unresolvedValues).size() != 1) {\n return false;\n }\n }\n return true;\n }\n\n private static boolean isConvertibleToArrayType(Object sourceValue, BArrayType targetType,\n List unresolvedValues) {\n if (!(sourceValue instanceof ArrayValue)) {\n return false;\n }\n ArrayValue source = (ArrayValue) sourceValue;\n Type targetTypeElementType = targetType.getElementType();\n if (source.getType().getTag() == TypeTags.ARRAY_TAG) {\n Type sourceElementType = ((BArrayType) source.getType()).getElementType();\n if (isNumericType(sourceElementType) && isNumericType(targetTypeElementType)) {\n return true;\n }\n }\n Set convertibleTypes;\n for (int i = 0; i < source.size(); i++) {\n convertibleTypes = getConvertibleTypes(source.get(i), targetTypeElementType, unresolvedValues);\n if (convertibleTypes.isEmpty()) {\n return false;\n }\n if (convertibleTypes.size() != 1 && !convertibleTypes.contains(TypeChecker.getType(source.get(i)))\n && !hasIntegerSubTypes(convertibleTypes)) {\n return false;\n }\n }\n return true;\n }\n\n \n\n static long anyToInt(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return (Long) sourceVal;\n } else if (sourceVal instanceof Double) {\n return floatToInt((double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return ((Integer) sourceVal).longValue();\n } else if (sourceVal instanceof Boolean) {\n return (Boolean) sourceVal ? 
1 : 0;\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).intValue();\n } else if (sourceVal instanceof String) {\n try {\n return Long.parseLong((String) sourceVal);\n } catch (NumberFormatException e) {\n throw errorFunc.get();\n }\n }\n\n throw errorFunc.get();\n }\n\n static long anyToIntCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return (Long) sourceVal;\n } else if (sourceVal instanceof Double) {\n return floatToInt((double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return ((Integer) sourceVal).longValue();\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).intValue();\n } else {\n throw errorFunc.get();\n }\n }\n\n static long anyToIntSubTypeCast(Object sourceVal, Type type, Supplier errorFunc) {\n long value = anyToIntCast(sourceVal, errorFunc);\n if (type == PredefinedTypes.TYPE_INT_SIGNED_32 && isSigned32LiteralValue(value)) {\n return value;\n } else if (type == PredefinedTypes.TYPE_INT_SIGNED_16 && isSigned16LiteralValue(value)) {\n return value;\n } else if (type == PredefinedTypes.TYPE_INT_SIGNED_8 && isSigned8LiteralValue(value)) {\n return value;\n } else if (type == PredefinedTypes.TYPE_INT_UNSIGNED_32 && isUnsigned32LiteralValue(value)) {\n return value;\n } else if (type == PredefinedTypes.TYPE_INT_UNSIGNED_16 && isUnsigned16LiteralValue(value)) {\n return value;\n } else if (type == PredefinedTypes.TYPE_INT_UNSIGNED_8 && isUnsigned8LiteralValue(value)) {\n return value;\n }\n throw errorFunc.get();\n }\n\n static double anyToFloat(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return ((Long) sourceVal).doubleValue();\n } else if (sourceVal instanceof Double) {\n return (Double) sourceVal;\n } else if (sourceVal instanceof Integer) {\n return ((Integer) sourceVal).floatValue();\n } else if (sourceVal instanceof Boolean) {\n return (Boolean) sourceVal ? 
1.0 : 0.0;\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).floatValue();\n } else if (sourceVal instanceof String) {\n try {\n return Double.parseDouble((String) sourceVal);\n } catch (NumberFormatException e) {\n throw errorFunc.get();\n }\n }\n\n throw errorFunc.get();\n }\n\n static double anyToFloatCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return ((Long) sourceVal).doubleValue();\n } else if (sourceVal instanceof Double) {\n return (Double) sourceVal;\n } else if (sourceVal instanceof Integer) {\n return ((Integer) sourceVal).floatValue();\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).floatValue();\n } else {\n throw errorFunc.get();\n }\n }\n\n static boolean anyToBoolean(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return (long) sourceVal != 0;\n } else if (sourceVal instanceof Double) {\n return (Double) sourceVal != 0.0;\n } else if (sourceVal instanceof Integer) {\n return (int) sourceVal != 0;\n } else if (sourceVal instanceof Boolean) {\n return (boolean) sourceVal;\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).booleanValue();\n } else if (sourceVal instanceof String) {\n try {\n return Boolean.parseBoolean((String) sourceVal);\n } catch (NumberFormatException e) {\n throw errorFunc.get();\n }\n }\n\n throw errorFunc.get();\n }\n\n static boolean anyToBooleanCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Boolean) {\n return (boolean) sourceVal;\n }\n\n throw errorFunc.get();\n }\n\n public static int intToByte(long sourceVal) {\n if (!TypeChecker.isByteLiteral(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_BYTE);\n }\n return ((Long) sourceVal).intValue();\n }\n\n public static long intToSigned32(long sourceVal) {\n if (!TypeChecker.isSigned32LiteralValue(sourceVal)) {\n throw 
ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_SIGNED_32);\n }\n return sourceVal;\n }\n\n public static long intToSigned16(long sourceVal) {\n if (!isSigned16LiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_SIGNED_16);\n }\n return sourceVal;\n }\n\n public static long intToSigned8(long sourceVal) {\n if (!isSigned8LiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_SIGNED_8);\n }\n return sourceVal;\n }\n\n public static long intToUnsigned32(long sourceVal) {\n if (!isUnsigned32LiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_UNSIGNED_32);\n }\n return sourceVal;\n }\n\n public static long intToUnsigned16(long sourceVal) {\n if (!isUnsigned16LiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_UNSIGNED_16);\n }\n return sourceVal;\n }\n\n public static long intToUnsigned8(long sourceVal) {\n if (!isUnsigned8LiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT_UNSIGNED_8);\n }\n return sourceVal;\n }\n\n public static long floatToSigned32(double sourceVal) {\n return intToSigned32(floatToInt(sourceVal));\n }\n\n public static long floatToSigned16(double sourceVal) {\n return intToSigned16(floatToInt(sourceVal));\n }\n\n public static long floatToSigned8(double sourceVal) {\n return intToSigned8(floatToInt(sourceVal));\n }\n\n public static long floatToUnsigned32(double sourceVal) {\n return intToUnsigned32(floatToInt(sourceVal));\n }\n\n public static long floatToUnsigned16(double sourceVal) {\n return intToUnsigned16(floatToInt(sourceVal));\n }\n\n public static long floatToUnsigned8(double sourceVal) {\n return intToUnsigned8(floatToInt(sourceVal));\n }\n\n public static BString stringToChar(Object sourceVal) {\n if 
(!isCharLiteralValue(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_STRING_CHAR);\n }\n return StringUtils.fromString(Objects.toString(sourceVal));\n }\n\n public static Long stringToInt(String value) throws NumberFormatException {\n return Long.parseLong(value);\n }\n\n public static int stringToByte(String value) throws NumberFormatException, BError {\n int byteValue = Integer.parseInt(value);\n return intToByte(byteValue);\n }\n\n public static Double stringToFloat(String value) throws NumberFormatException {\n return Double.parseDouble(value);\n }\n\n public static Boolean stringToBoolean(String value) throws NumberFormatException {\n if (\"true\".equalsIgnoreCase(value) || \"1\".equalsIgnoreCase(value)) {\n return true;\n } else if (\"false\".equalsIgnoreCase(value) || \"0\".equalsIgnoreCase(value)) {\n return false;\n }\n throw new NumberFormatException();\n }\n\n public static BDecimal stringToDecimal(String value) throws NumberFormatException {\n return new DecimalValue(value);\n }\n\n public static BXml stringToXml(String value) throws BError {\n StringBuilder sb = new StringBuilder();\n sb.append(\"\").append(value).append(\"\");\n BXml item = XmlUtils.parse(sb.toString());\n return item.children();\n }\n\n public static BString anyToChar(Object sourceVal) {\n String value = Objects.toString(sourceVal);\n return stringToChar(value);\n }\n\n\n public static int floatToByte(double sourceVal) {\n checkIsValidFloat(sourceVal, PredefinedTypes.TYPE_BYTE);\n\n long intVal = Math.round(sourceVal);\n if (!TypeChecker.isByteLiteral(intVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_BYTE);\n }\n\n return (int) intVal;\n }\n\n public static long floatToInt(double sourceVal) {\n checkIsValidFloat(sourceVal, PredefinedTypes.TYPE_INT);\n\n if (!isFloatWithinIntRange(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT);\n }\n\n return 
(long) Math.rint(sourceVal);\n }\n\n private static void checkIsValidFloat(double sourceVal, Type targetType) {\n if (Double.isNaN(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(NaN, PredefinedTypes.TYPE_FLOAT, targetType);\n }\n\n if (Double.isInfinite(sourceVal)) {\n String value = sourceVal > 0 ? POSITIVE_INFINITY : NEGATIVE_INFINITY;\n throw ErrorUtils.createNumericConversionError(value, PredefinedTypes.TYPE_FLOAT, targetType);\n }\n }\n\n static int anyToByte(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return intToByte((Long) sourceVal);\n } else if (sourceVal instanceof Double) {\n return floatToByte((Double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return (int) sourceVal;\n } else if (sourceVal instanceof Boolean) {\n return ((Boolean) sourceVal ? 1 : 0);\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).byteValue();\n } else if (sourceVal instanceof String) {\n try {\n return Integer.parseInt((String) sourceVal);\n } catch (NumberFormatException e) {\n throw errorFunc.get();\n }\n }\n\n throw errorFunc.get();\n }\n\n static int anyToByteCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return intToByte((Long) sourceVal);\n } else if (sourceVal instanceof Byte) {\n return ((Byte) sourceVal).intValue();\n } else if (sourceVal instanceof Double) {\n return floatToByte((Double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return (int) sourceVal;\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).byteValue();\n } else {\n throw errorFunc.get();\n }\n\n }\n\n private static String anyToString(Object sourceVal) {\n if (sourceVal instanceof Long) {\n return Long.toString((Long) sourceVal);\n } else if (sourceVal instanceof Double) {\n return Double.toString((Double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return Long.toString((Integer) sourceVal);\n } else if 
(sourceVal instanceof Boolean) {\n return Boolean.toString((Boolean) sourceVal);\n } else if (sourceVal instanceof DecimalValue) {\n return ((DecimalValue) sourceVal).stringValue(null);\n } else if (sourceVal instanceof String) {\n return (String) sourceVal;\n } else if (sourceVal == null) {\n return \"()\";\n }\n\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_STRING);\n }\n\n private static String anyToStringCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof String) {\n return (String) sourceVal;\n }\n\n throw errorFunc.get();\n }\n\n static DecimalValue anyToDecimal(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return DecimalValue.valueOf((Long) sourceVal);\n } else if (sourceVal instanceof Double) {\n return DecimalValue.valueOf((Double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return DecimalValue.valueOf((Integer) sourceVal);\n } else if (sourceVal instanceof Boolean) {\n return DecimalValue.valueOf((Boolean) sourceVal);\n } else if (sourceVal instanceof DecimalValue) {\n return (DecimalValue) sourceVal;\n }\n throw errorFunc.get();\n }\n\n static DecimalValue anyToDecimalCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return DecimalValue.valueOf((Long) sourceVal);\n } else if (sourceVal instanceof Double) {\n return DecimalValue.valueOf((Double) sourceVal);\n } else if (sourceVal instanceof Integer) {\n return DecimalValue.valueOf((Integer) sourceVal);\n } else if (sourceVal instanceof DecimalValue) {\n return (DecimalValue) sourceVal;\n } else if (sourceVal instanceof String) {\n return new DecimalValue((String) sourceVal);\n }\n throw errorFunc.get();\n }\n\n \n\n static byte anyToJByteCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Byte) {\n return (Byte) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static char anyToJCharCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal 
instanceof Character) {\n return (Character) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static short anyToJShortCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Short) {\n return (Short) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static int anyToJIntCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Integer) {\n return (Integer) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static long anyToJLongCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Long) {\n return (Long) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static float anyToJFloatCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Float) {\n return (Float) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static double anyToJDoubleCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Double) {\n return (Double) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n static boolean anyToJBooleanCast(Object sourceVal, Supplier errorFunc) {\n if (sourceVal instanceof Boolean) {\n return (Boolean) sourceVal;\n } else {\n throw errorFunc.get();\n }\n }\n\n public static long jFloatToBInt(float sourceVal) {\n checkIsValidFloat(sourceVal, PredefinedTypes.TYPE_INT);\n\n if (!isFloatWithinIntRange(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT);\n }\n\n return (long) Math.rint(sourceVal);\n }\n\n public static long jDoubleToBInt(double sourceVal) {\n checkIsValidFloat(sourceVal, PredefinedTypes.TYPE_INT);\n\n if (!isFloatWithinIntRange(sourceVal)) {\n throw ErrorUtils.createNumericConversionError(sourceVal, PredefinedTypes.TYPE_INT);\n }\n\n return (long) Math.rint(sourceVal);\n }\n\n\n private static boolean isFloatWithinIntRange(double doubleValue) {\n return doubleValue < BINT_MAX_VALUE_DOUBLE_RANGE_MAX && doubleValue > BINT_MIN_VALUE_DOUBLE_RANGE_MIN;\n }\n\n public static Type 
resolveMatchingTypeForUnion(Object value, Type type) {\n if (value instanceof ArrayValue && ((ArrayValue) value).getType().getTag() == TypeTags.ARRAY_TAG &&\n !isDeepConversionRequiredForArray(((ArrayValue) value).getType())) {\n return TypeCreator.createArrayType(type);\n }\n\n if (value instanceof MapValue && ((MapValue) value).getType().getTag() == TypeTags.MAP_TAG &&\n !isDeepConversionRequiredForMap(((MapValue) value).getType())) {\n return TypeCreator.createMapType(type);\n }\n\n if (value == null && type.isNilable()) {\n return PredefinedTypes.TYPE_NULL;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_INT)) {\n return PredefinedTypes.TYPE_INT;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_FLOAT)) {\n return PredefinedTypes.TYPE_FLOAT;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_STRING)) {\n return PredefinedTypes.TYPE_STRING;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_BOOLEAN)) {\n return PredefinedTypes.TYPE_BOOLEAN;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_BYTE)) {\n return PredefinedTypes.TYPE_BYTE;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_DECIMAL)) {\n return PredefinedTypes.TYPE_DECIMAL;\n }\n\n Type anydataArrayType = new BArrayType(type);\n if (checkIsLikeType(value, anydataArrayType)) {\n return anydataArrayType;\n }\n\n if (checkIsLikeType(value, PredefinedTypes.TYPE_XML)) {\n return PredefinedTypes.TYPE_XML;\n }\n\n Type anydataMapType = new BMapType(type);\n if (checkIsLikeType(value, anydataMapType)) {\n return anydataMapType;\n }\n\n \n return null;\n }\n\n private static boolean isDeepConversionRequiredForArray(Type sourceType) {\n Type elementType = ((BArrayType) sourceType).getElementType();\n\n if (elementType != null) {\n if (TypeUtils.isValueType(elementType)) {\n return false;\n } else if (elementType instanceof BArrayType) {\n return isDeepConversionRequiredForArray(elementType);\n }\n return true;\n }\n return true;\n }\n\n private static boolean 
isDeepConversionRequiredForMap(Type sourceType) {\n Type constrainedType = ((BMapType) sourceType).getConstrainedType();\n\n if (constrainedType != null) {\n if (TypeUtils.isValueType(constrainedType)) {\n return false;\n } else if (constrainedType instanceof BMapType) {\n return isDeepConversionRequiredForMap(constrainedType);\n }\n return true;\n }\n return true;\n }\n\n}" }, { "comment": "It would submit a thread to do this job", "method_body": "public AnalysisTaskExecutor(int simultaneouslyRunningTaskNum) {\n if (!Env.isCheckpointThread()) {\n executors = ThreadPoolManager.newDaemonThreadPool(\n simultaneouslyRunningTaskNum,\n simultaneouslyRunningTaskNum, 0,\n TimeUnit.DAYS, new LinkedBlockingQueue<>(),\n new BlockedPolicy(\"Analysis Job Executor\", Integer.MAX_VALUE),\n \"Analysis Job Executor\", true);\n cancelExpiredTask();\n } else {\n executors = null;\n }\n }", "target_code": "cancelExpiredTask();", "method_body_after": "public AnalysisTaskExecutor(int simultaneouslyRunningTaskNum) {\n if (!Env.isCheckpointThread()) {\n executors = ThreadPoolManager.newDaemonThreadPool(\n simultaneouslyRunningTaskNum,\n simultaneouslyRunningTaskNum, 0,\n TimeUnit.DAYS, new LinkedBlockingQueue<>(),\n new BlockedPolicy(\"Analysis Job Executor\", Integer.MAX_VALUE),\n \"Analysis Job Executor\", true);\n cancelExpiredTask();\n } else {\n executors = null;\n }\n }", "context_before": "class AnalysisTaskExecutor {\n\n private static final Logger LOG = LogManager.getLogger(AnalysisTaskExecutor.class);\n\n protected final ThreadPoolExecutor executors;\n\n private final BlockingQueue taskQueue =\n new PriorityBlockingQueue(20,\n Comparator.comparingLong(AnalysisTaskWrapper::getStartTime));\n\n \n\n private void cancelExpiredTask() {\n String name = \"Expired Analysis Task Killer\";\n Thread t = new Thread(this::doCancelExpiredJob, name);\n t.setDaemon(true);\n t.start();\n }\n\n private void doCancelExpiredJob() {\n for (;;) {\n tryToCancel();\n }\n }\n\n protected void 
tryToCancel() {\n try {\n AnalysisTaskWrapper taskWrapper = taskQueue.take();\n try {\n long timeout = TimeUnit.SECONDS.toMillis(StatisticsUtil.getAnalyzeTimeout())\n - (System.currentTimeMillis() - taskWrapper.getStartTime());\n taskWrapper.get(timeout < 0 ? 0 : timeout, TimeUnit.MILLISECONDS);\n } catch (Exception e) {\n taskWrapper.cancel(e.getMessage());\n }\n } catch (Throwable throwable) {\n LOG.warn(\"cancel analysis task failed\", throwable);\n }\n }\n\n public void submitTask(BaseAnalysisTask task) {\n AnalysisTaskWrapper taskWrapper = new AnalysisTaskWrapper(this, task);\n executors.submit(taskWrapper);\n }\n\n public void putJob(AnalysisTaskWrapper wrapper) throws Exception {\n taskQueue.put(wrapper);\n }\n\n public boolean idle() {\n return executors.getQueue().isEmpty();\n }\n\n public void clear() {\n executors.getQueue().clear();\n taskQueue.clear();\n }\n}", "context_after": "class AnalysisTaskExecutor {\n\n private static final Logger LOG = LogManager.getLogger(AnalysisTaskExecutor.class);\n\n protected final ThreadPoolExecutor executors;\n\n private final BlockingQueue taskQueue =\n new PriorityBlockingQueue(20,\n Comparator.comparingLong(AnalysisTaskWrapper::getStartTime));\n\n \n\n private void cancelExpiredTask() {\n String name = \"Expired Analysis Task Killer\";\n Thread t = new Thread(this::doCancelExpiredJob, name);\n t.setDaemon(true);\n t.start();\n }\n\n private void doCancelExpiredJob() {\n for (;;) {\n tryToCancel();\n }\n }\n\n protected void tryToCancel() {\n try {\n AnalysisTaskWrapper taskWrapper = taskQueue.take();\n try {\n long timeout = TimeUnit.SECONDS.toMillis(StatisticsUtil.getAnalyzeTimeout())\n - (System.currentTimeMillis() - taskWrapper.getStartTime());\n taskWrapper.get(timeout < 0 ? 
0 : timeout, TimeUnit.MILLISECONDS);\n } catch (Exception e) {\n taskWrapper.cancel(e.getMessage());\n }\n } catch (Throwable throwable) {\n LOG.warn(\"cancel analysis task failed\", throwable);\n }\n }\n\n public void submitTask(BaseAnalysisTask task) {\n AnalysisTaskWrapper taskWrapper = new AnalysisTaskWrapper(this, task);\n executors.submit(taskWrapper);\n }\n\n public void putJob(AnalysisTaskWrapper wrapper) throws Exception {\n taskQueue.put(wrapper);\n }\n\n public boolean idle() {\n return executors.getQueue().isEmpty();\n }\n\n public void clear() {\n executors.getQueue().clear();\n taskQueue.clear();\n }\n}" }, { "comment": "Shall we make it as single line statement?", "method_body": "public WebSocketService(Service service) {\n this.service = service;\n for (Resource resource : service.getResources()) {\n resourceMap.put(resource.getName(), resource);\n }\n\n Annotation configAnnotation =\n WebSocketUtil.getServiceConfigAnnotation(service);\n\n Struct configAnnotationStruct = null;\n if (configAnnotation != null && (configAnnotationStruct = configAnnotation.getValue()) != null) {\n negotiableSubProtocols = findNegotiableSubProtocols(configAnnotationStruct);\n idleTimeoutInSeconds = findIdleTimeoutInSeconds(configAnnotationStruct);\n maxFrameSize = findMaxFrameSize(configAnnotationStruct);\n }\n if (WebSocketConstants.WEBSOCKET_ENDPOINT_NAME.equals(service.getEndpointName())) {\n basePath = findFullWebSocketUpgradePath(configAnnotationStruct);\n }\n\n }", "target_code": "Annotation configAnnotation =", "method_body_after": "public WebSocketService(Service service) {\n this.service = service;\n for (Resource resource : service.getResources()) {\n resourceMap.put(resource.getName(), resource);\n }\n\n Annotation configAnnotation = WebSocketUtil.getServiceConfigAnnotation(service);\n\n Struct configAnnotationStruct = null;\n if (configAnnotation != null && (configAnnotationStruct = configAnnotation.getValue()) != null) {\n negotiableSubProtocols = 
findNegotiableSubProtocols(configAnnotationStruct);\n idleTimeoutInSeconds = findIdleTimeoutInSeconds(configAnnotationStruct);\n maxFrameSize = findMaxFrameSize(configAnnotationStruct);\n }\n if (WebSocketConstants.WEBSOCKET_ENDPOINT_NAME.equals(service.getEndpointName())) {\n basePath = findFullWebSocketUpgradePath(configAnnotationStruct);\n }\n\n }", "context_before": "class WebSocketService {\n\n private final Service service;\n private String[] negotiableSubProtocols = null;\n private int idleTimeoutInSeconds = 0;\n private final Map resourceMap = new ConcurrentHashMap<>();\n private String basePath;\n private HttpResource upgradeResource;\n private static final int DEFAULT_MAX_FRAME_SIZE = 65536;\n private int maxFrameSize = DEFAULT_MAX_FRAME_SIZE;\n\n public WebSocketService() {\n service = null;\n }\n\n \n\n public WebSocketService(String httpBasePath, HttpResource upgradeResource, Service service) {\n this(service);\n Annotation resourceConfigAnnotation = HttpResource.getResourceConfigAnnotation(\n upgradeResource.getBalResource());\n if (resourceConfigAnnotation == null) {\n throw new BallerinaException(\"Cannot find a resource config for resource \" + upgradeResource.getName());\n }\n Struct webSocketConfig =\n resourceConfigAnnotation.getValue().getStructField(HttpConstants.ANN_CONFIG_ATTR_WEBSOCKET_UPGRADE);\n String upgradePath = webSocketConfig.getStringField(HttpConstants.ANN_WEBSOCKET_ATTR_UPGRADE_PATH);\n this.basePath = httpBasePath.concat(upgradePath);\n this.upgradeResource = upgradeResource;\n }\n\n public String getName() {\n return service != null ? service.getName() : null;\n }\n\n public ServiceInfo getServiceInfo() {\n return service != null ? 
service.getServiceInfo() : null;\n }\n\n public Resource getResourceByName(String resourceName) {\n return resourceMap.get(resourceName);\n }\n\n public String[] getNegotiableSubProtocols() {\n return negotiableSubProtocols;\n }\n\n public HttpResource getUpgradeResource() {\n return upgradeResource;\n }\n\n public int getIdleTimeoutInSeconds() {\n return idleTimeoutInSeconds;\n }\n\n public int getMaxFrameSize() {\n return maxFrameSize;\n }\n\n private String[] findNegotiableSubProtocols(Struct annAttrSubProtocols) {\n Value[] subProtocolsInAnnotation = annAttrSubProtocols.getArrayField(\n WebSocketConstants.ANNOTATION_ATTR_SUB_PROTOCOLS);\n\n if (subProtocolsInAnnotation == null) {\n return new String[0];\n }\n\n String[] subProtoCols = new String[subProtocolsInAnnotation.length];\n for (int i = 0; i < subProtocolsInAnnotation.length; i++) {\n subProtoCols[i] = subProtocolsInAnnotation[i].getStringValue();\n }\n return subProtoCols;\n }\n\n private int findIdleTimeoutInSeconds(Struct annAttrIdleTimeout) {\n return (int) annAttrIdleTimeout.getIntField(WebSocketConstants.ANNOTATION_ATTR_IDLE_TIMEOUT);\n }\n\n private int findMaxFrameSize(Struct annotation) {\n int size = (int) annotation.getIntField(WebSocketConstants.ANNOTATION_ATTR_MAX_FRAME_SIZE);\n if (size <= 0) {\n size = DEFAULT_MAX_FRAME_SIZE;\n }\n return size;\n }\n\n public String getBasePath() {\n return basePath;\n }\n\n /**\n * Find the Full path for WebSocket upgrade.\n *\n * @return the full path of the WebSocket upgrade.\n */\n private String findFullWebSocketUpgradePath(Struct annStruct) {\n String path = null;\n if (annStruct != null) {\n String basePathVal = annStruct.getStringField(WebSocketConstants.ANNOTATION_ATTR_PATH);\n if (!basePathVal.trim().isEmpty()) {\n path = HttpUtil.sanitizeBasePath(basePathVal);\n }\n }\n if (path == null) {\n path = \"/\".concat(getName());\n }\n return path;\n }\n}", "context_after": "class WebSocketService {\n\n private final Service service;\n private String[] 
negotiableSubProtocols = null;\n private int idleTimeoutInSeconds = 0;\n private final Map resourceMap = new ConcurrentHashMap<>();\n private String basePath;\n private HttpResource upgradeResource;\n private static final int DEFAULT_MAX_FRAME_SIZE = 65536;\n private int maxFrameSize = DEFAULT_MAX_FRAME_SIZE;\n\n public WebSocketService() {\n service = null;\n }\n\n \n\n public WebSocketService(String httpBasePath, HttpResource upgradeResource, Service service) {\n this(service);\n Annotation resourceConfigAnnotation = HttpResource.getResourceConfigAnnotation(\n upgradeResource.getBalResource());\n if (resourceConfigAnnotation == null) {\n throw new BallerinaException(\"Cannot find a resource config for resource \" + upgradeResource.getName());\n }\n Struct webSocketConfig =\n resourceConfigAnnotation.getValue().getStructField(HttpConstants.ANN_CONFIG_ATTR_WEBSOCKET_UPGRADE);\n String upgradePath = webSocketConfig.getStringField(HttpConstants.ANN_WEBSOCKET_ATTR_UPGRADE_PATH);\n this.basePath = httpBasePath.concat(upgradePath);\n this.upgradeResource = upgradeResource;\n }\n\n public String getName() {\n return service != null ? service.getName() : null;\n }\n\n public ServiceInfo getServiceInfo() {\n return service != null ? 
service.getServiceInfo() : null;\n }\n\n public Resource getResourceByName(String resourceName) {\n return resourceMap.get(resourceName);\n }\n\n public String[] getNegotiableSubProtocols() {\n return negotiableSubProtocols;\n }\n\n public HttpResource getUpgradeResource() {\n return upgradeResource;\n }\n\n public int getIdleTimeoutInSeconds() {\n return idleTimeoutInSeconds;\n }\n\n public int getMaxFrameSize() {\n return maxFrameSize;\n }\n\n private String[] findNegotiableSubProtocols(Struct annAttrSubProtocols) {\n Value[] subProtocolsInAnnotation = annAttrSubProtocols.getArrayField(\n WebSocketConstants.ANNOTATION_ATTR_SUB_PROTOCOLS);\n\n if (subProtocolsInAnnotation == null) {\n return new String[0];\n }\n\n String[] subProtoCols = new String[subProtocolsInAnnotation.length];\n for (int i = 0; i < subProtocolsInAnnotation.length; i++) {\n subProtoCols[i] = subProtocolsInAnnotation[i].getStringValue();\n }\n return subProtoCols;\n }\n\n private int findIdleTimeoutInSeconds(Struct annAttrIdleTimeout) {\n return (int) annAttrIdleTimeout.getIntField(WebSocketConstants.ANNOTATION_ATTR_IDLE_TIMEOUT);\n }\n\n private int findMaxFrameSize(Struct annotation) {\n int size = (int) annotation.getIntField(WebSocketConstants.ANNOTATION_ATTR_MAX_FRAME_SIZE);\n if (size <= 0) {\n size = DEFAULT_MAX_FRAME_SIZE;\n }\n return size;\n }\n\n public String getBasePath() {\n return basePath;\n }\n\n /**\n * Find the Full path for WebSocket upgrade.\n *\n * @return the full path of the WebSocket upgrade.\n */\n private String findFullWebSocketUpgradePath(Struct annStruct) {\n String path = null;\n if (annStruct != null) {\n String basePathVal = annStruct.getStringField(WebSocketConstants.ANNOTATION_ATTR_PATH);\n if (!basePathVal.trim().isEmpty()) {\n path = HttpUtil.sanitizeBasePath(basePathVal);\n }\n }\n if (path == null) {\n path = \"/\".concat(getName());\n }\n return path;\n }\n}" }, { "comment": "Can we move these operations to the outside of the database lock?", 
"method_body": "private boolean syncTableMetaInternal(Database db, OlapTable table, boolean forceDeleteData) throws DdlException {\n StarOSAgent starOSAgent = GlobalStateMgr.getCurrentStarOSAgent();\n HashMap> redundantGroupToShards = new HashMap<>();\n Locker locker = new Locker();\n locker.lockDatabase(db, LockType.READ);\n try {\n if (db.getTable(table.getId()) == null) {\n return false; \n }\n if (table.getState() != OlapTable.OlapTableState.NORMAL) {\n return false; \n }\n\n for (Partition partition : table.getAllPartitions()) {\n for (PhysicalPartition physicalPartition : partition.getSubPartitions()) {\n long groupId = physicalPartition.getShardGroupId();\n List feShardIds = new ArrayList<>();\n for (MaterializedIndex materializedIndex :\n physicalPartition.getMaterializedIndices(MaterializedIndex.IndexExtState.ALL)) {\n for (Tablet tablet : materializedIndex.getTablets()) {\n feShardIds.add(tablet.getId());\n }\n }\n \n List starmgrShardIds = starOSAgent.listShard(groupId);\n starmgrShardIds.removeAll(feShardIds);\n redundantGroupToShards.put(groupId, starmgrShardIds);\n }\n }\n } finally {\n locker.unLockDatabase(db, LockType.READ);\n }\n\n \n Set shardToDelete = new HashSet<>();\n for (Map.Entry> entry : redundantGroupToShards.entrySet()) {\n if (forceDeleteData) {\n try {\n dropTabletAndDeleteShard(entry.getValue(), starOSAgent);\n } catch (Exception e) {\n \n LOG.info(e.getMessage());\n }\n }\n shardToDelete.addAll(entry.getValue());\n }\n\n \n if (!shardToDelete.isEmpty()) {\n starOSAgent.deleteShards(shardToDelete);\n }\n return !shardToDelete.isEmpty();\n }", "target_code": "starmgrShardIds.removeAll(feShardIds);", "method_body_after": "private boolean syncTableMetaInternal(Database db, OlapTable table, boolean forceDeleteData) throws DdlException {\n StarOSAgent starOSAgent = GlobalStateMgr.getCurrentStarOSAgent();\n HashMap> redundantGroupToShards = new HashMap<>();\n List physicalPartitions = new ArrayList<>();\n Locker locker = new Locker();\n 
locker.lockDatabase(db, LockType.READ);\n try {\n if (db.getTable(table.getId()) == null) {\n return false; \n }\n GlobalStateMgr.getCurrentState()\n .getAllPartitionsIncludeRecycleBin(table)\n .stream()\n .map(Partition::getSubPartitions)\n .forEach(physicalPartitions::addAll);\n } finally {\n locker.unLockDatabase(db, LockType.READ);\n }\n\n for (PhysicalPartition physicalPartition : physicalPartitions) {\n locker.lockDatabase(db, LockType.READ);\n try {\n if (table.getState() != OlapTable.OlapTableState.NORMAL) {\n return false; \n }\n \n long groupId = physicalPartition.getShardGroupId();\n List starmgrShardIds = starOSAgent.listShard(groupId);\n Set starmgrShardIdsSet = new HashSet<>(starmgrShardIds);\n for (MaterializedIndex materializedIndex :\n physicalPartition.getMaterializedIndices(MaterializedIndex.IndexExtState.ALL)) {\n for (Tablet tablet : materializedIndex.getTablets()) {\n starmgrShardIdsSet.remove(tablet.getId());\n }\n }\n \n redundantGroupToShards.put(groupId, starmgrShardIdsSet);\n } finally {\n locker.unLockDatabase(db, LockType.READ);\n }\n }\n\n \n Set shardToDelete = new HashSet<>();\n for (Map.Entry> entry : redundantGroupToShards.entrySet()) {\n if (forceDeleteData) {\n try {\n List shardIds = new ArrayList<>();\n shardIds.addAll(entry.getValue());\n dropTabletAndDeleteShard(shardIds, starOSAgent);\n } catch (Exception e) {\n \n LOG.info(e.getMessage());\n }\n }\n shardToDelete.addAll(entry.getValue());\n }\n\n \n if (!shardToDelete.isEmpty()) {\n starOSAgent.deleteShards(shardToDelete);\n }\n return !shardToDelete.isEmpty();\n }", "context_before": "class StarMgrMetaSyncer extends FrontendDaemon {\n private static final Logger LOG = LogManager.getLogger(StarMgrMetaSyncer.class);\n\n public StarMgrMetaSyncer() {\n super(\"StarMgrMetaSyncer\", Config.star_mgr_meta_sync_interval_sec * 1000L);\n }\n\n private List getAllPartitionShardGroupId() {\n List groupIds = new ArrayList<>();\n List dbIds = 
GlobalStateMgr.getCurrentState().getDbIdsIncludeRecycleBin();\n for (Long dbId : dbIds) {\n Database db = GlobalStateMgr.getCurrentState().getDbIncludeRecycleBin(dbId);\n if (db == null) {\n continue;\n }\n if (db.isSystemDatabase()) {\n continue;\n }\n\n Locker locker = new Locker();\n locker.lockDatabase(db, LockType.READ);\n try {\n for (Table table : GlobalStateMgr.getCurrentState().getTablesIncludeRecycleBin(db)) {\n if (table.isCloudNativeTableOrMaterializedView()) {\n GlobalStateMgr.getCurrentState()\n .getAllPartitionsIncludeRecycleBin((OlapTable) table)\n .stream()\n .map(Partition::getSubPartitions)\n .flatMap(p -> p.stream().map(PhysicalPartition::getShardGroupId))\n .forEach(groupIds::add);\n }\n }\n } finally {\n locker.unLockDatabase(db, LockType.READ);\n }\n }\n return groupIds;\n }\n\n public static void dropTabletAndDeleteShard(List shardIds, StarOSAgent starOSAgent) {\n Preconditions.checkNotNull(starOSAgent);\n Map> shardIdsByBeMap = new HashMap<>();\n \n for (long shardId : shardIds) {\n try {\n long backendId = starOSAgent.getPrimaryComputeNodeIdByShard(shardId);\n shardIdsByBeMap.computeIfAbsent(backendId, k -> Sets.newHashSet()).add(shardId);\n } catch (UserException ignored1) {\n \n }\n }\n\n for (Map.Entry> entry : shardIdsByBeMap.entrySet()) {\n long backendId = entry.getKey();\n Set shards = entry.getValue();\n\n \n ComputeNode node = GlobalStateMgr.getCurrentState().getCurrentSystemInfo().getBackendOrComputeNode(backendId);\n if (node == null) {\n continue;\n }\n DeleteTabletRequest request = new DeleteTabletRequest();\n request.tabletIds = Lists.newArrayList(shards);\n\n try {\n LakeService lakeService = BrpcProxy.getLakeService(node.getHost(), node.getBrpcPort());\n DeleteTabletResponse response = lakeService.deleteTablet(request).get();\n if (response != null && response.failedTablets != null && !response.failedTablets.isEmpty()) {\n LOG.info(\"failedTablets is {}\", response.failedTablets);\n 
response.failedTablets.forEach(shards::remove);\n }\n } catch (Throwable e) {\n LOG.error(e);\n if (e instanceof InterruptedException) {\n Thread.currentThread().interrupt();\n }\n continue;\n }\n\n \n try {\n if (!shards.isEmpty()) {\n starOSAgent.deleteShards(shards);\n }\n } catch (DdlException e) {\n LOG.warn(\"failed to delete shard from starMgr\");\n continue;\n }\n }\n }\n\n /**\n * Delete redundant shard & shard group.\n * 1. List shard groups from FE and from StarMgr\n * 2. Compare the list and get a list of shard groups that in StarMgr but not in FE\n * 3. shard groups with empty shards and older than threshold, will be permanently deleted.\n */\n private void deleteUnusedShardAndShardGroup() {\n StarOSAgent starOSAgent = GlobalStateMgr.getCurrentStarOSAgent();\n\n List groupIdFe = getAllPartitionShardGroupId();\n List shardGroupsInfo = starOSAgent.listShardGroup()\n .stream()\n .filter(x -> x.getGroupId() != 0L)\n .collect(Collectors.toList());\n\n if (shardGroupsInfo.isEmpty()) {\n return;\n }\n\n LOG.debug(\"size of groupIdFe is {}, size of shardGroupsInfo is {}\",\n groupIdFe.size(), shardGroupsInfo.size());\n LOG.debug(\"groupIdFe is {}\", groupIdFe);\n\n Map groupToCreateTimeMap = shardGroupsInfo.stream().collect(Collectors.toMap(\n ShardGroupInfo::getGroupId,\n val -> val.getPropertiesMap().get(\"createTime\"),\n (key1, key2) -> key1\n ));\n\n List diffList = shardGroupsInfo.stream()\n .map(ShardGroupInfo::getGroupId)\n .filter(x -> !groupIdFe.contains(x))\n .collect(Collectors.toList());\n LOG.debug(\"diff.size is {}, diff: {}\", diffList.size(), diffList);\n\n \n long nowMs = System.currentTimeMillis();\n List emptyShardGroup = new ArrayList<>();\n for (long groupId : diffList) {\n if (Config.shard_group_clean_threshold_sec * 1000L + Long.parseLong(groupToCreateTimeMap.get(groupId)) < nowMs) {\n try {\n List shardIds = starOSAgent.listShard(groupId);\n if (shardIds.isEmpty()) {\n emptyShardGroup.add(groupId);\n } else {\n 
dropTabletAndDeleteShard(shardIds, starOSAgent);\n }\n } catch (Exception e) {\n continue;\n }\n }\n }\n\n LOG.debug(\"emptyShardGroup.size is {}\", emptyShardGroup.size());\n if (!emptyShardGroup.isEmpty()) {\n starOSAgent.deleteShardGroup(emptyShardGroup);\n }\n }\n\n \n \n public int deleteUnusedWorker() {\n int cnt = 0;\n try {\n List workerAddresses = GlobalStateMgr.getCurrentStarOSAgent().listDefaultWorkerGroupIpPort();\n\n \n List backends = GlobalStateMgr.getCurrentSystemInfo().getBackends();\n for (Backend backend : backends) {\n if (backend.getStarletPort() != 0) {\n String workerAddr = backend.getHost() + \":\" + backend.getStarletPort();\n workerAddresses.remove(workerAddr);\n }\n }\n\n \n List computeNodes = GlobalStateMgr.getCurrentSystemInfo().getComputeNodes();\n for (ComputeNode computeNode : computeNodes) {\n if (computeNode.getStarletPort() != 0) {\n String workerAddr = computeNode.getHost() + \":\" + computeNode.getStarletPort();\n workerAddresses.remove(workerAddr);\n }\n }\n\n for (String unusedWorkerAddress : workerAddresses) {\n GlobalStateMgr.getCurrentStarOSAgent().removeWorker(unusedWorkerAddress);\n LOG.info(\"unused worker {} removed from star mgr\", unusedWorkerAddress);\n cnt++;\n }\n } catch (Exception e) {\n LOG.warn(\"fail to delete unused worker, {}\", e);\n }\n return cnt;\n }\n\n public void syncTableMetaAndColocationInfo() {\n List dbIds = GlobalStateMgr.getCurrentState().getDbIds();\n for (Long dbId : dbIds) {\n Database db = GlobalStateMgr.getCurrentState().getDb(dbId);\n if (db == null) {\n continue;\n }\n if (db.isSystemDatabase()) {\n continue;\n }\n\n List tables = db.getTables();\n for (Table table : tables) {\n if (!table.isCloudNativeTableOrMaterializedView()) {\n continue;\n }\n try {\n syncTableMetaAndColocationInfoInternal(db, (OlapTable) table, true /* forceDeleteData */);\n } catch (Exception e) {\n LOG.info(\"fail to sync table {} meta, {}\", table.getName(), e.getMessage());\n }\n }\n }\n }\n\n \n \n\n private 
void syncTableColocationInfo(Database db, OlapTable table) throws DdlException {\n \n if (!GlobalStateMgr.getCurrentColocateIndex().isLakeColocateTable(table.getId())) {\n return;\n }\n Locker locker = new Locker();\n locker.lockDatabase(db, LockType.WRITE);\n try {\n GlobalStateMgr.getCurrentColocateIndex().updateLakeTableColocationInfo(table, true /* isJoin */,\n null /* expectGroupId */);\n } finally {\n locker.unLockDatabase(db, LockType.WRITE);\n }\n }\n\n \n \n private void syncTableMetaAndColocationInfoInternal(Database db, OlapTable table, boolean forceDeleteData)\n throws DdlException {\n boolean changed = syncTableMetaInternal(db, table, forceDeleteData);\n \n if (changed) {\n syncTableColocationInfo(db, table);\n }\n }\n\n @Override\n protected void runAfterCatalogReady() {\n deleteUnusedShardAndShardGroup();\n deleteUnusedWorker();\n syncTableMetaAndColocationInfo();\n }\n\n public void syncTableMeta(String dbName, String tableName, boolean forceDeleteData) throws DdlException {\n if (!RunMode.isSharedDataMode()) {\n throw new DdlException(\"only shared_data run mode support this operation.\");\n }\n Database db = GlobalStateMgr.getCurrentState().getDb(dbName);\n if (db == null) {\n throw new DdlException(String.format(\"db %s does not exist.\", dbName));\n }\n\n HashMap> feGroupToShards = new HashMap<>();\n Table table = db.getTable(tableName);\n if (table == null) {\n throw new DdlException(String.format(\"table %s does not exist.\", tableName));\n }\n if (!table.isCloudNativeTableOrMaterializedView()) {\n throw new DdlException(\"only support cloud table or cloud mv.\");\n }\n\n syncTableMetaAndColocationInfoInternal(db, (OlapTable) table, forceDeleteData);\n }\n}", "context_after": "class StarMgrMetaSyncer extends FrontendDaemon {\n private static final Logger LOG = LogManager.getLogger(StarMgrMetaSyncer.class);\n\n public StarMgrMetaSyncer() {\n super(\"StarMgrMetaSyncer\", Config.star_mgr_meta_sync_interval_sec * 1000L);\n }\n\n private List 
getAllPartitionShardGroupId() {\n List groupIds = new ArrayList<>();\n List dbIds = GlobalStateMgr.getCurrentState().getDbIdsIncludeRecycleBin();\n for (Long dbId : dbIds) {\n Database db = GlobalStateMgr.getCurrentState().getDbIncludeRecycleBin(dbId);\n if (db == null) {\n continue;\n }\n if (db.isSystemDatabase()) {\n continue;\n }\n\n Locker locker = new Locker();\n locker.lockDatabase(db, LockType.READ);\n try {\n for (Table table : GlobalStateMgr.getCurrentState().getTablesIncludeRecycleBin(db)) {\n if (table.isCloudNativeTableOrMaterializedView()) {\n GlobalStateMgr.getCurrentState()\n .getAllPartitionsIncludeRecycleBin((OlapTable) table)\n .stream()\n .map(Partition::getSubPartitions)\n .flatMap(p -> p.stream().map(PhysicalPartition::getShardGroupId))\n .forEach(groupIds::add);\n }\n }\n } finally {\n locker.unLockDatabase(db, LockType.READ);\n }\n }\n return groupIds;\n }\n\n public static void dropTabletAndDeleteShard(List shardIds, StarOSAgent starOSAgent) {\n Preconditions.checkNotNull(starOSAgent);\n Map> shardIdsByBeMap = new HashMap<>();\n \n for (long shardId : shardIds) {\n try {\n long backendId = starOSAgent.getPrimaryComputeNodeIdByShard(shardId);\n shardIdsByBeMap.computeIfAbsent(backendId, k -> Sets.newHashSet()).add(shardId);\n } catch (UserException ignored1) {\n \n }\n }\n\n for (Map.Entry> entry : shardIdsByBeMap.entrySet()) {\n long backendId = entry.getKey();\n Set shards = entry.getValue();\n\n \n ComputeNode node = GlobalStateMgr.getCurrentState().getCurrentSystemInfo().getBackendOrComputeNode(backendId);\n if (node == null) {\n continue;\n }\n DeleteTabletRequest request = new DeleteTabletRequest();\n request.tabletIds = Lists.newArrayList(shards);\n\n try {\n LakeService lakeService = BrpcProxy.getLakeService(node.getHost(), node.getBrpcPort());\n DeleteTabletResponse response = lakeService.deleteTablet(request).get();\n if (response != null && response.failedTablets != null && !response.failedTablets.isEmpty()) {\n 
LOG.info(\"failedTablets is {}\", response.failedTablets);\n response.failedTablets.forEach(shards::remove);\n }\n } catch (Throwable e) {\n LOG.error(e);\n if (e instanceof InterruptedException) {\n Thread.currentThread().interrupt();\n }\n continue;\n }\n\n \n try {\n if (!shards.isEmpty()) {\n starOSAgent.deleteShards(shards);\n }\n } catch (DdlException e) {\n LOG.warn(\"failed to delete shard from starMgr\");\n continue;\n }\n }\n }\n\n /**\n * Delete redundant shard & shard group.\n * 1. List shard groups from FE and from StarMgr\n * 2. Compare the list and get a list of shard groups that in StarMgr but not in FE\n * 3. shard groups with empty shards and older than threshold, will be permanently deleted.\n */\n private void deleteUnusedShardAndShardGroup() {\n StarOSAgent starOSAgent = GlobalStateMgr.getCurrentStarOSAgent();\n\n List groupIdFe = getAllPartitionShardGroupId();\n List shardGroupsInfo = starOSAgent.listShardGroup()\n .stream()\n .filter(x -> x.getGroupId() != 0L)\n .collect(Collectors.toList());\n\n if (shardGroupsInfo.isEmpty()) {\n return;\n }\n\n LOG.debug(\"size of groupIdFe is {}, size of shardGroupsInfo is {}\",\n groupIdFe.size(), shardGroupsInfo.size());\n LOG.debug(\"groupIdFe is {}\", groupIdFe);\n\n Map groupToCreateTimeMap = shardGroupsInfo.stream().collect(Collectors.toMap(\n ShardGroupInfo::getGroupId,\n val -> val.getPropertiesMap().get(\"createTime\"),\n (key1, key2) -> key1\n ));\n\n List diffList = shardGroupsInfo.stream()\n .map(ShardGroupInfo::getGroupId)\n .filter(x -> !groupIdFe.contains(x))\n .collect(Collectors.toList());\n LOG.debug(\"diff.size is {}, diff: {}\", diffList.size(), diffList);\n\n \n long nowMs = System.currentTimeMillis();\n List emptyShardGroup = new ArrayList<>();\n for (long groupId : diffList) {\n if (Config.shard_group_clean_threshold_sec * 1000L + Long.parseLong(groupToCreateTimeMap.get(groupId)) < nowMs) {\n try {\n List shardIds = starOSAgent.listShard(groupId);\n if (shardIds.isEmpty()) {\n 
emptyShardGroup.add(groupId);\n } else {\n dropTabletAndDeleteShard(shardIds, starOSAgent);\n }\n } catch (Exception e) {\n continue;\n }\n }\n }\n\n LOG.debug(\"emptyShardGroup.size is {}\", emptyShardGroup.size());\n if (!emptyShardGroup.isEmpty()) {\n starOSAgent.deleteShardGroup(emptyShardGroup);\n }\n }\n\n \n \n public int deleteUnusedWorker() {\n int cnt = 0;\n try {\n List workerAddresses = GlobalStateMgr.getCurrentStarOSAgent().listDefaultWorkerGroupIpPort();\n\n \n List backends = GlobalStateMgr.getCurrentSystemInfo().getBackends();\n for (Backend backend : backends) {\n if (backend.getStarletPort() != 0) {\n String workerAddr = backend.getHost() + \":\" + backend.getStarletPort();\n workerAddresses.remove(workerAddr);\n }\n }\n\n \n List computeNodes = GlobalStateMgr.getCurrentSystemInfo().getComputeNodes();\n for (ComputeNode computeNode : computeNodes) {\n if (computeNode.getStarletPort() != 0) {\n String workerAddr = computeNode.getHost() + \":\" + computeNode.getStarletPort();\n workerAddresses.remove(workerAddr);\n }\n }\n\n for (String unusedWorkerAddress : workerAddresses) {\n GlobalStateMgr.getCurrentStarOSAgent().removeWorker(unusedWorkerAddress);\n LOG.info(\"unused worker {} removed from star mgr\", unusedWorkerAddress);\n cnt++;\n }\n } catch (Exception e) {\n LOG.warn(\"fail to delete unused worker, {}\", e);\n }\n return cnt;\n }\n\n public void syncTableMetaAndColocationInfo() {\n List dbIds = GlobalStateMgr.getCurrentState().getDbIds();\n for (Long dbId : dbIds) {\n Database db = GlobalStateMgr.getCurrentState().getDb(dbId);\n if (db == null) {\n continue;\n }\n if (db.isSystemDatabase()) {\n continue;\n }\n\n List
tables = db.getTables();\n for (Table table : tables) {\n if (!table.isCloudNativeTableOrMaterializedView()) {\n continue;\n }\n try {\n syncTableMetaAndColocationInfoInternal(db, (OlapTable) table, true /* forceDeleteData */);\n } catch (Exception e) {\n LOG.info(\"fail to sync table {} meta, {}\", table.getName(), e.getMessage());\n }\n }\n }\n }\n\n \n \n\n private void syncTableColocationInfo(Database db, OlapTable table) throws DdlException {\n \n if (!GlobalStateMgr.getCurrentColocateIndex().isLakeColocateTable(table.getId())) {\n return;\n }\n Locker locker = new Locker();\n locker.lockDatabase(db, LockType.WRITE);\n try {\n \n if (GlobalStateMgr.getCurrentState().getDb(db.getId()) == null) {\n return;\n }\n if (db.getTable(table.getId()) == null) {\n return;\n }\n GlobalStateMgr.getCurrentColocateIndex().updateLakeTableColocationInfo(table, true /* isJoin */,\n null /* expectGroupId */);\n } finally {\n locker.unLockDatabase(db, LockType.WRITE);\n }\n }\n\n \n \n private void syncTableMetaAndColocationInfoInternal(Database db, OlapTable table, boolean forceDeleteData)\n throws DdlException {\n boolean changed = syncTableMetaInternal(db, table, forceDeleteData);\n \n if (changed) {\n syncTableColocationInfo(db, table);\n }\n }\n\n @Override\n protected void runAfterCatalogReady() {\n deleteUnusedShardAndShardGroup();\n deleteUnusedWorker();\n syncTableMetaAndColocationInfo();\n }\n\n public void syncTableMeta(String dbName, String tableName, boolean forceDeleteData) throws DdlException {\n Database db = GlobalStateMgr.getCurrentState().getDb(dbName);\n if (db == null) {\n throw new DdlException(String.format(\"db %s does not exist.\", dbName));\n }\n\n Table table = db.getTable(tableName);\n if (table == null) {\n throw new DdlException(String.format(\"table %s does not exist.\", tableName));\n }\n if (!table.isCloudNativeTableOrMaterializedView()) {\n throw new DdlException(\"only support cloud table or cloud mv.\");\n }\n\n 
syncTableMetaAndColocationInfoInternal(db, (OlapTable) table, forceDeleteData);\n }\n}" }, { "comment": "same here based on input I would expect ```java Row.of(\"C\", new Integer[] {3, null}), ```", "method_body": "Stream getTestCaseSpecs() {\n return Stream.of(\n TestSpec.forFunction(BuiltInFunctionDefinitions.ARRAY_AGG)\n .withDescription(\"ARRAY changelog stream aggregation\")\n .withSource(\n ROW(STRING(), INT()),\n Arrays.asList(\n Row.ofKind(INSERT, \"A\", 1),\n Row.ofKind(INSERT, \"A\", 2),\n Row.ofKind(INSERT, \"B\", 2),\n Row.ofKind(INSERT, \"B\", 2),\n Row.ofKind(INSERT, \"B\", 3),\n Row.ofKind(INSERT, \"C\", 3),\n Row.ofKind(INSERT, \"C\", null),\n Row.ofKind(INSERT, \"D\", null),\n Row.ofKind(INSERT, \"E\", 4),\n Row.ofKind(INSERT, \"E\", 5),\n Row.ofKind(DELETE, \"E\", 5),\n Row.ofKind(UPDATE_BEFORE, \"E\", 4),\n Row.ofKind(UPDATE_AFTER, \"E\", 6)))\n .testResult(\n source ->\n \"SELECT f0, array_agg(f1) FROM \" + source + \" GROUP BY f0\",\n TableApiAggSpec.groupBySelect(\n Collections.singletonList($(\"f0\")),\n $(\"f0\"),\n $(\"f1\").arrayAgg()),\n ROW(STRING(), ARRAY(INT())),\n ROW(STRING(), ARRAY(INT())),\n Arrays.asList(\n Row.of(\"A\", new Integer[] {1, 2}),\n Row.of(\"B\", new Integer[] {2, 2, 3}),\n Row.of(\"C\", new Integer[] {3}),\n Row.of(\"D\", null),\n Row.of(\"E\", new Integer[] {6})))\n .testResult(\n source ->\n \"SELECT f0, array_agg(DISTINCT f1) FROM \"\n + source\n + \" GROUP BY f0\",\n TableApiAggSpec.groupBySelect(\n Collections.singletonList($(\"f0\")),\n $(\"f0\"),\n $(\"f1\").arrayAgg().distinct()),\n ROW(STRING(), ARRAY(INT())),\n ROW(STRING(), ARRAY(INT())),\n Arrays.asList(\n Row.of(\"A\", new Integer[] {1, 2}),\n Row.of(\"B\", new Integer[] {2, 3}),\n Row.of(\"C\", new Integer[] {3}),\n Row.of(\"D\", null),\n Row.of(\"E\", new Integer[] {6}))));\n }", "target_code": "ROW(STRING(), ARRAY(INT())),", "method_body_after": "Stream getTestCaseSpecs() {\n return Stream.of(\n 
TestSpec.forFunction(BuiltInFunctionDefinitions.ARRAY_AGG)\n .withDescription(\"ARRAY changelog stream aggregation\")\n .withSource(\n ROW(STRING(), INT()),\n Arrays.asList(\n Row.ofKind(INSERT, \"A\", 1),\n Row.ofKind(INSERT, \"A\", 2),\n Row.ofKind(INSERT, \"B\", 2),\n Row.ofKind(INSERT, \"B\", 2),\n Row.ofKind(INSERT, \"B\", 3),\n Row.ofKind(INSERT, \"C\", 3),\n Row.ofKind(INSERT, \"C\", null),\n Row.ofKind(DELETE, \"C\", null),\n Row.ofKind(INSERT, \"D\", null),\n Row.ofKind(INSERT, \"E\", 4),\n Row.ofKind(INSERT, \"E\", 5),\n Row.ofKind(DELETE, \"E\", 5),\n Row.ofKind(UPDATE_BEFORE, \"E\", 4),\n Row.ofKind(UPDATE_AFTER, \"E\", 6)))\n .testResult(\n source ->\n \"SELECT f0, array_agg(f1) FROM \" + source + \" GROUP BY f0\",\n TableApiAggSpec.groupBySelect(\n Collections.singletonList($(\"f0\")),\n $(\"f0\"),\n $(\"f1\").arrayAgg()),\n ROW(STRING(), ARRAY(INT())),\n ROW(STRING(), ARRAY(INT())),\n Arrays.asList(\n Row.of(\"A\", new Integer[] {1, 2}),\n Row.of(\"B\", new Integer[] {2, 2, 3}),\n Row.of(\"C\", new Integer[] {3}),\n Row.of(\"D\", new Integer[] {null}),\n Row.of(\"E\", new Integer[] {6})))\n .testSqlResult(\n source ->\n \"SELECT f0, array_agg(DISTINCT f1 IGNORE NULLS) FROM \"\n + source\n + \" GROUP BY f0\",\n ROW(STRING(), ARRAY(INT())),\n Arrays.asList(\n Row.of(\"A\", new Integer[] {1, 2}),\n Row.of(\"B\", new Integer[] {2, 3}),\n Row.of(\"C\", new Integer[] {3}),\n Row.of(\"D\", null),\n Row.of(\"E\", new Integer[] {6}))));\n }", "context_before": "class ArrayAggFunctionITCase extends BuiltInAggregateFunctionTestBase {\n\n @Override\n \n}", "context_after": "class ArrayAggFunctionITCase extends BuiltInAggregateFunctionTestBase {\n\n @Override\n \n}" }, { "comment": "yeah I'd revisit it once junit5 is in.", "method_body": "private static void randomize(Configuration conf) {\n if (Randomization) {\n final String testName = TestNameProvider.getCurrentTestName();\n final PseudoRandomValueSelector valueSelector =\n 
PseudoRandomValueSelector.create(testName != null ? testName : \"unknown\");\n valueSelector.select(conf, ExecutionCheckpointingOptions.ENABLE_UNALIGNED, true, false);\n }\n }", "target_code": "PseudoRandomValueSelector.create(testName != null ? testName : \"unknown\");", "method_body_after": "private static void randomize(Configuration conf) {\n if (RANDOMIZE_CHECKPOINTING_CONFIG) {\n final String testName = TestNameProvider.getCurrentTestName();\n final PseudoRandomValueSelector valueSelector =\n PseudoRandomValueSelector.create(testName != null ? testName : \"unknown\");\n valueSelector.select(conf, ExecutionCheckpointingOptions.ENABLE_UNALIGNED, true, false);\n }\n }", "context_before": "class rule.\n *\n * @param conf\n */", "context_after": "class rule.\n *\n *

Note that only unset properties are randomized.\n *\n * @param conf the configuration to randomize\n */" }, { "comment": "IIMO, giving an error for the first scenario is a bug because it can be invoked using position arguments. Therefore, if we try to invoke it with named arguments,error should be raised in that instance. ```ballerina type Foo record {| int r; |}; function fn(int r, *Foo x) { } public function main() { Foo f = {r: 1}; fn(1, f); } ```", "method_body": "private BType checkInvocationParam(BLangInvocation iExpr, AnalyzerData data) {\n if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) {\n dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE);\n return symTable.semanticError;\n }\n BType invocableType = Types.getReferredType(iExpr.symbol.type);\n if (invocableType.tag != TypeTags.INVOKABLE) {\n dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);\n return symTable.noType;\n }\n\n BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol);\n List paramTypes = ((BInvokableType) invocableType).getParameterTypes();\n List incRecordParams = new ArrayList<>();\n BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol,\n incRecordParams);\n int parameterCountForPositionalArgs = paramTypes.size();\n int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size();\n iExpr.requiredArgs = new ArrayList<>();\n for (BVarSymbol symbol : invokableSymbol.params) {\n if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) ||\n Types.getReferredType(symbol.type).tag != TypeTags.RECORD) {\n continue;\n }\n LinkedHashMap fields =\n ((BRecordType) Types.getReferredType(symbol.type)).fields;\n if (fields.isEmpty()) {\n continue;\n }\n for (String field : fields.keySet()) {\n if (Types.getReferredType(fields.get(field).type).tag != TypeTags.NEVER) {\n parameterCountForNamedArgs = 
parameterCountForNamedArgs - 1;\n break;\n }\n }\n }\n\n \n int i = 0;\n BLangExpression vararg = null;\n boolean foundNamedArg = false;\n boolean isIncRecordAllowExtraFields = incRecordParamAllowAdditionalFields != null;\n for (BLangExpression expr : iExpr.argExprs) {\n switch (expr.getKind()) {\n case NAMED_ARGS_EXPR:\n foundNamedArg = true;\n BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) expr;\n boolean isNamedArgForIncRecordParam =\n isNamedArgForIncRecordParam(namedArg.name.value, incRecordParamAllowAdditionalFields);\n if (i < parameterCountForNamedArgs) {\n if (isNamedArgForIncRecordParam) {\n isIncRecordAllowExtraFields = false;\n }\n iExpr.requiredArgs.add(expr);\n } else {\n if (isIncRecordAllowExtraFields && !isNamedArgForIncRecordParam) {\n iExpr.requiredArgs.add(expr);\n } else {\n dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);\n }\n }\n i++;\n break;\n case REST_ARGS_EXPR:\n if (foundNamedArg) {\n dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);\n continue;\n }\n vararg = expr;\n break;\n default: \n if (foundNamedArg) {\n dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);\n }\n if (i < parameterCountForPositionalArgs) {\n iExpr.requiredArgs.add(expr);\n } else {\n iExpr.restArgs.add(expr);\n }\n i++;\n break;\n }\n }\n\n return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams,\n incRecordParamAllowAdditionalFields, data);\n }", "target_code": "isNamedArgForIncRecordParam(namedArg.name.value, incRecordParamAllowAdditionalFields);", "method_body_after": "private BType checkInvocationParam(BLangInvocation iExpr, AnalyzerData data) {\n if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) {\n dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE);\n return symTable.semanticError;\n }\n BType invocableType = Types.getReferredType(iExpr.symbol.type);\n if (invocableType.tag != 
TypeTags.INVOKABLE) {\n dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);\n return symTable.noType;\n }\n\n BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol);\n List paramTypes = ((BInvokableType) invocableType).getParameterTypes();\n List incRecordParams = new ArrayList<>();\n BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol,\n incRecordParams);\n int parameterCountForPositionalArgs = paramTypes.size();\n int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size();\n iExpr.requiredArgs = new ArrayList<>();\n for (BVarSymbol symbol : invokableSymbol.params) {\n if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) ||\n Types.getReferredType(symbol.type).tag != TypeTags.RECORD) {\n continue;\n }\n LinkedHashMap fields =\n ((BRecordType) Types.getReferredType(symbol.type)).fields;\n if (fields.isEmpty()) {\n continue;\n }\n for (String field : fields.keySet()) {\n if (Types.getReferredType(fields.get(field).type).tag != TypeTags.NEVER) {\n parameterCountForNamedArgs = parameterCountForNamedArgs - 1;\n break;\n }\n }\n }\n\n \n int i = 0;\n BLangExpression vararg = null;\n boolean foundNamedArg = false;\n boolean incRecordAllowAdditionalFields = incRecordParamAllowAdditionalFields != null;\n for (BLangExpression expr : iExpr.argExprs) {\n switch (expr.getKind()) {\n case NAMED_ARGS_EXPR:\n foundNamedArg = true;\n boolean namedArgForIncRecordParam =\n isNamedArgForIncRecordParam(((BLangNamedArgsExpression) expr).name.value,\n incRecordParamAllowAdditionalFields);\n if (i < parameterCountForNamedArgs) {\n if (namedArgForIncRecordParam) {\n incRecordAllowAdditionalFields = false;\n }\n iExpr.requiredArgs.add(expr);\n } else {\n if (incRecordAllowAdditionalFields && !namedArgForIncRecordParam) {\n iExpr.requiredArgs.add(expr);\n } else {\n checkTypeParamExpr(expr, new BNoType(TypeTags.NONE), 
iExpr.langLibInvocation, data);\n dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);\n }\n }\n i++;\n break;\n case REST_ARGS_EXPR:\n if (foundNamedArg) {\n dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);\n continue;\n }\n vararg = expr;\n break;\n default: \n if (foundNamedArg) {\n dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);\n }\n if (i < parameterCountForPositionalArgs) {\n if (Symbols.isFlagOn(invokableSymbol.params.get(i).flags, Flags.INCLUDED)) {\n incRecordAllowAdditionalFields = false;\n }\n iExpr.requiredArgs.add(expr);\n } else {\n iExpr.restArgs.add(expr);\n }\n i++;\n break;\n }\n }\n\n return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams,\n incRecordParamAllowAdditionalFields, data);\n }", "context_before": "class InferredTupleDetails {\n List fixedMemberTypes = new ArrayList<>();\n List restMemberTypes = new ArrayList<>();\n }", "context_after": "class InferredTupleDetails {\n List fixedMemberTypes = new ArrayList<>();\n List restMemberTypes = new ArrayList<>();\n }" }, { "comment": "One small question, is this the standard way of adding \"`\"? 
I couldn't find the relevant documents.", "method_body": "public void testNestedFilterWithBacktickInTheName() {\n util.verifyRelPlan(\n \"SELECT id FROM NestedTable WHERE `deepNestedWith.`.nested.```name` = 'foo'\");\n }", "target_code": "\"SELECT id FROM NestedTable WHERE `deepNestedWith.`.nested.```name` = 'foo'\");", "method_body_after": "public void testNestedFilterWithBacktickInTheName() {\n util.verifyRelPlan(\n \"SELECT id FROM NestedTable WHERE `deepNestedWith.`.nested.```name` = 'foo'\");\n }", "context_before": "class PushFilterIntoTableSourceScanRuleTest\n extends PushFilterIntoTableSourceScanRuleTestBase {\n\n @Before\n public void setup() {\n util = batchTestUtil(TableConfig.getDefault());\n ((BatchTableTestUtil) util).buildBatchProgram(FlinkBatchProgram.DEFAULT_REWRITE());\n CalciteConfig calciteConfig =\n TableConfigUtils.getCalciteConfig(util.tableEnv().getConfig());\n calciteConfig\n .getBatchProgram()\n .get()\n .addLast(\n \"rules\",\n FlinkHepRuleSetProgramBuilder.newBuilder()\n .setHepRulesExecutionType(\n HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION())\n .setHepMatchOrder(HepMatchOrder.BOTTOM_UP)\n .add(\n RuleSets.ofList(\n PushFilterIntoTableSourceScanRule.INSTANCE,\n CoreRules.FILTER_PROJECT_TRANSPOSE))\n .build());\n\n \n String ddl1 =\n \"CREATE TABLE MyTable (\\n\"\n + \" name STRING,\\n\"\n + \" id bigint,\\n\"\n + \" amount int,\\n\"\n + \" price double\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'filterable-fields' = 'amount',\\n\"\n + \" 'bounded' = 'true'\\n\"\n + \")\";\n util.tableEnv().executeSql(ddl1);\n\n String ddl2 =\n \"CREATE TABLE VirtualTable (\\n\"\n + \" name STRING,\\n\"\n + \" id bigint,\\n\"\n + \" amount int,\\n\"\n + \" virtualField as amount + 1,\\n\"\n + \" price double\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'filterable-fields' = 'amount',\\n\"\n + \" 'bounded' = 'true'\\n\"\n + \")\";\n\n util.tableEnv().executeSql(ddl2);\n\n String ddl3 =\n \"CREATE TABLE 
NestedTable (\\n\"\n + \" id int,\\n\"\n + \" deepNested row, nested2 row>,\\n\"\n + \" nested row,\\n\"\n + \" `deepNestedWith.` row<`.value` int, nested row<```name` string, `.value` int>>,\\n\"\n + \" name string,\\n\"\n + \" testMap Map\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'filterable-fields' = '`deepNested.nested1.value`;`deepNestedWith..nested..value`;`deepNestedWith..nested.``name`;',\"\n + \" 'bounded' = 'true'\\n\"\n + \")\";\n util.tableEnv().executeSql(ddl3);\n\n String ddl4 =\n \"CREATE TABLE NestedItemTable (\\n\"\n + \" `ID` INT,\\n\"\n + \" `Timestamp` TIMESTAMP(3),\\n\"\n + \" `Result` ROW<\\n\"\n + \" `Mid` ROW<\"\n + \" `data_arr` ROW<`value` BIGINT> ARRAY,\\n\"\n + \" `data_map` MAP>\"\n + \" >\"\n + \" >,\\n\"\n + \" WATERMARK FOR `Timestamp` AS `Timestamp`\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'filterable-fields' = 'Result_Mid_data_map;',\"\n + \" 'bounded' = 'true'\\n\"\n + \")\";\n util.tableEnv().executeSql(ddl4);\n }\n\n @Test\n public void testLowerUpperPushdown() {\n String ddl =\n \"CREATE TABLE MTable (\\n\"\n + \" a STRING,\\n\"\n + \" b STRING\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'filterable-fields' = 'a;b',\\n\"\n + \" 'bounded' = 'true'\\n\"\n + \")\";\n util.tableEnv().executeSql(ddl);\n super.testLowerUpperPushdown();\n }\n\n @Test\n public void testWithInterval() {\n String ddl =\n \"CREATE TABLE MTable (\\n\"\n + \"a TIMESTAMP(3),\\n\"\n + \"b TIMESTAMP(3)\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'bounded' = 'true',\\n\"\n + \" 'filterable-fields' = 'a;b',\\n\"\n + \" 'disable-lookup' = 'true'\"\n + \")\";\n util.tableEnv().executeSql(ddl);\n super.testWithInterval();\n }\n\n @Test\n public void testBasicNestedFilter() {\n util.verifyRelPlan(\"SELECT * FROM NestedTable WHERE deepNested.nested1.`value` > 2\");\n }\n\n @Test\n public void testNestedFilterWithDotInTheName() {\n util.verifyRelPlan(\n \"SELECT id 
FROM NestedTable WHERE `deepNestedWith.`.nested.`.value` > 5\");\n }\n\n @Test\n \n\n @Test\n public void testNestedFilterOnMapKey() {\n util.verifyRelPlan(\n \"SELECT * FROM NestedItemTable WHERE\"\n + \" `Result`.`Mid`.data_map['item'].`value` = 3\");\n }\n\n @Test\n public void testNestedFilterOnArrayField() {\n util.verifyRelPlan(\n \"SELECT * FROM NestedItemTable WHERE `Result`.`Mid`.data_arr[2].`value` = 3\");\n }\n}", "context_after": "class PushFilterIntoTableSourceScanRuleTest\n extends PushFilterIntoTableSourceScanRuleTestBase {\n\n @Before\n public void setup() {\n util = batchTestUtil(TableConfig.getDefault());\n ((BatchTableTestUtil) util).buildBatchProgram(FlinkBatchProgram.DEFAULT_REWRITE());\n CalciteConfig calciteConfig =\n TableConfigUtils.getCalciteConfig(util.tableEnv().getConfig());\n calciteConfig\n .getBatchProgram()\n .get()\n .addLast(\n \"rules\",\n FlinkHepRuleSetProgramBuilder.newBuilder()\n .setHepRulesExecutionType(\n HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION())\n .setHepMatchOrder(HepMatchOrder.BOTTOM_UP)\n .add(\n RuleSets.ofList(\n PushFilterIntoTableSourceScanRule.INSTANCE,\n CoreRules.FILTER_PROJECT_TRANSPOSE))\n .build());\n\n \n String ddl1 =\n \"CREATE TABLE MyTable (\\n\"\n + \" name STRING,\\n\"\n + \" id bigint,\\n\"\n + \" amount int,\\n\"\n + \" price double\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'filterable-fields' = 'amount',\\n\"\n + \" 'bounded' = 'true'\\n\"\n + \")\";\n util.tableEnv().executeSql(ddl1);\n\n String ddl2 =\n \"CREATE TABLE VirtualTable (\\n\"\n + \" name STRING,\\n\"\n + \" id bigint,\\n\"\n + \" amount int,\\n\"\n + \" virtualField as amount + 1,\\n\"\n + \" price double\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'filterable-fields' = 'amount',\\n\"\n + \" 'bounded' = 'true'\\n\"\n + \")\";\n\n util.tableEnv().executeSql(ddl2);\n\n String ddl3 =\n \"CREATE TABLE NestedTable (\\n\"\n + \" id int,\\n\"\n + \" deepNested row, nested2 row>,\\n\"\n + \" 
nested row,\\n\"\n + \" `deepNestedWith.` row<`.value` int, nested row<```name` string, `.value` int>>,\\n\"\n + \" name string,\\n\"\n + \" testMap Map\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'filterable-fields' = '`deepNested.nested1.value`;`deepNestedWith..nested..value`;`deepNestedWith..nested.``name`;',\"\n + \" 'bounded' = 'true'\\n\"\n + \")\";\n util.tableEnv().executeSql(ddl3);\n\n String ddl4 =\n \"CREATE TABLE NestedItemTable (\\n\"\n + \" `ID` INT,\\n\"\n + \" `Timestamp` TIMESTAMP(3),\\n\"\n + \" `Result` ROW<\\n\"\n + \" `Mid` ROW<\"\n + \" `data_arr` ROW<`value` BIGINT> ARRAY,\\n\"\n + \" `data_map` MAP>\"\n + \" >\"\n + \" >,\\n\"\n + \" WATERMARK FOR `Timestamp` AS `Timestamp`\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'filterable-fields' = 'Result_Mid_data_map;',\"\n + \" 'bounded' = 'true'\\n\"\n + \")\";\n util.tableEnv().executeSql(ddl4);\n }\n\n @Test\n public void testLowerUpperPushdown() {\n String ddl =\n \"CREATE TABLE MTable (\\n\"\n + \" a STRING,\\n\"\n + \" b STRING\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'filterable-fields' = 'a;b',\\n\"\n + \" 'bounded' = 'true'\\n\"\n + \")\";\n util.tableEnv().executeSql(ddl);\n super.testLowerUpperPushdown();\n }\n\n @Test\n public void testWithInterval() {\n String ddl =\n \"CREATE TABLE MTable (\\n\"\n + \"a TIMESTAMP(3),\\n\"\n + \"b TIMESTAMP(3)\\n\"\n + \") WITH (\\n\"\n + \" 'connector' = 'values',\\n\"\n + \" 'bounded' = 'true',\\n\"\n + \" 'filterable-fields' = 'a;b',\\n\"\n + \" 'disable-lookup' = 'true'\"\n + \")\";\n util.tableEnv().executeSql(ddl);\n super.testWithInterval();\n }\n\n @Test\n public void testBasicNestedFilter() {\n util.verifyRelPlan(\"SELECT * FROM NestedTable WHERE deepNested.nested1.`value` > 2\");\n }\n\n @Test\n public void testNestedFilterWithDotInTheName() {\n util.verifyRelPlan(\n \"SELECT id FROM NestedTable WHERE `deepNestedWith.`.nested.`.value` > 5\");\n }\n\n @Test\n \n\n 
@Test\n public void testNestedFilterOnMapKey() {\n util.verifyRelPlan(\n \"SELECT * FROM NestedItemTable WHERE\"\n + \" `Result`.`Mid`.data_map['item'].`value` = 3\");\n }\n\n @Test\n public void testNestedFilterOnArrayField() {\n util.verifyRelPlan(\n \"SELECT * FROM NestedItemTable WHERE `Result`.`Mid`.data_arr[2].`value` = 3\");\n }\n}" }, { "comment": "This would cause NPE given that we remove `jobVertexID` from `tasksReachedEndOfData` after all its subtasks have finished. Also, given that `areAllTasksOfJobVertexEndOfData()` is currently invoked after `areAllTasksEndOfData()`, it suggests that this code path is not tested. Can you add a test for this scenario?", "method_body": "public boolean areAllTasksOfJobVertexEndOfData(JobVertexID jobVertexID) {\n BitSet subtaskStatus = tasksReachedEndOfData.get(jobVertexID);\n return subtaskStatus.cardinality()\n == executionGraph.getJobVertex(jobVertexID).getParallelism();\n }", "target_code": "return subtaskStatus.cardinality()", "method_body_after": "public boolean areAllTasksOfJobVertexEndOfData(JobVertexID jobVertexID) {\n BitSet subtaskStatus = tasksReachedEndOfData.get(jobVertexID);\n return subtaskStatus == null\n || subtaskStatus.cardinality()\n == executionGraph.getJobVertex(jobVertexID).getParallelism();\n }", "context_before": "class VertexEndOfDataListener {\n private final ExecutionGraph executionGraph;\n\n private final Map tasksReachedEndOfData;\n\n public VertexEndOfDataListener(ExecutionGraph executionGraph) {\n this.executionGraph = executionGraph;\n tasksReachedEndOfData = new HashMap<>();\n for (ExecutionJobVertex vertex : executionGraph.getAllVertices().values()) {\n tasksReachedEndOfData.put(vertex.getJobVertexId(), new BitSet());\n }\n }\n\n public void recordTaskEndOfData(ExecutionAttemptID executionAttemptID) {\n BitSet subtaskStatus = tasksReachedEndOfData.get(executionAttemptID.getJobVertexId());\n subtaskStatus.set(executionAttemptID.getSubtaskIndex());\n }\n\n \n\n public boolean 
areAllTasksEndOfData() {\n Iterator> iterator =\n tasksReachedEndOfData.entrySet().iterator();\n while (iterator.hasNext()) {\n Map.Entry entry = iterator.next();\n JobVertexID vertex = entry.getKey();\n BitSet status = entry.getValue();\n if (status.cardinality() != executionGraph.getJobVertex(vertex).getParallelism()) {\n return false;\n } else {\n iterator.remove();\n }\n }\n return true;\n }\n}", "context_after": "class VertexEndOfDataListener {\n private final ExecutionGraph executionGraph;\n\n private final Map tasksReachedEndOfData;\n\n public VertexEndOfDataListener(ExecutionGraph executionGraph) {\n this.executionGraph = executionGraph;\n tasksReachedEndOfData = new HashMap<>();\n for (ExecutionJobVertex vertex : executionGraph.getAllVertices().values()) {\n tasksReachedEndOfData.put(vertex.getJobVertexId(), new BitSet());\n }\n }\n\n public void recordTaskEndOfData(ExecutionAttemptID executionAttemptID) {\n BitSet subtaskStatus = tasksReachedEndOfData.get(executionAttemptID.getJobVertexId());\n subtaskStatus.set(executionAttemptID.getSubtaskIndex());\n }\n\n \n\n public boolean areAllTasksEndOfData() {\n Iterator> iterator =\n tasksReachedEndOfData.entrySet().iterator();\n while (iterator.hasNext()) {\n Map.Entry entry = iterator.next();\n JobVertexID vertex = entry.getKey();\n BitSet status = entry.getValue();\n if (status.cardinality() != executionGraph.getJobVertex(vertex).getParallelism()) {\n return false;\n } else {\n iterator.remove();\n }\n }\n return true;\n }\n}" }, { "comment": "We call this method for any `BLangRecordLiteral` [here](https://github.com/ballerina-platform/ballerina-lang/blob/bc7e5e948b6dadc96ea5c781c751904e98f3f3fc/compiler/ballerina-lang/src/main/java/org/wso2/ballerinalang/compiler/desugar/Desugar.java#L6088), so we need to determine if the compiler need to explicitly add fields to the RecordLiteral or not in this method.", "method_body": "private BLangBlockStmt desugarForeachToWhileWithIterator(BLangForeach foreach, 
BLangSimpleVariableDef varDef) {\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n\n \n\n BVarSymbol iteratorSymbol = varDef.var.symbol;\n\n \n BVarSymbol resultSymbol = new BVarSymbol(0, Names.fromString(\"$result$\"), this.env.scope.owner.pkgID,\n foreach.nillableResultType, this.env.scope.owner, foreach.pos,\n VIRTUAL);\n\n \n BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,\n foreach.nillableResultType, iteratorSymbol, resultSymbol);\n\n \n BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);\n BLangStatementExpression statementExpression = ASTBuilderUtil.createStatementExpression(\n resultVariableDefinition, resultReferenceInWhile);\n statementExpression.setBType(foreach.nillableResultType);\n\n \n BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);\n BLangTypeTestExpr typeTestExpr = ASTBuilderUtil\n .createTypeTestExpr(foreach.pos, statementExpression, userDefineType);\n \n BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();\n whileNode.pos = foreach.pos;\n whileNode.expr = typeTestExpr;\n whileNode.body = foreach.body;\n\n VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;\n\n \n \n \n BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);\n\n BLangExpression expr = valueAccessExpr.expr;\n \n \n valueAccessExpr.expr = types.addConversionExprIfRequired(expr, symTable.mapAllType);\n variableDefinitionNode.getVariable()\n .setInitialExpression(types.addConversionExprIfRequired(valueAccessExpr, foreach.varType));\n whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);\n\n \n BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);\n\n \n blockNode.addStatement(varDef);\n\n \n blockNode.addStatement(whileNode);\n return blockNode;\n }\n\n private BLangType getUserDefineTypeNode(BType type) {\n 
BLangUserDefinedType recordType =\n new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, \"\"),\n ASTBuilderUtil.createIdentifier(null, \"\"));\n recordType.setBType(type);\n return recordType;\n }\n\n @Override\n public void visit(BLangWhile whileNode) {\n if (whileNode.onFailClause != null) {\n BLangOnFailClause onFailClause = whileNode.onFailClause;\n whileNode.onFailClause = null;\n whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;\n BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause);\n result = rewrite(doStmt, env);\n } else {\n whileNode.expr = rewriteExpr(whileNode.expr);\n whileNode.body = rewrite(whileNode.body, env);\n result = whileNode;\n }\n }\n\n private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,\n BLangOnFailClause onFailClause) {\n BLangDo bLDo = (BLangDo) TreeBuilder.createDoNode();\n BLangBlockStmt doBlock = ASTBuilderUtil.createBlockStmt(location);\n doBlock.scope = new Scope(env.scope.owner);\n bLDo.body = doBlock;\n bLDo.pos = location;\n bLDo.onFailClause = onFailClause;\n bLDo.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;\n doBlock.stmts.add(statement);\n return bLDo;\n }\n\n @Override\n public void visit(BLangLock lockNode) {\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n BLangOnFailClause currentOnFailClause = this.onFailClause;\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);\n if (lockNode.onFailClause != null) {\n blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;\n rewrite(lockNode.onFailClause, env);\n }\n BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);\n blockStmt.addStatement(lockStmt);\n\n enclLocks.push(lockStmt);\n\n BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);\n BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);\n BLangStatementExpression 
statementExpression = createStatementExpression(lockNode.body, nilLiteral);\n statementExpression.setBType(symTable.nilType);\n\n BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();\n trapExpr.setBType(nillableError);\n trapExpr.expr = statementExpression;\n BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, Names.fromString(\"$errorResult\"),\n this.env.scope.owner.pkgID, nillableError,\n this.env.scope.owner, lockNode.pos, VIRTUAL);\n BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, \"$errorResult\",\n nillableError, trapExpr,\n nillableErrorVarSymbol);\n BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);\n blockStmt.addStatement(simpleVariableDef);\n\n BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);\n unLockStmt.relatedLock = lockStmt; \n blockStmt.addStatement(unLockStmt);\n BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);\n\n BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);\n BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();\n panicNode.pos = lockNode.pos;\n panicNode.expr = types.addConversionExprIfRequired(varRef, symTable.errorType);\n ifBody.addStatement(panicNode);\n\n BLangTypeTestExpr isErrorTest =\n ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());\n isErrorTest.setBType(symTable.booleanType);\n\n BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);\n blockStmt.addStatement(ifelse);\n result = rewrite(blockStmt, env);\n enclLocks.pop();\n this.onFailClause = currentOnFailClause;\n }\n\n @Override\n public void visit(BLangLockStmt lockStmt) {\n result = lockStmt;\n }\n\n @Override\n public void visit(BLangUnLockStmt unLockStmt) {\n result = unLockStmt;\n }\n\n\n private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,\n 
BLangSimpleVarRef shouldRetryRef) {\n BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();\n trxOnFailClause.pos = pos;\n trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);\n trxOnFailClause.body.scope = new Scope(env.scope.owner);\n trxOnFailClause.isInternal = true;\n\n \n \n BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, Names.fromString(\"$trxError$\"),\n env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);\n BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,\n \"$trxError$\", symTable.errorType, null, trxOnFailErrorSym);\n trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,\n trxOnFailError);\n trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);\n\n \n \n \n \n transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,\n trxBlockId, shouldRetryRef);\n\n BLangGroupExpr shouldNotPanic = new BLangGroupExpr();\n shouldNotPanic.setBType(symTable.booleanType);\n shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);\n\n BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);\n\n BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);\n\n BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();\n panicNode.pos = pos;\n panicNode.expr = caughtError;\n\n \n \n \n \n \n BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);\n trxOnFailClause.body.stmts.add(exitIf);\n\n BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();\n failStmt.pos = pos;\n failStmt.expr = caughtError;\n failBlock.stmts.add(failStmt);\n trxOnFailClause.bodyContainsFail = true;\n\n \n \n \n \n \n \n \n \n \n \n \n \n return trxOnFailClause;\n }\n\n @Override\n public void visit(BLangTransaction transactionNode) {\n if (transactionNode.onFailClause != null) {\n \n BLangOnFailClause onFailClause = 
transactionNode.onFailClause;\n transactionNode.onFailClause = null;\n transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;\n BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);\n \n \n \n \n \n \n \n \n result = rewrite(doStmt, env);\n } else {\n BLangLiteral currentTrxBlockId = this.trxBlockId;\n String uniqueId = String.valueOf(++transactionBlockCount);\n this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);\n boolean currShouldReturnErrors = this.shouldReturnErrors;\n this.shouldReturnErrors = true;\n\n BLangOnFailClause currOnFailClause = this.onFailClause;\n\n \n BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);\n BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, Names.fromString(\"$shouldPanic$\"),\n env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);\n shouldPanicVarSymbol.closure = true;\n BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,\n \"$shouldPanic$\", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);\n\n BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,\n shouldPanicVariable);\n BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,\n shouldPanicVarSymbol);\n\n \n \n \n \n \n \n \n \n \n \n \n BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,\n this.shouldRetryRef);\n enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);\n\n boolean userDefinedOnFailAvbl = this.onFailClause != null;\n analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);\n\n BLangBlockStmt transactionStmtBlock =\n transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);\n\n transactionStmtBlock.stmts.add(0, shouldPanicDef);\n 
transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);\n transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?\n BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :\n BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n result = rewrite(transactionStmtBlock, this.env);\n\n this.shouldReturnErrors = currShouldReturnErrors;\n this.trxBlockId = currentTrxBlockId;\n swapAndResetEnclosingOnFail(currOnFailClause);\n }\n }\n\n @Override\n public void visit(BLangRollback rollbackNode) {\n BLangBlockStmt rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef);\n result = rewrite(rollbackStmtExpr, env);\n }\n\n private BLangOnFailClause createRetryInternalOnFail(Location pos,\n BLangSimpleVarRef retryResultRef,\n BLangSimpleVarRef retryManagerRef,\n BLangSimpleVarRef shouldRetryRef,\n BLangSimpleVarRef continueLoopRef,\n BLangSimpleVarRef returnResult) {\n BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();\n internalOnFail.pos = pos;\n internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);\n internalOnFail.body.scope = new Scope(env.scope.owner);\n\n BVarSymbol caughtErrorSym = new BVarSymbol(0, Names.fromString(\"$caughtError$\"),\n env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);\n BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,\n \"$caughtError$\", symTable.errorType, null, caughtErrorSym);\n internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,\n caughtError);\n env.scope.define(caughtErrorSym.name, caughtErrorSym);\n BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);\n\n \n BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);\n internalOnFail.body.stmts.add(errorAssignment);\n\n \n 
BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,\n ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));\n internalOnFail.body.stmts.add(continueLoopTrue);\n\n \n BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,\n retryManagerRef, caughtErrorRef);\n BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,\n shouldRetryInvocation);\n internalOnFail.body.stmts.add(shouldRetryAssignment);\n\n BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();\n shouldNotRetryCheck.setBType(symTable.booleanType);\n shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);\n\n BLangGroupExpr exitCheck = new BLangGroupExpr();\n exitCheck.setBType(symTable.booleanType);\n exitCheck.expression = shouldNotRetryCheck;\n\n BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);\n BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);\n\n if (this.onFailClause != null) {\n \n \n BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();\n failStmt.pos = pos;\n failStmt.expr = retryResultRef;\n\n exitLogicBlock.stmts.add(failStmt);\n internalOnFail.bodyContainsFail = true;\n internalOnFail.body.stmts.add(exitIf);\n\n \n BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode();\n loopContinueStmt.pos = pos;\n internalOnFail.body.stmts.add(loopContinueStmt);\n\n \n \n \n \n } else {\n BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,\n ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));\n exitLogicBlock.stmts.add(returnErrorTrue);\n internalOnFail.body.stmts.add(exitIf);\n \n \n \n }\n return internalOnFail;\n }\n\n BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {\n List paramTypes = new ArrayList<>();\n paramTypes.add(symTable.booleanType);\n BInvokableType type = new 
BInvokableType(paramTypes, symTable.booleanType,\n null);\n BOperatorSymbol notOperatorSymbol = new BOperatorSymbol(\n Names.fromString(OperatorKind.NOT.value()), symTable.rootPkgSymbol.pkgID, type, symTable.rootPkgSymbol,\n symTable.builtinPos, VIRTUAL);\n return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType,\n OperatorKind.NOT, notOperatorSymbol);\n }\n\n BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,\n List lambdaFunctionVariable,\n TypeNode returnType, BLangFunctionBody lambdaBody) {\n BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();\n BLangFunction func =\n ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);\n lambdaFunction.function = func;\n func.requiredParams.addAll(lambdaFunctionVariable);\n func.setReturnTypeNode(returnType);\n func.desugaredReturnType = true;\n defineFunction(func, env.enclPkg);\n lambdaFunctionVariable = func.requiredParams;\n\n func.body = lambdaBody;\n func.desugared = false;\n lambdaFunction.pos = pos;\n List paramTypes = new ArrayList<>();\n lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));\n lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(),\n null));\n return lambdaFunction;\n }\n\n private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {\n final BPackageSymbol packageSymbol = targetPkg.symbol;\n final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol);\n symbolEnter.defineNode(funcNode, packageEnv);\n packageEnv.enclPkg.functions.add(funcNode);\n packageEnv.enclPkg.topLevelNodes.add(funcNode);\n }\n\n @Override\n public void visit(BLangForkJoin forkJoin) {\n result = forkJoin;\n }\n\n \n\n @Override\n public void visit(BLangLiteral literalExpr) {\n int tag = Types.getImpliedType(literalExpr.getBType()).tag;\n if (tag == TypeTags.ARRAY || tag == TypeTags.TUPLE) {\n \n result = 
rewriteBlobLiteral(literalExpr);\n return;\n }\n result = literalExpr;\n }\n\n private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {\n byte[] values = types.convertToByteArray((String) literalExpr.value);\n BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();\n arrayLiteralNode.setBType(literalExpr.getBType());\n arrayLiteralNode.pos = literalExpr.pos;\n arrayLiteralNode.exprs = new ArrayList<>();\n for (byte b : values) {\n arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));\n }\n return arrayLiteralNode;\n }\n\n @Override\n public void visit(BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {\n listConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);\n result = listConstructorSpreadOpExpr;\n }\n\n @Override\n public void visit(BLangListConstructorExpr listConstructor) {\n listConstructor.exprs = rewriteExprs(listConstructor.exprs);\n BLangExpression expr;\n BType listConstructorType = Types.getImpliedType(listConstructor.getBType());\n if (listConstructorType.tag == TypeTags.TUPLE) {\n expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());\n result = rewriteExpr(expr);\n } else if (listConstructorType.tag == TypeTags.JSON) {\n expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.getBType()));\n result = rewriteExpr(expr);\n } else if (getElementType(listConstructorType).tag == TypeTags.JSON) {\n expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());\n result = rewriteExpr(expr);\n } else if (listConstructorType.tag == TypeTags.TYPEDESC) {\n final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();\n typedescExpr.resolvedType = listConstructor.typedescType;\n typedescExpr.setBType(symTable.typeDesc);\n result = rewriteExpr(typedescExpr);\n } else {\n expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, 
listConstructor.getBType());\n result = rewriteExpr(expr);\n }\n }\n\n @Override\n public void visit(BLangTableConstructorExpr tableConstructorExpr) {\n rewriteExprs(tableConstructorExpr.recordLiteralList);\n result = tableConstructorExpr;\n }\n\n @Override\n public void visit(BLangArrayLiteral arrayLiteral) {\n arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);\n BType arrayLiteralType = Types.getImpliedType(arrayLiteral.getBType());\n if (arrayLiteralType.tag == TypeTags.JSON) {\n result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));\n return;\n } else if (getElementType(arrayLiteralType).tag == TypeTags.JSON) {\n result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());\n return;\n }\n result = arrayLiteral;\n }\n\n @Override\n public void visit(BLangTupleLiteral tupleLiteral) {\n if (tupleLiteral.isTypedescExpr) {\n final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();\n typedescExpr.resolvedType = tupleLiteral.typedescType;\n typedescExpr.setBType(symTable.typeDesc);\n result = rewriteExpr(typedescExpr);\n return;\n }\n List exprs = tupleLiteral.exprs;\n BTupleType tupleType = (BTupleType) Types.getImpliedType(tupleLiteral.getBType());\n List tupleMemberTypes = tupleType.getTupleTypes();\n int tupleMemberTypeSize = tupleMemberTypes.size();\n int tupleExprSize = exprs.size();\n\n boolean isInRestType = false;\n int i = 0;\n for (BLangExpression expr: exprs) {\n if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {\n BType spreadOpType = ((BLangListConstructorSpreadOpExpr) expr).expr.getBType();\n spreadOpType = Types.getImpliedType(spreadOpType);\n if (spreadOpType.tag == TypeTags.ARRAY) {\n BArrayType spreadOpBArray = (BArrayType) spreadOpType;\n if (spreadOpBArray.size >= 0) {\n i += spreadOpBArray.size;\n continue;\n }\n } else {\n BTupleType spreadOpTuple = spreadOpType.tag == TypeTags.INTERSECTION ?\n (BTupleType) ((BIntersectionType) spreadOpType).effectiveType : 
(BTupleType) spreadOpType;\n if (types.isFixedLengthTuple(spreadOpTuple)) {\n i += spreadOpTuple.getMembers().size();\n continue;\n }\n }\n isInRestType = true;\n continue;\n }\n\n BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();\n\n BType targetType = tupleType.restType;\n if (!isInRestType && i < tupleMemberTypeSize) {\n targetType = tupleMemberTypes.get(i);\n }\n\n types.setImplicitCastExpr(expr, expType, targetType);\n i++;\n }\n\n tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);\n result = tupleLiteral;\n }\n\n @Override\n public void visit(BLangGroupExpr groupExpr) {\n result = rewriteExpr(groupExpr.expression);\n }\n\n @Override\n public void visit(BLangRecordLiteral recordLiteral) {\n List fields = recordLiteral.fields;\n generateFieldsForUserUnspecifiedRecordFields(recordLiteral, fields);\n fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2)));\n result = rewriteExpr(rewriteMappingConstructor(recordLiteral));\n }\n\n private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {\n if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {\n return env.enclEnv;\n }\n\n if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) {\n return env.enclEnv;\n }\n\n if (env.enclInvokable != null && env.enclInvokable == encInvokable) {\n return findEnclosingInvokableEnv(env.enclEnv, encInvokable);\n }\n return env;\n }\n\n private void updateClosureVariable(BVarSymbol varSymbol, BLangInvokableNode encInvokable, Location pos) {\n if (!varSymbol.closure) {\n SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);\n BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, varSymbol);\n if (resolvedSymbol != symTable.notFoundSymbol) {\n varSymbol.closure = true;\n ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(varSymbol, pos));\n }\n }\n }\n\n 
private List getNamesOfUserSpecifiedRecordFields(List userSpecifiedFields) {\n List fieldNames = new ArrayList<>();\n\n for (RecordLiteralNode.RecordField field : userSpecifiedFields) {\n if (field.isKeyValueField()) {\n BLangExpression key = ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.expr;\n if (key.getKind() == NodeKind.LITERAL) {\n fieldNames.add(Utils.unescapeBallerina(((BLangLiteral) key).value.toString()));\n } else if (key.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {\n fieldNames.add(Utils.unescapeBallerina(((BLangSimpleVarRef) key).variableName.value));\n }\n } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {\n fieldNames.add(Utils.unescapeBallerina(((BLangSimpleVarRef) field).variableName.value));\n } else {\n addRequiredFieldsFromSpreadOperator(field, fieldNames);\n }\n }\n\n return fieldNames;\n }\n\n private void addRequiredFieldsFromSpreadOperator(RecordLiteralNode.RecordField field, List fieldNames) {\n BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =\n (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;\n BType type = Types.getReferredType(spreadOpField.expr.getBType());\n if (type.tag != TypeTags.RECORD) {\n return;\n }\n for (BField bField : ((BRecordType) type).fields.values()) {\n fieldNames.add(Utils.unescapeBallerina(bField.name.value));\n }\n }\n\n private void generateFieldsForUserUnspecifiedRecordFields(List fields,\n List fieldNames,\n Map defaultValues,\n Location pos, boolean isReadonly) {\n for (Map.Entry entry : defaultValues.entrySet()) {\n String fieldName = entry.getKey();\n if (fieldNames.contains(fieldName)) {\n continue;\n }\n fieldNames.add(fieldName);\n BInvokableSymbol invokableSymbol = entry.getValue();\n BLangExpression expression = getFunctionPointerInvocation(invokableSymbol);\n\n if (isReadonly && !Symbols.isFlagOn(invokableSymbol.retType.flags, Flags.READONLY)) {\n expression = visitCloneReadonly(expression, invokableSymbol.retType);\n }\n if (env.enclInvokable != null) 
{\n BLangInvocation invocation = (BLangInvocation) expression;\n if (invocation.expr.getKind() == NodeKind.INVOCATION) {\n updateClosureVariable((BVarSymbol) ((BLangInvocation) invocation.expr).symbol, env.enclInvokable,\n pos);\n } else {\n updateClosureVariable((BVarSymbol) invocation.symbol, env.enclInvokable, pos);\n }\n }\n BLangRecordLiteral.BLangRecordKeyValueField member = createRecordKeyValueField(pos, fieldName, expression);\n fields.add(member);\n }\n }\n\n private BLangRecordLiteral.BLangRecordKeyValueField createRecordKeyValueField(Location pos,\n String fieldName,\n BLangExpression expression) {\n BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();\n member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(pos, symTable.stringType,\n Utils.unescapeJava(fieldName)));\n member.valueExpr = types.addConversionExprIfRequired(expression, expression.getBType());\n return member;\n }\n\n public void generateFieldsForUserUnspecifiedRecordFields(BLangRecordLiteral recordLiteral,\n List userSpecifiedFields) {\n BType type = Types.getImpliedType(recordLiteral.getBType());\n if (type.getKind() != TypeKind.RECORD || isSpreadingAnOpenRecord(userSpecifiedFields)) {\n return;\n }\n List fieldNames = getNamesOfUserSpecifiedRecordFields(userSpecifiedFields);\n Location pos = recordLiteral.pos;\n BRecordType recordType = (BRecordType) type;\n boolean isReadonly = Symbols.isFlagOn(recordType.flags, Flags.READONLY);\n generateFieldsForUserUnspecifiedRecordFields(recordType, userSpecifiedFields, fieldNames, pos, isReadonly);\n }\n\n private static boolean isSpreadingAnOpenRecord(List userSpecifiedFields) {\n for (RecordLiteralNode.RecordField field : userSpecifiedFields) {\n if (!(field instanceof BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOperatorField)) {\n continue;\n }\n BType type = Types.getReferredType(spreadOperatorField.expr.getBType());\n if (!(type instanceof BRecordType 
recordType)) {\n return true;\n }\n if (recordType.restFieldType != null) {\n return true;\n }\n }\n return false;\n }\n\n private void generateFieldsForUserUnspecifiedRecordFields(BRecordType recordType,\n List fields,\n List fieldNames, Location pos,\n boolean isReadonly) {\n Map defaultValues = ((BRecordTypeSymbol) recordType.tsymbol).defaultValues;\n generateFieldsForUserUnspecifiedRecordFields(fields, fieldNames, defaultValues, pos, isReadonly);\n List typeInclusions = recordType.typeInclusions;\n for (BType typeInclusion : typeInclusions) {\n generateFieldsForUserUnspecifiedRecordFields((BRecordType) Types.getImpliedType(typeInclusion), fields,\n fieldNames, pos, isReadonly);\n }\n }\n\n @Override\n public void visit(BLangSimpleVarRef varRefExpr) {\n BLangSimpleVarRef genVarRefExpr = varRefExpr;\n\n \n if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {\n BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);\n qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;\n qnameExpr.localname = varRefExpr.variableName;\n qnameExpr.prefix = varRefExpr.pkgAlias;\n qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;\n qnameExpr.isUsedInXML = false;\n qnameExpr.pos = varRefExpr.pos;\n qnameExpr.setBType(symTable.stringType);\n result = qnameExpr;\n return;\n }\n\n if (varRefExpr.symbol == null) {\n result = varRefExpr;\n return;\n }\n\n \n if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {\n BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;\n if (varSymbol.originalSymbol != null) {\n varRefExpr.symbol = varSymbol.originalSymbol;\n if (varSymbol.closure) {\n varRefExpr.symbol.closure = true;\n }\n }\n }\n\n BType type = varRefExpr.getBType();\n\n BSymbol ownerSymbol = varRefExpr.symbol.owner;\n if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&\n Types.getImpliedType(varRefExpr.symbol.type).tag == TypeTags.INVOKABLE) {\n genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) 
varRefExpr.symbol);\n } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&\n !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {\n genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);\n } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||\n (ownerSymbol.tag & SymTag.FUNCTION_TYPE) == SymTag.FUNCTION_TYPE ||\n (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {\n \n genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);\n } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {\n genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);\n } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||\n (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {\n\n \n \n if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {\n BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;\n BType referredType = Types.getImpliedType(constSymbol.literalType);\n if (referredType.tag <= TypeTags.BOOLEAN || referredType.tag == TypeTags.NIL) {\n BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,\n constSymbol.value.value);\n result = rewriteExpr(types.addConversionExprIfRequired(literal, varRefExpr.getBType()));\n return;\n }\n }\n\n \n \n genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);\n\n if (!enclLocks.isEmpty()) {\n BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;\n BLangLockStmt lockStmt = enclLocks.peek();\n lockStmt.addLockVariable(symbol);\n lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));\n }\n }\n\n genVarRefExpr.setBType(type);\n genVarRefExpr.pos = varRefExpr.pos;\n\n if ((varRefExpr.isLValue)\n || genVarRefExpr.symbol.name.equals(IGNORE)) { \n genVarRefExpr.isLValue = varRefExpr.isLValue;\n genVarRefExpr.setBType(varRefExpr.symbol.type);\n result = genVarRefExpr;\n return;\n }\n\n \n \n genVarRefExpr.isLValue = varRefExpr.isLValue;\n BType targetType = 
genVarRefExpr.getBType();\n genVarRefExpr.setBType(genVarRefExpr.symbol.type);\n BLangExpression expression = types.addConversionExprIfRequired(genVarRefExpr, targetType);\n result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;\n }\n\n @Override\n public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {\n rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);\n }\n\n private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {\n if (safeNavigate(fieldAccessExpr)) {\n result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));\n return;\n }\n\n BLangAccessExpression targetVarRef = fieldAccessExpr;\n\n \n \n BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());\n fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);\n if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {\n fieldAccessExpr.expr = types.addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);\n }\n\n BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,\n Utils.unescapeJava(fieldAccessExpr.field.value));\n BType refType = Types.getImpliedType(varRefType);\n int varRefTypeTag = refType.tag;\n if (varRefTypeTag == TypeTags.OBJECT ||\n (varRefTypeTag == TypeTags.UNION &&\n Types.getImpliedType(\n ((BUnionType) refType).getMemberTypes().iterator().next()).tag == TypeTags.OBJECT)) {\n if (fieldAccessExpr.symbol != null &&\n Types.getImpliedType(fieldAccessExpr.symbol.type).tag == TypeTags.INVOKABLE &&\n ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {\n result = rewriteObjectMemberAccessAsField(fieldAccessExpr);\n return;\n } else {\n boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;\n\n if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {\n BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;\n 
BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) refType.tsymbol;\n BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;\n BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;\n\n if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||\n (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {\n isStoreOnCreation = true;\n }\n }\n\n targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,\n (BVarSymbol) fieldAccessExpr.symbol, false,\n isStoreOnCreation);\n \n }\n } else if (varRefTypeTag == TypeTags.RECORD ||\n (varRefTypeTag == TypeTags.UNION &&\n Types.getImpliedType(\n ((BUnionType) refType).getMemberTypes().iterator().next()).tag == TypeTags.RECORD)) {\n if (fieldAccessExpr.symbol != null &&\n Types.getImpliedType(fieldAccessExpr.symbol.type).tag == TypeTags.INVOKABLE\n && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {\n targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);\n } else {\n targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,\n (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);\n }\n } else if (types.isLaxFieldAccessAllowed(refType)) {\n if (!types.isAssignable(refType, symTable.xmlType)) {\n if (varRefTypeTag == TypeTags.MAP &&\n TypeTags.isXMLTypeTag(Types.getImpliedType(((BMapType) refType).constraint).tag)) {\n result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));\n return;\n }\n \n \n fieldAccessExpr.expr = types.addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);\n targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);\n } else {\n fieldAccessExpr.expr = types.addConversionExprIfRequired(fieldAccessExpr.expr, symTable.xmlType);\n BLangInvocation xmlAccessInvocation = 
rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);\n xmlAccessInvocation.setBType(fieldAccessExpr.getBType());\n result = xmlAccessInvocation;\n return;\n }\n } else if (varRefTypeTag == TypeTags.MAP) {\n \n targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,\n fieldAccessExpr.isStoreOnCreation);\n } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {\n targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,\n fieldAccessExpr.fieldKind);\n }\n\n targetVarRef.isLValue = fieldAccessExpr.isLValue;\n targetVarRef.setBType(fieldAccessExpr.getBType());\n targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;\n result = targetVarRef;\n }\n\n @Override\n public void visit(BLangFieldBasedAccess fieldAccessExpr) {\n rewriteFieldBasedAccess(fieldAccessExpr);\n }\n\n private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {\n Location pos = fieldAccessExpr.pos;\n BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;\n \n BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();\n String funcName = \"$anon$method$delegate$\" + originalMemberFuncSymbol.name.value + \"$\" + lambdaFunctionCount++;\n BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),\n Names.fromString(funcName), env.enclPkg.packageID,\n originalMemberFuncSymbol.type, env.scope.owner, pos,\n VIRTUAL);\n funcSymbol.retType = originalMemberFuncSymbol.retType;\n funcSymbol.bodyExist = true;\n funcSymbol.params = new ArrayList<>();\n funcSymbol.scope = new Scope(funcSymbol);\n func.pos = pos;\n func.name = createIdentifier(pos, funcName);\n func.flagSet.add(Flag.LAMBDA);\n func.flagSet.add(Flag.ANONYMOUS);\n func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();\n func.symbol = funcSymbol;\n func.setBType(funcSymbol.type);\n func.closureVarSymbols = new LinkedHashSet<>();\n 
\n BLangExpression receiver = fieldAccessExpr.expr;\n \n \n BLangSimpleVariableDef intermediateObjDef = null;\n if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {\n BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;\n receiverSymbol.closure = true;\n func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));\n } else {\n BLangSimpleVariableDef varDef = createVarDef(\"$$temp$obj$\" + annonVarCount++, receiver.getBType(),\n receiver, pos);\n intermediateObjDef = varDef;\n varDef.var.symbol.closure = true;\n env.scope.define(varDef.var.symbol.name, varDef.var.symbol);\n BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);\n func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));\n receiver = variableRef;\n }\n\n \n\n ArrayList requiredArgs = new ArrayList<>();\n for (BVarSymbol param : originalMemberFuncSymbol.params) {\n BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();\n fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,\n VIRTUAL);\n fParam.pos = pos;\n fParam.name = createIdentifier(pos, param.name.value);\n fParam.setBType(param.type);\n func.requiredParams.add(fParam);\n funcSymbol.params.add(fParam.symbol);\n funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);\n\n BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);\n requiredArgs.add(paramRef);\n }\n\n ArrayList restArgs = new ArrayList<>();\n if (originalMemberFuncSymbol.restParam != null) {\n BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();\n func.restParam = restParam;\n BVarSymbol restSym = originalMemberFuncSymbol.restParam;\n restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);\n restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,\n VIRTUAL);\n restParam.pos = pos;\n restParam.setBType(restSym.type);\n 
funcSymbol.restParam = restParam.symbol;\n funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);\n\n BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);\n restArgs.add(createRestArgsExpression(restArg));\n }\n\n BLangIdentifier field = fieldAccessExpr.field;\n BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();\n retStmt.expr = createObjectMethodInvocation(\n receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);\n ((BLangBlockFunctionBody) func.body).addStatement(retStmt);\n\n BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();\n lambdaFunction.function = func;\n lambdaFunction.capturedClosureEnv = env;\n env.enclPkg.functions.add(func);\n env.enclPkg.topLevelNodes.add(func);\n \n lambdaFunction.parent = env.enclInvokable;\n lambdaFunction.setBType(func.getBType());\n\n if (intermediateObjDef == null) {\n return rewrite(lambdaFunction, env);\n } else {\n BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));\n expr.setBType(lambdaFunction.getBType());\n return rewrite(expr, env);\n }\n }\n\n private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,\n BSymbol invocableSymbol,\n List requiredArgs,\n List restArgs) {\n BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();\n invocationNode.name = field;\n invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n\n invocationNode.expr = receiver;\n\n invocationNode.symbol = invocableSymbol;\n invocationNode.setBType(((BInvokableType) invocableSymbol.type).retType);\n invocationNode.requiredArgs = requiredArgs;\n invocationNode.restArgs = restArgs;\n return invocationNode;\n }\n\n private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {\n BLangStatementExpression statementExpression = new BLangStatementExpression();\n 
BLangBlockStmt block = new BLangBlockStmt();\n statementExpression.stmt = block;\n BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);\n Location pos = fieldAccessExpr.pos;\n BLangSimpleVariableDef result = createVarDef(\"$mapAccessResult$\", fieldAccessType, null, pos);\n block.addStatement(result);\n BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);\n resultRef.setBType(fieldAccessType);\n statementExpression.setBType(fieldAccessType);\n\n\n \n \n BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(\n fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);\n BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);\n BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);\n mapAccessExpr.setBType(xmlOrNil);\n BLangSimpleVariableDef mapResult = createVarDef(\"$mapAccess\", xmlOrNil, mapAccessExpr, pos);\n BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);\n block.addStatement(mapResult);\n\n BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);\n\n BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);\n\n ifStmt.expr = isLikeNilExpr;\n BLangBlockStmt resultNilBody = new BLangBlockStmt();\n ifStmt.body = resultNilBody;\n BLangBlockStmt resultHasValueBody = new BLangBlockStmt();\n ifStmt.elseStmt = resultHasValueBody;\n\n BLangErrorConstructorExpr errorConstructorExpr =\n (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();\n BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,\n Names.fromString(\"\"), Names.fromString(\"error\"));\n errorConstructorExpr.setBType(symbol.type);\n\n List positionalArgs = new ArrayList<>();\n List namedArgs = new ArrayList<>();\n positionalArgs.add(createStringLiteral(pos, \"{\" + 
RuntimeConstants.MAP_LANG_LIB + \"}InvalidKey\"));\n BLangNamedArgsExpression message = new BLangNamedArgsExpression();\n message.name = ASTBuilderUtil.createIdentifier(pos, \"key\");\n message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);\n namedArgs.add(message);\n errorConstructorExpr.positionalArgs = positionalArgs;\n errorConstructorExpr.namedArgs = namedArgs;\n\n BLangSimpleVariableDef errorDef =\n createVarDef(\"$_invalid_key_error\", symTable.errorType, errorConstructorExpr, pos);\n resultNilBody.addStatement(errorDef);\n\n BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);\n\n BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);\n errorVarAssignment.varRef = resultRef;\n errorVarAssignment.expr = errorRef;\n\n BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(\n pos, resultHasValueBody);\n mapResultAssignment.varRef = resultRef;\n mapResultAssignment.expr = mapResultRef;\n\n statementExpression.expr = resultRef;\n return statementExpression;\n }\n\n private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {\n ArrayList args = new ArrayList<>();\n\n String fieldName = fieldAccessExpr.field.value;\n if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {\n BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =\n (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;\n fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);\n }\n\n \n if (fieldName.equals(\"_\")) {\n return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,\n fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());\n }\n\n BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);\n args.add(attributeNameLiteral);\n args.add(isOptionalAccessToLiteral(fieldAccessExpr));\n\n return 
createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,\n new ArrayList<>());\n }\n\n private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {\n return rewrite(\n createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess()), env);\n }\n\n private String createExpandedQName(String nsURI, String localName) {\n return \"{\" + nsURI + \"}\" + localName;\n }\n\n @Override\n public void visit(BLangIndexBasedAccess indexAccessExpr) {\n if (safeNavigate(indexAccessExpr)) {\n result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));\n return;\n }\n\n BLangIndexBasedAccess targetVarRef = indexAccessExpr;\n indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);\n\n \n \n BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());\n BType varRefType = Types.getImpliedType(effectiveType);\n indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);\n if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {\n indexAccessExpr.expr = types.addConversionExprIfRequired(indexAccessExpr.expr, varRefType);\n }\n\n if (varRefType.tag == TypeTags.MAP) {\n targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,\n indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);\n } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {\n targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,\n indexAccessExpr.indexExpr,\n (BVarSymbol) indexAccessExpr.symbol, false);\n } else if (types.isSubTypeOfList(varRefType)) {\n targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,\n indexAccessExpr.indexExpr);\n } else if (types.isAssignable(varRefType, symTable.xmlType)) {\n BLangExpression indexAccessExprExpr = indexAccessExpr.expr;\n \n if (Types.getImpliedType(indexAccessExprExpr.getBType()).tag == 
TypeTags.UNION) {\n indexAccessExprExpr = createTypeCastExpr(indexAccessExprExpr, symTable.xmlType);\n }\n targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExprExpr, indexAccessExpr.indexExpr);\n } else if (types.isAssignable(varRefType, symTable.stringType)) {\n indexAccessExpr.expr = types.addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);\n targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,\n indexAccessExpr.indexExpr);\n } else if (varRefType.tag == TypeTags.TABLE) {\n targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,\n indexAccessExpr.indexExpr);\n }\n\n targetVarRef.isLValue = indexAccessExpr.isLValue;\n targetVarRef.setBType(indexAccessExpr.getBType());\n result = targetVarRef;\n }\n\n @Override\n public void visit(BLangInvocation iExpr) {\n BLangExpression invocation = rewriteInvocation(iExpr, false);\n if (invocation.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {\n ((BLangTypeConversionExpr) invocation).expr =\n createStmtExpr((BLangInvocation) ((BLangTypeConversionExpr) invocation).expr);\n result = invocation;\n } else {\n result = createStmtExpr((BLangInvocation) invocation);\n }\n }\n\n \n @Override\n public void visit(BFunctionPointerInvocation invocation) {\n visitArgs(invocation);\n\n invocation.expr = rewriteExpr(invocation.expr);\n result = invocation;\n }\n\n private void visitArgs(BLangInvocation invocation) {\n \n reorderArguments(invocation);\n\n rewriteExprs(invocation.requiredArgs);\n fixStreamTypeCastsInInvocationParams(invocation);\n fixNonRestArgTypeCastInTypeParamInvocation(invocation);\n\n rewriteExprs(invocation.restArgs);\n }\n\n private BLangStatementExpression createStmtExpr(BLangInvocation invocation) {\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(invocation.pos);\n BType type = Types.getImpliedType(invocation.symbol.type);\n BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) type.tsymbol;\n\n if 
(invokableTypeSymbol == null) {\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, invocation);\n stmtExpr.setBType(invocation.getBType());\n return stmtExpr;\n }\n TreeMap arguments = new TreeMap<>();\n Map defaultValues = invokableTypeSymbol.defaultValues;\n\n for (int i = 0; i < invokableTypeSymbol.params.size(); i++) {\n BLangExpression arg;\n BLangSimpleVariableDef variableDef;\n BLangSimpleVarRef simpleVarRef;\n if (invocation instanceof BLangInvocation.BLangAttachedFunctionInvocation) {\n arg = invocation.requiredArgs.get(i + 1);\n } else {\n arg = invocation.requiredArgs.get(i);\n }\n BVarSymbol param = invokableTypeSymbol.params.get(i);\n String paramName = param.name.value;\n if (arg.getKind() != NodeKind.IGNORE_EXPR) {\n if (invocation.expr == arg) {\n arguments.put(paramName, arg);\n continue;\n }\n if (arg.impConversionExpr != null) {\n variableDef = createSimpleVarDef(\"$\" + paramName + \"$\" + funcParamCount++, param.type, arg);\n } else {\n variableDef = createSimpleVarDef(\"$\" + paramName + \"$\" + funcParamCount++, arg.getBType(), arg);\n }\n simpleVarRef = ASTBuilderUtil.createVariableRef(invocation.pos, variableDef.var.symbol);\n simpleVarRef = rewrite(simpleVarRef, env);\n blockStmt.addStatement(variableDef);\n arguments.put(paramName, simpleVarRef);\n if (invocation instanceof BLangInvocation.BLangAttachedFunctionInvocation) {\n invocation.requiredArgs.set(i + 1, simpleVarRef);\n } else {\n invocation.requiredArgs.set(i, simpleVarRef);\n }\n continue;\n }\n\n BInvokableSymbol invokableSymbol = defaultValues.get(Utils.unescapeBallerina(paramName));\n BLangInvocation closureInvocation = getFunctionPointerInvocation(invokableSymbol);\n for (int m = 0; m < invokableSymbol.params.size(); m++) {\n String langLibFuncParam = invokableSymbol.params.get(m).name.value;\n closureInvocation.requiredArgs.add(arguments.get(langLibFuncParam));\n }\n variableDef = createVarDef(\"$\" + paramName + \"$\" + funcParamCount++, 
closureInvocation.getBType(),\n closureInvocation, arg.pos);\n simpleVarRef = ASTBuilderUtil.createVariableRef(invocation.pos, variableDef.var.symbol);\n simpleVarRef = rewrite(simpleVarRef, env);\n blockStmt.addStatement(variableDef);\n arguments.put(paramName, simpleVarRef);\n if (invocation instanceof BLangInvocation.BLangAttachedFunctionInvocation) {\n invocation.requiredArgs.set(i + 1, simpleVarRef);\n } else {\n invocation.requiredArgs.set(i, simpleVarRef);\n }\n }\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, invocation);\n stmtExpr.setBType(invocation.getBType());\n\n return stmtExpr;\n }\n\n private BLangInvocation getFunctionPointerInvocation(BInvokableSymbol symbol) {\n BLangInvocation funcInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();\n funcInvocation.setBType(symbol.retType);\n funcInvocation.symbol = symbol;\n funcInvocation.name = ASTBuilderUtil.createIdentifier(symbol.pos, symbol.name.value);\n return visitFunctionPointerInvocation(funcInvocation);\n }\n\n @Override\n public void visit(BLangErrorConstructorExpr errorConstructorExpr) {\n if (errorConstructorExpr.positionalArgs.size() == 1) {\n errorConstructorExpr.positionalArgs.add(createNilLiteral());\n }\n errorConstructorExpr.positionalArgs.set(1,\n types.addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));\n rewriteExprs(errorConstructorExpr.positionalArgs);\n\n BLangExpression errorDetail;\n BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,\n ((BErrorType) Types.getImpliedType(errorConstructorExpr.getBType())).detailType);\n if (errorConstructorExpr.namedArgs.isEmpty()) {\n errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());\n } else {\n for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {\n BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();\n 
member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,\n symTable.stringType, Utils.unescapeJava(namedArg.name.value)));\n\n if (Types.getImpliedType(recordLiteral.getBType()).tag == TypeTags.RECORD) {\n member.valueExpr = types.addConversionExprIfRequired(namedArg.expr, symTable.anyType);\n } else {\n member.valueExpr = types.addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());\n }\n recordLiteral.fields.add(member);\n }\n errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),\n ((BErrorType) Types.getImpliedType(errorConstructorExpr.getBType())).detailType);\n }\n errorConstructorExpr.errorDetail = errorDetail;\n result = errorConstructorExpr;\n }\n\n @Override\n public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {\n if (!actionInvocation.async && actionInvocation.invokedInsideTransaction) {\n transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);\n }\n\n \n if (!actionInvocation.functionPointerInvocation && actionInvocation.async &&\n Symbols.isFlagOn(actionInvocation.symbol.type.flags, Flags.ISOLATED)) {\n addStrandAnnotationWithThreadAny(actionInvocation.pos);\n actionInvocation.addAnnotationAttachment(this.strandAnnotAttachement);\n ((BInvokableSymbol) actionInvocation.symbol)\n .addAnnotation(this.strandAnnotAttachement.annotationAttachmentSymbol);\n }\n\n BLangExpression invocation = rewriteInvocation(actionInvocation, actionInvocation.async);\n if (invocation.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {\n ((BLangTypeConversionExpr) invocation).expr =\n createStmtExpr((BLangInvocation) ((BLangTypeConversionExpr) invocation).expr);\n result = invocation;\n } else {\n result = createStmtExpr((BLangInvocation) invocation);\n }\n }\n\n private void addStrandAnnotationWithThreadAny(Location pos) {\n if (this.strandAnnotAttachement == null) {\n \n this.strandAnnotAttachement = annotationDesugar.createStrandAnnotationWithThreadAny(pos, env);\n }\n 
}\n\n @Override\n public void visit(BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) {\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n if (resourceAccessInvocation.invokedInsideTransaction) {\n transactionDesugar.startTransactionCoordinatorOnce(env, resourceAccessInvocation.pos);\n }\n\n \n BLangInvocation pathParamInvocation = createInvocationForPathParams(resourceAccessInvocation);\n reorderArguments(pathParamInvocation);\n\n BResourceFunction targetResourceFunc = resourceAccessInvocation.targetResourceFunc;\n List pathSegmentSymbols = targetResourceFunc.pathSegmentSymbols;\n\n int pathParamInvocationRequiredArgCount = pathParamInvocation.requiredArgs.size();\n\n BLangInvocation bLangInvocation = new BLangInvocation();\n\n \n \n \n \n \n \n \n \n BLangStatementExpression firstRequiredArgFromRestArg = null;\n boolean isFirstRequiredArgFromRestArgIncluded = false;\n for (int i = 0; i < pathParamInvocationRequiredArgCount; i++) {\n BLangExpression requiredArg = pathParamInvocation.requiredArgs.get(i);\n \n Name resourcePathName = pathSegmentSymbols.get(i).name;\n if (firstRequiredArgFromRestArg == null && requiredArg.getKind() == NodeKind.STATEMENT_EXPRESSION) {\n firstRequiredArgFromRestArg = (BLangStatementExpression) requiredArg;\n if (resourcePathName.value.equals(\"^\")) {\n isFirstRequiredArgFromRestArgIncluded = true;\n bLangInvocation.requiredArgs.add(requiredArg);\n continue;\n }\n }\n\n if (resourcePathName.value.equals(\"^\")) {\n if (firstRequiredArgFromRestArg != null && !isFirstRequiredArgFromRestArgIncluded) {\n BLangStatementExpression statementExpression = new BLangStatementExpression();\n statementExpression.expr = requiredArg;\n statementExpression.stmt = firstRequiredArgFromRestArg.stmt;\n statementExpression.setBType(requiredArg.getBType());\n bLangInvocation.requiredArgs.add(statementExpression);\n isFirstRequiredArgFromRestArgIncluded = true;\n } else {\n 
bLangInvocation.requiredArgs.add(requiredArg);\n }\n }\n }\n\n Name lastResourcePathName = pathSegmentSymbols.get(pathSegmentSymbols.size() - 1).name;\n if (lastResourcePathName.value.equals(\"^^\")) {\n \n for (BLangExpression restArg : pathParamInvocation.restArgs) {\n if (firstRequiredArgFromRestArg != null && !isFirstRequiredArgFromRestArgIncluded &&\n restArg.getKind() == NodeKind.STATEMENT_EXPRESSION) {\n BLangStatementExpression restArgStmtExpr = (BLangStatementExpression) restArg;\n ((BLangBlockStmt) restArgStmtExpr.stmt).stmts.add(0,\n ((BLangBlockStmt) firstRequiredArgFromRestArg.stmt).stmts.get(0));\n }\n bLangInvocation.requiredArgs.add(restArg);\n }\n }\n\n bLangInvocation.requiredArgs.addAll(resourceAccessInvocation.requiredArgs);\n bLangInvocation.pkgAlias = resourceAccessInvocation.pkgAlias;\n bLangInvocation.name = resourceAccessInvocation.name;\n bLangInvocation.expr = resourceAccessInvocation.expr;\n bLangInvocation.restArgs = resourceAccessInvocation.restArgs;\n bLangInvocation.symbol = resourceAccessInvocation.symbol;\n bLangInvocation.setBType(resourceAccessInvocation.getBType());\n bLangInvocation.parent = resourceAccessInvocation.parent;\n bLangInvocation.pos = resourceAccessInvocation.pos;\n result = rewriteExpr(bLangInvocation);\n }\n\n private BLangInvocation createInvocationForPathParams(\n BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) {\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n BLangInvocation bLangInvocation = new BLangInvocation();\n\n BInvokableSymbol invokableSymbol = new BInvokableSymbol(\n resourceAccessInvocation.symbol.tag,\n resourceAccessInvocation.symbol.flags,\n resourceAccessInvocation.symbol.name,\n resourceAccessInvocation.symbol.pkgID,\n resourceAccessInvocation.symbol.type,\n resourceAccessInvocation.symbol,\n resourceAccessInvocation.symbol.pos, VIRTUAL);\n\n BResourceFunction targetResourceFunc = resourceAccessInvocation.targetResourceFunc;\n List 
pathSegmentSymbols = targetResourceFunc.pathSegmentSymbols;\n List resourceAccessPathSegments = resourceAccessInvocation.resourceAccessPathSegments.exprs;\n\n List invocationParams = new ArrayList<>(pathSegmentSymbols.size());\n\n int pathSegmentCount = pathSegmentSymbols.size();\n BResourcePathSegmentSymbol lastPathSegmentSym = pathSegmentSymbols.get(pathSegmentSymbols.size() - 1);\n if (lastPathSegmentSym.kind == SymbolKind.RESOURCE_PATH_REST_PARAM_SEGMENT) {\n invokableSymbol.restParam = new BVarSymbol(0, Names.EMPTY, this.env.scope.owner.pkgID,\n new BArrayType(lastPathSegmentSym.type), this.env.scope.owner, lastPathSegmentSym.pos, VIRTUAL);\n pathSegmentCount--;\n }\n\n if (pathSegmentCount > 0 && lastPathSegmentSym.kind != SymbolKind.RESOURCE_ROOT_PATH_SEGMENT) {\n invocationParams.addAll(pathSegmentSymbols.subList(0, pathSegmentCount).stream()\n .map(s -> new BVarSymbol(0, Names.EMPTY, this.env.scope.owner.pkgID, s.type,\n this.env.scope.owner, s.pos, VIRTUAL)).toList());\n }\n\n invokableSymbol.params = invocationParams;\n\n bLangInvocation.symbol = invokableSymbol;\n\n for (int i = 0; i < resourceAccessPathSegments.size(); i++) {\n BLangExpression resourceAccessPathSeg = resourceAccessPathSegments.get(i);\n if (resourceAccessPathSeg.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {\n bLangInvocation.restArgs.add(createRestArgsExpression(\n ((BLangListConstructorSpreadOpExpr) resourceAccessPathSeg).expr));\n } else if (i > invocationParams.size() - 1) {\n bLangInvocation.restArgs.add(resourceAccessPathSeg);\n } else {\n bLangInvocation.requiredArgs.add(resourceAccessPathSeg);\n }\n }\n\n return bLangInvocation;\n }\n\n private BLangRestArgsExpression createRestArgsExpression(BLangExpression expr) {\n BLangRestArgsExpression bLangRestArgsExpression = new BLangRestArgsExpression();\n bLangRestArgsExpression.expr = expr;\n bLangRestArgsExpression.pos = expr.pos;\n bLangRestArgsExpression.setBType(expr.getBType());\n bLangRestArgsExpression.expectedType = 
bLangRestArgsExpression.getBType();\n return bLangRestArgsExpression;\n }\n\n private BLangExpression rewriteInvocation(BLangInvocation invocation, boolean async) {\n BLangInvocation invRef = invocation;\n\n if (!enclLocks.isEmpty()) {\n BLangLockStmt lock = enclLocks.peek();\n lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);\n }\n\n visitArgs(invocation);\n\n annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,\n invocation.symbol.pkgID, invocation.symbol.owner, env);\n\n if (invocation.functionPointerInvocation) {\n return visitFunctionPointerInvocation(invocation);\n }\n result = invRef;\n\n BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;\n if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {\n BType retType = unifier.build(invSym.retType);\n invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);\n }\n\n if (invocation.expr == null) {\n BLangExpression expression = fixTypeCastInTypeParamInvocation(invocation, invRef);\n if (invocation.exprSymbol == null) {\n return expression;\n }\n invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);\n invocation.expr = rewriteExpr(invocation.expr);\n }\n switch (Types.getImpliedType(invocation.expr.getBType()).tag) {\n case TypeTags.OBJECT:\n case TypeTags.RECORD:\n case TypeTags.UNION:\n if (!invocation.langLibInvocation) {\n invocation.expr = rewriteExpr(invocation.expr);\n List argExprs = new ArrayList<>(invocation.requiredArgs);\n argExprs.add(0, invocation.expr);\n BLangAttachedFunctionInvocation attachedFunctionInvocation =\n new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,\n invocation.symbol, invocation.getBType(),\n invocation.expr, async);\n attachedFunctionInvocation.name = invocation.name;\n attachedFunctionInvocation.annAttachments = invocation.annAttachments;\n invRef = 
attachedFunctionInvocation;\n }\n break;\n }\n populateOCEInvocation(invocation, invRef);\n return fixTypeCastInTypeParamInvocation(invocation, invRef);\n }\n\n private void populateOCEInvocation(BLangInvocation invocation,\n BLangInvocation invRef) {\n if (invocation.objectInitMethod && Symbols.isFlagOn(invocation.expr.getBType().flags, Flags.OBJECT_CTOR)) {\n BObjectType initializingObject = (BObjectType) invocation.expr.getBType();\n BLangClassDefinition classDef = initializingObject.classDef;\n if (classDef.hasClosureVars) {\n OCEDynamicEnvironmentData oceEnvData = initializingObject.classDef.oceEnvData;\n if (oceEnvData.attachedFunctionInvocation == null) {\n oceEnvData.attachedFunctionInvocation = (BLangAttachedFunctionInvocation) invRef;\n }\n }\n }\n }\n\n private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {\n if (!iExpr.langLibInvocation) {\n return;\n }\n\n List requiredArgs = iExpr.requiredArgs;\n\n List params = ((BInvokableSymbol) iExpr.symbol).params;\n\n for (int i = 0; i < requiredArgs.size(); i++) {\n requiredArgs.set(i, types.addConversionExprIfRequired(requiredArgs.get(i), params.get(i).type));\n }\n }\n\n /* This function is a workaround and need improvement\n * Notes for improvement :\n * 1. Both arguments are same.\n * 2. Due to current type param logic we put type param flag on the original type.\n * 3. Error type having Cloneable type with type param flag, change expression type by this code.\n * 4. 
using error type is a problem as Cloneable type is an typeparm eg: ExprBodiedFunctionTest\n * added never to CloneableType type param\n * @typeParam type\n * CloneableType Cloneable|never;\n *\n */\n private BLangExpression fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {\n var returnTypeOfInvokable = ((BInvokableSymbol) iExpr.symbol).retType;\n if (!iExpr.langLibInvocation && !TypeParamAnalyzer.containsTypeParam(returnTypeOfInvokable)) {\n return genIExpr;\n }\n\n \n BType originalInvType = genIExpr.getBType();\n if (!genIExpr.async) {\n genIExpr.setBType(returnTypeOfInvokable);\n }\n return types.addConversionExprIfRequired(genIExpr, originalInvType);\n }\n\n private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {\n List requiredArgs = iExpr.requiredArgs;\n List params = ((BInvokableSymbol) iExpr.symbol).params;\n if (!params.isEmpty()) {\n for (int i = 0; i < requiredArgs.size(); i++) {\n BVarSymbol param = params.get(i);\n if (Types.getImpliedType(param.type).tag == TypeTags.STREAM) {\n requiredArgs.set(i, types.addConversionExprIfRequired(requiredArgs.get(i), param.type));\n }\n }\n }\n }\n\n private BLangLiteral createNilLiteral() {\n BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();\n literal.value = null;\n literal.setBType(symTable.nilType);\n return literal;\n }\n\n @Override\n public void visit(BLangTypeInit typeInitExpr) {\n if (Types.getImpliedType(typeInitExpr.getBType()).tag == TypeTags.STREAM) {\n result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));\n } else {\n result = rewrite(desugarObjectTypeInit(typeInitExpr), env);\n }\n }\n\n private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {\n typeInitExpr.desugared = true;\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);\n\n \n BLangInvocation initInvocation = (BLangInvocation) typeInitExpr.initInvocation;\n initInvocation.objectInitMethod = true;\n BType 
objType = getObjectType(typeInitExpr.getBType());\n BLangSimpleVariableDef objVarDef = createVarDef(\"$obj$\", objType, typeInitExpr, typeInitExpr.pos);\n \n BLangSimpleVariableDef initInvRetValVarDef = createVarDef(\"$temp$\", initInvocation.getBType(),\n initInvocation, initInvocation.pos);\n objVarDef.var.name.pos = symTable.builtinPos;\n BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);\n BLangSimpleVarRef objInitVarRef = ASTBuilderUtil.createVariableRef(initInvocation.pos,\n initInvRetValVarDef.var.symbol);\n blockStmt.addStatement(objVarDef);\n blockStmt.addStatement(initInvRetValVarDef);\n initInvocation.exprSymbol = objVarDef.var.symbol;\n initInvocation.symbol =\n ((BObjectTypeSymbol) Types.getImpliedType(objType).tsymbol).generatedInitializerFunc.symbol;\n\n \n if (initInvocation.getBType().tag == TypeTags.NIL) {\n initInvocation.name.value = GENERATED_INIT_SUFFIX.value;\n BLangNode parent = initInvocation.parent;\n if (parent != null && parent.getKind() == NodeKind.OBJECT_CTOR_EXPRESSION) {\n BLangObjectConstructorExpression oceExpression = (BLangObjectConstructorExpression) parent;\n OCEDynamicEnvironmentData oceData = oceExpression.classNode.oceEnvData;\n oceData.initInvocation = typeInitExpr.initInvocation;\n }\n typeInitExpr.initInvocation = objInitVarRef;\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);\n stmtExpr.setBType(objVarRef.symbol.type);\n return stmtExpr;\n }\n\n \n BLangSimpleVariableDef resultVarDef = createVarDef(\"$result$\", typeInitExpr.getBType(), null, typeInitExpr.pos);\n blockStmt.addStatement(resultVarDef);\n\n \n \n \n \n \n\n \n BLangSimpleVarRef initRetValVarRefInCondition =\n ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);\n BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);\n BLangTypeTestExpr isErrorTest =\n ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, 
initRetValVarRefInCondition, getErrorTypeNode());\n isErrorTest.setBType(symTable.booleanType);\n\n \n BLangSimpleVarRef thenInitRetValVarRef =\n ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);\n BLangSimpleVarRef thenResultVarRef =\n ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);\n BLangAssignment errAssignment =\n ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);\n thenStmt.addStatement(errAssignment);\n\n \n BLangSimpleVarRef elseResultVarRef =\n ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);\n BLangAssignment objAssignment =\n ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);\n BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);\n elseStmt.addStatement(objAssignment);\n\n BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);\n blockStmt.addStatement(ifelse);\n\n BLangSimpleVarRef resultVarRef =\n ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);\n stmtExpr.setBType(resultVarRef.symbol.type);\n return stmtExpr;\n }\n\n private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {\n BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope\n .lookup(Names.CONSTRUCT_STREAM).symbol;\n\n BStreamType referredStreamType = (BStreamType) Types.getImpliedType(typeInitExpr.getBType());\n BType constraintType = referredStreamType.constraint;\n BType constraintTdType = new BTypedescType(constraintType, symTable.typeDesc.tsymbol);\n BLangTypedescExpr constraintTdExpr = new BLangTypedescExpr();\n constraintTdExpr.resolvedType = constraintType;\n constraintTdExpr.setBType(constraintTdType);\n\n BType completionType = referredStreamType.completionType;\n 
BType completionTdType = new BTypedescType(completionType, symTable.typeDesc.tsymbol);\n BLangTypedescExpr completionTdExpr = new BLangTypedescExpr();\n completionTdExpr.resolvedType = completionType;\n completionTdExpr.setBType(completionTdType);\n\n List args = new ArrayList<>(Lists.of(constraintTdExpr, completionTdExpr));\n if (!typeInitExpr.argsExpr.isEmpty()) {\n args.add(typeInitExpr.argsExpr.get(0));\n }\n BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(\n typeInitExpr.pos, symbol, args, symResolver);\n streamConstructInvocation.setBType(new BStreamType(TypeTags.STREAM, constraintType, completionType, null));\n return streamConstructInvocation;\n }\n\n private BLangSimpleVariableDef createSimpleVarDef(String name, BType type, BLangExpression expr) {\n BVarSymbol varSymbol = new BVarSymbol(0, Names.fromString(name), this.env.scope.owner.pkgID, type,\n this.env.scope.owner, expr.pos, VIRTUAL);\n BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(expr.pos, name, type, expr, varSymbol);\n BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(expr.pos);\n simpleVariableDef.var = simpleVariable;\n simpleVariableDef.setBType(simpleVariable.getBType());\n return simpleVariableDef;\n }\n\n private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, Location location) {\n BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, Names.fromString(name));\n \n if (objSym == null || objSym == symTable.notFoundSymbol) {\n objSym = new BVarSymbol(0, Names.fromString(name), this.env.scope.owner.pkgID, type,\n this.env.scope.owner, location, VIRTUAL);\n }\n BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) objSym);\n BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(location);\n objVarDef.var = objVar;\n objVarDef.setBType(objVar.getBType());\n return objVarDef;\n }\n\n private BType 
getObjectType(BType bType) {\n BType type = Types.getImpliedType(bType);\n if (type.tag == TypeTags.OBJECT) {\n return bType;\n } else if (type.tag == TypeTags.UNION) {\n return ((BUnionType) type).getMemberTypes().stream()\n .filter(t -> Types.getImpliedType(t).tag == TypeTags.OBJECT)\n .findFirst()\n .orElse(symTable.noType);\n }\n\n throw new IllegalStateException(\"None object type '\" + type.toString() + \"' found in object init context\");\n }\n\n BLangErrorType getErrorTypeNode() {\n BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();\n errorTypeNode.setBType(symTable.errorType);\n errorTypeNode.pos = symTable.builtinPos;\n return errorTypeNode;\n }\n\n BLangErrorType getErrorOrNillTypeNode() {\n BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();\n errorTypeNode.setBType(symTable.errorOrNilType);\n return errorTypeNode;\n }\n\n @Override\n public void visit(BLangTernaryExpr ternaryExpr) {\n /*\n * First desugar to if-else:\n *\n * T $result$;\n * if () {\n * $result$ = thenExpr;\n * } else {\n * $result$ = elseExpr;\n * }\n *\n */\n BLangSimpleVariableDef resultVarDef =\n createVarDef(\"$ternary_result$\", ternaryExpr.getBType(), null, ternaryExpr.pos);\n BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);\n BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);\n\n \n BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);\n BLangAssignment thenAssignment =\n ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);\n thenBody.addStatement(thenAssignment);\n\n \n BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);\n BLangAssignment elseAssignment =\n ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);\n elseBody.addStatement(elseAssignment);\n\n \n 
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);\n BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);\n\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);\n stmtExpr.setBType(ternaryExpr.getBType());\n\n result = rewriteExpr(stmtExpr);\n }\n\n @Override\n public void visit(BLangWaitExpr waitExpr) {\n \n if (waitExpr.getExpression().getKind() == NodeKind.BINARY_EXPR) {\n waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitExpr.getExpression(), new ArrayList<>());\n } else { \n waitExpr.exprList = Collections.singletonList(rewriteExpr(waitExpr.getExpression()));\n }\n result = waitExpr;\n }\n\n private List collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List exprs) {\n visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);\n visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);\n return exprs;\n }\n\n private void visitBinaryExprOfWait(BLangExpression expr, List exprs) {\n if (expr.getKind() == NodeKind.BINARY_EXPR) {\n collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);\n } else {\n expr = rewriteExpr(expr);\n exprs.add(expr);\n }\n }\n\n @Override\n public void visit(BLangWaitForAllExpr waitExpr) {\n waitExpr.keyValuePairs.forEach(keyValue -> {\n if (keyValue.valueExpr != null) {\n keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);\n } else {\n keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);\n }\n });\n BLangExpression expr = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());\n expr.pos = waitExpr.pos;\n result = rewriteExpr(expr);\n }\n\n @Override\n public void visit(BLangTrapExpr trapExpr) {\n trapExpr.expr = rewriteExpr(trapExpr.expr);\n if (Types.getImpliedType(trapExpr.expr.getBType()).tag != TypeTags.NIL) {\n trapExpr.expr = 
types.addConversionExprIfRequired(trapExpr.expr, trapExpr.getBType());\n }\n result = trapExpr;\n }\n\n @Override\n public void visit(BLangBinaryExpr binaryExpr) {\n if (isNullableBinaryExpr(binaryExpr)) {\n BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);\n result = rewrite(stmtExpr, env);\n return;\n }\n\n if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {\n BLangExpression lhsExpr = binaryExpr.lhsExpr;\n BLangExpression rhsExpr = binaryExpr.rhsExpr;\n\n \n lhsExpr = createTypeCastExpr(lhsExpr, symTable.intType);\n rhsExpr = createTypeCastExpr(rhsExpr, symTable.intType);\n\n if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {\n rhsExpr = getModifiedIntRangeEndExpr(rhsExpr);\n }\n\n result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, lhsExpr, rhsExpr));\n return;\n }\n\n if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {\n visitBinaryLogicalExpr(binaryExpr);\n return;\n }\n\n OperatorKind binaryOpKind = binaryExpr.opKind;\n\n if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||\n binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||\n binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||\n binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {\n checkByteTypeIncompatibleOperations(binaryExpr);\n }\n\n binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);\n binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);\n result = binaryExpr;\n\n int rhsExprTypeTag = Types.getImpliedType(binaryExpr.rhsExpr.getBType()).tag;\n int lhsExprTypeTag = Types.getImpliedType(binaryExpr.lhsExpr.getBType()).tag;\n\n \n if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||\n binaryExpr.opKind == OperatorKind.NOT_EQUAL ||\n binaryExpr.opKind == OperatorKind.REF_EQUAL ||\n binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {\n if 
(TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);\n return;\n }\n }\n\n boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);\n boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);\n\n \n if (lhsExprTypeTag == rhsExprTypeTag) {\n if (!isBinaryShiftOperator && !isArithmeticOperator) {\n return;\n }\n if (types.isValueType(binaryExpr.lhsExpr.getBType())) {\n return;\n }\n }\n\n if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&\n (rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {\n \n binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,\n binaryExpr.lhsExpr.pos, symTable.xmlType);\n return;\n }\n\n if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&\n (lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {\n \n binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,\n binaryExpr.rhsExpr.pos, symTable.xmlType);\n return;\n }\n\n if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {\n createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.DECIMAL) {\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());\n return;\n }\n\n if (rhsExprTypeTag == TypeTags.DECIMAL) {\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.FLOAT) {\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());\n return;\n 
}\n\n if (rhsExprTypeTag == TypeTags.FLOAT) {\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());\n return;\n }\n\n if (isArithmeticOperator) {\n createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);\n return;\n }\n\n if (isBinaryShiftOperator) {\n createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);\n return;\n }\n }\n\n private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {\n /*\n * int? x = 3;\n * int? y = 5;\n * int? z = x + y;\n * Above is desugared to\n * int? $result$;\n * \n * int? $lhsExprVar$ = x;\n * int? $rhsExprVar$ = y;\n * if (lhsVar is () or rhsVar is ()) {\n * $result$ = ();\n * } else {\n * $result$ = $lhsExprVar$ + $rhsExprVar$;\n * }\n * int z = $result$;\n */\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);\n\n BUnionType exprBType = (BUnionType) binaryExpr.getBType();\n BType nonNilType = exprBType.getMemberTypes().iterator().next();\n\n BType rhsType;\n BType lhsType;\n if (symResolver.isArithmeticOperator(binaryExpr.opKind)) {\n rhsType = nonNilType;\n lhsType = nonNilType;\n } else {\n \n rhsType = getBinaryExprOperandNonNilType(binaryExpr.rhsExpr.getBType());\n lhsType = getBinaryExprOperandNonNilType(binaryExpr.lhsExpr.getBType());\n }\n\n if (binaryExpr.lhsExpr.getBType().isNullable()) {\n binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);\n }\n\n BLangSimpleVariableDef tempVarDef = createVarDef(\"$result$\",\n binaryExpr.getBType(), null, binaryExpr.pos);\n BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);\n blockStmt.addStatement(tempVarDef);\n\n BLangSimpleVariableDef lhsVarDef = createVarDef(\"$lhsExprVar$\", binaryExpr.lhsExpr.getBType(),\n binaryExpr.lhsExpr, binaryExpr.pos);\n BLangSimpleVarRef lhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, lhsVarDef.var.symbol);\n 
blockStmt.addStatement(lhsVarDef);\n\n BLangSimpleVariableDef rhsVarDef = createVarDef(\"$rhsExprVar$\", binaryExpr.rhsExpr.getBType(),\n binaryExpr.rhsExpr, binaryExpr.pos);\n BLangSimpleVarRef rhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, rhsVarDef.var.symbol);\n blockStmt.addStatement(rhsVarDef);\n\n BLangTypeTestExpr typeTestExprOne = getNilTypeTestExpr(binaryExpr.pos, lhsVarRef);\n BLangTypeTestExpr typeTestExprTwo = getNilTypeTestExpr(binaryExpr.pos, rhsVarRef);\n\n BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,\n typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);\n\n BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);\n BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);\n bLangAssignmentIf.varRef = tempVarRef;\n bLangAssignmentIf.expr = createNilLiteral();\n\n BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);\n BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);\n bLangAssignmentElse.varRef = tempVarRef;\n\n BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, lhsVarRef, rhsVarRef,\n nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);\n newBinaryExpr.lhsExpr = createTypeCastExpr(lhsVarRef, lhsType);\n newBinaryExpr.rhsExpr = createTypeCastExpr(rhsVarRef, rhsType);\n bLangAssignmentElse.expr = newBinaryExpr;\n\n BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);\n ifStatement.expr = ifBlockCondition;\n ifStatement.body = ifBody;\n ifStatement.elseStmt = elseBody;\n\n BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);\n stmtExpr.setBType(binaryExpr.getBType());\n\n return stmtExpr;\n }\n\n BLangTypeTestExpr getNilTypeTestExpr(Location pos, BLangExpression expr) {\n return createTypeCheckExpr(pos, expr, getNillTypeNode());\n 
}\n\n private BType getBinaryExprOperandNonNilType(BType operandType) {\n return operandType.isNullable() ? types.getSafeType(operandType, true, false) : operandType;\n }\n\n private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {\n if ((binaryExpr.lhsExpr.getBType() != null && binaryExpr.rhsExpr.getBType() != null) &&\n (binaryExpr.rhsExpr.getBType().isNullable() ||\n binaryExpr.lhsExpr.getBType().isNullable())) {\n switch (binaryExpr.getOperatorKind()) {\n case ADD:\n case SUB:\n case MUL:\n case DIV:\n case MOD:\n case BITWISE_LEFT_SHIFT:\n case BITWISE_RIGHT_SHIFT:\n case BITWISE_UNSIGNED_RIGHT_SHIFT:\n case BITWISE_AND:\n case BITWISE_OR:\n case BITWISE_XOR:\n return true;\n }\n }\n return false;\n }\n\n private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,\n int rhsExprTypeTag) {\n if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||\n (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||\n (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {\n return;\n }\n if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {\n if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {\n binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,\n binaryExpr.rhsExpr.pos, symTable.xmlType);\n return;\n }\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);\n return;\n }\n if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {\n if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {\n binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,\n binaryExpr.rhsExpr.pos, symTable.xmlType);\n return;\n }\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);\n return;\n }\n binaryExpr.lhsExpr = 
createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());\n }\n\n private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,\n int rhsExprTypeTag) {\n boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);\n boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);\n if (isLhsIntegerType || lhsExprTypeTag == TypeTags.BYTE) {\n if (isRhsIntegerType || rhsExprTypeTag == TypeTags.BYTE) {\n return;\n }\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);\n return;\n }\n\n if (isRhsIntegerType || rhsExprTypeTag == TypeTags.BYTE) {\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);\n return;\n }\n\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);\n }\n\n private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,\n int rhsExprTypeTag) {\n boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);\n boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);\n BType lhsExprType = binaryExpr.lhsExpr.getBType();\n BType rhsExprType = binaryExpr.rhsExpr.getBType();\n\n if ((isLhsIntegerType && isRhsIntegerType) || (lhsExprTypeTag == TypeTags.BYTE &&\n rhsExprTypeTag == TypeTags.BYTE)) {\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.DECIMAL) {\n addTypeCastForBinaryExprB(binaryExpr, lhsExprType, rhsExprType);\n return;\n }\n\n if (rhsExprTypeTag == TypeTags.DECIMAL) {\n addTypeCastForBinaryExprA(binaryExpr, rhsExprType, lhsExprType);\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.FLOAT) {\n addTypeCastForBinaryExprB(binaryExpr, lhsExprType, rhsExprType);\n return;\n }\n\n if (rhsExprTypeTag == TypeTags.FLOAT) {\n addTypeCastForBinaryExprA(binaryExpr, rhsExprType, lhsExprType);\n return;\n 
}\n\n if (isLhsIntegerType && !isRhsIntegerType) {\n addTypeCastForBinaryExprB(binaryExpr, symTable.intType, rhsExprType);\n return;\n }\n\n if (!isLhsIntegerType && isRhsIntegerType) {\n addTypeCastForBinaryExprA(binaryExpr, symTable.intType, lhsExprType);\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.BYTE || rhsExprTypeTag == TypeTags.BYTE) {\n if ((lhsExprTypeTag == TypeTags.UNION && lhsExprType.isNullable()) ||\n (rhsExprTypeTag == TypeTags.UNION && rhsExprType.isNullable())) {\n binaryExpr.lhsExpr = addNilType(symTable.intType, binaryExpr.lhsExpr);\n binaryExpr.rhsExpr = addNilType(symTable.intType, binaryExpr.rhsExpr);\n return;\n }\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);\n return;\n }\n\n boolean isLhsStringType = TypeTags.isStringTypeTag(lhsExprTypeTag);\n boolean isRhsStringType = TypeTags.isStringTypeTag(rhsExprTypeTag);\n\n if (isLhsStringType && isRhsStringType) {\n return;\n }\n\n if (isLhsStringType && !isRhsStringType) {\n addTypeCastForBinaryExprB(binaryExpr, symTable.stringType, rhsExprType);\n return;\n }\n\n if (!isLhsStringType && isRhsStringType) {\n addTypeCastForBinaryExprA(binaryExpr, symTable.stringType, lhsExprType);\n }\n }\n\n private void addTypeCastForBinaryExprA(BLangBinaryExpr binaryExpr, BType rhsExprType, BType lhsExprType) {\n if (Types.getImpliedType(lhsExprType).tag == TypeTags.UNION && lhsExprType.isNullable()) {\n binaryExpr.rhsExpr = addNilType(rhsExprType, binaryExpr.rhsExpr);\n } else {\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, rhsExprType);\n }\n }\n\n private void addTypeCastForBinaryExprB(BLangBinaryExpr binaryExpr, BType lhsExprType, BType rhsExprType) {\n if (Types.getImpliedType(rhsExprType).tag == TypeTags.UNION && rhsExprType.isNullable()) {\n binaryExpr.lhsExpr = addNilType(lhsExprType, binaryExpr.lhsExpr);\n } else {\n binaryExpr.rhsExpr = 
createTypeCastExpr(binaryExpr.rhsExpr, lhsExprType);\n }\n }\n\n private BLangExpression addNilType(BType exprType, BLangExpression expr) {\n LinkedHashSet members = new LinkedHashSet<>(2);\n members.add(exprType);\n members.add(symTable.nilType);\n BUnionType unionType = new BUnionType(null, members, true, false);\n return createTypeCastExpr(expr, unionType);\n }\n\n private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,\n BLangExpression rhsExpr) {\n BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope\n .lookup(Names.CREATE_INT_RANGE).symbol;\n BLangInvocation createIntRangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(location, symbol,\n new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);\n createIntRangeInvocation.setBType(symTable.intRangeType);\n return createIntRangeInvocation;\n }\n\n private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {\n if (binaryExpr.expectedType == null) {\n return;\n }\n\n int rhsExprTypeTag = Types.getImpliedType(binaryExpr.rhsExpr.getBType()).tag;\n int lhsExprTypeTag = Types.getImpliedType(binaryExpr.lhsExpr.getBType()).tag;\n if (rhsExprTypeTag != TypeTags.BYTE && lhsExprTypeTag != TypeTags.BYTE) {\n return;\n }\n\n int resultTypeTag = Types.getImpliedType(binaryExpr.expectedType).tag;\n if (resultTypeTag == TypeTags.INT) {\n if (rhsExprTypeTag == TypeTags.BYTE) {\n binaryExpr.rhsExpr = types.addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);\n }\n\n if (lhsExprTypeTag == TypeTags.BYTE) {\n binaryExpr.lhsExpr = types.addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);\n }\n }\n }\n\n /**\n * This method checks whether given binary expression is related to shift operation.\n * If its true, then both lhs and rhs of the binary expression will be converted to 'int' type.\n *
     * <p>
\n * byte a = 12;\n * byte b = 34;\n * int i = 234;\n * int j = -4;\n *
     * <p>
\n * true: where binary expression's expected type is 'int'\n * int i1 = a >> b;\n * int i2 = a << b;\n * int i3 = a >> i;\n * int i4 = a << i;\n * int i5 = i >> j;\n * int i6 = i << j;\n */\n private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {\n return binaryExpr.opKind == OperatorKind.BITWISE_LEFT_SHIFT ||\n binaryExpr.opKind == OperatorKind.BITWISE_RIGHT_SHIFT ||\n binaryExpr.opKind == OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT;\n }\n\n @Override\n public void visit(BLangElvisExpr elvisExpr) {\n Location pos = elvisExpr.pos;\n String resultVarName = \"_$result$_\";\n BType resultType = elvisExpr.getBType();\n BLangSimpleVariable resultVar =\n ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,\n new BVarSymbol(0, Names.fromString(resultVarName),\n this.env.scope.owner.pkgID, resultType,\n this.env.scope.owner, pos, VIRTUAL));\n BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);\n resultVarDef.desugared = true;\n BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);\n\n String lhsResultVarName = GEN_VAR_PREFIX.value;\n BLangSimpleVariable lhsResultVar =\n ASTBuilderUtil.createVariable(pos, lhsResultVarName, elvisExpr.lhsExpr.getBType(), elvisExpr.lhsExpr,\n new BVarSymbol(0, Names.fromString(lhsResultVarName),\n this.env.scope.owner.pkgID, elvisExpr.lhsExpr.getBType(),\n this.env.scope.owner, elvisExpr.pos, VIRTUAL));\n BLangSimpleVariableDef lhsResultVarDef = ASTBuilderUtil.createVariableDef(pos, lhsResultVar);\n BLangSimpleVarRef lhsResultVarRef = ASTBuilderUtil.createVariableRef(pos, lhsResultVar.symbol);\n\n BLangAssignment nilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef, elvisExpr.rhsExpr);\n BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);\n ifBody.addStatement(nilAssignment);\n\n BLangAssignment notNilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,\n createTypeCastExpr(lhsResultVarRef, 
resultType));\n BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(pos);\n elseBody.addStatement(notNilAssignment);\n\n BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos, getNilTypeTestExpr(pos, lhsResultVarRef),\n ifBody, elseBody);\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{\n add(resultVarDef);\n add(lhsResultVarDef);\n add(ifStmt);\n }});\n BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);\n stmtExpr.setBType(resultType);\n result = rewriteExpr(stmtExpr);\n }\n\n @Override\n public void visit(BLangUnaryExpr unaryExpr) {\n\n if (isNullableUnaryExpr(unaryExpr)) {\n BLangStatementExpression statementExpression = createStmtExprForNilableUnaryExpr(unaryExpr);\n result = rewrite(statementExpression, env);\n return;\n }\n\n if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) {\n \n \n rewriteBitwiseComplementOperator(unaryExpr);\n return;\n }\n\n \n \n if (types.isExpressionInUnaryValid(unaryExpr.expr) &&\n Types.getImpliedType(unaryExpr.expectedType).tag == TypeTags.FINITE) {\n result = rewriteExpr(Types.constructNumericLiteralFromUnaryExpr(unaryExpr));\n return;\n }\n\n OperatorKind opKind = unaryExpr.operator;\n if (opKind == OperatorKind.ADD || opKind == OperatorKind.SUB) {\n createTypeCastExprForUnaryPlusAndMinus(unaryExpr);\n }\n unaryExpr.expr = rewriteExpr(unaryExpr.expr);\n result = unaryExpr;\n }\n\n private void createTypeCastExprForUnaryPlusAndMinus(BLangUnaryExpr unaryExpr) {\n BLangExpression expr = unaryExpr.expr;\n if (TypeTags.isIntegerTypeTag(Types.getImpliedType(expr.getBType()).tag)) {\n return;\n }\n unaryExpr.expr = createTypeCastExpr(expr, unaryExpr.getBType());\n }\n\n /**\n * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary expression as below.\n * Example : ~a -> a ^ -1;\n * ~ 11110011 -> 00001100\n * 11110011 ^ 11111111 -> 00001100\n *\n * @param unaryExpr the bitwise complement expression\n 
     */
    private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
        final Location pos = unaryExpr.pos;
        // Build the replacement binary expression: (expr ^ mask).
        final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
        binaryExpr.pos = pos;
        binaryExpr.opKind = OperatorKind.BITWISE_XOR;
        binaryExpr.lhsExpr = unaryExpr.expr;
        if (TypeTags.BYTE == Types.getImpliedType(unaryExpr.getBType()).tag) {
            // byte complement: XOR with 0xff so the result type stays byte.
            binaryExpr.setBType(symTable.byteType);
            binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
            binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                    symTable.byteType, symTable.byteType);
        } else {
            // int complement: XOR with -1 (all bits set).
            binaryExpr.setBType(symTable.intType);
            binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
            binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                    symTable.intType, symTable.intType);
        }
        // Hand the synthesized XOR expression back through the normal rewrite pipeline.
        result = rewriteExpr(binaryExpr);
    }

    // Desugars a nilable unary expression (e.g. `+x` where `x` is `int?`) into a statement
    // expression with an explicit nil check, as illustrated in the comment below.
    private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
        /*
         * int? x = 3;
         * int? y = +x;
         *
         *
         * Above is desugared to
         * int? 
$result$;\n * if (x is ()) {\n * $result$ = ();\n * } else {\n * $result$ = +x;\n * }\n * int y = $result$\n */\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);\n\n BUnionType exprBType = (BUnionType) unaryExpr.getBType();\n BType nilLiftType = exprBType.getMemberTypes().iterator().next();\n\n unaryExpr.expr = rewriteExpr(unaryExpr.expr);\n\n BLangSimpleVariableDef tempVarDef = createVarDef(\"$result\",\n unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);\n BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);\n\n blockStmt.addStatement(tempVarDef);\n\n BLangTypeTestExpr typeTestExpr = getNilTypeTestExpr(unaryExpr.pos, unaryExpr.expr);\n\n BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);\n BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);\n bLangAssignmentIf.varRef = tempVarRef;\n bLangAssignmentIf.expr = createNilLiteral();\n\n BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);\n BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);\n bLangAssignmentElse.varRef = tempVarRef;\n\n BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);\n bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr,\n nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);\n\n BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);\n ifStatement.expr = typeTestExpr;\n ifStatement.body = ifBody;\n ifStatement.elseStmt = elseBody;\n\n BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);\n stmtExpr.setBType(unaryExpr.getBType());\n\n return stmtExpr;\n }\n\n private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {\n if (unaryExpr.getBType() != null && unaryExpr.getBType().isNullable()) {\n switch (unaryExpr.operator) {\n case ADD:\n case SUB:\n case 
BITWISE_COMPLEMENT:\n return true;\n }\n }\n return false;\n }\n\n @Override\n public void visit(BLangTypeConversionExpr conversionExpr) {\n \n \n \n \n\n \n if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {\n result = rewriteExpr(conversionExpr.expr);\n return;\n }\n\n BType targetType = conversionExpr.targetType;\n conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);\n\n conversionExpr.expr = rewriteExpr(conversionExpr.expr);\n result = conversionExpr;\n }\n\n @Override\n public void visit(BLangLambdaFunction bLangLambdaFunction) {\n bLangLambdaFunction.function = rewrite(bLangLambdaFunction.function, bLangLambdaFunction.capturedClosureEnv);\n BLangFunction function = bLangLambdaFunction.function;\n \n if (function.flagSet.contains(Flag.WORKER) && Symbols.isFlagOn(function.symbol.type.flags, Flags.ISOLATED) &&\n Symbols.isFlagOn(env.enclInvokable.symbol.flags, Flags.ISOLATED)) {\n addStrandAnnotationWithThreadAny(function.pos);\n function.addAnnotationAttachment(this.strandAnnotAttachement);\n BInvokableSymbol funcSymbol = function.symbol;\n funcSymbol.addAnnotation(this.strandAnnotAttachement.annotationAttachmentSymbol);\n funcSymbol.schedulerPolicy = SchedulerPolicy.ANY;\n }\n bLangLambdaFunction.capturedClosureEnv = null;\n result = bLangLambdaFunction;\n }\n\n @Override\n public void visit(BLangArrowFunction bLangArrowFunction) {\n BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();\n bLangFunction.setName(bLangArrowFunction.functionName);\n\n BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();\n lambdaFunction.pos = bLangArrowFunction.pos;\n bLangFunction.addFlag(Flag.LAMBDA);\n lambdaFunction.function = bLangFunction;\n\n \n BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();\n returnType.setBType(bLangArrowFunction.body.expr.getBType());\n bLangFunction.setReturnTypeNode(returnType);\n 
bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));\n\n bLangArrowFunction.params.forEach(bLangFunction::addParameter);\n lambdaFunction.parent = bLangArrowFunction.parent;\n lambdaFunction.setBType(bLangArrowFunction.funcType);\n\n \n BLangFunction funcNode = lambdaFunction.function;\n BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),\n new Name(funcNode.name.value),\n new Name(funcNode.name.originalValue),\n env.enclPkg.symbol.pkgID,\n bLangArrowFunction.funcType,\n env.enclEnv.enclVarSym, true,\n bLangArrowFunction.pos, VIRTUAL);\n\n funcSymbol.originalName = new Name(funcNode.name.originalValue);\n\n SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);\n defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);\n\n List paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {\n Scope enclScope = invokableEnv.scope;\n varNode.symbol.kind = SymbolKind.FUNCTION;\n varNode.symbol.owner = invokableEnv.scope.owner;\n enclScope.define(varNode.symbol.name, varNode.symbol);\n }).map(varNode -> varNode.symbol).collect(Collectors.toList());\n\n funcSymbol.params = paramSymbols;\n funcSymbol.restParam = getRestSymbol(funcNode);\n funcSymbol.retType = funcNode.returnTypeNode.getBType();\n \n List paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());\n funcNode.setBType(new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(),\n funcSymbol.type.tsymbol));\n\n lambdaFunction.function.pos = bLangArrowFunction.pos;\n lambdaFunction.function.body.pos = bLangArrowFunction.pos;\n \n lambdaFunction.capturedClosureEnv = env;\n env.enclPkg.addFunction(lambdaFunction.function);\n result = rewriteExpr(lambdaFunction);\n }\n\n private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,\n SymbolEnv invokableEnv) {\n invokableNode.symbol = funcSymbol;\n funcSymbol.scope = 
new Scope(funcSymbol);\n invokableEnv.scope = funcSymbol.scope;\n }\n\n @Override\n public void visit(BLangXMLQName xmlQName) {\n result = xmlQName;\n }\n\n @Override\n public void visit(BLangXMLAttribute xmlAttribute) {\n xmlAttribute.name = rewriteExpr(xmlAttribute.name);\n xmlAttribute.value = rewriteExpr(xmlAttribute.value);\n result = xmlAttribute;\n }\n\n @Override\n public void visit(BLangXMLElementLiteral xmlElementLiteral) {\n xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);\n\n \n Iterator attributesItr = xmlElementLiteral.attributes.iterator();\n while (attributesItr.hasNext()) {\n BLangXMLAttribute attribute = attributesItr.next();\n if (!attribute.isNamespaceDeclr) {\n continue;\n }\n\n \n BLangXMLNS xmlns;\n if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {\n xmlns = new BLangPackageXMLNS();\n } else {\n xmlns = new BLangLocalXMLNS();\n }\n xmlns.namespaceURI = attribute.value.concatExpr;\n xmlns.prefix = ((BLangXMLQName) attribute.name).localname;\n xmlns.symbol = attribute.symbol;\n\n xmlElementLiteral.inlineNamespaces.add(xmlns);\n }\n\n List prevInlineNamespaces = this.inlineXMLNamespaces;\n if (isVisitingQuery && this.inlineXMLNamespaces != null) {\n \n xmlElementLiteral.inlineNamespaces.addAll(this.inlineXMLNamespaces);\n }\n this.inlineXMLNamespaces = xmlElementLiteral.inlineNamespaces;\n\n xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);\n xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);\n xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);\n\n this.inlineXMLNamespaces = prevInlineNamespaces;\n result = xmlElementLiteral;\n }\n\n @Override\n public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {\n for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {\n rewriteExpr(xmlItem);\n }\n result = xmlSequenceLiteral;\n }\n\n @Override\n public void visit(BLangXMLTextLiteral xmlTextLiteral) {\n 
xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
        result = xmlTextLiteral;
    }

    @Override
    public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
        // Desugar the comment's text fragments into a single string concatenation expression.
        xmlCommentLiteral.concatExpr = rewriteExpr(
                constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
        result = xmlCommentLiteral;
    }

    @Override
    public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
        // Rewrite the processing-instruction target, then desugar its data fragments
        // into one concatenation expression.
        xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
        xmlProcInsLiteral.dataConcatExpr =
                rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
        result = xmlProcInsLiteral;
    }

    @Override
    public void visit(BLangXMLQuotedString xmlQuotedString) {
        // Quoted strings inside XML literals are likewise reduced to a concat expression.
        xmlQuotedString.concatExpr = rewriteExpr(
                constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
        result = xmlQuotedString;
    }

    @Override
    public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
        // A string template literal is replaced entirely by the concatenation of its parts.
        result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
    }

    /**
     * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
     * def is generated from the object type. The type init expression creates an instance of this generated object
     * type. 
For example, consider the following statements:\n * string name = \"Pubudu\";\n * 'object:RawTemplate rt = `Hello ${name}!`;\n *\n * The raw template literal above is desugared to:\n * type RawTemplate$Impl$0 object {\n * public string[] strings = [\"Hello \", \"!\"];\n * public (any|error)[] insertions;\n *\n * function init((any|error)[] insertions) {\n * self.insertions = insertions;\n * }\n * };\n *\n * \n * 'object:RawTemplate rt = new RawTemplate$Impl$0([name]);\n *\n * @param rawTemplateLiteral The raw template literal to be desugared.\n */\n @Override\n public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {\n Location pos = rawTemplateLiteral.pos;\n BObjectType objType = (BObjectType) Types.getImpliedType(rawTemplateLiteral.getBType());\n BLangClassDefinition objClassDef =\n desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);\n BObjectType classObjType = (BObjectType) objClassDef.getBType();\n\n BVarSymbol insertionsSym = classObjType.fields.get(\"insertions\").symbol;\n BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);\n insertionsList.exprs.addAll(rawTemplateLiteral.insertions);\n insertionsList.expectedType = insertionsSym.type;\n\n \n BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);\n typeNewExpr.argsExpr.add(insertionsList);\n BLangInvocation initInvocation = (BLangInvocation) typeNewExpr.initInvocation;\n initInvocation.argExprs.add(insertionsList);\n initInvocation.requiredArgs.add(insertionsList);\n\n result = rewriteExpr(typeNewExpr);\n }\n\n /**\n * This method desugars a raw template literal object class for the provided raw template object type as follows.\n * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;\n * is desugared to,\n * type $anonType$0 object {\n * public string[] strings = [\"Hello \", \"!\"];\n * public (any|error)[] insertions;\n *\n * function init((any|error)[] insertions) {\n * 
self.insertions = insertions;\n * }\n * };\n * @param strings The string portions of the literal\n * @param objectType The abstract object type for which an object class needs to be generated\n * @param pos The diagnostic position info for the type node\n * @return Returns the generated concrete object class def\n */\n private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List strings, BObjectType objectType,\n Location pos) {\n \n BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;\n Name objectClassName = Names.fromString(\n anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));\n\n BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,\n env.enclPkg.packageID, null, env.enclPkg.symbol,\n pos, VIRTUAL, false);\n classTSymbol.flags |= Flags.CLASS;\n\n \n BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);\n objectClassType.fields = objectType.fields;\n classTSymbol.type = objectClassType;\n objectClassType.typeIdSet.add(objectType.typeIdSet);\n\n \n\n\n\n BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);\n classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);\n\n \n \n BType stringsType = objectClassType.fields.get(\"strings\").symbol.type;\n BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);\n stringsList.exprs.addAll(strings);\n stringsList.expectedType = stringsType;\n classDef.fields.get(0).expr = stringsList;\n\n \n BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);\n classDef.initFunction = userDefinedInitFunction;\n env.enclPkg.functions.add(userDefinedInitFunction);\n env.enclPkg.topLevelNodes.add(userDefinedInitFunction);\n\n \n BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);\n tempGeneratedInitFunction.clonedEnv = 
SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,\n tempGeneratedInitFunction.symbol.scope, env);\n this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);\n classDef.generatedInitFunction = tempGeneratedInitFunction;\n env.enclPkg.functions.add(classDef.generatedInitFunction);\n env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);\n\n return rewrite(classDef, env);\n }\n\n /**\n * Creates a user-defined init() method for the provided object type node. If there are fields without default\n * values specified in the type node, this will add parameters for those fields in the init() method and assign the\n * param values to the respective fields in the method body.\n *\n * @param classDefn The object type node for which the init() method is generated\n * @param env The symbol env for the object type node\n * @return The generated init() method\n */\n private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {\n BLangFunction initFunction =\n TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.symbol, env, names,\n Names.USER_DEFINED_INIT_SUFFIX, symTable, classDefn.getBType());\n BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);\n typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,\n (BInvokableType) initFunction.getBType(), classDefn.pos);\n classDefn.initFunction = initFunction;\n initFunction.returnTypeNode.setBType(symTable.nilType);\n\n BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;\n BInvokableType initFnType = (BInvokableType) initFunction.getBType();\n for (BLangSimpleVariable field : classDefn.fields) {\n if (field.expr != null) {\n continue;\n }\n BVarSymbol fieldSym = field.symbol;\n BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,\n initFunction.symbol, classDefn.pos, VIRTUAL);\n BLangSimpleVariable 
param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,\n fieldSym.type, null, paramSym);\n param.flagSet.add(Flag.FINAL);\n initFunction.symbol.scope.define(paramSym.name, paramSym);\n initFunction.symbol.params.add(paramSym);\n initFnType.paramTypes.add(param.getBType());\n initFunction.requiredParams.add(param);\n\n BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);\n BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),\n initFunction.receiver.symbol, field.name);\n initFuncBody.addStatement(fieldInit);\n }\n\n return initFunction;\n }\n\n @Override\n public void visit(BLangWorkerAsyncSendExpr asyncSendExpr) {\n asyncSendExpr.expr = visitCloneInvocation(rewriteExpr(asyncSendExpr.expr), asyncSendExpr.expr.getBType());\n this.channelsWithinIfStmt.add(asyncSendExpr.getChannel());\n result = asyncSendExpr;\n }\n\n @Override\n public void visit(BLangWorkerSyncSendExpr syncSendExpr) {\n syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());\n this.channelsWithinIfStmt.add(syncSendExpr.getChannel());\n result = syncSendExpr;\n }\n\n @Override\n public void visit(BLangAlternateWorkerReceive altWorkerReceive) {\n result = altWorkerReceive;\n }\n\n @Override\n public void visit(BLangMultipleWorkerReceive multipleWorkerReceive) {\n result = multipleWorkerReceive;\n }\n\n @Override\n public void visit(BLangWorkerReceive workerReceiveNode) {\n result = workerReceiveNode;\n }\n\n @Override\n public void visit(BLangWorkerFlushExpr workerFlushExpr) {\n workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts\n .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());\n result = workerFlushExpr;\n }\n\n @Override\n public void visit(BLangTransactionalExpr transactionalExpr) {\n BInvokableSymbol isTransactionalSymbol =\n (BInvokableSymbol) 
transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);\n result = ASTBuilderUtil\n .createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),\n Collections.emptyList(), symResolver);\n }\n\n @Override\n public void visit(BLangCommitExpr commitExpr) {\n BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);\n result = rewriteExpr(stmtExpr);\n }\n\n @Override\n public void visit(BLangFail failNode) {\n if (this.onFailClause != null && !this.desugarToReturn) {\n if (this.onFailClause.bodyContainsFail) {\n result = rewriteNestedOnFail(this.onFailClause, failNode);\n } else {\n result = createOnFailInvocation(onFailClause, failNode);\n }\n } else {\n BLangReturn stmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));\n stmt.desugared = true;\n result = stmt;\n }\n }\n\n \n \n\n @Override\n public void visit(BLangLocalVarRef localVarRef) {\n result = localVarRef;\n }\n\n @Override\n public void visit(BLangFieldVarRef fieldVarRef) {\n result = fieldVarRef;\n }\n\n @Override\n public void visit(BLangPackageVarRef packageVarRef) {\n result = packageVarRef;\n }\n\n @Override\n public void visit(BLangFunctionVarRef functionVarRef) {\n result = functionVarRef;\n }\n\n @Override\n public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {\n result = fieldAccessExpr;\n }\n\n @Override\n public void visit(BLangStructFunctionVarRef functionVarRef) {\n result = functionVarRef;\n }\n\n @Override\n public void visit(BLangMapAccessExpr mapKeyAccessExpr) {\n result = mapKeyAccessExpr;\n }\n\n @Override\n public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {\n result = arrayIndexAccessExpr;\n }\n\n @Override\n public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {\n result = arrayIndexAccessExpr;\n }\n\n @Override\n public void visit(BLangTableAccessExpr tableKeyAccessExpr) {\n result = tableKeyAccessExpr;\n }\n\n @Override\n public void 
visit(BLangMapLiteral mapLiteral) {\n result = mapLiteral;\n }\n\n @Override\n public void visit(BLangStructLiteral structLiteral) {\n result = structLiteral;\n }\n\n @Override\n public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {\n result = waitLiteral;\n }\n\n @Override\n public void visit(BLangXMLElementAccess xmlElementAccess) {\n \n \n xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);\n\n ArrayList filters = expandFilters(xmlElementAccess.filters);\n\n BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,\n xmlElementAccess.expr, new ArrayList<>(), filters);\n result = rewriteExpr(invocationNode);\n }\n\n private ArrayList expandFilters(List filters) {\n ArrayList args = new ArrayList<>();\n for (BLangXMLElementFilter filter : filters) {\n BSymbol nsSymbol = filter.namespaceSymbol;\n String filterName = filter.name;\n if (nsSymbol != null &&\n !(filter.namespace.equals(XMLConstants.DEFAULT_NS_PREFIX) && filterName.equals(\"*\"))) {\n String expandedName = createExpandedQName(((BXMLNSSymbol) nsSymbol).namespaceURI, filterName);\n args.add(createStringLiteral(filter.elemNamePos, expandedName));\n } else {\n args.add(createStringLiteral(filter.elemNamePos, filterName));\n }\n }\n return args;\n }\n\n private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,\n BLangExpression invokeOnExpr,\n ArrayList args,\n ArrayList restArgs) {\n invokeOnExpr = rewriteExpr(invokeOnExpr);\n\n BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();\n invocationNode.pos = pos;\n BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n name.setLiteral(false);\n name.setValue(functionName);\n name.pos = pos;\n invocationNode.name = name;\n invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n\n invocationNode.expr = invokeOnExpr;\n\n invocationNode.symbol = 
symResolver.lookupLangLibMethod(symTable.xmlType, Names.fromString(functionName), env);\n\n ArrayList requiredArgs = new ArrayList<>();\n requiredArgs.add(invokeOnExpr);\n requiredArgs.addAll(args);\n invocationNode.requiredArgs = requiredArgs;\n invocationNode.restArgs = rewriteExprs(restArgs);\n\n invocationNode.setBType(invocationNode.symbol.type.getReturnType());\n invocationNode.langLibInvocation = true;\n return invocationNode;\n }\n\n @Override\n public void visit(BLangXMLNavigationAccess xmlNavigation) {\n xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);\n xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);\n\n ArrayList filters = expandFilters(xmlNavigation.filters);\n\n \n if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {\n BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,\n XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);\n result = rewriteExpr(invocationNode);\n } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {\n \n BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,\n xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());\n result = rewriteExpr(invocationNode);\n } else {\n BLangExpression childIndexExpr;\n \n if (xmlNavigation.childIndex == null) {\n childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);\n } else {\n \n childIndexExpr = xmlNavigation.childIndex;\n }\n ArrayList args = new ArrayList<>();\n args.add(rewriteExpr(childIndexExpr));\n\n BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,\n XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);\n result = rewriteExpr(invocationNode);\n }\n }\n\n @Override\n public void visit(BLangIsAssignableExpr assignableExpr) {\n assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);\n result = assignableExpr;\n }\n\n @Override\n 
public void visit(BLangTypedescExpr typedescExpr) {\n typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);\n result = typedescExpr;\n }\n\n @Override\n public void visit(BLangRestArgsExpression bLangVarArgsExpression) {\n result = rewriteExpr(bLangVarArgsExpression.expr);\n }\n\n @Override\n public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {\n bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);\n result = bLangNamedArgsExpression.expr;\n }\n\n @Override\n public void visit(BLangCheckedExpr checkedExpr) {\n visitCheckAndCheckPanicExpr(checkedExpr, false);\n }\n\n @Override\n public void visit(BLangCheckPanickedExpr checkedExpr) {\n visitCheckAndCheckPanicExpr(checkedExpr, true);\n }\n\n private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {\n \n if (checkedExpr.isRedundantChecking) {\n result = rewriteExpr(checkedExpr.expr);\n return;\n }\n\n \n \n \n \n \n \n \n \n \n \n \n \n\n Location pos = checkedExpr.pos;\n \n String resultVarName = \"_$result$_\";\n BType resultType = checkedExpr.getBType();\n BLangSimpleVariable resultVar =\n ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,\n new BVarSymbol(0, Names.fromString(resultVarName),\n this.env.scope.owner.pkgID, resultType,\n this.env.scope.owner, pos, VIRTUAL));\n BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);\n resultVarDef.desugared = true;\n BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);\n\n String checkedExprVarName = GEN_VAR_PREFIX.value;\n BType checkedExprType = checkedExpr.expr.getBType();\n BLangSimpleVariable checkedExprVar =\n ASTBuilderUtil.createVariable(pos, checkedExprVarName, checkedExprType,\n checkedExpr.expr, new BVarSymbol(0, Names.fromString(checkedExprVarName),\n this.env.scope.owner.pkgID, checkedExprType,\n this.env.scope.owner, pos, VIRTUAL));\n BLangSimpleVariableDef checkedExprVarDef = 
ASTBuilderUtil.createVariableDef(pos, checkedExprVar);\n BLangSimpleVarRef checkedExprVarRef = ASTBuilderUtil.createVariableRef(pos, checkedExprVar.symbol);\n\n BLangAssignment successAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,\n createTypeCastExpr(checkedExprVarRef, resultType));\n BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);\n ifBody.addStatement(successAssignment);\n\n BLangBlockStmt elseBody = getSafeErrorAssignment(pos, checkedExprVarRef, this.env.enclInvokable.symbol,\n checkedExpr.equivalentErrorTypeList, isCheckPanic);\n\n BLangValueType checkedExprTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();\n checkedExprTypeNode.setBType(resultType);\n checkedExprTypeNode.typeKind = resultType.getKind();\n BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,\n createTypeCheckExpr(pos, checkedExprVarRef, checkedExprTypeNode), ifBody, elseBody);\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{\n add(resultVarDef);\n add(checkedExprVarDef);\n add(ifStmt);\n }});\n\n BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);\n stmtExpr.setBType(resultType);\n result = rewriteExpr(stmtExpr);\n }\n\n @Override\n public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {\n final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,\n serviceConstructorExpr.serviceNode.serviceClass.symbol.type);\n serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));\n result = rewriteExpr(typeInit);\n }\n\n @Override\n public void visit(BLangObjectConstructorExpression bLangObjectConstructorExpression) {\n visit(bLangObjectConstructorExpression.classNode);\n bLangObjectConstructorExpression.classNode.annAttachments.forEach(attachment -> rewrite(attachment, env));\n result = rewriteExpr(bLangObjectConstructorExpression.typeInit);\n }\n\n @Override\n public void 
visit(BLangAnnotAccessExpr annotAccessExpr) {\n\n BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();\n binaryExpr.pos = annotAccessExpr.pos;\n binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;\n binaryExpr.lhsExpr = annotAccessExpr.expr;\n binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,\n annotAccessExpr.annotationSymbol.bvmAlias());\n binaryExpr.setBType(annotAccessExpr.getBType());\n binaryExpr.opSymbol = new BOperatorSymbol(Names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,\n new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),\n binaryExpr.rhsExpr.getBType()),\n annotAccessExpr.getBType(), null), null,\n symTable.builtinPos, VIRTUAL);\n result = rewriteExpr(binaryExpr);\n }\n\n @Override\n public void visit(BLangTypeTestExpr typeTestExpr) {\n BLangExpression expr = typeTestExpr.expr;\n if (types.isValueType(expr.getBType())) {\n expr = types.addConversionExprIfRequired(expr, symTable.anyType);\n }\n if (typeTestExpr.isNegation) {\n BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,\n typeTestExpr.expr, typeTestExpr.typeNode);\n BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();\n bLangGroupExpr.expression = bLangTypeTestExpr;\n bLangGroupExpr.setBType(typeTestExpr.getBType());\n BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,\n typeTestExpr.getBType(),\n OperatorKind.NOT, null);\n result = rewriteExpr(unaryExpr);\n return;\n }\n typeTestExpr.expr = rewriteExpr(expr);\n typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);\n result = typeTestExpr;\n }\n\n @Override\n public void visit(BLangIsLikeExpr isLikeExpr) {\n isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);\n result = isLikeExpr;\n }\n\n @Override\n public void visit(BLangStatementExpression bLangStatementExpression) {\n bLangStatementExpression.expr = 
rewriteExpr(bLangStatementExpression.expr);\n bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);\n result = bLangStatementExpression;\n }\n\n @Override\n public void visit(BLangQueryExpr queryExpr) {\n boolean prevIsVisitingQuery = this.isVisitingQuery;\n boolean prevDesugarToReturn = this.desugarToReturn;\n this.isVisitingQuery = true;\n this.desugarToReturn = true;\n BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env, getVisibleXMLNSStmts(env));\n result = rewrite(stmtExpr, env);\n this.isVisitingQuery = prevIsVisitingQuery;\n this.desugarToReturn = prevDesugarToReturn;\n }\n\n List getVisibleXMLNSStmts(SymbolEnv env) {\n Map nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);\n return nameBXMLNSSymbolMap.keySet().stream()\n .map(key -> this.stmtsToBePropagatedToQuery.get(key))\n .filter(Objects::nonNull)\n .collect(Collectors.toList());\n }\n\n @Override\n public void visit(BLangQueryAction queryAction) {\n boolean prevIsVisitingQuery = this.isVisitingQuery;\n boolean prevDesugarToReturn = this.desugarToReturn;\n this.desugarToReturn = true;\n this.isVisitingQuery = true;\n BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env, getVisibleXMLNSStmts(env));\n result = rewrite(stmtExpr, env);\n this.isVisitingQuery = prevIsVisitingQuery;\n this.desugarToReturn = prevDesugarToReturn;\n }\n\n @Override\n public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {\n jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);\n result = jsonArrayLiteral;\n }\n\n @Override\n public void visit(BLangConstant constant) {\n\n BConstantSymbol constSymbol = constant.symbol;\n BType refType = Types.getImpliedType(constSymbol.literalType);\n if (refType.tag <= TypeTags.BOOLEAN || refType.tag == TypeTags.NIL) {\n if (refType.tag != TypeTags.NIL && (constSymbol.value == null ||\n constSymbol.value.value == null)) {\n throw new IllegalStateException();\n }\n BLangLiteral literal = 
ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                                                              constSymbol.value.value);
            // Simple-typed constants (per the tag check above) are replaced by a literal
            // built directly from the symbol's resolved compile-time value.
            constant.expr = rewriteExpr(literal);
        } else {
            // Structured constant expressions are desugared in place.
            constant.expr = rewriteExpr(constant.expr);
        }
        constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
        result = constant;
    }

    // Ignore expressions are already in their final form; pass through unchanged.
    @Override
    public void visit(BLangIgnoreExpr ignoreExpr) {
        result = ignoreExpr;
    }

    // Desugars both halves of a dynamic (conditionally supplied) argument.
    @Override
    public void visit(BLangDynamicArgExpr dynamicParamExpr) {
        dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
        dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
        result = dynamicParamExpr;
    }

    // A constant reference collapses to a plain literal carrying the constant's value.
    @Override
    public void visit(BLangConstRef constantRef) {
        result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
    }

    // --- Regular-expression template AST: each visitor below rewrites the node's
    // --- children and republishes the node itself via `result`.

    @Override
    public void visit(BLangRegExpTemplateLiteral regExpTemplateLiteral) {
        regExpTemplateLiteral.reDisjunction = rewriteExpr(regExpTemplateLiteral.reDisjunction);
        result = regExpTemplateLiteral;
    }

    @Override
    public void visit(BLangReDisjunction reDisjunction) {
        reDisjunction.sequenceList.forEach(this::rewriteExpr);
        result = reDisjunction;
    }

    @Override
    public void visit(BLangReSequence reSequence) {
        reSequence.termList.forEach(this::rewriteExpr);
        result = reSequence;
    }

    @Override
    public void visit(BLangReAssertion reAssertion) {
        reAssertion.assertion = rewriteExpr(reAssertion.assertion);
        result = reAssertion;
    }

    @Override
    public void visit(BLangReAtomQuantifier reAtomQuantifier) {
        BLangExpression reAtom = reAtomQuantifier.atom;
        if (symResolver.isReAtomNode(reAtom.getKind())) {
            reAtomQuantifier.atom = rewriteExpr(reAtom);
        } else {
            // Non-atom interpolations are stringified before being embedded in the pattern.
            reAtomQuantifier.atom = rewriteExpr(getToStringInvocationOnExpr(reAtom));
        }

        // A missing quantifier is normalized to an explicit empty quantifier node.
        if (reAtomQuantifier.quantifier == null) {
            reAtomQuantifier.quantifier = ASTBuilderUtil.createEmptyQuantifier(reAtomQuantifier.pos,
                    symTable.anydataType,
symTable.stringType);
        }
        reAtomQuantifier.quantifier = rewriteExpr(reAtomQuantifier.quantifier);
        result = reAtomQuantifier;
    }

    @Override
    public void visit(BLangReAtomCharOrEscape reAtomCharOrEscape) {
        reAtomCharOrEscape.charOrEscape = rewriteExpr(reAtomCharOrEscape.charOrEscape);
        result = reAtomCharOrEscape;
    }

    @Override
    public void visit(BLangReQuantifier reQuantifier) {
        reQuantifier.quantifier = rewriteExpr(reQuantifier.quantifier);
        // Absent non-greedy marker is normalized to an empty-string literal so later
        // phases always see a non-null child.
        if (reQuantifier.nonGreedyChar == null) {
            reQuantifier.nonGreedyChar = ASTBuilderUtil.createLiteral(reQuantifier.pos,
                    symTable.stringType, "");
        }
        reQuantifier.nonGreedyChar = rewriteExpr(reQuantifier.nonGreedyChar);
        result = reQuantifier;
    }

    @Override
    public void visit(BLangReCharacterClass reCharacterClass) {
        reCharacterClass.characterClassStart = rewriteExpr(reCharacterClass.characterClassStart);
        // Absent negation ("^") is normalized to an empty-string literal.
        if (reCharacterClass.negation == null) {
            reCharacterClass.negation = ASTBuilderUtil.createLiteral(reCharacterClass.pos,
                    symTable.stringType, "");
        }
        reCharacterClass.negation = rewriteExpr(reCharacterClass.negation);
        // Absent character set is normalized to an explicit empty char-set node.
        if (reCharacterClass.charSet == null) {
            reCharacterClass.charSet = ASTBuilderUtil.createEmptyCharSet(symTable.anydataType);
        }
        reCharacterClass.charSet = rewriteExpr(reCharacterClass.charSet);
        reCharacterClass.characterClassEnd = rewriteExpr(reCharacterClass.characterClassEnd);
        result = reCharacterClass;
    }

    @Override
    public void visit(BLangReCharSet reCharSet) {
        reCharSet.charSetAtoms.forEach(this::rewriteExpr);
        result = reCharSet;
    }

    @Override
    public void visit(BLangReCharSetRange reCharSetRange) {
        reCharSetRange.lhsCharSetAtom = rewriteExpr(reCharSetRange.lhsCharSetAtom);
        reCharSetRange.dash = rewriteExpr(reCharSetRange.dash);
        reCharSetRange.rhsCharSetAtom = rewriteExpr(reCharSetRange.rhsCharSetAtom);
        result = reCharSetRange;
    }

    @Override
    public void visit(BLangReCapturingGroups reCapturingGroups) {
reCapturingGroups.openParen = rewriteExpr(reCapturingGroups.openParen);\n \n if (reCapturingGroups.flagExpr == null) {\n reCapturingGroups.flagExpr = ASTBuilderUtil.createEmptyFlagExpression(reCapturingGroups.pos,\n symTable.anydataType, symTable.stringType);\n }\n reCapturingGroups.flagExpr = rewriteExpr(reCapturingGroups.flagExpr);\n reCapturingGroups.disjunction = rewriteExpr(reCapturingGroups.disjunction);\n reCapturingGroups.closeParen = rewriteExpr(reCapturingGroups.closeParen);\n result = reCapturingGroups;\n }\n\n @Override\n public void visit(BLangReFlagExpression reFlagExpression) {\n reFlagExpression.questionMark = rewriteExpr(reFlagExpression.questionMark);\n \n if (reFlagExpression.flagsOnOff == null) {\n reFlagExpression.flagsOnOff = ASTBuilderUtil.createEmptyFlagOnOff(reFlagExpression.pos,\n symTable.anydataType, symTable.stringType);\n }\n reFlagExpression.flagsOnOff = rewriteExpr(reFlagExpression.flagsOnOff);\n reFlagExpression.colon = rewriteExpr(reFlagExpression.colon);\n result = reFlagExpression;\n }\n\n @Override\n public void visit(BLangReFlagsOnOff reFlagsOnOff) {\n reFlagsOnOff.flags = rewriteExpr(reFlagsOnOff.flags);\n result = reFlagsOnOff;\n }\n\n \n\n \n BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,\n BInvokableSymbol iteratorInvokableSymbol,\n boolean isIteratorFuncFromLangLib) {\n\n\n BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);\n BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();\n iteratorInvocation.pos = pos;\n iteratorInvocation.expr = dataReference;\n iteratorInvocation.symbol = iteratorInvokableSymbol;\n iteratorInvocation.setBType(iteratorInvokableSymbol.retType);\n iteratorInvocation.argExprs = Lists.of(dataReference);\n iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;\n iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;\n BVarSymbol iteratorSymbol = new 
BVarSymbol(0, Names.fromString("$iterator$"), this.env.scope.owner.pkgID,
                iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);

        // Bind the iterator() invocation to a synthetic `$iterator$` variable definition.
        BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
                iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
        return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
    }

    // Builds `$result$ = <iterator>.next()` as a variable definition; the nillable
    // result type models the iterator's end-of-stream case.
    BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
                                                             BVarSymbol iteratorSymbol,
                                                             BVarSymbol resultSymbol) {
        BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
        BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
                nillableResultType, nextInvocation, resultSymbol);
        return ASTBuilderUtil.createVariableDef(pos, resultVariable);
    }

    // Builds the `<iterator>.next()` invocation node, resolving the attached `next`
    // function from the iterator's object type.
    BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
        BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
        BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
        BInvokableSymbol nextFuncSymbol =
                getNextFunc((BObjectType) Types.getImpliedType(iteratorSymbol.type)).symbol;
        BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
        nextInvocation.pos = pos;
        nextInvocation.name = nextIdentifier;
        nextInvocation.expr = iteratorReferenceInNext;
        nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
        nextInvocation.argExprs = nextInvocation.requiredArgs;
        nextInvocation.symbol = nextFuncSymbol;
        nextInvocation.setBType(nextFuncSymbol.retType);
        return nextInvocation;
    }

    // Linear scan over the object's attached functions for one named "next".
    private BAttachedFunction getNextFunc(BObjectType iteratorType) {
        BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
        for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
            if (bAttachedFunction.funcName.value.equals("next"))
{
                return bAttachedFunction;
            }
        }
        // No attached `next` function found — caller is expected to handle null.
        return null;
    }

    // Convenience wrapper: field access for the conventional `value` field of an
    // iterator's next() result record.
    BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,
                                                   BVarSymbol resultSymbol) {
        return getFieldAccessExpression(location, "value", varType, resultSymbol);
    }

    // Builds `<resultSymbol>.<fieldName>` with the given static type.
    BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
                                                   BVarSymbol resultSymbol) {
        BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
        BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);

        BLangFieldBasedAccess fieldBasedAccessExpression =
                ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
        fieldBasedAccessExpression.pos = pos;
        fieldBasedAccessExpression.setBType(varType);
        fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType();
        return fieldBasedAccessExpression;
    }

    // Converts an arrow function's expression body into a block body containing a
    // single `return <expr>;` statement.
    private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
        BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
        BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
        returnNode.pos = bLangArrowFunction.body.expr.pos;
        returnNode.setExpression(bLangArrowFunction.body.expr);
        blockNode.addStatement(returnNode);
        return blockNode;
    }

    // Builds an invocation of a function resolved from the root scope by name.
    // NOTE(review): lookup result's symbol may be unresolved if the name is absent —
    // presumably callers only pass known built-in names; verify at call sites.
    protected BLangInvocation createInvocationNode(String functionName, List args, BType retType) {
        BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
        BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        name.setLiteral(false);
        name.setValue(functionName);
        invocationNode.name = name;
        invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();

        invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
        invocationNode.setBType(retType);
        invocationNode.requiredArgs = args;
        return invocationNode;
    }

    protected
BLangInvocation createLangLibInvocationNode(String functionName,\n BLangExpression onExpr,\n List args,\n BType retType,\n Location pos) {\n BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();\n invocationNode.pos = pos;\n BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n name.setLiteral(false);\n name.setValue(functionName);\n name.pos = pos;\n invocationNode.name = name;\n invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n\n invocationNode.expr = onExpr;\n invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), Names.fromString(functionName), env);\n\n ArrayList requiredArgs = new ArrayList<>();\n requiredArgs.add(onExpr);\n requiredArgs.addAll(args);\n invocationNode.requiredArgs = requiredArgs;\n\n invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);\n invocationNode.langLibInvocation = true;\n return invocationNode;\n }\n\n private BLangInvocation createLangLibInvocationNode(String functionName,\n List requiredArgs,\n BType retType,\n Location pos) {\n return createLangLibInvocationNode(functionName, requiredArgs, new ArrayList<>(), retType, pos);\n }\n\n private BLangInvocation createLangLibInvocationNode(String functionName,\n List requiredArgs,\n List restArgs,\n BType retType,\n Location pos) {\n BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();\n invocationNode.pos = pos;\n BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n name.setLiteral(false);\n name.setValue(functionName);\n name.pos = pos;\n invocationNode.name = name;\n invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n\n invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,\n Names.fromString(functionName), env);\n\n invocationNode.requiredArgs = new ArrayList<>(requiredArgs);\n invocationNode.restArgs 
= new ArrayList<>(restArgs);\n\n invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);\n invocationNode.langLibInvocation = true;\n return invocationNode;\n }\n\n private BLangArrayLiteral createArrayLiteralExprNode() {\n BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();\n expr.exprs = new ArrayList<>();\n expr.setBType(new BArrayType(symTable.anyType));\n return expr;\n }\n\n private BFunctionPointerInvocation visitFunctionPointerInvocation(BLangInvocation iExpr) {\n BLangExpression rewritten = rewriteExpr(getFunctionPointerExpr(iExpr));\n return new BFunctionPointerInvocation(iExpr, rewritten);\n }\n\n protected BLangValueExpression getFunctionPointerExpr(BLangInvocation iExpr) {\n BLangValueExpression expr;\n if (iExpr.expr == null) {\n BLangSimpleVarRef varRef = new BLangSimpleVarRef();\n varRef.variableName = iExpr.name;\n expr = varRef;\n } else {\n BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();\n fieldBasedAccess.expr = iExpr.expr;\n fieldBasedAccess.field = iExpr.name;\n expr = fieldBasedAccess;\n }\n expr.symbol = iExpr.symbol;\n expr.setBType(iExpr.symbol.type);\n return expr;\n }\n\n private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {\n if (types.isValueType(expr.getBType())) {\n return expr;\n }\n if (Types.getImpliedType(expr.getBType()).tag == TypeTags.ERROR) {\n return expr;\n }\n BLangInvocation cloneInvok = createLangLibInvocationNode(\"clone\", expr, new ArrayList<>(), null, expr.pos);\n return types.addConversionExprIfRequired(cloneInvok, lhsType);\n }\n\n private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {\n if (types.isValueType(expr.getBType())) {\n return expr;\n }\n if (Types.getImpliedType(expr.getBType()).tag == TypeTags.ERROR) {\n return expr;\n }\n BLangInvocation cloneInvok = createLangLibInvocationNode(\"cloneReadOnly\", expr, new ArrayList<>(),\n 
expr.getBType(),
                                                                expr.pos);
        return types.addConversionExprIfRequired(cloneInvok, lhsType);
    }

    // Core rewrite driver: dispatches the node to this visitor, harvests the node
    // published in `this.result`, and marks it desugared so it is never rewritten twice.
    // `this.env` is swapped in for the duration of the visit and restored afterwards.
    @SuppressWarnings("unchecked")
    E rewrite(E node, SymbolEnv env) {
        if (node == null) {
            return null;
        }

        if (node.desugared) {
            return node;
        }

        SymbolEnv previousEnv = this.env;
        this.env = env;

        node.accept(this);
        BLangNode resultNode = this.result;
        this.result = null;
        resultNode.desugared = true;

        this.env = previousEnv;
        return (E) resultNode;
    }

    // Expression variant of the rewrite driver. If an implicit conversion wrapper is
    // attached, the wrapper is visited instead and detached to avoid re-application.
    @SuppressWarnings("unchecked")
    E rewriteExpr(E node) {
        if (node == null) {
            return null;
        }

        if (node.desugared) {
            return node;
        }

        BLangExpression expr = node;
        if (node.impConversionExpr != null) {
            expr = node.impConversionExpr;
            node.impConversionExpr = null;
        }

        expr.accept(this);
        BLangNode resultNode = this.result;
        this.result = null;
        resultNode.desugared = true;

        return (E) resultNode;
    }

    // Statement variant: delegates to the node-level driver and narrows the result.
    @SuppressWarnings("unchecked")
    E rewrite(E statement, SymbolEnv env) {
        if (statement == null) {
            return null;
        }
        BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
        return (E) stmt;
    }

    // In-place rewrite of a statement list; returns the same (mutated) list.
    private List rewriteStmt(List nodeList, SymbolEnv env) {
        for (int i = 0; i < nodeList.size(); i++) {
            nodeList.set(i, rewrite(nodeList.get(i), env));
        }
        return nodeList;
    }

    // In-place rewrite of a generic node list; returns the same (mutated) list.
    private List rewrite(List nodeList, SymbolEnv env) {
        for (int i = 0; i < nodeList.size(); i++) {
            nodeList.set(i, rewrite(nodeList.get(i), env));
        }
        return nodeList;
    }

    // In-place rewrite of an expression list; returns the same (mutated) list.
    private List rewriteExprs(List nodeList) {
        for (int i = 0; i < nodeList.size(); i++) {
            nodeList.set(i, rewriteExpr(nodeList.get(i)));
        }
        return nodeList;
    }

    // Literal factory: positioned string literal.
    private BLangLiteral createStringLiteral(Location pos, String value) {
        BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
        stringLit.pos = pos;
        return stringLit;
    }

    // Literal factory: int literal (no position attached).
    private BLangLiteral createIntLiteral(long value) {
        BLangLiteral literal = (BLangLiteral)
TreeBuilder.createLiteralExpression();
        literal.value = value;
        literal.setBType(symTable.intType);
        return literal;
    }

    // Literal factory: byte literal; widened to its unsigned int value.
    private BLangLiteral createByteLiteral(Location pos, Byte value) {
        BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
        byteLiteral.pos = pos;
        return byteLiteral;
    }

    // Wraps `expr` in an internal type-conversion node targeting `targetType`;
    // returns `expr` unchanged when its type already matches.
    private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
        if (types.isSameType(expr.getBType(), targetType)) {
            return expr;
        }

        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
        conversionExpr.pos = expr.pos;
        conversionExpr.expr = expr;
        conversionExpr.setBType(targetType);
        conversionExpr.targetType = targetType;
        conversionExpr.internal = true;
        return conversionExpr;
    }

    // Recursively unwraps array types to the innermost element type; non-array
    // input is returned as-is.
    private BType getElementType(BType bType) {
        BType type = Types.getImpliedType(bType);
        if (type.tag != TypeTags.ARRAY) {
            return bType;
        }

        return getElementType(((BArrayType) type).getElementType());
    }

    // Appends an implicit `return ();` to a block-bodied function whose return type
    // is nillable and whose last statement is not already a return. Native functions
    // and non-block bodies are skipped.
    private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
        if (Symbols.isNative(invokableNode.symbol) ||
                (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
            return;
        }

        BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
        if (invokableNode.symbol.type.getReturnType().isNullable() && (funcBody.stmts.isEmpty()
                || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
            Location invPos = invokableNode.pos;
            Location returnStmtPos;
            // Generated inits get no position; others point at the function's end line
            // so diagnostics/debug info land on the closing brace.
            if (invPos != null && !invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
                returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().fileName(),
                        invPos.lineRange().endLine().line(),
                        invPos.lineRange().endLine().line(),
                        invPos.lineRange().startLine().offset(),
                        invPos.lineRange().startLine().offset(), 0, 0);
            } else {
                returnStmtPos = null;
            }
            BLangReturn
returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);\n funcBody.addStatement(returnStmt);\n }\n }\n\n /**\n * Reorder the invocation arguments to match the original function signature.\n *\n * @param iExpr Function invocation expressions to reorder the arguments\n */\n private void reorderArguments(BLangInvocation iExpr) {\n BSymbol symbol = iExpr.symbol;\n\n if (symbol == null || Types.getImpliedType(symbol.type).tag != TypeTags.INVOKABLE) {\n return;\n }\n\n BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;\n\n List restArgs = iExpr.restArgs;\n int originalRequiredArgCount = iExpr.requiredArgs.size();\n\n \n BLangSimpleVarRef varargRef = null;\n BLangBlockStmt blockStmt = null;\n BType varargVarType = null;\n\n int restArgCount = restArgs.size();\n\n if (restArgCount > 0 &&\n restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&\n originalRequiredArgCount < invokableSymbol.params.size()) {\n \n \n \n BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;\n Location varargExpPos = expr.pos;\n varargVarType = expr.getBType();\n String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;\n\n BVarSymbol varargVarSymbol = new BVarSymbol(0, Names.fromString(varargVarName), this.env.scope.owner.pkgID,\n varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);\n varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);\n\n BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);\n\n BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);\n varDef.var = var;\n varDef.setBType(varargVarType);\n\n blockStmt = createBlockStmt(varargExpPos);\n blockStmt.stmts.add(varDef);\n }\n\n if (!invokableSymbol.params.isEmpty()) {\n \n reorderNamedArgs(iExpr, invokableSymbol, varargRef);\n }\n\n \n if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != 
NodeKind.REST_ARGS_EXPR) {\n if (invokableSymbol.restParam == null) {\n return;\n }\n\n BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();\n List exprs = new ArrayList<>();\n\n BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;\n BType elemType = arrayType.eType;\n\n for (BLangExpression restArg : restArgs) {\n exprs.add(types.addConversionExprIfRequired(restArg, elemType));\n }\n\n arrayLiteral.exprs = exprs;\n arrayLiteral.setBType(arrayType);\n\n if (restArgCount != 0) {\n iExpr.restArgs = new ArrayList<>();\n }\n\n iExpr.restArgs.add(arrayLiteral);\n return;\n }\n\n \n if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {\n\n \n \n if (iExpr.requiredArgs.size() == originalRequiredArgCount) {\n return;\n }\n\n \n \n \n \n BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);\n BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);\n BType type = firstNonRestArg.impConversionExpr == null ?\n firstNonRestArg.getBType() : firstNonRestArg.impConversionExpr.targetType;\n stmtExpression.setBType(type);\n iExpr.requiredArgs.add(0, stmtExpression);\n\n \n if (invokableSymbol.restParam == null) {\n restArgs.remove(0);\n return;\n }\n\n \n \n \n \n BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);\n BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;\n if (Types.getImpliedType(restArgsExpression.getBType()).tag == TypeTags.RECORD) {\n BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);\n restArgs.add(expr);\n return;\n }\n Location pos = restArgsExpression.pos;\n\n BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();\n newArrayLiteral.setBType(restParamType);\n\n String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;\n BVarSymbol varSymbol = new BVarSymbol(0, Names.fromString(name), 
this.env.scope.owner.pkgID,\n restParamType, this.env.scope.owner, pos, VIRTUAL);\n BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);\n\n BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);\n BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);\n varDef.var = var;\n varDef.setBType(restParamType);\n\n BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);\n BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);\n BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,\n getModifiedIntRangeEndExpr(lengthInvocation));\n\n BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();\n foreach.pos = pos;\n foreach.collection = intRangeInvocation;\n types.setForeachTypedBindingPatternType(foreach);\n\n final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, \"$foreach$i\",\n foreach.varType);\n foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),\n this.env.scope.owner.pkgID, foreachVariable.getBType(),\n this.env.scope.owner, pos, VIRTUAL);\n BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);\n foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);\n foreach.isDeclaredWithVar = true;\n BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);\n\n BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);\n\n BType refType = Types.getImpliedType(varargVarType);\n if (refType.tag == TypeTags.ARRAY) {\n BArrayType arrayType = (BArrayType) refType;\n if (arrayType.state == BArrayState.CLOSED &&\n arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {\n \n \n valueExpr.setBType(restParamType.eType);\n } else {\n valueExpr.setBType(arrayType.eType);\n }\n } else {\n 
valueExpr.setBType(symTable.anyOrErrorType); \n }\n\n BLangExpression pushExpr = types.addConversionExprIfRequired(valueExpr, restParamType.eType);\n BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);\n BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,\n List.of(pushExpr),\n restParamType, pos);\n pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));\n expressionStmt.expr = pushInvocation;\n foreach.body = foreachBody;\n BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);\n newArrayBlockStmt.addStatement(varDef);\n newArrayBlockStmt.addStatement(foreach);\n\n BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);\n newArrayStmtExpression.setBType(restParamType);\n\n restArgs.add(types.addConversionExprIfRequired(newArrayStmtExpression, restParamType));\n return;\n }\n\n \n \n BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;\n\n BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();\n arrayLiteral.setBType(restParamType);\n\n BType elemType = restParamType.eType;\n Location pos = restArgs.get(0).pos;\n\n List exprs = new ArrayList<>();\n\n for (int i = 0; i < restArgCount - 1; i++) {\n exprs.add(types.addConversionExprIfRequired(restArgs.get(i), elemType));\n }\n arrayLiteral.exprs = exprs;\n\n BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();\n pushRestArgsExpr.pos = pos;\n pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);\n\n String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;\n BVarSymbol varSymbol = new BVarSymbol(0, Names.fromString(name), this.env.scope.owner.pkgID, restParamType,\n this.env.scope.owner, pos, VIRTUAL);\n BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);\n\n BLangSimpleVariable var = createVariable(pos, name, restParamType, 
arrayLiteral, varSymbol);\n BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);\n varDef.var = var;\n varDef.setBType(restParamType);\n\n BLangBlockStmt pushBlockStmt = createBlockStmt(pos);\n pushBlockStmt.stmts.add(varDef);\n\n BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);\n BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,\n new ArrayList() {{\n add(pushRestArgsExpr);\n }}, restParamType, pos);\n pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));\n expressionStmt.expr = pushInvocation;\n\n BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);\n stmtExpression.setBType(restParamType);\n\n iExpr.restArgs = new ArrayList(1) {{ add(stmtExpression); }};\n }\n\n private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {\n List args = new ArrayList<>();\n Map namedArgs = new LinkedHashMap<>();\n iExpr.requiredArgs.stream()\n .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)\n .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));\n\n List params = invokableSymbol.params;\n List incRecordLiterals = new ArrayList<>();\n BLangRecordLiteral incRecordParamAllowAdditionalFields = null;\n\n int varargIndex = 0;\n\n BType varargType = null;\n boolean tupleTypedVararg = false;\n\n if (varargRef != null) {\n varargType = Types.getImpliedType(varargRef.getBType());\n tupleTypedVararg = varargType.tag == TypeTags.TUPLE;\n }\n\n \n for (int i = 0; i < params.size(); i++) {\n BVarSymbol param = params.get(i);\n if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {\n \n args.add(iExpr.requiredArgs.get(i));\n } else if (namedArgs.containsKey(param.name.value)) {\n \n args.add(namedArgs.remove(param.name.value));\n } else if (param.getFlags().contains(Flag.INCLUDED)) {\n BLangRecordLiteral 
recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();\n BType paramType = param.type;\n recordLiteral.setBType(paramType);\n args.add(recordLiteral);\n incRecordLiterals.add(recordLiteral);\n if (((BRecordType) Types.getImpliedType(paramType)).restFieldType != symTable.noType) {\n incRecordParamAllowAdditionalFields = recordLiteral;\n }\n } else if (varargRef == null) {\n \n BLangExpression expr = new BLangIgnoreExpr();\n expr.setBType(param.type);\n args.add(expr);\n } else {\n \n \n if (Types.getImpliedType(varargRef.getBType()).tag == TypeTags.RECORD) {\n if (param.isDefaultable) {\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varargRef.pos);\n BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,\n List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);\n BLangSimpleVariableDef variableDef = createVarDef(\"$hasKey$\", hasKeyInvocation.getBType(),\n hasKeyInvocation, hasKeyInvocation.pos);\n blockStmt.stmts.add(variableDef);\n BLangSimpleVarRef simpleVarRef = ASTBuilderUtil.createVariableRef(variableDef.pos,\n variableDef.var.symbol);\n BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));\n BLangIndexBasedAccess memberAccessExpr =\n ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);\n BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);\n BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type, simpleVarRef,\n memberAccessExpr, ignoreExpr);\n BLangDynamicArgExpr dynamicArgExpr =\n ASTBuilderUtil.createDynamicParamExpression(simpleVarRef, param, ternaryExpr);\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, dynamicArgExpr);\n stmtExpr.setBType(dynamicArgExpr.getBType());\n args.add(rewriteExpr(stmtExpr));\n } else {\n BLangFieldBasedAccess fieldBasedAccessExpression =\n ASTBuilderUtil.createFieldAccessExpr(varargRef,\n 
ASTBuilderUtil.createIdentifier(param.pos, param.name.value));\n fieldBasedAccessExpression.setBType(param.type);\n args.add(fieldBasedAccessExpression);\n }\n } else {\n BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));\n BType memberAccessExprType = tupleTypedVararg ?\n ((BTupleType) varargType).getTupleTypes().get(varargIndex)\n : ((BArrayType) varargType).eType;\n args.add(types.addConversionExprIfRequired(\n ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType, varargRef, indexExpr),\n param.type));\n varargIndex++;\n }\n }\n }\n if (!namedArgs.isEmpty()) {\n setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);\n }\n iExpr.requiredArgs = args;\n }\n\n private void setFieldsForIncRecordLiterals(Map namedArgs,\n List incRecordLiterals,\n BLangRecordLiteral incRecordParamAllowAdditionalFields) {\n for (String name : namedArgs.keySet()) {\n boolean isAdditionalField = true;\n BLangNamedArgsExpression expr = (BLangNamedArgsExpression) namedArgs.get(name);\n for (BLangRecordLiteral recordLiteral : incRecordLiterals) {\n LinkedHashMap fields =\n ((BRecordType) Types.getImpliedType(recordLiteral.getBType())).fields;\n if (fields.containsKey(name) &&\n Types.getImpliedType(fields.get(name).type).tag != TypeTags.NEVER) {\n isAdditionalField = false;\n createAndAddRecordFieldForIncRecordLiteral(recordLiteral, expr);\n break;\n }\n }\n if (isAdditionalField) {\n createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, expr);\n }\n }\n }\n\n private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,\n BLangNamedArgsExpression expr) {\n BLangSimpleVarRef varRef = new BLangSimpleVarRef();\n varRef.variableName = expr.name;\n BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField = ASTBuilderUtil.\n createBLangRecordKeyValue(varRef, expr.expr);\n recordLiteral.fields.add(recordKeyValueField);\n }\n\n private BLangBlockStmt 
getSafeErrorAssignment(Location location, BLangSimpleVarRef ref,\n BSymbol invokableSymbol,\n List equivalentErrorTypes,\n boolean isCheckPanicExpr) {\n \n \n BType enclosingFuncReturnType = Types.getImpliedType(((BInvokableType) invokableSymbol.type).retType);\n Set returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?\n ((BUnionType) enclosingFuncReturnType).getMemberTypes() :\n new LinkedHashSet<>() {{\n add(enclosingFuncReturnType);\n }};\n\n \n boolean returnOnError = equivalentErrorTypes.stream()\n .allMatch(errorType -> returnTypeSet.stream()\n .anyMatch(retType -> types.isAssignable(errorType, retType)));\n\n String patternFailureCaseVarName = GEN_VAR_PREFIX.value + \"t_failure\";\n BLangSimpleVariable errorVar =\n ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType,\n createTypeCastExpr(ref, symTable.errorType),\n new BVarSymbol(0, Names.fromString(patternFailureCaseVarName),\n this.env.scope.owner.pkgID, symTable.errorType,\n this.env.scope.owner, location, VIRTUAL));\n\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(location);\n BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDef(location, errorVar);\n blockStmt.addStatement(errorVarDef);\n BLangVariableReference errorVarRef = ASTBuilderUtil.createVariableRef(location, errorVar.symbol);\n if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {\n \n BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();\n failStmt.pos = location;\n failStmt.expr = errorVarRef;\n blockStmt.addStatement(failStmt);\n if (returnOnError && this.shouldReturnErrors) {\n BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location, rewrite(errorVarRef, env));\n errorReturn.desugared = true;\n failStmt.exprStmt = errorReturn;\n }\n } else {\n \n BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();\n panicNode.pos = location;\n panicNode.expr = errorVarRef;\n blockStmt.addStatement(panicNode);\n }\n\n return 
blockStmt;\n }\n\n private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {\n if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {\n BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;\n List memberTypes = new ArrayList<>();\n for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {\n BType member = getStructuredBindingPatternType(tupleVariable.memberVariables.get(i));\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(member);\n memberTypes.add(\n new BTupleMember(member, varSymbol));\n }\n BTupleType tupleType = new BTupleType(memberTypes);\n if (tupleVariable.restVariable != null) {\n BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);\n tupleType.restType = restArrayType.eType;\n }\n return tupleType;\n }\n\n if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {\n BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;\n\n BRecordTypeSymbol recordSymbol =\n Symbols.createRecordSymbol(0, Names.fromString(\"$anonRecordType$\" + UNDERSCORE + recordCount++),\n env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,\n VIRTUAL);\n recordSymbol.scope = new Scope(recordSymbol);\n\n LinkedHashMap fields = new LinkedHashMap<>();\n List typeDefFields = new ArrayList<>();\n\n for (int i = 0; i < recordVariable.variableList.size(); i++) {\n String fieldNameStr = recordVariable.variableList.get(i).key.value;\n Name fieldName = Names.fromString(fieldNameStr);\n BType fieldType = getStructuredBindingPatternType(\n recordVariable.variableList.get(i).valueBindingPattern);\n BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,\n recordSymbol, bindingPatternVariable.pos, VIRTUAL);\n\n \n fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));\n typeDefFields.add(ASTBuilderUtil.createVariable(null, 
fieldNameStr, fieldType, null, fieldSymbol));\n recordSymbol.scope.define(fieldName, fieldSymbol);\n }\n\n BRecordType recordVarType = new BRecordType(recordSymbol);\n recordVarType.fields = fields;\n\n \n recordVarType.restFieldType = recordVariable.restParam != null ?\n ((BRecordType) recordVariable.restParam.getBType()).restFieldType :\n symTable.anydataType;\n recordSymbol.type = recordVarType;\n recordVarType.tsymbol = recordSymbol;\n\n BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,\n recordVarType,\n bindingPatternVariable.pos);\n TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordVarType, recordSymbol, recordTypeNode, env);\n\n return recordVarType;\n }\n\n if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {\n BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;\n BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(\n SymTag.ERROR,\n Flags.PUBLIC,\n Names.fromString(\"$anonErrorType$\" + UNDERSCORE + errorCount++),\n env.enclPkg.symbol.pkgID,\n null, null, errorVariable.pos, VIRTUAL);\n BType detailType;\n if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {\n detailType = symTable.detailType;\n } else {\n detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,\n errorVariable.pos);\n\n BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);\n TypeDefBuilderHelper.createTypeDefinitionForTSymbol(detailType, detailType.tsymbol,\n recordTypeNode, env);\n }\n BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);\n errorTypeSymbol.type = errorType;\n\n TypeDefBuilderHelper.createTypeDefinitionForTSymbol(errorType, errorTypeSymbol,\n createErrorTypeNode(errorType), env);\n return errorType;\n }\n\n return bindingPatternVariable.getBType();\n }\n\n private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable 
errorVariable, BRecordType detailType) {\n List fieldList = new ArrayList<>();\n for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {\n BVarSymbol symbol = field.valueBindingPattern.symbol;\n if (symbol == null) {\n symbol = new BVarSymbol(Flags.PUBLIC, Names.fromString(field.key.value + \"$\"),\n this.env.enclPkg.packageID, symTable.pureType, null,\n field.valueBindingPattern.pos, VIRTUAL);\n }\n BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(\n field.valueBindingPattern.pos,\n symbol.name.value,\n field.valueBindingPattern.getBType(),\n field.valueBindingPattern.expr,\n symbol);\n fieldList.add(fieldVar);\n }\n return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);\n }\n\n private BType createDetailType(List detail,\n BLangSimpleVariable restDetail, int errorNo, Location pos) {\n BRecordType detailRecordType = createAnonRecordType(pos);\n\n if (restDetail == null) {\n detailRecordType.sealed = true;\n }\n\n for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {\n Name fieldName = names.fromIdNode(detailEntry.key);\n BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);\n BVarSymbol fieldSym = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecordType.tsymbol.pkgID, fieldType,\n detailRecordType.tsymbol, detailEntry.key.pos, VIRTUAL);\n detailRecordType.fields.put(fieldName.value, new BField(fieldName, detailEntry.key.pos, fieldSym));\n detailRecordType.tsymbol.scope.define(fieldName, fieldSym);\n }\n\n return detailRecordType;\n }\n\n private BRecordType createAnonRecordType(Location pos) {\n BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(\n SymTag.RECORD,\n Flags.PUBLIC,\n Names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),\n env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);\n detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);\n\n BRecordType detailRecordType = new 
BRecordType(detailRecordTypeSymbol);\n detailRecordType.restFieldType = symTable.anydataType;\n return detailRecordType;\n }\n\n BLangErrorType createErrorTypeNode(BErrorType errorType) {\n BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();\n errorTypeNode.setBType(errorType);\n return errorTypeNode;\n }\n\n private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,\n BLangExpression expression) {\n\n BLangBinaryExpr binaryExpr;\n if (NodeKind.GROUP_EXPR == expression.getKind()) {\n return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);\n }\n\n if (NodeKind.BINARY_EXPR == expression.getKind()) {\n binaryExpr = (BLangBinaryExpr) expression;\n BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);\n BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);\n\n binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,\n (BOperatorSymbol) symResolver\n .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));\n } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF\n && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {\n BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();\n anyType.setBType(symTable.anyType);\n anyType.typeKind = TypeKind.ANY;\n return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);\n } else {\n binaryExpr = ASTBuilderUtil\n .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);\n BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),\n expression.getBType());\n if (opSymbol == symTable.notFoundSymbol) {\n opSymbol = symResolver\n .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),\n binaryExpr, env);\n }\n binaryExpr.opSymbol = (BOperatorSymbol) 
opSymbol;\n }\n return binaryExpr;\n }\n\n private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {\n return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);\n }\n\n private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {\n BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();\n varRef.pos = variable.pos;\n varRef.variableName = variable.name;\n varRef.symbol = variable.symbol;\n varRef.setBType(variable.getBType());\n\n BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();\n assignmentStmt.expr = variable.expr;\n assignmentStmt.pos = variable.pos;\n assignmentStmt.setVariable(varRef);\n return assignmentStmt;\n }\n\n private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,\n BVarSymbol selfSymbol) {\n return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(), selfSymbol,\n variable.name);\n }\n\n private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,\n BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,\n BLangIdentifier fieldName) {\n BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);\n BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, fieldName);\n fieldAccess.symbol = fieldSymbol;\n fieldAccess.setBType(fieldType);\n fieldAccess.isStoreOnCreation = true;\n\n BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();\n \n expr.pos = this.symTable.builtinPos;\n fieldName.pos = this.symTable.builtinPos;\n fieldSymbol.pos = this.symTable.builtinPos;\n assignmentStmt.expr = expr;\n assignmentStmt.pos = function.pos;\n assignmentStmt.setVariable(fieldAccess);\n\n SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, 
env);\n return rewrite(assignmentStmt, initFuncEnv);\n }\n\n private boolean safeNavigate(BLangAccessExpression accessExpr) {\n if (accessExpr.isLValue || accessExpr.expr == null) {\n return false;\n }\n\n if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {\n return true;\n }\n\n NodeKind kind = accessExpr.expr.getKind();\n if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||\n kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {\n return safeNavigate((BLangAccessExpression) accessExpr.expr);\n }\n\n return false;\n }\n\n private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {\n BType originalExprType = accessExpr.getBType();\n \n String matchTempResultVarName = GEN_VAR_PREFIX.value + \"temp_result\";\n BLangSimpleVariable tempResultVar =\n ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,\n new BVarSymbol(0, Names.fromString(matchTempResultVarName),\n this.env.scope.owner.pkgID, accessExpr.getBType(),\n this.env.scope.owner, accessExpr.pos, VIRTUAL));\n BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);\n BLangVariableReference tempResultVarRef =\n ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);\n\n \n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(accessExpr.pos);\n blockStmt.stmts.add(tempResultVarDef);\n handleSafeNavigation(blockStmt, accessExpr, accessExpr.getBType(), tempResultVar);\n\n \n BLangMatchStatement matchStmt = this.matchStmtStack.firstElement();\n blockStmt.stmts.add(matchStmt);\n BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);\n stmtExpression.setBType(originalExprType);\n \n this.matchStmtStack = new Stack<>();\n this.accessExprStack = new Stack<>();\n this.successClause = null;\n this.safeNavigationAssignment = null;\n return stmtExpression;\n }\n\n private void handleSafeNavigation(BLangBlockStmt blockStmt, 
BLangAccessExpression accessExpr, BType type,\n BLangSimpleVariable tempResultVar) {\n if (accessExpr.expr == null) {\n return;\n }\n\n \n NodeKind kind = accessExpr.expr.getKind();\n if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {\n handleSafeNavigation(blockStmt, (BLangAccessExpression) accessExpr.expr, type, tempResultVar);\n }\n\n if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {\n BType originalType = Types.getImpliedType(accessExpr.originalType);\n if (isMapJson(originalType, false)) {\n accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));\n } else {\n accessExpr.setBType(originalType);\n }\n if (this.safeNavigationAssignment != null) {\n this.safeNavigationAssignment.expr =\n types.addConversionExprIfRequired(accessExpr, tempResultVar.getBType());\n }\n return;\n }\n\n /*\n * If the field access is a safe navigation, create a match expression.\n * Then chain the current expression as the success-pattern of the parent\n * match expr, if available.\n * eg:\n * x but { <--- parent match expr\n * error e => e,\n * T t => t.y but { <--- current expr\n * error e => e,\n * R r => r.z\n * }\n * }\n */\n\n BLangExpression matchExpr = accessExpr.expr;\n BLangSimpleVariableDef variableDef =\n createVarDef(\"$varDef$\", matchExpr.getBType(), matchExpr, matchExpr.pos);\n BLangSimpleVarRef simpleVarRef = ASTBuilderUtil.createVariableRef(variableDef.pos, variableDef.var.symbol);\n accessExpr.expr = simpleVarRef;\n blockStmt.stmts.add(variableDef);\n Location pos = accessExpr.pos;\n BLangMatchStatement matchStmt = ASTBuilderUtil.createMatchStatement(simpleVarRef, pos);\n BType matchExprType = accessExpr.expr.getBType();\n\n boolean isAllTypesRecords = false;\n LinkedHashSet memTypes = new LinkedHashSet<>();\n BType referredType = Types.getImpliedType(matchExpr.getBType());\n if (referredType.tag == TypeTags.UNION) {\n memTypes = new LinkedHashSet<>(((BUnionType) 
referredType).getMemberTypes());\n isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);\n }\n\n \n if (accessExpr.nilSafeNavigation) {\n matchStmt.addMatchClause(getMatchNullClause(matchExpr, tempResultVar));\n matchStmt.setBType(type);\n memTypes.remove(symTable.nilType);\n }\n\n \n if (accessExpr.errorSafeNavigation) {\n matchStmt.addMatchClause(getMatchErrorClause(matchExpr, tempResultVar));\n matchStmt.setBType(type);\n matchStmt.pos = pos;\n memTypes.remove(symTable.errorType);\n }\n\n BLangMatchClause successClause = null;\n Name field = getFieldName(accessExpr);\n if (field == Names.EMPTY) {\n successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,\n accessExpr.errorSafeNavigation);\n matchStmt.addMatchClause(successClause);\n pushToMatchStatementStack(matchStmt, successClause, pos);\n return;\n }\n\n if (isAllTypesRecords) {\n for (BType memberType : memTypes) {\n BRecordType recordType = (BRecordType) Types.getImpliedType(memberType);\n if (recordType.fields.containsKey(field.value) || !recordType.sealed) {\n successClause = getSuccessPatternClause(memberType, matchExpr, accessExpr, tempResultVar,\n accessExpr.errorSafeNavigation);\n matchStmt.addMatchClause(successClause);\n }\n }\n matchStmt.addMatchClause(getMatchAllAndNilReturnClause(matchExpr, tempResultVar));\n pushToMatchStatementStack(matchStmt, successClause, pos);\n return;\n }\n\n \n successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,\n accessExpr.errorSafeNavigation);\n matchStmt.addMatchClause(successClause);\n pushToMatchStatementStack(matchStmt, successClause, pos);\n }\n\n private boolean isMapJson(BType originalType, boolean fromMap) {\n originalType = Types.getImpliedType(originalType);\n return ((originalType.tag == TypeTags.MAP) && isMapJson(((BMapType) originalType).getConstraint(), true))\n || ((originalType.tag == TypeTags.JSON) && fromMap);\n }\n\n private void 
pushToMatchStatementStack(BLangMatchStatement matchStmt, BLangMatchClause successClause,\n Location pos) {\n this.matchStmtStack.push(matchStmt);\n if (this.successClause != null) {\n this.successClause.blockStmt = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(matchStmt));\n }\n this.successClause = successClause;\n }\n\n private Name getFieldName(BLangAccessExpression accessExpr) {\n Name field = Names.EMPTY;\n if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {\n field = new Name(((BLangFieldBasedAccess) accessExpr).field.value);\n } else if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {\n BLangExpression indexBasedExpression = ((BLangIndexBasedAccess) accessExpr).indexExpr;\n if (indexBasedExpression.getKind() == NodeKind.LITERAL) {\n field = new Name(((BLangLiteral) indexBasedExpression).value.toString());\n }\n }\n return field;\n }\n\n private boolean isAllTypesAreRecordsInUnion(LinkedHashSet memTypes) {\n for (BType memType : memTypes) {\n int typeTag = Types.getImpliedType(memType).tag;\n if (typeTag != TypeTags.RECORD && typeTag != TypeTags.ERROR && typeTag != TypeTags.NIL) {\n return false;\n }\n }\n return true;\n }\n\n private BLangMatchClause getMatchErrorClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {\n String errorPatternVarName = GEN_VAR_PREFIX.value + \"t_match_error\";\n Location pos = matchExpr.pos;\n BVarSymbol errorPatternVarSymbol = new BVarSymbol(0, Names.fromString(errorPatternVarName),\n this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);\n BLangCaptureBindingPattern captureBindingPattern =\n ASTBuilderUtil.createCaptureBindingPattern(errorPatternVarSymbol, errorPatternVarName);\n BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =\n ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);\n\n BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, errorPatternVarSymbol);\n 
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);\n BLangAssignment assignmentStmt =\n ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);\n BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));\n BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getErrorTypeNode());\n matchGuard.setBType(symTable.booleanType);\n\n return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);\n }\n\n private BLangMatchClause getMatchNullClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {\n String nullPatternVarName = GEN_VAR_PREFIX.value + \"t_match_null\";\n Location pos = matchExpr.pos;\n BVarSymbol nullPatternVarSymbol = new BVarSymbol(0, Names.fromString(nullPatternVarName),\n this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);\n BLangCaptureBindingPattern captureBindingPattern =\n ASTBuilderUtil.createCaptureBindingPattern(nullPatternVarSymbol, nullPatternVarName);\n BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =\n ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);\n\n BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, nullPatternVarSymbol);\n BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);\n BLangAssignment assignmentStmt =\n ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);\n BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));\n BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getNillTypeNode());\n matchGuard.setBType(symTable.booleanType);\n\n return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);\n 
}\n\n private BLangMatchClause getMatchAllAndNilReturnClause(BLangExpression matchExpr,\n BLangSimpleVariable tempResultVar) {\n Location pos = matchExpr.pos;\n BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);\n BLangAssignment assignmentStmt =\n ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, createLiteral(pos, symTable.nilType,\n Names.NIL_VALUE));\n BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));\n\n BLangWildCardMatchPattern wildCardMatchPattern = ASTBuilderUtil.createWildCardMatchPattern(matchExpr);\n wildCardMatchPattern.setBType(symTable.anyType);\n return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, null, wildCardMatchPattern);\n }\n\n private BLangMatchClause getSuccessPatternClause(BType type, BLangExpression matchExpr,\n BLangAccessExpression accessExpr,\n BLangSimpleVariable tempResultVar, boolean liftError) {\n type = types.getSafeType(type, true, liftError);\n String successPatternVarName = GEN_VAR_PREFIX.value + \"t_match_success\";\n\n Location pos = accessExpr.pos;\n BVarSymbol successPatternSymbol;\n if (Types.getImpliedType(type).tag == TypeTags.INVOKABLE) {\n successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, Names.fromString(successPatternVarName),\n this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);\n } else {\n successPatternSymbol = new BVarSymbol(0, Names.fromString(successPatternVarName),\n this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);\n }\n\n BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,\n type, null, successPatternSymbol);\n BLangSimpleVarRef successPatternVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos,\n successPatternVar.symbol);\n\n BLangCaptureBindingPattern captureBindingPattern =\n 
ASTBuilderUtil.createCaptureBindingPattern(successPatternSymbol, successPatternVarName);\n BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =\n ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);\n\n BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);\n if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {\n ((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;\n }\n if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {\n ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =\n ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;\n }\n\n tempAccessExpr.expr = types.addConversionExprIfRequired(successPatternVarRef, type);\n tempAccessExpr.errorSafeNavigation = false;\n tempAccessExpr.nilSafeNavigation = false;\n accessExpr.cloneRef = null;\n\n \n \n \n if (TypeTags.isXMLTypeTag(Types.getImpliedType(tempAccessExpr.expr.getBType()).tag)) {\n \n tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,\n symTable.nilType));\n } else {\n tempAccessExpr.setBType(accessExpr.originalType);\n }\n tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;\n\n BLangVariableReference tempResultVarRef =\n ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);\n\n BLangExpression assignmentRhsExpr =\n types.addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());\n BLangAssignment assignmentStmt =\n ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr);\n BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, this.env.scope,\n Lists.of(assignmentStmt));\n BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, successPatternVarRef, createTypeNode(type));\n matchGuard.setBType(symTable.booleanType);\n\n return 
ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);\n }\n\n BLangValueType getNillTypeNode() {\n BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();\n nillTypeNode.typeKind = TypeKind.NIL;\n nillTypeNode.setBType(symTable.nilType);\n return nillTypeNode;\n }\n\n BLangValueType createTypeNode(BType type) {\n BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();\n typeNode.typeKind = type.getKind();\n typeNode.setBType(type);\n return typeNode;\n }\n\n private BLangValueExpression cloneExpression(BLangExpression expr) {\n switch (expr.getKind()) {\n case SIMPLE_VARIABLE_REF:\n return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);\n case FIELD_BASED_ACCESS_EXPR:\n case INDEX_BASED_ACCESS_EXPR:\n return cloneAccessExpr((BLangAccessExpression) expr);\n default:\n throw new IllegalStateException();\n }\n }\n\n private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {\n if (originalAccessExpr.expr == null) {\n return originalAccessExpr;\n }\n\n BLangExpression varRef;\n NodeKind kind = originalAccessExpr.expr.getKind();\n if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {\n varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);\n } else {\n varRef = cloneExpression(originalAccessExpr.expr);\n }\n varRef.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false));\n\n BLangAccessExpression accessExpr;\n switch (originalAccessExpr.getKind()) {\n case FIELD_BASED_ACCESS_EXPR:\n accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,\n ((BLangFieldBasedAccess) originalAccessExpr).field);\n break;\n case INDEX_BASED_ACCESS_EXPR:\n accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,\n ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);\n break;\n default:\n throw new IllegalStateException();\n }\n\n accessExpr.originalType = 
originalAccessExpr.originalType;\n accessExpr.pos = originalAccessExpr.pos;\n accessExpr.isLValue = originalAccessExpr.isLValue;\n accessExpr.symbol = originalAccessExpr.symbol;\n accessExpr.errorSafeNavigation = false;\n accessExpr.nilSafeNavigation = false;\n\n \n \n \n accessExpr.setBType(originalAccessExpr.originalType);\n return accessExpr;\n }\n\n private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {\n BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);\n return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.ADD,\n (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,\n symTable.intType,\n symTable.intType));\n }\n\n private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {\n BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);\n return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.SUB,\n (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,\n symTable.intType,\n symTable.intType));\n }\n\n BLangLiteral getBooleanLiteral(boolean value) {\n BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();\n literal.value = value;\n literal.setBType(symTable.booleanType);\n literal.pos = symTable.builtinPos;\n return literal;\n }\n\n private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {\n BType returnType = symTable.nilType;\n\n \n if (classDefinition.initFunction != null) {\n returnType = classDefinition.initFunction.getBType().getReturnType();\n }\n\n BLangFunction initFunction =\n TypeDefBuilderHelper.createInitFunctionForStructureType(classDefinition.symbol, env, names,\n GENERATED_INIT_SUFFIX, classDefinition.getBType(), returnType);\n BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);\n 
typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol,\n (BInvokableType) initFunction.getBType(), null);\n classDefinition.generatedInitFunction = initFunction;\n initFunction.returnTypeNode.setBType(returnType);\n return initFunction;\n }\n\n private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {\n /*\n * Desugar (lhsExpr && rhsExpr) to following if-else:\n *\n * logical AND:\n * -------------\n * T $result$;\n * if (lhsExpr) {\n * $result$ = rhsExpr;\n * } else {\n * $result$ = false;\n * }\n *\n * logical OR:\n * -------------\n * T $result$;\n * if (lhsExpr) {\n * $result$ = true;\n * } else {\n * $result$ = rhsExpr;\n * }\n *\n */\n BLangSimpleVariableDef resultVarDef = createVarDef(\"$result$\", binaryExpr.getBType(), null,\n symTable.builtinPos);\n BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);\n BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);\n\n \n BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,\n resultVarDef.var.symbol);\n BLangExpression thenResult;\n if (binaryExpr.opKind == OperatorKind.AND) {\n thenResult = binaryExpr.rhsExpr;\n } else {\n thenResult = getBooleanLiteral(true);\n }\n BLangAssignment thenAssignment =\n ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);\n thenBody.addStatement(thenAssignment);\n\n \n BLangExpression elseResult;\n BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,\n resultVarDef.var.symbol);\n if (binaryExpr.opKind == OperatorKind.AND) {\n elseResult = getBooleanLiteral(false);\n } else {\n elseResult = binaryExpr.rhsExpr;\n }\n BLangAssignment elseAssignment =\n ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);\n elseBody.addStatement(elseAssignment);\n\n \n BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, 
resultVarDef.var.symbol);\n BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);\n\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);\n stmtExpr.setBType(binaryExpr.getBType());\n\n result = rewriteExpr(stmtExpr);\n }\n\n protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {\n switch (expression.getKind()) {\n case TYPE_INIT_EXPR:\n case RECORD_LITERAL_EXPR:\n case OBJECT_CTOR_EXPRESSION:\n return true;\n case CHECK_EXPR:\n return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr);\n case TYPE_CONVERSION_EXPR:\n return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr);\n default:\n return false;\n }\n }\n\n private BType getRestType(BInvokableSymbol invokableSymbol) {\n if (invokableSymbol != null && invokableSymbol.restParam != null) {\n return invokableSymbol.restParam.type;\n }\n return null;\n }\n\n private BType getRestType(BLangFunction function) {\n if (function != null && function.restParam != null) {\n return function.restParam.getBType();\n }\n return null;\n }\n\n private BVarSymbol getRestSymbol(BLangFunction function) {\n if (function != null && function.restParam != null) {\n return function.restParam.symbol;\n }\n return null;\n }\n\n private boolean isComputedKey(RecordLiteralNode.RecordField field) {\n if (!field.isKeyValueField()) {\n return false;\n }\n return ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;\n }\n\n private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {\n List fields = mappingConstructorExpr.fields;\n\n BType type = mappingConstructorExpr.getBType();\n Location pos = mappingConstructorExpr.pos;\n\n List rewrittenFields = new ArrayList<>(fields.size());\n\n for 
(RecordLiteralNode.RecordField field : fields) {\n if (field.isKeyValueField()) {\n BLangRecordLiteral.BLangRecordKeyValueField keyValueField =\n (BLangRecordLiteral.BLangRecordKeyValueField) field;\n\n BLangRecordLiteral.BLangRecordKey key = keyValueField.key;\n BLangExpression origKey = key.expr;\n BLangExpression keyExpr;\n if (key.computedKey) {\n keyExpr = origKey;\n } else {\n keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,\n Utils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :\n ((BLangLiteral) origKey);\n }\n\n BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =\n ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),\n rewriteExpr(keyValueField.valueExpr));\n rewrittenField.pos = keyValueField.pos;\n rewrittenField.key.pos = key.pos;\n rewrittenFields.add(rewrittenField);\n } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {\n BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;\n rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(\n rewriteExpr(createStringLiteral(pos, Utils.unescapeJava(varRefField.variableName.value))),\n rewriteExpr(varRefField)));\n } else {\n BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =\n (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;\n spreadOpField.expr = rewriteExpr(spreadOpField.expr);\n rewrittenFields.add(spreadOpField);\n }\n }\n\n fields.clear();\n return new BLangMapLiteral(pos, type, rewrittenFields);\n }\n\n protected void addTransactionInternalModuleImport() {\n if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) {\n BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();\n List pkgNameComps = new ArrayList<>();\n pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));\n importDcl.pkgNameComps = pkgNameComps;\n importDcl.pos = env.enclPkg.symbol.pos;\n importDcl.orgName = 
ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);\n importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, \"trx\");\n importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, \"\");\n importDcl.symbol = symTable.internalTransactionModuleSymbol;\n env.enclPkg.imports.add(importDcl);\n env.enclPkg.symbol.imports.add(importDcl.symbol);\n }\n }\n}", "target_code": "if (type.getKind() != TypeKind.RECORD || isSpreadingAnOpenRecord(userSpecifiedFields)) {", "method_body_after": "private BLangBlockStmt desugarForeachToWhileWithIterator(BLangForeach foreach, BLangSimpleVariableDef varDef) {\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n\n \n\n BVarSymbol iteratorSymbol = varDef.var.symbol;\n\n \n BVarSymbol resultSymbol = new BVarSymbol(0, Names.fromString(\"$result$\"), this.env.scope.owner.pkgID,\n foreach.nillableResultType, this.env.scope.owner, foreach.pos,\n VIRTUAL);\n\n \n BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,\n foreach.nillableResultType, iteratorSymbol, resultSymbol);\n\n \n BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);\n BLangStatementExpression statementExpression = ASTBuilderUtil.createStatementExpression(\n resultVariableDefinition, resultReferenceInWhile);\n statementExpression.setBType(foreach.nillableResultType);\n\n \n BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);\n BLangTypeTestExpr typeTestExpr = ASTBuilderUtil\n .createTypeTestExpr(foreach.pos, statementExpression, userDefineType);\n \n BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();\n whileNode.pos = foreach.pos;\n whileNode.expr = typeTestExpr;\n whileNode.body = foreach.body;\n\n VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;\n\n \n \n \n BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, 
foreach.varType, resultSymbol);\n\n BLangExpression expr = valueAccessExpr.expr;\n \n \n valueAccessExpr.expr = types.addConversionExprIfRequired(expr, symTable.mapAllType);\n variableDefinitionNode.getVariable()\n .setInitialExpression(types.addConversionExprIfRequired(valueAccessExpr, foreach.varType));\n whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);\n\n \n BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);\n\n \n blockNode.addStatement(varDef);\n\n \n blockNode.addStatement(whileNode);\n return blockNode;\n }\n\n private BLangType getUserDefineTypeNode(BType type) {\n BLangUserDefinedType recordType =\n new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, \"\"),\n ASTBuilderUtil.createIdentifier(null, \"\"));\n recordType.setBType(type);\n return recordType;\n }\n\n @Override\n public void visit(BLangWhile whileNode) {\n if (whileNode.onFailClause != null) {\n BLangOnFailClause onFailClause = whileNode.onFailClause;\n whileNode.onFailClause = null;\n whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;\n BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause);\n result = rewrite(doStmt, env);\n } else {\n whileNode.expr = rewriteExpr(whileNode.expr);\n whileNode.body = rewrite(whileNode.body, env);\n result = whileNode;\n }\n }\n\n private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,\n BLangOnFailClause onFailClause) {\n BLangDo bLDo = (BLangDo) TreeBuilder.createDoNode();\n BLangBlockStmt doBlock = ASTBuilderUtil.createBlockStmt(location);\n doBlock.scope = new Scope(env.scope.owner);\n bLDo.body = doBlock;\n bLDo.pos = location;\n bLDo.onFailClause = onFailClause;\n bLDo.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;\n doBlock.stmts.add(statement);\n return bLDo;\n }\n\n @Override\n public void visit(BLangLock lockNode) {\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n BLangOnFailClause 
currentOnFailClause = this.onFailClause;\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);\n if (lockNode.onFailClause != null) {\n blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;\n rewrite(lockNode.onFailClause, env);\n }\n BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);\n blockStmt.addStatement(lockStmt);\n\n enclLocks.push(lockStmt);\n\n BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);\n BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);\n BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);\n statementExpression.setBType(symTable.nilType);\n\n BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();\n trapExpr.setBType(nillableError);\n trapExpr.expr = statementExpression;\n BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, Names.fromString(\"$errorResult\"),\n this.env.scope.owner.pkgID, nillableError,\n this.env.scope.owner, lockNode.pos, VIRTUAL);\n BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, \"$errorResult\",\n nillableError, trapExpr,\n nillableErrorVarSymbol);\n BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);\n blockStmt.addStatement(simpleVariableDef);\n\n BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);\n unLockStmt.relatedLock = lockStmt; \n blockStmt.addStatement(unLockStmt);\n BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);\n\n BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);\n BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();\n panicNode.pos = lockNode.pos;\n panicNode.expr = types.addConversionExprIfRequired(varRef, symTable.errorType);\n ifBody.addStatement(panicNode);\n\n BLangTypeTestExpr isErrorTest =\n 
ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());\n isErrorTest.setBType(symTable.booleanType);\n\n BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);\n blockStmt.addStatement(ifelse);\n result = rewrite(blockStmt, env);\n enclLocks.pop();\n this.onFailClause = currentOnFailClause;\n }\n\n @Override\n public void visit(BLangLockStmt lockStmt) {\n result = lockStmt;\n }\n\n @Override\n public void visit(BLangUnLockStmt unLockStmt) {\n result = unLockStmt;\n }\n\n\n private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,\n BLangSimpleVarRef shouldRetryRef) {\n BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();\n trxOnFailClause.pos = pos;\n trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);\n trxOnFailClause.body.scope = new Scope(env.scope.owner);\n trxOnFailClause.isInternal = true;\n\n \n \n BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, Names.fromString(\"$trxError$\"),\n env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);\n BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,\n \"$trxError$\", symTable.errorType, null, trxOnFailErrorSym);\n trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,\n trxOnFailError);\n trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);\n\n \n \n \n \n transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,\n trxBlockId, shouldRetryRef);\n\n BLangGroupExpr shouldNotPanic = new BLangGroupExpr();\n shouldNotPanic.setBType(symTable.booleanType);\n shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);\n\n BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);\n\n BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);\n\n BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();\n 
panicNode.pos = pos;\n panicNode.expr = caughtError;\n\n \n \n \n \n \n BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);\n trxOnFailClause.body.stmts.add(exitIf);\n\n BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();\n failStmt.pos = pos;\n failStmt.expr = caughtError;\n failBlock.stmts.add(failStmt);\n trxOnFailClause.bodyContainsFail = true;\n\n \n \n \n \n \n \n \n \n \n \n \n \n return trxOnFailClause;\n }\n\n @Override\n public void visit(BLangTransaction transactionNode) {\n if (transactionNode.onFailClause != null) {\n \n BLangOnFailClause onFailClause = transactionNode.onFailClause;\n transactionNode.onFailClause = null;\n transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;\n BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);\n \n \n \n \n \n \n \n \n result = rewrite(doStmt, env);\n } else {\n BLangLiteral currentTrxBlockId = this.trxBlockId;\n String uniqueId = String.valueOf(++transactionBlockCount);\n this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);\n boolean currShouldReturnErrors = this.shouldReturnErrors;\n this.shouldReturnErrors = true;\n\n BLangOnFailClause currOnFailClause = this.onFailClause;\n\n \n BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);\n BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, Names.fromString(\"$shouldPanic$\"),\n env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);\n shouldPanicVarSymbol.closure = true;\n BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,\n \"$shouldPanic$\", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);\n\n BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,\n shouldPanicVariable);\n BLangSimpleVarRef 
shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,\n shouldPanicVarSymbol);\n\n \n \n \n \n \n \n \n \n \n \n \n BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,\n this.shouldRetryRef);\n enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);\n\n boolean userDefinedOnFailAvbl = this.onFailClause != null;\n analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);\n\n BLangBlockStmt transactionStmtBlock =\n transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);\n\n transactionStmtBlock.stmts.add(0, shouldPanicDef);\n transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);\n transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?\n BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :\n BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n result = rewrite(transactionStmtBlock, this.env);\n\n this.shouldReturnErrors = currShouldReturnErrors;\n this.trxBlockId = currentTrxBlockId;\n swapAndResetEnclosingOnFail(currOnFailClause);\n }\n }\n\n @Override\n public void visit(BLangRollback rollbackNode) {\n BLangBlockStmt rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef);\n result = rewrite(rollbackStmtExpr, env);\n }\n\n private BLangOnFailClause createRetryInternalOnFail(Location pos,\n BLangSimpleVarRef retryResultRef,\n BLangSimpleVarRef retryManagerRef,\n BLangSimpleVarRef shouldRetryRef,\n BLangSimpleVarRef continueLoopRef,\n BLangSimpleVarRef returnResult) {\n BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();\n internalOnFail.pos = pos;\n internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);\n internalOnFail.body.scope = new Scope(env.scope.owner);\n\n BVarSymbol caughtErrorSym = new BVarSymbol(0, Names.fromString(\"$caughtError$\"),\n 
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);\n BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,\n \"$caughtError$\", symTable.errorType, null, caughtErrorSym);\n internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,\n caughtError);\n env.scope.define(caughtErrorSym.name, caughtErrorSym);\n BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);\n\n \n BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);\n internalOnFail.body.stmts.add(errorAssignment);\n\n \n BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,\n ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));\n internalOnFail.body.stmts.add(continueLoopTrue);\n\n \n BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,\n retryManagerRef, caughtErrorRef);\n BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,\n shouldRetryInvocation);\n internalOnFail.body.stmts.add(shouldRetryAssignment);\n\n BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();\n shouldNotRetryCheck.setBType(symTable.booleanType);\n shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);\n\n BLangGroupExpr exitCheck = new BLangGroupExpr();\n exitCheck.setBType(symTable.booleanType);\n exitCheck.expression = shouldNotRetryCheck;\n\n BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);\n BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);\n\n if (this.onFailClause != null) {\n \n \n BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();\n failStmt.pos = pos;\n failStmt.expr = retryResultRef;\n\n exitLogicBlock.stmts.add(failStmt);\n internalOnFail.bodyContainsFail = true;\n internalOnFail.body.stmts.add(exitIf);\n\n \n BLangContinue loopContinueStmt = (BLangContinue) 
TreeBuilder.createContinueNode();\n loopContinueStmt.pos = pos;\n internalOnFail.body.stmts.add(loopContinueStmt);\n\n \n \n \n \n } else {\n BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,\n ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));\n exitLogicBlock.stmts.add(returnErrorTrue);\n internalOnFail.body.stmts.add(exitIf);\n \n \n \n }\n return internalOnFail;\n }\n\n BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {\n List paramTypes = new ArrayList<>();\n paramTypes.add(symTable.booleanType);\n BInvokableType type = new BInvokableType(paramTypes, symTable.booleanType,\n null);\n BOperatorSymbol notOperatorSymbol = new BOperatorSymbol(\n Names.fromString(OperatorKind.NOT.value()), symTable.rootPkgSymbol.pkgID, type, symTable.rootPkgSymbol,\n symTable.builtinPos, VIRTUAL);\n return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType,\n OperatorKind.NOT, notOperatorSymbol);\n }\n\n BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,\n List lambdaFunctionVariable,\n TypeNode returnType, BLangFunctionBody lambdaBody) {\n BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();\n BLangFunction func =\n ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);\n lambdaFunction.function = func;\n func.requiredParams.addAll(lambdaFunctionVariable);\n func.setReturnTypeNode(returnType);\n func.desugaredReturnType = true;\n defineFunction(func, env.enclPkg);\n lambdaFunctionVariable = func.requiredParams;\n\n func.body = lambdaBody;\n func.desugared = false;\n lambdaFunction.pos = pos;\n List paramTypes = new ArrayList<>();\n lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));\n lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(),\n null));\n return lambdaFunction;\n }\n\n private void 
defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {\n final BPackageSymbol packageSymbol = targetPkg.symbol;\n final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol);\n symbolEnter.defineNode(funcNode, packageEnv);\n packageEnv.enclPkg.functions.add(funcNode);\n packageEnv.enclPkg.topLevelNodes.add(funcNode);\n }\n\n @Override\n public void visit(BLangForkJoin forkJoin) {\n result = forkJoin;\n }\n\n \n\n @Override\n public void visit(BLangLiteral literalExpr) {\n int tag = Types.getImpliedType(literalExpr.getBType()).tag;\n if (tag == TypeTags.ARRAY || tag == TypeTags.TUPLE) {\n \n result = rewriteBlobLiteral(literalExpr);\n return;\n }\n result = literalExpr;\n }\n\n private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {\n byte[] values = types.convertToByteArray((String) literalExpr.value);\n BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();\n arrayLiteralNode.setBType(literalExpr.getBType());\n arrayLiteralNode.pos = literalExpr.pos;\n arrayLiteralNode.exprs = new ArrayList<>();\n for (byte b : values) {\n arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));\n }\n return arrayLiteralNode;\n }\n\n @Override\n public void visit(BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {\n listConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);\n result = listConstructorSpreadOpExpr;\n }\n\n @Override\n public void visit(BLangListConstructorExpr listConstructor) {\n listConstructor.exprs = rewriteExprs(listConstructor.exprs);\n BLangExpression expr;\n BType listConstructorType = Types.getImpliedType(listConstructor.getBType());\n if (listConstructorType.tag == TypeTags.TUPLE) {\n expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());\n result = rewriteExpr(expr);\n } else if (listConstructorType.tag == TypeTags.JSON) {\n expr = new BLangJSONArrayLiteral(listConstructor.exprs, new 
BArrayType(listConstructor.getBType()));\n result = rewriteExpr(expr);\n } else if (getElementType(listConstructorType).tag == TypeTags.JSON) {\n expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());\n result = rewriteExpr(expr);\n } else if (listConstructorType.tag == TypeTags.TYPEDESC) {\n final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();\n typedescExpr.resolvedType = listConstructor.typedescType;\n typedescExpr.setBType(symTable.typeDesc);\n result = rewriteExpr(typedescExpr);\n } else {\n expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());\n result = rewriteExpr(expr);\n }\n }\n\n @Override\n public void visit(BLangTableConstructorExpr tableConstructorExpr) {\n rewriteExprs(tableConstructorExpr.recordLiteralList);\n result = tableConstructorExpr;\n }\n\n @Override\n public void visit(BLangArrayLiteral arrayLiteral) {\n arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);\n BType arrayLiteralType = Types.getImpliedType(arrayLiteral.getBType());\n if (arrayLiteralType.tag == TypeTags.JSON) {\n result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));\n return;\n } else if (getElementType(arrayLiteralType).tag == TypeTags.JSON) {\n result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());\n return;\n }\n result = arrayLiteral;\n }\n\n @Override\n public void visit(BLangTupleLiteral tupleLiteral) {\n if (tupleLiteral.isTypedescExpr) {\n final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();\n typedescExpr.resolvedType = tupleLiteral.typedescType;\n typedescExpr.setBType(symTable.typeDesc);\n result = rewriteExpr(typedescExpr);\n return;\n }\n List exprs = tupleLiteral.exprs;\n BTupleType tupleType = (BTupleType) Types.getImpliedType(tupleLiteral.getBType());\n List tupleMemberTypes = tupleType.getTupleTypes();\n int tupleMemberTypeSize = tupleMemberTypes.size();\n int tupleExprSize = 
exprs.size();\n\n boolean isInRestType = false;\n int i = 0;\n for (BLangExpression expr: exprs) {\n if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {\n BType spreadOpType = ((BLangListConstructorSpreadOpExpr) expr).expr.getBType();\n spreadOpType = Types.getImpliedType(spreadOpType);\n if (spreadOpType.tag == TypeTags.ARRAY) {\n BArrayType spreadOpBArray = (BArrayType) spreadOpType;\n if (spreadOpBArray.size >= 0) {\n i += spreadOpBArray.size;\n continue;\n }\n } else {\n BTupleType spreadOpTuple = spreadOpType.tag == TypeTags.INTERSECTION ?\n (BTupleType) ((BIntersectionType) spreadOpType).effectiveType : (BTupleType) spreadOpType;\n if (types.isFixedLengthTuple(spreadOpTuple)) {\n i += spreadOpTuple.getMembers().size();\n continue;\n }\n }\n isInRestType = true;\n continue;\n }\n\n BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();\n\n BType targetType = tupleType.restType;\n if (!isInRestType && i < tupleMemberTypeSize) {\n targetType = tupleMemberTypes.get(i);\n }\n\n types.setImplicitCastExpr(expr, expType, targetType);\n i++;\n }\n\n tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);\n result = tupleLiteral;\n }\n\n @Override\n public void visit(BLangGroupExpr groupExpr) {\n result = rewriteExpr(groupExpr.expression);\n }\n\n @Override\n public void visit(BLangRecordLiteral recordLiteral) {\n List fields = recordLiteral.fields;\n generateFieldsForUserUnspecifiedRecordFields(recordLiteral, fields);\n fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2)));\n result = rewriteExpr(rewriteMappingConstructor(recordLiteral));\n }\n\n private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {\n if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {\n return env.enclEnv;\n }\n\n if (env.enclEnv.node != null && (env.enclEnv.node.getKind() == NodeKind.ON_FAIL)) {\n return env.enclEnv;\n }\n\n if (env.enclInvokable != 
null && env.enclInvokable == encInvokable) {\n return findEnclosingInvokableEnv(env.enclEnv, encInvokable);\n }\n return env;\n }\n\n private void updateClosureVariable(BVarSymbol varSymbol, BLangInvokableNode encInvokable, Location pos) {\n if (!varSymbol.closure) {\n SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);\n BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, varSymbol);\n if (resolvedSymbol != symTable.notFoundSymbol) {\n varSymbol.closure = true;\n ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(varSymbol, pos));\n }\n }\n }\n\n private List getNamesOfUserSpecifiedRecordFields(List userSpecifiedFields) {\n List fieldNames = new ArrayList<>();\n\n for (RecordLiteralNode.RecordField field : userSpecifiedFields) {\n if (field.isKeyValueField()) {\n BLangExpression key = ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.expr;\n if (key.getKind() == NodeKind.LITERAL) {\n fieldNames.add(Utils.unescapeBallerina(((BLangLiteral) key).value.toString()));\n } else if (key.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {\n fieldNames.add(Utils.unescapeBallerina(((BLangSimpleVarRef) key).variableName.value));\n }\n } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {\n fieldNames.add(Utils.unescapeBallerina(((BLangSimpleVarRef) field).variableName.value));\n } else {\n addRequiredFieldsFromSpreadOperator(field, fieldNames);\n }\n }\n\n return fieldNames;\n }\n\n private void addRequiredFieldsFromSpreadOperator(RecordLiteralNode.RecordField field, List fieldNames) {\n BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =\n (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;\n BType type = Types.getReferredType(spreadOpField.expr.getBType());\n if (type.tag != TypeTags.RECORD) {\n return;\n }\n for (BField bField : ((BRecordType) type).fields.values()) {\n fieldNames.add(Utils.unescapeBallerina(bField.name.value));\n }\n }\n\n private void 
generateFieldsForUserUnspecifiedRecordFields(List fields,\n List fieldNames,\n Map defaultValues,\n Location pos, boolean isReadonly) {\n for (Map.Entry entry : defaultValues.entrySet()) {\n String fieldName = entry.getKey();\n if (fieldNames.contains(fieldName)) {\n continue;\n }\n fieldNames.add(fieldName);\n BInvokableSymbol invokableSymbol = entry.getValue();\n BLangExpression expression = getFunctionPointerInvocation(invokableSymbol);\n\n if (isReadonly && !Symbols.isFlagOn(invokableSymbol.retType.flags, Flags.READONLY)) {\n expression = visitCloneReadonly(expression, invokableSymbol.retType);\n }\n if (env.enclInvokable != null) {\n BLangInvocation invocation = (BLangInvocation) expression;\n if (invocation.expr.getKind() == NodeKind.INVOCATION) {\n updateClosureVariable((BVarSymbol) ((BLangInvocation) invocation.expr).symbol, env.enclInvokable,\n pos);\n } else {\n updateClosureVariable((BVarSymbol) invocation.symbol, env.enclInvokable, pos);\n }\n }\n BLangRecordLiteral.BLangRecordKeyValueField member = createRecordKeyValueField(pos, fieldName, expression);\n fields.add(member);\n }\n }\n\n private BLangRecordLiteral.BLangRecordKeyValueField createRecordKeyValueField(Location pos,\n String fieldName,\n BLangExpression expression) {\n BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();\n member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(pos, symTable.stringType,\n Utils.unescapeJava(fieldName)));\n member.valueExpr = types.addConversionExprIfRequired(expression, expression.getBType());\n return member;\n }\n\n public void generateFieldsForUserUnspecifiedRecordFields(BLangRecordLiteral recordLiteral,\n List userSpecifiedFields) {\n BType type = Types.getImpliedType(recordLiteral.getBType());\n \n \n \n if (type.getKind() != TypeKind.RECORD || isSpreadingAnOpenRecord(userSpecifiedFields)) {\n return;\n }\n List fieldNames = 
getNamesOfUserSpecifiedRecordFields(userSpecifiedFields);\n Location pos = recordLiteral.pos;\n BRecordType recordType = (BRecordType) type;\n boolean isReadonly = Symbols.isFlagOn(recordType.flags, Flags.READONLY);\n generateFieldsForUserUnspecifiedRecordFields(recordType, userSpecifiedFields, fieldNames, pos, isReadonly);\n }\n\n private boolean isSpreadingAnOpenRecord(List userSpecifiedFields) {\n for (RecordLiteralNode.RecordField field : userSpecifiedFields) {\n if (!(field instanceof BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOperatorField)) {\n continue;\n }\n BType type = Types.getReferredType(spreadOperatorField.expr.getBType());\n if (!(type instanceof BRecordType recordType)) {\n return true;\n }\n if (recordType.restFieldType != null &&\n !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(recordType.restFieldType)) {\n return true;\n }\n }\n return false;\n }\n\n private void generateFieldsForUserUnspecifiedRecordFields(BRecordType recordType,\n List fields,\n List fieldNames, Location pos,\n boolean isReadonly) {\n Map defaultValues = ((BRecordTypeSymbol) recordType.tsymbol).defaultValues;\n generateFieldsForUserUnspecifiedRecordFields(fields, fieldNames, defaultValues, pos, isReadonly);\n List typeInclusions = recordType.typeInclusions;\n for (BType typeInclusion : typeInclusions) {\n generateFieldsForUserUnspecifiedRecordFields((BRecordType) Types.getImpliedType(typeInclusion), fields,\n fieldNames, pos, isReadonly);\n }\n }\n\n @Override\n public void visit(BLangSimpleVarRef varRefExpr) {\n BLangSimpleVarRef genVarRefExpr = varRefExpr;\n\n \n if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {\n BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);\n qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;\n qnameExpr.localname = varRefExpr.variableName;\n qnameExpr.prefix = varRefExpr.pkgAlias;\n qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;\n qnameExpr.isUsedInXML = 
false;\n qnameExpr.pos = varRefExpr.pos;\n qnameExpr.setBType(symTable.stringType);\n result = qnameExpr;\n return;\n }\n\n if (varRefExpr.symbol == null) {\n result = varRefExpr;\n return;\n }\n\n \n if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {\n BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;\n if (varSymbol.originalSymbol != null) {\n varRefExpr.symbol = varSymbol.originalSymbol;\n if (varSymbol.closure) {\n varRefExpr.symbol.closure = true;\n }\n }\n }\n\n BType type = varRefExpr.getBType();\n\n BSymbol ownerSymbol = varRefExpr.symbol.owner;\n if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&\n Types.getImpliedType(varRefExpr.symbol.type).tag == TypeTags.INVOKABLE) {\n genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);\n } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&\n !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {\n genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);\n } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||\n (ownerSymbol.tag & SymTag.FUNCTION_TYPE) == SymTag.FUNCTION_TYPE ||\n (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {\n \n genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);\n } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {\n genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);\n } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||\n (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {\n\n \n \n if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {\n BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;\n BType referredType = Types.getImpliedType(constSymbol.literalType);\n if (referredType.tag <= TypeTags.BOOLEAN || referredType.tag == TypeTags.NIL) {\n BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,\n constSymbol.value.value);\n result = 
rewriteExpr(types.addConversionExprIfRequired(literal, varRefExpr.getBType()));\n return;\n }\n }\n\n \n \n genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);\n\n if (!enclLocks.isEmpty()) {\n BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;\n BLangLockStmt lockStmt = enclLocks.peek();\n lockStmt.addLockVariable(symbol);\n lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));\n }\n }\n\n genVarRefExpr.setBType(type);\n genVarRefExpr.pos = varRefExpr.pos;\n\n if ((varRefExpr.isLValue)\n || genVarRefExpr.symbol.name.equals(IGNORE)) { \n genVarRefExpr.isLValue = varRefExpr.isLValue;\n genVarRefExpr.setBType(varRefExpr.symbol.type);\n result = genVarRefExpr;\n return;\n }\n\n \n \n genVarRefExpr.isLValue = varRefExpr.isLValue;\n BType targetType = genVarRefExpr.getBType();\n genVarRefExpr.setBType(genVarRefExpr.symbol.type);\n BLangExpression expression = types.addConversionExprIfRequired(genVarRefExpr, targetType);\n result = expression.impConversionExpr != null ? 
expression.impConversionExpr : expression;\n }\n\n @Override\n public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {\n rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);\n }\n\n private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {\n if (safeNavigate(fieldAccessExpr)) {\n result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));\n return;\n }\n\n BLangAccessExpression targetVarRef = fieldAccessExpr;\n\n \n \n BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());\n fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);\n if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {\n fieldAccessExpr.expr = types.addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);\n }\n\n BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,\n Utils.unescapeJava(fieldAccessExpr.field.value));\n BType refType = Types.getImpliedType(varRefType);\n int varRefTypeTag = refType.tag;\n if (varRefTypeTag == TypeTags.OBJECT ||\n (varRefTypeTag == TypeTags.UNION &&\n Types.getImpliedType(\n ((BUnionType) refType).getMemberTypes().iterator().next()).tag == TypeTags.OBJECT)) {\n if (fieldAccessExpr.symbol != null &&\n Types.getImpliedType(fieldAccessExpr.symbol.type).tag == TypeTags.INVOKABLE &&\n ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {\n result = rewriteObjectMemberAccessAsField(fieldAccessExpr);\n return;\n } else {\n boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;\n\n if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {\n BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;\n BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) refType.tsymbol;\n BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;\n BAttachedFunction generatedInitializerFunc = 
objectTypeSymbol.generatedInitializerFunc;\n\n if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||\n (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {\n isStoreOnCreation = true;\n }\n }\n\n targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,\n (BVarSymbol) fieldAccessExpr.symbol, false,\n isStoreOnCreation);\n \n }\n } else if (varRefTypeTag == TypeTags.RECORD ||\n (varRefTypeTag == TypeTags.UNION &&\n Types.getImpliedType(\n ((BUnionType) refType).getMemberTypes().iterator().next()).tag == TypeTags.RECORD)) {\n if (fieldAccessExpr.symbol != null &&\n Types.getImpliedType(fieldAccessExpr.symbol.type).tag == TypeTags.INVOKABLE\n && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {\n targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);\n } else {\n targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,\n (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);\n }\n } else if (types.isLaxFieldAccessAllowed(refType)) {\n if (!types.isAssignable(refType, symTable.xmlType)) {\n if (varRefTypeTag == TypeTags.MAP &&\n TypeTags.isXMLTypeTag(Types.getImpliedType(((BMapType) refType).constraint).tag)) {\n result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));\n return;\n }\n \n \n fieldAccessExpr.expr = types.addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);\n targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);\n } else {\n fieldAccessExpr.expr = types.addConversionExprIfRequired(fieldAccessExpr.expr, symTable.xmlType);\n BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);\n xmlAccessInvocation.setBType(fieldAccessExpr.getBType());\n result = xmlAccessInvocation;\n return;\n }\n } else if (varRefTypeTag == 
TypeTags.MAP) {\n \n targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,\n fieldAccessExpr.isStoreOnCreation);\n } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {\n targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,\n fieldAccessExpr.fieldKind);\n }\n\n targetVarRef.isLValue = fieldAccessExpr.isLValue;\n targetVarRef.setBType(fieldAccessExpr.getBType());\n targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;\n result = targetVarRef;\n }\n\n @Override\n public void visit(BLangFieldBasedAccess fieldAccessExpr) {\n rewriteFieldBasedAccess(fieldAccessExpr);\n }\n\n private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {\n Location pos = fieldAccessExpr.pos;\n BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;\n \n BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();\n String funcName = \"$anon$method$delegate$\" + originalMemberFuncSymbol.name.value + \"$\" + lambdaFunctionCount++;\n BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),\n Names.fromString(funcName), env.enclPkg.packageID,\n originalMemberFuncSymbol.type, env.scope.owner, pos,\n VIRTUAL);\n funcSymbol.retType = originalMemberFuncSymbol.retType;\n funcSymbol.bodyExist = true;\n funcSymbol.params = new ArrayList<>();\n funcSymbol.scope = new Scope(funcSymbol);\n func.pos = pos;\n func.name = createIdentifier(pos, funcName);\n func.flagSet.add(Flag.LAMBDA);\n func.flagSet.add(Flag.ANONYMOUS);\n func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();\n func.symbol = funcSymbol;\n func.setBType(funcSymbol.type);\n func.closureVarSymbols = new LinkedHashSet<>();\n \n BLangExpression receiver = fieldAccessExpr.expr;\n \n \n BLangSimpleVariableDef intermediateObjDef = null;\n if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {\n BSymbol 
receiverSymbol = ((BLangVariableReference) receiver).symbol;\n receiverSymbol.closure = true;\n func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));\n } else {\n BLangSimpleVariableDef varDef = createVarDef(\"$$temp$obj$\" + annonVarCount++, receiver.getBType(),\n receiver, pos);\n intermediateObjDef = varDef;\n varDef.var.symbol.closure = true;\n env.scope.define(varDef.var.symbol.name, varDef.var.symbol);\n BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);\n func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));\n receiver = variableRef;\n }\n\n \n\n ArrayList requiredArgs = new ArrayList<>();\n for (BVarSymbol param : originalMemberFuncSymbol.params) {\n BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();\n fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,\n VIRTUAL);\n fParam.pos = pos;\n fParam.name = createIdentifier(pos, param.name.value);\n fParam.setBType(param.type);\n func.requiredParams.add(fParam);\n funcSymbol.params.add(fParam.symbol);\n funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);\n\n BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);\n requiredArgs.add(paramRef);\n }\n\n ArrayList restArgs = new ArrayList<>();\n if (originalMemberFuncSymbol.restParam != null) {\n BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();\n func.restParam = restParam;\n BVarSymbol restSym = originalMemberFuncSymbol.restParam;\n restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);\n restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,\n VIRTUAL);\n restParam.pos = pos;\n restParam.setBType(restSym.type);\n funcSymbol.restParam = restParam.symbol;\n funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);\n\n BLangSimpleVarRef restArg = createVariableRef(pos, 
restParam.symbol);\n restArgs.add(createRestArgsExpression(restArg));\n }\n\n BLangIdentifier field = fieldAccessExpr.field;\n BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();\n retStmt.expr = createObjectMethodInvocation(\n receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);\n ((BLangBlockFunctionBody) func.body).addStatement(retStmt);\n\n BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();\n lambdaFunction.function = func;\n lambdaFunction.capturedClosureEnv = env;\n env.enclPkg.functions.add(func);\n env.enclPkg.topLevelNodes.add(func);\n \n lambdaFunction.parent = env.enclInvokable;\n lambdaFunction.setBType(func.getBType());\n\n if (intermediateObjDef == null) {\n return rewrite(lambdaFunction, env);\n } else {\n BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));\n expr.setBType(lambdaFunction.getBType());\n return rewrite(expr, env);\n }\n }\n\n private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,\n BSymbol invocableSymbol,\n List requiredArgs,\n List restArgs) {\n BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();\n invocationNode.name = field;\n invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n\n invocationNode.expr = receiver;\n\n invocationNode.symbol = invocableSymbol;\n invocationNode.setBType(((BInvokableType) invocableSymbol.type).retType);\n invocationNode.requiredArgs = requiredArgs;\n invocationNode.restArgs = restArgs;\n return invocationNode;\n }\n\n private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {\n BLangStatementExpression statementExpression = new BLangStatementExpression();\n BLangBlockStmt block = new BLangBlockStmt();\n statementExpression.stmt = block;\n BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), 
symTable.errorType);\n Location pos = fieldAccessExpr.pos;\n BLangSimpleVariableDef result = createVarDef(\"$mapAccessResult$\", fieldAccessType, null, pos);\n block.addStatement(result);\n BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);\n resultRef.setBType(fieldAccessType);\n statementExpression.setBType(fieldAccessType);\n\n\n \n \n BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(\n fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);\n BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);\n BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);\n mapAccessExpr.setBType(xmlOrNil);\n BLangSimpleVariableDef mapResult = createVarDef(\"$mapAccess\", xmlOrNil, mapAccessExpr, pos);\n BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);\n block.addStatement(mapResult);\n\n BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);\n\n BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);\n\n ifStmt.expr = isLikeNilExpr;\n BLangBlockStmt resultNilBody = new BLangBlockStmt();\n ifStmt.body = resultNilBody;\n BLangBlockStmt resultHasValueBody = new BLangBlockStmt();\n ifStmt.elseStmt = resultHasValueBody;\n\n BLangErrorConstructorExpr errorConstructorExpr =\n (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();\n BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,\n Names.fromString(\"\"), Names.fromString(\"error\"));\n errorConstructorExpr.setBType(symbol.type);\n\n List positionalArgs = new ArrayList<>();\n List namedArgs = new ArrayList<>();\n positionalArgs.add(createStringLiteral(pos, \"{\" + RuntimeConstants.MAP_LANG_LIB + \"}InvalidKey\"));\n BLangNamedArgsExpression message = new BLangNamedArgsExpression();\n message.name = ASTBuilderUtil.createIdentifier(pos, \"key\");\n 
message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);\n namedArgs.add(message);\n errorConstructorExpr.positionalArgs = positionalArgs;\n errorConstructorExpr.namedArgs = namedArgs;\n\n BLangSimpleVariableDef errorDef =\n createVarDef(\"$_invalid_key_error\", symTable.errorType, errorConstructorExpr, pos);\n resultNilBody.addStatement(errorDef);\n\n BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);\n\n BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);\n errorVarAssignment.varRef = resultRef;\n errorVarAssignment.expr = errorRef;\n\n BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(\n pos, resultHasValueBody);\n mapResultAssignment.varRef = resultRef;\n mapResultAssignment.expr = mapResultRef;\n\n statementExpression.expr = resultRef;\n return statementExpression;\n }\n\n private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {\n ArrayList args = new ArrayList<>();\n\n String fieldName = fieldAccessExpr.field.value;\n if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {\n BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =\n (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;\n fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);\n }\n\n \n if (fieldName.equals(\"_\")) {\n return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,\n fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());\n }\n\n BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);\n args.add(attributeNameLiteral);\n args.add(isOptionalAccessToLiteral(fieldAccessExpr));\n\n return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,\n new ArrayList<>());\n }\n\n private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess 
fieldAccessExpr) {\n return rewrite(\n createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess()), env);\n }\n\n private String createExpandedQName(String nsURI, String localName) {\n return \"{\" + nsURI + \"}\" + localName;\n }\n\n @Override\n public void visit(BLangIndexBasedAccess indexAccessExpr) {\n if (safeNavigate(indexAccessExpr)) {\n result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));\n return;\n }\n\n BLangIndexBasedAccess targetVarRef = indexAccessExpr;\n indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);\n\n \n \n BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());\n BType varRefType = Types.getImpliedType(effectiveType);\n indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);\n if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {\n indexAccessExpr.expr = types.addConversionExprIfRequired(indexAccessExpr.expr, varRefType);\n }\n\n if (varRefType.tag == TypeTags.MAP) {\n targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,\n indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);\n } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {\n targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,\n indexAccessExpr.indexExpr,\n (BVarSymbol) indexAccessExpr.symbol, false);\n } else if (types.isSubTypeOfList(varRefType)) {\n targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,\n indexAccessExpr.indexExpr);\n } else if (types.isAssignable(varRefType, symTable.xmlType)) {\n BLangExpression indexAccessExprExpr = indexAccessExpr.expr;\n \n if (Types.getImpliedType(indexAccessExprExpr.getBType()).tag == TypeTags.UNION) {\n indexAccessExprExpr = createTypeCastExpr(indexAccessExprExpr, symTable.xmlType);\n }\n targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExprExpr, 
indexAccessExpr.indexExpr);\n } else if (types.isAssignable(varRefType, symTable.stringType)) {\n indexAccessExpr.expr = types.addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);\n targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,\n indexAccessExpr.indexExpr);\n } else if (varRefType.tag == TypeTags.TABLE) {\n targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,\n indexAccessExpr.indexExpr);\n }\n\n targetVarRef.isLValue = indexAccessExpr.isLValue;\n targetVarRef.setBType(indexAccessExpr.getBType());\n result = targetVarRef;\n }\n\n @Override\n public void visit(BLangInvocation iExpr) {\n BLangExpression invocation = rewriteInvocation(iExpr, false);\n if (invocation.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {\n ((BLangTypeConversionExpr) invocation).expr =\n createStmtExpr((BLangInvocation) ((BLangTypeConversionExpr) invocation).expr);\n result = invocation;\n } else {\n result = createStmtExpr((BLangInvocation) invocation);\n }\n }\n\n \n @Override\n public void visit(BFunctionPointerInvocation invocation) {\n visitArgs(invocation);\n\n invocation.expr = rewriteExpr(invocation.expr);\n result = invocation;\n }\n\n private void visitArgs(BLangInvocation invocation) {\n \n reorderArguments(invocation);\n\n rewriteExprs(invocation.requiredArgs);\n fixStreamTypeCastsInInvocationParams(invocation);\n fixNonRestArgTypeCastInTypeParamInvocation(invocation);\n\n rewriteExprs(invocation.restArgs);\n }\n\n private BLangStatementExpression createStmtExpr(BLangInvocation invocation) {\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(invocation.pos);\n BType type = Types.getImpliedType(invocation.symbol.type);\n BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) type.tsymbol;\n\n if (invokableTypeSymbol == null) {\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, invocation);\n stmtExpr.setBType(invocation.getBType());\n return stmtExpr;\n 
}\n TreeMap arguments = new TreeMap<>();\n Map defaultValues = invokableTypeSymbol.defaultValues;\n\n for (int i = 0; i < invokableTypeSymbol.params.size(); i++) {\n BLangExpression arg;\n BLangSimpleVariableDef variableDef;\n BLangSimpleVarRef simpleVarRef;\n if (invocation instanceof BLangInvocation.BLangAttachedFunctionInvocation) {\n arg = invocation.requiredArgs.get(i + 1);\n } else {\n arg = invocation.requiredArgs.get(i);\n }\n BVarSymbol param = invokableTypeSymbol.params.get(i);\n String paramName = param.name.value;\n if (arg.getKind() != NodeKind.IGNORE_EXPR) {\n if (invocation.expr == arg) {\n arguments.put(paramName, arg);\n continue;\n }\n if (arg.impConversionExpr != null) {\n variableDef = createSimpleVarDef(\"$\" + paramName + \"$\" + funcParamCount++, param.type, arg);\n } else {\n variableDef = createSimpleVarDef(\"$\" + paramName + \"$\" + funcParamCount++, arg.getBType(), arg);\n }\n simpleVarRef = ASTBuilderUtil.createVariableRef(invocation.pos, variableDef.var.symbol);\n simpleVarRef = rewrite(simpleVarRef, env);\n blockStmt.addStatement(variableDef);\n arguments.put(paramName, simpleVarRef);\n if (invocation instanceof BLangInvocation.BLangAttachedFunctionInvocation) {\n invocation.requiredArgs.set(i + 1, simpleVarRef);\n } else {\n invocation.requiredArgs.set(i, simpleVarRef);\n }\n continue;\n }\n\n BInvokableSymbol invokableSymbol = defaultValues.get(Utils.unescapeBallerina(paramName));\n BLangInvocation closureInvocation = getFunctionPointerInvocation(invokableSymbol);\n for (int m = 0; m < invokableSymbol.params.size(); m++) {\n String langLibFuncParam = invokableSymbol.params.get(m).name.value;\n closureInvocation.requiredArgs.add(arguments.get(langLibFuncParam));\n }\n variableDef = createVarDef(\"$\" + paramName + \"$\" + funcParamCount++, closureInvocation.getBType(),\n closureInvocation, arg.pos);\n simpleVarRef = ASTBuilderUtil.createVariableRef(invocation.pos, variableDef.var.symbol);\n simpleVarRef = rewrite(simpleVarRef, 
env);\n blockStmt.addStatement(variableDef);\n arguments.put(paramName, simpleVarRef);\n if (invocation instanceof BLangInvocation.BLangAttachedFunctionInvocation) {\n invocation.requiredArgs.set(i + 1, simpleVarRef);\n } else {\n invocation.requiredArgs.set(i, simpleVarRef);\n }\n }\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, invocation);\n stmtExpr.setBType(invocation.getBType());\n\n return stmtExpr;\n }\n\n private BLangInvocation getFunctionPointerInvocation(BInvokableSymbol symbol) {\n BLangInvocation funcInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();\n funcInvocation.setBType(symbol.retType);\n funcInvocation.symbol = symbol;\n funcInvocation.name = ASTBuilderUtil.createIdentifier(symbol.pos, symbol.name.value);\n return visitFunctionPointerInvocation(funcInvocation);\n }\n\n @Override\n public void visit(BLangErrorConstructorExpr errorConstructorExpr) {\n if (errorConstructorExpr.positionalArgs.size() == 1) {\n errorConstructorExpr.positionalArgs.add(createNilLiteral());\n }\n errorConstructorExpr.positionalArgs.set(1,\n types.addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));\n rewriteExprs(errorConstructorExpr.positionalArgs);\n\n BLangExpression errorDetail;\n BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,\n ((BErrorType) Types.getImpliedType(errorConstructorExpr.getBType())).detailType);\n if (errorConstructorExpr.namedArgs.isEmpty()) {\n errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());\n } else {\n for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {\n BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();\n member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,\n symTable.stringType, Utils.unescapeJava(namedArg.name.value)));\n\n if 
(Types.getImpliedType(recordLiteral.getBType()).tag == TypeTags.RECORD) {\n member.valueExpr = types.addConversionExprIfRequired(namedArg.expr, symTable.anyType);\n } else {\n member.valueExpr = types.addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());\n }\n recordLiteral.fields.add(member);\n }\n errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),\n ((BErrorType) Types.getImpliedType(errorConstructorExpr.getBType())).detailType);\n }\n errorConstructorExpr.errorDetail = errorDetail;\n result = errorConstructorExpr;\n }\n\n @Override\n public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {\n if (!actionInvocation.async && actionInvocation.invokedInsideTransaction) {\n transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);\n }\n\n \n if (!actionInvocation.functionPointerInvocation && actionInvocation.async &&\n Symbols.isFlagOn(actionInvocation.symbol.type.flags, Flags.ISOLATED)) {\n addStrandAnnotationWithThreadAny(actionInvocation.pos);\n actionInvocation.addAnnotationAttachment(this.strandAnnotAttachement);\n ((BInvokableSymbol) actionInvocation.symbol)\n .addAnnotation(this.strandAnnotAttachement.annotationAttachmentSymbol);\n }\n\n BLangExpression invocation = rewriteInvocation(actionInvocation, actionInvocation.async);\n if (invocation.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {\n ((BLangTypeConversionExpr) invocation).expr =\n createStmtExpr((BLangInvocation) ((BLangTypeConversionExpr) invocation).expr);\n result = invocation;\n } else {\n result = createStmtExpr((BLangInvocation) invocation);\n }\n }\n\n private void addStrandAnnotationWithThreadAny(Location pos) {\n if (this.strandAnnotAttachement == null) {\n \n this.strandAnnotAttachement = annotationDesugar.createStrandAnnotationWithThreadAny(pos, env);\n }\n }\n\n @Override\n public void visit(BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) {\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n\n if (resourceAccessInvocation.invokedInsideTransaction) {\n transactionDesugar.startTransactionCoordinatorOnce(env, resourceAccessInvocation.pos);\n }\n\n \n BLangInvocation pathParamInvocation = createInvocationForPathParams(resourceAccessInvocation);\n reorderArguments(pathParamInvocation);\n\n BResourceFunction targetResourceFunc = resourceAccessInvocation.targetResourceFunc;\n List pathSegmentSymbols = targetResourceFunc.pathSegmentSymbols;\n\n int pathParamInvocationRequiredArgCount = pathParamInvocation.requiredArgs.size();\n\n BLangInvocation bLangInvocation = new BLangInvocation();\n\n \n \n \n \n \n \n \n \n BLangStatementExpression firstRequiredArgFromRestArg = null;\n boolean isFirstRequiredArgFromRestArgIncluded = false;\n for (int i = 0; i < pathParamInvocationRequiredArgCount; i++) {\n BLangExpression requiredArg = pathParamInvocation.requiredArgs.get(i);\n \n Name resourcePathName = pathSegmentSymbols.get(i).name;\n if (firstRequiredArgFromRestArg == null && requiredArg.getKind() == NodeKind.STATEMENT_EXPRESSION) {\n firstRequiredArgFromRestArg = (BLangStatementExpression) requiredArg;\n if (resourcePathName.value.equals(\"^\")) {\n isFirstRequiredArgFromRestArgIncluded = true;\n bLangInvocation.requiredArgs.add(requiredArg);\n continue;\n }\n }\n\n if (resourcePathName.value.equals(\"^\")) {\n if (firstRequiredArgFromRestArg != null && !isFirstRequiredArgFromRestArgIncluded) {\n BLangStatementExpression statementExpression = new BLangStatementExpression();\n statementExpression.expr = requiredArg;\n statementExpression.stmt = firstRequiredArgFromRestArg.stmt;\n statementExpression.setBType(requiredArg.getBType());\n bLangInvocation.requiredArgs.add(statementExpression);\n isFirstRequiredArgFromRestArgIncluded = true;\n } else {\n bLangInvocation.requiredArgs.add(requiredArg);\n }\n }\n }\n\n Name lastResourcePathName = pathSegmentSymbols.get(pathSegmentSymbols.size() - 1).name;\n if (lastResourcePathName.value.equals(\"^^\")) {\n \n for 
(BLangExpression restArg : pathParamInvocation.restArgs) {\n if (firstRequiredArgFromRestArg != null && !isFirstRequiredArgFromRestArgIncluded &&\n restArg.getKind() == NodeKind.STATEMENT_EXPRESSION) {\n BLangStatementExpression restArgStmtExpr = (BLangStatementExpression) restArg;\n ((BLangBlockStmt) restArgStmtExpr.stmt).stmts.add(0,\n ((BLangBlockStmt) firstRequiredArgFromRestArg.stmt).stmts.get(0));\n }\n bLangInvocation.requiredArgs.add(restArg);\n }\n }\n\n bLangInvocation.requiredArgs.addAll(resourceAccessInvocation.requiredArgs);\n bLangInvocation.pkgAlias = resourceAccessInvocation.pkgAlias;\n bLangInvocation.name = resourceAccessInvocation.name;\n bLangInvocation.expr = resourceAccessInvocation.expr;\n bLangInvocation.restArgs = resourceAccessInvocation.restArgs;\n bLangInvocation.symbol = resourceAccessInvocation.symbol;\n bLangInvocation.setBType(resourceAccessInvocation.getBType());\n bLangInvocation.parent = resourceAccessInvocation.parent;\n bLangInvocation.pos = resourceAccessInvocation.pos;\n result = rewriteExpr(bLangInvocation);\n }\n\n private BLangInvocation createInvocationForPathParams(\n BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) {\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n BLangInvocation bLangInvocation = new BLangInvocation();\n\n BInvokableSymbol invokableSymbol = new BInvokableSymbol(\n resourceAccessInvocation.symbol.tag,\n resourceAccessInvocation.symbol.flags,\n resourceAccessInvocation.symbol.name,\n resourceAccessInvocation.symbol.pkgID,\n resourceAccessInvocation.symbol.type,\n resourceAccessInvocation.symbol,\n resourceAccessInvocation.symbol.pos, VIRTUAL);\n\n BResourceFunction targetResourceFunc = resourceAccessInvocation.targetResourceFunc;\n List pathSegmentSymbols = targetResourceFunc.pathSegmentSymbols;\n List resourceAccessPathSegments = resourceAccessInvocation.resourceAccessPathSegments.exprs;\n\n List invocationParams = new 
ArrayList<>(pathSegmentSymbols.size());\n\n int pathSegmentCount = pathSegmentSymbols.size();\n BResourcePathSegmentSymbol lastPathSegmentSym = pathSegmentSymbols.get(pathSegmentSymbols.size() - 1);\n if (lastPathSegmentSym.kind == SymbolKind.RESOURCE_PATH_REST_PARAM_SEGMENT) {\n invokableSymbol.restParam = new BVarSymbol(0, Names.EMPTY, this.env.scope.owner.pkgID,\n new BArrayType(lastPathSegmentSym.type), this.env.scope.owner, lastPathSegmentSym.pos, VIRTUAL);\n pathSegmentCount--;\n }\n\n if (pathSegmentCount > 0 && lastPathSegmentSym.kind != SymbolKind.RESOURCE_ROOT_PATH_SEGMENT) {\n invocationParams.addAll(pathSegmentSymbols.subList(0, pathSegmentCount).stream()\n .map(s -> new BVarSymbol(0, Names.EMPTY, this.env.scope.owner.pkgID, s.type,\n this.env.scope.owner, s.pos, VIRTUAL)).toList());\n }\n\n invokableSymbol.params = invocationParams;\n\n bLangInvocation.symbol = invokableSymbol;\n\n for (int i = 0; i < resourceAccessPathSegments.size(); i++) {\n BLangExpression resourceAccessPathSeg = resourceAccessPathSegments.get(i);\n if (resourceAccessPathSeg.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {\n bLangInvocation.restArgs.add(createRestArgsExpression(\n ((BLangListConstructorSpreadOpExpr) resourceAccessPathSeg).expr));\n } else if (i > invocationParams.size() - 1) {\n bLangInvocation.restArgs.add(resourceAccessPathSeg);\n } else {\n bLangInvocation.requiredArgs.add(resourceAccessPathSeg);\n }\n }\n\n return bLangInvocation;\n }\n\n private BLangRestArgsExpression createRestArgsExpression(BLangExpression expr) {\n BLangRestArgsExpression bLangRestArgsExpression = new BLangRestArgsExpression();\n bLangRestArgsExpression.expr = expr;\n bLangRestArgsExpression.pos = expr.pos;\n bLangRestArgsExpression.setBType(expr.getBType());\n bLangRestArgsExpression.expectedType = bLangRestArgsExpression.getBType();\n return bLangRestArgsExpression;\n }\n\n private BLangExpression rewriteInvocation(BLangInvocation invocation, boolean async) {\n BLangInvocation invRef 
= invocation;\n\n if (!enclLocks.isEmpty()) {\n BLangLockStmt lock = enclLocks.peek();\n lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);\n }\n\n visitArgs(invocation);\n\n annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,\n invocation.symbol.pkgID, invocation.symbol.owner, env);\n\n if (invocation.functionPointerInvocation) {\n return visitFunctionPointerInvocation(invocation);\n }\n result = invRef;\n\n BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;\n if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {\n BType retType = unifier.build(invSym.retType);\n invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);\n }\n\n if (invocation.expr == null) {\n BLangExpression expression = fixTypeCastInTypeParamInvocation(invocation, invRef);\n if (invocation.exprSymbol == null) {\n return expression;\n }\n invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);\n invocation.expr = rewriteExpr(invocation.expr);\n }\n switch (Types.getImpliedType(invocation.expr.getBType()).tag) {\n case TypeTags.OBJECT:\n case TypeTags.RECORD:\n case TypeTags.UNION:\n if (!invocation.langLibInvocation) {\n invocation.expr = rewriteExpr(invocation.expr);\n List argExprs = new ArrayList<>(invocation.requiredArgs);\n argExprs.add(0, invocation.expr);\n BLangAttachedFunctionInvocation attachedFunctionInvocation =\n new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,\n invocation.symbol, invocation.getBType(),\n invocation.expr, async);\n attachedFunctionInvocation.name = invocation.name;\n attachedFunctionInvocation.annAttachments = invocation.annAttachments;\n invRef = attachedFunctionInvocation;\n }\n break;\n }\n populateOCEInvocation(invocation, invRef);\n return fixTypeCastInTypeParamInvocation(invocation, invRef);\n }\n\n private void populateOCEInvocation(BLangInvocation 
invocation,\n BLangInvocation invRef) {\n if (invocation.objectInitMethod && Symbols.isFlagOn(invocation.expr.getBType().flags, Flags.OBJECT_CTOR)) {\n BObjectType initializingObject = (BObjectType) invocation.expr.getBType();\n BLangClassDefinition classDef = initializingObject.classDef;\n if (classDef.hasClosureVars) {\n OCEDynamicEnvironmentData oceEnvData = initializingObject.classDef.oceEnvData;\n if (oceEnvData.attachedFunctionInvocation == null) {\n oceEnvData.attachedFunctionInvocation = (BLangAttachedFunctionInvocation) invRef;\n }\n }\n }\n }\n\n private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {\n if (!iExpr.langLibInvocation) {\n return;\n }\n\n List requiredArgs = iExpr.requiredArgs;\n\n List params = ((BInvokableSymbol) iExpr.symbol).params;\n\n for (int i = 0; i < requiredArgs.size(); i++) {\n requiredArgs.set(i, types.addConversionExprIfRequired(requiredArgs.get(i), params.get(i).type));\n }\n }\n\n /* This function is a workaround and need improvement\n * Notes for improvement :\n * 1. Both arguments are same.\n * 2. Due to current type param logic we put type param flag on the original type.\n * 3. Error type having Cloneable type with type param flag, change expression type by this code.\n * 4. 
using error type is a problem as Cloneable type is an typeparm eg: ExprBodiedFunctionTest\n * added never to CloneableType type param\n * @typeParam type\n * CloneableType Cloneable|never;\n *\n */\n private BLangExpression fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {\n var returnTypeOfInvokable = ((BInvokableSymbol) iExpr.symbol).retType;\n if (!iExpr.langLibInvocation && !TypeParamAnalyzer.containsTypeParam(returnTypeOfInvokable)) {\n return genIExpr;\n }\n\n \n BType originalInvType = genIExpr.getBType();\n if (!genIExpr.async) {\n genIExpr.setBType(returnTypeOfInvokable);\n }\n return types.addConversionExprIfRequired(genIExpr, originalInvType);\n }\n\n private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {\n List requiredArgs = iExpr.requiredArgs;\n List params = ((BInvokableSymbol) iExpr.symbol).params;\n if (!params.isEmpty()) {\n for (int i = 0; i < requiredArgs.size(); i++) {\n BVarSymbol param = params.get(i);\n if (Types.getImpliedType(param.type).tag == TypeTags.STREAM) {\n requiredArgs.set(i, types.addConversionExprIfRequired(requiredArgs.get(i), param.type));\n }\n }\n }\n }\n\n private BLangLiteral createNilLiteral() {\n BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();\n literal.value = null;\n literal.setBType(symTable.nilType);\n return literal;\n }\n\n @Override\n public void visit(BLangTypeInit typeInitExpr) {\n if (Types.getImpliedType(typeInitExpr.getBType()).tag == TypeTags.STREAM) {\n result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));\n } else {\n result = rewrite(desugarObjectTypeInit(typeInitExpr), env);\n }\n }\n\n private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {\n typeInitExpr.desugared = true;\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);\n\n \n BLangInvocation initInvocation = (BLangInvocation) typeInitExpr.initInvocation;\n initInvocation.objectInitMethod = true;\n BType 
objType = getObjectType(typeInitExpr.getBType());\n BLangSimpleVariableDef objVarDef = createVarDef(\"$obj$\", objType, typeInitExpr, typeInitExpr.pos);\n \n BLangSimpleVariableDef initInvRetValVarDef = createVarDef(\"$temp$\", initInvocation.getBType(),\n initInvocation, initInvocation.pos);\n objVarDef.var.name.pos = symTable.builtinPos;\n BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);\n BLangSimpleVarRef objInitVarRef = ASTBuilderUtil.createVariableRef(initInvocation.pos,\n initInvRetValVarDef.var.symbol);\n blockStmt.addStatement(objVarDef);\n blockStmt.addStatement(initInvRetValVarDef);\n initInvocation.exprSymbol = objVarDef.var.symbol;\n initInvocation.symbol =\n ((BObjectTypeSymbol) Types.getImpliedType(objType).tsymbol).generatedInitializerFunc.symbol;\n\n \n if (initInvocation.getBType().tag == TypeTags.NIL) {\n initInvocation.name.value = GENERATED_INIT_SUFFIX.value;\n BLangNode parent = initInvocation.parent;\n if (parent != null && parent.getKind() == NodeKind.OBJECT_CTOR_EXPRESSION) {\n BLangObjectConstructorExpression oceExpression = (BLangObjectConstructorExpression) parent;\n OCEDynamicEnvironmentData oceData = oceExpression.classNode.oceEnvData;\n oceData.initInvocation = typeInitExpr.initInvocation;\n }\n typeInitExpr.initInvocation = objInitVarRef;\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);\n stmtExpr.setBType(objVarRef.symbol.type);\n return stmtExpr;\n }\n\n \n BLangSimpleVariableDef resultVarDef = createVarDef(\"$result$\", typeInitExpr.getBType(), null, typeInitExpr.pos);\n blockStmt.addStatement(resultVarDef);\n\n \n \n \n \n \n\n \n BLangSimpleVarRef initRetValVarRefInCondition =\n ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);\n BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);\n BLangTypeTestExpr isErrorTest =\n ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, 
initRetValVarRefInCondition, getErrorTypeNode());\n isErrorTest.setBType(symTable.booleanType);\n\n \n BLangSimpleVarRef thenInitRetValVarRef =\n ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);\n BLangSimpleVarRef thenResultVarRef =\n ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);\n BLangAssignment errAssignment =\n ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);\n thenStmt.addStatement(errAssignment);\n\n \n BLangSimpleVarRef elseResultVarRef =\n ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);\n BLangAssignment objAssignment =\n ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);\n BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);\n elseStmt.addStatement(objAssignment);\n\n BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);\n blockStmt.addStatement(ifelse);\n\n BLangSimpleVarRef resultVarRef =\n ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);\n stmtExpr.setBType(resultVarRef.symbol.type);\n return stmtExpr;\n }\n\n private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {\n BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope\n .lookup(Names.CONSTRUCT_STREAM).symbol;\n\n BStreamType referredStreamType = (BStreamType) Types.getImpliedType(typeInitExpr.getBType());\n BType constraintType = referredStreamType.constraint;\n BType constraintTdType = new BTypedescType(constraintType, symTable.typeDesc.tsymbol);\n BLangTypedescExpr constraintTdExpr = new BLangTypedescExpr();\n constraintTdExpr.resolvedType = constraintType;\n constraintTdExpr.setBType(constraintTdType);\n\n BType completionType = referredStreamType.completionType;\n 
BType completionTdType = new BTypedescType(completionType, symTable.typeDesc.tsymbol);\n BLangTypedescExpr completionTdExpr = new BLangTypedescExpr();\n completionTdExpr.resolvedType = completionType;\n completionTdExpr.setBType(completionTdType);\n\n List args = new ArrayList<>(Lists.of(constraintTdExpr, completionTdExpr));\n if (!typeInitExpr.argsExpr.isEmpty()) {\n args.add(typeInitExpr.argsExpr.get(0));\n }\n BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(\n typeInitExpr.pos, symbol, args, symResolver);\n streamConstructInvocation.setBType(new BStreamType(TypeTags.STREAM, constraintType, completionType, null));\n return streamConstructInvocation;\n }\n\n private BLangSimpleVariableDef createSimpleVarDef(String name, BType type, BLangExpression expr) {\n BVarSymbol varSymbol = new BVarSymbol(0, Names.fromString(name), this.env.scope.owner.pkgID, type,\n this.env.scope.owner, expr.pos, VIRTUAL);\n BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(expr.pos, name, type, expr, varSymbol);\n BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(expr.pos);\n simpleVariableDef.var = simpleVariable;\n simpleVariableDef.setBType(simpleVariable.getBType());\n return simpleVariableDef;\n }\n\n private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, Location location) {\n BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, Names.fromString(name));\n \n if (objSym == null || objSym == symTable.notFoundSymbol) {\n objSym = new BVarSymbol(0, Names.fromString(name), this.env.scope.owner.pkgID, type,\n this.env.scope.owner, location, VIRTUAL);\n }\n BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) objSym);\n BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(location);\n objVarDef.var = objVar;\n objVarDef.setBType(objVar.getBType());\n return objVarDef;\n }\n\n private BType 
getObjectType(BType bType) {\n BType type = Types.getImpliedType(bType);\n if (type.tag == TypeTags.OBJECT) {\n return bType;\n } else if (type.tag == TypeTags.UNION) {\n return ((BUnionType) type).getMemberTypes().stream()\n .filter(t -> Types.getImpliedType(t).tag == TypeTags.OBJECT)\n .findFirst()\n .orElse(symTable.noType);\n }\n\n throw new IllegalStateException(\"None object type '\" + type.toString() + \"' found in object init context\");\n }\n\n BLangErrorType getErrorTypeNode() {\n BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();\n errorTypeNode.setBType(symTable.errorType);\n errorTypeNode.pos = symTable.builtinPos;\n return errorTypeNode;\n }\n\n BLangErrorType getErrorOrNillTypeNode() {\n BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();\n errorTypeNode.setBType(symTable.errorOrNilType);\n return errorTypeNode;\n }\n\n @Override\n public void visit(BLangTernaryExpr ternaryExpr) {\n /*\n * First desugar to if-else:\n *\n * T $result$;\n * if () {\n * $result$ = thenExpr;\n * } else {\n * $result$ = elseExpr;\n * }\n *\n */\n BLangSimpleVariableDef resultVarDef =\n createVarDef(\"$ternary_result$\", ternaryExpr.getBType(), null, ternaryExpr.pos);\n BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);\n BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);\n\n \n BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);\n BLangAssignment thenAssignment =\n ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);\n thenBody.addStatement(thenAssignment);\n\n \n BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);\n BLangAssignment elseAssignment =\n ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);\n elseBody.addStatement(elseAssignment);\n\n \n 
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);\n BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);\n\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);\n stmtExpr.setBType(ternaryExpr.getBType());\n\n result = rewriteExpr(stmtExpr);\n }\n\n @Override\n public void visit(BLangWaitExpr waitExpr) {\n \n if (waitExpr.getExpression().getKind() == NodeKind.BINARY_EXPR) {\n waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitExpr.getExpression(), new ArrayList<>());\n } else { \n waitExpr.exprList = Collections.singletonList(rewriteExpr(waitExpr.getExpression()));\n }\n result = waitExpr;\n }\n\n private List collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List exprs) {\n visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);\n visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);\n return exprs;\n }\n\n private void visitBinaryExprOfWait(BLangExpression expr, List exprs) {\n if (expr.getKind() == NodeKind.BINARY_EXPR) {\n collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);\n } else {\n expr = rewriteExpr(expr);\n exprs.add(expr);\n }\n }\n\n @Override\n public void visit(BLangWaitForAllExpr waitExpr) {\n waitExpr.keyValuePairs.forEach(keyValue -> {\n if (keyValue.valueExpr != null) {\n keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);\n } else {\n keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);\n }\n });\n BLangExpression expr = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());\n expr.pos = waitExpr.pos;\n result = rewriteExpr(expr);\n }\n\n @Override\n public void visit(BLangTrapExpr trapExpr) {\n trapExpr.expr = rewriteExpr(trapExpr.expr);\n if (Types.getImpliedType(trapExpr.expr.getBType()).tag != TypeTags.NIL) {\n trapExpr.expr = 
types.addConversionExprIfRequired(trapExpr.expr, trapExpr.getBType());\n }\n result = trapExpr;\n }\n\n @Override\n public void visit(BLangBinaryExpr binaryExpr) {\n if (isNullableBinaryExpr(binaryExpr)) {\n BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);\n result = rewrite(stmtExpr, env);\n return;\n }\n\n if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {\n BLangExpression lhsExpr = binaryExpr.lhsExpr;\n BLangExpression rhsExpr = binaryExpr.rhsExpr;\n\n \n lhsExpr = createTypeCastExpr(lhsExpr, symTable.intType);\n rhsExpr = createTypeCastExpr(rhsExpr, symTable.intType);\n\n if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {\n rhsExpr = getModifiedIntRangeEndExpr(rhsExpr);\n }\n\n result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, lhsExpr, rhsExpr));\n return;\n }\n\n if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {\n visitBinaryLogicalExpr(binaryExpr);\n return;\n }\n\n OperatorKind binaryOpKind = binaryExpr.opKind;\n\n if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||\n binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||\n binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||\n binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {\n checkByteTypeIncompatibleOperations(binaryExpr);\n }\n\n binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);\n binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);\n result = binaryExpr;\n\n int rhsExprTypeTag = Types.getImpliedType(binaryExpr.rhsExpr.getBType()).tag;\n int lhsExprTypeTag = Types.getImpliedType(binaryExpr.lhsExpr.getBType()).tag;\n\n \n if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||\n binaryExpr.opKind == OperatorKind.NOT_EQUAL ||\n binaryExpr.opKind == OperatorKind.REF_EQUAL ||\n binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {\n if 
(TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);\n return;\n }\n }\n\n boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);\n boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);\n\n \n if (lhsExprTypeTag == rhsExprTypeTag) {\n if (!isBinaryShiftOperator && !isArithmeticOperator) {\n return;\n }\n if (types.isValueType(binaryExpr.lhsExpr.getBType())) {\n return;\n }\n }\n\n if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&\n (rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {\n \n binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,\n binaryExpr.lhsExpr.pos, symTable.xmlType);\n return;\n }\n\n if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&\n (lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {\n \n binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,\n binaryExpr.rhsExpr.pos, symTable.xmlType);\n return;\n }\n\n if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {\n createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.DECIMAL) {\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());\n return;\n }\n\n if (rhsExprTypeTag == TypeTags.DECIMAL) {\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.FLOAT) {\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());\n return;\n 
}\n\n if (rhsExprTypeTag == TypeTags.FLOAT) {\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());\n return;\n }\n\n if (isArithmeticOperator) {\n createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);\n return;\n }\n\n if (isBinaryShiftOperator) {\n createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);\n return;\n }\n }\n\n private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {\n /*\n * int? x = 3;\n * int? y = 5;\n * int? z = x + y;\n * Above is desugared to\n * int? $result$;\n * \n * int? $lhsExprVar$ = x;\n * int? $rhsExprVar$ = y;\n * if (lhsVar is () or rhsVar is ()) {\n * $result$ = ();\n * } else {\n * $result$ = $lhsExprVar$ + $rhsExprVar$;\n * }\n * int z = $result$;\n */\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);\n\n BUnionType exprBType = (BUnionType) binaryExpr.getBType();\n BType nonNilType = exprBType.getMemberTypes().iterator().next();\n\n BType rhsType;\n BType lhsType;\n if (symResolver.isArithmeticOperator(binaryExpr.opKind)) {\n rhsType = nonNilType;\n lhsType = nonNilType;\n } else {\n \n rhsType = getBinaryExprOperandNonNilType(binaryExpr.rhsExpr.getBType());\n lhsType = getBinaryExprOperandNonNilType(binaryExpr.lhsExpr.getBType());\n }\n\n if (binaryExpr.lhsExpr.getBType().isNullable()) {\n binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);\n }\n\n BLangSimpleVariableDef tempVarDef = createVarDef(\"$result$\",\n binaryExpr.getBType(), null, binaryExpr.pos);\n BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);\n blockStmt.addStatement(tempVarDef);\n\n BLangSimpleVariableDef lhsVarDef = createVarDef(\"$lhsExprVar$\", binaryExpr.lhsExpr.getBType(),\n binaryExpr.lhsExpr, binaryExpr.pos);\n BLangSimpleVarRef lhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, lhsVarDef.var.symbol);\n 
blockStmt.addStatement(lhsVarDef);\n\n BLangSimpleVariableDef rhsVarDef = createVarDef(\"$rhsExprVar$\", binaryExpr.rhsExpr.getBType(),\n binaryExpr.rhsExpr, binaryExpr.pos);\n BLangSimpleVarRef rhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, rhsVarDef.var.symbol);\n blockStmt.addStatement(rhsVarDef);\n\n BLangTypeTestExpr typeTestExprOne = getNilTypeTestExpr(binaryExpr.pos, lhsVarRef);\n BLangTypeTestExpr typeTestExprTwo = getNilTypeTestExpr(binaryExpr.pos, rhsVarRef);\n\n BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,\n typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);\n\n BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);\n BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);\n bLangAssignmentIf.varRef = tempVarRef;\n bLangAssignmentIf.expr = createNilLiteral();\n\n BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);\n BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);\n bLangAssignmentElse.varRef = tempVarRef;\n\n BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, lhsVarRef, rhsVarRef,\n nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);\n newBinaryExpr.lhsExpr = createTypeCastExpr(lhsVarRef, lhsType);\n newBinaryExpr.rhsExpr = createTypeCastExpr(rhsVarRef, rhsType);\n bLangAssignmentElse.expr = newBinaryExpr;\n\n BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);\n ifStatement.expr = ifBlockCondition;\n ifStatement.body = ifBody;\n ifStatement.elseStmt = elseBody;\n\n BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);\n stmtExpr.setBType(binaryExpr.getBType());\n\n return stmtExpr;\n }\n\n BLangTypeTestExpr getNilTypeTestExpr(Location pos, BLangExpression expr) {\n return createTypeCheckExpr(pos, expr, getNillTypeNode());\n 
}\n\n private BType getBinaryExprOperandNonNilType(BType operandType) {\n return operandType.isNullable() ? types.getSafeType(operandType, true, false) : operandType;\n }\n\n private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {\n if ((binaryExpr.lhsExpr.getBType() != null && binaryExpr.rhsExpr.getBType() != null) &&\n (binaryExpr.rhsExpr.getBType().isNullable() ||\n binaryExpr.lhsExpr.getBType().isNullable())) {\n switch (binaryExpr.getOperatorKind()) {\n case ADD:\n case SUB:\n case MUL:\n case DIV:\n case MOD:\n case BITWISE_LEFT_SHIFT:\n case BITWISE_RIGHT_SHIFT:\n case BITWISE_UNSIGNED_RIGHT_SHIFT:\n case BITWISE_AND:\n case BITWISE_OR:\n case BITWISE_XOR:\n return true;\n }\n }\n return false;\n }\n\n private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,\n int rhsExprTypeTag) {\n if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||\n (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||\n (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {\n return;\n }\n if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {\n if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {\n binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,\n binaryExpr.rhsExpr.pos, symTable.xmlType);\n return;\n }\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);\n return;\n }\n if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {\n if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {\n binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,\n binaryExpr.rhsExpr.pos, symTable.xmlType);\n return;\n }\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);\n return;\n }\n binaryExpr.lhsExpr = 
createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());\n }\n\n private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,\n int rhsExprTypeTag) {\n boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);\n boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);\n if (isLhsIntegerType || lhsExprTypeTag == TypeTags.BYTE) {\n if (isRhsIntegerType || rhsExprTypeTag == TypeTags.BYTE) {\n return;\n }\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);\n return;\n }\n\n if (isRhsIntegerType || rhsExprTypeTag == TypeTags.BYTE) {\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);\n return;\n }\n\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);\n }\n\n private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,\n int rhsExprTypeTag) {\n boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);\n boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);\n BType lhsExprType = binaryExpr.lhsExpr.getBType();\n BType rhsExprType = binaryExpr.rhsExpr.getBType();\n\n if ((isLhsIntegerType && isRhsIntegerType) || (lhsExprTypeTag == TypeTags.BYTE &&\n rhsExprTypeTag == TypeTags.BYTE)) {\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.DECIMAL) {\n addTypeCastForBinaryExprB(binaryExpr, lhsExprType, rhsExprType);\n return;\n }\n\n if (rhsExprTypeTag == TypeTags.DECIMAL) {\n addTypeCastForBinaryExprA(binaryExpr, rhsExprType, lhsExprType);\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.FLOAT) {\n addTypeCastForBinaryExprB(binaryExpr, lhsExprType, rhsExprType);\n return;\n }\n\n if (rhsExprTypeTag == TypeTags.FLOAT) {\n addTypeCastForBinaryExprA(binaryExpr, rhsExprType, lhsExprType);\n return;\n 
}\n\n if (isLhsIntegerType && !isRhsIntegerType) {\n addTypeCastForBinaryExprB(binaryExpr, symTable.intType, rhsExprType);\n return;\n }\n\n if (!isLhsIntegerType && isRhsIntegerType) {\n addTypeCastForBinaryExprA(binaryExpr, symTable.intType, lhsExprType);\n return;\n }\n\n if (lhsExprTypeTag == TypeTags.BYTE || rhsExprTypeTag == TypeTags.BYTE) {\n if ((lhsExprTypeTag == TypeTags.UNION && lhsExprType.isNullable()) ||\n (rhsExprTypeTag == TypeTags.UNION && rhsExprType.isNullable())) {\n binaryExpr.lhsExpr = addNilType(symTable.intType, binaryExpr.lhsExpr);\n binaryExpr.rhsExpr = addNilType(symTable.intType, binaryExpr.rhsExpr);\n return;\n }\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);\n binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);\n return;\n }\n\n boolean isLhsStringType = TypeTags.isStringTypeTag(lhsExprTypeTag);\n boolean isRhsStringType = TypeTags.isStringTypeTag(rhsExprTypeTag);\n\n if (isLhsStringType && isRhsStringType) {\n return;\n }\n\n if (isLhsStringType && !isRhsStringType) {\n addTypeCastForBinaryExprB(binaryExpr, symTable.stringType, rhsExprType);\n return;\n }\n\n if (!isLhsStringType && isRhsStringType) {\n addTypeCastForBinaryExprA(binaryExpr, symTable.stringType, lhsExprType);\n }\n }\n\n private void addTypeCastForBinaryExprA(BLangBinaryExpr binaryExpr, BType rhsExprType, BType lhsExprType) {\n if (Types.getImpliedType(lhsExprType).tag == TypeTags.UNION && lhsExprType.isNullable()) {\n binaryExpr.rhsExpr = addNilType(rhsExprType, binaryExpr.rhsExpr);\n } else {\n binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, rhsExprType);\n }\n }\n\n private void addTypeCastForBinaryExprB(BLangBinaryExpr binaryExpr, BType lhsExprType, BType rhsExprType) {\n if (Types.getImpliedType(rhsExprType).tag == TypeTags.UNION && rhsExprType.isNullable()) {\n binaryExpr.lhsExpr = addNilType(lhsExprType, binaryExpr.lhsExpr);\n } else {\n binaryExpr.rhsExpr = 
createTypeCastExpr(binaryExpr.rhsExpr, lhsExprType);\n }\n }\n\n private BLangExpression addNilType(BType exprType, BLangExpression expr) {\n LinkedHashSet members = new LinkedHashSet<>(2);\n members.add(exprType);\n members.add(symTable.nilType);\n BUnionType unionType = new BUnionType(null, members, true, false);\n return createTypeCastExpr(expr, unionType);\n }\n\n private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,\n BLangExpression rhsExpr) {\n BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope\n .lookup(Names.CREATE_INT_RANGE).symbol;\n BLangInvocation createIntRangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(location, symbol,\n new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);\n createIntRangeInvocation.setBType(symTable.intRangeType);\n return createIntRangeInvocation;\n }\n\n private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {\n if (binaryExpr.expectedType == null) {\n return;\n }\n\n int rhsExprTypeTag = Types.getImpliedType(binaryExpr.rhsExpr.getBType()).tag;\n int lhsExprTypeTag = Types.getImpliedType(binaryExpr.lhsExpr.getBType()).tag;\n if (rhsExprTypeTag != TypeTags.BYTE && lhsExprTypeTag != TypeTags.BYTE) {\n return;\n }\n\n int resultTypeTag = Types.getImpliedType(binaryExpr.expectedType).tag;\n if (resultTypeTag == TypeTags.INT) {\n if (rhsExprTypeTag == TypeTags.BYTE) {\n binaryExpr.rhsExpr = types.addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);\n }\n\n if (lhsExprTypeTag == TypeTags.BYTE) {\n binaryExpr.lhsExpr = types.addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);\n }\n }\n }\n\n /**\n * This method checks whether given binary expression is related to shift operation.\n * If its true, then both lhs and rhs of the binary expression will be converted to 'int' type.\n *

\n * byte a = 12;\n * byte b = 34;\n * int i = 234;\n * int j = -4;\n *

\n * true: where binary expression's expected type is 'int'\n * int i1 = a >> b;\n * int i2 = a << b;\n * int i3 = a >> i;\n * int i4 = a << i;\n * int i5 = i >> j;\n * int i6 = i << j;\n */\n private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {\n return binaryExpr.opKind == OperatorKind.BITWISE_LEFT_SHIFT ||\n binaryExpr.opKind == OperatorKind.BITWISE_RIGHT_SHIFT ||\n binaryExpr.opKind == OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT;\n }\n\n @Override\n public void visit(BLangElvisExpr elvisExpr) {\n Location pos = elvisExpr.pos;\n String resultVarName = \"_$result$_\";\n BType resultType = elvisExpr.getBType();\n BLangSimpleVariable resultVar =\n ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,\n new BVarSymbol(0, Names.fromString(resultVarName),\n this.env.scope.owner.pkgID, resultType,\n this.env.scope.owner, pos, VIRTUAL));\n BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);\n resultVarDef.desugared = true;\n BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);\n\n String lhsResultVarName = GEN_VAR_PREFIX.value;\n BLangSimpleVariable lhsResultVar =\n ASTBuilderUtil.createVariable(pos, lhsResultVarName, elvisExpr.lhsExpr.getBType(), elvisExpr.lhsExpr,\n new BVarSymbol(0, Names.fromString(lhsResultVarName),\n this.env.scope.owner.pkgID, elvisExpr.lhsExpr.getBType(),\n this.env.scope.owner, elvisExpr.pos, VIRTUAL));\n BLangSimpleVariableDef lhsResultVarDef = ASTBuilderUtil.createVariableDef(pos, lhsResultVar);\n BLangSimpleVarRef lhsResultVarRef = ASTBuilderUtil.createVariableRef(pos, lhsResultVar.symbol);\n\n BLangAssignment nilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef, elvisExpr.rhsExpr);\n BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);\n ifBody.addStatement(nilAssignment);\n\n BLangAssignment notNilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,\n createTypeCastExpr(lhsResultVarRef, 
resultType));\n BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(pos);\n elseBody.addStatement(notNilAssignment);\n\n BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos, getNilTypeTestExpr(pos, lhsResultVarRef),\n ifBody, elseBody);\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{\n add(resultVarDef);\n add(lhsResultVarDef);\n add(ifStmt);\n }});\n BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);\n stmtExpr.setBType(resultType);\n result = rewriteExpr(stmtExpr);\n }\n\n @Override\n public void visit(BLangUnaryExpr unaryExpr) {\n\n if (isNullableUnaryExpr(unaryExpr)) {\n BLangStatementExpression statementExpression = createStmtExprForNilableUnaryExpr(unaryExpr);\n result = rewrite(statementExpression, env);\n return;\n }\n\n if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) {\n \n \n rewriteBitwiseComplementOperator(unaryExpr);\n return;\n }\n\n \n \n if (types.isExpressionInUnaryValid(unaryExpr.expr) &&\n Types.getImpliedType(unaryExpr.expectedType).tag == TypeTags.FINITE) {\n result = rewriteExpr(Types.constructNumericLiteralFromUnaryExpr(unaryExpr));\n return;\n }\n\n OperatorKind opKind = unaryExpr.operator;\n if (opKind == OperatorKind.ADD || opKind == OperatorKind.SUB) {\n createTypeCastExprForUnaryPlusAndMinus(unaryExpr);\n }\n unaryExpr.expr = rewriteExpr(unaryExpr.expr);\n result = unaryExpr;\n }\n\n private void createTypeCastExprForUnaryPlusAndMinus(BLangUnaryExpr unaryExpr) {\n BLangExpression expr = unaryExpr.expr;\n if (TypeTags.isIntegerTypeTag(Types.getImpliedType(expr.getBType()).tag)) {\n return;\n }\n unaryExpr.expr = createTypeCastExpr(expr, unaryExpr.getBType());\n }\n\n /**\n * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary expression as below.\n * Example : ~a -> a ^ -1;\n * ~ 11110011 -> 00001100\n * 11110011 ^ 11111111 -> 00001100\n *\n * @param unaryExpr the bitwise complement expression\n 
*/\n private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {\n final Location pos = unaryExpr.pos;\n final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();\n binaryExpr.pos = pos;\n binaryExpr.opKind = OperatorKind.BITWISE_XOR;\n binaryExpr.lhsExpr = unaryExpr.expr;\n if (TypeTags.BYTE == Types.getImpliedType(unaryExpr.getBType()).tag) {\n binaryExpr.setBType(symTable.byteType);\n binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);\n binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,\n symTable.byteType, symTable.byteType);\n } else {\n binaryExpr.setBType(symTable.intType);\n binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);\n binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,\n symTable.intType, symTable.intType);\n }\n result = rewriteExpr(binaryExpr);\n }\n\n private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {\n /*\n * int? x = 3;\n * int? y = +x;\n *\n *\n * Above is desugared to\n * int? 
$result$;\n * if (x is ()) {\n * $result$ = ();\n * } else {\n * $result$ = +x;\n * }\n * int y = $result$\n */\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);\n\n BUnionType exprBType = (BUnionType) unaryExpr.getBType();\n BType nilLiftType = exprBType.getMemberTypes().iterator().next();\n\n unaryExpr.expr = rewriteExpr(unaryExpr.expr);\n\n BLangSimpleVariableDef tempVarDef = createVarDef(\"$result\",\n unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);\n BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);\n\n blockStmt.addStatement(tempVarDef);\n\n BLangTypeTestExpr typeTestExpr = getNilTypeTestExpr(unaryExpr.pos, unaryExpr.expr);\n\n BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);\n BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);\n bLangAssignmentIf.varRef = tempVarRef;\n bLangAssignmentIf.expr = createNilLiteral();\n\n BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);\n BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);\n bLangAssignmentElse.varRef = tempVarRef;\n\n BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);\n bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr,\n nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);\n\n BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);\n ifStatement.expr = typeTestExpr;\n ifStatement.body = ifBody;\n ifStatement.elseStmt = elseBody;\n\n BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);\n stmtExpr.setBType(unaryExpr.getBType());\n\n return stmtExpr;\n }\n\n private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {\n if (unaryExpr.getBType() != null && unaryExpr.getBType().isNullable()) {\n switch (unaryExpr.operator) {\n case ADD:\n case SUB:\n case 
BITWISE_COMPLEMENT:\n return true;\n }\n }\n return false;\n }\n\n @Override\n public void visit(BLangTypeConversionExpr conversionExpr) {\n \n \n \n \n\n \n if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {\n result = rewriteExpr(conversionExpr.expr);\n return;\n }\n\n BType targetType = conversionExpr.targetType;\n conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);\n\n conversionExpr.expr = rewriteExpr(conversionExpr.expr);\n result = conversionExpr;\n }\n\n @Override\n public void visit(BLangLambdaFunction bLangLambdaFunction) {\n bLangLambdaFunction.function = rewrite(bLangLambdaFunction.function, bLangLambdaFunction.capturedClosureEnv);\n BLangFunction function = bLangLambdaFunction.function;\n \n if (function.flagSet.contains(Flag.WORKER) && Symbols.isFlagOn(function.symbol.type.flags, Flags.ISOLATED) &&\n Symbols.isFlagOn(env.enclInvokable.symbol.flags, Flags.ISOLATED)) {\n addStrandAnnotationWithThreadAny(function.pos);\n function.addAnnotationAttachment(this.strandAnnotAttachement);\n BInvokableSymbol funcSymbol = function.symbol;\n funcSymbol.addAnnotation(this.strandAnnotAttachement.annotationAttachmentSymbol);\n funcSymbol.schedulerPolicy = SchedulerPolicy.ANY;\n }\n bLangLambdaFunction.capturedClosureEnv = null;\n result = bLangLambdaFunction;\n }\n\n @Override\n public void visit(BLangArrowFunction bLangArrowFunction) {\n BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();\n bLangFunction.setName(bLangArrowFunction.functionName);\n\n BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();\n lambdaFunction.pos = bLangArrowFunction.pos;\n bLangFunction.addFlag(Flag.LAMBDA);\n lambdaFunction.function = bLangFunction;\n\n \n BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();\n returnType.setBType(bLangArrowFunction.body.expr.getBType());\n bLangFunction.setReturnTypeNode(returnType);\n 
bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));\n\n bLangArrowFunction.params.forEach(bLangFunction::addParameter);\n lambdaFunction.parent = bLangArrowFunction.parent;\n lambdaFunction.setBType(bLangArrowFunction.funcType);\n\n \n BLangFunction funcNode = lambdaFunction.function;\n BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),\n new Name(funcNode.name.value),\n new Name(funcNode.name.originalValue),\n env.enclPkg.symbol.pkgID,\n bLangArrowFunction.funcType,\n env.enclEnv.enclVarSym, true,\n bLangArrowFunction.pos, VIRTUAL);\n\n funcSymbol.originalName = new Name(funcNode.name.originalValue);\n\n SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);\n defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);\n\n List paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {\n Scope enclScope = invokableEnv.scope;\n varNode.symbol.kind = SymbolKind.FUNCTION;\n varNode.symbol.owner = invokableEnv.scope.owner;\n enclScope.define(varNode.symbol.name, varNode.symbol);\n }).map(varNode -> varNode.symbol).collect(Collectors.toList());\n\n funcSymbol.params = paramSymbols;\n funcSymbol.restParam = getRestSymbol(funcNode);\n funcSymbol.retType = funcNode.returnTypeNode.getBType();\n \n List paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());\n funcNode.setBType(new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(),\n funcSymbol.type.tsymbol));\n\n lambdaFunction.function.pos = bLangArrowFunction.pos;\n lambdaFunction.function.body.pos = bLangArrowFunction.pos;\n \n lambdaFunction.capturedClosureEnv = env;\n env.enclPkg.addFunction(lambdaFunction.function);\n result = rewriteExpr(lambdaFunction);\n }\n\n private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,\n SymbolEnv invokableEnv) {\n invokableNode.symbol = funcSymbol;\n funcSymbol.scope = 
    @Override
    public void visit(BLangXMLElementLiteral xmlElementLiteral) {
        // Desugar every attribute expression first.
        xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);

        // Convert namespace-declaration attributes (xmlns / xmlns:prefix) into
        // BLangXMLNS nodes attached to the element as inline namespaces.
        Iterator attributesItr = xmlElementLiteral.attributes.iterator();
        while (attributesItr.hasNext()) {
            BLangXMLAttribute attribute = attributesItr.next();
            if (!attribute.isNamespaceDeclr) {
                continue;
            }

            // Package-owned scopes get a package-level XMLNS node; everything
            // else gets a local one.
            BLangXMLNS xmlns;
            if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
                xmlns = new BLangPackageXMLNS();
            } else {
                xmlns = new BLangLocalXMLNS();
            }
            xmlns.namespaceURI = attribute.value.concatExpr;
            xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
            xmlns.symbol = attribute.symbol;

            xmlElementLiteral.inlineNamespaces.add(xmlns);
        }

        // Save the enclosing element's inline namespaces so they can be restored
        // after the children are rewritten; inside a query, inherited namespaces
        // are merged into this element's set as well.
        List prevInlineNamespaces = this.inlineXMLNamespaces;
        if (isVisitingQuery && this.inlineXMLNamespaces != null) {
            xmlElementLiteral.inlineNamespaces.addAll(this.inlineXMLNamespaces);
        }
        this.inlineXMLNamespaces = xmlElementLiteral.inlineNamespaces;

        // Rewrite tag names and children with this element's namespaces in scope.
        xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
        xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
        xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);

        // Restore the outer namespace context.
        this.inlineXMLNamespaces = prevInlineNamespaces;
        result = xmlElementLiteral;
    }
xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));\n result = xmlTextLiteral;\n }\n\n @Override\n public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {\n xmlCommentLiteral.concatExpr = rewriteExpr(\n constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));\n result = xmlCommentLiteral;\n }\n\n @Override\n public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {\n xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);\n xmlProcInsLiteral.dataConcatExpr =\n rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));\n result = xmlProcInsLiteral;\n }\n\n @Override\n public void visit(BLangXMLQuotedString xmlQuotedString) {\n xmlQuotedString.concatExpr = rewriteExpr(\n constructStringTemplateConcatExpression(xmlQuotedString.textFragments));\n result = xmlQuotedString;\n }\n\n @Override\n public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {\n result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));\n }\n\n /**\n * The raw template literal gets desugared to a type init expression. For each literal, a new object class type\n * def is generated from the object type. The type init expression creates an instance of this generated object\n * type. 
For example, consider the following statements:\n * string name = \"Pubudu\";\n * 'object:RawTemplate rt = `Hello ${name}!`;\n *\n * The raw template literal above is desugared to:\n * type RawTemplate$Impl$0 object {\n * public string[] strings = [\"Hello \", \"!\"];\n * public (any|error)[] insertions;\n *\n * function init((any|error)[] insertions) {\n * self.insertions = insertions;\n * }\n * };\n *\n * \n * 'object:RawTemplate rt = new RawTemplate$Impl$0([name]);\n *\n * @param rawTemplateLiteral The raw template literal to be desugared.\n */\n @Override\n public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {\n Location pos = rawTemplateLiteral.pos;\n BObjectType objType = (BObjectType) Types.getImpliedType(rawTemplateLiteral.getBType());\n BLangClassDefinition objClassDef =\n desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);\n BObjectType classObjType = (BObjectType) objClassDef.getBType();\n\n BVarSymbol insertionsSym = classObjType.fields.get(\"insertions\").symbol;\n BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);\n insertionsList.exprs.addAll(rawTemplateLiteral.insertions);\n insertionsList.expectedType = insertionsSym.type;\n\n \n BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);\n typeNewExpr.argsExpr.add(insertionsList);\n BLangInvocation initInvocation = (BLangInvocation) typeNewExpr.initInvocation;\n initInvocation.argExprs.add(insertionsList);\n initInvocation.requiredArgs.add(insertionsList);\n\n result = rewriteExpr(typeNewExpr);\n }\n\n /**\n * This method desugars a raw template literal object class for the provided raw template object type as follows.\n * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;\n * is desugared to,\n * type $anonType$0 object {\n * public string[] strings = [\"Hello \", \"!\"];\n * public (any|error)[] insertions;\n *\n * function init((any|error)[] insertions) {\n * 
self.insertions = insertions;\n * }\n * };\n * @param strings The string portions of the literal\n * @param objectType The abstract object type for which an object class needs to be generated\n * @param pos The diagnostic position info for the type node\n * @return Returns the generated concrete object class def\n */\n private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List strings, BObjectType objectType,\n Location pos) {\n \n BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;\n Name objectClassName = Names.fromString(\n anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));\n\n BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,\n env.enclPkg.packageID, null, env.enclPkg.symbol,\n pos, VIRTUAL, false);\n classTSymbol.flags |= Flags.CLASS;\n\n \n BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);\n objectClassType.fields = objectType.fields;\n classTSymbol.type = objectClassType;\n objectClassType.typeIdSet.add(objectType.typeIdSet);\n\n \n\n\n\n BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);\n classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);\n\n \n \n BType stringsType = objectClassType.fields.get(\"strings\").symbol.type;\n BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);\n stringsList.exprs.addAll(strings);\n stringsList.expectedType = stringsType;\n classDef.fields.get(0).expr = stringsList;\n\n \n BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);\n classDef.initFunction = userDefinedInitFunction;\n env.enclPkg.functions.add(userDefinedInitFunction);\n env.enclPkg.topLevelNodes.add(userDefinedInitFunction);\n\n \n BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);\n tempGeneratedInitFunction.clonedEnv = 
SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,\n tempGeneratedInitFunction.symbol.scope, env);\n this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);\n classDef.generatedInitFunction = tempGeneratedInitFunction;\n env.enclPkg.functions.add(classDef.generatedInitFunction);\n env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);\n\n return rewrite(classDef, env);\n }\n\n /**\n * Creates a user-defined init() method for the provided object type node. If there are fields without default\n * values specified in the type node, this will add parameters for those fields in the init() method and assign the\n * param values to the respective fields in the method body.\n *\n * @param classDefn The object type node for which the init() method is generated\n * @param env The symbol env for the object type node\n * @return The generated init() method\n */\n private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {\n BLangFunction initFunction =\n TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.symbol, env, names,\n Names.USER_DEFINED_INIT_SUFFIX, symTable, classDefn.getBType());\n BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);\n typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,\n (BInvokableType) initFunction.getBType(), classDefn.pos);\n classDefn.initFunction = initFunction;\n initFunction.returnTypeNode.setBType(symTable.nilType);\n\n BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;\n BInvokableType initFnType = (BInvokableType) initFunction.getBType();\n for (BLangSimpleVariable field : classDefn.fields) {\n if (field.expr != null) {\n continue;\n }\n BVarSymbol fieldSym = field.symbol;\n BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,\n initFunction.symbol, classDefn.pos, VIRTUAL);\n BLangSimpleVariable 
param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,\n fieldSym.type, null, paramSym);\n param.flagSet.add(Flag.FINAL);\n initFunction.symbol.scope.define(paramSym.name, paramSym);\n initFunction.symbol.params.add(paramSym);\n initFnType.paramTypes.add(param.getBType());\n initFunction.requiredParams.add(param);\n\n BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);\n BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),\n initFunction.receiver.symbol, field.name);\n initFuncBody.addStatement(fieldInit);\n }\n\n return initFunction;\n }\n\n @Override\n public void visit(BLangWorkerAsyncSendExpr asyncSendExpr) {\n asyncSendExpr.expr = visitCloneInvocation(rewriteExpr(asyncSendExpr.expr), asyncSendExpr.expr.getBType());\n this.channelsWithinIfStmt.add(asyncSendExpr.getChannel());\n result = asyncSendExpr;\n }\n\n @Override\n public void visit(BLangWorkerSyncSendExpr syncSendExpr) {\n syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());\n this.channelsWithinIfStmt.add(syncSendExpr.getChannel());\n result = syncSendExpr;\n }\n\n @Override\n public void visit(BLangAlternateWorkerReceive altWorkerReceive) {\n result = altWorkerReceive;\n }\n\n @Override\n public void visit(BLangMultipleWorkerReceive multipleWorkerReceive) {\n result = multipleWorkerReceive;\n }\n\n @Override\n public void visit(BLangWorkerReceive workerReceiveNode) {\n result = workerReceiveNode;\n }\n\n @Override\n public void visit(BLangWorkerFlushExpr workerFlushExpr) {\n workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts\n .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());\n result = workerFlushExpr;\n }\n\n @Override\n public void visit(BLangTransactionalExpr transactionalExpr) {\n BInvokableSymbol isTransactionalSymbol =\n (BInvokableSymbol) 
transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);\n result = ASTBuilderUtil\n .createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),\n Collections.emptyList(), symResolver);\n }\n\n @Override\n public void visit(BLangCommitExpr commitExpr) {\n BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);\n result = rewriteExpr(stmtExpr);\n }\n\n @Override\n public void visit(BLangFail failNode) {\n if (this.onFailClause != null && !this.desugarToReturn) {\n if (this.onFailClause.bodyContainsFail) {\n result = rewriteNestedOnFail(this.onFailClause, failNode);\n } else {\n result = createOnFailInvocation(onFailClause, failNode);\n }\n } else {\n BLangReturn stmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));\n stmt.desugared = true;\n result = stmt;\n }\n }\n\n \n \n\n @Override\n public void visit(BLangLocalVarRef localVarRef) {\n result = localVarRef;\n }\n\n @Override\n public void visit(BLangFieldVarRef fieldVarRef) {\n result = fieldVarRef;\n }\n\n @Override\n public void visit(BLangPackageVarRef packageVarRef) {\n result = packageVarRef;\n }\n\n @Override\n public void visit(BLangFunctionVarRef functionVarRef) {\n result = functionVarRef;\n }\n\n @Override\n public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {\n result = fieldAccessExpr;\n }\n\n @Override\n public void visit(BLangStructFunctionVarRef functionVarRef) {\n result = functionVarRef;\n }\n\n @Override\n public void visit(BLangMapAccessExpr mapKeyAccessExpr) {\n result = mapKeyAccessExpr;\n }\n\n @Override\n public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {\n result = arrayIndexAccessExpr;\n }\n\n @Override\n public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {\n result = arrayIndexAccessExpr;\n }\n\n @Override\n public void visit(BLangTableAccessExpr tableKeyAccessExpr) {\n result = tableKeyAccessExpr;\n }\n\n @Override\n public void 
    // Expands XML element filters into string-literal arguments for the langlib
    // filtering invocation. A filter with a bound namespace symbol is expanded to
    // a fully qualified name ({uri}local), except for the `*` filter in the
    // default (empty) namespace, which stays as-is.
    // NOTE(review): generic type parameters appear to have been stripped from this
    // signature during extraction — presumably ArrayList<BLangExpression> /
    // List<BLangXMLElementFilter>; confirm against the original source.
    private ArrayList expandFilters(List filters) {
        ArrayList args = new ArrayList<>();
        for (BLangXMLElementFilter filter : filters) {
            BSymbol nsSymbol = filter.namespaceSymbol;
            String filterName = filter.name;
            if (nsSymbol != null &&
                    !(filter.namespace.equals(XMLConstants.DEFAULT_NS_PREFIX) && filterName.equals("*"))) {
                // Qualified filter: build the expanded {namespaceURI}name form.
                String expandedName = createExpandedQName(((BXMLNSSymbol) nsSymbol).namespaceURI, filterName);
                args.add(createStringLiteral(filter.elemNamePos, expandedName));
            } else {
                // Unqualified (or default-namespace wildcard) filter: pass the name through.
                args.add(createStringLiteral(filter.elemNamePos, filterName));
            }
        }
        return args;
    }
symResolver.lookupLangLibMethod(symTable.xmlType, Names.fromString(functionName), env);\n\n ArrayList requiredArgs = new ArrayList<>();\n requiredArgs.add(invokeOnExpr);\n requiredArgs.addAll(args);\n invocationNode.requiredArgs = requiredArgs;\n invocationNode.restArgs = rewriteExprs(restArgs);\n\n invocationNode.setBType(invocationNode.symbol.type.getReturnType());\n invocationNode.langLibInvocation = true;\n return invocationNode;\n }\n\n @Override\n public void visit(BLangXMLNavigationAccess xmlNavigation) {\n xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);\n xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);\n\n ArrayList filters = expandFilters(xmlNavigation.filters);\n\n \n if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {\n BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,\n XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);\n result = rewriteExpr(invocationNode);\n } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {\n \n BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,\n xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());\n result = rewriteExpr(invocationNode);\n } else {\n BLangExpression childIndexExpr;\n \n if (xmlNavigation.childIndex == null) {\n childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);\n } else {\n \n childIndexExpr = xmlNavigation.childIndex;\n }\n ArrayList args = new ArrayList<>();\n args.add(rewriteExpr(childIndexExpr));\n\n BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,\n XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);\n result = rewriteExpr(invocationNode);\n }\n }\n\n @Override\n public void visit(BLangIsAssignableExpr assignableExpr) {\n assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);\n result = assignableExpr;\n }\n\n @Override\n 
public void visit(BLangTypedescExpr typedescExpr) {\n typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);\n result = typedescExpr;\n }\n\n @Override\n public void visit(BLangRestArgsExpression bLangVarArgsExpression) {\n result = rewriteExpr(bLangVarArgsExpression.expr);\n }\n\n @Override\n public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {\n bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);\n result = bLangNamedArgsExpression.expr;\n }\n\n @Override\n public void visit(BLangCheckedExpr checkedExpr) {\n visitCheckAndCheckPanicExpr(checkedExpr, false);\n }\n\n @Override\n public void visit(BLangCheckPanickedExpr checkedExpr) {\n visitCheckAndCheckPanicExpr(checkedExpr, true);\n }\n\n private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {\n \n if (checkedExpr.isRedundantChecking) {\n result = rewriteExpr(checkedExpr.expr);\n return;\n }\n\n \n \n \n \n \n \n \n \n \n \n \n \n\n Location pos = checkedExpr.pos;\n \n String resultVarName = \"_$result$_\";\n BType resultType = checkedExpr.getBType();\n BLangSimpleVariable resultVar =\n ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,\n new BVarSymbol(0, Names.fromString(resultVarName),\n this.env.scope.owner.pkgID, resultType,\n this.env.scope.owner, pos, VIRTUAL));\n BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);\n resultVarDef.desugared = true;\n BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);\n\n String checkedExprVarName = GEN_VAR_PREFIX.value;\n BType checkedExprType = checkedExpr.expr.getBType();\n BLangSimpleVariable checkedExprVar =\n ASTBuilderUtil.createVariable(pos, checkedExprVarName, checkedExprType,\n checkedExpr.expr, new BVarSymbol(0, Names.fromString(checkedExprVarName),\n this.env.scope.owner.pkgID, checkedExprType,\n this.env.scope.owner, pos, VIRTUAL));\n BLangSimpleVariableDef checkedExprVarDef = 
ASTBuilderUtil.createVariableDef(pos, checkedExprVar);\n BLangSimpleVarRef checkedExprVarRef = ASTBuilderUtil.createVariableRef(pos, checkedExprVar.symbol);\n\n BLangAssignment successAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,\n createTypeCastExpr(checkedExprVarRef, resultType));\n BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);\n ifBody.addStatement(successAssignment);\n\n BLangBlockStmt elseBody = getSafeErrorAssignment(pos, checkedExprVarRef, this.env.enclInvokable.symbol,\n checkedExpr.equivalentErrorTypeList, isCheckPanic);\n\n BLangValueType checkedExprTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();\n checkedExprTypeNode.setBType(resultType);\n checkedExprTypeNode.typeKind = resultType.getKind();\n BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,\n createTypeCheckExpr(pos, checkedExprVarRef, checkedExprTypeNode), ifBody, elseBody);\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{\n add(resultVarDef);\n add(checkedExprVarDef);\n add(ifStmt);\n }});\n\n BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);\n stmtExpr.setBType(resultType);\n result = rewriteExpr(stmtExpr);\n }\n\n @Override\n public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {\n final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,\n serviceConstructorExpr.serviceNode.serviceClass.symbol.type);\n serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));\n result = rewriteExpr(typeInit);\n }\n\n @Override\n public void visit(BLangObjectConstructorExpression bLangObjectConstructorExpression) {\n visit(bLangObjectConstructorExpression.classNode);\n bLangObjectConstructorExpression.classNode.annAttachments.forEach(attachment -> rewrite(attachment, env));\n result = rewriteExpr(bLangObjectConstructorExpression.typeInit);\n }\n\n @Override\n public void 
visit(BLangAnnotAccessExpr annotAccessExpr) {\n\n BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();\n binaryExpr.pos = annotAccessExpr.pos;\n binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;\n binaryExpr.lhsExpr = annotAccessExpr.expr;\n binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,\n annotAccessExpr.annotationSymbol.bvmAlias());\n binaryExpr.setBType(annotAccessExpr.getBType());\n binaryExpr.opSymbol = new BOperatorSymbol(Names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,\n new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),\n binaryExpr.rhsExpr.getBType()),\n annotAccessExpr.getBType(), null), null,\n symTable.builtinPos, VIRTUAL);\n result = rewriteExpr(binaryExpr);\n }\n\n @Override\n public void visit(BLangTypeTestExpr typeTestExpr) {\n BLangExpression expr = typeTestExpr.expr;\n if (types.isValueType(expr.getBType())) {\n expr = types.addConversionExprIfRequired(expr, symTable.anyType);\n }\n if (typeTestExpr.isNegation) {\n BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,\n typeTestExpr.expr, typeTestExpr.typeNode);\n BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();\n bLangGroupExpr.expression = bLangTypeTestExpr;\n bLangGroupExpr.setBType(typeTestExpr.getBType());\n BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,\n typeTestExpr.getBType(),\n OperatorKind.NOT, null);\n result = rewriteExpr(unaryExpr);\n return;\n }\n typeTestExpr.expr = rewriteExpr(expr);\n typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);\n result = typeTestExpr;\n }\n\n @Override\n public void visit(BLangIsLikeExpr isLikeExpr) {\n isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);\n result = isLikeExpr;\n }\n\n @Override\n public void visit(BLangStatementExpression bLangStatementExpression) {\n bLangStatementExpression.expr = 
rewriteExpr(bLangStatementExpression.expr);\n bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);\n result = bLangStatementExpression;\n }\n\n @Override\n public void visit(BLangQueryExpr queryExpr) {\n boolean prevIsVisitingQuery = this.isVisitingQuery;\n boolean prevDesugarToReturn = this.desugarToReturn;\n this.isVisitingQuery = true;\n this.desugarToReturn = true;\n BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env, getVisibleXMLNSStmts(env));\n result = rewrite(stmtExpr, env);\n this.isVisitingQuery = prevIsVisitingQuery;\n this.desugarToReturn = prevDesugarToReturn;\n }\n\n List getVisibleXMLNSStmts(SymbolEnv env) {\n Map nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);\n return nameBXMLNSSymbolMap.keySet().stream()\n .map(key -> this.stmtsToBePropagatedToQuery.get(key))\n .filter(Objects::nonNull)\n .collect(Collectors.toList());\n }\n\n @Override\n public void visit(BLangQueryAction queryAction) {\n boolean prevIsVisitingQuery = this.isVisitingQuery;\n boolean prevDesugarToReturn = this.desugarToReturn;\n this.desugarToReturn = true;\n this.isVisitingQuery = true;\n BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env, getVisibleXMLNSStmts(env));\n result = rewrite(stmtExpr, env);\n this.isVisitingQuery = prevIsVisitingQuery;\n this.desugarToReturn = prevDesugarToReturn;\n }\n\n @Override\n public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {\n jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);\n result = jsonArrayLiteral;\n }\n\n @Override\n public void visit(BLangConstant constant) {\n\n BConstantSymbol constSymbol = constant.symbol;\n BType refType = Types.getImpliedType(constSymbol.literalType);\n if (refType.tag <= TypeTags.BOOLEAN || refType.tag == TypeTags.NIL) {\n if (refType.tag != TypeTags.NIL && (constSymbol.value == null ||\n constSymbol.value.value == null)) {\n throw new IllegalStateException();\n }\n BLangLiteral literal = 
ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,\n constSymbol.value.value);\n constant.expr = rewriteExpr(literal);\n } else {\n constant.expr = rewriteExpr(constant.expr);\n }\n constant.annAttachments.forEach(attachment -> rewrite(attachment, env));\n result = constant;\n }\n\n @Override\n public void visit(BLangIgnoreExpr ignoreExpr) {\n result = ignoreExpr;\n }\n\n @Override\n public void visit(BLangDynamicArgExpr dynamicParamExpr) {\n dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);\n dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);\n result = dynamicParamExpr;\n }\n\n @Override\n public void visit(BLangConstRef constantRef) {\n result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);\n }\n\n @Override\n public void visit(BLangRegExpTemplateLiteral regExpTemplateLiteral) {\n regExpTemplateLiteral.reDisjunction = rewriteExpr(regExpTemplateLiteral.reDisjunction);\n result = regExpTemplateLiteral;\n }\n\n @Override\n public void visit(BLangReDisjunction reDisjunction) {\n reDisjunction.sequenceList.forEach(this::rewriteExpr);\n result = reDisjunction;\n }\n\n @Override\n public void visit(BLangReSequence reSequence) {\n reSequence.termList.forEach(this::rewriteExpr);\n result = reSequence;\n }\n\n @Override\n public void visit(BLangReAssertion reAssertion) {\n reAssertion.assertion = rewriteExpr(reAssertion.assertion);\n result = reAssertion;\n }\n\n @Override\n public void visit(BLangReAtomQuantifier reAtomQuantifier) {\n BLangExpression reAtom = reAtomQuantifier.atom;\n if (symResolver.isReAtomNode(reAtom.getKind())) {\n reAtomQuantifier.atom = rewriteExpr(reAtom);\n } else {\n \n reAtomQuantifier.atom = rewriteExpr(getToStringInvocationOnExpr(reAtom));\n }\n\n \n if (reAtomQuantifier.quantifier == null) {\n reAtomQuantifier.quantifier = ASTBuilderUtil.createEmptyQuantifier(reAtomQuantifier.pos,\n symTable.anydataType, 
symTable.stringType);\n }\n reAtomQuantifier.quantifier = rewriteExpr(reAtomQuantifier.quantifier);\n result = reAtomQuantifier;\n }\n\n @Override\n public void visit(BLangReAtomCharOrEscape reAtomCharOrEscape) {\n reAtomCharOrEscape.charOrEscape = rewriteExpr(reAtomCharOrEscape.charOrEscape);\n result = reAtomCharOrEscape;\n }\n\n @Override\n public void visit(BLangReQuantifier reQuantifier) {\n reQuantifier.quantifier = rewriteExpr(reQuantifier.quantifier);\n \n if (reQuantifier.nonGreedyChar == null) {\n reQuantifier.nonGreedyChar = ASTBuilderUtil.createLiteral(reQuantifier.pos,\n symTable.stringType, \"\");\n }\n reQuantifier.nonGreedyChar = rewriteExpr(reQuantifier.nonGreedyChar);\n result = reQuantifier;\n }\n\n @Override\n public void visit(BLangReCharacterClass reCharacterClass) {\n reCharacterClass.characterClassStart = rewriteExpr(reCharacterClass.characterClassStart);\n \n if (reCharacterClass.negation == null) {\n reCharacterClass.negation = ASTBuilderUtil.createLiteral(reCharacterClass.pos,\n symTable.stringType, \"\");\n }\n reCharacterClass.negation = rewriteExpr(reCharacterClass.negation);\n \n if (reCharacterClass.charSet == null) {\n reCharacterClass.charSet = ASTBuilderUtil.createEmptyCharSet(symTable.anydataType);\n }\n reCharacterClass.charSet = rewriteExpr(reCharacterClass.charSet);\n reCharacterClass.characterClassEnd = rewriteExpr(reCharacterClass.characterClassEnd);\n result = reCharacterClass;\n }\n\n @Override\n public void visit(BLangReCharSet reCharSet) {\n reCharSet.charSetAtoms.forEach(this::rewriteExpr);\n result = reCharSet;\n }\n\n @Override\n public void visit(BLangReCharSetRange reCharSetRange) {\n reCharSetRange.lhsCharSetAtom = rewriteExpr(reCharSetRange.lhsCharSetAtom);\n reCharSetRange.dash = rewriteExpr(reCharSetRange.dash);\n reCharSetRange.rhsCharSetAtom = rewriteExpr(reCharSetRange.rhsCharSetAtom);\n result = reCharSetRange;\n }\n\n @Override\n public void visit(BLangReCapturingGroups reCapturingGroups) {\n 
reCapturingGroups.openParen = rewriteExpr(reCapturingGroups.openParen);\n \n if (reCapturingGroups.flagExpr == null) {\n reCapturingGroups.flagExpr = ASTBuilderUtil.createEmptyFlagExpression(reCapturingGroups.pos,\n symTable.anydataType, symTable.stringType);\n }\n reCapturingGroups.flagExpr = rewriteExpr(reCapturingGroups.flagExpr);\n reCapturingGroups.disjunction = rewriteExpr(reCapturingGroups.disjunction);\n reCapturingGroups.closeParen = rewriteExpr(reCapturingGroups.closeParen);\n result = reCapturingGroups;\n }\n\n @Override\n public void visit(BLangReFlagExpression reFlagExpression) {\n reFlagExpression.questionMark = rewriteExpr(reFlagExpression.questionMark);\n \n if (reFlagExpression.flagsOnOff == null) {\n reFlagExpression.flagsOnOff = ASTBuilderUtil.createEmptyFlagOnOff(reFlagExpression.pos,\n symTable.anydataType, symTable.stringType);\n }\n reFlagExpression.flagsOnOff = rewriteExpr(reFlagExpression.flagsOnOff);\n reFlagExpression.colon = rewriteExpr(reFlagExpression.colon);\n result = reFlagExpression;\n }\n\n @Override\n public void visit(BLangReFlagsOnOff reFlagsOnOff) {\n reFlagsOnOff.flags = rewriteExpr(reFlagsOnOff.flags);\n result = reFlagsOnOff;\n }\n\n \n\n \n BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,\n BInvokableSymbol iteratorInvokableSymbol,\n boolean isIteratorFuncFromLangLib) {\n\n\n BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);\n BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();\n iteratorInvocation.pos = pos;\n iteratorInvocation.expr = dataReference;\n iteratorInvocation.symbol = iteratorInvokableSymbol;\n iteratorInvocation.setBType(iteratorInvokableSymbol.retType);\n iteratorInvocation.argExprs = Lists.of(dataReference);\n iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;\n iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;\n BVarSymbol iteratorSymbol = new 
BVarSymbol(0, Names.fromString(\"$iterator$\"), this.env.scope.owner.pkgID,\n iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);\n\n \n BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, \"$iterator$\",\n iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);\n return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);\n }\n\n \n BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,\n BVarSymbol iteratorSymbol,\n BVarSymbol resultSymbol) {\n BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);\n BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, \"$result$\",\n nillableResultType, nextInvocation, resultSymbol);\n return ASTBuilderUtil.createVariableDef(pos, resultVariable);\n }\n\n BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {\n BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, \"next\");\n BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);\n BInvokableSymbol nextFuncSymbol =\n getNextFunc((BObjectType) Types.getImpliedType(iteratorSymbol.type)).symbol;\n BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();\n nextInvocation.pos = pos;\n nextInvocation.name = nextIdentifier;\n nextInvocation.expr = iteratorReferenceInNext;\n nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));\n nextInvocation.argExprs = nextInvocation.requiredArgs;\n nextInvocation.symbol = nextFuncSymbol;\n nextInvocation.setBType(nextFuncSymbol.retType);\n return nextInvocation;\n }\n\n private BAttachedFunction getNextFunc(BObjectType iteratorType) {\n BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;\n for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {\n if (bAttachedFunction.funcName.value.equals(\"next\")) 
{\n return bAttachedFunction;\n }\n }\n return null;\n }\n\n \n BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,\n BVarSymbol resultSymbol) {\n return getFieldAccessExpression(location, \"value\", varType, resultSymbol);\n }\n\n BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,\n BVarSymbol resultSymbol) {\n BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);\n BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);\n\n BLangFieldBasedAccess fieldBasedAccessExpression =\n ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);\n fieldBasedAccessExpression.pos = pos;\n fieldBasedAccessExpression.setBType(varType);\n fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType();\n return fieldBasedAccessExpression;\n }\n\n private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {\n BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();\n BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();\n returnNode.pos = bLangArrowFunction.body.expr.pos;\n returnNode.setExpression(bLangArrowFunction.body.expr);\n blockNode.addStatement(returnNode);\n return blockNode;\n }\n\n protected BLangInvocation createInvocationNode(String functionName, List args, BType retType) {\n BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();\n BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n name.setLiteral(false);\n name.setValue(functionName);\n invocationNode.name = name;\n invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n\n \n invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;\n invocationNode.setBType(retType);\n invocationNode.requiredArgs = args;\n return invocationNode;\n }\n\n protected 
BLangInvocation createLangLibInvocationNode(String functionName,\n BLangExpression onExpr,\n List args,\n BType retType,\n Location pos) {\n BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();\n invocationNode.pos = pos;\n BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n name.setLiteral(false);\n name.setValue(functionName);\n name.pos = pos;\n invocationNode.name = name;\n invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n\n invocationNode.expr = onExpr;\n invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), Names.fromString(functionName), env);\n\n ArrayList requiredArgs = new ArrayList<>();\n requiredArgs.add(onExpr);\n requiredArgs.addAll(args);\n invocationNode.requiredArgs = requiredArgs;\n\n invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);\n invocationNode.langLibInvocation = true;\n return invocationNode;\n }\n\n private BLangInvocation createLangLibInvocationNode(String functionName,\n List requiredArgs,\n BType retType,\n Location pos) {\n return createLangLibInvocationNode(functionName, requiredArgs, new ArrayList<>(), retType, pos);\n }\n\n private BLangInvocation createLangLibInvocationNode(String functionName,\n List requiredArgs,\n List restArgs,\n BType retType,\n Location pos) {\n BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();\n invocationNode.pos = pos;\n BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n name.setLiteral(false);\n name.setValue(functionName);\n name.pos = pos;\n invocationNode.name = name;\n invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();\n\n invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,\n Names.fromString(functionName), env);\n\n invocationNode.requiredArgs = new ArrayList<>(requiredArgs);\n invocationNode.restArgs 
= new ArrayList<>(restArgs);\n\n invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);\n invocationNode.langLibInvocation = true;\n return invocationNode;\n }\n\n private BLangArrayLiteral createArrayLiteralExprNode() {\n BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();\n expr.exprs = new ArrayList<>();\n expr.setBType(new BArrayType(symTable.anyType));\n return expr;\n }\n\n private BFunctionPointerInvocation visitFunctionPointerInvocation(BLangInvocation iExpr) {\n BLangExpression rewritten = rewriteExpr(getFunctionPointerExpr(iExpr));\n return new BFunctionPointerInvocation(iExpr, rewritten);\n }\n\n protected BLangValueExpression getFunctionPointerExpr(BLangInvocation iExpr) {\n BLangValueExpression expr;\n if (iExpr.expr == null) {\n BLangSimpleVarRef varRef = new BLangSimpleVarRef();\n varRef.variableName = iExpr.name;\n expr = varRef;\n } else {\n BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();\n fieldBasedAccess.expr = iExpr.expr;\n fieldBasedAccess.field = iExpr.name;\n expr = fieldBasedAccess;\n }\n expr.symbol = iExpr.symbol;\n expr.setBType(iExpr.symbol.type);\n return expr;\n }\n\n private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {\n if (types.isValueType(expr.getBType())) {\n return expr;\n }\n if (Types.getImpliedType(expr.getBType()).tag == TypeTags.ERROR) {\n return expr;\n }\n BLangInvocation cloneInvok = createLangLibInvocationNode(\"clone\", expr, new ArrayList<>(), null, expr.pos);\n return types.addConversionExprIfRequired(cloneInvok, lhsType);\n }\n\n private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {\n if (types.isValueType(expr.getBType())) {\n return expr;\n }\n if (Types.getImpliedType(expr.getBType()).tag == TypeTags.ERROR) {\n return expr;\n }\n BLangInvocation cloneInvok = createLangLibInvocationNode(\"cloneReadOnly\", expr, new ArrayList<>(),\n 
expr.getBType(),\n expr.pos);\n return types.addConversionExprIfRequired(cloneInvok, lhsType);\n }\n\n @SuppressWarnings(\"unchecked\")\n E rewrite(E node, SymbolEnv env) {\n if (node == null) {\n return null;\n }\n\n if (node.desugared) {\n return node;\n }\n\n SymbolEnv previousEnv = this.env;\n this.env = env;\n\n node.accept(this);\n BLangNode resultNode = this.result;\n this.result = null;\n resultNode.desugared = true;\n\n this.env = previousEnv;\n return (E) resultNode;\n }\n\n @SuppressWarnings(\"unchecked\")\n E rewriteExpr(E node) {\n if (node == null) {\n return null;\n }\n\n if (node.desugared) {\n return node;\n }\n\n BLangExpression expr = node;\n if (node.impConversionExpr != null) {\n expr = node.impConversionExpr;\n node.impConversionExpr = null;\n }\n\n expr.accept(this);\n BLangNode resultNode = this.result;\n this.result = null;\n resultNode.desugared = true;\n\n return (E) resultNode;\n }\n\n @SuppressWarnings(\"unchecked\")\n E rewrite(E statement, SymbolEnv env) {\n if (statement == null) {\n return null;\n }\n BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);\n return (E) stmt;\n }\n\n private List rewriteStmt(List nodeList, SymbolEnv env) {\n for (int i = 0; i < nodeList.size(); i++) {\n nodeList.set(i, rewrite(nodeList.get(i), env));\n }\n return nodeList;\n }\n\n private List rewrite(List nodeList, SymbolEnv env) {\n for (int i = 0; i < nodeList.size(); i++) {\n nodeList.set(i, rewrite(nodeList.get(i), env));\n }\n return nodeList;\n }\n\n private List rewriteExprs(List nodeList) {\n for (int i = 0; i < nodeList.size(); i++) {\n nodeList.set(i, rewriteExpr(nodeList.get(i)));\n }\n return nodeList;\n }\n\n private BLangLiteral createStringLiteral(Location pos, String value) {\n BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);\n stringLit.pos = pos;\n return stringLit;\n }\n\n private BLangLiteral createIntLiteral(long value) {\n BLangLiteral literal = (BLangLiteral) 
TreeBuilder.createLiteralExpression();\n literal.value = value;\n literal.setBType(symTable.intType);\n return literal;\n }\n\n private BLangLiteral createByteLiteral(Location pos, Byte value) {\n BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);\n byteLiteral.pos = pos;\n return byteLiteral;\n }\n\n private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {\n if (types.isSameType(expr.getBType(), targetType)) {\n return expr;\n }\n\n BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();\n conversionExpr.pos = expr.pos;\n conversionExpr.expr = expr;\n conversionExpr.setBType(targetType);\n conversionExpr.targetType = targetType;\n conversionExpr.internal = true;\n return conversionExpr;\n }\n\n private BType getElementType(BType bType) {\n BType type = Types.getImpliedType(bType);\n if (type.tag != TypeTags.ARRAY) {\n return bType;\n }\n\n return getElementType(((BArrayType) type).getElementType());\n }\n\n \n private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {\n if (Symbols.isNative(invokableNode.symbol) ||\n (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {\n return;\n }\n \n \n BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;\n if (invokableNode.symbol.type.getReturnType().isNullable() && (funcBody.stmts.isEmpty()\n || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {\n Location invPos = invokableNode.pos;\n Location returnStmtPos;\n if (invPos != null && !invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {\n returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().fileName(),\n invPos.lineRange().endLine().line(),\n invPos.lineRange().endLine().line(),\n invPos.lineRange().startLine().offset(),\n invPos.lineRange().startLine().offset(), 0, 0);\n } else {\n returnStmtPos = null;\n }\n BLangReturn 
returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);\n funcBody.addStatement(returnStmt);\n }\n }\n\n /**\n * Reorder the invocation arguments to match the original function signature.\n *\n * @param iExpr Function invocation expressions to reorder the arguments\n */\n private void reorderArguments(BLangInvocation iExpr) {\n BSymbol symbol = iExpr.symbol;\n\n if (symbol == null || Types.getImpliedType(symbol.type).tag != TypeTags.INVOKABLE) {\n return;\n }\n\n BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;\n\n List restArgs = iExpr.restArgs;\n int originalRequiredArgCount = iExpr.requiredArgs.size();\n\n \n BLangSimpleVarRef varargRef = null;\n BLangBlockStmt blockStmt = null;\n BType varargVarType = null;\n\n int restArgCount = restArgs.size();\n\n if (restArgCount > 0 &&\n restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&\n originalRequiredArgCount < invokableSymbol.params.size()) {\n \n \n \n BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;\n Location varargExpPos = expr.pos;\n varargVarType = expr.getBType();\n String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;\n\n BVarSymbol varargVarSymbol = new BVarSymbol(0, Names.fromString(varargVarName), this.env.scope.owner.pkgID,\n varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);\n varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);\n\n BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);\n\n BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);\n varDef.var = var;\n varDef.setBType(varargVarType);\n\n blockStmt = createBlockStmt(varargExpPos);\n blockStmt.stmts.add(varDef);\n }\n\n if (!invokableSymbol.params.isEmpty()) {\n \n reorderNamedArgs(iExpr, invokableSymbol, varargRef);\n }\n\n \n if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != 
NodeKind.REST_ARGS_EXPR) {\n if (invokableSymbol.restParam == null) {\n return;\n }\n\n BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();\n List exprs = new ArrayList<>();\n\n BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;\n BType elemType = arrayType.eType;\n\n for (BLangExpression restArg : restArgs) {\n exprs.add(types.addConversionExprIfRequired(restArg, elemType));\n }\n\n arrayLiteral.exprs = exprs;\n arrayLiteral.setBType(arrayType);\n\n if (restArgCount != 0) {\n iExpr.restArgs = new ArrayList<>();\n }\n\n iExpr.restArgs.add(arrayLiteral);\n return;\n }\n\n \n if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {\n\n \n \n if (iExpr.requiredArgs.size() == originalRequiredArgCount) {\n return;\n }\n\n \n \n \n \n BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);\n BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);\n BType type = firstNonRestArg.impConversionExpr == null ?\n firstNonRestArg.getBType() : firstNonRestArg.impConversionExpr.targetType;\n stmtExpression.setBType(type);\n iExpr.requiredArgs.add(0, stmtExpression);\n\n \n if (invokableSymbol.restParam == null) {\n restArgs.remove(0);\n return;\n }\n\n \n \n \n \n BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);\n BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;\n if (Types.getImpliedType(restArgsExpression.getBType()).tag == TypeTags.RECORD) {\n BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);\n restArgs.add(expr);\n return;\n }\n Location pos = restArgsExpression.pos;\n\n BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();\n newArrayLiteral.setBType(restParamType);\n\n String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;\n BVarSymbol varSymbol = new BVarSymbol(0, Names.fromString(name), 
this.env.scope.owner.pkgID,\n restParamType, this.env.scope.owner, pos, VIRTUAL);\n BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);\n\n BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);\n BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);\n varDef.var = var;\n varDef.setBType(restParamType);\n\n BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);\n BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);\n BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,\n getModifiedIntRangeEndExpr(lengthInvocation));\n\n BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();\n foreach.pos = pos;\n foreach.collection = intRangeInvocation;\n types.setForeachTypedBindingPatternType(foreach);\n\n final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, \"$foreach$i\",\n foreach.varType);\n foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),\n this.env.scope.owner.pkgID, foreachVariable.getBType(),\n this.env.scope.owner, pos, VIRTUAL);\n BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);\n foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);\n foreach.isDeclaredWithVar = true;\n BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);\n\n BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);\n\n BType refType = Types.getImpliedType(varargVarType);\n if (refType.tag == TypeTags.ARRAY) {\n BArrayType arrayType = (BArrayType) refType;\n if (arrayType.state == BArrayState.CLOSED &&\n arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {\n \n \n valueExpr.setBType(restParamType.eType);\n } else {\n valueExpr.setBType(arrayType.eType);\n }\n } else {\n 
valueExpr.setBType(symTable.anyOrErrorType); \n }\n\n BLangExpression pushExpr = types.addConversionExprIfRequired(valueExpr, restParamType.eType);\n BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);\n BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,\n List.of(pushExpr),\n restParamType, pos);\n pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));\n expressionStmt.expr = pushInvocation;\n foreach.body = foreachBody;\n BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);\n newArrayBlockStmt.addStatement(varDef);\n newArrayBlockStmt.addStatement(foreach);\n\n BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);\n newArrayStmtExpression.setBType(restParamType);\n\n restArgs.add(types.addConversionExprIfRequired(newArrayStmtExpression, restParamType));\n return;\n }\n\n \n \n BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;\n\n BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();\n arrayLiteral.setBType(restParamType);\n\n BType elemType = restParamType.eType;\n Location pos = restArgs.get(0).pos;\n\n List exprs = new ArrayList<>();\n\n for (int i = 0; i < restArgCount - 1; i++) {\n exprs.add(types.addConversionExprIfRequired(restArgs.get(i), elemType));\n }\n arrayLiteral.exprs = exprs;\n\n BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();\n pushRestArgsExpr.pos = pos;\n pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);\n\n String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;\n BVarSymbol varSymbol = new BVarSymbol(0, Names.fromString(name), this.env.scope.owner.pkgID, restParamType,\n this.env.scope.owner, pos, VIRTUAL);\n BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);\n\n BLangSimpleVariable var = createVariable(pos, name, restParamType, 
arrayLiteral, varSymbol);\n BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);\n varDef.var = var;\n varDef.setBType(restParamType);\n\n BLangBlockStmt pushBlockStmt = createBlockStmt(pos);\n pushBlockStmt.stmts.add(varDef);\n\n BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);\n BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,\n new ArrayList() {{\n add(pushRestArgsExpr);\n }}, restParamType, pos);\n pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));\n expressionStmt.expr = pushInvocation;\n\n BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);\n stmtExpression.setBType(restParamType);\n\n iExpr.restArgs = new ArrayList(1) {{ add(stmtExpression); }};\n }\n\n private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {\n List args = new ArrayList<>();\n Map namedArgs = new LinkedHashMap<>();\n iExpr.requiredArgs.stream()\n .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)\n .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));\n\n List params = invokableSymbol.params;\n List incRecordLiterals = new ArrayList<>();\n BLangRecordLiteral incRecordParamAllowAdditionalFields = null;\n\n int varargIndex = 0;\n\n BType varargType = null;\n boolean tupleTypedVararg = false;\n\n if (varargRef != null) {\n varargType = Types.getImpliedType(varargRef.getBType());\n tupleTypedVararg = varargType.tag == TypeTags.TUPLE;\n }\n\n \n for (int i = 0; i < params.size(); i++) {\n BVarSymbol param = params.get(i);\n if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {\n \n args.add(iExpr.requiredArgs.get(i));\n } else if (namedArgs.containsKey(param.name.value)) {\n \n args.add(namedArgs.remove(param.name.value));\n } else if (param.getFlags().contains(Flag.INCLUDED)) {\n BLangRecordLiteral 
recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();\n BType paramType = param.type;\n recordLiteral.setBType(paramType);\n args.add(recordLiteral);\n incRecordLiterals.add(recordLiteral);\n if (((BRecordType) Types.getImpliedType(paramType)).restFieldType != symTable.noType) {\n incRecordParamAllowAdditionalFields = recordLiteral;\n }\n } else if (varargRef == null) {\n \n BLangExpression expr = new BLangIgnoreExpr();\n expr.setBType(param.type);\n args.add(expr);\n } else {\n \n \n if (Types.getImpliedType(varargRef.getBType()).tag == TypeTags.RECORD) {\n if (param.isDefaultable) {\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varargRef.pos);\n BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,\n List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);\n BLangSimpleVariableDef variableDef = createVarDef(\"$hasKey$\", hasKeyInvocation.getBType(),\n hasKeyInvocation, hasKeyInvocation.pos);\n blockStmt.stmts.add(variableDef);\n BLangSimpleVarRef simpleVarRef = ASTBuilderUtil.createVariableRef(variableDef.pos,\n variableDef.var.symbol);\n BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));\n BLangIndexBasedAccess memberAccessExpr =\n ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);\n BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);\n BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type, simpleVarRef,\n memberAccessExpr, ignoreExpr);\n BLangDynamicArgExpr dynamicArgExpr =\n ASTBuilderUtil.createDynamicParamExpression(simpleVarRef, param, ternaryExpr);\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, dynamicArgExpr);\n stmtExpr.setBType(dynamicArgExpr.getBType());\n args.add(rewriteExpr(stmtExpr));\n } else {\n BLangFieldBasedAccess fieldBasedAccessExpression =\n ASTBuilderUtil.createFieldAccessExpr(varargRef,\n 
ASTBuilderUtil.createIdentifier(param.pos, param.name.value));\n fieldBasedAccessExpression.setBType(param.type);\n args.add(fieldBasedAccessExpression);\n }\n } else {\n BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));\n BType memberAccessExprType = tupleTypedVararg ?\n ((BTupleType) varargType).getTupleTypes().get(varargIndex)\n : ((BArrayType) varargType).eType;\n args.add(types.addConversionExprIfRequired(\n ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType, varargRef, indexExpr),\n param.type));\n varargIndex++;\n }\n }\n }\n if (!namedArgs.isEmpty()) {\n setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);\n }\n iExpr.requiredArgs = args;\n }\n\n private void setFieldsForIncRecordLiterals(Map namedArgs,\n List incRecordLiterals,\n BLangRecordLiteral incRecordParamAllowAdditionalFields) {\n for (String name : namedArgs.keySet()) {\n boolean isAdditionalField = true;\n BLangNamedArgsExpression expr = (BLangNamedArgsExpression) namedArgs.get(name);\n for (BLangRecordLiteral recordLiteral : incRecordLiterals) {\n LinkedHashMap fields =\n ((BRecordType) Types.getImpliedType(recordLiteral.getBType())).fields;\n if (fields.containsKey(name) &&\n Types.getImpliedType(fields.get(name).type).tag != TypeTags.NEVER) {\n isAdditionalField = false;\n createAndAddRecordFieldForIncRecordLiteral(recordLiteral, expr);\n break;\n }\n }\n if (isAdditionalField) {\n createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, expr);\n }\n }\n }\n\n private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,\n BLangNamedArgsExpression expr) {\n BLangSimpleVarRef varRef = new BLangSimpleVarRef();\n varRef.variableName = expr.name;\n BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField = ASTBuilderUtil.\n createBLangRecordKeyValue(varRef, expr.expr);\n recordLiteral.fields.add(recordKeyValueField);\n }\n\n private BLangBlockStmt 
getSafeErrorAssignment(Location location, BLangSimpleVarRef ref,\n BSymbol invokableSymbol,\n List equivalentErrorTypes,\n boolean isCheckPanicExpr) {\n \n \n BType enclosingFuncReturnType = Types.getImpliedType(((BInvokableType) invokableSymbol.type).retType);\n Set returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?\n ((BUnionType) enclosingFuncReturnType).getMemberTypes() :\n new LinkedHashSet<>() {{\n add(enclosingFuncReturnType);\n }};\n\n \n boolean returnOnError = equivalentErrorTypes.stream()\n .allMatch(errorType -> returnTypeSet.stream()\n .anyMatch(retType -> types.isAssignable(errorType, retType)));\n\n String patternFailureCaseVarName = GEN_VAR_PREFIX.value + \"t_failure\";\n BLangSimpleVariable errorVar =\n ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType,\n createTypeCastExpr(ref, symTable.errorType),\n new BVarSymbol(0, Names.fromString(patternFailureCaseVarName),\n this.env.scope.owner.pkgID, symTable.errorType,\n this.env.scope.owner, location, VIRTUAL));\n\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(location);\n BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDef(location, errorVar);\n blockStmt.addStatement(errorVarDef);\n BLangVariableReference errorVarRef = ASTBuilderUtil.createVariableRef(location, errorVar.symbol);\n if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {\n \n BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();\n failStmt.pos = location;\n failStmt.expr = errorVarRef;\n blockStmt.addStatement(failStmt);\n if (returnOnError && this.shouldReturnErrors) {\n BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location, rewrite(errorVarRef, env));\n errorReturn.desugared = true;\n failStmt.exprStmt = errorReturn;\n }\n } else {\n \n BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();\n panicNode.pos = location;\n panicNode.expr = errorVarRef;\n blockStmt.addStatement(panicNode);\n }\n\n return 
blockStmt;\n }\n\n private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {\n if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {\n BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;\n List memberTypes = new ArrayList<>();\n for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {\n BType member = getStructuredBindingPatternType(tupleVariable.memberVariables.get(i));\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(member);\n memberTypes.add(\n new BTupleMember(member, varSymbol));\n }\n BTupleType tupleType = new BTupleType(memberTypes);\n if (tupleVariable.restVariable != null) {\n BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);\n tupleType.restType = restArrayType.eType;\n }\n return tupleType;\n }\n\n if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {\n BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;\n\n BRecordTypeSymbol recordSymbol =\n Symbols.createRecordSymbol(0, Names.fromString(\"$anonRecordType$\" + UNDERSCORE + recordCount++),\n env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,\n VIRTUAL);\n recordSymbol.scope = new Scope(recordSymbol);\n\n LinkedHashMap fields = new LinkedHashMap<>();\n List typeDefFields = new ArrayList<>();\n\n for (int i = 0; i < recordVariable.variableList.size(); i++) {\n String fieldNameStr = recordVariable.variableList.get(i).key.value;\n Name fieldName = Names.fromString(fieldNameStr);\n BType fieldType = getStructuredBindingPatternType(\n recordVariable.variableList.get(i).valueBindingPattern);\n BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,\n recordSymbol, bindingPatternVariable.pos, VIRTUAL);\n\n \n fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));\n typeDefFields.add(ASTBuilderUtil.createVariable(null, 
fieldNameStr, fieldType, null, fieldSymbol));\n recordSymbol.scope.define(fieldName, fieldSymbol);\n }\n\n BRecordType recordVarType = new BRecordType(recordSymbol);\n recordVarType.fields = fields;\n\n \n recordVarType.restFieldType = recordVariable.restParam != null ?\n ((BRecordType) recordVariable.restParam.getBType()).restFieldType :\n symTable.anydataType;\n recordSymbol.type = recordVarType;\n recordVarType.tsymbol = recordSymbol;\n\n BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,\n recordVarType,\n bindingPatternVariable.pos);\n TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordVarType, recordSymbol, recordTypeNode, env);\n\n return recordVarType;\n }\n\n if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {\n BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;\n BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(\n SymTag.ERROR,\n Flags.PUBLIC,\n Names.fromString(\"$anonErrorType$\" + UNDERSCORE + errorCount++),\n env.enclPkg.symbol.pkgID,\n null, null, errorVariable.pos, VIRTUAL);\n BType detailType;\n if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {\n detailType = symTable.detailType;\n } else {\n detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,\n errorVariable.pos);\n\n BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);\n TypeDefBuilderHelper.createTypeDefinitionForTSymbol(detailType, detailType.tsymbol,\n recordTypeNode, env);\n }\n BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);\n errorTypeSymbol.type = errorType;\n\n TypeDefBuilderHelper.createTypeDefinitionForTSymbol(errorType, errorTypeSymbol,\n createErrorTypeNode(errorType), env);\n return errorType;\n }\n\n return bindingPatternVariable.getBType();\n }\n\n private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable 
errorVariable, BRecordType detailType) {\n List fieldList = new ArrayList<>();\n for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {\n BVarSymbol symbol = field.valueBindingPattern.symbol;\n if (symbol == null) {\n symbol = new BVarSymbol(Flags.PUBLIC, Names.fromString(field.key.value + \"$\"),\n this.env.enclPkg.packageID, symTable.pureType, null,\n field.valueBindingPattern.pos, VIRTUAL);\n }\n BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(\n field.valueBindingPattern.pos,\n symbol.name.value,\n field.valueBindingPattern.getBType(),\n field.valueBindingPattern.expr,\n symbol);\n fieldList.add(fieldVar);\n }\n return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);\n }\n\n private BType createDetailType(List detail,\n BLangSimpleVariable restDetail, int errorNo, Location pos) {\n BRecordType detailRecordType = createAnonRecordType(pos);\n\n if (restDetail == null) {\n detailRecordType.sealed = true;\n }\n\n for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {\n Name fieldName = names.fromIdNode(detailEntry.key);\n BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);\n BVarSymbol fieldSym = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecordType.tsymbol.pkgID, fieldType,\n detailRecordType.tsymbol, detailEntry.key.pos, VIRTUAL);\n detailRecordType.fields.put(fieldName.value, new BField(fieldName, detailEntry.key.pos, fieldSym));\n detailRecordType.tsymbol.scope.define(fieldName, fieldSym);\n }\n\n return detailRecordType;\n }\n\n private BRecordType createAnonRecordType(Location pos) {\n BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(\n SymTag.RECORD,\n Flags.PUBLIC,\n Names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),\n env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);\n detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);\n\n BRecordType detailRecordType = new 
BRecordType(detailRecordTypeSymbol);\n detailRecordType.restFieldType = symTable.anydataType;\n return detailRecordType;\n }\n\n BLangErrorType createErrorTypeNode(BErrorType errorType) {\n BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();\n errorTypeNode.setBType(errorType);\n return errorTypeNode;\n }\n\n private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,\n BLangExpression expression) {\n\n BLangBinaryExpr binaryExpr;\n if (NodeKind.GROUP_EXPR == expression.getKind()) {\n return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);\n }\n\n if (NodeKind.BINARY_EXPR == expression.getKind()) {\n binaryExpr = (BLangBinaryExpr) expression;\n BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);\n BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);\n\n binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,\n (BOperatorSymbol) symResolver\n .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));\n } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF\n && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {\n BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();\n anyType.setBType(symTable.anyType);\n anyType.typeKind = TypeKind.ANY;\n return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);\n } else {\n binaryExpr = ASTBuilderUtil\n .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);\n BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),\n expression.getBType());\n if (opSymbol == symTable.notFoundSymbol) {\n opSymbol = symResolver\n .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),\n binaryExpr, env);\n }\n binaryExpr.opSymbol = (BOperatorSymbol) 
opSymbol;\n }\n return binaryExpr;\n }\n\n private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {\n return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);\n }\n\n private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {\n BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();\n varRef.pos = variable.pos;\n varRef.variableName = variable.name;\n varRef.symbol = variable.symbol;\n varRef.setBType(variable.getBType());\n\n BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();\n assignmentStmt.expr = variable.expr;\n assignmentStmt.pos = variable.pos;\n assignmentStmt.setVariable(varRef);\n return assignmentStmt;\n }\n\n private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,\n BVarSymbol selfSymbol) {\n return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(), selfSymbol,\n variable.name);\n }\n\n private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,\n BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,\n BLangIdentifier fieldName) {\n BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);\n BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, fieldName);\n fieldAccess.symbol = fieldSymbol;\n fieldAccess.setBType(fieldType);\n fieldAccess.isStoreOnCreation = true;\n\n BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();\n \n expr.pos = this.symTable.builtinPos;\n fieldName.pos = this.symTable.builtinPos;\n fieldSymbol.pos = this.symTable.builtinPos;\n assignmentStmt.expr = expr;\n assignmentStmt.pos = function.pos;\n assignmentStmt.setVariable(fieldAccess);\n\n SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, 
env);\n return rewrite(assignmentStmt, initFuncEnv);\n }\n\n private boolean safeNavigate(BLangAccessExpression accessExpr) {\n if (accessExpr.isLValue || accessExpr.expr == null) {\n return false;\n }\n\n if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {\n return true;\n }\n\n NodeKind kind = accessExpr.expr.getKind();\n if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||\n kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {\n return safeNavigate((BLangAccessExpression) accessExpr.expr);\n }\n\n return false;\n }\n\n private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {\n BType originalExprType = accessExpr.getBType();\n \n String matchTempResultVarName = GEN_VAR_PREFIX.value + \"temp_result\";\n BLangSimpleVariable tempResultVar =\n ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,\n new BVarSymbol(0, Names.fromString(matchTempResultVarName),\n this.env.scope.owner.pkgID, accessExpr.getBType(),\n this.env.scope.owner, accessExpr.pos, VIRTUAL));\n BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);\n BLangVariableReference tempResultVarRef =\n ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);\n\n \n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(accessExpr.pos);\n blockStmt.stmts.add(tempResultVarDef);\n handleSafeNavigation(blockStmt, accessExpr, accessExpr.getBType(), tempResultVar);\n\n \n BLangMatchStatement matchStmt = this.matchStmtStack.firstElement();\n blockStmt.stmts.add(matchStmt);\n BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);\n stmtExpression.setBType(originalExprType);\n \n this.matchStmtStack = new Stack<>();\n this.accessExprStack = new Stack<>();\n this.successClause = null;\n this.safeNavigationAssignment = null;\n return stmtExpression;\n }\n\n private void handleSafeNavigation(BLangBlockStmt blockStmt, 
BLangAccessExpression accessExpr, BType type,\n BLangSimpleVariable tempResultVar) {\n if (accessExpr.expr == null) {\n return;\n }\n\n \n NodeKind kind = accessExpr.expr.getKind();\n if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {\n handleSafeNavigation(blockStmt, (BLangAccessExpression) accessExpr.expr, type, tempResultVar);\n }\n\n if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {\n BType originalType = Types.getImpliedType(accessExpr.originalType);\n if (isMapJson(originalType, false)) {\n accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));\n } else {\n accessExpr.setBType(originalType);\n }\n if (this.safeNavigationAssignment != null) {\n this.safeNavigationAssignment.expr =\n types.addConversionExprIfRequired(accessExpr, tempResultVar.getBType());\n }\n return;\n }\n\n /*\n * If the field access is a safe navigation, create a match expression.\n * Then chain the current expression as the success-pattern of the parent\n * match expr, if available.\n * eg:\n * x but { <--- parent match expr\n * error e => e,\n * T t => t.y but { <--- current expr\n * error e => e,\n * R r => r.z\n * }\n * }\n */\n\n BLangExpression matchExpr = accessExpr.expr;\n BLangSimpleVariableDef variableDef =\n createVarDef(\"$varDef$\", matchExpr.getBType(), matchExpr, matchExpr.pos);\n BLangSimpleVarRef simpleVarRef = ASTBuilderUtil.createVariableRef(variableDef.pos, variableDef.var.symbol);\n accessExpr.expr = simpleVarRef;\n blockStmt.stmts.add(variableDef);\n Location pos = accessExpr.pos;\n BLangMatchStatement matchStmt = ASTBuilderUtil.createMatchStatement(simpleVarRef, pos);\n BType matchExprType = accessExpr.expr.getBType();\n\n boolean isAllTypesRecords = false;\n LinkedHashSet memTypes = new LinkedHashSet<>();\n BType referredType = Types.getImpliedType(matchExpr.getBType());\n if (referredType.tag == TypeTags.UNION) {\n memTypes = new LinkedHashSet<>(((BUnionType) 
referredType).getMemberTypes());\n isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);\n }\n\n \n if (accessExpr.nilSafeNavigation) {\n matchStmt.addMatchClause(getMatchNullClause(matchExpr, tempResultVar));\n matchStmt.setBType(type);\n memTypes.remove(symTable.nilType);\n }\n\n \n if (accessExpr.errorSafeNavigation) {\n matchStmt.addMatchClause(getMatchErrorClause(matchExpr, tempResultVar));\n matchStmt.setBType(type);\n matchStmt.pos = pos;\n memTypes.remove(symTable.errorType);\n }\n\n BLangMatchClause successClause = null;\n Name field = getFieldName(accessExpr);\n if (field == Names.EMPTY) {\n successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,\n accessExpr.errorSafeNavigation);\n matchStmt.addMatchClause(successClause);\n pushToMatchStatementStack(matchStmt, successClause, pos);\n return;\n }\n\n if (isAllTypesRecords) {\n for (BType memberType : memTypes) {\n BRecordType recordType = (BRecordType) Types.getImpliedType(memberType);\n if (recordType.fields.containsKey(field.value) || !recordType.sealed) {\n successClause = getSuccessPatternClause(memberType, matchExpr, accessExpr, tempResultVar,\n accessExpr.errorSafeNavigation);\n matchStmt.addMatchClause(successClause);\n }\n }\n matchStmt.addMatchClause(getMatchAllAndNilReturnClause(matchExpr, tempResultVar));\n pushToMatchStatementStack(matchStmt, successClause, pos);\n return;\n }\n\n \n successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,\n accessExpr.errorSafeNavigation);\n matchStmt.addMatchClause(successClause);\n pushToMatchStatementStack(matchStmt, successClause, pos);\n }\n\n private boolean isMapJson(BType originalType, boolean fromMap) {\n originalType = Types.getImpliedType(originalType);\n return ((originalType.tag == TypeTags.MAP) && isMapJson(((BMapType) originalType).getConstraint(), true))\n || ((originalType.tag == TypeTags.JSON) && fromMap);\n }\n\n private void 
pushToMatchStatementStack(BLangMatchStatement matchStmt, BLangMatchClause successClause,\n Location pos) {\n this.matchStmtStack.push(matchStmt);\n if (this.successClause != null) {\n this.successClause.blockStmt = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(matchStmt));\n }\n this.successClause = successClause;\n }\n\n private Name getFieldName(BLangAccessExpression accessExpr) {\n Name field = Names.EMPTY;\n if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {\n field = new Name(((BLangFieldBasedAccess) accessExpr).field.value);\n } else if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {\n BLangExpression indexBasedExpression = ((BLangIndexBasedAccess) accessExpr).indexExpr;\n if (indexBasedExpression.getKind() == NodeKind.LITERAL) {\n field = new Name(((BLangLiteral) indexBasedExpression).value.toString());\n }\n }\n return field;\n }\n\n private boolean isAllTypesAreRecordsInUnion(LinkedHashSet memTypes) {\n for (BType memType : memTypes) {\n int typeTag = Types.getImpliedType(memType).tag;\n if (typeTag != TypeTags.RECORD && typeTag != TypeTags.ERROR && typeTag != TypeTags.NIL) {\n return false;\n }\n }\n return true;\n }\n\n private BLangMatchClause getMatchErrorClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {\n String errorPatternVarName = GEN_VAR_PREFIX.value + \"t_match_error\";\n Location pos = matchExpr.pos;\n BVarSymbol errorPatternVarSymbol = new BVarSymbol(0, Names.fromString(errorPatternVarName),\n this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);\n BLangCaptureBindingPattern captureBindingPattern =\n ASTBuilderUtil.createCaptureBindingPattern(errorPatternVarSymbol, errorPatternVarName);\n BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =\n ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);\n\n BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, errorPatternVarSymbol);\n 
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);\n BLangAssignment assignmentStmt =\n ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);\n BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));\n BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getErrorTypeNode());\n matchGuard.setBType(symTable.booleanType);\n\n return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);\n }\n\n private BLangMatchClause getMatchNullClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {\n String nullPatternVarName = GEN_VAR_PREFIX.value + \"t_match_null\";\n Location pos = matchExpr.pos;\n BVarSymbol nullPatternVarSymbol = new BVarSymbol(0, Names.fromString(nullPatternVarName),\n this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);\n BLangCaptureBindingPattern captureBindingPattern =\n ASTBuilderUtil.createCaptureBindingPattern(nullPatternVarSymbol, nullPatternVarName);\n BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =\n ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);\n\n BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, nullPatternVarSymbol);\n BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);\n BLangAssignment assignmentStmt =\n ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);\n BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));\n BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getNillTypeNode());\n matchGuard.setBType(symTable.booleanType);\n\n return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);\n 
}\n\n private BLangMatchClause getMatchAllAndNilReturnClause(BLangExpression matchExpr,\n BLangSimpleVariable tempResultVar) {\n Location pos = matchExpr.pos;\n BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);\n BLangAssignment assignmentStmt =\n ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, createLiteral(pos, symTable.nilType,\n Names.NIL_VALUE));\n BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));\n\n BLangWildCardMatchPattern wildCardMatchPattern = ASTBuilderUtil.createWildCardMatchPattern(matchExpr);\n wildCardMatchPattern.setBType(symTable.anyType);\n return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, null, wildCardMatchPattern);\n }\n\n private BLangMatchClause getSuccessPatternClause(BType type, BLangExpression matchExpr,\n BLangAccessExpression accessExpr,\n BLangSimpleVariable tempResultVar, boolean liftError) {\n type = types.getSafeType(type, true, liftError);\n String successPatternVarName = GEN_VAR_PREFIX.value + \"t_match_success\";\n\n Location pos = accessExpr.pos;\n BVarSymbol successPatternSymbol;\n if (Types.getImpliedType(type).tag == TypeTags.INVOKABLE) {\n successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, Names.fromString(successPatternVarName),\n this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);\n } else {\n successPatternSymbol = new BVarSymbol(0, Names.fromString(successPatternVarName),\n this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);\n }\n\n BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,\n type, null, successPatternSymbol);\n BLangSimpleVarRef successPatternVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos,\n successPatternVar.symbol);\n\n BLangCaptureBindingPattern captureBindingPattern =\n 
ASTBuilderUtil.createCaptureBindingPattern(successPatternSymbol, successPatternVarName);\n BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =\n ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);\n\n BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);\n if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {\n ((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;\n }\n if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {\n ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =\n ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;\n }\n\n tempAccessExpr.expr = types.addConversionExprIfRequired(successPatternVarRef, type);\n tempAccessExpr.errorSafeNavigation = false;\n tempAccessExpr.nilSafeNavigation = false;\n accessExpr.cloneRef = null;\n\n \n \n \n if (TypeTags.isXMLTypeTag(Types.getImpliedType(tempAccessExpr.expr.getBType()).tag)) {\n \n tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,\n symTable.nilType));\n } else {\n tempAccessExpr.setBType(accessExpr.originalType);\n }\n tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;\n\n BLangVariableReference tempResultVarRef =\n ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);\n\n BLangExpression assignmentRhsExpr =\n types.addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());\n BLangAssignment assignmentStmt =\n ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr);\n BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, this.env.scope,\n Lists.of(assignmentStmt));\n BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, successPatternVarRef, createTypeNode(type));\n matchGuard.setBType(symTable.booleanType);\n\n return 
ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);\n }\n\n BLangValueType getNillTypeNode() {\n BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();\n nillTypeNode.typeKind = TypeKind.NIL;\n nillTypeNode.setBType(symTable.nilType);\n return nillTypeNode;\n }\n\n BLangValueType createTypeNode(BType type) {\n BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();\n typeNode.typeKind = type.getKind();\n typeNode.setBType(type);\n return typeNode;\n }\n\n private BLangValueExpression cloneExpression(BLangExpression expr) {\n switch (expr.getKind()) {\n case SIMPLE_VARIABLE_REF:\n return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);\n case FIELD_BASED_ACCESS_EXPR:\n case INDEX_BASED_ACCESS_EXPR:\n return cloneAccessExpr((BLangAccessExpression) expr);\n default:\n throw new IllegalStateException();\n }\n }\n\n private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {\n if (originalAccessExpr.expr == null) {\n return originalAccessExpr;\n }\n\n BLangExpression varRef;\n NodeKind kind = originalAccessExpr.expr.getKind();\n if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {\n varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);\n } else {\n varRef = cloneExpression(originalAccessExpr.expr);\n }\n varRef.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false));\n\n BLangAccessExpression accessExpr;\n switch (originalAccessExpr.getKind()) {\n case FIELD_BASED_ACCESS_EXPR:\n accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,\n ((BLangFieldBasedAccess) originalAccessExpr).field);\n break;\n case INDEX_BASED_ACCESS_EXPR:\n accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,\n ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);\n break;\n default:\n throw new IllegalStateException();\n }\n\n accessExpr.originalType = 
originalAccessExpr.originalType;\n accessExpr.pos = originalAccessExpr.pos;\n accessExpr.isLValue = originalAccessExpr.isLValue;\n accessExpr.symbol = originalAccessExpr.symbol;\n accessExpr.errorSafeNavigation = false;\n accessExpr.nilSafeNavigation = false;\n\n \n \n \n accessExpr.setBType(originalAccessExpr.originalType);\n return accessExpr;\n }\n\n private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {\n BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);\n return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.ADD,\n (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,\n symTable.intType,\n symTable.intType));\n }\n\n private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {\n BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);\n return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.SUB,\n (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,\n symTable.intType,\n symTable.intType));\n }\n\n BLangLiteral getBooleanLiteral(boolean value) {\n BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();\n literal.value = value;\n literal.setBType(symTable.booleanType);\n literal.pos = symTable.builtinPos;\n return literal;\n }\n\n private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {\n BType returnType = symTable.nilType;\n\n \n if (classDefinition.initFunction != null) {\n returnType = classDefinition.initFunction.getBType().getReturnType();\n }\n\n BLangFunction initFunction =\n TypeDefBuilderHelper.createInitFunctionForStructureType(classDefinition.symbol, env, names,\n GENERATED_INIT_SUFFIX, classDefinition.getBType(), returnType);\n BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);\n 
typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol,\n (BInvokableType) initFunction.getBType(), null);\n classDefinition.generatedInitFunction = initFunction;\n initFunction.returnTypeNode.setBType(returnType);\n return initFunction;\n }\n\n private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {\n /*\n * Desugar (lhsExpr && rhsExpr) to following if-else:\n *\n * logical AND:\n * -------------\n * T $result$;\n * if (lhsExpr) {\n * $result$ = rhsExpr;\n * } else {\n * $result$ = false;\n * }\n *\n * logical OR:\n * -------------\n * T $result$;\n * if (lhsExpr) {\n * $result$ = true;\n * } else {\n * $result$ = rhsExpr;\n * }\n *\n */\n BLangSimpleVariableDef resultVarDef = createVarDef(\"$result$\", binaryExpr.getBType(), null,\n symTable.builtinPos);\n BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);\n BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);\n\n \n BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,\n resultVarDef.var.symbol);\n BLangExpression thenResult;\n if (binaryExpr.opKind == OperatorKind.AND) {\n thenResult = binaryExpr.rhsExpr;\n } else {\n thenResult = getBooleanLiteral(true);\n }\n BLangAssignment thenAssignment =\n ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);\n thenBody.addStatement(thenAssignment);\n\n \n BLangExpression elseResult;\n BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,\n resultVarDef.var.symbol);\n if (binaryExpr.opKind == OperatorKind.AND) {\n elseResult = getBooleanLiteral(false);\n } else {\n elseResult = binaryExpr.rhsExpr;\n }\n BLangAssignment elseAssignment =\n ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);\n elseBody.addStatement(elseAssignment);\n\n \n BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, 
resultVarDef.var.symbol);\n BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);\n\n BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));\n BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);\n stmtExpr.setBType(binaryExpr.getBType());\n\n result = rewriteExpr(stmtExpr);\n }\n\n protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {\n switch (expression.getKind()) {\n case TYPE_INIT_EXPR:\n case RECORD_LITERAL_EXPR:\n case OBJECT_CTOR_EXPRESSION:\n return true;\n case CHECK_EXPR:\n return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr);\n case TYPE_CONVERSION_EXPR:\n return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr);\n default:\n return false;\n }\n }\n\n private BType getRestType(BInvokableSymbol invokableSymbol) {\n if (invokableSymbol != null && invokableSymbol.restParam != null) {\n return invokableSymbol.restParam.type;\n }\n return null;\n }\n\n private BType getRestType(BLangFunction function) {\n if (function != null && function.restParam != null) {\n return function.restParam.getBType();\n }\n return null;\n }\n\n private BVarSymbol getRestSymbol(BLangFunction function) {\n if (function != null && function.restParam != null) {\n return function.restParam.symbol;\n }\n return null;\n }\n\n private boolean isComputedKey(RecordLiteralNode.RecordField field) {\n if (!field.isKeyValueField()) {\n return false;\n }\n return ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;\n }\n\n private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {\n List fields = mappingConstructorExpr.fields;\n\n BType type = mappingConstructorExpr.getBType();\n Location pos = mappingConstructorExpr.pos;\n\n List rewrittenFields = new ArrayList<>(fields.size());\n\n for 
(RecordLiteralNode.RecordField field : fields) {\n if (field.isKeyValueField()) {\n BLangRecordLiteral.BLangRecordKeyValueField keyValueField =\n (BLangRecordLiteral.BLangRecordKeyValueField) field;\n\n BLangRecordLiteral.BLangRecordKey key = keyValueField.key;\n BLangExpression origKey = key.expr;\n BLangExpression keyExpr;\n if (key.computedKey) {\n keyExpr = origKey;\n } else {\n keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,\n Utils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :\n ((BLangLiteral) origKey);\n }\n\n BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =\n ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),\n rewriteExpr(keyValueField.valueExpr));\n rewrittenField.pos = keyValueField.pos;\n rewrittenField.key.pos = key.pos;\n rewrittenFields.add(rewrittenField);\n } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {\n BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;\n rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(\n rewriteExpr(createStringLiteral(pos, Utils.unescapeJava(varRefField.variableName.value))),\n rewriteExpr(varRefField)));\n } else {\n BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =\n (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;\n spreadOpField.expr = rewriteExpr(spreadOpField.expr);\n rewrittenFields.add(spreadOpField);\n }\n }\n\n fields.clear();\n return new BLangMapLiteral(pos, type, rewrittenFields);\n }\n\n protected void addTransactionInternalModuleImport() {\n if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) {\n BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();\n List pkgNameComps = new ArrayList<>();\n pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));\n importDcl.pkgNameComps = pkgNameComps;\n importDcl.pos = env.enclPkg.symbol.pos;\n importDcl.orgName = 
ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);\n importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, \"trx\");\n importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, \"\");\n importDcl.symbol = symTable.internalTransactionModuleSymbol;\n env.enclPkg.imports.add(importDcl);\n env.enclPkg.symbol.imports.add(importDcl.symbol);\n }\n }\n}", "context_before": "class definition node for which the initializer is created\n * @param env The env for the type node\n * @return The generated initializer method\n */\n private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {\n BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);\n if (classDefinition.initFunction == null) {\n return rewrite(generatedInitFunc, env);\n }\n\n return wireUpGeneratedInitFunction(generatedInitFunc,\n (BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);\n }", "context_after": "class definition node for which the initializer is created\n * @param env The env for the type node\n * @return The generated initializer method\n */\n private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {\n BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);\n if (classDefinition.initFunction == null) {\n return rewrite(generatedInitFunc, env);\n }\n\n return wireUpGeneratedInitFunction(generatedInitFunc,\n (BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);\n }" }, { "comment": "The purpose of this `TODO` is to avoid conflict between `workerResourceSpec` and `process.size`/`flink.size` in `flinkConfig`. It is not about forbidden workers with different sizes. Actually, one of the main purpose of this PR is to make the RMs not assuming workers have the same size. 
I was thinking about change the `Configuration` at creating the `SlotManager`, where we known which implementation of `SlotManager` is used and unset flink/process size if the plugin supports dynamic worker sizes.", "method_body": "private KubernetesTaskManagerParameters createKubernetesTaskManagerParameters(WorkerResourceSpec workerResourceSpec) {\n\t\t\n\t\tfinal TaskExecutorProcessSpec taskExecutorProcessSpec =\n\t\t\tTaskExecutorProcessUtils.processSpecFromWorkerResourceSpec(flinkConfig, workerResourceSpec);\n\n\t\tfinal String podName = String.format(\n\t\t\tTASK_MANAGER_POD_FORMAT,\n\t\t\tclusterId,\n\t\t\tcurrentMaxAttemptId,\n\t\t\t++currentMaxPodId);\n\n\t\tfinal ContaineredTaskManagerParameters taskManagerParameters =\n\t\t\tContaineredTaskManagerParameters.create(flinkConfig, taskExecutorProcessSpec);\n\n\t\tfinal String dynamicProperties =\n\t\t\tBootstrapTools.getDynamicPropertiesAsString(flinkClientConfig, flinkConfig);\n\n\t\treturn new KubernetesTaskManagerParameters(\n\t\t\tflinkConfig,\n\t\t\tpodName,\n\t\t\tdynamicProperties,\n\t\t\ttaskManagerParameters);\n\t}", "target_code": "", "method_body_after": "private KubernetesTaskManagerParameters createKubernetesTaskManagerParameters(WorkerResourceSpec workerResourceSpec) {\n\t\tfinal TaskExecutorProcessSpec taskExecutorProcessSpec =\n\t\t\tTaskExecutorProcessUtils.processSpecFromWorkerResourceSpec(flinkConfig, workerResourceSpec);\n\n\t\tfinal String podName = String.format(\n\t\t\tTASK_MANAGER_POD_FORMAT,\n\t\t\tclusterId,\n\t\t\tcurrentMaxAttemptId,\n\t\t\t++currentMaxPodId);\n\n\t\tfinal ContaineredTaskManagerParameters taskManagerParameters =\n\t\t\tContaineredTaskManagerParameters.create(flinkConfig, taskExecutorProcessSpec);\n\n\t\tfinal String dynamicProperties =\n\t\t\tBootstrapTools.getDynamicPropertiesAsString(flinkClientConfig, flinkConfig);\n\n\t\treturn new 
KubernetesTaskManagerParameters(\n\t\t\tflinkConfig,\n\t\t\tpodName,\n\t\t\tdynamicProperties,\n\t\t\ttaskManagerParameters);\n\t}", "context_before": "class KubernetesResourceManager extends ActiveResourceManager\n\timplements FlinkKubeClient.PodCallbackHandler {\n\n\tprivate static final Logger LOG = LoggerFactory.getLogger(KubernetesResourceManager.class);\n\n\t/** The taskmanager pod name pattern is {clusterId}-{taskmanager}-{attemptId}-{podIndex}. */\n\tprivate static final String TASK_MANAGER_POD_FORMAT = \"%s-taskmanager-%d-%d\";\n\n\tprivate final Map workerNodes = new HashMap<>();\n\n\t/** When ResourceManager failover, the max attempt should recover. */\n\tprivate long currentMaxAttemptId = 0;\n\n\t/** Current max pod index. When creating a new pod, it should increase one. */\n\tprivate long currentMaxPodId = 0;\n\n\tprivate final String clusterId;\n\n\tprivate final FlinkKubeClient kubeClient;\n\n\t/** Map from pod name to worker resource. */\n\tprivate final Map podWorkerResources;\n\n\tpublic KubernetesResourceManager(\n\t\t\tRpcService rpcService,\n\t\t\tString resourceManagerEndpointId,\n\t\t\tResourceID resourceId,\n\t\t\tConfiguration flinkConfig,\n\t\t\tHighAvailabilityServices highAvailabilityServices,\n\t\t\tHeartbeatServices heartbeatServices,\n\t\t\tSlotManager slotManager,\n\t\t\tJobLeaderIdService jobLeaderIdService,\n\t\t\tClusterInformation clusterInformation,\n\t\t\tFatalErrorHandler fatalErrorHandler,\n\t\t\tResourceManagerMetricGroup resourceManagerMetricGroup) {\n\t\tsuper(\n\t\t\tflinkConfig,\n\t\t\tSystem.getenv(),\n\t\t\trpcService,\n\t\t\tresourceManagerEndpointId,\n\t\t\tresourceId,\n\t\t\thighAvailabilityServices,\n\t\t\theartbeatServices,\n\t\t\tslotManager,\n\t\t\tjobLeaderIdService,\n\t\t\tclusterInformation,\n\t\t\tfatalErrorHandler,\n\t\t\tresourceManagerMetricGroup);\n\t\tthis.clusterId = flinkConfig.getString(KubernetesConfigOptions.CLUSTER_ID);\n\n\t\tthis.kubeClient = 
createFlinkKubeClient();\n\n\t\tthis.podWorkerResources = new HashMap<>();\n\t}\n\n\t@Override\n\tprotected Configuration loadClientConfiguration() {\n\t\treturn GlobalConfiguration.loadConfiguration();\n\t}\n\n\t@Override\n\tprotected void initialize() throws ResourceManagerException {\n\t\trecoverWorkerNodesFromPreviousAttempts();\n\n\t\tkubeClient.watchPodsAndDoCallback(KubernetesUtils.getTaskManagerLabels(clusterId), this);\n\t}\n\n\t@Override\n\tpublic CompletableFuture onStop() {\n\t\t\n\t\tThrowable exception = null;\n\n\t\ttry {\n\t\t\tkubeClient.close();\n\t\t} catch (Throwable t) {\n\t\t\texception = t;\n\t\t}\n\n\t\treturn getStopTerminationFutureOrCompletedExceptionally(exception);\n\t}\n\n\t@Override\n\tprotected void internalDeregisterApplication(ApplicationStatus finalStatus, @Nullable String diagnostics) {\n\t\tLOG.info(\n\t\t\t\"Stopping kubernetes cluster, clusterId: {}, diagnostics: {}\",\n\t\t\tclusterId,\n\t\t\tdiagnostics == null ? \"\" : diagnostics);\n\t\tkubeClient.stopAndCleanupCluster(clusterId);\n\t}\n\n\t@Override\n\tpublic boolean startNewWorker(WorkerResourceSpec workerResourceSpec) {\n\t\tLOG.info(\"Starting new worker with worker resource spec, {}\", workerResourceSpec);\n\t\trequestKubernetesPod(workerResourceSpec);\n\t\treturn true;\n\t}\n\n\t@Override\n\tprotected KubernetesWorkerNode workerStarted(ResourceID resourceID) {\n\t\treturn workerNodes.get(resourceID);\n\t}\n\n\t@Override\n\tpublic boolean stopWorker(final KubernetesWorkerNode worker) {\n\t\tLOG.info(\"Stopping Worker {}.\", worker.getResourceID());\n\t\tremoveWorkerNodeAndResourceSpec(worker.getResourceID());\n\t\ttry {\n\t\t\tkubeClient.stopPod(worker.getResourceID().toString());\n\t\t} catch (Exception e) {\n\t\t\tkubeClient.handleException(e);\n\t\t\treturn false;\n\t\t}\n\t\treturn true;\n\t}\n\n\t@Override\n\tpublic void onAdded(List pods) {\n\t\trunAsync(() -> {\n\t\t\tpods.forEach(pod -> {\n\t\t\t\tWorkerResourceSpec workerResourceSpec = 
podWorkerResources.get(pod.getName());\n\t\t\t\tfinal int pendingNum = pendingWorkerCounter.getNum(workerResourceSpec);\n\t\t\t\tif (pendingNum > 0) {\n\t\t\t\t\tpendingWorkerCounter.decreaseAndGet(workerResourceSpec);\n\t\t\t\t\tfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(new ResourceID(pod.getName()));\n\t\t\t\t\tworkerNodes.putIfAbsent(worker.getResourceID(), worker);\n\t\t\t\t}\n\t\t\t\tlog.info(\"Received new TaskManager pod: {}\", pod.getName());\n\t\t\t});\n\t\t\tlog.info(\"Received {} new TaskManager pods. Remaining pending pod requests: {}\",\n\t\t\t\tpods.size(), pendingWorkerCounter.getTotalNum());\n\t\t});\n\t}\n\n\t@Override\n\tpublic void onModified(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodIfTerminated));\n\t}\n\n\t@Override\n\tpublic void onDeleted(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodIfTerminated));\n\t}\n\n\t@Override\n\tpublic void onError(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodIfTerminated));\n\t}\n\n\t@VisibleForTesting\n\tMap getWorkerNodes() {\n\t\treturn workerNodes;\n\t}\n\n\tprivate void recoverWorkerNodesFromPreviousAttempts() throws ResourceManagerException {\n\t\tfinal List podList = kubeClient.getPodsWithLabels(KubernetesUtils.getTaskManagerLabels(clusterId));\n\t\tfor (KubernetesPod pod : podList) {\n\t\t\tfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(new ResourceID(pod.getName()));\n\t\t\tworkerNodes.put(worker.getResourceID(), worker);\n\t\t\tfinal long attempt = worker.getAttempt();\n\t\t\tif (attempt > currentMaxAttemptId) {\n\t\t\t\tcurrentMaxAttemptId = attempt;\n\t\t\t}\n\t\t}\n\n\t\tlog.info(\"Recovered {} pods from previous attempts, current attempt id is {}.\",\n\t\t\tworkerNodes.size(),\n\t\t\t++currentMaxAttemptId);\n\t}\n\n\tprivate void requestKubernetesPod(WorkerResourceSpec workerResourceSpec) {\n\t\tfinal KubernetesTaskManagerParameters parameters 
=\n\t\t\tcreateKubernetesTaskManagerParameters(workerResourceSpec);\n\n\t\tpodWorkerResources.put(parameters.getPodName(), workerResourceSpec);\n\t\tfinal int pendingWorkerNum = pendingWorkerCounter.increaseAndGet(workerResourceSpec);\n\n\t\tlog.info(\"Requesting new TaskManager pod with <{},{}>. Number pending requests {}.\",\n\t\t\tparameters.getTaskManagerMemoryMB(),\n\t\t\tparameters.getTaskManagerCPU(),\n\t\t\tpendingWorkerNum);\n\t\tlog.info(\"TaskManager {} will be started with {}.\", parameters.getPodName(), workerResourceSpec);\n\n\t\tfinal KubernetesPod taskManagerPod =\n\t\t\tKubernetesTaskManagerFactory.createTaskManagerComponent(parameters);\n\t\tkubeClient.createTaskManagerPod(taskManagerPod);\n\t}\n\n\t\n\n\t/**\n\t * Request new pod if pending pods cannot satisfy pending slot requests.\n\t */\n\tprivate void requestKubernetesPodIfRequired(WorkerResourceSpec workerResourceSpec) {\n\t\tfinal int requiredTaskManagers = getPendingWorkerNums().get(workerResourceSpec);\n\t\tfinal int pendingWorkerNum = pendingWorkerCounter.getNum(workerResourceSpec);\n\n\t\tif (requiredTaskManagers > pendingWorkerNum) {\n\t\t\trequestKubernetesPod(workerResourceSpec);\n\t\t}\n\t}\n\n\tprivate void removePodIfTerminated(KubernetesPod pod) {\n\t\tif (pod.isTerminated()) {\n\t\t\tkubeClient.stopPod(pod.getName());\n\t\t\tfinal WorkerResourceSpec workerResourceSpec = removeWorkerNodeAndResourceSpec(new ResourceID(pod.getName()));\n\t\t\tif (workerResourceSpec != null) {\n\t\t\t\trequestKubernetesPodIfRequired(workerResourceSpec);\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate WorkerResourceSpec removeWorkerNodeAndResourceSpec(ResourceID resourceId) {\n\t\tfinal KubernetesWorkerNode kubernetesWorkerNode = workerNodes.remove(resourceId);\n\t\treturn kubernetesWorkerNode != null ? 
podWorkerResources.remove(resourceId.toString()) : null;\n\t}\n\n\tprotected FlinkKubeClient createFlinkKubeClient() {\n\t\treturn KubeClientFactory.fromConfiguration(flinkConfig);\n\t}\n\n\t@Override\n\tprotected double getCpuCores(Configuration configuration) {\n\t\treturn TaskExecutorProcessUtils.getCpuCoresWithFallbackConfigOption(configuration, KubernetesConfigOptions.TASK_MANAGER_CPU);\n\t}\n}", "context_after": "class KubernetesResourceManager extends ActiveResourceManager\n\timplements FlinkKubeClient.PodCallbackHandler {\n\n\tprivate static final Logger LOG = LoggerFactory.getLogger(KubernetesResourceManager.class);\n\n\t/** The taskmanager pod name pattern is {clusterId}-{taskmanager}-{attemptId}-{podIndex}. */\n\tprivate static final String TASK_MANAGER_POD_FORMAT = \"%s-taskmanager-%d-%d\";\n\n\tprivate final Map workerNodes = new HashMap<>();\n\n\t/** When ResourceManager failover, the max attempt should recover. */\n\tprivate long currentMaxAttemptId = 0;\n\n\t/** Current max pod index. When creating a new pod, it should increase one. */\n\tprivate long currentMaxPodId = 0;\n\n\tprivate final String clusterId;\n\n\tprivate final FlinkKubeClient kubeClient;\n\n\tprivate final KubernetesResourceManagerConfiguration configuration;\n\n\t/** Map from pod name to worker resource. 
*/\n\tprivate final Map podWorkerResources;\n\n\tpublic KubernetesResourceManager(\n\t\t\tRpcService rpcService,\n\t\t\tResourceID resourceId,\n\t\t\tConfiguration flinkConfig,\n\t\t\tHighAvailabilityServices highAvailabilityServices,\n\t\t\tHeartbeatServices heartbeatServices,\n\t\t\tSlotManager slotManager,\n\t\t\tResourceManagerPartitionTrackerFactory clusterPartitionTrackerFactory,\n\t\t\tJobLeaderIdService jobLeaderIdService,\n\t\t\tClusterInformation clusterInformation,\n\t\t\tFatalErrorHandler fatalErrorHandler,\n\t\t\tResourceManagerMetricGroup resourceManagerMetricGroup,\n\t\t\tFlinkKubeClient kubeClient,\n\t\t\tKubernetesResourceManagerConfiguration configuration) {\n\t\tsuper(\n\t\t\tflinkConfig,\n\t\t\tSystem.getenv(),\n\t\t\trpcService,\n\t\t\tresourceId,\n\t\t\thighAvailabilityServices,\n\t\t\theartbeatServices,\n\t\t\tslotManager,\n\t\t\tclusterPartitionTrackerFactory,\n\t\t\tjobLeaderIdService,\n\t\t\tclusterInformation,\n\t\t\tfatalErrorHandler,\n\t\t\tresourceManagerMetricGroup);\n\t\tthis.clusterId = configuration.getClusterId();\n\t\tthis.kubeClient = kubeClient;\n\t\tthis.configuration = configuration;\n\t\tthis.podWorkerResources = new HashMap<>();\n\t}\n\n\t@Override\n\tprotected Configuration loadClientConfiguration() {\n\t\treturn GlobalConfiguration.loadConfiguration();\n\t}\n\n\t@Override\n\tprotected void initialize() throws ResourceManagerException {\n\t\trecoverWorkerNodesFromPreviousAttempts();\n\n\t\tkubeClient.watchPodsAndDoCallback(KubernetesUtils.getTaskManagerLabels(clusterId), this);\n\t}\n\n\t@Override\n\tpublic CompletableFuture onStop() {\n\t\t\n\t\tThrowable exception = null;\n\n\t\ttry {\n\t\t\tkubeClient.close();\n\t\t} catch (Throwable t) {\n\t\t\texception = t;\n\t\t}\n\n\t\treturn getStopTerminationFutureOrCompletedExceptionally(exception);\n\t}\n\n\t@Override\n\tprotected void internalDeregisterApplication(ApplicationStatus finalStatus, @Nullable String diagnostics) {\n\t\tLOG.info(\n\t\t\t\"Stopping kubernetes 
cluster, clusterId: {}, diagnostics: {}\",\n\t\t\tclusterId,\n\t\t\tdiagnostics == null ? \"\" : diagnostics);\n\t\tkubeClient.stopAndCleanupCluster(clusterId);\n\t}\n\n\t@Override\n\tpublic boolean startNewWorker(WorkerResourceSpec workerResourceSpec) {\n\t\tLOG.info(\"Starting new worker with worker resource spec, {}\", workerResourceSpec);\n\t\trequestKubernetesPod(workerResourceSpec);\n\t\treturn true;\n\t}\n\n\t@Override\n\tprotected KubernetesWorkerNode workerStarted(ResourceID resourceID) {\n\t\treturn workerNodes.get(resourceID);\n\t}\n\n\t@Override\n\tpublic boolean stopWorker(final KubernetesWorkerNode worker) {\n\t\tfinal ResourceID resourceId = worker.getResourceID();\n\t\tLOG.info(\"Stopping Worker {}.\", resourceId);\n\t\tinternalStopPod(resourceId.toString());\n\t\treturn true;\n\t}\n\n\t@Override\n\tpublic void onAdded(List pods) {\n\t\trunAsync(() -> {\n\t\t\tint duplicatePodNum = 0;\n\t\t\tfor (KubernetesPod pod : pods) {\n\t\t\t\tfinal String podName = pod.getName();\n\t\t\t\tfinal ResourceID resourceID = new ResourceID(podName);\n\n\t\t\t\tif (workerNodes.containsKey(resourceID)) {\n\t\t\t\t\tlog.debug(\"Ignore TaskManager pod that is already added: {}\", podName);\n\t\t\t\t\t++duplicatePodNum;\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\tfinal WorkerResourceSpec workerResourceSpec = Preconditions.checkNotNull(\n\t\t\t\t\tpodWorkerResources.get(podName),\n\t\t\t\t\t\"Unrecognized pod {}. Pods from previous attempt should have already been added.\", podName);\n\n\t\t\t\tfinal int pendingNum = getNumPendingWorkersFor(workerResourceSpec);\n\t\t\t\tPreconditions.checkState(pendingNum > 0, \"Should not receive more workers than requested.\");\n\n\t\t\t\tnotifyNewWorkerAllocated(workerResourceSpec);\n\t\t\t\tfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(resourceID);\n\t\t\t\tworkerNodes.put(resourceID, worker);\n\n\t\t\t\tlog.info(\"Received new TaskManager pod: {}\", podName);\n\t\t\t}\n\t\t\tlog.info(\"Received {} new TaskManager pods. 
Remaining pending pod requests: {}\",\n\t\t\t\tpods.size() - duplicatePodNum, getNumPendingWorkers());\n\t\t});\n\t}\n\n\t@Override\n\tpublic void onModified(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodAndTryRestartIfRequired));\n\t}\n\n\t@Override\n\tpublic void onDeleted(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodAndTryRestartIfRequired));\n\t}\n\n\t@Override\n\tpublic void onError(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodAndTryRestartIfRequired));\n\t}\n\n\t@VisibleForTesting\n\tMap getWorkerNodes() {\n\t\treturn workerNodes;\n\t}\n\n\tprivate void recoverWorkerNodesFromPreviousAttempts() throws ResourceManagerException {\n\t\tfinal List podList = kubeClient.getPodsWithLabels(KubernetesUtils.getTaskManagerLabels(clusterId));\n\t\tfor (KubernetesPod pod : podList) {\n\t\t\tfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(new ResourceID(pod.getName()));\n\t\t\tworkerNodes.put(worker.getResourceID(), worker);\n\t\t\tfinal long attempt = worker.getAttempt();\n\t\t\tif (attempt > currentMaxAttemptId) {\n\t\t\t\tcurrentMaxAttemptId = attempt;\n\t\t\t}\n\t\t}\n\n\t\tlog.info(\"Recovered {} pods from previous attempts, current attempt id is {}.\",\n\t\t\tworkerNodes.size(),\n\t\t\t++currentMaxAttemptId);\n\t}\n\n\tprivate void requestKubernetesPod(WorkerResourceSpec workerResourceSpec) {\n\t\tfinal KubernetesTaskManagerParameters parameters =\n\t\t\tcreateKubernetesTaskManagerParameters(workerResourceSpec);\n\n\t\tpodWorkerResources.put(parameters.getPodName(), workerResourceSpec);\n\t\tfinal int pendingWorkerNum = notifyNewWorkerRequested(workerResourceSpec);\n\n\t\tlog.info(\"Requesting new TaskManager pod with <{},{}>. 
Number pending requests {}.\",\n\t\t\tparameters.getTaskManagerMemoryMB(),\n\t\t\tparameters.getTaskManagerCPU(),\n\t\t\tpendingWorkerNum);\n\n\t\tfinal KubernetesPod taskManagerPod =\n\t\t\tKubernetesTaskManagerFactory.buildTaskManagerKubernetesPod(parameters);\n\t\tkubeClient.createTaskManagerPod(taskManagerPod)\n\t\t\t.whenCompleteAsync(\n\t\t\t\t(ignore, throwable) -> {\n\t\t\t\t\tif (throwable != null) {\n\t\t\t\t\t\tfinal Time retryInterval = configuration.getPodCreationRetryInterval();\n\t\t\t\t\t\tlog.warn(\"Could not start TaskManager in pod {}, retry in {}. \",\n\t\t\t\t\t\t\ttaskManagerPod.getName(), retryInterval, throwable);\n\t\t\t\t\t\tpodWorkerResources.remove(parameters.getPodName());\n\t\t\t\t\t\tnotifyNewWorkerAllocationFailed(workerResourceSpec);\n\t\t\t\t\t\tscheduleRunAsync(\n\t\t\t\t\t\t\tthis::requestKubernetesPodIfRequired,\n\t\t\t\t\t\t\tretryInterval);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tlog.info(\"TaskManager {} will be started with {}.\", parameters.getPodName(), workerResourceSpec);\n\t\t\t\t\t}\n\t\t\t\t},\n\t\t\t\tgetMainThreadExecutor());\n\t}\n\n\t\n\n\t/**\n\t * Request new pod if pending pods cannot satisfy pending slot requests.\n\t */\n\tprivate void requestKubernetesPodIfRequired() {\n\t\tfor (Map.Entry entry : getRequiredResources().entrySet()) {\n\t\t\tfinal WorkerResourceSpec workerResourceSpec = entry.getKey();\n\t\t\tfinal int requiredTaskManagers = entry.getValue();\n\n\t\t\twhile (requiredTaskManagers > getNumPendingWorkersFor(workerResourceSpec)) {\n\t\t\t\trequestKubernetesPod(workerResourceSpec);\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate void removePodAndTryRestartIfRequired(KubernetesPod pod) {\n\t\tif (pod.isTerminated()) {\n\t\t\tinternalStopPod(pod.getName());\n\t\t\trequestKubernetesPodIfRequired();\n\t\t}\n\t}\n\n\tprivate void internalStopPod(String podName) {\n\t\tfinal ResourceID resourceId = new ResourceID(podName);\n\t\tfinal boolean isPendingWorkerOfCurrentAttempt = 
isPendingWorkerOfCurrentAttempt(podName);\n\n\t\tkubeClient.stopPod(podName)\n\t\t\t.whenComplete(\n\t\t\t\t(ignore, throwable) -> {\n\t\t\t\t\tif (throwable != null) {\n\t\t\t\t\t\tlog.warn(\"Could not stop TaskManager in pod {}.\", podName, throwable);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t);\n\n\t\tfinal WorkerResourceSpec workerResourceSpec = podWorkerResources.remove(podName);\n\t\tworkerNodes.remove(resourceId);\n\n\t\tif (isPendingWorkerOfCurrentAttempt) {\n\t\t\tnotifyNewWorkerAllocationFailed(\n\t\t\t\tPreconditions.checkNotNull(workerResourceSpec,\n\t\t\t\t\t\"Worker resource spec of current attempt pending worker should be known.\"));\n\t\t}\n\t}\n\n\tprivate boolean isPendingWorkerOfCurrentAttempt(String podName) {\n\t\treturn podWorkerResources.containsKey(podName) &&\n\t\t\t!workerNodes.containsKey(new ResourceID(podName));\n\t}\n}" }, { "comment": "`env.node.parent` was replaced with `env.enclInvokable`", "method_body": "private void resolveAssociatedWorkerFunc(BWorkerSymbol worker, SymbolEnv env) {\n LineRange workerVarPos = worker.pos.lineRange();\n\n for (BLangLambdaFunction lambdaFn : env.enclPkg.lambdaFunctions) {\n LineRange workerBodyPos = lambdaFn.function.pos.lineRange();\n Location targetRangePos = env.node.pos;\n\n \n \n \n if (targetRangePos == null) {\n targetRangePos = env.node.parent.pos;\n }\n\n if (worker.name.value.equals(lambdaFn.function.defaultWorkerName.value)\n && withinRange(workerVarPos, targetRangePos.lineRange())\n && withinRange(workerBodyPos, targetRangePos.lineRange())) {\n worker.setAssociatedFuncSymbol(lambdaFn.function.symbol);\n return;\n }\n }\n\n throw new IllegalStateException(\n \"Matching function node not found for worker: \" + worker.name.value + \" at \" + worker.pos);\n }", "target_code": "if (targetRangePos == null) {", "method_body_after": "private void resolveAssociatedWorkerFunc(BWorkerSymbol worker, SymbolEnv env) {\n LineRange workerVarPos = worker.pos.lineRange();\n\n for (BLangLambdaFunction lambdaFn : 
env.enclPkg.lambdaFunctions) {\n LineRange workerBodyPos = lambdaFn.function.pos.lineRange();\n Location targetRangePos = env.node.pos;\n\n \n \n \n if (targetRangePos == null) {\n targetRangePos = env.enclInvokable.pos;\n }\n\n if (worker.name.value.equals(lambdaFn.function.defaultWorkerName.value)\n && withinRange(workerVarPos, targetRangePos.lineRange())\n && withinRange(workerBodyPos, targetRangePos.lineRange())) {\n worker.setAssociatedFuncSymbol(lambdaFn.function.symbol);\n return;\n }\n }\n\n throw new IllegalStateException(\n \"Matching function node not found for worker: \" + worker.name.value + \" at \" + worker.pos);\n }", "context_before": "class fields and object fields\n defineReferencedClassFields(classDefinition, typeDefEnv, objType, false);\n }\n\n private void defineFieldsOfObjectOrRecordTypeDef(BLangTypeDefinition typeDef, SymbolEnv pkgEnv) {\n NodeKind nodeKind = typeDef.typeNode.getKind();\n if (nodeKind != NodeKind.OBJECT_TYPE && nodeKind != NodeKind.RECORD_TYPE) {\n return;\n }\n\n \n BStructureType structureType = (BStructureType) typeDef.symbol.type;\n BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDef.typeNode;\n\n if (typeDef.symbol.kind == SymbolKind.TYPE_DEF && structureType.tsymbol.kind == SymbolKind.RECORD) {\n BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) structureTypeNode;\n BRecordType recordType = (BRecordType) structureType;\n \n recordType.sealed = recordTypeNode.sealed;\n }\n\n Scope recordScope = structureType.tsymbol.scope;\n SymbolEnv typeDefEnv = SymbolEnv.createTypeEnv(structureTypeNode, recordScope, pkgEnv);\n\n \n resolveFields(structureType, structureTypeNode, typeDefEnv);\n\n if (typeDef.symbol.kind == SymbolKind.TYPE_DEF && structureType.tsymbol.kind != SymbolKind.RECORD) {\n return;\n }\n\n BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) structureTypeNode;\n BRecordType recordType = (BRecordType) structureType;\n recordType.sealed = recordTypeNode.sealed;\n if 
(recordTypeNode.sealed && recordTypeNode.restFieldType != null) {\n dlog.error(recordTypeNode.restFieldType.pos, DiagnosticErrorCode.REST_FIELD_NOT_ALLOWED_IN_CLOSED_RECORDS);\n return;\n }\n\n if (recordTypeNode.restFieldType != null) {\n recordType.restFieldType = symResolver.resolveTypeNode(recordTypeNode.restFieldType, typeDefEnv);\n return;\n }\n\n if (!recordTypeNode.sealed) {\n recordType.restFieldType = symTable.anydataType;\n return;\n }\n\n \n for (BLangType typeRef : recordTypeNode.typeRefs) {\n BType refType = Types.getReferredType(typeRef.getBType());\n if (refType.tag != TypeTags.RECORD) {\n continue;\n }\n BType restFieldType = ((BRecordType) refType).restFieldType;\n if (restFieldType == symTable.noType) {\n continue;\n }\n if (recordType.restFieldType != null && !types.isSameType(recordType.restFieldType, restFieldType)) {\n recordType.restFieldType = symTable.noType;\n dlog.error(recordTypeNode.pos,\n DiagnosticErrorCode.\n CANNOT_USE_TYPE_INCLUSION_WITH_MORE_THAN_ONE_OPEN_RECORD_WITH_DIFFERENT_REST_DESCRIPTOR_TYPES);\n return;\n }\n recordType.restFieldType = restFieldType;\n recordType.sealed = false;\n }\n\n if (recordType.restFieldType != null) {\n return;\n }\n recordType.restFieldType = symTable.noType;\n }", "context_after": "class fields and object fields\n defineReferencedClassFields(classDefinition, typeDefEnv, objType, false);\n }\n\n private void defineFieldsOfObjectOrRecordTypeDef(BLangTypeDefinition typeDef, SymbolEnv pkgEnv) {\n NodeKind nodeKind = typeDef.typeNode.getKind();\n if (nodeKind != NodeKind.OBJECT_TYPE && nodeKind != NodeKind.RECORD_TYPE) {\n return;\n }\n\n \n BStructureType structureType = (BStructureType) typeDef.symbol.type;\n BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDef.typeNode;\n\n if (typeDef.symbol.kind == SymbolKind.TYPE_DEF && structureType.tsymbol.kind == SymbolKind.RECORD) {\n BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) structureTypeNode;\n BRecordType recordType 
= (BRecordType) structureType;\n \n recordType.sealed = recordTypeNode.sealed;\n }\n\n Scope recordScope = structureType.tsymbol.scope;\n SymbolEnv typeDefEnv = SymbolEnv.createTypeEnv(structureTypeNode, recordScope, pkgEnv);\n\n \n resolveFields(structureType, structureTypeNode, typeDefEnv);\n\n if (typeDef.symbol.kind == SymbolKind.TYPE_DEF && structureType.tsymbol.kind != SymbolKind.RECORD) {\n return;\n }\n\n BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) structureTypeNode;\n BRecordType recordType = (BRecordType) structureType;\n recordType.sealed = recordTypeNode.sealed;\n if (recordTypeNode.sealed && recordTypeNode.restFieldType != null) {\n dlog.error(recordTypeNode.restFieldType.pos, DiagnosticErrorCode.REST_FIELD_NOT_ALLOWED_IN_CLOSED_RECORDS);\n return;\n }\n\n if (recordTypeNode.restFieldType != null) {\n recordType.restFieldType = symResolver.resolveTypeNode(recordTypeNode.restFieldType, typeDefEnv);\n return;\n }\n\n if (!recordTypeNode.sealed) {\n recordType.restFieldType = symTable.anydataType;\n return;\n }\n\n \n for (BLangType typeRef : recordTypeNode.typeRefs) {\n BType refType = Types.getReferredType(typeRef.getBType());\n if (refType.tag != TypeTags.RECORD) {\n continue;\n }\n BType restFieldType = ((BRecordType) refType).restFieldType;\n if (restFieldType == symTable.noType) {\n continue;\n }\n if (recordType.restFieldType != null && !types.isSameType(recordType.restFieldType, restFieldType)) {\n recordType.restFieldType = symTable.noType;\n dlog.error(recordTypeNode.pos,\n DiagnosticErrorCode.\n CANNOT_USE_TYPE_INCLUSION_WITH_MORE_THAN_ONE_OPEN_RECORD_WITH_DIFFERENT_REST_DESCRIPTOR_TYPES);\n return;\n }\n recordType.restFieldType = restFieldType;\n recordType.sealed = false;\n }\n\n if (recordType.restFieldType != null) {\n return;\n }\n recordType.restFieldType = symTable.noType;\n }" }, { "comment": "you could update loaded rows in OlapTableTxnLogApplier.applyCommitLog, or DatabaseTransactionMgr.updateCatalogAfterCommitted and 
only need do once.", "method_body": "private boolean loadTxnCommitImpl(TLoadTxnCommitRequest request) throws UserException {\n String cluster = request.getCluster();\n if (Strings.isNullOrEmpty(cluster)) {\n cluster = SystemInfoService.DEFAULT_CLUSTER;\n }\n if (request.isSetAuth_code()) {\n \n } else {\n checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\n request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);\n }\n\n \n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n String fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\n Database db = globalStateMgr.getDb(fullDbName);\n if (db == null) {\n String dbName = fullDbName;\n if (Strings.isNullOrEmpty(request.getCluster())) {\n dbName = request.getDb();\n }\n throw new UserException(\"unknown database, database=\" + dbName);\n }\n TxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.txnCommitAttachment);\n long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? 
request.getThrift_rpc_timeout_ms() : 5000;\n \n \n \n timeoutMs = timeoutMs * 3 / 4;\n boolean ret = GlobalStateMgr.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(\n db, request.getTxnId(),\n TabletCommitInfo.fromThrift(request.getCommitInfos()),\n timeoutMs, attachment);\n if (!ret) {\n return ret;\n }\n \n MetricRepo.COUNTER_LOAD_FINISHED.increase(1L);\n if (null == attachment) {\n return ret;\n }\n \n Table tbl = db.getTable(request.getTbl());\n if (null == tbl) {\n return ret;\n }\n TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(tbl.getId());\n BasicStatsMeta basicStatsMeta =\n GlobalStateMgr.getCurrentAnalyzeMgr().getBasicStatsMetaMap().get(tbl.getId());\n switch (request.txnCommitAttachment.getLoadType()) {\n case ROUTINE_LOAD:\n if (!(attachment instanceof RLTaskTxnCommitAttachment)) {\n break;\n }\n RLTaskTxnCommitAttachment routineAttachment = (RLTaskTxnCommitAttachment) attachment;\n entity.counterRoutineLoadFinishedTotal.increase(1L);\n entity.counterRoutineLoadBytesTotal.increase(routineAttachment.getReceivedBytes());\n entity.counterRoutineLoadRowsTotal.increase(routineAttachment.getLoadedRows());\n if (basicStatsMeta != null) {\n basicStatsMeta.increase(routineAttachment.getLoadedRows());\n }\n\n break;\n case MANUAL_LOAD:\n if (!(attachment instanceof ManualLoadTxnCommitAttachment)) {\n break;\n }\n ManualLoadTxnCommitAttachment streamAttachment = (ManualLoadTxnCommitAttachment) attachment;\n entity.counterStreamLoadFinishedTotal.increase(1L);\n entity.counterStreamLoadBytesTotal.increase(streamAttachment.getReceivedBytes());\n entity.counterStreamLoadRowsTotal.increase(streamAttachment.getLoadedRows());\n if (basicStatsMeta != null) {\n basicStatsMeta.increase(streamAttachment.getLoadedRows());\n }\n\n break;\n default:\n break;\n }\n return ret;\n }", "target_code": "basicStatsMeta.increase(routineAttachment.getLoadedRows());", "method_body_after": "private boolean 
loadTxnCommitImpl(TLoadTxnCommitRequest request) throws UserException {\n String cluster = request.getCluster();\n if (Strings.isNullOrEmpty(cluster)) {\n cluster = SystemInfoService.DEFAULT_CLUSTER;\n }\n if (request.isSetAuth_code()) {\n \n } else {\n checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\n request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);\n }\n\n \n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n String fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\n Database db = globalStateMgr.getDb(fullDbName);\n if (db == null) {\n String dbName = fullDbName;\n if (Strings.isNullOrEmpty(request.getCluster())) {\n dbName = request.getDb();\n }\n throw new UserException(\"unknown database, database=\" + dbName);\n }\n TxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.txnCommitAttachment);\n long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? request.getThrift_rpc_timeout_ms() : 5000;\n \n \n \n timeoutMs = timeoutMs * 3 / 4;\n boolean ret = GlobalStateMgr.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(\n db, request.getTxnId(),\n TabletCommitInfo.fromThrift(request.getCommitInfos()),\n timeoutMs, attachment);\n if (!ret) {\n return ret;\n }\n \n MetricRepo.COUNTER_LOAD_FINISHED.increase(1L);\n if (null == attachment) {\n return ret;\n }\n \n Table tbl = db.getTable(request.getTbl());\n if (null == tbl) {\n return ret;\n }\n TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(tbl.getId());\n BasicStatsMeta basicStatsMeta =\n GlobalStateMgr.getCurrentAnalyzeMgr().getBasicStatsMetaMap().get(tbl.getId());\n switch (request.txnCommitAttachment.getLoadType()) {\n case ROUTINE_LOAD:\n if (!(attachment instanceof RLTaskTxnCommitAttachment)) {\n break;\n }\n RLTaskTxnCommitAttachment routineAttachment = (RLTaskTxnCommitAttachment) attachment;\n entity.counterRoutineLoadFinishedTotal.increase(1L);\n 
entity.counterRoutineLoadBytesTotal.increase(routineAttachment.getReceivedBytes());\n entity.counterRoutineLoadRowsTotal.increase(routineAttachment.getLoadedRows());\n if (basicStatsMeta != null) {\n basicStatsMeta.increase(routineAttachment.getLoadedRows());\n }\n\n break;\n case MANUAL_LOAD:\n if (!(attachment instanceof ManualLoadTxnCommitAttachment)) {\n break;\n }\n ManualLoadTxnCommitAttachment streamAttachment = (ManualLoadTxnCommitAttachment) attachment;\n entity.counterStreamLoadFinishedTotal.increase(1L);\n entity.counterStreamLoadBytesTotal.increase(streamAttachment.getReceivedBytes());\n entity.counterStreamLoadRowsTotal.increase(streamAttachment.getLoadedRows());\n if (basicStatsMeta != null) {\n basicStatsMeta.increase(streamAttachment.getLoadedRows());\n }\n\n break;\n default:\n break;\n }\n return ret;\n }", "context_before": "class FrontendServiceImpl implements FrontendService.Iface {\n private static final Logger LOG = LogManager.getLogger(MasterImpl.class);\n private MasterImpl masterImpl;\n private ExecuteEnv exeEnv;\n\n public FrontendServiceImpl(ExecuteEnv exeEnv) {\n masterImpl = new MasterImpl();\n this.exeEnv = exeEnv;\n }\n\n @Override\n public TGetDbsResult getDbNames(TGetDbsParams params) throws TException {\n LOG.debug(\"get db request: {}\", params);\n TGetDbsResult result = new TGetDbsResult();\n\n List dbs = Lists.newArrayList();\n PatternMatcher matcher = null;\n if (params.isSetPattern()) {\n try {\n matcher = PatternMatcher.createMysqlPattern(params.getPattern(),\n CaseSensibility.DATABASE.getCaseSensibility());\n } catch (AnalysisException e) {\n throw new TException(\"Pattern is in bad format: \" + params.getPattern());\n }\n }\n\n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n List dbNames = globalStateMgr.getDbNames();\n LOG.debug(\"get db names: {}\", dbNames);\n\n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = 
UserIdentity.fromThrift(params.current_user_ident);\n } else {\n currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n }\n for (String fullName : dbNames) {\n if (!globalStateMgr.getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) {\n continue;\n }\n\n final String db = ClusterNamespace.getNameFromFullName(fullName);\n if (matcher != null && !matcher.match(db)) {\n continue;\n }\n\n dbs.add(fullName);\n }\n result.setDbs(dbs);\n return result;\n }\n\n @Override\n public TGetTablesResult getTableNames(TGetTablesParams params) throws TException {\n LOG.debug(\"get table name request: {}\", params);\n TGetTablesResult result = new TGetTablesResult();\n List tablesResult = Lists.newArrayList();\n result.setTables(tablesResult);\n PatternMatcher matcher = null;\n if (params.isSetPattern()) {\n try {\n matcher = PatternMatcher.createMysqlPattern(params.getPattern(),\n CaseSensibility.TABLE.getCaseSensibility());\n } catch (AnalysisException e) {\n throw new TException(\"Pattern is in bad format: \" + params.getPattern());\n }\n }\n\n \n\n Database db = GlobalStateMgr.getCurrentState().getDb(params.db);\n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = UserIdentity.fromThrift(params.current_user_ident);\n } else {\n currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n }\n if (db != null) {\n for (String tableName : db.getTableNamesWithLock()) {\n LOG.debug(\"get table: {}, wait to check\", tableName);\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db,\n tableName, PrivPredicate.SHOW)) {\n continue;\n }\n\n if (matcher != null && !matcher.match(tableName)) {\n continue;\n }\n tablesResult.add(tableName);\n }\n }\n return result;\n }\n\n @Override\n public TListTableStatusResult listTableStatus(TGetTablesParams params) throws TException {\n LOG.debug(\"get list table request: {}\", params);\n 
TListTableStatusResult result = new TListTableStatusResult();\n List tablesResult = Lists.newArrayList();\n result.setTables(tablesResult);\n PatternMatcher matcher = null;\n if (params.isSetPattern()) {\n try {\n matcher = PatternMatcher.createMysqlPattern(params.getPattern(),\n CaseSensibility.TABLE.getCaseSensibility());\n } catch (AnalysisException e) {\n throw new TException(\"Pattern is in bad format \" + params.getPattern());\n }\n }\n\n \n\n Database db = GlobalStateMgr.getCurrentState().getDb(params.db);\n long limit = params.isSetLimit() ? params.getLimit() : -1;\n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = UserIdentity.fromThrift(params.current_user_ident);\n } else {\n currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n }\n if (params.isSetType() && TTableType.MATERIALIZED_VIEW.equals(params.getType())) {\n listMaterializedViewStatus(tablesResult, limit, matcher, currentUser, params.db);\n return result;\n }\n if (db != null) {\n db.readLock();\n try {\n boolean listingViews = params.isSetType() && TTableType.VIEW.equals(params.getType());\n List

tables = listingViews ? db.getViews() : db.getTables();\n for (Table table : tables) {\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db,\n table.getName(), PrivPredicate.SHOW)) {\n continue;\n }\n if (matcher != null && !matcher.match(table.getName())) {\n continue;\n }\n TTableStatus status = new TTableStatus();\n status.setName(table.getName());\n status.setType(table.getMysqlType());\n status.setEngine(table.getEngine());\n status.setComment(table.getComment());\n status.setCreate_time(table.getCreateTime());\n status.setLast_check_time(table.getLastCheckTime());\n if (listingViews) {\n View view = (View) table;\n String ddlSql = view.getInlineViewDef();\n List tblRefs = new ArrayList<>();\n view.getQueryStmt().collectTableRefs(tblRefs);\n for (TableRef tblRef : tblRefs) {\n if (!GlobalStateMgr.getCurrentState().getAuth()\n .checkTblPriv(currentUser, tblRef.getName().getDb(),\n tblRef.getName().getTbl(), PrivPredicate.SHOW)) {\n ddlSql = \"\";\n break;\n }\n }\n status.setDdl_sql(ddlSql);\n }\n tablesResult.add(status);\n \n if (limit > 0 && tablesResult.size() >= limit) {\n break;\n }\n }\n } finally {\n db.readUnlock();\n }\n }\n return result;\n }\n\n \n public void listMaterializedViewStatus(List tablesResult, long limit, PatternMatcher matcher,\n UserIdentity currentUser, String dbName) {\n Database db = GlobalStateMgr.getCurrentState().getDb(dbName);\n if (db == null) {\n LOG.warn(\"database not exists: {}\", dbName);\n return;\n }\n db.readLock();\n try {\n for (Table materializedView : db.getMaterializedViews()) {\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, dbName,\n materializedView.getName(), PrivPredicate.SHOW)) {\n continue;\n }\n if (matcher != null && !matcher.match(materializedView.getName())) {\n continue;\n }\n MaterializedView mvTable = (MaterializedView) materializedView;\n TTableStatus status = new TTableStatus();\n status.setId(String.valueOf(mvTable.getId()));\n 
status.setName(mvTable.getName());\n status.setDdl_sql(mvTable.getViewDefineSql());\n status.setRows(String.valueOf(mvTable.getRowCount()));\n status.setType(mvTable.getMysqlType());\n status.setComment(mvTable.getComment());\n tablesResult.add(status);\n if (limit > 0 && tablesResult.size() >= limit) {\n return;\n }\n }\n for (Table table : db.getTables()) {\n if (table.getType() == Table.TableType.OLAP) {\n OlapTable olapTable = (OlapTable) table;\n List visibleMaterializedViews = olapTable.getVisibleIndex();\n long baseIdx = olapTable.getBaseIndexId();\n\n for (MaterializedIndex mvIdx : visibleMaterializedViews) {\n if (baseIdx == mvIdx.getId()) {\n continue;\n }\n if (matcher != null && !matcher.match(olapTable.getIndexNameById(mvIdx.getId()))) {\n continue;\n }\n MaterializedIndexMeta mvMeta = olapTable.getVisibleIndexIdToMeta().get(mvIdx.getId());\n TTableStatus status = new TTableStatus();\n status.setId(String.valueOf(mvIdx.getId()));\n status.setName(olapTable.getIndexNameById(mvIdx.getId()));\n if (mvMeta.getOriginStmt() == null) {\n StringBuilder originStmtBuilder = new StringBuilder(\n \"create materialized view \" + olapTable.getIndexNameById(mvIdx.getId()) +\n \" as select \");\n String groupByString = \"\";\n for (Column column : mvMeta.getSchema()) {\n if (column.isKey()) {\n groupByString += column.getName() + \",\";\n }\n }\n originStmtBuilder.append(groupByString);\n for (Column column : mvMeta.getSchema()) {\n if (!column.isKey()) {\n originStmtBuilder.append(column.getAggregationType().toString()).append(\"(\")\n .append(column.getName()).append(\")\").append(\",\");\n }\n }\n originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());\n originStmtBuilder.append(\" from \").append(olapTable.getName()).append(\" group by \")\n .append(groupByString);\n originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());\n status.setDdl_sql(originStmtBuilder.toString());\n } else {\n 
status.setDdl_sql(mvMeta.getOriginStmt().replace(\"\\n\", \"\").replace(\"\\t\", \"\")\n .replaceAll(\"[ ]+\", \" \"));\n }\n status.setRows(String.valueOf(mvIdx.getRowCount()));\n \n status.setType(\"\");\n status.setComment(\"\");\n tablesResult.add(status);\n if (limit > 0 && tablesResult.size() >= limit) {\n return;\n }\n }\n }\n }\n } finally {\n db.readUnlock();\n }\n }\n\n @Override\n public TGetTaskInfoResult getTasks(TGetTasksParams params) throws TException {\n LOG.debug(\"get show task request: {}\", params);\n TGetTaskInfoResult result = new TGetTaskInfoResult();\n List tasksResult = Lists.newArrayList();\n result.setTasks(tasksResult);\n\n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = UserIdentity.fromThrift(params.current_user_ident);\n }\n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n TaskManager taskManager = globalStateMgr.getTaskManager();\n List taskList = taskManager.showTasks(null);\n\n for (Task task : taskList) {\n\n if (!globalStateMgr.getAuth().checkDbPriv(currentUser, task.getDbName(), PrivPredicate.SHOW)) {\n continue;\n }\n\n TTaskInfo info = new TTaskInfo();\n info.setTask_name(task.getName());\n info.setCreate_time(task.getCreateTime() / 1000);\n \n info.setSchedule(\"MANUAL\");\n info.setDatabase(ClusterNamespace.getNameFromFullName(task.getDbName()));\n info.setDefinition(task.getDefinition());\n info.setExpire_time(task.getExpireTime() / 1000);\n tasksResult.add(info);\n }\n\n return result;\n }\n\n @Override\n public TGetTaskRunInfoResult getTaskRuns(TGetTasksParams params) throws TException {\n LOG.debug(\"get show task run request: {}\", params);\n TGetTaskRunInfoResult result = new TGetTaskRunInfoResult();\n List tasksResult = Lists.newArrayList();\n result.setTask_runs(tasksResult);\n\n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = UserIdentity.fromThrift(params.current_user_ident);\n }\n GlobalStateMgr globalStateMgr 
= GlobalStateMgr.getCurrentState();\n TaskManager taskManager = globalStateMgr.getTaskManager();\n List taskRunList = taskManager.showTaskRunStatus(null);\n\n for (TaskRunStatus status : taskRunList) {\n\n if (!globalStateMgr.getAuth().checkDbPriv(currentUser, status.getDbName(), PrivPredicate.SHOW)) {\n continue;\n }\n\n TTaskRunInfo info = new TTaskRunInfo();\n info.setQuery_id(status.getQueryId());\n info.setTask_name(status.getTaskName());\n info.setCreate_time(status.getCreateTime() / 1000);\n info.setFinish_time(status.getFinishTime() / 1000);\n info.setState(status.getState().toString());\n info.setDatabase(ClusterNamespace.getNameFromFullName(status.getDbName()));\n info.setDefinition(status.getDefinition());\n info.setError_code(status.getErrorCode());\n info.setError_message(status.getErrorMessage());\n info.setExpire_time(status.getExpireTime() / 1000);\n tasksResult.add(info);\n }\n return result;\n }\n\n @Override\n public TGetDBPrivsResult getDBPrivs(TGetDBPrivsParams params) throws TException {\n LOG.debug(\"get database privileges request: {}\", params);\n TGetDBPrivsResult result = new TGetDBPrivsResult();\n List tDBPrivs = Lists.newArrayList();\n result.setDb_privs(tDBPrivs);\n UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);\n List dbPrivEntries = GlobalStateMgr.getCurrentState().getAuth().getDBPrivEntries(currentUser);\n \n for (DbPrivEntry entry : dbPrivEntries) {\n PrivBitSet savedPrivs = entry.getPrivSet();\n String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;\n String userIdentStr = currentUser.toString().replace(clusterPrefix, \"\");\n String dbName = ClusterNamespace.getNameFromFullName(entry.getOrigDb());\n boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT);\n List tPrivs = savedPrivs.toPrivilegeList().stream().map(\n priv -> {\n TDBPrivDesc privDesc = new TDBPrivDesc();\n privDesc.setDb_name(dbName);\n privDesc.setIs_grantable(isGrantable);\n 
privDesc.setUser_ident_str(userIdentStr);\n privDesc.setPriv(priv.getUpperNameForMysql());\n return privDesc;\n }\n ).collect(Collectors.toList());\n if (savedPrivs.satisfy(PrivPredicate.LOAD)) {\n \n tPrivs.addAll(Lists.newArrayList(\"INSERT\", \"UPDATE\", \"DELETE\").stream().map(priv -> {\n TDBPrivDesc privDesc = new TDBPrivDesc();\n privDesc.setDb_name(dbName);\n privDesc.setIs_grantable(isGrantable);\n privDesc.setUser_ident_str(userIdentStr);\n privDesc.setPriv(priv);\n return privDesc;\n }).collect(Collectors.toList()));\n }\n tDBPrivs.addAll(tPrivs);\n }\n return result;\n }\n\n @Override\n public TGetTablePrivsResult getTablePrivs(TGetTablePrivsParams params) throws TException {\n LOG.debug(\"get table privileges request: {}\", params);\n TGetTablePrivsResult result = new TGetTablePrivsResult();\n List tTablePrivs = Lists.newArrayList();\n result.setTable_privs(tTablePrivs);\n UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);\n List tablePrivEntries =\n GlobalStateMgr.getCurrentState().getAuth().getTablePrivEntries(currentUser);\n \n for (TablePrivEntry entry : tablePrivEntries) {\n PrivBitSet savedPrivs = entry.getPrivSet();\n String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;\n String userIdentStr = currentUser.toString().replace(clusterPrefix, \"\");\n String dbName = ClusterNamespace.getNameFromFullName(entry.getOrigDb());\n boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT);\n List tPrivs = savedPrivs.toPrivilegeList().stream().map(\n priv -> {\n TTablePrivDesc privDesc = new TTablePrivDesc();\n privDesc.setDb_name(dbName);\n privDesc.setTable_name(entry.getOrigTbl());\n privDesc.setIs_grantable(isGrantable);\n privDesc.setUser_ident_str(userIdentStr);\n privDesc.setPriv(priv.getUpperNameForMysql());\n return privDesc;\n }\n ).collect(Collectors.toList());\n if (savedPrivs.satisfy(PrivPredicate.LOAD)) {\n \n tPrivs.addAll(Lists.newArrayList(\"INSERT\", \"UPDATE\", 
\"DELETE\").stream().map(priv -> {\n TTablePrivDesc privDesc = new TTablePrivDesc();\n privDesc.setDb_name(dbName);\n privDesc.setTable_name(entry.getOrigTbl());\n privDesc.setIs_grantable(isGrantable);\n privDesc.setUser_ident_str(userIdentStr);\n privDesc.setPriv(priv);\n return privDesc;\n }).collect(Collectors.toList()));\n }\n tTablePrivs.addAll(tPrivs);\n }\n return result;\n }\n\n @Override\n public TGetUserPrivsResult getUserPrivs(TGetUserPrivsParams params) throws TException {\n LOG.debug(\"get user privileges request: {}\", params);\n TGetUserPrivsResult result = new TGetUserPrivsResult();\n List tUserPrivs = Lists.newArrayList();\n result.setUser_privs(tUserPrivs);\n UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);\n Auth currAuth = GlobalStateMgr.getCurrentState().getAuth();\n UserPrivTable userPrivTable = currAuth.getUserPrivTable();\n List userIdents = Lists.newArrayList();\n \n userIdents.add(currentUser);\n\n \n \n \n \n \n \n \n \n\n \n for (UserIdentity userIdent : userIdents) {\n PrivBitSet savedPrivs = new PrivBitSet();\n userPrivTable.getPrivs(userIdent, savedPrivs);\n String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;\n String userIdentStr = currentUser.toString().replace(clusterPrefix, \"\");\n \n List tPrivs = savedPrivs.toPrivilegeList().stream().map(\n priv -> {\n boolean isGrantable =\n Privilege.NODE_PRIV != priv \n && userPrivTable.hasPriv(userIdent,\n PrivPredicate.GRANT);\n TUserPrivDesc privDesc = new TUserPrivDesc();\n privDesc.setIs_grantable(isGrantable);\n privDesc.setUser_ident_str(userIdentStr);\n privDesc.setPriv(priv.getUpperNameForMysql());\n return privDesc;\n }\n ).collect(Collectors.toList());\n tUserPrivs.addAll(tPrivs);\n }\n return result;\n }\n\n @Override\n public TFeResult updateExportTaskStatus(TUpdateExportTaskStatusRequest request) throws TException {\n TStatus status = new TStatus(TStatusCode.OK);\n TFeResult result = new 
TFeResult(FrontendServiceVersion.V1, status);\n\n return result;\n }\n\n @Override\n public TDescribeTableResult describeTable(TDescribeTableParams params) throws TException {\n LOG.debug(\"get desc table request: {}\", params);\n TDescribeTableResult result = new TDescribeTableResult();\n List columns = Lists.newArrayList();\n result.setColumns(columns);\n\n \n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = UserIdentity.fromThrift(params.current_user_ident);\n } else {\n currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n }\n long limit = params.isSetLimit() ? params.getLimit() : -1;\n\n \n \n \n \n if (!params.isSetDb() && StringUtils.isBlank(params.getTable_name())) {\n describeWithoutDbAndTable(currentUser, columns, limit);\n return result;\n }\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db,\n params.getTable_name(), PrivPredicate.SHOW)) {\n return result;\n }\n Database db = GlobalStateMgr.getCurrentState().getDb(params.db);\n if (db != null) {\n db.readLock();\n try {\n Table table = db.getTable(params.getTable_name());\n setColumnDesc(columns, table, limit, false, params.db, params.getTable_name());\n } finally {\n db.readUnlock();\n }\n }\n return result;\n }\n\n \n \n private void describeWithoutDbAndTable(UserIdentity currentUser, List columns, long limit) {\n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n List dbNames = globalStateMgr.getDbNames();\n boolean reachLimit;\n for (String fullName : dbNames) {\n if (!GlobalStateMgr.getCurrentState().getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) {\n continue;\n }\n Database db = GlobalStateMgr.getCurrentState().getDb(fullName);\n if (db != null) {\n for (String tableName : db.getTableNamesWithLock()) {\n LOG.debug(\"get table: {}, wait to check\", tableName);\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, fullName,\n 
tableName, PrivPredicate.SHOW)) {\n continue;\n }\n db.readLock();\n try {\n Table table = db.getTable(tableName);\n reachLimit = setColumnDesc(columns, table, limit, true, fullName, tableName);\n } finally {\n db.readUnlock();\n }\n if (reachLimit) {\n return;\n }\n }\n }\n }\n }\n\n private boolean setColumnDesc(List columns, Table table, long limit,\n boolean needSetDbAndTable, String db, String tbl) {\n if (table != null) {\n String tableKeysType = \"\";\n if (TableType.OLAP.equals(table.getType())) {\n OlapTable olapTable = (OlapTable) table;\n tableKeysType = olapTable.getKeysType().name().substring(0, 3).toUpperCase();\n }\n for (Column column : table.getBaseSchema()) {\n final TColumnDesc desc =\n new TColumnDesc(column.getName(), column.getPrimitiveType().toThrift());\n final Integer precision = column.getType().getPrecision();\n if (precision != null) {\n desc.setColumnPrecision(precision);\n }\n final Integer columnLength = column.getType().getColumnSize();\n if (columnLength != null) {\n desc.setColumnLength(columnLength);\n }\n final Integer decimalDigits = column.getType().getDecimalDigits();\n if (decimalDigits != null) {\n desc.setColumnScale(decimalDigits);\n }\n if (column.isKey()) {\n \n desc.setColumnKey(tableKeysType);\n } else {\n desc.setColumnKey(\"\");\n }\n final TColumnDef colDef = new TColumnDef(desc);\n final String comment = column.getComment();\n if (comment != null) {\n colDef.setComment(comment);\n }\n columns.add(colDef);\n \n if (needSetDbAndTable) {\n columns.get(columns.size() - 1).columnDesc.setDbName(db);\n columns.get(columns.size() - 1).columnDesc.setTableName(tbl);\n }\n \n if (limit > 0 && columns.size() >= limit) {\n return true;\n }\n }\n }\n return false;\n }\n\n @Override\n public TShowVariableResult showVariables(TShowVariableRequest params) throws TException {\n TShowVariableResult result = new TShowVariableResult();\n Map map = Maps.newHashMap();\n result.setVariables(map);\n \n ConnectContext ctx = 
exeEnv.getScheduler().getContext(params.getThreadId());\n if (ctx == null) {\n return result;\n }\n List> rows = VariableMgr.dump(SetType.fromThrift(params.getVarType()), ctx.getSessionVariable(),\n null);\n for (List row : rows) {\n map.put(row.get(0), row.get(1));\n }\n return result;\n }\n\n @Override\n public TReportExecStatusResult reportExecStatus(TReportExecStatusParams params) throws TException {\n return QeProcessorImpl.INSTANCE.reportExecStatus(params, getClientAddr());\n }\n\n @Override\n public TMasterResult finishTask(TFinishTaskRequest request) throws TException {\n return masterImpl.finishTask(request);\n }\n\n @Override\n public TMasterResult report(TReportRequest request) throws TException {\n return masterImpl.report(request);\n }\n\n @Override\n public TFetchResourceResult fetchResource() throws TException {\n throw new TException(\"not supported\");\n }\n\n @Override\n public TFeResult isMethodSupported(TIsMethodSupportedRequest request) throws TException {\n TStatus status = new TStatus(TStatusCode.OK);\n TFeResult result = new TFeResult(FrontendServiceVersion.V1, status);\n switch (request.getFunction_name()) {\n case \"STREAMING_MINI_LOAD\":\n break;\n default:\n status.setStatus_code(NOT_IMPLEMENTED_ERROR);\n break;\n }\n return result;\n }\n\n @Override\n public TMasterOpResult forward(TMasterOpRequest params) throws TException {\n TNetworkAddress clientAddr = getClientAddr();\n if (clientAddr != null) {\n Frontend fe = GlobalStateMgr.getCurrentState().getFeByHost(clientAddr.getHostname());\n if (fe == null) {\n LOG.warn(\"reject request from invalid host. 
client: {}\", clientAddr);\n throw new TException(\"request from invalid host was rejected.\");\n }\n }\n\n \n LOG.info(\"receive forwarded stmt {} from FE: {}\", params.getStmt_id(), clientAddr.getHostname());\n ConnectContext context = new ConnectContext(null);\n ConnectProcessor processor = new ConnectProcessor(context);\n TMasterOpResult result = processor.proxyExecute(params);\n ConnectContext.remove();\n return result;\n }\n\n private void checkPasswordAndPrivs(String cluster, String user, String passwd, String db, String tbl,\n String clientIp, PrivPredicate predicate) throws AuthenticationException {\n\n final String fullUserName = ClusterNamespace.getFullName(cluster, user);\n final String fullDbName = ClusterNamespace.getFullName(cluster, db);\n List currentUser = Lists.newArrayList();\n if (!GlobalStateMgr.getCurrentState().getAuth()\n .checkPlainPassword(fullUserName, clientIp, passwd, currentUser)) {\n throw new AuthenticationException(\"Access denied for \" + fullUserName + \"@\" + clientIp);\n }\n\n Preconditions.checkState(currentUser.size() == 1);\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser.get(0), fullDbName, tbl, predicate)) {\n throw new AuthenticationException(\n \"Access denied; you need (at least one of) the LOAD privilege(s) for this operation\");\n }\n }\n\n @Override\n public TLoadTxnBeginResult loadTxnBegin(TLoadTxnBeginRequest request) throws TException {\n String clientAddr = getClientAddrAsString();\n LOG.info(\"receive txn begin request, db: {}, tbl: {}, label: {}, backend: {}\",\n request.getDb(), request.getTbl(), request.getLabel(), clientAddr);\n LOG.debug(\"txn begin request: {}\", request);\n\n TLoadTxnBeginResult result = new TLoadTxnBeginResult();\n \n if (!GlobalStateMgr.getCurrentState().isMaster()) {\n TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\n status.setError_msgs(Lists.newArrayList(\"current fe is not master\"));\n result.setStatus(status);\n return result;\n }\n\n TStatus 
status = new TStatus(TStatusCode.OK);\n result.setStatus(status);\n try {\n result.setTxnId(loadTxnBeginImpl(request, clientAddr));\n } catch (DuplicatedRequestException e) {\n \n LOG.info(\"duplicate request for stream load. request id: {}, txn_id: {}\", e.getDuplicatedRequestId(),\n e.getTxnId());\n result.setTxnId(e.getTxnId());\n } catch (LabelAlreadyUsedException e) {\n status.setStatus_code(TStatusCode.LABEL_ALREADY_EXISTS);\n status.addToError_msgs(e.getMessage());\n result.setJob_status(e.getJobStatus());\n } catch (UserException e) {\n LOG.warn(\"failed to begin: {}\", e.getMessage());\n status.setStatus_code(TStatusCode.ANALYSIS_ERROR);\n status.addToError_msgs(e.getMessage());\n } catch (Throwable e) {\n LOG.warn(\"catch unknown result.\", e);\n status.setStatus_code(TStatusCode.INTERNAL_ERROR);\n status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));\n return result;\n }\n return result;\n }\n\n private long loadTxnBeginImpl(TLoadTxnBeginRequest request, String clientIp) throws UserException {\n String cluster = request.getCluster();\n if (Strings.isNullOrEmpty(cluster)) {\n cluster = SystemInfoService.DEFAULT_CLUSTER;\n }\n\n checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\n request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);\n\n \n if (Strings.isNullOrEmpty(request.getLabel())) {\n throw new UserException(\"empty label in begin request\");\n }\n \n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n String fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\n Database db = globalStateMgr.getDb(fullDbName);\n if (db == null) {\n String dbName = fullDbName;\n if (Strings.isNullOrEmpty(request.getCluster())) {\n dbName = request.getDb();\n }\n throw new UserException(\"unknown database, database=\" + dbName);\n }\n Table table = db.getTable(request.getTbl());\n if (table == null) {\n throw new UserException(\"unknown table \\\"\" + request.getDb() + \".\" + 
request.getTbl() + \"\\\"\");\n }\n\n \n long timeoutSecond = request.isSetTimeout() ? request.getTimeout() : Config.stream_load_default_timeout_second;\n MetricRepo.COUNTER_LOAD_ADD.increase(1L);\n return GlobalStateMgr.getCurrentGlobalTransactionMgr().beginTransaction(\n db.getId(), Lists.newArrayList(table.getId()), request.getLabel(), request.getRequest_id(),\n new TxnCoordinator(TxnSourceType.BE, clientIp),\n TransactionState.LoadJobSourceType.BACKEND_STREAMING, -1, timeoutSecond);\n }\n\n @Override\n public TLoadTxnCommitResult loadTxnCommit(TLoadTxnCommitRequest request) throws TException {\n String clientAddr = getClientAddrAsString();\n LOG.info(\"receive txn commit request. db: {}, tbl: {}, txn_id: {}, backend: {}\",\n request.getDb(), request.getTbl(), request.getTxnId(), clientAddr);\n LOG.debug(\"txn commit request: {}\", request);\n\n TLoadTxnCommitResult result = new TLoadTxnCommitResult();\n \n if (!GlobalStateMgr.getCurrentState().isMaster()) {\n TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\n status.setError_msgs(Lists.newArrayList(\"current fe is not master\"));\n result.setStatus(status);\n return result;\n }\n\n TStatus status = new TStatus(TStatusCode.OK);\n result.setStatus(status);\n try {\n if (!loadTxnCommitImpl(request)) {\n \n status.setStatus_code(TStatusCode.PUBLISH_TIMEOUT);\n status.addToError_msgs(\"Publish timeout. 
The data will be visible after a while\");\n }\n } catch (UserException e) {\n LOG.warn(\"failed to commit txn_id: {}: {}\", request.getTxnId(), e.getMessage());\n status.setStatus_code(TStatusCode.ANALYSIS_ERROR);\n status.addToError_msgs(e.getMessage());\n } catch (Throwable e) {\n LOG.warn(\"catch unknown result.\", e);\n status.setStatus_code(TStatusCode.INTERNAL_ERROR);\n status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));\n return result;\n }\n return result;\n }\n\n \n \n\n @Override\n public TLoadTxnRollbackResult loadTxnRollback(TLoadTxnRollbackRequest request) throws TException {\n String clientAddr = getClientAddrAsString();\n LOG.info(\"receive txn rollback request. db: {}, tbl: {}, txn_id: {}, reason: {}, backend: {}\",\n request.getDb(), request.getTbl(), request.getTxnId(), request.getReason(), clientAddr);\n LOG.debug(\"txn rollback request: {}\", request);\n\n TLoadTxnRollbackResult result = new TLoadTxnRollbackResult();\n \n if (!GlobalStateMgr.getCurrentState().isMaster()) {\n TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\n status.setError_msgs(Lists.newArrayList(\"current fe is not master\"));\n result.setStatus(status);\n return result;\n }\n\n TStatus status = new TStatus(TStatusCode.OK);\n result.setStatus(status);\n try {\n loadTxnRollbackImpl(request);\n } catch (TransactionNotFoundException e) {\n LOG.warn(\"failed to rollback txn {}: {}\", request.getTxnId(), e.getMessage());\n status.setStatus_code(TStatusCode.TXN_NOT_EXISTS);\n status.addToError_msgs(e.getMessage());\n } catch (UserException e) {\n LOG.warn(\"failed to rollback txn {}: {}\", request.getTxnId(), e.getMessage());\n status.setStatus_code(TStatusCode.ANALYSIS_ERROR);\n status.addToError_msgs(e.getMessage());\n } catch (Throwable e) {\n LOG.warn(\"catch unknown result.\", e);\n status.setStatus_code(TStatusCode.INTERNAL_ERROR);\n status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));\n return result;\n }\n\n return result;\n }\n\n private void 
loadTxnRollbackImpl(TLoadTxnRollbackRequest request) throws UserException {\n String cluster = request.getCluster();\n if (Strings.isNullOrEmpty(cluster)) {\n cluster = SystemInfoService.DEFAULT_CLUSTER;\n }\n\n if (request.isSetAuth_code()) {\n \n } else {\n checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\n request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);\n }\n String dbName = ClusterNamespace.getFullName(cluster, request.getDb());\n Database db = GlobalStateMgr.getCurrentState().getDb(dbName);\n if (db == null) {\n throw new MetaNotFoundException(\"db \" + request.getDb() + \" does not exist\");\n }\n long dbId = db.getId();\n GlobalStateMgr.getCurrentGlobalTransactionMgr().abortTransaction(dbId, request.getTxnId(),\n request.isSetReason() ? request.getReason() : \"system cancel\",\n TxnCommitAttachment.fromThrift(request.getTxnCommitAttachment()));\n }\n\n @Override\n public TStreamLoadPutResult streamLoadPut(TStreamLoadPutRequest request) {\n String clientAddr = getClientAddrAsString();\n LOG.info(\"receive stream load put request. 
db:{}, tbl: {}, txn_id: {}, load id: {}, backend: {}\",\n request.getDb(), request.getTbl(), request.getTxnId(), DebugUtil.printId(request.getLoadId()),\n clientAddr);\n LOG.debug(\"stream load put request: {}\", request);\n\n TStreamLoadPutResult result = new TStreamLoadPutResult();\n TStatus status = new TStatus(TStatusCode.OK);\n result.setStatus(status);\n try {\n result.setParams(streamLoadPutImpl(request));\n } catch (UserException e) {\n LOG.warn(\"failed to get stream load plan: {}\", e.getMessage());\n status.setStatus_code(TStatusCode.ANALYSIS_ERROR);\n status.addToError_msgs(e.getMessage());\n } catch (Throwable e) {\n LOG.warn(\"catch unknown result.\", e);\n status.setStatus_code(TStatusCode.INTERNAL_ERROR);\n status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));\n return result;\n }\n return result;\n }\n\n private TExecPlanFragmentParams streamLoadPutImpl(TStreamLoadPutRequest request) throws UserException {\n String cluster = request.getCluster();\n if (Strings.isNullOrEmpty(cluster)) {\n cluster = SystemInfoService.DEFAULT_CLUSTER;\n }\n\n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n String fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\n Database db = globalStateMgr.getDb(fullDbName);\n if (db == null) {\n String dbName = fullDbName;\n if (Strings.isNullOrEmpty(request.getCluster())) {\n dbName = request.getDb();\n }\n throw new UserException(\"unknown database, database=\" + dbName);\n }\n long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? 
request.getThrift_rpc_timeout_ms() : 5000;\n if (!db.tryReadLock(timeoutMs, TimeUnit.MILLISECONDS)) {\n throw new UserException(\"get database read lock timeout, database=\" + fullDbName);\n }\n try {\n Table table = db.getTable(request.getTbl());\n if (table == null) {\n throw new UserException(\"unknown table, table=\" + request.getTbl());\n }\n if (!(table instanceof OlapTable)) {\n throw new UserException(\"load table type is not OlapTable, type=\" + table.getClass());\n }\n if (table instanceof MaterializedView) {\n throw new UserException(String.format(\n \"The data of '%s' cannot be inserted because '%s' is a materialized view,\" +\n \"and the data of materialized view must be consistent with the base table.\",\n table.getName(), table.getName()));\n }\n StreamLoadTask streamLoadTask = StreamLoadTask.fromTStreamLoadPutRequest(request, db);\n StreamLoadPlanner planner = new StreamLoadPlanner(db, (OlapTable) table, streamLoadTask);\n TExecPlanFragmentParams plan = planner.plan(streamLoadTask.getId());\n \n TransactionState txnState =\n GlobalStateMgr.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), request.getTxnId());\n if (txnState == null) {\n throw new UserException(\"txn does not exist: \" + request.getTxnId());\n }\n txnState.addTableIndexes((OlapTable) table);\n\n return plan;\n } finally {\n db.readUnlock();\n }\n }\n\n @Override\n public TStatus snapshotLoaderReport(TSnapshotLoaderReportRequest request) throws TException {\n if (GlobalStateMgr.getCurrentState().getBackupHandler().report(request.getTask_type(), request.getJob_id(),\n request.getTask_id(), request.getFinished_num(), request.getTotal_num())) {\n return new TStatus(TStatusCode.OK);\n }\n return new TStatus(TStatusCode.CANCELLED);\n }\n\n @Override\n public TRefreshTableResponse refreshTable(TRefreshTableRequest request) throws TException {\n try {\n \n if (request.getCatalog_name() == null) {\n request.setCatalog_name(InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME);\n }\n 
GlobalStateMgr.getCurrentState().refreshExternalTable(new TableName(request.getCatalog_name(),\n request.getDb_name(), request.getTable_name()), request.getPartitions());\n return new TRefreshTableResponse(new TStatus(TStatusCode.OK));\n } catch (DdlException e) {\n TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\n status.setError_msgs(Lists.newArrayList(e.getMessage()));\n return new TRefreshTableResponse(status);\n }\n }\n\n private TNetworkAddress getClientAddr() {\n ThriftServerContext connectionContext = ThriftServerEventProcessor.getConnectionContext();\n \n if (connectionContext != null) {\n return connectionContext.getClient();\n }\n return null;\n }\n\n private String getClientAddrAsString() {\n TNetworkAddress addr = getClientAddr();\n return addr == null ? \"unknown\" : addr.hostname;\n }\n\n @Override\n public TGetTableMetaResponse getTableMeta(TGetTableMetaRequest request) throws TException {\n return masterImpl.getTableMeta(request);\n }\n\n @Override\n public TBeginRemoteTxnResponse beginRemoteTxn(TBeginRemoteTxnRequest request) throws TException {\n return masterImpl.beginRemoteTxn(request);\n }\n\n @Override\n public TCommitRemoteTxnResponse commitRemoteTxn(TCommitRemoteTxnRequest request) throws TException {\n return masterImpl.commitRemoteTxn(request);\n }\n\n @Override\n public TAbortRemoteTxnResponse abortRemoteTxn(TAbortRemoteTxnRequest request) throws TException {\n return masterImpl.abortRemoteTxn(request);\n }\n\n @Override\n public TSetConfigResponse setConfig(TSetConfigRequest request) throws TException {\n try {\n Preconditions.checkState(request.getKeys().size() == request.getValues().size());\n Map configs = new HashMap<>();\n for (int i = 0; i < request.getKeys().size(); i++) {\n configs.put(request.getKeys().get(i), request.getValues().get(i));\n }\n\n GlobalStateMgr.getCurrentState().setFrontendConfig(configs);\n return new TSetConfigResponse(new TStatus(TStatusCode.OK));\n } catch (DdlException e) {\n TStatus status = new 
TStatus(TStatusCode.INTERNAL_ERROR);\n status.setError_msgs(Lists.newArrayList(e.getMessage()));\n return new TSetConfigResponse(status);\n }\n }\n}", "context_after": "class FrontendServiceImpl implements FrontendService.Iface {\n private static final Logger LOG = LogManager.getLogger(MasterImpl.class);\n private MasterImpl masterImpl;\n private ExecuteEnv exeEnv;\n\n public FrontendServiceImpl(ExecuteEnv exeEnv) {\n masterImpl = new MasterImpl();\n this.exeEnv = exeEnv;\n }\n\n @Override\n public TGetDbsResult getDbNames(TGetDbsParams params) throws TException {\n LOG.debug(\"get db request: {}\", params);\n TGetDbsResult result = new TGetDbsResult();\n\n List dbs = Lists.newArrayList();\n PatternMatcher matcher = null;\n if (params.isSetPattern()) {\n try {\n matcher = PatternMatcher.createMysqlPattern(params.getPattern(),\n CaseSensibility.DATABASE.getCaseSensibility());\n } catch (AnalysisException e) {\n throw new TException(\"Pattern is in bad format: \" + params.getPattern());\n }\n }\n\n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n List dbNames = globalStateMgr.getDbNames();\n LOG.debug(\"get db names: {}\", dbNames);\n\n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = UserIdentity.fromThrift(params.current_user_ident);\n } else {\n currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n }\n for (String fullName : dbNames) {\n if (!globalStateMgr.getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) {\n continue;\n }\n\n final String db = ClusterNamespace.getNameFromFullName(fullName);\n if (matcher != null && !matcher.match(db)) {\n continue;\n }\n\n dbs.add(fullName);\n }\n result.setDbs(dbs);\n return result;\n }\n\n @Override\n public TGetTablesResult getTableNames(TGetTablesParams params) throws TException {\n LOG.debug(\"get table name request: {}\", params);\n TGetTablesResult result = new TGetTablesResult();\n List tablesResult = 
Lists.newArrayList();\n result.setTables(tablesResult);\n PatternMatcher matcher = null;\n if (params.isSetPattern()) {\n try {\n matcher = PatternMatcher.createMysqlPattern(params.getPattern(),\n CaseSensibility.TABLE.getCaseSensibility());\n } catch (AnalysisException e) {\n throw new TException(\"Pattern is in bad format: \" + params.getPattern());\n }\n }\n\n \n\n Database db = GlobalStateMgr.getCurrentState().getDb(params.db);\n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = UserIdentity.fromThrift(params.current_user_ident);\n } else {\n currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n }\n if (db != null) {\n for (String tableName : db.getTableNamesWithLock()) {\n LOG.debug(\"get table: {}, wait to check\", tableName);\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db,\n tableName, PrivPredicate.SHOW)) {\n continue;\n }\n\n if (matcher != null && !matcher.match(tableName)) {\n continue;\n }\n tablesResult.add(tableName);\n }\n }\n return result;\n }\n\n @Override\n public TListTableStatusResult listTableStatus(TGetTablesParams params) throws TException {\n LOG.debug(\"get list table request: {}\", params);\n TListTableStatusResult result = new TListTableStatusResult();\n List tablesResult = Lists.newArrayList();\n result.setTables(tablesResult);\n PatternMatcher matcher = null;\n if (params.isSetPattern()) {\n try {\n matcher = PatternMatcher.createMysqlPattern(params.getPattern(),\n CaseSensibility.TABLE.getCaseSensibility());\n } catch (AnalysisException e) {\n throw new TException(\"Pattern is in bad format \" + params.getPattern());\n }\n }\n\n \n\n Database db = GlobalStateMgr.getCurrentState().getDb(params.db);\n long limit = params.isSetLimit() ? 
params.getLimit() : -1;\n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = UserIdentity.fromThrift(params.current_user_ident);\n } else {\n currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n }\n if (params.isSetType() && TTableType.MATERIALIZED_VIEW.equals(params.getType())) {\n listMaterializedViewStatus(tablesResult, limit, matcher, currentUser, params.db);\n return result;\n }\n if (db != null) {\n db.readLock();\n try {\n boolean listingViews = params.isSetType() && TTableType.VIEW.equals(params.getType());\n List
tables = listingViews ? db.getViews() : db.getTables();\n for (Table table : tables) {\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db,\n table.getName(), PrivPredicate.SHOW)) {\n continue;\n }\n if (matcher != null && !matcher.match(table.getName())) {\n continue;\n }\n TTableStatus status = new TTableStatus();\n status.setName(table.getName());\n status.setType(table.getMysqlType());\n status.setEngine(table.getEngine());\n status.setComment(table.getComment());\n status.setCreate_time(table.getCreateTime());\n status.setLast_check_time(table.getLastCheckTime());\n if (listingViews) {\n View view = (View) table;\n String ddlSql = view.getInlineViewDef();\n List tblRefs = new ArrayList<>();\n view.getQueryStmt().collectTableRefs(tblRefs);\n for (TableRef tblRef : tblRefs) {\n if (!GlobalStateMgr.getCurrentState().getAuth()\n .checkTblPriv(currentUser, tblRef.getName().getDb(),\n tblRef.getName().getTbl(), PrivPredicate.SHOW)) {\n ddlSql = \"\";\n break;\n }\n }\n status.setDdl_sql(ddlSql);\n }\n tablesResult.add(status);\n \n if (limit > 0 && tablesResult.size() >= limit) {\n break;\n }\n }\n } finally {\n db.readUnlock();\n }\n }\n return result;\n }\n\n \n public void listMaterializedViewStatus(List tablesResult, long limit, PatternMatcher matcher,\n UserIdentity currentUser, String dbName) {\n Database db = GlobalStateMgr.getCurrentState().getDb(dbName);\n if (db == null) {\n LOG.warn(\"database not exists: {}\", dbName);\n return;\n }\n db.readLock();\n try {\n for (Table materializedView : db.getMaterializedViews()) {\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, dbName,\n materializedView.getName(), PrivPredicate.SHOW)) {\n continue;\n }\n if (matcher != null && !matcher.match(materializedView.getName())) {\n continue;\n }\n MaterializedView mvTable = (MaterializedView) materializedView;\n TTableStatus status = new TTableStatus();\n status.setId(String.valueOf(mvTable.getId()));\n 
status.setName(mvTable.getName());\n status.setDdl_sql(mvTable.getViewDefineSql());\n status.setRows(String.valueOf(mvTable.getRowCount()));\n status.setType(mvTable.getMysqlType());\n status.setComment(mvTable.getComment());\n tablesResult.add(status);\n if (limit > 0 && tablesResult.size() >= limit) {\n return;\n }\n }\n for (Table table : db.getTables()) {\n if (table.getType() == Table.TableType.OLAP) {\n OlapTable olapTable = (OlapTable) table;\n List visibleMaterializedViews = olapTable.getVisibleIndex();\n long baseIdx = olapTable.getBaseIndexId();\n\n for (MaterializedIndex mvIdx : visibleMaterializedViews) {\n if (baseIdx == mvIdx.getId()) {\n continue;\n }\n if (matcher != null && !matcher.match(olapTable.getIndexNameById(mvIdx.getId()))) {\n continue;\n }\n MaterializedIndexMeta mvMeta = olapTable.getVisibleIndexIdToMeta().get(mvIdx.getId());\n TTableStatus status = new TTableStatus();\n status.setId(String.valueOf(mvIdx.getId()));\n status.setName(olapTable.getIndexNameById(mvIdx.getId()));\n if (mvMeta.getOriginStmt() == null) {\n StringBuilder originStmtBuilder = new StringBuilder(\n \"create materialized view \" + olapTable.getIndexNameById(mvIdx.getId()) +\n \" as select \");\n String groupByString = \"\";\n for (Column column : mvMeta.getSchema()) {\n if (column.isKey()) {\n groupByString += column.getName() + \",\";\n }\n }\n originStmtBuilder.append(groupByString);\n for (Column column : mvMeta.getSchema()) {\n if (!column.isKey()) {\n originStmtBuilder.append(column.getAggregationType().toString()).append(\"(\")\n .append(column.getName()).append(\")\").append(\",\");\n }\n }\n originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());\n originStmtBuilder.append(\" from \").append(olapTable.getName()).append(\" group by \")\n .append(groupByString);\n originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());\n status.setDdl_sql(originStmtBuilder.toString());\n } else {\n 
status.setDdl_sql(mvMeta.getOriginStmt().replace(\"\\n\", \"\").replace(\"\\t\", \"\")\n .replaceAll(\"[ ]+\", \" \"));\n }\n status.setRows(String.valueOf(mvIdx.getRowCount()));\n \n status.setType(\"\");\n status.setComment(\"\");\n tablesResult.add(status);\n if (limit > 0 && tablesResult.size() >= limit) {\n return;\n }\n }\n }\n }\n } finally {\n db.readUnlock();\n }\n }\n\n @Override\n public TGetTaskInfoResult getTasks(TGetTasksParams params) throws TException {\n LOG.debug(\"get show task request: {}\", params);\n TGetTaskInfoResult result = new TGetTaskInfoResult();\n List tasksResult = Lists.newArrayList();\n result.setTasks(tasksResult);\n\n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = UserIdentity.fromThrift(params.current_user_ident);\n }\n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n TaskManager taskManager = globalStateMgr.getTaskManager();\n List taskList = taskManager.showTasks(null);\n\n for (Task task : taskList) {\n\n if (!globalStateMgr.getAuth().checkDbPriv(currentUser, task.getDbName(), PrivPredicate.SHOW)) {\n continue;\n }\n\n TTaskInfo info = new TTaskInfo();\n info.setTask_name(task.getName());\n info.setCreate_time(task.getCreateTime() / 1000);\n \n info.setSchedule(\"MANUAL\");\n info.setDatabase(ClusterNamespace.getNameFromFullName(task.getDbName()));\n info.setDefinition(task.getDefinition());\n info.setExpire_time(task.getExpireTime() / 1000);\n tasksResult.add(info);\n }\n\n return result;\n }\n\n @Override\n public TGetTaskRunInfoResult getTaskRuns(TGetTasksParams params) throws TException {\n LOG.debug(\"get show task run request: {}\", params);\n TGetTaskRunInfoResult result = new TGetTaskRunInfoResult();\n List tasksResult = Lists.newArrayList();\n result.setTask_runs(tasksResult);\n\n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = UserIdentity.fromThrift(params.current_user_ident);\n }\n GlobalStateMgr globalStateMgr 
= GlobalStateMgr.getCurrentState();\n TaskManager taskManager = globalStateMgr.getTaskManager();\n List taskRunList = taskManager.showTaskRunStatus(null);\n\n for (TaskRunStatus status : taskRunList) {\n\n if (!globalStateMgr.getAuth().checkDbPriv(currentUser, status.getDbName(), PrivPredicate.SHOW)) {\n continue;\n }\n\n TTaskRunInfo info = new TTaskRunInfo();\n info.setQuery_id(status.getQueryId());\n info.setTask_name(status.getTaskName());\n info.setCreate_time(status.getCreateTime() / 1000);\n info.setFinish_time(status.getFinishTime() / 1000);\n info.setState(status.getState().toString());\n info.setDatabase(ClusterNamespace.getNameFromFullName(status.getDbName()));\n info.setDefinition(status.getDefinition());\n info.setError_code(status.getErrorCode());\n info.setError_message(status.getErrorMessage());\n info.setExpire_time(status.getExpireTime() / 1000);\n tasksResult.add(info);\n }\n return result;\n }\n\n @Override\n public TGetDBPrivsResult getDBPrivs(TGetDBPrivsParams params) throws TException {\n LOG.debug(\"get database privileges request: {}\", params);\n TGetDBPrivsResult result = new TGetDBPrivsResult();\n List tDBPrivs = Lists.newArrayList();\n result.setDb_privs(tDBPrivs);\n UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);\n List dbPrivEntries = GlobalStateMgr.getCurrentState().getAuth().getDBPrivEntries(currentUser);\n \n for (DbPrivEntry entry : dbPrivEntries) {\n PrivBitSet savedPrivs = entry.getPrivSet();\n String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;\n String userIdentStr = currentUser.toString().replace(clusterPrefix, \"\");\n String dbName = ClusterNamespace.getNameFromFullName(entry.getOrigDb());\n boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT);\n List tPrivs = savedPrivs.toPrivilegeList().stream().map(\n priv -> {\n TDBPrivDesc privDesc = new TDBPrivDesc();\n privDesc.setDb_name(dbName);\n privDesc.setIs_grantable(isGrantable);\n 
privDesc.setUser_ident_str(userIdentStr);\n privDesc.setPriv(priv.getUpperNameForMysql());\n return privDesc;\n }\n ).collect(Collectors.toList());\n if (savedPrivs.satisfy(PrivPredicate.LOAD)) {\n \n tPrivs.addAll(Lists.newArrayList(\"INSERT\", \"UPDATE\", \"DELETE\").stream().map(priv -> {\n TDBPrivDesc privDesc = new TDBPrivDesc();\n privDesc.setDb_name(dbName);\n privDesc.setIs_grantable(isGrantable);\n privDesc.setUser_ident_str(userIdentStr);\n privDesc.setPriv(priv);\n return privDesc;\n }).collect(Collectors.toList()));\n }\n tDBPrivs.addAll(tPrivs);\n }\n return result;\n }\n\n @Override\n public TGetTablePrivsResult getTablePrivs(TGetTablePrivsParams params) throws TException {\n LOG.debug(\"get table privileges request: {}\", params);\n TGetTablePrivsResult result = new TGetTablePrivsResult();\n List tTablePrivs = Lists.newArrayList();\n result.setTable_privs(tTablePrivs);\n UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);\n List tablePrivEntries =\n GlobalStateMgr.getCurrentState().getAuth().getTablePrivEntries(currentUser);\n \n for (TablePrivEntry entry : tablePrivEntries) {\n PrivBitSet savedPrivs = entry.getPrivSet();\n String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;\n String userIdentStr = currentUser.toString().replace(clusterPrefix, \"\");\n String dbName = ClusterNamespace.getNameFromFullName(entry.getOrigDb());\n boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT);\n List tPrivs = savedPrivs.toPrivilegeList().stream().map(\n priv -> {\n TTablePrivDesc privDesc = new TTablePrivDesc();\n privDesc.setDb_name(dbName);\n privDesc.setTable_name(entry.getOrigTbl());\n privDesc.setIs_grantable(isGrantable);\n privDesc.setUser_ident_str(userIdentStr);\n privDesc.setPriv(priv.getUpperNameForMysql());\n return privDesc;\n }\n ).collect(Collectors.toList());\n if (savedPrivs.satisfy(PrivPredicate.LOAD)) {\n \n tPrivs.addAll(Lists.newArrayList(\"INSERT\", \"UPDATE\", 
\"DELETE\").stream().map(priv -> {\n TTablePrivDesc privDesc = new TTablePrivDesc();\n privDesc.setDb_name(dbName);\n privDesc.setTable_name(entry.getOrigTbl());\n privDesc.setIs_grantable(isGrantable);\n privDesc.setUser_ident_str(userIdentStr);\n privDesc.setPriv(priv);\n return privDesc;\n }).collect(Collectors.toList()));\n }\n tTablePrivs.addAll(tPrivs);\n }\n return result;\n }\n\n @Override\n public TGetUserPrivsResult getUserPrivs(TGetUserPrivsParams params) throws TException {\n LOG.debug(\"get user privileges request: {}\", params);\n TGetUserPrivsResult result = new TGetUserPrivsResult();\n List tUserPrivs = Lists.newArrayList();\n result.setUser_privs(tUserPrivs);\n UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident);\n Auth currAuth = GlobalStateMgr.getCurrentState().getAuth();\n UserPrivTable userPrivTable = currAuth.getUserPrivTable();\n List userIdents = Lists.newArrayList();\n \n userIdents.add(currentUser);\n\n \n \n \n \n \n \n \n \n\n \n for (UserIdentity userIdent : userIdents) {\n PrivBitSet savedPrivs = new PrivBitSet();\n userPrivTable.getPrivs(userIdent, savedPrivs);\n String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;\n String userIdentStr = currentUser.toString().replace(clusterPrefix, \"\");\n \n List tPrivs = savedPrivs.toPrivilegeList().stream().map(\n priv -> {\n boolean isGrantable =\n Privilege.NODE_PRIV != priv \n && userPrivTable.hasPriv(userIdent,\n PrivPredicate.GRANT);\n TUserPrivDesc privDesc = new TUserPrivDesc();\n privDesc.setIs_grantable(isGrantable);\n privDesc.setUser_ident_str(userIdentStr);\n privDesc.setPriv(priv.getUpperNameForMysql());\n return privDesc;\n }\n ).collect(Collectors.toList());\n tUserPrivs.addAll(tPrivs);\n }\n return result;\n }\n\n @Override\n public TFeResult updateExportTaskStatus(TUpdateExportTaskStatusRequest request) throws TException {\n TStatus status = new TStatus(TStatusCode.OK);\n TFeResult result = new 
TFeResult(FrontendServiceVersion.V1, status);\n\n return result;\n }\n\n @Override\n public TDescribeTableResult describeTable(TDescribeTableParams params) throws TException {\n LOG.debug(\"get desc table request: {}\", params);\n TDescribeTableResult result = new TDescribeTableResult();\n List columns = Lists.newArrayList();\n result.setColumns(columns);\n\n \n UserIdentity currentUser = null;\n if (params.isSetCurrent_user_ident()) {\n currentUser = UserIdentity.fromThrift(params.current_user_ident);\n } else {\n currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip);\n }\n long limit = params.isSetLimit() ? params.getLimit() : -1;\n\n \n \n \n \n if (!params.isSetDb() && StringUtils.isBlank(params.getTable_name())) {\n describeWithoutDbAndTable(currentUser, columns, limit);\n return result;\n }\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db,\n params.getTable_name(), PrivPredicate.SHOW)) {\n return result;\n }\n Database db = GlobalStateMgr.getCurrentState().getDb(params.db);\n if (db != null) {\n db.readLock();\n try {\n Table table = db.getTable(params.getTable_name());\n setColumnDesc(columns, table, limit, false, params.db, params.getTable_name());\n } finally {\n db.readUnlock();\n }\n }\n return result;\n }\n\n \n \n private void describeWithoutDbAndTable(UserIdentity currentUser, List columns, long limit) {\n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n List dbNames = globalStateMgr.getDbNames();\n boolean reachLimit;\n for (String fullName : dbNames) {\n if (!GlobalStateMgr.getCurrentState().getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) {\n continue;\n }\n Database db = GlobalStateMgr.getCurrentState().getDb(fullName);\n if (db != null) {\n for (String tableName : db.getTableNamesWithLock()) {\n LOG.debug(\"get table: {}, wait to check\", tableName);\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, fullName,\n 
tableName, PrivPredicate.SHOW)) {\n continue;\n }\n db.readLock();\n try {\n Table table = db.getTable(tableName);\n reachLimit = setColumnDesc(columns, table, limit, true, fullName, tableName);\n } finally {\n db.readUnlock();\n }\n if (reachLimit) {\n return;\n }\n }\n }\n }\n }\n\n private boolean setColumnDesc(List columns, Table table, long limit,\n boolean needSetDbAndTable, String db, String tbl) {\n if (table != null) {\n String tableKeysType = \"\";\n if (TableType.OLAP.equals(table.getType())) {\n OlapTable olapTable = (OlapTable) table;\n tableKeysType = olapTable.getKeysType().name().substring(0, 3).toUpperCase();\n }\n for (Column column : table.getBaseSchema()) {\n final TColumnDesc desc =\n new TColumnDesc(column.getName(), column.getPrimitiveType().toThrift());\n final Integer precision = column.getType().getPrecision();\n if (precision != null) {\n desc.setColumnPrecision(precision);\n }\n final Integer columnLength = column.getType().getColumnSize();\n if (columnLength != null) {\n desc.setColumnLength(columnLength);\n }\n final Integer decimalDigits = column.getType().getDecimalDigits();\n if (decimalDigits != null) {\n desc.setColumnScale(decimalDigits);\n }\n if (column.isKey()) {\n \n desc.setColumnKey(tableKeysType);\n } else {\n desc.setColumnKey(\"\");\n }\n final TColumnDef colDef = new TColumnDef(desc);\n final String comment = column.getComment();\n if (comment != null) {\n colDef.setComment(comment);\n }\n columns.add(colDef);\n \n if (needSetDbAndTable) {\n columns.get(columns.size() - 1).columnDesc.setDbName(db);\n columns.get(columns.size() - 1).columnDesc.setTableName(tbl);\n }\n \n if (limit > 0 && columns.size() >= limit) {\n return true;\n }\n }\n }\n return false;\n }\n\n @Override\n public TShowVariableResult showVariables(TShowVariableRequest params) throws TException {\n TShowVariableResult result = new TShowVariableResult();\n Map map = Maps.newHashMap();\n result.setVariables(map);\n \n ConnectContext ctx = 
exeEnv.getScheduler().getContext(params.getThreadId());\n if (ctx == null) {\n return result;\n }\n List> rows = VariableMgr.dump(SetType.fromThrift(params.getVarType()), ctx.getSessionVariable(),\n null);\n for (List row : rows) {\n map.put(row.get(0), row.get(1));\n }\n return result;\n }\n\n @Override\n public TReportExecStatusResult reportExecStatus(TReportExecStatusParams params) throws TException {\n return QeProcessorImpl.INSTANCE.reportExecStatus(params, getClientAddr());\n }\n\n @Override\n public TMasterResult finishTask(TFinishTaskRequest request) throws TException {\n return masterImpl.finishTask(request);\n }\n\n @Override\n public TMasterResult report(TReportRequest request) throws TException {\n return masterImpl.report(request);\n }\n\n @Override\n public TFetchResourceResult fetchResource() throws TException {\n throw new TException(\"not supported\");\n }\n\n @Override\n public TFeResult isMethodSupported(TIsMethodSupportedRequest request) throws TException {\n TStatus status = new TStatus(TStatusCode.OK);\n TFeResult result = new TFeResult(FrontendServiceVersion.V1, status);\n switch (request.getFunction_name()) {\n case \"STREAMING_MINI_LOAD\":\n break;\n default:\n status.setStatus_code(NOT_IMPLEMENTED_ERROR);\n break;\n }\n return result;\n }\n\n @Override\n public TMasterOpResult forward(TMasterOpRequest params) throws TException {\n TNetworkAddress clientAddr = getClientAddr();\n if (clientAddr != null) {\n Frontend fe = GlobalStateMgr.getCurrentState().getFeByHost(clientAddr.getHostname());\n if (fe == null) {\n LOG.warn(\"reject request from invalid host. 
client: {}\", clientAddr);\n throw new TException(\"request from invalid host was rejected.\");\n }\n }\n\n \n LOG.info(\"receive forwarded stmt {} from FE: {}\", params.getStmt_id(), clientAddr.getHostname());\n ConnectContext context = new ConnectContext(null);\n ConnectProcessor processor = new ConnectProcessor(context);\n TMasterOpResult result = processor.proxyExecute(params);\n ConnectContext.remove();\n return result;\n }\n\n private void checkPasswordAndPrivs(String cluster, String user, String passwd, String db, String tbl,\n String clientIp, PrivPredicate predicate) throws AuthenticationException {\n\n final String fullUserName = ClusterNamespace.getFullName(cluster, user);\n final String fullDbName = ClusterNamespace.getFullName(cluster, db);\n List currentUser = Lists.newArrayList();\n if (!GlobalStateMgr.getCurrentState().getAuth()\n .checkPlainPassword(fullUserName, clientIp, passwd, currentUser)) {\n throw new AuthenticationException(\"Access denied for \" + fullUserName + \"@\" + clientIp);\n }\n\n Preconditions.checkState(currentUser.size() == 1);\n if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser.get(0), fullDbName, tbl, predicate)) {\n throw new AuthenticationException(\n \"Access denied; you need (at least one of) the LOAD privilege(s) for this operation\");\n }\n }\n\n @Override\n public TLoadTxnBeginResult loadTxnBegin(TLoadTxnBeginRequest request) throws TException {\n String clientAddr = getClientAddrAsString();\n LOG.info(\"receive txn begin request, db: {}, tbl: {}, label: {}, backend: {}\",\n request.getDb(), request.getTbl(), request.getLabel(), clientAddr);\n LOG.debug(\"txn begin request: {}\", request);\n\n TLoadTxnBeginResult result = new TLoadTxnBeginResult();\n \n if (!GlobalStateMgr.getCurrentState().isMaster()) {\n TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\n status.setError_msgs(Lists.newArrayList(\"current fe is not master\"));\n result.setStatus(status);\n return result;\n }\n\n TStatus 
status = new TStatus(TStatusCode.OK);\n result.setStatus(status);\n try {\n result.setTxnId(loadTxnBeginImpl(request, clientAddr));\n } catch (DuplicatedRequestException e) {\n \n LOG.info(\"duplicate request for stream load. request id: {}, txn_id: {}\", e.getDuplicatedRequestId(),\n e.getTxnId());\n result.setTxnId(e.getTxnId());\n } catch (LabelAlreadyUsedException e) {\n status.setStatus_code(TStatusCode.LABEL_ALREADY_EXISTS);\n status.addToError_msgs(e.getMessage());\n result.setJob_status(e.getJobStatus());\n } catch (UserException e) {\n LOG.warn(\"failed to begin: {}\", e.getMessage());\n status.setStatus_code(TStatusCode.ANALYSIS_ERROR);\n status.addToError_msgs(e.getMessage());\n } catch (Throwable e) {\n LOG.warn(\"catch unknown result.\", e);\n status.setStatus_code(TStatusCode.INTERNAL_ERROR);\n status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));\n return result;\n }\n return result;\n }\n\n private long loadTxnBeginImpl(TLoadTxnBeginRequest request, String clientIp) throws UserException {\n String cluster = request.getCluster();\n if (Strings.isNullOrEmpty(cluster)) {\n cluster = SystemInfoService.DEFAULT_CLUSTER;\n }\n\n checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\n request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);\n\n \n if (Strings.isNullOrEmpty(request.getLabel())) {\n throw new UserException(\"empty label in begin request\");\n }\n \n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n String fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\n Database db = globalStateMgr.getDb(fullDbName);\n if (db == null) {\n String dbName = fullDbName;\n if (Strings.isNullOrEmpty(request.getCluster())) {\n dbName = request.getDb();\n }\n throw new UserException(\"unknown database, database=\" + dbName);\n }\n Table table = db.getTable(request.getTbl());\n if (table == null) {\n throw new UserException(\"unknown table \\\"\" + request.getDb() + \".\" + 
request.getTbl() + \"\\\"\");\n }\n\n \n long timeoutSecond = request.isSetTimeout() ? request.getTimeout() : Config.stream_load_default_timeout_second;\n MetricRepo.COUNTER_LOAD_ADD.increase(1L);\n return GlobalStateMgr.getCurrentGlobalTransactionMgr().beginTransaction(\n db.getId(), Lists.newArrayList(table.getId()), request.getLabel(), request.getRequest_id(),\n new TxnCoordinator(TxnSourceType.BE, clientIp),\n TransactionState.LoadJobSourceType.BACKEND_STREAMING, -1, timeoutSecond);\n }\n\n @Override\n public TLoadTxnCommitResult loadTxnCommit(TLoadTxnCommitRequest request) throws TException {\n String clientAddr = getClientAddrAsString();\n LOG.info(\"receive txn commit request. db: {}, tbl: {}, txn_id: {}, backend: {}\",\n request.getDb(), request.getTbl(), request.getTxnId(), clientAddr);\n LOG.debug(\"txn commit request: {}\", request);\n\n TLoadTxnCommitResult result = new TLoadTxnCommitResult();\n \n if (!GlobalStateMgr.getCurrentState().isMaster()) {\n TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\n status.setError_msgs(Lists.newArrayList(\"current fe is not master\"));\n result.setStatus(status);\n return result;\n }\n\n TStatus status = new TStatus(TStatusCode.OK);\n result.setStatus(status);\n try {\n if (!loadTxnCommitImpl(request)) {\n \n status.setStatus_code(TStatusCode.PUBLISH_TIMEOUT);\n status.addToError_msgs(\"Publish timeout. 
The data will be visible after a while\");\n }\n } catch (UserException e) {\n LOG.warn(\"failed to commit txn_id: {}: {}\", request.getTxnId(), e.getMessage());\n status.setStatus_code(TStatusCode.ANALYSIS_ERROR);\n status.addToError_msgs(e.getMessage());\n } catch (Throwable e) {\n LOG.warn(\"catch unknown result.\", e);\n status.setStatus_code(TStatusCode.INTERNAL_ERROR);\n status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));\n return result;\n }\n return result;\n }\n\n \n \n\n @Override\n public TLoadTxnRollbackResult loadTxnRollback(TLoadTxnRollbackRequest request) throws TException {\n String clientAddr = getClientAddrAsString();\n LOG.info(\"receive txn rollback request. db: {}, tbl: {}, txn_id: {}, reason: {}, backend: {}\",\n request.getDb(), request.getTbl(), request.getTxnId(), request.getReason(), clientAddr);\n LOG.debug(\"txn rollback request: {}\", request);\n\n TLoadTxnRollbackResult result = new TLoadTxnRollbackResult();\n \n if (!GlobalStateMgr.getCurrentState().isMaster()) {\n TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\n status.setError_msgs(Lists.newArrayList(\"current fe is not master\"));\n result.setStatus(status);\n return result;\n }\n\n TStatus status = new TStatus(TStatusCode.OK);\n result.setStatus(status);\n try {\n loadTxnRollbackImpl(request);\n } catch (TransactionNotFoundException e) {\n LOG.warn(\"failed to rollback txn {}: {}\", request.getTxnId(), e.getMessage());\n status.setStatus_code(TStatusCode.TXN_NOT_EXISTS);\n status.addToError_msgs(e.getMessage());\n } catch (UserException e) {\n LOG.warn(\"failed to rollback txn {}: {}\", request.getTxnId(), e.getMessage());\n status.setStatus_code(TStatusCode.ANALYSIS_ERROR);\n status.addToError_msgs(e.getMessage());\n } catch (Throwable e) {\n LOG.warn(\"catch unknown result.\", e);\n status.setStatus_code(TStatusCode.INTERNAL_ERROR);\n status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));\n return result;\n }\n\n return result;\n }\n\n private void 
loadTxnRollbackImpl(TLoadTxnRollbackRequest request) throws UserException {\n String cluster = request.getCluster();\n if (Strings.isNullOrEmpty(cluster)) {\n cluster = SystemInfoService.DEFAULT_CLUSTER;\n }\n\n if (request.isSetAuth_code()) {\n \n } else {\n checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(),\n request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD);\n }\n String dbName = ClusterNamespace.getFullName(cluster, request.getDb());\n Database db = GlobalStateMgr.getCurrentState().getDb(dbName);\n if (db == null) {\n throw new MetaNotFoundException(\"db \" + request.getDb() + \" does not exist\");\n }\n long dbId = db.getId();\n GlobalStateMgr.getCurrentGlobalTransactionMgr().abortTransaction(dbId, request.getTxnId(),\n request.isSetReason() ? request.getReason() : \"system cancel\",\n TxnCommitAttachment.fromThrift(request.getTxnCommitAttachment()));\n }\n\n @Override\n public TStreamLoadPutResult streamLoadPut(TStreamLoadPutRequest request) {\n String clientAddr = getClientAddrAsString();\n LOG.info(\"receive stream load put request. 
db:{}, tbl: {}, txn_id: {}, load id: {}, backend: {}\",\n request.getDb(), request.getTbl(), request.getTxnId(), DebugUtil.printId(request.getLoadId()),\n clientAddr);\n LOG.debug(\"stream load put request: {}\", request);\n\n TStreamLoadPutResult result = new TStreamLoadPutResult();\n TStatus status = new TStatus(TStatusCode.OK);\n result.setStatus(status);\n try {\n result.setParams(streamLoadPutImpl(request));\n } catch (UserException e) {\n LOG.warn(\"failed to get stream load plan: {}\", e.getMessage());\n status.setStatus_code(TStatusCode.ANALYSIS_ERROR);\n status.addToError_msgs(e.getMessage());\n } catch (Throwable e) {\n LOG.warn(\"catch unknown result.\", e);\n status.setStatus_code(TStatusCode.INTERNAL_ERROR);\n status.addToError_msgs(Strings.nullToEmpty(e.getMessage()));\n return result;\n }\n return result;\n }\n\n private TExecPlanFragmentParams streamLoadPutImpl(TStreamLoadPutRequest request) throws UserException {\n String cluster = request.getCluster();\n if (Strings.isNullOrEmpty(cluster)) {\n cluster = SystemInfoService.DEFAULT_CLUSTER;\n }\n\n GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();\n String fullDbName = ClusterNamespace.getFullName(cluster, request.getDb());\n Database db = globalStateMgr.getDb(fullDbName);\n if (db == null) {\n String dbName = fullDbName;\n if (Strings.isNullOrEmpty(request.getCluster())) {\n dbName = request.getDb();\n }\n throw new UserException(\"unknown database, database=\" + dbName);\n }\n long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? 
request.getThrift_rpc_timeout_ms() : 5000;\n if (!db.tryReadLock(timeoutMs, TimeUnit.MILLISECONDS)) {\n throw new UserException(\"get database read lock timeout, database=\" + fullDbName);\n }\n try {\n Table table = db.getTable(request.getTbl());\n if (table == null) {\n throw new UserException(\"unknown table, table=\" + request.getTbl());\n }\n if (!(table instanceof OlapTable)) {\n throw new UserException(\"load table type is not OlapTable, type=\" + table.getClass());\n }\n if (table instanceof MaterializedView) {\n throw new UserException(String.format(\n \"The data of '%s' cannot be inserted because '%s' is a materialized view,\" +\n \"and the data of materialized view must be consistent with the base table.\",\n table.getName(), table.getName()));\n }\n StreamLoadTask streamLoadTask = StreamLoadTask.fromTStreamLoadPutRequest(request, db);\n StreamLoadPlanner planner = new StreamLoadPlanner(db, (OlapTable) table, streamLoadTask);\n TExecPlanFragmentParams plan = planner.plan(streamLoadTask.getId());\n \n TransactionState txnState =\n GlobalStateMgr.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), request.getTxnId());\n if (txnState == null) {\n throw new UserException(\"txn does not exist: \" + request.getTxnId());\n }\n txnState.addTableIndexes((OlapTable) table);\n\n return plan;\n } finally {\n db.readUnlock();\n }\n }\n\n @Override\n public TStatus snapshotLoaderReport(TSnapshotLoaderReportRequest request) throws TException {\n if (GlobalStateMgr.getCurrentState().getBackupHandler().report(request.getTask_type(), request.getJob_id(),\n request.getTask_id(), request.getFinished_num(), request.getTotal_num())) {\n return new TStatus(TStatusCode.OK);\n }\n return new TStatus(TStatusCode.CANCELLED);\n }\n\n @Override\n public TRefreshTableResponse refreshTable(TRefreshTableRequest request) throws TException {\n try {\n \n if (request.getCatalog_name() == null) {\n request.setCatalog_name(InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME);\n }\n 
GlobalStateMgr.getCurrentState().refreshExternalTable(new TableName(request.getCatalog_name(),\n request.getDb_name(), request.getTable_name()), request.getPartitions());\n return new TRefreshTableResponse(new TStatus(TStatusCode.OK));\n } catch (DdlException e) {\n TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);\n status.setError_msgs(Lists.newArrayList(e.getMessage()));\n return new TRefreshTableResponse(status);\n }\n }\n\n private TNetworkAddress getClientAddr() {\n ThriftServerContext connectionContext = ThriftServerEventProcessor.getConnectionContext();\n \n if (connectionContext != null) {\n return connectionContext.getClient();\n }\n return null;\n }\n\n private String getClientAddrAsString() {\n TNetworkAddress addr = getClientAddr();\n return addr == null ? \"unknown\" : addr.hostname;\n }\n\n @Override\n public TGetTableMetaResponse getTableMeta(TGetTableMetaRequest request) throws TException {\n return masterImpl.getTableMeta(request);\n }\n\n @Override\n public TBeginRemoteTxnResponse beginRemoteTxn(TBeginRemoteTxnRequest request) throws TException {\n return masterImpl.beginRemoteTxn(request);\n }\n\n @Override\n public TCommitRemoteTxnResponse commitRemoteTxn(TCommitRemoteTxnRequest request) throws TException {\n return masterImpl.commitRemoteTxn(request);\n }\n\n @Override\n public TAbortRemoteTxnResponse abortRemoteTxn(TAbortRemoteTxnRequest request) throws TException {\n return masterImpl.abortRemoteTxn(request);\n }\n\n @Override\n public TSetConfigResponse setConfig(TSetConfigRequest request) throws TException {\n try {\n Preconditions.checkState(request.getKeys().size() == request.getValues().size());\n Map configs = new HashMap<>();\n for (int i = 0; i < request.getKeys().size(); i++) {\n configs.put(request.getKeys().get(i), request.getValues().get(i));\n }\n\n GlobalStateMgr.getCurrentState().setFrontendConfig(configs);\n return new TSetConfigResponse(new TStatus(TStatusCode.OK));\n } catch (DdlException e) {\n TStatus status = new 
TStatus(TStatusCode.INTERNAL_ERROR);\n status.setError_msgs(Lists.newArrayList(e.getMessage()));\n return new TSetConfigResponse(status);\n }\n }\n}" }, { "comment": "Great point, adding an individual `try-catch` block.", "method_body": "public void shutdown() throws Exception {\n Throwable firstException = null;\n\n try {\n futureExecutor.shutdownNow();\n ioExecutor.shutdownNow();\n } catch (Throwable t) {\n firstException = t;\n }\n\n try {\n shuffleMaster.close();\n } catch (Throwable t) {\n firstException = firstException == null ? t : firstException;\n }\n\n libraryCacheManager.shutdown();\n\n if (firstException != null) {\n ExceptionUtils.rethrowException(\n firstException, \"Error while shutting down JobManager services\");\n }\n }", "target_code": "ioExecutor.shutdownNow();", "method_body_after": "public void shutdown() throws Exception {\n Throwable exception = null;\n\n try {\n ExecutorUtils.gracefulShutdown(\n SHUTDOWN_TIMEOUT.toMillis(), TimeUnit.MILLISECONDS, futureExecutor, ioExecutor);\n } catch (Throwable t) {\n exception = t;\n }\n\n try {\n shuffleMaster.close();\n } catch (Throwable t) {\n exception = ExceptionUtils.firstOrSuppressed(t, exception);\n }\n\n libraryCacheManager.shutdown();\n\n if (exception != null) {\n ExceptionUtils.rethrowException(\n exception, \"Error while shutting down JobManager services\");\n }\n }", "context_before": "class JobManagerSharedServices {\n\n private final ScheduledExecutorService futureExecutor;\n\n private final ScheduledExecutorService ioExecutor;\n\n private final LibraryCacheManager libraryCacheManager;\n\n private final ShuffleMaster shuffleMaster;\n\n @Nonnull private final BlobWriter blobWriter;\n\n public JobManagerSharedServices(\n ScheduledExecutorService futureExecutor,\n ScheduledExecutorService ioExecutor,\n LibraryCacheManager libraryCacheManager,\n ShuffleMaster shuffleMaster,\n @Nonnull BlobWriter blobWriter) {\n\n this.futureExecutor = checkNotNull(futureExecutor);\n this.ioExecutor = 
checkNotNull(ioExecutor);\n this.libraryCacheManager = checkNotNull(libraryCacheManager);\n this.shuffleMaster = checkNotNull(shuffleMaster);\n this.blobWriter = blobWriter;\n }\n\n public ScheduledExecutorService getFutureExecutor() {\n return futureExecutor;\n }\n\n public ScheduledExecutorService getIoExecutor() {\n return ioExecutor;\n }\n\n public LibraryCacheManager getLibraryCacheManager() {\n return libraryCacheManager;\n }\n\n public ShuffleMaster getShuffleMaster() {\n return shuffleMaster;\n }\n\n @Nonnull\n public BlobWriter getBlobWriter() {\n return blobWriter;\n }\n\n /**\n * Shutdown the {@link JobMaster} services.\n *\n *

This method makes sure all services are closed or shut down, even when an exception\n * occurred in the shutdown of one component. The first encountered exception is thrown, with\n * successive exceptions added as suppressed exceptions.\n *\n * @throws Exception The first Exception encountered during shutdown.\n */\n \n\n \n \n \n\n public static JobManagerSharedServices fromConfiguration(\n Configuration config, BlobServer blobServer, FatalErrorHandler fatalErrorHandler)\n throws Exception {\n\n checkNotNull(config);\n checkNotNull(blobServer);\n\n final String classLoaderResolveOrder =\n config.getString(CoreOptions.CLASSLOADER_RESOLVE_ORDER);\n\n final String[] alwaysParentFirstLoaderPatterns =\n CoreOptions.getParentFirstLoaderPatterns(config);\n\n final boolean failOnJvmMetaspaceOomError =\n config.getBoolean(CoreOptions.FAIL_ON_USER_CLASS_LOADING_METASPACE_OOM);\n final boolean checkClassLoaderLeak =\n config.getBoolean(CoreOptions.CHECK_LEAKED_CLASSLOADER);\n final BlobLibraryCacheManager libraryCacheManager =\n new BlobLibraryCacheManager(\n blobServer,\n BlobLibraryCacheManager.defaultClassLoaderFactory(\n FlinkUserCodeClassLoaders.ResolveOrder.fromString(\n classLoaderResolveOrder),\n alwaysParentFirstLoaderPatterns,\n failOnJvmMetaspaceOomError ? fatalErrorHandler : null,\n checkClassLoaderLeak));\n\n final int numJobManagerFutureThreads =\n config.getInteger(RestOptions.JOBMANAGER_FUTURE_THREADS);\n final ScheduledExecutorService futureExecutor =\n Executors.newScheduledThreadPool(\n (numJobManagerFutureThreads != 0)\n ? numJobManagerFutureThreads\n : Hardware.getNumberCPUCores(),\n new ExecutorThreadFactory(\"jobmanager-future\"));\n\n final int numJobManagerIoThreads = config.getInteger(RestOptions.JOBMANAGER_IO_THREADS);\n final ScheduledExecutorService ioExecutor =\n Executors.newScheduledThreadPool(\n (numJobManagerIoThreads != 0)\n ? 
numJobManagerIoThreads\n : Hardware.getNumberCPUCores(),\n new ExecutorThreadFactory(\"jobmanager-io\"));\n\n final ShuffleMasterContext shuffleMasterContext =\n new ShuffleMasterContextImpl(config, fatalErrorHandler);\n final ShuffleMaster shuffleMaster =\n ShuffleServiceLoader.loadShuffleServiceFactory(config)\n .createShuffleMaster(shuffleMasterContext);\n shuffleMaster.start();\n\n return new JobManagerSharedServices(\n futureExecutor, ioExecutor, libraryCacheManager, shuffleMaster, blobServer);\n }\n}", "context_after": "class JobManagerSharedServices {\n\n private static final Duration SHUTDOWN_TIMEOUT = Duration.ofSeconds(10);\n\n private final ScheduledExecutorService futureExecutor;\n\n private final ExecutorService ioExecutor;\n\n private final LibraryCacheManager libraryCacheManager;\n\n private final ShuffleMaster shuffleMaster;\n\n @Nonnull private final BlobWriter blobWriter;\n\n public JobManagerSharedServices(\n ScheduledExecutorService futureExecutor,\n ExecutorService ioExecutor,\n LibraryCacheManager libraryCacheManager,\n ShuffleMaster shuffleMaster,\n @Nonnull BlobWriter blobWriter) {\n\n this.futureExecutor = checkNotNull(futureExecutor);\n this.ioExecutor = checkNotNull(ioExecutor);\n this.libraryCacheManager = checkNotNull(libraryCacheManager);\n this.shuffleMaster = checkNotNull(shuffleMaster);\n this.blobWriter = blobWriter;\n }\n\n public ScheduledExecutorService getFutureExecutor() {\n return futureExecutor;\n }\n\n public Executor getIoExecutor() {\n return ioExecutor;\n }\n\n public LibraryCacheManager getLibraryCacheManager() {\n return libraryCacheManager;\n }\n\n public ShuffleMaster getShuffleMaster() {\n return shuffleMaster;\n }\n\n @Nonnull\n public BlobWriter getBlobWriter() {\n return blobWriter;\n }\n\n /**\n * Shutdown the {@link JobMaster} services.\n *\n *

This method makes sure all services are closed or shut down, even when an exception\n * occurred in the shutdown of one component. The first encountered exception is thrown, with\n * successive exceptions added as suppressed exceptions.\n *\n * @throws Exception The first Exception encountered during shutdown.\n */\n \n\n \n \n \n\n public static JobManagerSharedServices fromConfiguration(\n Configuration config, BlobServer blobServer, FatalErrorHandler fatalErrorHandler)\n throws Exception {\n\n checkNotNull(config);\n checkNotNull(blobServer);\n\n final String classLoaderResolveOrder =\n config.getString(CoreOptions.CLASSLOADER_RESOLVE_ORDER);\n\n final String[] alwaysParentFirstLoaderPatterns =\n CoreOptions.getParentFirstLoaderPatterns(config);\n\n final boolean failOnJvmMetaspaceOomError =\n config.getBoolean(CoreOptions.FAIL_ON_USER_CLASS_LOADING_METASPACE_OOM);\n final boolean checkClassLoaderLeak =\n config.getBoolean(CoreOptions.CHECK_LEAKED_CLASSLOADER);\n final BlobLibraryCacheManager libraryCacheManager =\n new BlobLibraryCacheManager(\n blobServer,\n BlobLibraryCacheManager.defaultClassLoaderFactory(\n FlinkUserCodeClassLoaders.ResolveOrder.fromString(\n classLoaderResolveOrder),\n alwaysParentFirstLoaderPatterns,\n failOnJvmMetaspaceOomError ? 
fatalErrorHandler : null,\n checkClassLoaderLeak));\n\n final int numberCPUCores = Hardware.getNumberCPUCores();\n final int jobManagerFuturePoolSize =\n config.getInteger(JobManagerOptions.JOB_MANAGER_FUTURE_POOL_SIZE, numberCPUCores);\n final ScheduledExecutorService futureExecutor =\n Executors.newScheduledThreadPool(\n jobManagerFuturePoolSize, new ExecutorThreadFactory(\"jobmanager-future\"));\n\n final int jobManagerIoPoolSize =\n config.getInteger(JobManagerOptions.JOB_MANAGER_IO_POOL_SIZE, numberCPUCores);\n final ExecutorService ioExecutor =\n Executors.newFixedThreadPool(\n jobManagerIoPoolSize, new ExecutorThreadFactory(\"jobmanager-io\"));\n\n final ShuffleMasterContext shuffleMasterContext =\n new ShuffleMasterContextImpl(config, fatalErrorHandler);\n final ShuffleMaster shuffleMaster =\n ShuffleServiceLoader.loadShuffleServiceFactory(config)\n .createShuffleMaster(shuffleMasterContext);\n shuffleMaster.start();\n\n return new JobManagerSharedServices(\n futureExecutor, ioExecutor, libraryCacheManager, shuffleMaster, blobServer);\n }\n}" }, { "comment": "We don't have anywhere that is specifically dedicated to testing RESTEasy + Undertow, so I just enhanced the security tests to also test RESTEasy on Undertow, but used this mechanism to map RESTeasy.", "method_body": "void testPost() {\n \n \n \n given()\n .header(\"Authorization\", \"Basic am9objpqb2hu\")\n .body(\"Bill\")\n .contentType(ContentType.TEXT)\n .when()\n .post(\"/foo/mapped/rest\")\n .then()\n .statusCode(200)\n .body(is(\"post success\"));\n }", "target_code": "", "method_body_after": "void testPost() {\n \n \n \n given()\n .header(\"Authorization\", \"Basic am9objpqb2hu\")\n .body(\"Bill\")\n .contentType(ContentType.TEXT)\n .when()\n .post(\"/foo/mapped/rest\")\n .then()\n .statusCode(200)\n .body(is(\"post success\"));\n }", "context_before": "class BaseAuthRestTest {\n\n @Test\n @RepeatedTest(100)\n \n\n @Test\n void testGet() {\n given()\n .header(\"Authorization\", \"Basic 
am9objpqb2hu\")\n .when()\n .get(\"/foo/mapped/rest\")\n .then()\n .statusCode(200)\n .body(is(\"get success\"));\n }\n\n}", "context_after": "class BaseAuthRestTest {\n\n @Test\n @RepeatedTest(100)\n \n\n @Test\n void testGet() {\n given()\n .header(\"Authorization\", \"Basic am9objpqb2hu\")\n .when()\n .get(\"/foo/mapped/rest\")\n .then()\n .statusCode(200)\n .body(is(\"get success\"));\n }\n\n}" }, { "comment": "True, made changes in amended commit.", "method_body": "protected void maintain() {\n AtomicInteger failures = new AtomicInteger(0);\n AtomicInteger zeroQps = new AtomicInteger(0);\n AtomicReference lastException = new AtomicReference<>(null);\n List applicationList = applications.asList();\n\n \n ForkJoinPool pool = new ForkJoinPool(applicationsToUpdateInParallel);\n pool.submit(() -> {\n applicationList.parallelStream().forEach(application -> {\n try {\n applications.lockIfPresent(application.id(), locked ->\n applications.store(locked.with(controller().metricsService().getApplicationMetrics(application.id()))));\n\n applications.lockIfPresent(application.id(), locked ->\n applications.store(locked.withRotationStatus(rotationStatus(application))));\n\n for (Deployment deployment : application.deployments().values()) {\n MetricsService.DeploymentMetrics deploymentMetrics = controller().metricsService()\n .getDeploymentMetrics(application.id(), deployment.zone());\n if (deploymentMetrics.queriesPerSecond() < 0.0001) {\n zeroQps.incrementAndGet();\n }\n\n DeploymentMetrics newMetrics = new DeploymentMetrics(deploymentMetrics.queriesPerSecond(),\n deploymentMetrics.writesPerSecond(),\n deploymentMetrics.documentCount(),\n deploymentMetrics.queryLatencyMillis(),\n deploymentMetrics.writeLatencyMillis());\n\n applications.lockIfPresent(application.id(), locked ->\n applications.store(locked.with(deployment.zone(), newMetrics)\n .recordActivityAt(controller().clock().instant(), deployment.zone())));\n }\n } catch (Exception e) {\n 
failures.incrementAndGet();\n lastException.set(e);\n }\n });\n });\n pool.shutdown();\n try {\n pool.awaitTermination(30, TimeUnit.MINUTES);\n log.log(Level.WARNING, String.format(\"Number of application with 0 qps: %d/%d\", zeroQps.get(), applicationList.size()));\n if (lastException.get() != null) {\n log.log(Level.WARNING, String.format(\"Failed to query metrics service for %d/%d applications. Last error: %s. Retrying in %s\",\n failures.get(),\n applicationList.size(),\n Exceptions.toMessageString(lastException.get()),\n maintenanceInterval()));\n }\n } catch (InterruptedException e) {\n throw new RuntimeException(e);\n }\n }", "target_code": "log.log(Level.WARNING, String.format(\"Number of application with 0 qps: %d/%d\", zeroQps.get(), applicationList.size()));", "method_body_after": "protected void maintain() {\n AtomicInteger failures = new AtomicInteger(0);\n AtomicInteger zeroQps = new AtomicInteger(0);\n AtomicReference lastException = new AtomicReference<>(null);\n List applicationList = applications.asList();\n\n \n ForkJoinPool pool = new ForkJoinPool(applicationsToUpdateInParallel);\n pool.submit(() -> {\n applicationList.parallelStream().forEach(application -> {\n try {\n applications.lockIfPresent(application.id(), locked ->\n applications.store(locked.with(controller().metricsService().getApplicationMetrics(application.id()))));\n\n applications.lockIfPresent(application.id(), locked ->\n applications.store(locked.withRotationStatus(rotationStatus(application))));\n\n for (Deployment deployment : application.deployments().values()) {\n MetricsService.DeploymentMetrics deploymentMetrics = controller().metricsService()\n .getDeploymentMetrics(application.id(), deployment.zone());\n if (deploymentMetrics.queriesPerSecond() < 0.0001) {\n zeroQps.incrementAndGet();\n }\n\n DeploymentMetrics newMetrics = new DeploymentMetrics(deploymentMetrics.queriesPerSecond(),\n deploymentMetrics.writesPerSecond(),\n deploymentMetrics.documentCount(),\n 
deploymentMetrics.queryLatencyMillis(),\n deploymentMetrics.writeLatencyMillis());\n\n applications.lockIfPresent(application.id(), locked ->\n applications.store(locked.with(deployment.zone(), newMetrics)\n .recordActivityAt(controller().clock().instant(), deployment.zone())));\n }\n } catch (Exception e) {\n failures.incrementAndGet();\n lastException.set(e);\n }\n });\n });\n pool.shutdown();\n try {\n pool.awaitTermination(30, TimeUnit.MINUTES);\n log.log(Level.INFO, String.format(\"Number of application with 0 qps: %d/%d\", zeroQps.get(), applicationList.size()));\n if (lastException.get() != null) {\n log.log(Level.WARNING, String.format(\"Failed to query metrics service for %d/%d applications. Last error: %s. Retrying in %s\",\n failures.get(),\n applicationList.size(),\n Exceptions.toMessageString(lastException.get()),\n maintenanceInterval()));\n }\n } catch (InterruptedException e) {\n throw new RuntimeException(e);\n }\n }", "context_before": "class DeploymentMetricsMaintainer extends Maintainer {\n\n private static final Logger log = Logger.getLogger(DeploymentMetricsMaintainer.class.getName());\n\n private static final int applicationsToUpdateInParallel = 10;\n\n private final ApplicationController applications;\n\n public DeploymentMetricsMaintainer(Controller controller, Duration duration, JobControl jobControl) {\n super(controller, duration, jobControl);\n this.applications = controller.applications();\n }\n\n @Override\n \n\n /** Get global rotation status for application */\n private Map rotationStatus(Application application) {\n return applications.rotationRepository().getRotation(application)\n .map(rotation -> controller().metricsService().getRotationStatus(rotation.name()))\n .map(rotationStatus -> {\n Map result = new TreeMap<>();\n rotationStatus.forEach((hostname, status) -> result.put(hostname, from(status)));\n return result;\n })\n .orElseGet(Collections::emptyMap);\n }\n\n private static RotationStatus 
from(com.yahoo.vespa.hosted.controller.api.integration.routing.RotationStatus status) {\n switch (status) {\n case IN: return RotationStatus.in;\n case OUT: return RotationStatus.out;\n case UNKNOWN: return RotationStatus.unknown;\n default: throw new IllegalArgumentException(\"Unknown API value for rotation status: \" + status);\n }\n }\n\n}", "context_after": "class DeploymentMetricsMaintainer extends Maintainer {\n\n private static final Logger log = Logger.getLogger(DeploymentMetricsMaintainer.class.getName());\n\n private static final int applicationsToUpdateInParallel = 10;\n\n private final ApplicationController applications;\n\n public DeploymentMetricsMaintainer(Controller controller, Duration duration, JobControl jobControl) {\n super(controller, duration, jobControl);\n this.applications = controller.applications();\n }\n\n @Override\n \n\n /** Get global rotation status for application */\n private Map rotationStatus(Application application) {\n return applications.rotationRepository().getRotation(application)\n .map(rotation -> controller().metricsService().getRotationStatus(rotation.name()))\n .map(rotationStatus -> {\n Map result = new TreeMap<>();\n rotationStatus.forEach((hostname, status) -> result.put(hostname, from(status)));\n return result;\n })\n .orElseGet(Collections::emptyMap);\n }\n\n private static RotationStatus from(com.yahoo.vespa.hosted.controller.api.integration.routing.RotationStatus status) {\n switch (status) {\n case IN: return RotationStatus.in;\n case OUT: return RotationStatus.out;\n case UNKNOWN: return RotationStatus.unknown;\n default: throw new IllegalArgumentException(\"Unknown API value for rotation status: \" + status);\n }\n }\n\n}" }, { "comment": "Can you please elaborate on why you think it shouldn't be an error?", "method_body": "boolean validateErrorVariable(BLangErrorVariable errorVariable, SymbolEnv env) {\n BType varType = Types.getReferredType(errorVariable.getBType());\n BErrorType errorType;\n switch 
(varType.tag) {\n case TypeTags.UNION:\n BUnionType unionType = ((BUnionType) varType);\n List possibleTypes = types.getAllTypes(unionType, true).stream()\n .filter(type -> TypeTags.ERROR == type.tag)\n .map(BErrorType.class::cast)\n .collect(Collectors.toList());\n\n if (possibleTypes.isEmpty()) {\n dlog.error(errorVariable.pos, DiagnosticErrorCode.INVALID_ERROR_BINDING_PATTERN, varType);\n return false;\n }\n\n if (possibleTypes.size() > 1) {\n LinkedHashSet detailType = new LinkedHashSet<>();\n for (BErrorType possibleErrType : possibleTypes) {\n detailType.add(possibleErrType.detailType);\n }\n BType errorDetailType = detailType.size() > 1\n ? BUnionType.create(null, detailType)\n : detailType.iterator().next();\n errorType = new BErrorType(null, errorDetailType);\n } else {\n errorType = possibleTypes.get(0);\n }\n break;\n case TypeTags.ERROR:\n errorType = (BErrorType) varType;\n break;\n case TypeTags.SEMANTIC_ERROR:\n \n return false;\n default:\n dlog.error(errorVariable.pos, DiagnosticErrorCode.INVALID_ERROR_BINDING_PATTERN,\n varType);\n return false;\n }\n errorVariable.setBType(errorType);\n\n if (!errorVariable.isInMatchStmt) {\n BLangSimpleVariable errorMsg = errorVariable.message;\n if (errorMsg != null) {\n defineMemberNode(errorVariable.message, env, symTable.stringType);\n }\n\n BLangVariable errorCause = errorVariable.cause;\n if (errorCause != null) {\n if (errorCause.getKind() == NodeKind.VARIABLE &&\n names.fromIdNode(((BLangSimpleVariable) errorCause).name) == Names.IGNORE) {\n dlog.error(errorCause.pos,\n DiagnosticErrorCode.CANNOT_USE_WILDCARD_BINDING_PATTERN_FOR_ERROR_CAUSE);\n return false;\n }\n defineMemberNode(errorVariable.cause, env, symTable.errorOrNilType);\n }\n }\n\n if (errorVariable.detail == null || (errorVariable.detail.isEmpty()\n && !isRestDetailBindingAvailable(errorVariable))) {\n return validateErrorMessageMatchPatternSyntax(errorVariable, env);\n }\n\n BType detailType = Types.getReferredType(errorType.detailType);\n 
if (detailType.getKind() == TypeKind.RECORD || detailType.getKind() == TypeKind.MAP) {\n return validateErrorVariable(errorVariable, errorType, env);\n } else if (detailType.getKind() == TypeKind.UNION) {\n BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.ERROR,\n env.enclPkg.packageID, symTable.errorType,\n env.scope.owner, errorVariable.pos, SOURCE);\n \n errorVariable.setBType(new BErrorType(errorTypeSymbol, symTable.detailType));\n return validateErrorVariable(errorVariable, env);\n }\n\n if (isRestDetailBindingAvailable(errorVariable)) {\n defineMemberNode(errorVariable.restDetail, env, symTable.detailType);\n }\n return true;\n }", "target_code": "DiagnosticErrorCode.CANNOT_USE_WILDCARD_BINDING_PATTERN_FOR_ERROR_CAUSE);", "method_body_after": "boolean validateErrorVariable(BLangErrorVariable errorVariable, SymbolEnv env) {\n BType varType = Types.getReferredType(errorVariable.getBType());\n BErrorType errorType;\n switch (varType.tag) {\n case TypeTags.UNION:\n BUnionType unionType = ((BUnionType) varType);\n List possibleTypes = types.getAllTypes(unionType, true).stream()\n .filter(type -> TypeTags.ERROR == type.tag)\n .map(BErrorType.class::cast)\n .collect(Collectors.toList());\n\n if (possibleTypes.isEmpty()) {\n dlog.error(errorVariable.pos, DiagnosticErrorCode.INVALID_ERROR_BINDING_PATTERN, varType);\n return false;\n }\n\n if (possibleTypes.size() > 1) {\n LinkedHashSet detailType = new LinkedHashSet<>();\n for (BErrorType possibleErrType : possibleTypes) {\n detailType.add(possibleErrType.detailType);\n }\n BType errorDetailType = detailType.size() > 1\n ? 
BUnionType.create(null, detailType)\n : detailType.iterator().next();\n errorType = new BErrorType(null, errorDetailType);\n } else {\n errorType = possibleTypes.get(0);\n }\n break;\n case TypeTags.ERROR:\n errorType = (BErrorType) varType;\n break;\n case TypeTags.SEMANTIC_ERROR:\n \n return false;\n default:\n dlog.error(errorVariable.pos, DiagnosticErrorCode.INVALID_ERROR_BINDING_PATTERN,\n varType);\n return false;\n }\n errorVariable.setBType(errorType);\n\n if (!errorVariable.isInMatchStmt) {\n BLangSimpleVariable errorMsg = errorVariable.message;\n if (errorMsg != null) {\n defineMemberNode(errorVariable.message, env, symTable.stringType);\n }\n\n BLangVariable errorCause = errorVariable.cause;\n if (errorCause != null) {\n if (errorCause.getKind() == NodeKind.VARIABLE &&\n names.fromIdNode(((BLangSimpleVariable) errorCause).name) == Names.IGNORE) {\n dlog.error(errorCause.pos,\n DiagnosticErrorCode.CANNOT_USE_WILDCARD_BINDING_PATTERN_FOR_ERROR_CAUSE);\n return false;\n }\n defineMemberNode(errorCause, env, symTable.errorOrNilType);\n }\n }\n\n if (errorVariable.detail == null || (errorVariable.detail.isEmpty()\n && !isRestDetailBindingAvailable(errorVariable))) {\n return validateErrorMessageMatchPatternSyntax(errorVariable, env);\n }\n\n BType detailType = Types.getReferredType(errorType.detailType);\n if (detailType.getKind() == TypeKind.RECORD || detailType.getKind() == TypeKind.MAP) {\n return validateErrorVariable(errorVariable, errorType, env);\n } else if (detailType.getKind() == TypeKind.UNION) {\n BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.ERROR,\n env.enclPkg.packageID, symTable.errorType,\n env.scope.owner, errorVariable.pos, SOURCE);\n \n errorVariable.setBType(new BErrorType(errorTypeSymbol, symTable.detailType));\n return validateErrorVariable(errorVariable, env);\n }\n\n if (isRestDetailBindingAvailable(errorVariable)) {\n defineMemberNode(errorVariable.restDetail, env, symTable.detailType);\n 
}\n return true;\n }", "context_before": "class SymbolEnter extends BLangNodeVisitor {\n\n private static final CompilerContext.Key SYMBOL_ENTER_KEY =\n new CompilerContext.Key<>();\n\n private final SymbolTable symTable;\n private final Names names;\n private final SymbolResolver symResolver;\n private final BLangDiagnosticLog dlog;\n private final Types types;\n private final SourceDirectory sourceDirectory;\n private List unresolvedTypes;\n private Set unresolvedRecordDueToFields;\n private boolean resolveRecordsUnresolvedDueToFields;\n private List unresolvedClasses;\n private HashSet unknownTypeRefs;\n private List importedPackages;\n private int typePrecedence;\n private final TypeParamAnalyzer typeParamAnalyzer;\n private BLangAnonymousModelHelper anonymousModelHelper;\n private BLangMissingNodesHelper missingNodesHelper;\n private PackageCache packageCache;\n private List intersectionTypes;\n\n private SymbolEnv env;\n private final boolean projectAPIInitiatedCompilation;\n\n private static final String DEPRECATION_ANNOTATION = \"deprecated\";\n private static final String ANONYMOUS_RECORD_NAME = \"anonymous-record\";\n\n public static SymbolEnter getInstance(CompilerContext context) {\n SymbolEnter symbolEnter = context.get(SYMBOL_ENTER_KEY);\n if (symbolEnter == null) {\n symbolEnter = new SymbolEnter(context);\n }\n\n return symbolEnter;\n }\n\n public SymbolEnter(CompilerContext context) {\n context.put(SYMBOL_ENTER_KEY, this);\n\n this.symTable = SymbolTable.getInstance(context);\n this.names = Names.getInstance(context);\n this.symResolver = SymbolResolver.getInstance(context);\n this.dlog = BLangDiagnosticLog.getInstance(context);\n this.types = Types.getInstance(context);\n this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);\n this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);\n this.sourceDirectory = context.get(SourceDirectory.class);\n this.importedPackages = new ArrayList<>();\n this.unknownTypeRefs = new 
HashSet<>();\n this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);\n this.packageCache = PackageCache.getInstance(context);\n this.intersectionTypes = new ArrayList<>();\n\n CompilerOptions options = CompilerOptions.getInstance(context);\n projectAPIInitiatedCompilation = Boolean.parseBoolean(\n options.get(CompilerOptionName.PROJECT_API_INITIATED_COMPILATION));\n }\n\n private void cleanup() {\n unknownTypeRefs.clear();\n }\n\n public BLangPackage definePackage(BLangPackage pkgNode) {\n dlog.setCurrentPackageId(pkgNode.packageID);\n populatePackageNode(pkgNode);\n defineNode(pkgNode, this.symTable.pkgEnvMap.get(symTable.langAnnotationModuleSymbol));\n return pkgNode;\n }\n\n public void defineClassDefinition(BLangClassDefinition classNode, SymbolEnv env) {\n\n\n\n\n if (classNode.definitionCompleted) {\n return;\n }\n populateDistinctTypeIdsFromIncludedTypeReferences(classNode);\n defineFieldsOfClassDef(classNode, env);\n defineReferencedFieldsOfClassDef(classNode, env);\n defineFunctionsOfClassDef(env, classNode);\n setReadOnlynessOfClassDef(classNode, env);\n defineReadOnlyIncludedFieldsAndMethods(classNode, env);\n classNode.definitionCompleted = true;\n }\n\n public void defineNode(BLangNode node, SymbolEnv env) {\n SymbolEnv prevEnv = this.env;\n this.env = env;\n node.accept(this);\n this.env = prevEnv;\n }\n\n public BLangPackage defineTestablePackage(BLangTestablePackage pkgNode, SymbolEnv env) {\n populatePackageNode(pkgNode);\n defineNode(pkgNode, env);\n return pkgNode;\n }\n\n \n\n @Override\n public void visit(BLangPackage pkgNode) {\n if (pkgNode.completedPhases.contains(CompilerPhase.DEFINE)) {\n return;\n }\n\n \n BPackageSymbol pkgSymbol;\n if (Symbols.isFlagOn(Flags.asMask(pkgNode.flagSet), Flags.TESTABLE)) {\n pkgSymbol = Symbols.createPackageSymbol(pkgNode.packageID, this.symTable, Flags.asMask(pkgNode.flagSet),\n SOURCE);\n } else {\n pkgSymbol = Symbols.createPackageSymbol(pkgNode.packageID, this.symTable, SOURCE);\n }\n 
if (PackageID.isLangLibPackageID(pkgSymbol.pkgID)) {\n populateLangLibInSymTable(pkgSymbol);\n }\n\n if (pkgNode.moduleContextDataHolder != null) {\n pkgSymbol.exported = pkgNode.moduleContextDataHolder.isExported();\n pkgSymbol.descriptor = pkgNode.moduleContextDataHolder.descriptor();\n }\n\n pkgNode.symbol = pkgSymbol;\n SymbolEnv pkgEnv = SymbolEnv.createPkgEnv(pkgNode, pkgSymbol.scope, this.env);\n this.symTable.pkgEnvMap.put(pkgSymbol, pkgEnv);\n this.symTable.immutableTypeMaps.remove(Types.getPackageIdString(pkgSymbol.pkgID));\n\n \n \n importedPackages.add(pkgNode.packageID);\n\n defineConstructs(pkgNode, pkgEnv);\n pkgNode.getTestablePkgs().forEach(testablePackage -> defineTestablePackage(testablePackage, pkgEnv));\n pkgNode.completedPhases.add(CompilerPhase.DEFINE);\n\n \n cleanup();\n\n \n importedPackages.remove(pkgNode.packageID);\n }\n\n private void defineConstructs(BLangPackage pkgNode, SymbolEnv pkgEnv) {\n \n \n Map importPkgHolder = new HashMap<>();\n pkgNode.imports.forEach(importNode -> {\n String qualifiedName = importNode.getQualifiedPackageName();\n if (importPkgHolder.containsKey(qualifiedName)) {\n importPkgHolder.get(qualifiedName).unresolved.add(importNode);\n return;\n }\n defineNode(importNode, pkgEnv);\n if (importNode.symbol != null) {\n importPkgHolder.put(qualifiedName, new ImportResolveHolder(importNode));\n }\n });\n\n for (ImportResolveHolder importHolder : importPkgHolder.values()) {\n BPackageSymbol pkgSymbol = importHolder.resolved.symbol; \n \n\n for (BLangImportPackage unresolvedPkg : importHolder.unresolved) {\n BPackageSymbol importSymbol = importHolder.resolved.symbol;\n Name resolvedPkgAlias = names.fromIdNode(importHolder.resolved.alias);\n Name unresolvedPkgAlias = names.fromIdNode(unresolvedPkg.alias);\n\n \n if (!Names.IGNORE.equals(unresolvedPkgAlias) && unresolvedPkgAlias.equals(resolvedPkgAlias)\n && importSymbol.compUnit.equals(names.fromIdNode(unresolvedPkg.compUnit))) {\n if (isSameImport(unresolvedPkg, 
importSymbol)) {\n dlog.error(unresolvedPkg.pos, DiagnosticErrorCode.REDECLARED_IMPORT_MODULE,\n unresolvedPkg.getQualifiedPackageName());\n } else {\n dlog.error(unresolvedPkg.pos, DiagnosticErrorCode.REDECLARED_SYMBOL, unresolvedPkgAlias);\n }\n continue;\n }\n\n unresolvedPkg.symbol = pkgSymbol;\n \n BPackageSymbol symbol = dupPackageSymbolAndSetCompUnit(pkgSymbol,\n names.fromIdNode(unresolvedPkg.compUnit));\n symbol.scope = pkgSymbol.scope;\n unresolvedPkg.symbol = symbol;\n pkgEnv.scope.define(unresolvedPkgAlias, symbol);\n }\n }\n if (!PackageID.ANNOTATIONS.equals(pkgNode.packageID)) {\n initPredeclaredModules(symTable.predeclaredModules, pkgNode.compUnits, pkgEnv);\n }\n\n \n this.typePrecedence = 0;\n\n \n \n \n List typeAndClassDefs = new ArrayList<>();\n pkgNode.constants.forEach(constant -> typeAndClassDefs.add(constant));\n pkgNode.typeDefinitions.forEach(typDef -> typeAndClassDefs.add(typDef));\n List classDefinitions = getClassDefinitions(pkgNode.topLevelNodes);\n classDefinitions.forEach(classDefn -> typeAndClassDefs.add(classDefn));\n defineTypeNodes(typeAndClassDefs, pkgEnv);\n\n \n \n pkgEnv.logErrors = true;\n\n \n pkgNode.typeDefinitions.sort(getTypePrecedenceComparator());\n typeAndClassDefs.sort(getTypePrecedenceComparator());\n\n \n defineDistinctClassAndObjectDefinitions(typeAndClassDefs);\n\n \n defineFields(typeAndClassDefs, pkgEnv);\n defineDependentFields(typeAndClassDefs, pkgEnv);\n\n \n defineIntersectionTypes(pkgEnv);\n\n \n defineErrorDetails(pkgNode.typeDefinitions, pkgEnv);\n\n \n defineFunctions(typeAndClassDefs, pkgEnv);\n\n \n \n \n validateIntersectionTypeDefinitions(pkgNode.typeDefinitions, pkgNode.packageID);\n defineUndefinedReadOnlyTypes(pkgNode.typeDefinitions, typeAndClassDefs, pkgEnv);\n\n \n pkgNode.services.forEach(service -> defineNode(service, pkgEnv));\n\n \n for (BLangFunction bLangFunction : pkgNode.functions) {\n \n if (!bLangFunction.flagSet.contains(Flag.LAMBDA)) {\n defineNode(bLangFunction, pkgEnv);\n }\n 
}\n\n \n pkgNode.annotations.forEach(annot -> defineNode(annot, pkgEnv));\n\n for (BLangVariable variable : pkgNode.globalVars) {\n BLangExpression expr = variable.expr;\n if (expr != null && expr.getKind() == NodeKind.LAMBDA) {\n defineNode(((BLangLambdaFunction) expr).function, pkgEnv);\n if (variable.isDeclaredWithVar) {\n setTypeFromLambdaExpr(variable);\n }\n }\n defineNode(variable, pkgEnv);\n }\n\n \n for (BLangVariable var : pkgNode.globalVars) {\n if (var.getKind() == NodeKind.VARIABLE) {\n BVarSymbol varSymbol = var.symbol;\n if (varSymbol != null) {\n BTypeSymbol tSymbol = varSymbol.type.tsymbol;\n if (tSymbol != null && Symbols.isFlagOn(tSymbol.flags, Flags.CLIENT)) {\n varSymbol.tag = SymTag.ENDPOINT;\n }\n }\n }\n }\n }\n\n private void defineDependentFields(List typeDefNodes, SymbolEnv pkgEnv) {\n for (BLangNode typeDef : typeDefNodes) {\n if (typeDef.getKind() == NodeKind.CLASS_DEFN) {\n BLangClassDefinition classDefinition = (BLangClassDefinition) typeDef;\n if (isObjectCtor(classDefinition)) {\n continue;\n }\n defineReferencedFieldsOfClassDef(classDefinition, pkgEnv);\n } else if (typeDef.getKind() == NodeKind.TYPE_DEFINITION) {\n defineReferencedFieldsOfRecordTypeDef((BLangTypeDefinition) typeDef);\n }\n }\n }\n\n private void defineReferencedFieldsOfClassDef(BLangClassDefinition classDefinition, SymbolEnv pkgEnv) {\n SymbolEnv typeDefEnv = classDefinition.typeDefEnv;\n BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) classDefinition.symbol;\n BObjectType objType = (BObjectType) tSymbol.type;\n\n defineReferencedClassFields(classDefinition, typeDefEnv, objType, false);\n }\n\n private void defineIntersectionTypes(SymbolEnv env) {\n for (BLangNode typeDescriptor : this.intersectionTypes) {\n defineNode(typeDescriptor, env);\n }\n this.intersectionTypes.clear();\n }\n\n private void defineErrorType(Location pos, BErrorType errorType, SymbolEnv env) {\n SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol);\n BTypeSymbol errorTSymbol = 
errorType.tsymbol;\n errorTSymbol.scope = new Scope(errorTSymbol);\n\n if (symResolver.checkForUniqueSymbol(pos, pkgEnv, errorTSymbol)) {\n pkgEnv.scope.define(errorTSymbol.name, errorTSymbol);\n }\n\n SymbolEnv prevEnv = this.env;\n this.env = pkgEnv;\n this.env = prevEnv;\n }\n\n private boolean isObjectCtor(BLangNode node) {\n if (node.getKind() == NodeKind.CLASS_DEFN) {\n BLangClassDefinition classDefinition = (BLangClassDefinition) node;\n return isObjectCtor(classDefinition);\n }\n return false;\n }\n\n private boolean isObjectCtor(BLangClassDefinition classDefinition) {\n if (!classDefinition.isObjectContructorDecl && classDefinition.isServiceDecl) {\n return false;\n }\n if (classDefinition.flagSet.contains(Flag.OBJECT_CTOR)) {\n return true;\n }\n return false;\n }\n\n private void defineDistinctClassAndObjectDefinitions(List typDefs) {\n for (BLangNode node : typDefs) {\n if (node.getKind() == NodeKind.CLASS_DEFN) {\n BLangClassDefinition classDefinition = (BLangClassDefinition) node;\n if (isObjectCtor(classDefinition)) {\n continue;\n }\n populateDistinctTypeIdsFromIncludedTypeReferences((BLangClassDefinition) node);\n } else if (node.getKind() == NodeKind.TYPE_DEFINITION) {\n populateDistinctTypeIdsFromIncludedTypeReferences((BLangTypeDefinition) node);\n }\n }\n }\n\n private void populateDistinctTypeIdsFromIncludedTypeReferences(BLangTypeDefinition typeDefinition) {\n if (typeDefinition.typeNode.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {\n if (typeDefinition.typeNode.getBType() == null) {\n return;\n }\n\n BType definingType = types.getTypeWithEffectiveIntersectionTypes(typeDefinition.typeNode.getBType());\n definingType = Types.getReferredType(definingType);\n if (definingType.tag != TypeTags.OBJECT) {\n return;\n }\n BObjectType definigObjType = (BObjectType) definingType;\n\n BLangIntersectionTypeNode typeNode = (BLangIntersectionTypeNode) typeDefinition.typeNode;\n for (BLangType constituentTypeNode : typeNode.getConstituentTypeNodes()) {\n 
BType constituentType = Types.getReferredType(constituentTypeNode.getBType());\n if (constituentType.tag != TypeTags.OBJECT) {\n continue;\n }\n definigObjType.typeIdSet.add(((BObjectType) constituentType).typeIdSet);\n }\n } else if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE) {\n BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDefinition.typeNode;\n BTypeIdSet typeIdSet = ((BObjectType) objectTypeNode.getBType()).typeIdSet;\n\n for (BLangType typeRef : objectTypeNode.typeRefs) {\n BType type = types.getTypeWithEffectiveIntersectionTypes(typeRef.getBType());\n type = Types.getReferredType(type);\n if (type.tag != TypeTags.OBJECT) {\n continue;\n }\n BObjectType refType = (BObjectType) type;\n typeIdSet.add(refType.typeIdSet);\n }\n }\n }\n\n private void populateDistinctTypeIdsFromIncludedTypeReferences(BLangClassDefinition typeDef) {\n BLangClassDefinition classDefinition = typeDef;\n BTypeIdSet typeIdSet = ((BObjectType) classDefinition.getBType()).typeIdSet;\n\n for (BLangType typeRef : classDefinition.typeRefs) {\n BType type = types.getTypeWithEffectiveIntersectionTypes(typeRef.getBType());\n type = Types.getReferredType(type);\n if (type.tag != TypeTags.OBJECT) {\n continue;\n }\n BObjectType refType = (BObjectType) type;\n typeIdSet.add(refType.typeIdSet);\n }\n }\n\n private Comparator getTypePrecedenceComparator() {\n return (l, r) -> {\n if (l instanceof OrderedNode && r instanceof OrderedNode) {\n return ((OrderedNode) l).getPrecedence() - ((OrderedNode) r).getPrecedence();\n }\n return 0;\n };\n }\n\n private void defineFunctionsOfClassDef(SymbolEnv pkgEnv, BLangClassDefinition classDefinition) {\n validateInclusionsForNonPrivateMembers(classDefinition.typeRefs);\n BObjectType objectType = (BObjectType) classDefinition.symbol.type;\n\n if (objectType.mutableType != null) {\n \n \n \n return;\n }\n\n SymbolEnv objMethodsEnv =\n SymbolEnv.createClassMethodsEnv(classDefinition, (BObjectTypeSymbol) classDefinition.symbol, 
pkgEnv);\n if (classDefinition.isObjectContructorDecl) {\n classDefinition.oceEnvData.objMethodsEnv = objMethodsEnv;\n }\n\n \n defineClassInitFunction(classDefinition, objMethodsEnv);\n classDefinition.functions.forEach(f -> {\n f.flagSet.add(Flag.FINAL); \n f.setReceiver(ASTBuilderUtil.createReceiver(classDefinition.pos, objectType));\n defineNode(f, objMethodsEnv);\n });\n\n defineIncludedMethods(classDefinition, objMethodsEnv, false);\n }\n\n private void defineIncludedMethods(BLangClassDefinition classDefinition, SymbolEnv objMethodsEnv,\n boolean defineReadOnlyInclusionsOnly) {\n Set includedFunctionNames = new HashSet<>();\n\n if (defineReadOnlyInclusionsOnly) {\n for (BAttachedFunction function :\n ((BObjectTypeSymbol) classDefinition.getBType().tsymbol).referencedFunctions) {\n includedFunctionNames.add(function.funcName.value);\n }\n }\n\n \n \n \n \n for (BLangType typeRef : classDefinition.typeRefs) {\n BType type = Types.getReferredType(typeRef.getBType());\n if (type == null || type == symTable.semanticError) {\n return;\n }\n\n if (type.tag == TypeTags.INTERSECTION) {\n if (!defineReadOnlyInclusionsOnly) {\n \n continue;\n }\n\n type = ((BIntersectionType) type).effectiveType;\n } else {\n if (defineReadOnlyInclusionsOnly) {\n if (!isImmutable((BObjectType) type)) {\n continue;\n }\n } else if (isImmutable((BObjectType) type)) {\n continue;\n }\n }\n\n List functions = ((BObjectTypeSymbol) type.tsymbol).attachedFuncs;\n for (BAttachedFunction function : functions) {\n defineReferencedFunction(classDefinition.pos, classDefinition.flagSet, objMethodsEnv,\n typeRef, function, includedFunctionNames, classDefinition.symbol, classDefinition.functions,\n classDefinition.internal);\n }\n }\n }\n\n private void defineReferencedClassFields(BLangClassDefinition classDefinition, SymbolEnv typeDefEnv,\n BObjectType objType, boolean defineReadOnlyInclusionsOnly) {\n if (classDefinition.typeRefs.isEmpty()) {\n return;\n }\n Set referencedTypes = new 
HashSet<>(classDefinition.typeRefs.size());\n List invalidTypeRefs = new ArrayList<>(classDefinition.typeRefs.size());\n\n Map fieldNames = new HashMap<>(classDefinition.fields.size());\n for (BLangSimpleVariable fieldVariable : classDefinition.fields) {\n fieldNames.put(fieldVariable.name.value, fieldVariable);\n }\n\n \n List referencedFields = new ArrayList<>();\n\n for (BLangType typeRef : classDefinition.typeRefs) {\n BType referredType = Types.getReferredType(symResolver.resolveTypeNode(typeRef, typeDefEnv));\n if (referredType == symTable.semanticError) {\n continue;\n }\n\n int tag = Types.getReferredType(classDefinition.getBType()).tag;\n if (tag == TypeTags.OBJECT) {\n if (isInvalidIncludedTypeInClass(referredType)) {\n if (!defineReadOnlyInclusionsOnly) {\n dlog.error(typeRef.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_REFERENCE, typeRef);\n }\n invalidTypeRefs.add(typeRef);\n continue;\n }\n\n BObjectType objectType = null;\n\n if (referredType.tag == TypeTags.INTERSECTION) {\n if (!defineReadOnlyInclusionsOnly) {\n \n continue;\n }\n } else {\n objectType = (BObjectType) referredType;\n\n if (defineReadOnlyInclusionsOnly) {\n if (!isImmutable(objectType)) {\n continue;\n }\n } else if (isImmutable(objectType)) {\n continue;\n }\n }\n } else if (defineReadOnlyInclusionsOnly) {\n continue;\n }\n\n \n if (!referencedTypes.add(referredType.tsymbol)) {\n dlog.error(typeRef.pos, DiagnosticErrorCode.REDECLARED_TYPE_REFERENCE, typeRef);\n continue;\n }\n\n BType effectiveIncludedType = referredType;\n\n if (tag == TypeTags.OBJECT) {\n BObjectType objectType;\n referredType = Types.getReferredType(referredType);\n\n if (referredType.tag == TypeTags.INTERSECTION) {\n effectiveIncludedType = objectType = (BObjectType) ((BIntersectionType) referredType).effectiveType;\n } else {\n objectType = (BObjectType) referredType;\n }\n\n if (!classDefinition.symbol.pkgID.equals(referredType.tsymbol.pkgID)) {\n boolean errored = false;\n for (BField field : 
objectType.fields.values()) {\n if (!Symbols.isPublic(field.symbol)) {\n dlog.error(typeRef.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_REFERENCE_NON_PUBLIC_MEMBERS,\n typeRef);\n invalidTypeRefs.add(typeRef);\n errored = true;\n break;\n }\n }\n\n if (errored) {\n continue;\n }\n\n for (BAttachedFunction func : ((BObjectTypeSymbol) objectType.tsymbol).attachedFuncs) {\n if (!Symbols.isPublic(func.symbol)) {\n dlog.error(typeRef.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_REFERENCE_NON_PUBLIC_MEMBERS,\n typeRef);\n invalidTypeRefs.add(typeRef);\n errored = true;\n break;\n }\n }\n\n if (errored) {\n continue;\n }\n }\n }\n\n \n \n \n \n for (BField field : ((BStructureType) effectiveIncludedType).fields.values()) {\n if (fieldNames.containsKey(field.name.value)) {\n BLangSimpleVariable existingVariable = fieldNames.get(field.name.value);\n if ((existingVariable.flagSet.contains(Flag.PUBLIC) !=\n Symbols.isFlagOn(field.symbol.flags, Flags.PUBLIC)) ||\n (existingVariable.flagSet.contains(Flag.PRIVATE) !=\n Symbols.isFlagOn(field.symbol.flags, Flags.PRIVATE))) {\n dlog.error(existingVariable.pos,\n DiagnosticErrorCode.MISMATCHED_VISIBILITY_QUALIFIERS_IN_OBJECT_FIELD,\n existingVariable.name.value);\n }\n continue;\n }\n\n BLangSimpleVariable var = ASTBuilderUtil.createVariable(typeRef.pos, field.name.value, field.type);\n var.flagSet = field.symbol.getFlags();\n referencedFields.add(var);\n }\n }\n classDefinition.typeRefs.removeAll(invalidTypeRefs);\n\n for (BLangSimpleVariable field : referencedFields) {\n defineNode(field, typeDefEnv);\n if (field.symbol.type == symTable.semanticError) {\n continue;\n }\n objType.fields.put(field.name.value, new BField(names.fromIdNode(field.name), field.pos, field.symbol));\n }\n\n classDefinition.referencedFields.addAll(referencedFields);\n }\n\n private List getClassDefinitions(List topLevelNodes) {\n List classDefinitions = new ArrayList<>();\n for (TopLevelNode topLevelNode : topLevelNodes) {\n if (topLevelNode.getKind() == 
NodeKind.CLASS_DEFN) {\n classDefinitions.add((BLangClassDefinition) topLevelNode);\n }\n }\n return classDefinitions;\n }\n\n @Override\n public void visit(BLangObjectConstructorExpression objectCtorExpression) {\n visit(objectCtorExpression.classNode);\n objectCtorExpression.setBType(objectCtorExpression.classNode.getBType());\n }\n\n @Override\n public void visit(BLangClassDefinition classDefinition) {\n EnumSet flags = EnumSet.copyOf(classDefinition.flagSet);\n boolean isPublicType = flags.contains(Flag.PUBLIC);\n Name className = names.fromIdNode(classDefinition.name);\n Name classOrigName = names.originalNameFromIdNode(classDefinition.name);\n\n BClassSymbol tSymbol = Symbols.createClassSymbol(Flags.asMask(flags),\n className, env.enclPkg.symbol.pkgID, null,\n env.scope.owner, classDefinition.name.pos,\n getOrigin(className, flags), classDefinition.isServiceDecl);\n tSymbol.originalName = classOrigName;\n tSymbol.scope = new Scope(tSymbol);\n tSymbol.markdownDocumentation = getMarkdownDocAttachment(classDefinition.markdownDocumentationAttachment);\n\n\n long typeFlags = 0;\n\n if (flags.contains(Flag.READONLY)) {\n typeFlags |= Flags.READONLY;\n }\n\n if (flags.contains(Flag.ISOLATED)) {\n typeFlags |= Flags.ISOLATED;\n }\n\n if (flags.contains(Flag.SERVICE)) {\n typeFlags |= Flags.SERVICE;\n }\n\n if (flags.contains(Flag.OBJECT_CTOR)) {\n typeFlags |= Flags.OBJECT_CTOR;\n }\n\n BObjectType objectType = new BObjectType(tSymbol, typeFlags);\n if (classDefinition.isObjectContructorDecl || flags.contains(Flag.OBJECT_CTOR)) {\n classDefinition.oceEnvData.objectType = objectType;\n objectType.classDef = classDefinition;\n }\n\n if (flags.contains(Flag.DISTINCT)) {\n objectType.typeIdSet = BTypeIdSet.from(env.enclPkg.symbol.pkgID, classDefinition.name.value, isPublicType);\n }\n\n if (flags.contains(Flag.CLIENT)) {\n objectType.flags |= Flags.CLIENT;\n }\n\n tSymbol.type = objectType;\n classDefinition.setBType(objectType);\n 
classDefinition.setDeterminedType(objectType);\n classDefinition.symbol = tSymbol;\n\n if (isDeprecated(classDefinition.annAttachments)) {\n tSymbol.flags |= Flags.DEPRECATED;\n }\n\n \n \n for (BLangType typeRef : classDefinition.typeRefs) {\n BType referencedType = symResolver.resolveTypeNode(typeRef, env);\n if (referencedType == symTable.noType && !this.unresolvedTypes.contains(classDefinition)) {\n this.unresolvedTypes.add(classDefinition);\n return;\n }\n objectType.typeInclusions.add(referencedType);\n }\n\n classDefinition.setPrecedence(this.typePrecedence++);\n if (symResolver.checkForUniqueSymbol(classDefinition.pos, env, tSymbol)) {\n env.scope.define(tSymbol.name, tSymbol);\n }\n \n \n }\n\n public void visit(BLangAnnotation annotationNode) {\n Name annotName = names.fromIdNode(annotationNode.name);\n Name annotOrigName = names.originalNameFromIdNode(annotationNode.name);\n BAnnotationSymbol annotationSymbol = Symbols.createAnnotationSymbol(Flags.asMask(annotationNode.flagSet),\n annotationNode.getAttachPoints(),\n annotName, annotOrigName,\n env.enclPkg.symbol.pkgID, null,\n env.scope.owner, annotationNode.name.pos,\n getOrigin(annotName));\n annotationSymbol.markdownDocumentation =\n getMarkdownDocAttachment(annotationNode.markdownDocumentationAttachment);\n if (isDeprecated(annotationNode.annAttachments)) {\n annotationSymbol.flags |= Flags.DEPRECATED;\n }\n annotationSymbol.type = new BAnnotationType(annotationSymbol);\n annotationNode.symbol = annotationSymbol;\n defineSymbol(annotationNode.name.pos, annotationSymbol);\n SymbolEnv annotationEnv = SymbolEnv.createAnnotationEnv(annotationNode, annotationSymbol.scope, env);\n BLangType annotTypeNode = annotationNode.typeNode;\n if (annotTypeNode != null) {\n BType type = this.symResolver.resolveTypeNode(annotTypeNode, annotationEnv);\n annotationSymbol.attachedType = type;\n if (!isValidAnnotationType(type)) {\n dlog.error(annotTypeNode.pos, DiagnosticErrorCode.ANNOTATION_INVALID_TYPE, type);\n 
}\n\n\n\n\n }\n\n if (!annotationNode.flagSet.contains(Flag.CONSTANT) &&\n annotationNode.getAttachPoints().stream().anyMatch(attachPoint -> attachPoint.source)) {\n dlog.error(annotationNode.pos, DiagnosticErrorCode.ANNOTATION_REQUIRES_CONST);\n }\n }\n\n private boolean isNullOrEmpty(String s) {\n return s == null || s.isEmpty();\n }\n\n @Override\n public void visit(BLangImportPackage importPkgNode) {\n Name pkgAlias = names.fromIdNode(importPkgNode.alias);\n if (!Names.IGNORE.equals(pkgAlias)) {\n BSymbol importSymbol =\n symResolver.resolvePrefixSymbol(env, pkgAlias, names.fromIdNode(importPkgNode.compUnit));\n if (importSymbol != symTable.notFoundSymbol) {\n if (isSameImport(importPkgNode, (BPackageSymbol) importSymbol)) {\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.REDECLARED_IMPORT_MODULE,\n importPkgNode.getQualifiedPackageName());\n } else {\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.REDECLARED_SYMBOL, pkgAlias);\n }\n return;\n }\n }\n\n \n \n Name orgName;\n Name pkgName = null;\n Name version;\n PackageID enclPackageID = env.enclPkg.packageID;\n \n \n \n \n \n \n \n if (!isNullOrEmpty(importPkgNode.orgName.value)) {\n orgName = names.fromIdNode(importPkgNode.orgName);\n if (!isNullOrEmpty(importPkgNode.version.value)) {\n version = names.fromIdNode(importPkgNode.version);\n } else {\n \n if (projectAPIInitiatedCompilation) {\n version = Names.EMPTY;\n } else {\n String pkgNameComps = importPkgNode.getPackageName().stream()\n .map(id -> id.value)\n .collect(Collectors.joining(\".\"));\n if (this.sourceDirectory.getSourcePackageNames().contains(pkgNameComps)\n && orgName.value.equals(enclPackageID.orgName.value)) {\n version = enclPackageID.version;\n } else {\n version = Names.EMPTY;\n }\n }\n }\n } else {\n orgName = enclPackageID.orgName;\n pkgName = enclPackageID.pkgName;\n version = (Names.DEFAULT_VERSION.equals(enclPackageID.version)) ? 
Names.EMPTY : enclPackageID.version;\n }\n\n List nameComps = importPkgNode.pkgNameComps.stream()\n .map(identifier -> names.fromIdNode(identifier))\n .collect(Collectors.toList());\n Name moduleName = new Name(nameComps.stream().map(Name::getValue).collect(Collectors.joining(\".\")));\n\n if (pkgName == null) {\n pkgName = moduleName;\n }\n\n PackageID pkgId = new PackageID(orgName, pkgName, moduleName, version, null);\n\n \n BPackageSymbol bPackageSymbol = this.packageCache.getSymbol(pkgId);\n if (bPackageSymbol != null && this.env.enclPkg.moduleContextDataHolder != null) {\n boolean isCurrentPackageModuleImport =\n this.env.enclPkg.moduleContextDataHolder.descriptor().org() == bPackageSymbol.descriptor.org()\n && this.env.enclPkg.moduleContextDataHolder.descriptor().packageName() ==\n bPackageSymbol.descriptor.packageName();\n if (!isCurrentPackageModuleImport && !bPackageSymbol.exported) {\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.MODULE_NOT_FOUND,\n bPackageSymbol.toString() + \" is not exported\");\n return;\n }\n }\n\n \n if (pkgId.equals(PackageID.ANNOTATIONS) || pkgId.equals(PackageID.INTERNAL) || pkgId.equals(PackageID.QUERY)) {\n \n \n if (!(enclPackageID.orgName.equals(Names.BALLERINA_ORG)\n && enclPackageID.name.value.startsWith(Names.LANG.value))) {\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.MODULE_NOT_FOUND,\n importPkgNode.getQualifiedPackageName());\n return;\n }\n }\n\n \n \n if (importedPackages.contains(pkgId)) {\n int index = importedPackages.indexOf(pkgId);\n \n StringBuilder stringBuilder = new StringBuilder();\n for (int i = index; i < importedPackages.size(); i++) {\n stringBuilder.append(importedPackages.get(i).toString()).append(\" -> \");\n }\n \n stringBuilder.append(pkgId);\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.CYCLIC_MODULE_IMPORTS_DETECTED, stringBuilder.toString());\n return;\n }\n\n boolean samePkg = false;\n \n PackageID entryPackage = importedPackages.get(0);\n if (entryPackage.isUnnamed == 
pkgId.isUnnamed) {\n samePkg = (!entryPackage.isUnnamed) || (entryPackage.sourceFileName.equals(pkgId.sourceFileName));\n }\n \n \n if (samePkg && entryPackage.orgName.equals(pkgId.orgName) && entryPackage.name.equals(pkgId.name)) {\n StringBuilder stringBuilder = new StringBuilder();\n String entryPackageString = importedPackages.get(0).toString();\n \n int packageIndex = entryPackageString.indexOf(\":\");\n if (packageIndex != -1) {\n entryPackageString = entryPackageString.substring(0, packageIndex);\n }\n \n stringBuilder.append(entryPackageString).append(\" -> \");\n for (int i = 1; i < importedPackages.size(); i++) {\n stringBuilder.append(importedPackages.get(i).toString()).append(\" -> \");\n }\n stringBuilder.append(pkgId);\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.CYCLIC_MODULE_IMPORTS_DETECTED, stringBuilder.toString());\n return;\n }\n\n BPackageSymbol pkgSymbol = packageCache.getSymbol(pkgId);\n\n if (pkgSymbol == null) {\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.MODULE_NOT_FOUND,\n importPkgNode.getQualifiedPackageName());\n return;\n }\n\n List imports = ((BPackageSymbol) this.env.scope.owner).imports;\n if (!imports.contains(pkgSymbol)) {\n imports.add(pkgSymbol);\n }\n\n \n \n BPackageSymbol symbol = dupPackageSymbolAndSetCompUnit(pkgSymbol, names.fromIdNode(importPkgNode.compUnit));\n if (!Names.IGNORE.equals(pkgAlias)) {\n symbol.importPrefix = pkgAlias;\n }\n symbol.scope = pkgSymbol.scope;\n importPkgNode.symbol = symbol;\n this.env.scope.define(pkgAlias, symbol);\n }\n\n public void initPredeclaredModules(Map predeclaredModules,\n List compUnits, SymbolEnv env) {\n SymbolEnv prevEnv = this.env;\n this.env = env;\n for (Map.Entry predeclaredModuleEntry : predeclaredModules.entrySet()) {\n Name alias = predeclaredModuleEntry.getKey();\n BPackageSymbol packageSymbol = predeclaredModuleEntry.getValue();\n int index = 0;\n ScopeEntry entry = this.env.scope.lookup(alias);\n if (entry == NOT_FOUND_ENTRY && !compUnits.isEmpty()) 
{\n this.env.scope.define(alias, dupPackageSymbolAndSetCompUnit(packageSymbol,\n new Name(compUnits.get(index++).name)));\n entry = this.env.scope.lookup(alias);\n }\n for (int i = index; i < compUnits.size(); i++) {\n boolean isUndefinedModule = true;\n String compUnitName = compUnits.get(i).name;\n if (((BPackageSymbol) entry.symbol).compUnit.value.equals(compUnitName)) {\n isUndefinedModule = false;\n }\n while (entry.next != NOT_FOUND_ENTRY) {\n if (((BPackageSymbol) entry.next.symbol).compUnit.value.equals(compUnitName)) {\n isUndefinedModule = false;\n break;\n }\n entry = entry.next;\n }\n if (isUndefinedModule) {\n entry.next = new ScopeEntry(dupPackageSymbolAndSetCompUnit(packageSymbol,\n new Name(compUnitName)), NOT_FOUND_ENTRY);\n }\n }\n }\n this.env = prevEnv;\n }\n\n @Override\n public void visit(BLangXMLNS xmlnsNode) {\n String nsURI;\n if (xmlnsNode.namespaceURI.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {\n BLangSimpleVarRef varRef = (BLangSimpleVarRef) xmlnsNode.namespaceURI;\n if (missingNodesHelper.isMissingNode(varRef.variableName.value)) {\n nsURI = \"\";\n } else {\n \n nsURI = \"\";\n }\n } else {\n nsURI = (String) ((BLangLiteral) xmlnsNode.namespaceURI).value;\n if (!nullOrEmpty(xmlnsNode.prefix.value) && nsURI.isEmpty()) {\n dlog.error(xmlnsNode.pos, DiagnosticErrorCode.INVALID_NAMESPACE_DECLARATION, xmlnsNode.prefix);\n }\n }\n\n \n if (xmlnsNode.prefix.value == null) {\n xmlnsNode.prefix.value = XMLConstants.DEFAULT_NS_PREFIX;\n }\n\n Name prefix = names.fromIdNode(xmlnsNode.prefix);\n Location nsSymbolPos = prefix.value.isEmpty() ? 
xmlnsNode.pos : xmlnsNode.prefix.pos;\n BXMLNSSymbol xmlnsSymbol = Symbols.createXMLNSSymbol(prefix, nsURI, env.enclPkg.symbol.pkgID, env.scope.owner,\n nsSymbolPos, getOrigin(prefix));\n xmlnsNode.symbol = xmlnsSymbol;\n\n \n \n \n BSymbol foundSym = symResolver.lookupSymbolInPrefixSpace(env, xmlnsSymbol.name);\n if ((foundSym.tag & SymTag.PACKAGE) != SymTag.PACKAGE) {\n foundSym = symTable.notFoundSymbol;\n }\n if (foundSym != symTable.notFoundSymbol) {\n dlog.error(xmlnsNode.pos, DiagnosticErrorCode.REDECLARED_SYMBOL, xmlnsSymbol.name);\n return;\n }\n\n \n \n defineSymbol(xmlnsNode.prefix.pos, xmlnsSymbol);\n }\n\n private boolean nullOrEmpty(String value) {\n return value == null || value.isEmpty();\n }\n\n public void visit(BLangXMLNSStatement xmlnsStmtNode) {\n defineNode(xmlnsStmtNode.xmlnsDecl, env);\n }\n\n private void defineTypeNodes(List typeDefs, SymbolEnv env) {\n if (typeDefs.isEmpty()) {\n return;\n }\n\n this.unresolvedTypes = new ArrayList<>(typeDefs.size());\n this.unresolvedRecordDueToFields = new HashSet<>(typeDefs.size());\n this.resolveRecordsUnresolvedDueToFields = false;\n for (BLangNode typeDef : typeDefs) {\n if (isErrorIntersectionTypeCreatingNewType(typeDef, env)) {\n populateUndefinedErrorIntersection((BLangTypeDefinition) typeDef, env);\n continue;\n }\n\n\n\n\n defineNode(typeDef, env);\n }\n\n if (typeDefs.size() <= unresolvedTypes.size()) {\n\n this.resolveRecordsUnresolvedDueToFields = true;\n unresolvedTypes.removeAll(unresolvedRecordDueToFields);\n for (BLangNode unresolvedType : unresolvedRecordDueToFields) {\n defineNode(unresolvedType, env);\n }\n this.resolveRecordsUnresolvedDueToFields = false;\n\n \n \n \n\n for (BLangNode unresolvedType : unresolvedTypes) {\n Stack references = new Stack<>();\n NodeKind unresolvedKind = unresolvedType.getKind();\n if (unresolvedKind == NodeKind.TYPE_DEFINITION || unresolvedKind == NodeKind.CONSTANT) {\n TypeDefinition def = (TypeDefinition) unresolvedType;\n \n 
references.push(def.getName().getValue());\n checkErrors(env, unresolvedType, (BLangNode) def.getTypeNode(), references, false);\n } else if (unresolvedType.getKind() == NodeKind.CLASS_DEFN) {\n BLangClassDefinition classDefinition = (BLangClassDefinition) unresolvedType;\n references.push(classDefinition.getName().getValue());\n checkErrors(env, unresolvedType, classDefinition, references, true);\n }\n }\n defineAllUnresolvedCyclicTypesInScope(env);\n\n Set alreadyDefinedTypeDefNames = new HashSet<>();\n int unresolvedTypeCount = unresolvedTypes.size();\n for (int i = 0; i < unresolvedTypeCount; i++) {\n for (BLangNode node : this.unresolvedTypes) {\n String name = getTypeOrClassName(node);\n boolean symbolNotFound = false;\n boolean isTypeOrClassDefinition =\n node.getKind() == NodeKind.TYPE_DEFINITION || node.getKind() == NodeKind.CLASS_DEFN;\n \n \n if (isTypeOrClassDefinition && i != 0) { \n BSymbol bSymbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));\n symbolNotFound = (bSymbol == symTable.notFoundSymbol);\n }\n\n boolean notFoundInList = alreadyDefinedTypeDefNames.add(name);\n\n \n if (notFoundInList || symbolNotFound) {\n defineNode(node, env);\n }\n }\n }\n return;\n }\n defineTypeNodes(unresolvedTypes, env);\n }\n\n private void populateUndefinedErrorIntersection(BLangTypeDefinition typeDef, SymbolEnv env) {\n long flags = 0;\n if (typeDef.flagSet.contains(Flag.PUBLIC)) {\n flags = Flags.PUBLIC;\n }\n\n BErrorType intersectionErrorType = types.createErrorType(null, flags, env);\n intersectionErrorType.tsymbol.name = names.fromString(typeDef.name.value);\n defineErrorType(typeDef.pos, intersectionErrorType, env);\n\n this.intersectionTypes.add(typeDef);\n }\n\n private boolean isErrorIntersectionTypeCreatingNewType(BLangNode typeDef, SymbolEnv env) {\n boolean isIntersectionType = typeDef.getKind() == NodeKind.TYPE_DEFINITION\n && ((BLangTypeDefinition) typeDef).typeNode.getKind() == NodeKind.INTERSECTION_TYPE_NODE;\n if 
(!isIntersectionType) {\n return false;\n }\n\n BLangIntersectionTypeNode intersectionTypeNode =\n (BLangIntersectionTypeNode) ((BLangTypeDefinition) typeDef).typeNode;\n\n Set errorTypes = new HashSet<>();\n\n for (BLangType type : intersectionTypeNode.constituentTypeNodes) {\n BType bType = symResolver.resolveTypeNode(type, env);\n if (Types.getReferredType(bType).tag == TypeTags.ERROR) {\n errorTypes.add(bType);\n }\n }\n return errorTypes.size() > 1;\n }\n\n private void checkErrors(SymbolEnv env, BLangNode unresolvedType, BLangNode currentTypeOrClassNode,\n Stack visitedNodes,\n boolean fromStructuredType) {\n \n List memberTypeNodes;\n switch (currentTypeOrClassNode.getKind()) {\n case ARRAY_TYPE:\n checkErrors(env, unresolvedType, ((BLangArrayType) currentTypeOrClassNode).elemtype, visitedNodes,\n true);\n break;\n case UNION_TYPE_NODE:\n \n memberTypeNodes = ((BLangUnionTypeNode) currentTypeOrClassNode).memberTypeNodes;\n \n for (BLangType memberTypeNode : memberTypeNodes) {\n checkErrors(env, unresolvedType, memberTypeNode, visitedNodes, fromStructuredType);\n }\n break;\n case INTERSECTION_TYPE_NODE:\n memberTypeNodes = ((BLangIntersectionTypeNode) currentTypeOrClassNode).constituentTypeNodes;\n for (BLangType memberTypeNode : memberTypeNodes) {\n checkErrors(env, unresolvedType, memberTypeNode, visitedNodes, fromStructuredType);\n }\n break;\n case TUPLE_TYPE_NODE:\n BLangTupleTypeNode tupleNode = (BLangTupleTypeNode) currentTypeOrClassNode;\n List tupleMemberTypes = tupleNode.getMemberTypeNodes();\n for (BLangType memberTypeNode : tupleMemberTypes) {\n checkErrors(env, unresolvedType, memberTypeNode, visitedNodes, true);\n }\n if (tupleNode.restParamType != null) {\n checkErrors(env, unresolvedType, tupleNode.restParamType, visitedNodes, true);\n }\n break;\n case CONSTRAINED_TYPE:\n checkErrors(env, unresolvedType, ((BLangConstrainedType) currentTypeOrClassNode).constraint,\n visitedNodes,\n true);\n break;\n case TABLE_TYPE:\n checkErrors(env, 
unresolvedType, ((BLangTableTypeNode) currentTypeOrClassNode).constraint, visitedNodes,\n true);\n break;\n case STREAM_TYPE:\n checkErrors(env, unresolvedType, ((BLangStreamType) currentTypeOrClassNode).constraint, visitedNodes,\n true);\n BLangType completionType = ((BLangStreamType) currentTypeOrClassNode).error;\n if (completionType != null) {\n checkErrors(env, unresolvedType, completionType, visitedNodes, true);\n }\n break;\n case USER_DEFINED_TYPE:\n checkErrorsOfUserDefinedType(env, unresolvedType, (BLangUserDefinedType) currentTypeOrClassNode,\n visitedNodes, fromStructuredType);\n break;\n case BUILT_IN_REF_TYPE:\n \n case FINITE_TYPE_NODE:\n case VALUE_TYPE:\n case ERROR_TYPE:\n \n break;\n case FUNCTION_TYPE:\n BLangFunctionTypeNode functionTypeNode = (BLangFunctionTypeNode) currentTypeOrClassNode;\n functionTypeNode.params.forEach(p -> checkErrors(env, unresolvedType, p.typeNode, visitedNodes,\n fromStructuredType));\n if (functionTypeNode.restParam != null) {\n checkErrors(env, unresolvedType, functionTypeNode.restParam.typeNode, visitedNodes,\n fromStructuredType);\n }\n if (functionTypeNode.returnTypeNode != null) {\n checkErrors(env, unresolvedType, functionTypeNode.returnTypeNode, visitedNodes, fromStructuredType);\n }\n break;\n case RECORD_TYPE:\n for (TypeNode typeNode : ((BLangRecordTypeNode) currentTypeOrClassNode).getTypeReferences()) {\n checkErrors(env, unresolvedType, (BLangType) typeNode, visitedNodes, true);\n }\n break;\n case OBJECT_TYPE:\n for (TypeNode typeNode : ((BLangObjectTypeNode) currentTypeOrClassNode).getTypeReferences()) {\n checkErrors(env, unresolvedType, (BLangType) typeNode, visitedNodes, true);\n }\n break;\n case CLASS_DEFN:\n for (TypeNode typeNode : ((BLangClassDefinition) currentTypeOrClassNode).typeRefs) {\n checkErrors(env, unresolvedType, (BLangType) typeNode, visitedNodes, true);\n }\n break;\n default:\n throw new RuntimeException(\"unhandled type kind: \" + currentTypeOrClassNode.getKind());\n }\n }\n\n 
private boolean isTypeConstructorAvailable(NodeKind unresolvedType) {\n switch (unresolvedType) {\n case OBJECT_TYPE:\n case RECORD_TYPE:\n case CONSTRAINED_TYPE:\n case ARRAY_TYPE:\n case TUPLE_TYPE_NODE:\n case TABLE_TYPE:\n case ERROR_TYPE:\n case FUNCTION_TYPE:\n case STREAM_TYPE:\n return true;\n default:\n return false;\n }\n }\n\n private void checkErrorsOfUserDefinedType(SymbolEnv env, BLangNode unresolvedType,\n BLangUserDefinedType currentTypeOrClassNode,\n Stack visitedNodes, boolean fromStructuredType) {\n String currentTypeNodeName = currentTypeOrClassNode.typeName.value;\n \n if (currentTypeNodeName.startsWith(\"$\")) {\n return;\n }\n String unresolvedTypeNodeName = getTypeOrClassName(unresolvedType);\n boolean sameTypeNode = unresolvedTypeNodeName.equals(currentTypeNodeName);\n boolean isVisited = visitedNodes.contains(currentTypeNodeName);\n boolean typeDef = unresolvedType.getKind() == NodeKind.TYPE_DEFINITION;\n\n if (sameTypeNode || isVisited) {\n if (typeDef) {\n BLangTypeDefinition typeDefinition = (BLangTypeDefinition) unresolvedType;\n NodeKind unresolvedTypeNodeKind = typeDefinition.getTypeNode().getKind();\n if (fromStructuredType && (unresolvedTypeNodeKind == NodeKind.UNION_TYPE_NODE\n || unresolvedTypeNodeKind == NodeKind.TUPLE_TYPE_NODE)) {\n \n typeDefinition.hasCyclicReference = true;\n return;\n }\n \n if (unresolvedTypeNodeKind != NodeKind.OBJECT_TYPE && isTypeConstructorAvailable(unresolvedTypeNodeKind)\n && !sameTypeNode) {\n return;\n }\n }\n if (isVisited) {\n \n \n \n \n \n \n int i = visitedNodes.indexOf(currentTypeNodeName);\n List dependencyList = new ArrayList<>(visitedNodes.size() - i);\n for (; i < visitedNodes.size(); i++) {\n dependencyList.add(visitedNodes.get(i));\n }\n if (!sameTypeNode && dependencyList.size() == 1\n && dependencyList.get(0).equals(currentTypeNodeName)) {\n \n \n \n \n return;\n }\n \n dependencyList.add(currentTypeNodeName);\n dlog.error(unresolvedType.getPosition(), 
DiagnosticErrorCode.CYCLIC_TYPE_REFERENCE, dependencyList);\n } else {\n visitedNodes.push(currentTypeNodeName);\n dlog.error(unresolvedType.getPosition(), DiagnosticErrorCode.CYCLIC_TYPE_REFERENCE, visitedNodes);\n visitedNodes.remove(currentTypeNodeName);\n }\n } else {\n \n \n List typeDefinitions = unresolvedTypes.stream()\n .filter(node -> getTypeOrClassName(node).equals(currentTypeNodeName)).collect(Collectors.toList());\n\n if (typeDefinitions.isEmpty()) {\n BType referredType = symResolver.resolveTypeNode(currentTypeOrClassNode, env);\n \n if (referredType != symTable.noType) {\n return;\n }\n\n \n \n LocationData locationData = new LocationData(\n currentTypeNodeName, currentTypeOrClassNode.pos.lineRange().startLine().line(),\n currentTypeOrClassNode.pos.lineRange().startLine().offset());\n if (unknownTypeRefs.add(locationData)) {\n dlog.error(currentTypeOrClassNode.pos, DiagnosticErrorCode.UNKNOWN_TYPE, currentTypeNodeName);\n }\n } else {\n for (BLangNode typeDefinition : typeDefinitions) {\n if (typeDefinition.getKind() == NodeKind.TYPE_DEFINITION) {\n BLangTypeDefinition langTypeDefinition = (BLangTypeDefinition) typeDefinition;\n String typeName = langTypeDefinition.getName().getValue();\n \n visitedNodes.push(typeName);\n \n checkErrors(env, unresolvedType, langTypeDefinition.getTypeNode(), visitedNodes,\n fromStructuredType);\n \n visitedNodes.pop();\n } else {\n BLangClassDefinition classDefinition = (BLangClassDefinition) typeDefinition;\n visitedNodes.push(classDefinition.getName().getValue());\n checkErrors(env, unresolvedType, classDefinition, visitedNodes, fromStructuredType);\n visitedNodes.pop();\n }\n }\n }\n }\n }\n\n private String getTypeOrClassName(BLangNode node) {\n if (node.getKind() == NodeKind.TYPE_DEFINITION || node.getKind() == NodeKind.CONSTANT) {\n return ((TypeDefinition) node).getName().getValue();\n } else {\n return ((BLangClassDefinition) node).getName().getValue();\n }\n }\n\n public boolean 
isUnknownTypeRef(BLangUserDefinedType bLangUserDefinedType) {\n var startLine = bLangUserDefinedType.pos.lineRange().startLine();\n LocationData locationData = new LocationData(bLangUserDefinedType.typeName.value, startLine.line(),\n startLine.offset());\n return unknownTypeRefs.contains(locationData);\n }\n\n @Override\n public void visit(BLangTypeDefinition typeDefinition) {\n BType definedType;\n if (typeDefinition.hasCyclicReference) {\n definedType = getCyclicDefinedType(typeDefinition, env);\n } else {\n definedType = symResolver.resolveTypeNode(typeDefinition.typeNode, env);\n }\n\n if (definedType == symTable.semanticError) {\n \n\n invalidateAlreadyDefinedErrorType(typeDefinition);\n return;\n }\n if (definedType == symTable.noType) {\n \n if (!this.unresolvedTypes.contains(typeDefinition)) {\n this.unresolvedTypes.add(typeDefinition);\n }\n return;\n }\n\n \n boolean hasTypeInclusions = false;\n NodeKind typeNodeKind = typeDefinition.typeNode.getKind();\n if (typeNodeKind == TUPLE_TYPE_NODE) {\n if (definedType.tsymbol.scope == null) {\n definedType.tsymbol.scope = new Scope(definedType.tsymbol);\n }\n }\n if (typeNodeKind == NodeKind.OBJECT_TYPE || typeNodeKind == NodeKind.RECORD_TYPE) {\n if (definedType.tsymbol.scope == null) {\n definedType.tsymbol.scope = new Scope(definedType.tsymbol);\n }\n BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDefinition.typeNode;\n \n \n for (BLangType typeRef : structureTypeNode.typeRefs) {\n hasTypeInclusions = true;\n BType referencedType = symResolver.resolveTypeNode(typeRef, env);\n if (referencedType == symTable.noType) {\n if (!this.unresolvedTypes.contains(typeDefinition)) {\n this.unresolvedTypes.add(typeDefinition);\n return;\n }\n }\n }\n }\n\n \n if (hasTypeInclusions && !this.resolveRecordsUnresolvedDueToFields && typeNodeKind == NodeKind.RECORD_TYPE) {\n BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDefinition.typeNode;\n for (BLangSimpleVariable variable : 
structureTypeNode.fields) {\n if (variable.typeNode.getKind() == NodeKind.FUNCTION_TYPE) {\n continue;\n }\n Scope scope = new Scope(structureTypeNode.symbol);\n structureTypeNode.symbol.scope = scope;\n SymbolEnv typeEnv = SymbolEnv.createTypeEnv(structureTypeNode, scope, env);\n BType referencedType = symResolver.resolveTypeNode(variable.typeNode, typeEnv);\n if (referencedType == symTable.noType) {\n if (this.unresolvedRecordDueToFields.add(typeDefinition) &&\n !this.unresolvedTypes.contains(typeDefinition)) {\n this.unresolvedTypes.add(typeDefinition);\n return;\n }\n }\n }\n }\n\n if (typeDefinition.flagSet.contains(Flag.ENUM)) {\n definedType.tsymbol = createEnumSymbol(typeDefinition, definedType);\n }\n\n typeDefinition.setPrecedence(this.typePrecedence++);\n\n BSymbol typeDefSymbol = Symbols.createTypeDefinitionSymbol(Flags.asMask(typeDefinition.flagSet),\n names.fromIdNode(typeDefinition.name), env.enclPkg.packageID, definedType, env.scope.owner,\n typeDefinition.name.pos, getOrigin(typeDefinition.name.value));\n typeDefSymbol.markdownDocumentation = getMarkdownDocAttachment(typeDefinition.markdownDocumentationAttachment);\n BTypeSymbol typeSymbol = new BTypeSymbol(SymTag.TYPE_REF, typeDefSymbol.flags, typeDefSymbol.name,\n typeDefSymbol.pkgID, typeDefSymbol.type, typeDefSymbol.owner, typeDefSymbol.pos, typeDefSymbol.origin);\n typeSymbol.markdownDocumentation = typeDefSymbol.markdownDocumentation;\n ((BTypeDefinitionSymbol) typeDefSymbol).referenceType = new BTypeReferenceType(definedType, typeSymbol,\n typeDefSymbol.type.flags);\n\n boolean isLabel = true;\n \n if (definedType.tsymbol.name == Names.EMPTY) {\n isLabel = false;\n definedType.tsymbol.name = names.fromIdNode(typeDefinition.name);\n definedType.tsymbol.originalName = names.originalNameFromIdNode(typeDefinition.name);\n definedType.tsymbol.flags |= typeDefSymbol.flags;\n\n definedType.tsymbol.markdownDocumentation = typeDefSymbol.markdownDocumentation;\n definedType.tsymbol.pkgID = 
env.enclPkg.packageID;\n if (definedType.tsymbol.tag == SymTag.ERROR) {\n definedType.tsymbol.owner = env.scope.owner;\n }\n }\n\n if ((((definedType.tsymbol.kind == SymbolKind.OBJECT\n && !Symbols.isFlagOn(definedType.tsymbol.flags, Flags.CLASS))\n || definedType.tsymbol.kind == SymbolKind.RECORD))\n && ((BStructureTypeSymbol) definedType.tsymbol).typeDefinitionSymbol == null) {\n ((BStructureTypeSymbol) definedType.tsymbol).typeDefinitionSymbol = (BTypeDefinitionSymbol) typeDefSymbol;\n }\n\n if (typeDefinition.flagSet.contains(Flag.ENUM)) {\n typeDefSymbol = definedType.tsymbol;\n typeDefSymbol.pos = typeDefinition.name.pos;\n }\n\n boolean isErrorIntersection = isErrorIntersection(definedType);\n if (isErrorIntersection) {\n populateSymbolNameOfErrorIntersection(definedType, typeDefinition.name.value);\n populateAllReadyDefinedErrorIntersection(definedType, typeDefinition, env);\n }\n\n BType referenceConstraintType = Types.getReferredType(definedType);\n boolean isIntersectionType = referenceConstraintType.tag == TypeTags.INTERSECTION && !isLabel;\n\n BType effectiveDefinedType = isIntersectionType ? 
((BIntersectionType) referenceConstraintType).effectiveType :\n referenceConstraintType;\n\n boolean isIntersectionTypeWithNonNullEffectiveTypeSymbol =\n isIntersectionType && effectiveDefinedType.tsymbol != null;\n\n if (isIntersectionTypeWithNonNullEffectiveTypeSymbol) {\n BTypeSymbol effectiveTypeSymbol = effectiveDefinedType.tsymbol;\n effectiveTypeSymbol.name = typeDefSymbol.name;\n effectiveTypeSymbol.pkgID = typeDefSymbol.pkgID;\n }\n\n handleDistinctDefinition(typeDefinition, typeDefSymbol, definedType, referenceConstraintType);\n\n typeDefSymbol.flags |= Flags.asMask(typeDefinition.flagSet);\n \n typeDefSymbol.flags &= getPublicFlagResetingMask(typeDefinition.flagSet, typeDefinition.typeNode);\n if (isDeprecated(typeDefinition.annAttachments)) {\n typeDefSymbol.flags |= Flags.DEPRECATED;\n }\n\n \n if (Symbols.isFlagOn(typeDefSymbol.flags, Flags.ANONYMOUS)) {\n typeDefSymbol.origin = VIRTUAL;\n }\n\n if (typeDefinition.annAttachments.stream()\n .anyMatch(attachment -> attachment.annotationName.value.equals(Names.ANNOTATION_TYPE_PARAM.value))) {\n \n \n if (PackageID.isLangLibPackageID(this.env.enclPkg.packageID)) {\n typeDefSymbol.type = typeParamAnalyzer.createTypeParam(typeDefSymbol.type, typeDefSymbol.name);\n typeDefSymbol.flags |= Flags.TYPE_PARAM;\n } else {\n dlog.error(typeDefinition.pos, DiagnosticErrorCode.TYPE_PARAM_OUTSIDE_LANG_MODULE);\n }\n }\n definedType.flags |= typeDefSymbol.flags;\n\n if (isIntersectionTypeWithNonNullEffectiveTypeSymbol) {\n BTypeSymbol effectiveTypeSymbol = effectiveDefinedType.tsymbol;\n effectiveTypeSymbol.flags |= definedType.tsymbol.flags;\n effectiveTypeSymbol.origin = VIRTUAL;\n effectiveDefinedType.flags |= definedType.flags;\n }\n\n typeDefinition.symbol = typeDefSymbol;\n\n if (typeDefinition.hasCyclicReference) {\n \n typeDefinition.getBType().tsymbol = definedType.tsymbol;\n } else {\n boolean isLanglibModule = PackageID.isLangLibPackageID(this.env.enclPkg.packageID);\n if (isLanglibModule) {\n 
handleLangLibTypes(typeDefinition);\n return;\n }\n \n if (!isErrorIntersection || lookupTypeSymbol(env, typeDefinition.name) == symTable.notFoundSymbol) {\n defineSymbol(typeDefinition.name.pos, typeDefSymbol);\n }\n }\n }\n\n private void handleDistinctDefinition(BLangTypeDefinition typeDefinition, BSymbol typeDefSymbol,\n BType definedType, BType referenceConstraintType) {\n BType distinctType = definedType;\n if (isDistinctFlagPresent(typeDefinition)) {\n if (referenceConstraintType.getKind() == TypeKind.ERROR) {\n distinctType = getDistinctErrorType(typeDefinition, (BErrorType) referenceConstraintType,\n typeDefSymbol);\n typeDefinition.typeNode.setBType(distinctType);\n } else if (referenceConstraintType.tag == TypeTags.INTERSECTION &&\n ((BIntersectionType) referenceConstraintType).effectiveType.getKind() == TypeKind.ERROR) {\n boolean distinctFlagPresentInTypeDef = typeDefinition.typeNode.flagSet.contains(Flag.DISTINCT);\n\n BTypeIdSet typeIdSet = BTypeIdSet.emptySet();\n int numberOfDistinctConstituentTypes = 0;\n BLangIntersectionTypeNode intersectionTypeNode = (BLangIntersectionTypeNode) typeDefinition.typeNode;\n for (BLangType constituentType : intersectionTypeNode.constituentTypeNodes) {\n BType type = constituentType.getBType();\n\n if (type.getKind() == TypeKind.ERROR) {\n if (constituentType.flagSet.contains(Flag.DISTINCT)) {\n numberOfDistinctConstituentTypes++;\n typeIdSet.addSecondarySet(((BErrorType) type).typeIdSet.getAll());\n } else {\n typeIdSet.add(((BErrorType) type).typeIdSet);\n }\n }\n }\n\n BErrorType effectiveType = (BErrorType) ((BIntersectionType) referenceConstraintType).effectiveType;\n\n \n \n \n if (numberOfDistinctConstituentTypes == 1\n || (numberOfDistinctConstituentTypes == 0 && distinctFlagPresentInTypeDef)) {\n BTypeIdSet typeIdSetForDefinedType = BTypeIdSet.from(\n env.enclPkg.packageID,\n typeDefinition.name.value,\n typeDefinition.flagSet.contains(Flag.PUBLIC),\n typeIdSet);\n 
effectiveType.typeIdSet.add(typeIdSetForDefinedType);\n } else {\n for (BLangType constituentType : intersectionTypeNode.constituentTypeNodes) {\n if (constituentType.getBType().getKind() != TypeKind.ERROR) {\n continue;\n }\n if (constituentType.flagSet.contains(Flag.DISTINCT)) {\n typeIdSet.add(BTypeIdSet.from(env.enclPkg.packageID,\n anonymousModelHelper.getNextAnonymousTypeId(env.enclPkg.packageID), true));\n }\n }\n effectiveType.typeIdSet.add(typeIdSet);\n }\n\n } else if (referenceConstraintType.getKind() == TypeKind.OBJECT) {\n distinctType = getDistinctObjectType(typeDefinition, (BObjectType) referenceConstraintType,\n referenceConstraintType.tsymbol);\n typeDefinition.typeNode.setBType(distinctType);\n }\n\n \n if (((BTypeDefinitionSymbol) typeDefSymbol).referenceType != null) {\n ((BTypeDefinitionSymbol) typeDefSymbol).referenceType.referredType = distinctType;\n }\n definedType.flags |= Flags.DISTINCT;\n }\n }\n\n private void invalidateAlreadyDefinedErrorType(BLangTypeDefinition typeDefinition) {\n \n BSymbol alreadyDefinedTypeSymbol = lookupTypeSymbol(env, typeDefinition.name);\n if (alreadyDefinedTypeSymbol.type.tag == TypeTags.ERROR) {\n alreadyDefinedTypeSymbol.type = symTable.errorType;\n }\n }\n\n private void populateErrorTypeIds(BErrorType effectiveType, BLangIntersectionTypeNode typeNode, String name,\n boolean distinctFlagPresentInTypeDef) {\n BTypeIdSet typeIdSet = BTypeIdSet.emptySet();\n int numberOfDistinctConstituentTypes = 0;\n\n for (BLangType constituentType : typeNode.constituentTypeNodes) {\n BType resolvedTypeNode = symResolver.resolveTypeNode(constituentType, env);\n BType type = Types.getReferredType(resolvedTypeNode);\n\n if (type.getKind() == TypeKind.ERROR) {\n if (constituentType.flagSet.contains(Flag.DISTINCT)) {\n numberOfDistinctConstituentTypes++;\n typeIdSet.addSecondarySet(((BErrorType) type).typeIdSet.getAll());\n } else {\n typeIdSet.add(((BErrorType) type).typeIdSet);\n }\n }\n }\n\n \n \n \n if 
(numberOfDistinctConstituentTypes == 1\n || (numberOfDistinctConstituentTypes == 0 && distinctFlagPresentInTypeDef)) {\n effectiveType.typeIdSet = BTypeIdSet.from(env.enclPkg.packageID, name, true, typeIdSet);\n } else {\n for (BLangType constituentType : typeNode.constituentTypeNodes) {\n if (constituentType.flagSet.contains(Flag.DISTINCT)) {\n typeIdSet.add(BTypeIdSet.from(env.enclPkg.packageID,\n anonymousModelHelper.getNextAnonymousTypeId(env.enclPkg.packageID), true));\n }\n }\n effectiveType.typeIdSet = typeIdSet;\n }\n }\n\n private void populateAllReadyDefinedErrorIntersection(BType definedType, BLangTypeDefinition typeDefinition,\n SymbolEnv env) {\n\n BSymbol bSymbol = lookupTypeSymbol(env, typeDefinition.name);\n BErrorType alreadyDefinedErrorType = (BErrorType) bSymbol.type;\n\n boolean distinctFlagPresent = typeDefinition.typeNode.flagSet.contains(Flag.DISTINCT);\n\n BIntersectionType intersectionType = (BIntersectionType) definedType;\n BErrorType errorType = (BErrorType) intersectionType.effectiveType;\n populateErrorTypeIds(errorType, (BLangIntersectionTypeNode) typeDefinition.typeNode,\n typeDefinition.name.value, distinctFlagPresent);\n\n alreadyDefinedErrorType.typeIdSet = errorType.typeIdSet;\n alreadyDefinedErrorType.detailType = errorType.detailType;\n alreadyDefinedErrorType.flags = errorType.flags;\n alreadyDefinedErrorType.name = errorType.name;\n intersectionType.effectiveType = alreadyDefinedErrorType;\n\n if (!errorType.typeIdSet.isEmpty()) {\n definedType.flags |= Flags.DISTINCT;\n }\n }\n\n private BSymbol lookupTypeSymbol(SymbolEnv env, BLangIdentifier name) {\n return symResolver.lookupSymbolInMainSpace(env, names.fromString(name.value));\n }\n\n private void populateSymbolNameOfErrorIntersection(BType definedType, String typeDefName) {\n BErrorType effectiveErrorType = (BErrorType) ((BIntersectionType) definedType).effectiveType;\n effectiveErrorType.tsymbol.name = names.fromString(typeDefName);\n }\n\n private boolean 
isErrorIntersection(BType definedType) {\n BType type = Types.getReferredType(definedType);\n if (type.tag == TypeTags.INTERSECTION) {\n BIntersectionType intersectionType = (BIntersectionType) type;\n return intersectionType.effectiveType.tag == TypeTags.ERROR;\n }\n\n return false;\n }\n\n private BEnumSymbol createEnumSymbol(BLangTypeDefinition typeDefinition, BType definedType) {\n List enumMembers = new ArrayList<>();\n\n List members = ((BLangUnionTypeNode) typeDefinition.typeNode).memberTypeNodes;\n for (BLangType member : members) {\n enumMembers.add((BConstantSymbol) ((BLangUserDefinedType) member).symbol);\n }\n\n BEnumSymbol enumSymbol = new BEnumSymbol(enumMembers, Flags.asMask(typeDefinition.flagSet),\n names.fromIdNode(typeDefinition.name), names.fromIdNode(typeDefinition.name),\n env.enclPkg.symbol.pkgID, definedType, env.scope.owner,\n typeDefinition.pos, SOURCE);\n\n enumSymbol.name = names.fromIdNode(typeDefinition.name);\n enumSymbol.originalName = names.fromIdNode(typeDefinition.name);\n enumSymbol.flags |= Flags.asMask(typeDefinition.flagSet);\n\n enumSymbol.markdownDocumentation = getMarkdownDocAttachment(typeDefinition.markdownDocumentationAttachment);\n enumSymbol.pkgID = env.enclPkg.packageID;\n return enumSymbol;\n }\n\n private BObjectType getDistinctObjectType(BLangTypeDefinition typeDefinition, BObjectType definedType,\n BTypeSymbol typeDefSymbol) {\n BTypeSymbol tSymbol = typeDefSymbol.kind == SymbolKind.TYPE_DEF ? 
typeDefSymbol.type.tsymbol : typeDefSymbol;\n BObjectType definedObjType = definedType;\n \n \n \n if (definedObjType.tsymbol != tSymbol) {\n BObjectType objType = new BObjectType(tSymbol);\n tSymbol.type = objType;\n definedObjType = objType;\n }\n boolean isPublicType = typeDefinition.flagSet.contains(Flag.PUBLIC);\n definedObjType.typeIdSet = calculateTypeIdSet(typeDefinition, isPublicType, definedType.typeIdSet);\n return definedObjType;\n }\n\n private void defineTypeInMainScope(BTypeSymbol typeDefSymbol, BLangTypeDefinition typeDef, SymbolEnv env) {\n if (PackageID.isLangLibPackageID(env.enclPkg.packageID)) {\n typeDefSymbol.origin = BUILTIN;\n handleLangLibTypes(typeDef);\n } else {\n defineSymbol(typeDef.name.pos, typeDefSymbol, env);\n }\n }\n\n private BType defineSymbolForCyclicTypeDefinition(BLangTypeDefinition typeDef, SymbolEnv env) {\n Name newTypeDefName = names.fromIdNode(typeDef.name);\n BTypeSymbol typeDefSymbol;\n BType newTypeNode;\n\n switch (typeDef.typeNode.getKind()) {\n case TUPLE_TYPE_NODE:\n newTypeNode = new BTupleType(null, new ArrayList<>(), true);\n typeDefSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE, Flags.asMask(typeDef.flagSet),\n newTypeDefName, env.enclPkg.symbol.pkgID, newTypeNode, env.scope.owner,\n typeDef.name.pos, SOURCE);\n break;\n default:\n newTypeNode = BUnionType.create(null, new LinkedHashSet<>(), true);\n typeDefSymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(typeDef.flagSet),\n newTypeDefName, env.enclPkg.symbol.pkgID, newTypeNode, env.scope.owner,\n typeDef.name.pos, SOURCE);\n }\n typeDef.symbol = typeDefSymbol;\n defineTypeInMainScope(typeDefSymbol, typeDef, env);\n newTypeNode.tsymbol = typeDefSymbol;\n newTypeNode.flags |= typeDefSymbol.flags;\n return newTypeNode;\n }\n\n private BType getCyclicDefinedType(BLangTypeDefinition typeDef, SymbolEnv env) {\n \n BSymbol foundSym = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(typeDef.name));\n BType newTypeNode = 
foundSym.type;\n\n \n \n \n \n BType resolvedTypeNodes = symResolver.resolveTypeNode(typeDef.typeNode, env);\n\n if (resolvedTypeNodes == symTable.noType) {\n return symTable.semanticError;\n }\n\n switch (resolvedTypeNodes.tag) {\n case TypeTags.TUPLE:\n BTupleType definedTupleType = (BTupleType) resolvedTypeNodes;\n for (BType member : definedTupleType.getTupleTypes()) {\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(member);\n if (!((BTupleType) newTypeNode).addMembers(new BTupleMember(member, varSymbol))) {\n return constructDependencyListError(typeDef, member);\n }\n }\n if (!((BTupleType) newTypeNode).addRestType(definedTupleType.restType)) {\n return constructDependencyListError(typeDef, definedTupleType.restType);\n }\n break;\n default:\n BUnionType definedUnionType = (BUnionType) resolvedTypeNodes;\n for (BType member : definedUnionType.getMemberTypes()) {\n ((BUnionType) newTypeNode).add(member);\n }\n break;\n }\n typeDef.typeNode.setBType(newTypeNode);\n typeDef.typeNode.getBType().tsymbol.type = newTypeNode;\n typeDef.symbol.type = newTypeNode;\n typeDef.setBType(newTypeNode);\n return newTypeNode;\n }\n\n private void defineAllUnresolvedCyclicTypesInScope(SymbolEnv env) {\n SymbolEnv prevEnv = this.env;\n this.env = env;\n for (BLangNode unresolvedNode : unresolvedTypes) {\n if (unresolvedNode.getKind() == NodeKind.TYPE_DEFINITION &&\n ((BLangTypeDefinition) unresolvedNode).hasCyclicReference) {\n defineSymbolForCyclicTypeDefinition((BLangTypeDefinition) unresolvedNode, env);\n }\n }\n this.env = prevEnv;\n }\n\n private BType constructDependencyListError(BLangTypeDefinition typeDef, BType member) {\n List dependencyList = new ArrayList<>();\n dependencyList.add(getTypeOrClassName(typeDef));\n dependencyList.add(member.tsymbol.name.value);\n dlog.error(typeDef.getPosition(), DiagnosticErrorCode.CYCLIC_TYPE_REFERENCE, dependencyList);\n return symTable.semanticError;\n }\n\n private BErrorType getDistinctErrorType(BLangTypeDefinition 
typeDefinition, BErrorType definedType,\n BSymbol typeDefSymbol) {\n BErrorType definedErrorType = definedType;\n \n \n \n if (definedErrorType.tsymbol != typeDefSymbol) {\n BTypeSymbol typeSymbol = new BTypeSymbol(SymTag.TYPE_DEF, typeDefSymbol.flags, typeDefSymbol.name,\n typeDefSymbol.pkgID, null, typeDefSymbol.owner, typeDefSymbol.pos, typeDefSymbol.origin);\n BErrorType bErrorType = new BErrorType(typeSymbol);\n typeSymbol.type = bErrorType;\n bErrorType.detailType = definedErrorType.detailType;\n typeDefSymbol.type = bErrorType;\n definedErrorType = bErrorType;\n }\n boolean isPublicType = typeDefinition.flagSet.contains(Flag.PUBLIC);\n definedErrorType.typeIdSet = calculateTypeIdSet(typeDefinition, isPublicType, definedType.typeIdSet);\n return definedErrorType;\n }\n\n private BTypeIdSet calculateTypeIdSet(BLangTypeDefinition typeDefinition, boolean isPublicType,\n BTypeIdSet secondary) {\n String name = typeDefinition.flagSet.contains(Flag.ANONYMOUS)\n ? anonymousModelHelper.getNextAnonymousTypeId(env.enclPkg.packageID)\n : typeDefinition.getName().value;\n\n return BTypeIdSet.from(env.enclPkg.packageID, name, isPublicType, secondary);\n }\n\n private boolean isDistinctFlagPresent(BLangTypeDefinition typeDefinition) {\n if (typeDefinition.typeNode.flagSet.contains(Flag.DISTINCT)) {\n return true;\n }\n\n return false;\n }\n\n private void handleLangLibTypes(BLangTypeDefinition typeDefinition) {\n\n \n for (BLangAnnotationAttachment attachment : typeDefinition.annAttachments) {\n if (attachment.annotationName.value.equals(Names.ANNOTATION_TYPE_PARAM.value)) {\n BSymbol typeDefSymbol = typeDefinition.symbol;\n typeDefSymbol.type = typeParamAnalyzer.createTypeParam(typeDefSymbol.type, typeDefSymbol.name);\n typeDefSymbol.flags |= Flags.TYPE_PARAM;\n break;\n } else if (attachment.annotationName.value.equals(Names.ANNOTATION_BUILTIN_SUBTYPE.value)) {\n \n BType type = symTable.getLangLibSubType(typeDefinition.name.value);\n typeDefinition.symbol.type = type;\n 
typeDefinition.symbol.flags |= type.tsymbol.flags;\n ((BTypeDefinitionSymbol) typeDefinition.symbol).referenceType.tsymbol.flags |= type.tsymbol.flags;\n ((BTypeDefinitionSymbol) typeDefinition.symbol).referenceType.referredType = type;\n typeDefinition.setBType(type);\n typeDefinition.typeNode.setBType(type);\n typeDefinition.isBuiltinTypeDef = true;\n break;\n }\n throw new IllegalStateException(\"Not supported annotation attachment at:\" + attachment.pos);\n }\n defineSymbol(typeDefinition.name.pos, typeDefinition.symbol);\n }\n\n \n \n \n \n private long getPublicFlagResetingMask(Set flagSet, BLangType typeNode) {\n boolean isAnonType =\n typeNode instanceof BLangStructureTypeNode && ((BLangStructureTypeNode) typeNode).isAnonymous;\n if (flagSet.contains(Flag.PUBLIC) || isAnonType) {\n return Long.MAX_VALUE;\n } else {\n return ~Flags.PUBLIC;\n }\n }\n\n @Override\n public void visit(BLangService serviceNode) {\n defineNode(serviceNode.serviceVariable, env);\n\n Name generatedServiceName = names.fromString(\"service$\" + serviceNode.serviceClass.symbol.name.value);\n BType type = serviceNode.serviceClass.typeRefs.isEmpty() ? 
null : serviceNode.serviceClass.typeRefs.get(0)\n .getBType();\n BServiceSymbol serviceSymbol = new BServiceSymbol((BClassSymbol) serviceNode.serviceClass.symbol,\n Flags.asMask(serviceNode.flagSet), generatedServiceName,\n env.enclPkg.symbol.pkgID, type, env.enclPkg.symbol,\n serviceNode.pos, SOURCE);\n serviceNode.symbol = serviceSymbol;\n\n if (!serviceNode.absoluteResourcePath.isEmpty()) {\n if (\"/\".equals(serviceNode.absoluteResourcePath.get(0).getValue())) {\n serviceSymbol.setAbsResourcePath(Collections.emptyList());\n } else {\n List list = new ArrayList<>(serviceNode.absoluteResourcePath.size());\n for (IdentifierNode identifierNode : serviceNode.absoluteResourcePath) {\n list.add(identifierNode.getValue());\n }\n serviceSymbol.setAbsResourcePath(list);\n }\n }\n\n if (serviceNode.serviceNameLiteral != null) {\n serviceSymbol.setAttachPointStringLiteral(serviceNode.serviceNameLiteral.value.toString());\n }\n\n env.scope.define(serviceSymbol.name, serviceSymbol);\n }\n\n @Override\n public void visit(BLangResourceFunction funcNode) {\n boolean validAttachedFunc = validateFuncReceiver(funcNode);\n\n if (PackageID.isLangLibPackageID(env.enclPkg.symbol.pkgID)) {\n funcNode.flagSet.add(Flag.LANG_LIB);\n }\n\n BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),\n getFuncSymbolName(funcNode), getFuncSymbolOriginalName(funcNode),\n env.enclPkg.symbol.pkgID, null, env.scope.owner,\n funcNode.hasBody(), funcNode.name.pos, SOURCE);\n funcSymbol.source = funcNode.pos.lineRange().filePath();\n funcSymbol.markdownDocumentation = getMarkdownDocAttachment(funcNode.markdownDocumentationAttachment);\n SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);\n defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);\n funcNode.setBType(funcSymbol.type);\n\n if (isDeprecated(funcNode.annAttachments)) {\n funcSymbol.flags |= Flags.DEPRECATED;\n }\n \n if (funcNode.receiver != null) {\n 
defineAttachedFunctions(funcNode, funcSymbol, invokableEnv, validAttachedFunc);\n }\n }\n\n @Override\n public void visit(BLangFunction funcNode) {\n boolean validAttachedFunc = validateFuncReceiver(funcNode);\n boolean remoteFlagSetOnNode = Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.REMOTE);\n\n if (!funcNode.attachedFunction && Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.PRIVATE)) {\n dlog.error(funcNode.pos, DiagnosticErrorCode.PRIVATE_FUNCTION_VISIBILITY, funcNode.name);\n }\n\n if (funcNode.receiver == null && !funcNode.attachedFunction && remoteFlagSetOnNode) {\n dlog.error(funcNode.pos, DiagnosticErrorCode.REMOTE_IN_NON_OBJECT_FUNCTION, funcNode.name.value);\n }\n\n if (PackageID.isLangLibPackageID(env.enclPkg.symbol.pkgID)) {\n funcNode.flagSet.add(Flag.LANG_LIB);\n }\n\n Location symbolPos = funcNode.flagSet.contains(Flag.LAMBDA) ?\n symTable.builtinPos : funcNode.name.pos;\n BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),\n getFuncSymbolName(funcNode),\n getFuncSymbolOriginalName(funcNode),\n env.enclPkg.symbol.pkgID, null, env.scope.owner,\n funcNode.hasBody(), symbolPos,\n getOrigin(funcNode.name.value));\n funcSymbol.source = funcNode.pos.lineRange().filePath();\n funcSymbol.markdownDocumentation = getMarkdownDocAttachment(funcNode.markdownDocumentationAttachment);\n SymbolEnv invokableEnv;\n NodeKind previousNodeKind = env.node.getKind();\n if (previousNodeKind == NodeKind.CLASS_DEFN) {\n invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope,\n fieldsRemovedEnv(env, ((BLangClassDefinition) env.node).fields));\n } else if (previousNodeKind == NodeKind.OBJECT_TYPE) {\n invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope,\n fieldsRemovedEnv(env, ((BLangObjectTypeNode) env.node).fields));\n } else {\n invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);\n }\n defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);\n 
funcNode.setBType(funcSymbol.type);\n\n \n if (Symbols.isFlagOn(funcSymbol.flags, Flags.LAMBDA)) {\n funcSymbol.origin = VIRTUAL;\n }\n\n if (isDeprecated(funcNode.annAttachments)) {\n funcSymbol.flags |= Flags.DEPRECATED;\n }\n \n if (funcNode.receiver != null) {\n defineAttachedFunctions(funcNode, funcSymbol, invokableEnv, validAttachedFunc);\n }\n }\n\n private SymbolEnv fieldsRemovedEnv(SymbolEnv currentEnv, List fields) {\n if (fields.isEmpty()) {\n return currentEnv;\n }\n Scope currentScope = currentEnv.scope;\n Scope newScope = new Scope(currentScope.owner);\n newScope.entries.putAll(currentScope.entries);\n Map entries = newScope.entries;\n for (BLangSimpleVariable field : fields) {\n entries.remove(Names.fromString(field.name.value));\n }\n SymbolEnv newEnv = new SymbolEnv(currentEnv.node, newScope);\n currentEnv.copyTo(newEnv, currentEnv.enclEnv);\n return newEnv;\n }\n\n private boolean isDeprecated(List annAttachments) {\n for (BLangAnnotationAttachment annotationAttachment : annAttachments) {\n if (annotationAttachment.annotationName.getValue().equals(DEPRECATION_ANNOTATION)) {\n return true;\n }\n }\n return false;\n }\n\n @Override\n public void visit(BLangConstant constant) {\n BType staticType;\n if (constant.typeNode != null) {\n staticType = symResolver.resolveTypeNode(constant.typeNode, env);\n if (staticType == symTable.noType) {\n constant.symbol = getConstantSymbol(constant);\n \n if (!this.unresolvedTypes.contains(constant)) {\n this.unresolvedTypes.add(constant);\n }\n return;\n }\n } else {\n staticType = symTable.semanticError;\n }\n BConstantSymbol constantSymbol = getConstantSymbol(constant);\n constant.symbol = constantSymbol;\n\n NodeKind nodeKind = constant.expr.getKind();\n if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {\n if (constant.typeNode != null) {\n BType referredType = Types.getReferredType(staticType);\n if (types.isValidLiteral((BLangLiteral) constant.expr, referredType)) {\n \n \n \n \n 
BLangFiniteTypeNode finiteType = (BLangFiniteTypeNode) constant.associatedTypeDefinition.typeNode;\n BLangExpression valueSpaceExpr = finiteType.valueSpace.iterator().next();\n valueSpaceExpr.setBType(referredType);\n defineNode(constant.associatedTypeDefinition, env);\n\n constantSymbol.type = constant.associatedTypeDefinition.symbol.type;\n constantSymbol.literalType = referredType;\n } else {\n \n \n defineNode(constant.associatedTypeDefinition, env);\n constantSymbol.type = staticType;\n constantSymbol.literalType = constant.expr.getBType();\n }\n } else {\n \n \n defineNode(constant.associatedTypeDefinition, env);\n constantSymbol.type = constant.associatedTypeDefinition.symbol.type;\n constantSymbol.literalType = constant.expr.getBType();\n }\n if (constantSymbol.type.tag != TypeTags.TYPEREFDESC) {\n constantSymbol.type.tsymbol.flags |= constant.associatedTypeDefinition.symbol.flags;\n }\n\n } else if (nodeKind == NodeKind.UNARY_EXPR && constant.typeNode == null &&\n types.isLiteralInUnaryAllowed((BLangUnaryExpr) constant.expr)) {\n \n \n \n\n BLangUnaryExpr unaryConstant = (BLangUnaryExpr) constant.expr;\n \n BLangNumericLiteral literal = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();\n Types.setValueOfNumericLiteral(literal, unaryConstant);\n literal.isConstant = true;\n literal.setBType(unaryConstant.expr.getBType());\n ((BLangFiniteTypeNode) constant.getAssociatedTypeDefinition().getTypeNode()).valueSpace.set(0, literal);\n\n defineNode(constant.associatedTypeDefinition, env);\n constantSymbol.type = constant.associatedTypeDefinition.symbol.type;\n constantSymbol.literalType = unaryConstant.expr.getBType();\n } else if (constant.typeNode != null) {\n constantSymbol.type = constantSymbol.literalType = staticType;\n }\n constantSymbol.markdownDocumentation = getMarkdownDocAttachment(constant.markdownDocumentationAttachment);\n if (isDeprecated(constant.annAttachments)) {\n constantSymbol.flags |= Flags.DEPRECATED;\n }\n \n if 
(!symResolver.checkForUniqueSymbol(constant.name.pos, env, constantSymbol)) {\n return;\n }\n\n if (constant.symbol.name == Names.IGNORE) {\n \n return;\n }\n \n env.scope.define(constantSymbol.name, constantSymbol);\n }\n\n private BConstantSymbol getConstantSymbol(BLangConstant constant) {\n \n Name name = names.fromIdNode(constant.name);\n PackageID pkgID = env.enclPkg.symbol.pkgID;\n return new BConstantSymbol(Flags.asMask(constant.flagSet), name, names.originalNameFromIdNode(constant.name),\n pkgID, symTable.semanticError, symTable.noType, env.scope.owner,\n constant.name.pos, getOrigin(name));\n }\n\n @Override\n public void visit(BLangSimpleVariable varNode) {\n \n if (varNode.getBType() == null) {\n if (varNode.typeNode != null) {\n varNode.setBType(symResolver.resolveTypeNode(varNode.typeNode, env));\n } else {\n varNode.setBType(symTable.noType);\n }\n }\n\n Name varName = names.fromIdNode(varNode.name);\n Name varOrigName = names.originalNameFromIdNode(varNode.name);\n if (varName == Names.IGNORE || varNode.symbol != null) {\n return;\n }\n\n BVarSymbol varSymbol = defineVarSymbol(varNode.name.pos, varNode.flagSet, varNode.getBType(), varName,\n varOrigName, env, varNode.internal);\n if (isDeprecated(varNode.annAttachments)) {\n varSymbol.flags |= Flags.DEPRECATED;\n }\n\n \n if (varSymbol.type == symTable.semanticError && varSymbol.state == DiagnosticState.VALID) {\n varSymbol.state = DiagnosticState.UNKNOWN_TYPE;\n }\n\n varSymbol.markdownDocumentation = getMarkdownDocAttachment(varNode.markdownDocumentationAttachment);\n varNode.symbol = varSymbol;\n if (varNode.symbol.type.tsymbol != null && Symbols.isFlagOn(varNode.symbol.type.tsymbol.flags, Flags.CLIENT)) {\n varSymbol.tag = SymTag.ENDPOINT;\n }\n\n if (Types.getReferredType(varSymbol.type).tag == TypeTags.FUTURE\n && ((BFutureType) Types.getReferredType(varSymbol.type)).workerDerivative) {\n Iterator lambdaFunctions = env.enclPkg.lambdaFunctions.iterator();\n while (lambdaFunctions.hasNext()) {\n 
BLangLambdaFunction lambdaFunction = lambdaFunctions.next();\n \n \n \n BLangInvokableNode enclInvokable = lambdaFunction.capturedClosureEnv.enclInvokable;\n if (lambdaFunctions.hasNext() && enclInvokable != null && varSymbol.owner == enclInvokable.symbol) {\n lambdaFunction.capturedClosureEnv.scope.define(varSymbol.name, varSymbol);\n }\n }\n }\n\n if (Types.getReferredType(varSymbol.type).tag == TypeTags.INVOKABLE) {\n BInvokableSymbol symbol = (BInvokableSymbol) varSymbol;\n BTypeSymbol typeSymbol = Types.getReferredType(varSymbol.type).tsymbol;\n BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) typeSymbol;\n symbol.params = tsymbol.params == null ? null : new ArrayList<>(tsymbol.params);\n symbol.restParam = tsymbol.restParam;\n symbol.retType = tsymbol.returnType;\n }\n\n if ((env.scope.owner.tag & SymTag.RECORD) != SymTag.RECORD && !varNode.flagSet.contains(Flag.NEVER_ALLOWED) &&\n (env.scope.owner.tag & SymTag.TUPLE_TYPE) != SymTag.TUPLE_TYPE &&\n types.isNeverTypeOrStructureTypeWithARequiredNeverMember(varSymbol.type)) {\n \n \n \n if (varNode.flagSet.contains(Flag.REQUIRED_PARAM) || varNode.flagSet.contains(Flag.DEFAULTABLE_PARAM)) {\n dlog.error(varNode.pos, DiagnosticErrorCode.NEVER_TYPE_NOT_ALLOWED_FOR_REQUIRED_DEFAULTABLE_PARAMS);\n } else {\n if ((env.scope.owner.tag & SymTag.OBJECT) == SymTag.OBJECT) {\n dlog.error(varNode.pos, DiagnosticErrorCode.NEVER_TYPED_OBJECT_FIELD_NOT_ALLOWED);\n } else {\n dlog.error(varNode.pos, DiagnosticErrorCode.NEVER_TYPED_VAR_DEF_NOT_ALLOWED);\n }\n }\n }\n }\n\n @Override\n public void visit(BLangTupleVariable varNode) {\n if (varNode.isDeclaredWithVar) {\n varNode.symbol =\n defineVarSymbol(varNode.pos, varNode.flagSet, symTable.noType,\n names.fromString(anonymousModelHelper.getNextTupleVarKey(env.enclPkg.packageID)),\n env, true);\n \n List memberVariables = new ArrayList<>(varNode.memberVariables);\n if (varNode.restVariable != null) {\n memberVariables.add(varNode.restVariable);\n }\n for (int i = 0; i < 
memberVariables.size(); i++) {\n BLangVariable memberVar = memberVariables.get(i);\n memberVar.isDeclaredWithVar = true;\n defineNode(memberVar, env);\n }\n return;\n }\n if (varNode.getBType() == null) {\n varNode.setBType(symResolver.resolveTypeNode(varNode.typeNode, env));\n }\n \n if (!(checkTypeAndVarCountConsistency(varNode, env))) {\n varNode.setBType(symTable.semanticError);\n return;\n }\n }\n\n boolean checkTypeAndVarCountConsistency(BLangTupleVariable var, SymbolEnv env) {\n if (var.symbol == null) {\n Name varName = names.fromString(anonymousModelHelper.getNextTupleVarKey(env.enclPkg.packageID));\n var.symbol = defineVarSymbol(var.pos, var.flagSet, var.getBType(), varName, env, true);\n }\n\n return checkTypeAndVarCountConsistency(var, null, env);\n }\n\n boolean checkTypeAndVarCountConsistency(BLangTupleVariable varNode, BTupleType tupleTypeNode,\n SymbolEnv env) {\n if (tupleTypeNode == null) {\n /*\n This switch block will resolve the tuple type of the tuple variable.\n For example consider the following - [int, string]|[boolean, float] [a, b] = foo();\n Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows:\n Type of 'a' will be (int | boolean) while the type of 'b' will be (string | float).\n Consider anydata (a, b) = foo();\n Here, the type of 'a'and type of 'b' will be both anydata.\n */\n BType bType = varNode.getBType();\n BType referredType = Types.getEffectiveType(Types.getReferredType(bType));\n switch (referredType.tag) {\n case TypeTags.UNION:\n Set unionType = types.expandAndGetMemberTypesRecursive(referredType);\n List possibleTypes = new ArrayList<>();\n for (BType type : unionType) {\n if (!(TypeTags.TUPLE == type.tag &&\n checkMemVarCountMatchWithMemTypeCount(varNode, (BTupleType) type)) &&\n TypeTags.ANY != type.tag && TypeTags.ANYDATA != type.tag &&\n (TypeTags.ARRAY != type.tag || ((BArrayType) type).state == BArrayState.OPEN)) {\n continue;\n }\n possibleTypes.add(type);\n }\n if 
(possibleTypes.isEmpty()) {\n \n if (varNode.isDeclaredWithVar) {\n dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_LIST_BINDING_PATTERN);\n return false;\n }\n dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_LIST_BINDING_PATTERN_DECL, bType);\n return false;\n }\n\n if (possibleTypes.size() > 1) {\n List members = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n LinkedHashSet memberTypes = new LinkedHashSet<>();\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.TUPLE) {\n memberTypes.add(((BTupleType) possibleType).getTupleTypes().get(i));\n } else if (possibleType.tag == TypeTags.ARRAY) {\n memberTypes.add(((BArrayType) possibleType).eType);\n } else {\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(referredType);\n members.add(new BTupleMember(referredType, varSymbol));\n }\n }\n\n if (memberTypes.size() > 1) {\n BType type = BUnionType.create(null, memberTypes);\n BVarSymbol varSymbol = new BVarSymbol(type.flags, null, null, type, null,\n null, null);\n members.add(new BTupleMember(type, varSymbol));\n } else {\n memberTypes.forEach(m ->\n members.add(new BTupleMember(m,\n Symbols.createVarSymbolForTupleMember(m))));\n }\n }\n tupleTypeNode = new BTupleType(members);\n tupleTypeNode.restType = getPossibleRestTypeForUnion(varNode, possibleTypes);\n break;\n }\n\n if (possibleTypes.get(0).tag == TypeTags.TUPLE) {\n tupleTypeNode = (BTupleType) possibleTypes.get(0);\n tupleTypeNode.restType = getPossibleRestTypeForUnion(varNode, possibleTypes);\n break;\n }\n\n List members = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n BType type = possibleTypes.get(0);\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(type);\n members.add(new BTupleMember(type, varSymbol));\n }\n tupleTypeNode = new BTupleType(members);\n tupleTypeNode.restType = getPossibleRestTypeForUnion(varNode, possibleTypes);\n break;\n case TypeTags.ANY:\n case 
TypeTags.ANYDATA:\n List memberTupleTypes = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(referredType);\n memberTupleTypes.add(new BTupleMember(referredType, varSymbol));\n }\n tupleTypeNode = new BTupleType(memberTupleTypes);\n if (varNode.restVariable != null) {\n tupleTypeNode.restType = referredType;\n }\n break;\n case TypeTags.TUPLE:\n tupleTypeNode = (BTupleType) referredType;\n break;\n case TypeTags.ARRAY:\n List tupleTypes = new ArrayList<>();\n BArrayType arrayType = (BArrayType) referredType;\n tupleTypeNode = new BTupleType(tupleTypes);\n BType eType = arrayType.eType;\n for (int i = 0; i < arrayType.size; i++) {\n BType type = arrayType.eType;\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(type);\n tupleTypes.add(new BTupleMember(type, varSymbol));\n\n }\n if (varNode.restVariable != null) {\n tupleTypeNode.restType = eType;\n }\n break;\n default:\n dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_LIST_BINDING_PATTERN_DECL, bType);\n return false;\n }\n }\n\n if (!checkMemVarCountMatchWithMemTypeCount(varNode, tupleTypeNode)) {\n dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_LIST_BINDING_PATTERN);\n return false;\n }\n\n int ignoredCount = 0;\n int i = 0;\n BType type;\n List tupleMemberTypes = tupleTypeNode.getTupleTypes();\n for (BLangVariable var : varNode.memberVariables) {\n type = tupleMemberTypes.get(i);\n i++;\n if (var.getKind() == NodeKind.VARIABLE) {\n \n BLangSimpleVariable simpleVar = (BLangSimpleVariable) var;\n Name varName = names.fromIdNode(simpleVar.name);\n if (varName == Names.IGNORE) {\n ignoredCount++;\n simpleVar.setBType(symTable.anyType);\n if (!types.isAssignable(type, symTable.anyType)) {\n dlog.error(varNode.pos, DiagnosticErrorCode.WILD_CARD_BINDING_PATTERN_ONLY_SUPPORTS_TYPE_ANY);\n }\n continue;\n }\n }\n defineMemberNode(var, env, type);\n }\n\n if (varNode.restVariable != null) {\n List 
tupleMembers = tupleTypeNode.getMembers();\n int tupleNodeMemCount = tupleMembers.size();\n int varNodeMemCount = varNode.memberVariables.size();\n BType restType = tupleTypeNode.restType;\n List members = new ArrayList<>();\n if (varNodeMemCount < tupleNodeMemCount) {\n for (int j = varNodeMemCount; j < tupleNodeMemCount; j++) {\n members.add(tupleMembers.get(j));\n }\n }\n if (!members.isEmpty()) {\n BTupleType restTupleType = new BTupleType(members);\n restTupleType.restType = restType;\n type = restTupleType;\n } else {\n type = restType != null ? new BArrayType(restType) : null;\n }\n defineMemberNode(varNode.restVariable, env, type);\n }\n\n if (!varNode.memberVariables.isEmpty() && ignoredCount == varNode.memberVariables.size()\n && varNode.restVariable == null) {\n dlog.error(varNode.pos, DiagnosticErrorCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);\n return false;\n }\n return true;\n }\n\n private BType getPossibleRestTypeForUnion(BLangTupleVariable varNode, List possibleTypes) {\n if (varNode.restVariable == null) {\n return null;\n }\n LinkedHashSet memberRestTypes = new LinkedHashSet<>();\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.TUPLE) {\n BTupleType tupleType = (BTupleType) possibleType;\n List tupleMemberTypes = tupleType.getTupleTypes();\n for (int j = varNode.memberVariables.size(); j < tupleMemberTypes.size();\n j++) {\n memberRestTypes.add(tupleMemberTypes.get(j));\n }\n if (tupleType.restType != null) {\n memberRestTypes.add(tupleType.restType);\n }\n } else if (possibleType.tag == TypeTags.ARRAY) {\n memberRestTypes.add(((BArrayType) possibleType).eType);\n } else {\n memberRestTypes.add(possibleType);\n }\n }\n if (!memberRestTypes.isEmpty()) {\n return memberRestTypes.size() > 1 ? 
BUnionType.create(null, memberRestTypes) :\n memberRestTypes.iterator().next();\n } else {\n return varNode.getBType();\n }\n }\n\n private boolean checkMemVarCountMatchWithMemTypeCount(BLangTupleVariable varNode, BTupleType tupleTypeNode) {\n int memberVarsSize = varNode.memberVariables.size();\n BLangVariable restVariable = varNode.restVariable;\n int tupleTypesSize = tupleTypeNode.getMembers().size();\n if (memberVarsSize > tupleTypesSize) {\n return false;\n }\n return restVariable != null ||\n (tupleTypesSize == memberVarsSize && tupleTypeNode.restType == null);\n }\n\n @Override\n public void visit(BLangRecordVariable recordVar) {\n if (recordVar.isDeclaredWithVar) {\n recordVar.symbol =\n defineVarSymbol(recordVar.pos, recordVar.flagSet, symTable.noType,\n names.fromString(anonymousModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),\n env, true);\n \n for (BLangRecordVariable.BLangRecordVariableKeyValue variable : recordVar.variableList) {\n BLangVariable value = variable.getValue();\n value.isDeclaredWithVar = true;\n defineNode(value, env);\n }\n\n BLangSimpleVariable restParam = (BLangSimpleVariable) recordVar.restParam;\n if (restParam != null) {\n restParam.isDeclaredWithVar = true;\n defineNode(restParam, env);\n }\n return;\n }\n\n if (recordVar.getBType() == null) {\n recordVar.setBType(symResolver.resolveTypeNode(recordVar.typeNode, env));\n }\n \n if (!(symbolEnterAndValidateRecordVariable(recordVar, env))) {\n recordVar.setBType(symTable.semanticError);\n return;\n }\n }\n\n boolean symbolEnterAndValidateRecordVariable(BLangRecordVariable var, SymbolEnv env) {\n if (var.symbol == null) {\n Name varName = names.fromString(anonymousModelHelper.getNextRecordVarKey(env.enclPkg.packageID));\n var.symbol = defineVarSymbol(var.pos, var.flagSet, var.getBType(), varName, env, true);\n }\n\n return validateRecordVariable(var, env);\n }\n\n boolean validateRecordVariable(BLangRecordVariable recordVar, SymbolEnv env) {\n BType recordType = 
Types.getEffectiveType(Types.getReferredType(recordVar.getBType()));\n BRecordType recordVarType;\n /*\n This switch block will resolve the record type of the record variable.\n For example consider the following -\n type Foo record {int a, boolean b};\n type Bar record {string a, float b};\n Foo|Bar {a, b} = foo();\n Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows:\n Type of 'a' will be a union of the types of field 'a' in both Foo and Bar.\n i.e. type of 'a' is (int | string) and type of 'b' is (boolean | float).\n Consider anydata {a, b} = foo();\n Here, the type of 'a'and type of 'b' will be both anydata.\n */\n switch (recordType.tag) {\n case TypeTags.UNION:\n BUnionType unionType = (BUnionType) recordType;\n Set bTypes = types.expandAndGetMemberTypesRecursive(unionType);\n List possibleTypes = bTypes.stream()\n .filter(rec -> doesRecordContainKeys(rec, recordVar.variableList, recordVar.restParam != null))\n .collect(Collectors.toList());\n\n if (possibleTypes.isEmpty()) {\n dlog.error(recordVar.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, recordType);\n return false;\n }\n\n if (possibleTypes.size() > 1) {\n recordVarType = populatePossibleFields(recordVar, possibleTypes, env);\n break;\n }\n\n if (possibleTypes.get(0).tag == TypeTags.RECORD) {\n recordVarType = (BRecordType) possibleTypes.get(0);\n break;\n }\n\n if (possibleTypes.get(0).tag == TypeTags.MAP) {\n recordVarType = createSameTypedFieldsRecordType(recordVar,\n ((BMapType) possibleTypes.get(0)).constraint, env);\n break;\n }\n\n recordVarType = createSameTypedFieldsRecordType(recordVar, possibleTypes.get(0), env);\n break;\n case TypeTags.RECORD:\n recordVarType = (BRecordType) recordType;\n break;\n case TypeTags.MAP:\n recordVarType = createSameTypedFieldsRecordType(recordVar,\n ((BMapType) recordType).constraint, env);\n break;\n default:\n dlog.error(recordVar.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, recordType);\n return 
false;\n }\n\n return defineVariableList(recordVar, recordVarType, env);\n }\n\n private BRecordType populatePossibleFields(BLangRecordVariable recordVar, List possibleTypes,\n SymbolEnv env) {\n BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS,\n names.fromString(ANONYMOUS_RECORD_NAME),\n env.enclPkg.symbol.pkgID, null,\n env.scope.owner, recordVar.pos, SOURCE);\n BRecordType recordVarType = (BRecordType) symTable.recordType;\n\n List mappedFields = recordVar.variableList.stream().map(varKeyValue -> varKeyValue.getKey().value)\n .collect(Collectors.toList());\n LinkedHashMap fields = populateAndGetPossibleFieldsForRecVar(recordVar.pos, possibleTypes,\n mappedFields, recordSymbol, env);\n\n if (recordVar.restParam != null) {\n recordVarType.restFieldType = createRestFieldFromPossibleTypes(recordVar.pos, env, possibleTypes,\n fields, recordSymbol);\n }\n recordVarType.tsymbol = recordSymbol;\n recordVarType.fields = fields;\n recordSymbol.type = recordVarType;\n return recordVarType;\n }\n\n private BType createRestFieldFromPossibleTypes(Location pos, SymbolEnv env, List possibleTypes,\n LinkedHashMap boundedFields, BSymbol recordSymbol) {\n LinkedHashSet restFieldMemberTypes = new LinkedHashSet<>();\n List> possibleRecordFieldMapList = new ArrayList<>();\n\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.RECORD) {\n BRecordType recordType = (BRecordType) possibleType;\n possibleRecordFieldMapList.add(recordType.fields);\n restFieldMemberTypes.add(recordType.restFieldType);\n } else if (possibleType.tag == TypeTags.MAP) {\n restFieldMemberTypes.add(((BMapType) possibleType).constraint);\n } else {\n restFieldMemberTypes.add(possibleType);\n }\n }\n\n BType restFieldType = restFieldMemberTypes.size() > 1 ?\n BUnionType.create(null, restFieldMemberTypes) :\n restFieldMemberTypes.iterator().next();\n\n if (!possibleRecordFieldMapList.isEmpty()) {\n List intersectionFields = 
getIntersectionFields(possibleRecordFieldMapList);\n LinkedHashMap unmappedMembers = populateAndGetPossibleFieldsForRecVar(pos,\n possibleTypes, intersectionFields, recordSymbol, env);\n\n LinkedHashMap optionalFields = new LinkedHashMap<>() {{\n possibleRecordFieldMapList.forEach(map -> putAll(map));\n }};\n\n intersectionFields.forEach(optionalFields::remove);\n boundedFields.keySet().forEach(unmappedMembers::remove);\n\n for (BField field : optionalFields.values()) {\n field.symbol.flags = setSymbolAsOptional(field.symbol.flags);\n }\n unmappedMembers.putAll(optionalFields);\n\n BRecordType restRecord = new BRecordType(null);\n restRecord.fields = unmappedMembers;\n restRecord.restFieldType = restFieldType;\n restFieldType = restRecord;\n }\n\n return restFieldType;\n }\n\n private List getIntersectionFields(List> fieldList) {\n LinkedHashMap intersectionMap = fieldList.get(0);\n HashSet intersectionSet = new HashSet<>(intersectionMap.keySet());\n\n for (int i = 1; i < fieldList.size(); i++) {\n LinkedHashMap map = fieldList.get(i);\n HashSet set = new HashSet<>(map.keySet());\n intersectionSet.retainAll(set);\n }\n\n return new ArrayList<>(intersectionSet);\n }\n\n /**\n * This method will resolve field types based on a list of possible types.\n * When a record variable has multiple possible assignable types, each field will be a union of the relevant\n * possible types field type.\n *\n * @param pos line number information of the source file\n * @param possibleTypes list of possible types\n * @param fieldNames fields types to be resolved\n * @param recordSymbol symbol of the record type to be used in creating fields\n * @param env environment to define the symbol\n * @return the list of fields\n */\n private LinkedHashMap populateAndGetPossibleFieldsForRecVar(Location pos, List possibleTypes,\n List fieldNames,\n BSymbol recordSymbol, SymbolEnv env) {\n LinkedHashMap fields = new LinkedHashMap<>();\n for (String fieldName : fieldNames) {\n LinkedHashSet 
memberTypes = new LinkedHashSet<>();\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.RECORD) {\n BRecordType possibleRecordType = (BRecordType) possibleType;\n\n if (possibleRecordType.fields.containsKey(fieldName)) {\n BField field = possibleRecordType.fields.get(fieldName);\n if (Symbols.isOptional(field.symbol)) {\n memberTypes.add(symTable.nilType);\n }\n memberTypes.add(field.type);\n } else {\n memberTypes.add(possibleRecordType.restFieldType);\n memberTypes.add(symTable.nilType);\n }\n\n continue;\n }\n\n if (possibleType.tag == TypeTags.MAP) {\n BMapType possibleMapType = (BMapType) possibleType;\n memberTypes.add(possibleMapType.constraint);\n continue;\n }\n memberTypes.add(possibleType); \n }\n\n BType fieldType = memberTypes.size() > 1 ?\n BUnionType.create(null, memberTypes) : memberTypes.iterator().next();\n BField field = new BField(names.fromString(fieldName), pos,\n new BVarSymbol(0, names.fromString(fieldName), env.enclPkg.symbol.pkgID,\n fieldType, recordSymbol, pos, SOURCE));\n fields.put(field.name.value, field);\n }\n return fields;\n }", "context_after": "class SymbolEnter extends BLangNodeVisitor {\n\n private static final CompilerContext.Key SYMBOL_ENTER_KEY =\n new CompilerContext.Key<>();\n\n private final SymbolTable symTable;\n private final Names names;\n private final SymbolResolver symResolver;\n private final BLangDiagnosticLog dlog;\n private final Types types;\n private final SourceDirectory sourceDirectory;\n private List unresolvedTypes;\n private Set unresolvedRecordDueToFields;\n private boolean resolveRecordsUnresolvedDueToFields;\n private List unresolvedClasses;\n private HashSet unknownTypeRefs;\n private List importedPackages;\n private int typePrecedence;\n private final TypeParamAnalyzer typeParamAnalyzer;\n private BLangAnonymousModelHelper anonymousModelHelper;\n private BLangMissingNodesHelper missingNodesHelper;\n private PackageCache packageCache;\n private List 
intersectionTypes;\n\n private SymbolEnv env;\n private final boolean projectAPIInitiatedCompilation;\n\n private static final String DEPRECATION_ANNOTATION = \"deprecated\";\n private static final String ANONYMOUS_RECORD_NAME = \"anonymous-record\";\n\n public static SymbolEnter getInstance(CompilerContext context) {\n SymbolEnter symbolEnter = context.get(SYMBOL_ENTER_KEY);\n if (symbolEnter == null) {\n symbolEnter = new SymbolEnter(context);\n }\n\n return symbolEnter;\n }\n\n public SymbolEnter(CompilerContext context) {\n context.put(SYMBOL_ENTER_KEY, this);\n\n this.symTable = SymbolTable.getInstance(context);\n this.names = Names.getInstance(context);\n this.symResolver = SymbolResolver.getInstance(context);\n this.dlog = BLangDiagnosticLog.getInstance(context);\n this.types = Types.getInstance(context);\n this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);\n this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);\n this.sourceDirectory = context.get(SourceDirectory.class);\n this.importedPackages = new ArrayList<>();\n this.unknownTypeRefs = new HashSet<>();\n this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);\n this.packageCache = PackageCache.getInstance(context);\n this.intersectionTypes = new ArrayList<>();\n\n CompilerOptions options = CompilerOptions.getInstance(context);\n projectAPIInitiatedCompilation = Boolean.parseBoolean(\n options.get(CompilerOptionName.PROJECT_API_INITIATED_COMPILATION));\n }\n\n private void cleanup() {\n unknownTypeRefs.clear();\n }\n\n public BLangPackage definePackage(BLangPackage pkgNode) {\n dlog.setCurrentPackageId(pkgNode.packageID);\n populatePackageNode(pkgNode);\n defineNode(pkgNode, this.symTable.pkgEnvMap.get(symTable.langAnnotationModuleSymbol));\n return pkgNode;\n }\n\n public void defineClassDefinition(BLangClassDefinition classNode, SymbolEnv env) {\n\n\n\n\n if (classNode.definitionCompleted) {\n return;\n }\n 
populateDistinctTypeIdsFromIncludedTypeReferences(classNode);\n defineFieldsOfClassDef(classNode, env);\n defineReferencedFieldsOfClassDef(classNode, env);\n defineFunctionsOfClassDef(env, classNode);\n setReadOnlynessOfClassDef(classNode, env);\n defineReadOnlyIncludedFieldsAndMethods(classNode, env);\n classNode.definitionCompleted = true;\n }\n\n public void defineNode(BLangNode node, SymbolEnv env) {\n SymbolEnv prevEnv = this.env;\n this.env = env;\n node.accept(this);\n this.env = prevEnv;\n }\n\n public BLangPackage defineTestablePackage(BLangTestablePackage pkgNode, SymbolEnv env) {\n populatePackageNode(pkgNode);\n defineNode(pkgNode, env);\n return pkgNode;\n }\n\n \n\n @Override\n public void visit(BLangPackage pkgNode) {\n if (pkgNode.completedPhases.contains(CompilerPhase.DEFINE)) {\n return;\n }\n\n \n BPackageSymbol pkgSymbol;\n if (Symbols.isFlagOn(Flags.asMask(pkgNode.flagSet), Flags.TESTABLE)) {\n pkgSymbol = Symbols.createPackageSymbol(pkgNode.packageID, this.symTable, Flags.asMask(pkgNode.flagSet),\n SOURCE);\n } else {\n pkgSymbol = Symbols.createPackageSymbol(pkgNode.packageID, this.symTable, SOURCE);\n }\n if (PackageID.isLangLibPackageID(pkgSymbol.pkgID)) {\n populateLangLibInSymTable(pkgSymbol);\n }\n\n if (pkgNode.moduleContextDataHolder != null) {\n pkgSymbol.exported = pkgNode.moduleContextDataHolder.isExported();\n pkgSymbol.descriptor = pkgNode.moduleContextDataHolder.descriptor();\n }\n\n pkgNode.symbol = pkgSymbol;\n SymbolEnv pkgEnv = SymbolEnv.createPkgEnv(pkgNode, pkgSymbol.scope, this.env);\n this.symTable.pkgEnvMap.put(pkgSymbol, pkgEnv);\n this.symTable.immutableTypeMaps.remove(Types.getPackageIdString(pkgSymbol.pkgID));\n\n \n \n importedPackages.add(pkgNode.packageID);\n\n defineConstructs(pkgNode, pkgEnv);\n pkgNode.getTestablePkgs().forEach(testablePackage -> defineTestablePackage(testablePackage, pkgEnv));\n pkgNode.completedPhases.add(CompilerPhase.DEFINE);\n\n \n cleanup();\n\n \n 
importedPackages.remove(pkgNode.packageID);\n }\n\n private void defineConstructs(BLangPackage pkgNode, SymbolEnv pkgEnv) {\n \n \n Map importPkgHolder = new HashMap<>();\n pkgNode.imports.forEach(importNode -> {\n String qualifiedName = importNode.getQualifiedPackageName();\n if (importPkgHolder.containsKey(qualifiedName)) {\n importPkgHolder.get(qualifiedName).unresolved.add(importNode);\n return;\n }\n defineNode(importNode, pkgEnv);\n if (importNode.symbol != null) {\n importPkgHolder.put(qualifiedName, new ImportResolveHolder(importNode));\n }\n });\n\n for (ImportResolveHolder importHolder : importPkgHolder.values()) {\n BPackageSymbol pkgSymbol = importHolder.resolved.symbol; \n \n\n for (BLangImportPackage unresolvedPkg : importHolder.unresolved) {\n BPackageSymbol importSymbol = importHolder.resolved.symbol;\n Name resolvedPkgAlias = names.fromIdNode(importHolder.resolved.alias);\n Name unresolvedPkgAlias = names.fromIdNode(unresolvedPkg.alias);\n\n \n if (!Names.IGNORE.equals(unresolvedPkgAlias) && unresolvedPkgAlias.equals(resolvedPkgAlias)\n && importSymbol.compUnit.equals(names.fromIdNode(unresolvedPkg.compUnit))) {\n if (isSameImport(unresolvedPkg, importSymbol)) {\n dlog.error(unresolvedPkg.pos, DiagnosticErrorCode.REDECLARED_IMPORT_MODULE,\n unresolvedPkg.getQualifiedPackageName());\n } else {\n dlog.error(unresolvedPkg.pos, DiagnosticErrorCode.REDECLARED_SYMBOL, unresolvedPkgAlias);\n }\n continue;\n }\n\n unresolvedPkg.symbol = pkgSymbol;\n \n BPackageSymbol symbol = dupPackageSymbolAndSetCompUnit(pkgSymbol,\n names.fromIdNode(unresolvedPkg.compUnit));\n symbol.scope = pkgSymbol.scope;\n unresolvedPkg.symbol = symbol;\n pkgEnv.scope.define(unresolvedPkgAlias, symbol);\n }\n }\n if (!PackageID.ANNOTATIONS.equals(pkgNode.packageID)) {\n initPredeclaredModules(symTable.predeclaredModules, pkgNode.compUnits, pkgEnv);\n }\n\n \n this.typePrecedence = 0;\n\n \n \n \n List typeAndClassDefs = new ArrayList<>();\n pkgNode.constants.forEach(constant -> 
typeAndClassDefs.add(constant));\n pkgNode.typeDefinitions.forEach(typDef -> typeAndClassDefs.add(typDef));\n List classDefinitions = getClassDefinitions(pkgNode.topLevelNodes);\n classDefinitions.forEach(classDefn -> typeAndClassDefs.add(classDefn));\n defineTypeNodes(typeAndClassDefs, pkgEnv);\n\n \n \n pkgEnv.logErrors = true;\n\n \n pkgNode.typeDefinitions.sort(getTypePrecedenceComparator());\n typeAndClassDefs.sort(getTypePrecedenceComparator());\n\n \n defineDistinctClassAndObjectDefinitions(typeAndClassDefs);\n\n \n defineFields(typeAndClassDefs, pkgEnv);\n defineDependentFields(typeAndClassDefs, pkgEnv);\n\n \n defineIntersectionTypes(pkgEnv);\n\n \n defineErrorDetails(pkgNode.typeDefinitions, pkgEnv);\n\n \n defineFunctions(typeAndClassDefs, pkgEnv);\n\n \n \n \n validateIntersectionTypeDefinitions(pkgNode.typeDefinitions, pkgNode.packageID);\n defineUndefinedReadOnlyTypes(pkgNode.typeDefinitions, typeAndClassDefs, pkgEnv);\n\n \n pkgNode.services.forEach(service -> defineNode(service, pkgEnv));\n\n \n for (BLangFunction bLangFunction : pkgNode.functions) {\n \n if (!bLangFunction.flagSet.contains(Flag.LAMBDA)) {\n defineNode(bLangFunction, pkgEnv);\n }\n }\n\n \n pkgNode.annotations.forEach(annot -> defineNode(annot, pkgEnv));\n\n for (BLangVariable variable : pkgNode.globalVars) {\n BLangExpression expr = variable.expr;\n if (expr != null && expr.getKind() == NodeKind.LAMBDA) {\n defineNode(((BLangLambdaFunction) expr).function, pkgEnv);\n if (variable.isDeclaredWithVar) {\n setTypeFromLambdaExpr(variable);\n }\n }\n defineNode(variable, pkgEnv);\n }\n\n \n for (BLangVariable var : pkgNode.globalVars) {\n if (var.getKind() == NodeKind.VARIABLE) {\n BVarSymbol varSymbol = var.symbol;\n if (varSymbol != null) {\n BTypeSymbol tSymbol = varSymbol.type.tsymbol;\n if (tSymbol != null && Symbols.isFlagOn(tSymbol.flags, Flags.CLIENT)) {\n varSymbol.tag = SymTag.ENDPOINT;\n }\n }\n }\n }\n }\n\n private void defineDependentFields(List typeDefNodes, SymbolEnv 
pkgEnv) {\n for (BLangNode typeDef : typeDefNodes) {\n if (typeDef.getKind() == NodeKind.CLASS_DEFN) {\n BLangClassDefinition classDefinition = (BLangClassDefinition) typeDef;\n if (isObjectCtor(classDefinition)) {\n continue;\n }\n defineReferencedFieldsOfClassDef(classDefinition, pkgEnv);\n } else if (typeDef.getKind() == NodeKind.TYPE_DEFINITION) {\n defineReferencedFieldsOfRecordTypeDef((BLangTypeDefinition) typeDef);\n }\n }\n }\n\n private void defineReferencedFieldsOfClassDef(BLangClassDefinition classDefinition, SymbolEnv pkgEnv) {\n SymbolEnv typeDefEnv = classDefinition.typeDefEnv;\n BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) classDefinition.symbol;\n BObjectType objType = (BObjectType) tSymbol.type;\n\n defineReferencedClassFields(classDefinition, typeDefEnv, objType, false);\n }\n\n private void defineIntersectionTypes(SymbolEnv env) {\n for (BLangNode typeDescriptor : this.intersectionTypes) {\n defineNode(typeDescriptor, env);\n }\n this.intersectionTypes.clear();\n }\n\n private void defineErrorType(Location pos, BErrorType errorType, SymbolEnv env) {\n SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol);\n BTypeSymbol errorTSymbol = errorType.tsymbol;\n errorTSymbol.scope = new Scope(errorTSymbol);\n\n if (symResolver.checkForUniqueSymbol(pos, pkgEnv, errorTSymbol)) {\n pkgEnv.scope.define(errorTSymbol.name, errorTSymbol);\n }\n\n SymbolEnv prevEnv = this.env;\n this.env = pkgEnv;\n this.env = prevEnv;\n }\n\n private boolean isObjectCtor(BLangNode node) {\n if (node.getKind() == NodeKind.CLASS_DEFN) {\n BLangClassDefinition classDefinition = (BLangClassDefinition) node;\n return isObjectCtor(classDefinition);\n }\n return false;\n }\n\n private boolean isObjectCtor(BLangClassDefinition classDefinition) {\n if (!classDefinition.isObjectContructorDecl && classDefinition.isServiceDecl) {\n return false;\n }\n if (classDefinition.flagSet.contains(Flag.OBJECT_CTOR)) {\n return true;\n }\n return false;\n }\n\n private void 
defineDistinctClassAndObjectDefinitions(List typDefs) {\n for (BLangNode node : typDefs) {\n if (node.getKind() == NodeKind.CLASS_DEFN) {\n BLangClassDefinition classDefinition = (BLangClassDefinition) node;\n if (isObjectCtor(classDefinition)) {\n continue;\n }\n populateDistinctTypeIdsFromIncludedTypeReferences((BLangClassDefinition) node);\n } else if (node.getKind() == NodeKind.TYPE_DEFINITION) {\n populateDistinctTypeIdsFromIncludedTypeReferences((BLangTypeDefinition) node);\n }\n }\n }\n\n private void populateDistinctTypeIdsFromIncludedTypeReferences(BLangTypeDefinition typeDefinition) {\n if (typeDefinition.typeNode.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {\n if (typeDefinition.typeNode.getBType() == null) {\n return;\n }\n\n BType definingType = types.getTypeWithEffectiveIntersectionTypes(typeDefinition.typeNode.getBType());\n definingType = Types.getReferredType(definingType);\n if (definingType.tag != TypeTags.OBJECT) {\n return;\n }\n BObjectType definigObjType = (BObjectType) definingType;\n\n BLangIntersectionTypeNode typeNode = (BLangIntersectionTypeNode) typeDefinition.typeNode;\n for (BLangType constituentTypeNode : typeNode.getConstituentTypeNodes()) {\n BType constituentType = Types.getReferredType(constituentTypeNode.getBType());\n if (constituentType.tag != TypeTags.OBJECT) {\n continue;\n }\n definigObjType.typeIdSet.add(((BObjectType) constituentType).typeIdSet);\n }\n } else if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE) {\n BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDefinition.typeNode;\n BTypeIdSet typeIdSet = ((BObjectType) objectTypeNode.getBType()).typeIdSet;\n\n for (BLangType typeRef : objectTypeNode.typeRefs) {\n BType type = types.getTypeWithEffectiveIntersectionTypes(typeRef.getBType());\n type = Types.getReferredType(type);\n if (type.tag != TypeTags.OBJECT) {\n continue;\n }\n BObjectType refType = (BObjectType) type;\n typeIdSet.add(refType.typeIdSet);\n }\n }\n }\n\n private void 
populateDistinctTypeIdsFromIncludedTypeReferences(BLangClassDefinition typeDef) {\n BLangClassDefinition classDefinition = typeDef;\n BTypeIdSet typeIdSet = ((BObjectType) classDefinition.getBType()).typeIdSet;\n\n for (BLangType typeRef : classDefinition.typeRefs) {\n BType type = types.getTypeWithEffectiveIntersectionTypes(typeRef.getBType());\n type = Types.getReferredType(type);\n if (type.tag != TypeTags.OBJECT) {\n continue;\n }\n BObjectType refType = (BObjectType) type;\n typeIdSet.add(refType.typeIdSet);\n }\n }\n\n private Comparator getTypePrecedenceComparator() {\n return (l, r) -> {\n if (l instanceof OrderedNode && r instanceof OrderedNode) {\n return ((OrderedNode) l).getPrecedence() - ((OrderedNode) r).getPrecedence();\n }\n return 0;\n };\n }\n\n private void defineFunctionsOfClassDef(SymbolEnv pkgEnv, BLangClassDefinition classDefinition) {\n validateInclusionsForNonPrivateMembers(classDefinition.typeRefs);\n BObjectType objectType = (BObjectType) classDefinition.symbol.type;\n\n if (objectType.mutableType != null) {\n \n \n \n return;\n }\n\n SymbolEnv objMethodsEnv =\n SymbolEnv.createClassMethodsEnv(classDefinition, (BObjectTypeSymbol) classDefinition.symbol, pkgEnv);\n if (classDefinition.isObjectContructorDecl) {\n classDefinition.oceEnvData.objMethodsEnv = objMethodsEnv;\n }\n\n \n defineClassInitFunction(classDefinition, objMethodsEnv);\n classDefinition.functions.forEach(f -> {\n f.flagSet.add(Flag.FINAL); \n f.setReceiver(ASTBuilderUtil.createReceiver(classDefinition.pos, objectType));\n defineNode(f, objMethodsEnv);\n });\n\n defineIncludedMethods(classDefinition, objMethodsEnv, false);\n }\n\n private void defineIncludedMethods(BLangClassDefinition classDefinition, SymbolEnv objMethodsEnv,\n boolean defineReadOnlyInclusionsOnly) {\n Set includedFunctionNames = new HashSet<>();\n\n if (defineReadOnlyInclusionsOnly) {\n for (BAttachedFunction function :\n ((BObjectTypeSymbol) classDefinition.getBType().tsymbol).referencedFunctions) {\n 
includedFunctionNames.add(function.funcName.value);\n }\n }\n\n \n \n \n \n for (BLangType typeRef : classDefinition.typeRefs) {\n BType type = Types.getReferredType(typeRef.getBType());\n if (type == null || type == symTable.semanticError) {\n return;\n }\n\n if (type.tag == TypeTags.INTERSECTION) {\n if (!defineReadOnlyInclusionsOnly) {\n \n continue;\n }\n\n type = ((BIntersectionType) type).effectiveType;\n } else {\n if (defineReadOnlyInclusionsOnly) {\n if (!isImmutable((BObjectType) type)) {\n continue;\n }\n } else if (isImmutable((BObjectType) type)) {\n continue;\n }\n }\n\n List functions = ((BObjectTypeSymbol) type.tsymbol).attachedFuncs;\n for (BAttachedFunction function : functions) {\n defineReferencedFunction(classDefinition.pos, classDefinition.flagSet, objMethodsEnv,\n typeRef, function, includedFunctionNames, classDefinition.symbol, classDefinition.functions,\n classDefinition.internal);\n }\n }\n }\n\n private void defineReferencedClassFields(BLangClassDefinition classDefinition, SymbolEnv typeDefEnv,\n BObjectType objType, boolean defineReadOnlyInclusionsOnly) {\n if (classDefinition.typeRefs.isEmpty()) {\n return;\n }\n Set referencedTypes = new HashSet<>(classDefinition.typeRefs.size());\n List invalidTypeRefs = new ArrayList<>(classDefinition.typeRefs.size());\n\n Map fieldNames = new HashMap<>(classDefinition.fields.size());\n for (BLangSimpleVariable fieldVariable : classDefinition.fields) {\n fieldNames.put(fieldVariable.name.value, fieldVariable);\n }\n\n \n List referencedFields = new ArrayList<>();\n\n for (BLangType typeRef : classDefinition.typeRefs) {\n BType referredType = Types.getReferredType(symResolver.resolveTypeNode(typeRef, typeDefEnv));\n if (referredType == symTable.semanticError) {\n continue;\n }\n\n int tag = Types.getReferredType(classDefinition.getBType()).tag;\n if (tag == TypeTags.OBJECT) {\n if (isInvalidIncludedTypeInClass(referredType)) {\n if (!defineReadOnlyInclusionsOnly) {\n dlog.error(typeRef.pos, 
DiagnosticErrorCode.INCOMPATIBLE_TYPE_REFERENCE, typeRef);\n }\n invalidTypeRefs.add(typeRef);\n continue;\n }\n\n BObjectType objectType = null;\n\n if (referredType.tag == TypeTags.INTERSECTION) {\n if (!defineReadOnlyInclusionsOnly) {\n \n continue;\n }\n } else {\n objectType = (BObjectType) referredType;\n\n if (defineReadOnlyInclusionsOnly) {\n if (!isImmutable(objectType)) {\n continue;\n }\n } else if (isImmutable(objectType)) {\n continue;\n }\n }\n } else if (defineReadOnlyInclusionsOnly) {\n continue;\n }\n\n \n if (!referencedTypes.add(referredType.tsymbol)) {\n dlog.error(typeRef.pos, DiagnosticErrorCode.REDECLARED_TYPE_REFERENCE, typeRef);\n continue;\n }\n\n BType effectiveIncludedType = referredType;\n\n if (tag == TypeTags.OBJECT) {\n BObjectType objectType;\n referredType = Types.getReferredType(referredType);\n\n if (referredType.tag == TypeTags.INTERSECTION) {\n effectiveIncludedType = objectType = (BObjectType) ((BIntersectionType) referredType).effectiveType;\n } else {\n objectType = (BObjectType) referredType;\n }\n\n if (!classDefinition.symbol.pkgID.equals(referredType.tsymbol.pkgID)) {\n boolean errored = false;\n for (BField field : objectType.fields.values()) {\n if (!Symbols.isPublic(field.symbol)) {\n dlog.error(typeRef.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_REFERENCE_NON_PUBLIC_MEMBERS,\n typeRef);\n invalidTypeRefs.add(typeRef);\n errored = true;\n break;\n }\n }\n\n if (errored) {\n continue;\n }\n\n for (BAttachedFunction func : ((BObjectTypeSymbol) objectType.tsymbol).attachedFuncs) {\n if (!Symbols.isPublic(func.symbol)) {\n dlog.error(typeRef.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_REFERENCE_NON_PUBLIC_MEMBERS,\n typeRef);\n invalidTypeRefs.add(typeRef);\n errored = true;\n break;\n }\n }\n\n if (errored) {\n continue;\n }\n }\n }\n\n \n \n \n \n for (BField field : ((BStructureType) effectiveIncludedType).fields.values()) {\n if (fieldNames.containsKey(field.name.value)) {\n BLangSimpleVariable existingVariable = 
fieldNames.get(field.name.value);\n if ((existingVariable.flagSet.contains(Flag.PUBLIC) !=\n Symbols.isFlagOn(field.symbol.flags, Flags.PUBLIC)) ||\n (existingVariable.flagSet.contains(Flag.PRIVATE) !=\n Symbols.isFlagOn(field.symbol.flags, Flags.PRIVATE))) {\n dlog.error(existingVariable.pos,\n DiagnosticErrorCode.MISMATCHED_VISIBILITY_QUALIFIERS_IN_OBJECT_FIELD,\n existingVariable.name.value);\n }\n continue;\n }\n\n BLangSimpleVariable var = ASTBuilderUtil.createVariable(typeRef.pos, field.name.value, field.type);\n var.flagSet = field.symbol.getFlags();\n referencedFields.add(var);\n }\n }\n classDefinition.typeRefs.removeAll(invalidTypeRefs);\n\n for (BLangSimpleVariable field : referencedFields) {\n defineNode(field, typeDefEnv);\n if (field.symbol.type == symTable.semanticError) {\n continue;\n }\n objType.fields.put(field.name.value, new BField(names.fromIdNode(field.name), field.pos, field.symbol));\n }\n\n classDefinition.referencedFields.addAll(referencedFields);\n }\n\n private List getClassDefinitions(List topLevelNodes) {\n List classDefinitions = new ArrayList<>();\n for (TopLevelNode topLevelNode : topLevelNodes) {\n if (topLevelNode.getKind() == NodeKind.CLASS_DEFN) {\n classDefinitions.add((BLangClassDefinition) topLevelNode);\n }\n }\n return classDefinitions;\n }\n\n @Override\n public void visit(BLangObjectConstructorExpression objectCtorExpression) {\n visit(objectCtorExpression.classNode);\n objectCtorExpression.setBType(objectCtorExpression.classNode.getBType());\n }\n\n @Override\n public void visit(BLangClassDefinition classDefinition) {\n EnumSet flags = EnumSet.copyOf(classDefinition.flagSet);\n boolean isPublicType = flags.contains(Flag.PUBLIC);\n Name className = names.fromIdNode(classDefinition.name);\n Name classOrigName = names.originalNameFromIdNode(classDefinition.name);\n\n BClassSymbol tSymbol = Symbols.createClassSymbol(Flags.asMask(flags),\n className, env.enclPkg.symbol.pkgID, null,\n env.scope.owner, 
classDefinition.name.pos,\n getOrigin(className, flags), classDefinition.isServiceDecl);\n tSymbol.originalName = classOrigName;\n tSymbol.scope = new Scope(tSymbol);\n tSymbol.markdownDocumentation = getMarkdownDocAttachment(classDefinition.markdownDocumentationAttachment);\n\n\n long typeFlags = 0;\n\n if (flags.contains(Flag.READONLY)) {\n typeFlags |= Flags.READONLY;\n }\n\n if (flags.contains(Flag.ISOLATED)) {\n typeFlags |= Flags.ISOLATED;\n }\n\n if (flags.contains(Flag.SERVICE)) {\n typeFlags |= Flags.SERVICE;\n }\n\n if (flags.contains(Flag.OBJECT_CTOR)) {\n typeFlags |= Flags.OBJECT_CTOR;\n }\n\n BObjectType objectType = new BObjectType(tSymbol, typeFlags);\n if (classDefinition.isObjectContructorDecl || flags.contains(Flag.OBJECT_CTOR)) {\n classDefinition.oceEnvData.objectType = objectType;\n objectType.classDef = classDefinition;\n }\n\n if (flags.contains(Flag.DISTINCT)) {\n objectType.typeIdSet = BTypeIdSet.from(env.enclPkg.symbol.pkgID, classDefinition.name.value, isPublicType);\n }\n\n if (flags.contains(Flag.CLIENT)) {\n objectType.flags |= Flags.CLIENT;\n }\n\n tSymbol.type = objectType;\n classDefinition.setBType(objectType);\n classDefinition.setDeterminedType(objectType);\n classDefinition.symbol = tSymbol;\n\n if (isDeprecated(classDefinition.annAttachments)) {\n tSymbol.flags |= Flags.DEPRECATED;\n }\n\n \n \n for (BLangType typeRef : classDefinition.typeRefs) {\n BType referencedType = symResolver.resolveTypeNode(typeRef, env);\n if (referencedType == symTable.noType && !this.unresolvedTypes.contains(classDefinition)) {\n this.unresolvedTypes.add(classDefinition);\n return;\n }\n objectType.typeInclusions.add(referencedType);\n }\n\n classDefinition.setPrecedence(this.typePrecedence++);\n if (symResolver.checkForUniqueSymbol(classDefinition.pos, env, tSymbol)) {\n env.scope.define(tSymbol.name, tSymbol);\n }\n \n \n }\n\n public void visit(BLangAnnotation annotationNode) {\n Name annotName = names.fromIdNode(annotationNode.name);\n Name 
annotOrigName = names.originalNameFromIdNode(annotationNode.name);\n BAnnotationSymbol annotationSymbol = Symbols.createAnnotationSymbol(Flags.asMask(annotationNode.flagSet),\n annotationNode.getAttachPoints(),\n annotName, annotOrigName,\n env.enclPkg.symbol.pkgID, null,\n env.scope.owner, annotationNode.name.pos,\n getOrigin(annotName));\n annotationSymbol.markdownDocumentation =\n getMarkdownDocAttachment(annotationNode.markdownDocumentationAttachment);\n if (isDeprecated(annotationNode.annAttachments)) {\n annotationSymbol.flags |= Flags.DEPRECATED;\n }\n annotationSymbol.type = new BAnnotationType(annotationSymbol);\n annotationNode.symbol = annotationSymbol;\n defineSymbol(annotationNode.name.pos, annotationSymbol);\n SymbolEnv annotationEnv = SymbolEnv.createAnnotationEnv(annotationNode, annotationSymbol.scope, env);\n BLangType annotTypeNode = annotationNode.typeNode;\n if (annotTypeNode != null) {\n BType type = this.symResolver.resolveTypeNode(annotTypeNode, annotationEnv);\n annotationSymbol.attachedType = type;\n if (!isValidAnnotationType(type)) {\n dlog.error(annotTypeNode.pos, DiagnosticErrorCode.ANNOTATION_INVALID_TYPE, type);\n }\n\n\n\n\n }\n\n if (!annotationNode.flagSet.contains(Flag.CONSTANT) &&\n annotationNode.getAttachPoints().stream().anyMatch(attachPoint -> attachPoint.source)) {\n dlog.error(annotationNode.pos, DiagnosticErrorCode.ANNOTATION_REQUIRES_CONST);\n }\n }\n\n private boolean isNullOrEmpty(String s) {\n return s == null || s.isEmpty();\n }\n\n @Override\n public void visit(BLangImportPackage importPkgNode) {\n Name pkgAlias = names.fromIdNode(importPkgNode.alias);\n if (!Names.IGNORE.equals(pkgAlias)) {\n BSymbol importSymbol =\n symResolver.resolvePrefixSymbol(env, pkgAlias, names.fromIdNode(importPkgNode.compUnit));\n if (importSymbol != symTable.notFoundSymbol) {\n if (isSameImport(importPkgNode, (BPackageSymbol) importSymbol)) {\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.REDECLARED_IMPORT_MODULE,\n 
importPkgNode.getQualifiedPackageName());\n } else {\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.REDECLARED_SYMBOL, pkgAlias);\n }\n return;\n }\n }\n\n \n \n Name orgName;\n Name pkgName = null;\n Name version;\n PackageID enclPackageID = env.enclPkg.packageID;\n \n \n \n \n \n \n \n if (!isNullOrEmpty(importPkgNode.orgName.value)) {\n orgName = names.fromIdNode(importPkgNode.orgName);\n if (!isNullOrEmpty(importPkgNode.version.value)) {\n version = names.fromIdNode(importPkgNode.version);\n } else {\n \n if (projectAPIInitiatedCompilation) {\n version = Names.EMPTY;\n } else {\n String pkgNameComps = importPkgNode.getPackageName().stream()\n .map(id -> id.value)\n .collect(Collectors.joining(\".\"));\n if (this.sourceDirectory.getSourcePackageNames().contains(pkgNameComps)\n && orgName.value.equals(enclPackageID.orgName.value)) {\n version = enclPackageID.version;\n } else {\n version = Names.EMPTY;\n }\n }\n }\n } else {\n orgName = enclPackageID.orgName;\n pkgName = enclPackageID.pkgName;\n version = (Names.DEFAULT_VERSION.equals(enclPackageID.version)) ? 
Names.EMPTY : enclPackageID.version;\n }\n\n List nameComps = importPkgNode.pkgNameComps.stream()\n .map(identifier -> names.fromIdNode(identifier))\n .collect(Collectors.toList());\n Name moduleName = new Name(nameComps.stream().map(Name::getValue).collect(Collectors.joining(\".\")));\n\n if (pkgName == null) {\n pkgName = moduleName;\n }\n\n PackageID pkgId = new PackageID(orgName, pkgName, moduleName, version, null);\n\n \n BPackageSymbol bPackageSymbol = this.packageCache.getSymbol(pkgId);\n if (bPackageSymbol != null && this.env.enclPkg.moduleContextDataHolder != null) {\n boolean isCurrentPackageModuleImport =\n this.env.enclPkg.moduleContextDataHolder.descriptor().org() == bPackageSymbol.descriptor.org()\n && this.env.enclPkg.moduleContextDataHolder.descriptor().packageName() ==\n bPackageSymbol.descriptor.packageName();\n if (!isCurrentPackageModuleImport && !bPackageSymbol.exported) {\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.MODULE_NOT_FOUND,\n bPackageSymbol.toString() + \" is not exported\");\n return;\n }\n }\n\n \n if (pkgId.equals(PackageID.ANNOTATIONS) || pkgId.equals(PackageID.INTERNAL) || pkgId.equals(PackageID.QUERY)) {\n \n \n if (!(enclPackageID.orgName.equals(Names.BALLERINA_ORG)\n && enclPackageID.name.value.startsWith(Names.LANG.value))) {\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.MODULE_NOT_FOUND,\n importPkgNode.getQualifiedPackageName());\n return;\n }\n }\n\n \n \n if (importedPackages.contains(pkgId)) {\n int index = importedPackages.indexOf(pkgId);\n \n StringBuilder stringBuilder = new StringBuilder();\n for (int i = index; i < importedPackages.size(); i++) {\n stringBuilder.append(importedPackages.get(i).toString()).append(\" -> \");\n }\n \n stringBuilder.append(pkgId);\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.CYCLIC_MODULE_IMPORTS_DETECTED, stringBuilder.toString());\n return;\n }\n\n boolean samePkg = false;\n \n PackageID entryPackage = importedPackages.get(0);\n if (entryPackage.isUnnamed == 
pkgId.isUnnamed) {\n samePkg = (!entryPackage.isUnnamed) || (entryPackage.sourceFileName.equals(pkgId.sourceFileName));\n }\n \n \n if (samePkg && entryPackage.orgName.equals(pkgId.orgName) && entryPackage.name.equals(pkgId.name)) {\n StringBuilder stringBuilder = new StringBuilder();\n String entryPackageString = importedPackages.get(0).toString();\n \n int packageIndex = entryPackageString.indexOf(\":\");\n if (packageIndex != -1) {\n entryPackageString = entryPackageString.substring(0, packageIndex);\n }\n \n stringBuilder.append(entryPackageString).append(\" -> \");\n for (int i = 1; i < importedPackages.size(); i++) {\n stringBuilder.append(importedPackages.get(i).toString()).append(\" -> \");\n }\n stringBuilder.append(pkgId);\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.CYCLIC_MODULE_IMPORTS_DETECTED, stringBuilder.toString());\n return;\n }\n\n BPackageSymbol pkgSymbol = packageCache.getSymbol(pkgId);\n\n if (pkgSymbol == null) {\n dlog.error(importPkgNode.pos, DiagnosticErrorCode.MODULE_NOT_FOUND,\n importPkgNode.getQualifiedPackageName());\n return;\n }\n\n List imports = ((BPackageSymbol) this.env.scope.owner).imports;\n if (!imports.contains(pkgSymbol)) {\n imports.add(pkgSymbol);\n }\n\n \n \n BPackageSymbol symbol = dupPackageSymbolAndSetCompUnit(pkgSymbol, names.fromIdNode(importPkgNode.compUnit));\n if (!Names.IGNORE.equals(pkgAlias)) {\n symbol.importPrefix = pkgAlias;\n }\n symbol.scope = pkgSymbol.scope;\n importPkgNode.symbol = symbol;\n this.env.scope.define(pkgAlias, symbol);\n }\n\n public void initPredeclaredModules(Map predeclaredModules,\n List compUnits, SymbolEnv env) {\n SymbolEnv prevEnv = this.env;\n this.env = env;\n for (Map.Entry predeclaredModuleEntry : predeclaredModules.entrySet()) {\n Name alias = predeclaredModuleEntry.getKey();\n BPackageSymbol packageSymbol = predeclaredModuleEntry.getValue();\n int index = 0;\n ScopeEntry entry = this.env.scope.lookup(alias);\n if (entry == NOT_FOUND_ENTRY && !compUnits.isEmpty()) 
{\n this.env.scope.define(alias, dupPackageSymbolAndSetCompUnit(packageSymbol,\n new Name(compUnits.get(index++).name)));\n entry = this.env.scope.lookup(alias);\n }\n for (int i = index; i < compUnits.size(); i++) {\n boolean isUndefinedModule = true;\n String compUnitName = compUnits.get(i).name;\n if (((BPackageSymbol) entry.symbol).compUnit.value.equals(compUnitName)) {\n isUndefinedModule = false;\n }\n while (entry.next != NOT_FOUND_ENTRY) {\n if (((BPackageSymbol) entry.next.symbol).compUnit.value.equals(compUnitName)) {\n isUndefinedModule = false;\n break;\n }\n entry = entry.next;\n }\n if (isUndefinedModule) {\n entry.next = new ScopeEntry(dupPackageSymbolAndSetCompUnit(packageSymbol,\n new Name(compUnitName)), NOT_FOUND_ENTRY);\n }\n }\n }\n this.env = prevEnv;\n }\n\n @Override\n public void visit(BLangXMLNS xmlnsNode) {\n String nsURI;\n if (xmlnsNode.namespaceURI.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {\n BLangSimpleVarRef varRef = (BLangSimpleVarRef) xmlnsNode.namespaceURI;\n if (missingNodesHelper.isMissingNode(varRef.variableName.value)) {\n nsURI = \"\";\n } else {\n \n nsURI = \"\";\n }\n } else {\n nsURI = (String) ((BLangLiteral) xmlnsNode.namespaceURI).value;\n if (!nullOrEmpty(xmlnsNode.prefix.value) && nsURI.isEmpty()) {\n dlog.error(xmlnsNode.pos, DiagnosticErrorCode.INVALID_NAMESPACE_DECLARATION, xmlnsNode.prefix);\n }\n }\n\n \n if (xmlnsNode.prefix.value == null) {\n xmlnsNode.prefix.value = XMLConstants.DEFAULT_NS_PREFIX;\n }\n\n Name prefix = names.fromIdNode(xmlnsNode.prefix);\n Location nsSymbolPos = prefix.value.isEmpty() ? 
xmlnsNode.pos : xmlnsNode.prefix.pos;\n BXMLNSSymbol xmlnsSymbol = Symbols.createXMLNSSymbol(prefix, nsURI, env.enclPkg.symbol.pkgID, env.scope.owner,\n nsSymbolPos, getOrigin(prefix));\n xmlnsNode.symbol = xmlnsSymbol;\n\n \n \n \n BSymbol foundSym = symResolver.lookupSymbolInPrefixSpace(env, xmlnsSymbol.name);\n if ((foundSym.tag & SymTag.PACKAGE) != SymTag.PACKAGE) {\n foundSym = symTable.notFoundSymbol;\n }\n if (foundSym != symTable.notFoundSymbol) {\n dlog.error(xmlnsNode.pos, DiagnosticErrorCode.REDECLARED_SYMBOL, xmlnsSymbol.name);\n return;\n }\n\n \n \n defineSymbol(xmlnsNode.prefix.pos, xmlnsSymbol);\n }\n\n private boolean nullOrEmpty(String value) {\n return value == null || value.isEmpty();\n }\n\n public void visit(BLangXMLNSStatement xmlnsStmtNode) {\n defineNode(xmlnsStmtNode.xmlnsDecl, env);\n }\n\n private void defineTypeNodes(List typeDefs, SymbolEnv env) {\n if (typeDefs.isEmpty()) {\n return;\n }\n\n this.unresolvedTypes = new ArrayList<>(typeDefs.size());\n this.unresolvedRecordDueToFields = new HashSet<>(typeDefs.size());\n this.resolveRecordsUnresolvedDueToFields = false;\n for (BLangNode typeDef : typeDefs) {\n if (isErrorIntersectionTypeCreatingNewType(typeDef, env)) {\n populateUndefinedErrorIntersection((BLangTypeDefinition) typeDef, env);\n continue;\n }\n\n\n\n\n defineNode(typeDef, env);\n }\n\n if (typeDefs.size() <= unresolvedTypes.size()) {\n\n this.resolveRecordsUnresolvedDueToFields = true;\n unresolvedTypes.removeAll(unresolvedRecordDueToFields);\n for (BLangNode unresolvedType : unresolvedRecordDueToFields) {\n defineNode(unresolvedType, env);\n }\n this.resolveRecordsUnresolvedDueToFields = false;\n\n \n \n \n\n for (BLangNode unresolvedType : unresolvedTypes) {\n Stack references = new Stack<>();\n NodeKind unresolvedKind = unresolvedType.getKind();\n if (unresolvedKind == NodeKind.TYPE_DEFINITION || unresolvedKind == NodeKind.CONSTANT) {\n TypeDefinition def = (TypeDefinition) unresolvedType;\n \n 
references.push(def.getName().getValue());\n checkErrors(env, unresolvedType, (BLangNode) def.getTypeNode(), references, false);\n } else if (unresolvedType.getKind() == NodeKind.CLASS_DEFN) {\n BLangClassDefinition classDefinition = (BLangClassDefinition) unresolvedType;\n references.push(classDefinition.getName().getValue());\n checkErrors(env, unresolvedType, classDefinition, references, true);\n }\n }\n defineAllUnresolvedCyclicTypesInScope(env);\n\n Set alreadyDefinedTypeDefNames = new HashSet<>();\n int unresolvedTypeCount = unresolvedTypes.size();\n for (int i = 0; i < unresolvedTypeCount; i++) {\n for (BLangNode node : this.unresolvedTypes) {\n String name = getTypeOrClassName(node);\n boolean symbolNotFound = false;\n boolean isTypeOrClassDefinition =\n node.getKind() == NodeKind.TYPE_DEFINITION || node.getKind() == NodeKind.CLASS_DEFN;\n \n \n if (isTypeOrClassDefinition && i != 0) { \n BSymbol bSymbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));\n symbolNotFound = (bSymbol == symTable.notFoundSymbol);\n }\n\n boolean notFoundInList = alreadyDefinedTypeDefNames.add(name);\n\n \n if (notFoundInList || symbolNotFound) {\n defineNode(node, env);\n }\n }\n }\n return;\n }\n defineTypeNodes(unresolvedTypes, env);\n }\n\n private void populateUndefinedErrorIntersection(BLangTypeDefinition typeDef, SymbolEnv env) {\n long flags = 0;\n if (typeDef.flagSet.contains(Flag.PUBLIC)) {\n flags = Flags.PUBLIC;\n }\n\n BErrorType intersectionErrorType = types.createErrorType(null, flags, env);\n intersectionErrorType.tsymbol.name = names.fromString(typeDef.name.value);\n defineErrorType(typeDef.pos, intersectionErrorType, env);\n\n this.intersectionTypes.add(typeDef);\n }\n\n private boolean isErrorIntersectionTypeCreatingNewType(BLangNode typeDef, SymbolEnv env) {\n boolean isIntersectionType = typeDef.getKind() == NodeKind.TYPE_DEFINITION\n && ((BLangTypeDefinition) typeDef).typeNode.getKind() == NodeKind.INTERSECTION_TYPE_NODE;\n if 
(!isIntersectionType) {\n return false;\n }\n\n BLangIntersectionTypeNode intersectionTypeNode =\n (BLangIntersectionTypeNode) ((BLangTypeDefinition) typeDef).typeNode;\n\n Set errorTypes = new HashSet<>();\n\n for (BLangType type : intersectionTypeNode.constituentTypeNodes) {\n BType bType = symResolver.resolveTypeNode(type, env);\n if (Types.getReferredType(bType).tag == TypeTags.ERROR) {\n errorTypes.add(bType);\n }\n }\n return errorTypes.size() > 1;\n }\n\n private void checkErrors(SymbolEnv env, BLangNode unresolvedType, BLangNode currentTypeOrClassNode,\n Stack visitedNodes,\n boolean fromStructuredType) {\n \n List memberTypeNodes;\n switch (currentTypeOrClassNode.getKind()) {\n case ARRAY_TYPE:\n checkErrors(env, unresolvedType, ((BLangArrayType) currentTypeOrClassNode).elemtype, visitedNodes,\n true);\n break;\n case UNION_TYPE_NODE:\n \n memberTypeNodes = ((BLangUnionTypeNode) currentTypeOrClassNode).memberTypeNodes;\n \n for (BLangType memberTypeNode : memberTypeNodes) {\n checkErrors(env, unresolvedType, memberTypeNode, visitedNodes, fromStructuredType);\n }\n break;\n case INTERSECTION_TYPE_NODE:\n memberTypeNodes = ((BLangIntersectionTypeNode) currentTypeOrClassNode).constituentTypeNodes;\n for (BLangType memberTypeNode : memberTypeNodes) {\n checkErrors(env, unresolvedType, memberTypeNode, visitedNodes, fromStructuredType);\n }\n break;\n case TUPLE_TYPE_NODE:\n BLangTupleTypeNode tupleNode = (BLangTupleTypeNode) currentTypeOrClassNode;\n List tupleMemberTypes = tupleNode.getMemberTypeNodes();\n for (BLangType memberTypeNode : tupleMemberTypes) {\n checkErrors(env, unresolvedType, memberTypeNode, visitedNodes, true);\n }\n if (tupleNode.restParamType != null) {\n checkErrors(env, unresolvedType, tupleNode.restParamType, visitedNodes, true);\n }\n break;\n case CONSTRAINED_TYPE:\n checkErrors(env, unresolvedType, ((BLangConstrainedType) currentTypeOrClassNode).constraint,\n visitedNodes,\n true);\n break;\n case TABLE_TYPE:\n checkErrors(env, 
unresolvedType, ((BLangTableTypeNode) currentTypeOrClassNode).constraint, visitedNodes,\n true);\n break;\n case STREAM_TYPE:\n checkErrors(env, unresolvedType, ((BLangStreamType) currentTypeOrClassNode).constraint, visitedNodes,\n true);\n BLangType completionType = ((BLangStreamType) currentTypeOrClassNode).error;\n if (completionType != null) {\n checkErrors(env, unresolvedType, completionType, visitedNodes, true);\n }\n break;\n case USER_DEFINED_TYPE:\n checkErrorsOfUserDefinedType(env, unresolvedType, (BLangUserDefinedType) currentTypeOrClassNode,\n visitedNodes, fromStructuredType);\n break;\n case BUILT_IN_REF_TYPE:\n \n case FINITE_TYPE_NODE:\n case VALUE_TYPE:\n case ERROR_TYPE:\n \n break;\n case FUNCTION_TYPE:\n BLangFunctionTypeNode functionTypeNode = (BLangFunctionTypeNode) currentTypeOrClassNode;\n functionTypeNode.params.forEach(p -> checkErrors(env, unresolvedType, p.typeNode, visitedNodes,\n fromStructuredType));\n if (functionTypeNode.restParam != null) {\n checkErrors(env, unresolvedType, functionTypeNode.restParam.typeNode, visitedNodes,\n fromStructuredType);\n }\n if (functionTypeNode.returnTypeNode != null) {\n checkErrors(env, unresolvedType, functionTypeNode.returnTypeNode, visitedNodes, fromStructuredType);\n }\n break;\n case RECORD_TYPE:\n for (TypeNode typeNode : ((BLangRecordTypeNode) currentTypeOrClassNode).getTypeReferences()) {\n checkErrors(env, unresolvedType, (BLangType) typeNode, visitedNodes, true);\n }\n break;\n case OBJECT_TYPE:\n for (TypeNode typeNode : ((BLangObjectTypeNode) currentTypeOrClassNode).getTypeReferences()) {\n checkErrors(env, unresolvedType, (BLangType) typeNode, visitedNodes, true);\n }\n break;\n case CLASS_DEFN:\n for (TypeNode typeNode : ((BLangClassDefinition) currentTypeOrClassNode).typeRefs) {\n checkErrors(env, unresolvedType, (BLangType) typeNode, visitedNodes, true);\n }\n break;\n default:\n throw new RuntimeException(\"unhandled type kind: \" + currentTypeOrClassNode.getKind());\n }\n }\n\n 
private boolean isTypeConstructorAvailable(NodeKind unresolvedType) {\n switch (unresolvedType) {\n case OBJECT_TYPE:\n case RECORD_TYPE:\n case CONSTRAINED_TYPE:\n case ARRAY_TYPE:\n case TUPLE_TYPE_NODE:\n case TABLE_TYPE:\n case ERROR_TYPE:\n case FUNCTION_TYPE:\n case STREAM_TYPE:\n return true;\n default:\n return false;\n }\n }\n\n private void checkErrorsOfUserDefinedType(SymbolEnv env, BLangNode unresolvedType,\n BLangUserDefinedType currentTypeOrClassNode,\n Stack visitedNodes, boolean fromStructuredType) {\n String currentTypeNodeName = currentTypeOrClassNode.typeName.value;\n \n if (currentTypeNodeName.startsWith(\"$\")) {\n return;\n }\n String unresolvedTypeNodeName = getTypeOrClassName(unresolvedType);\n boolean sameTypeNode = unresolvedTypeNodeName.equals(currentTypeNodeName);\n boolean isVisited = visitedNodes.contains(currentTypeNodeName);\n boolean typeDef = unresolvedType.getKind() == NodeKind.TYPE_DEFINITION;\n\n if (sameTypeNode || isVisited) {\n if (typeDef) {\n BLangTypeDefinition typeDefinition = (BLangTypeDefinition) unresolvedType;\n NodeKind unresolvedTypeNodeKind = typeDefinition.getTypeNode().getKind();\n if (fromStructuredType && (unresolvedTypeNodeKind == NodeKind.UNION_TYPE_NODE\n || unresolvedTypeNodeKind == NodeKind.TUPLE_TYPE_NODE)) {\n \n typeDefinition.hasCyclicReference = true;\n return;\n }\n \n if (unresolvedTypeNodeKind != NodeKind.OBJECT_TYPE && isTypeConstructorAvailable(unresolvedTypeNodeKind)\n && !sameTypeNode) {\n return;\n }\n }\n if (isVisited) {\n \n \n \n \n \n \n int i = visitedNodes.indexOf(currentTypeNodeName);\n List dependencyList = new ArrayList<>(visitedNodes.size() - i);\n for (; i < visitedNodes.size(); i++) {\n dependencyList.add(visitedNodes.get(i));\n }\n if (!sameTypeNode && dependencyList.size() == 1\n && dependencyList.get(0).equals(currentTypeNodeName)) {\n \n \n \n \n return;\n }\n \n dependencyList.add(currentTypeNodeName);\n dlog.error(unresolvedType.getPosition(), 
DiagnosticErrorCode.CYCLIC_TYPE_REFERENCE, dependencyList);\n } else {\n visitedNodes.push(currentTypeNodeName);\n dlog.error(unresolvedType.getPosition(), DiagnosticErrorCode.CYCLIC_TYPE_REFERENCE, visitedNodes);\n visitedNodes.remove(currentTypeNodeName);\n }\n } else {\n \n \n List typeDefinitions = unresolvedTypes.stream()\n .filter(node -> getTypeOrClassName(node).equals(currentTypeNodeName)).collect(Collectors.toList());\n\n if (typeDefinitions.isEmpty()) {\n BType referredType = symResolver.resolveTypeNode(currentTypeOrClassNode, env);\n \n if (referredType != symTable.noType) {\n return;\n }\n\n \n \n LocationData locationData = new LocationData(\n currentTypeNodeName, currentTypeOrClassNode.pos.lineRange().startLine().line(),\n currentTypeOrClassNode.pos.lineRange().startLine().offset());\n if (unknownTypeRefs.add(locationData)) {\n dlog.error(currentTypeOrClassNode.pos, DiagnosticErrorCode.UNKNOWN_TYPE, currentTypeNodeName);\n }\n } else {\n for (BLangNode typeDefinition : typeDefinitions) {\n if (typeDefinition.getKind() == NodeKind.TYPE_DEFINITION) {\n BLangTypeDefinition langTypeDefinition = (BLangTypeDefinition) typeDefinition;\n String typeName = langTypeDefinition.getName().getValue();\n \n visitedNodes.push(typeName);\n \n checkErrors(env, unresolvedType, langTypeDefinition.getTypeNode(), visitedNodes,\n fromStructuredType);\n \n visitedNodes.pop();\n } else {\n BLangClassDefinition classDefinition = (BLangClassDefinition) typeDefinition;\n visitedNodes.push(classDefinition.getName().getValue());\n checkErrors(env, unresolvedType, classDefinition, visitedNodes, fromStructuredType);\n visitedNodes.pop();\n }\n }\n }\n }\n }\n\n private String getTypeOrClassName(BLangNode node) {\n if (node.getKind() == NodeKind.TYPE_DEFINITION || node.getKind() == NodeKind.CONSTANT) {\n return ((TypeDefinition) node).getName().getValue();\n } else {\n return ((BLangClassDefinition) node).getName().getValue();\n }\n }\n\n public boolean 
isUnknownTypeRef(BLangUserDefinedType bLangUserDefinedType) {\n var startLine = bLangUserDefinedType.pos.lineRange().startLine();\n LocationData locationData = new LocationData(bLangUserDefinedType.typeName.value, startLine.line(),\n startLine.offset());\n return unknownTypeRefs.contains(locationData);\n }\n\n @Override\n public void visit(BLangTypeDefinition typeDefinition) {\n BType definedType;\n if (typeDefinition.hasCyclicReference) {\n definedType = getCyclicDefinedType(typeDefinition, env);\n } else {\n definedType = symResolver.resolveTypeNode(typeDefinition.typeNode, env);\n }\n\n if (definedType == symTable.semanticError) {\n \n\n invalidateAlreadyDefinedErrorType(typeDefinition);\n return;\n }\n if (definedType == symTable.noType) {\n \n if (!this.unresolvedTypes.contains(typeDefinition)) {\n this.unresolvedTypes.add(typeDefinition);\n }\n return;\n }\n\n \n boolean hasTypeInclusions = false;\n NodeKind typeNodeKind = typeDefinition.typeNode.getKind();\n if (typeNodeKind == TUPLE_TYPE_NODE) {\n if (definedType.tsymbol.scope == null) {\n definedType.tsymbol.scope = new Scope(definedType.tsymbol);\n }\n }\n if (typeNodeKind == NodeKind.OBJECT_TYPE || typeNodeKind == NodeKind.RECORD_TYPE) {\n if (definedType.tsymbol.scope == null) {\n definedType.tsymbol.scope = new Scope(definedType.tsymbol);\n }\n BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDefinition.typeNode;\n \n \n for (BLangType typeRef : structureTypeNode.typeRefs) {\n hasTypeInclusions = true;\n BType referencedType = symResolver.resolveTypeNode(typeRef, env);\n if (referencedType == symTable.noType) {\n if (!this.unresolvedTypes.contains(typeDefinition)) {\n this.unresolvedTypes.add(typeDefinition);\n return;\n }\n }\n }\n }\n\n \n if (hasTypeInclusions && !this.resolveRecordsUnresolvedDueToFields && typeNodeKind == NodeKind.RECORD_TYPE) {\n BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDefinition.typeNode;\n for (BLangSimpleVariable variable : 
structureTypeNode.fields) {\n if (variable.typeNode.getKind() == NodeKind.FUNCTION_TYPE) {\n continue;\n }\n Scope scope = new Scope(structureTypeNode.symbol);\n structureTypeNode.symbol.scope = scope;\n SymbolEnv typeEnv = SymbolEnv.createTypeEnv(structureTypeNode, scope, env);\n BType referencedType = symResolver.resolveTypeNode(variable.typeNode, typeEnv);\n if (referencedType == symTable.noType) {\n if (this.unresolvedRecordDueToFields.add(typeDefinition) &&\n !this.unresolvedTypes.contains(typeDefinition)) {\n this.unresolvedTypes.add(typeDefinition);\n return;\n }\n }\n }\n }\n\n if (typeDefinition.flagSet.contains(Flag.ENUM)) {\n definedType.tsymbol = createEnumSymbol(typeDefinition, definedType);\n }\n\n typeDefinition.setPrecedence(this.typePrecedence++);\n\n BSymbol typeDefSymbol = Symbols.createTypeDefinitionSymbol(Flags.asMask(typeDefinition.flagSet),\n names.fromIdNode(typeDefinition.name), env.enclPkg.packageID, definedType, env.scope.owner,\n typeDefinition.name.pos, getOrigin(typeDefinition.name.value));\n typeDefSymbol.markdownDocumentation = getMarkdownDocAttachment(typeDefinition.markdownDocumentationAttachment);\n BTypeSymbol typeSymbol = new BTypeSymbol(SymTag.TYPE_REF, typeDefSymbol.flags, typeDefSymbol.name,\n typeDefSymbol.pkgID, typeDefSymbol.type, typeDefSymbol.owner, typeDefSymbol.pos, typeDefSymbol.origin);\n typeSymbol.markdownDocumentation = typeDefSymbol.markdownDocumentation;\n ((BTypeDefinitionSymbol) typeDefSymbol).referenceType = new BTypeReferenceType(definedType, typeSymbol,\n typeDefSymbol.type.flags);\n\n boolean isLabel = true;\n \n if (definedType.tsymbol.name == Names.EMPTY) {\n isLabel = false;\n definedType.tsymbol.name = names.fromIdNode(typeDefinition.name);\n definedType.tsymbol.originalName = names.originalNameFromIdNode(typeDefinition.name);\n definedType.tsymbol.flags |= typeDefSymbol.flags;\n\n definedType.tsymbol.markdownDocumentation = typeDefSymbol.markdownDocumentation;\n definedType.tsymbol.pkgID = 
env.enclPkg.packageID;\n if (definedType.tsymbol.tag == SymTag.ERROR) {\n definedType.tsymbol.owner = env.scope.owner;\n }\n }\n\n if ((((definedType.tsymbol.kind == SymbolKind.OBJECT\n && !Symbols.isFlagOn(definedType.tsymbol.flags, Flags.CLASS))\n || definedType.tsymbol.kind == SymbolKind.RECORD))\n && ((BStructureTypeSymbol) definedType.tsymbol).typeDefinitionSymbol == null) {\n ((BStructureTypeSymbol) definedType.tsymbol).typeDefinitionSymbol = (BTypeDefinitionSymbol) typeDefSymbol;\n }\n\n if (typeDefinition.flagSet.contains(Flag.ENUM)) {\n typeDefSymbol = definedType.tsymbol;\n typeDefSymbol.pos = typeDefinition.name.pos;\n }\n\n boolean isErrorIntersection = isErrorIntersection(definedType);\n if (isErrorIntersection) {\n populateSymbolNameOfErrorIntersection(definedType, typeDefinition.name.value);\n populateAllReadyDefinedErrorIntersection(definedType, typeDefinition, env);\n }\n\n BType referenceConstraintType = Types.getReferredType(definedType);\n boolean isIntersectionType = referenceConstraintType.tag == TypeTags.INTERSECTION && !isLabel;\n\n BType effectiveDefinedType = isIntersectionType ? 
((BIntersectionType) referenceConstraintType).effectiveType :\n referenceConstraintType;\n\n boolean isIntersectionTypeWithNonNullEffectiveTypeSymbol =\n isIntersectionType && effectiveDefinedType.tsymbol != null;\n\n if (isIntersectionTypeWithNonNullEffectiveTypeSymbol) {\n BTypeSymbol effectiveTypeSymbol = effectiveDefinedType.tsymbol;\n effectiveTypeSymbol.name = typeDefSymbol.name;\n effectiveTypeSymbol.pkgID = typeDefSymbol.pkgID;\n }\n\n handleDistinctDefinition(typeDefinition, typeDefSymbol, definedType, referenceConstraintType);\n\n typeDefSymbol.flags |= Flags.asMask(typeDefinition.flagSet);\n \n typeDefSymbol.flags &= getPublicFlagResetingMask(typeDefinition.flagSet, typeDefinition.typeNode);\n if (isDeprecated(typeDefinition.annAttachments)) {\n typeDefSymbol.flags |= Flags.DEPRECATED;\n }\n\n \n if (Symbols.isFlagOn(typeDefSymbol.flags, Flags.ANONYMOUS)) {\n typeDefSymbol.origin = VIRTUAL;\n }\n\n if (typeDefinition.annAttachments.stream()\n .anyMatch(attachment -> attachment.annotationName.value.equals(Names.ANNOTATION_TYPE_PARAM.value))) {\n \n \n if (PackageID.isLangLibPackageID(this.env.enclPkg.packageID)) {\n typeDefSymbol.type = typeParamAnalyzer.createTypeParam(typeDefSymbol.type, typeDefSymbol.name);\n typeDefSymbol.flags |= Flags.TYPE_PARAM;\n } else {\n dlog.error(typeDefinition.pos, DiagnosticErrorCode.TYPE_PARAM_OUTSIDE_LANG_MODULE);\n }\n }\n definedType.flags |= typeDefSymbol.flags;\n\n if (isIntersectionTypeWithNonNullEffectiveTypeSymbol) {\n BTypeSymbol effectiveTypeSymbol = effectiveDefinedType.tsymbol;\n effectiveTypeSymbol.flags |= definedType.tsymbol.flags;\n effectiveTypeSymbol.origin = VIRTUAL;\n effectiveDefinedType.flags |= definedType.flags;\n }\n\n typeDefinition.symbol = typeDefSymbol;\n\n if (typeDefinition.hasCyclicReference) {\n \n typeDefinition.getBType().tsymbol = definedType.tsymbol;\n } else {\n boolean isLanglibModule = PackageID.isLangLibPackageID(this.env.enclPkg.packageID);\n if (isLanglibModule) {\n 
handleLangLibTypes(typeDefinition);\n return;\n }\n \n if (!isErrorIntersection || lookupTypeSymbol(env, typeDefinition.name) == symTable.notFoundSymbol) {\n defineSymbol(typeDefinition.name.pos, typeDefSymbol);\n }\n }\n }\n\n private void handleDistinctDefinition(BLangTypeDefinition typeDefinition, BSymbol typeDefSymbol,\n BType definedType, BType referenceConstraintType) {\n BType distinctType = definedType;\n if (isDistinctFlagPresent(typeDefinition)) {\n if (referenceConstraintType.getKind() == TypeKind.ERROR) {\n distinctType = getDistinctErrorType(typeDefinition, (BErrorType) referenceConstraintType,\n typeDefSymbol);\n typeDefinition.typeNode.setBType(distinctType);\n } else if (referenceConstraintType.tag == TypeTags.INTERSECTION &&\n ((BIntersectionType) referenceConstraintType).effectiveType.getKind() == TypeKind.ERROR) {\n boolean distinctFlagPresentInTypeDef = typeDefinition.typeNode.flagSet.contains(Flag.DISTINCT);\n\n BTypeIdSet typeIdSet = BTypeIdSet.emptySet();\n int numberOfDistinctConstituentTypes = 0;\n BLangIntersectionTypeNode intersectionTypeNode = (BLangIntersectionTypeNode) typeDefinition.typeNode;\n for (BLangType constituentType : intersectionTypeNode.constituentTypeNodes) {\n BType type = constituentType.getBType();\n\n if (type.getKind() == TypeKind.ERROR) {\n if (constituentType.flagSet.contains(Flag.DISTINCT)) {\n numberOfDistinctConstituentTypes++;\n typeIdSet.addSecondarySet(((BErrorType) type).typeIdSet.getAll());\n } else {\n typeIdSet.add(((BErrorType) type).typeIdSet);\n }\n }\n }\n\n BErrorType effectiveType = (BErrorType) ((BIntersectionType) referenceConstraintType).effectiveType;\n\n \n \n \n if (numberOfDistinctConstituentTypes == 1\n || (numberOfDistinctConstituentTypes == 0 && distinctFlagPresentInTypeDef)) {\n BTypeIdSet typeIdSetForDefinedType = BTypeIdSet.from(\n env.enclPkg.packageID,\n typeDefinition.name.value,\n typeDefinition.flagSet.contains(Flag.PUBLIC),\n typeIdSet);\n 
effectiveType.typeIdSet.add(typeIdSetForDefinedType);\n } else {\n for (BLangType constituentType : intersectionTypeNode.constituentTypeNodes) {\n if (constituentType.getBType().getKind() != TypeKind.ERROR) {\n continue;\n }\n if (constituentType.flagSet.contains(Flag.DISTINCT)) {\n typeIdSet.add(BTypeIdSet.from(env.enclPkg.packageID,\n anonymousModelHelper.getNextAnonymousTypeId(env.enclPkg.packageID), true));\n }\n }\n effectiveType.typeIdSet.add(typeIdSet);\n }\n\n } else if (referenceConstraintType.getKind() == TypeKind.OBJECT) {\n distinctType = getDistinctObjectType(typeDefinition, (BObjectType) referenceConstraintType,\n referenceConstraintType.tsymbol);\n typeDefinition.typeNode.setBType(distinctType);\n }\n\n \n if (((BTypeDefinitionSymbol) typeDefSymbol).referenceType != null) {\n ((BTypeDefinitionSymbol) typeDefSymbol).referenceType.referredType = distinctType;\n }\n definedType.flags |= Flags.DISTINCT;\n }\n }\n\n private void invalidateAlreadyDefinedErrorType(BLangTypeDefinition typeDefinition) {\n \n BSymbol alreadyDefinedTypeSymbol = lookupTypeSymbol(env, typeDefinition.name);\n if (alreadyDefinedTypeSymbol.type.tag == TypeTags.ERROR) {\n alreadyDefinedTypeSymbol.type = symTable.errorType;\n }\n }\n\n private void populateErrorTypeIds(BErrorType effectiveType, BLangIntersectionTypeNode typeNode, String name,\n boolean distinctFlagPresentInTypeDef) {\n BTypeIdSet typeIdSet = BTypeIdSet.emptySet();\n int numberOfDistinctConstituentTypes = 0;\n\n for (BLangType constituentType : typeNode.constituentTypeNodes) {\n BType resolvedTypeNode = symResolver.resolveTypeNode(constituentType, env);\n BType type = Types.getReferredType(resolvedTypeNode);\n\n if (type.getKind() == TypeKind.ERROR) {\n if (constituentType.flagSet.contains(Flag.DISTINCT)) {\n numberOfDistinctConstituentTypes++;\n typeIdSet.addSecondarySet(((BErrorType) type).typeIdSet.getAll());\n } else {\n typeIdSet.add(((BErrorType) type).typeIdSet);\n }\n }\n }\n\n \n \n \n if 
(numberOfDistinctConstituentTypes == 1\n || (numberOfDistinctConstituentTypes == 0 && distinctFlagPresentInTypeDef)) {\n effectiveType.typeIdSet = BTypeIdSet.from(env.enclPkg.packageID, name, true, typeIdSet);\n } else {\n for (BLangType constituentType : typeNode.constituentTypeNodes) {\n if (constituentType.flagSet.contains(Flag.DISTINCT)) {\n typeIdSet.add(BTypeIdSet.from(env.enclPkg.packageID,\n anonymousModelHelper.getNextAnonymousTypeId(env.enclPkg.packageID), true));\n }\n }\n effectiveType.typeIdSet = typeIdSet;\n }\n }\n\n private void populateAllReadyDefinedErrorIntersection(BType definedType, BLangTypeDefinition typeDefinition,\n SymbolEnv env) {\n\n BSymbol bSymbol = lookupTypeSymbol(env, typeDefinition.name);\n BErrorType alreadyDefinedErrorType = (BErrorType) bSymbol.type;\n\n boolean distinctFlagPresent = typeDefinition.typeNode.flagSet.contains(Flag.DISTINCT);\n\n BIntersectionType intersectionType = (BIntersectionType) definedType;\n BErrorType errorType = (BErrorType) intersectionType.effectiveType;\n populateErrorTypeIds(errorType, (BLangIntersectionTypeNode) typeDefinition.typeNode,\n typeDefinition.name.value, distinctFlagPresent);\n\n alreadyDefinedErrorType.typeIdSet = errorType.typeIdSet;\n alreadyDefinedErrorType.detailType = errorType.detailType;\n alreadyDefinedErrorType.flags = errorType.flags;\n alreadyDefinedErrorType.name = errorType.name;\n intersectionType.effectiveType = alreadyDefinedErrorType;\n\n if (!errorType.typeIdSet.isEmpty()) {\n definedType.flags |= Flags.DISTINCT;\n }\n }\n\n private BSymbol lookupTypeSymbol(SymbolEnv env, BLangIdentifier name) {\n return symResolver.lookupSymbolInMainSpace(env, names.fromString(name.value));\n }\n\n private void populateSymbolNameOfErrorIntersection(BType definedType, String typeDefName) {\n BErrorType effectiveErrorType = (BErrorType) ((BIntersectionType) definedType).effectiveType;\n effectiveErrorType.tsymbol.name = names.fromString(typeDefName);\n }\n\n private boolean 
isErrorIntersection(BType definedType) {\n BType type = Types.getReferredType(definedType);\n if (type.tag == TypeTags.INTERSECTION) {\n BIntersectionType intersectionType = (BIntersectionType) type;\n return intersectionType.effectiveType.tag == TypeTags.ERROR;\n }\n\n return false;\n }\n\n private BEnumSymbol createEnumSymbol(BLangTypeDefinition typeDefinition, BType definedType) {\n List enumMembers = new ArrayList<>();\n\n List members = ((BLangUnionTypeNode) typeDefinition.typeNode).memberTypeNodes;\n for (BLangType member : members) {\n enumMembers.add((BConstantSymbol) ((BLangUserDefinedType) member).symbol);\n }\n\n BEnumSymbol enumSymbol = new BEnumSymbol(enumMembers, Flags.asMask(typeDefinition.flagSet),\n names.fromIdNode(typeDefinition.name), names.fromIdNode(typeDefinition.name),\n env.enclPkg.symbol.pkgID, definedType, env.scope.owner,\n typeDefinition.pos, SOURCE);\n\n enumSymbol.name = names.fromIdNode(typeDefinition.name);\n enumSymbol.originalName = names.fromIdNode(typeDefinition.name);\n enumSymbol.flags |= Flags.asMask(typeDefinition.flagSet);\n\n enumSymbol.markdownDocumentation = getMarkdownDocAttachment(typeDefinition.markdownDocumentationAttachment);\n enumSymbol.pkgID = env.enclPkg.packageID;\n return enumSymbol;\n }\n\n private BObjectType getDistinctObjectType(BLangTypeDefinition typeDefinition, BObjectType definedType,\n BTypeSymbol typeDefSymbol) {\n BTypeSymbol tSymbol = typeDefSymbol.kind == SymbolKind.TYPE_DEF ? 
typeDefSymbol.type.tsymbol : typeDefSymbol;\n BObjectType definedObjType = definedType;\n \n \n \n if (definedObjType.tsymbol != tSymbol) {\n BObjectType objType = new BObjectType(tSymbol);\n tSymbol.type = objType;\n definedObjType = objType;\n }\n boolean isPublicType = typeDefinition.flagSet.contains(Flag.PUBLIC);\n definedObjType.typeIdSet = calculateTypeIdSet(typeDefinition, isPublicType, definedType.typeIdSet);\n return definedObjType;\n }\n\n private void defineTypeInMainScope(BTypeSymbol typeDefSymbol, BLangTypeDefinition typeDef, SymbolEnv env) {\n if (PackageID.isLangLibPackageID(env.enclPkg.packageID)) {\n typeDefSymbol.origin = BUILTIN;\n handleLangLibTypes(typeDef);\n } else {\n defineSymbol(typeDef.name.pos, typeDefSymbol, env);\n }\n }\n\n private BType defineSymbolForCyclicTypeDefinition(BLangTypeDefinition typeDef, SymbolEnv env) {\n Name newTypeDefName = names.fromIdNode(typeDef.name);\n BTypeSymbol typeDefSymbol;\n BType newTypeNode;\n\n switch (typeDef.typeNode.getKind()) {\n case TUPLE_TYPE_NODE:\n newTypeNode = new BTupleType(null, new ArrayList<>(), true);\n typeDefSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE, Flags.asMask(typeDef.flagSet),\n newTypeDefName, env.enclPkg.symbol.pkgID, newTypeNode, env.scope.owner,\n typeDef.name.pos, SOURCE);\n break;\n default:\n newTypeNode = BUnionType.create(null, new LinkedHashSet<>(), true);\n typeDefSymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(typeDef.flagSet),\n newTypeDefName, env.enclPkg.symbol.pkgID, newTypeNode, env.scope.owner,\n typeDef.name.pos, SOURCE);\n }\n typeDef.symbol = typeDefSymbol;\n defineTypeInMainScope(typeDefSymbol, typeDef, env);\n newTypeNode.tsymbol = typeDefSymbol;\n newTypeNode.flags |= typeDefSymbol.flags;\n return newTypeNode;\n }\n\n private BType getCyclicDefinedType(BLangTypeDefinition typeDef, SymbolEnv env) {\n \n BSymbol foundSym = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(typeDef.name));\n BType newTypeNode = 
foundSym.type;\n\n \n \n \n \n BType resolvedTypeNodes = symResolver.resolveTypeNode(typeDef.typeNode, env);\n\n if (resolvedTypeNodes == symTable.noType) {\n return symTable.semanticError;\n }\n\n switch (resolvedTypeNodes.tag) {\n case TypeTags.TUPLE:\n BTupleType definedTupleType = (BTupleType) resolvedTypeNodes;\n for (BType member : definedTupleType.getTupleTypes()) {\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(member);\n if (!((BTupleType) newTypeNode).addMembers(new BTupleMember(member, varSymbol))) {\n return constructDependencyListError(typeDef, member);\n }\n }\n if (!((BTupleType) newTypeNode).addRestType(definedTupleType.restType)) {\n return constructDependencyListError(typeDef, definedTupleType.restType);\n }\n break;\n default:\n BUnionType definedUnionType = (BUnionType) resolvedTypeNodes;\n for (BType member : definedUnionType.getMemberTypes()) {\n ((BUnionType) newTypeNode).add(member);\n }\n break;\n }\n typeDef.typeNode.setBType(newTypeNode);\n typeDef.typeNode.getBType().tsymbol.type = newTypeNode;\n typeDef.symbol.type = newTypeNode;\n typeDef.setBType(newTypeNode);\n return newTypeNode;\n }\n\n private void defineAllUnresolvedCyclicTypesInScope(SymbolEnv env) {\n SymbolEnv prevEnv = this.env;\n this.env = env;\n for (BLangNode unresolvedNode : unresolvedTypes) {\n if (unresolvedNode.getKind() == NodeKind.TYPE_DEFINITION &&\n ((BLangTypeDefinition) unresolvedNode).hasCyclicReference) {\n defineSymbolForCyclicTypeDefinition((BLangTypeDefinition) unresolvedNode, env);\n }\n }\n this.env = prevEnv;\n }\n\n private BType constructDependencyListError(BLangTypeDefinition typeDef, BType member) {\n List dependencyList = new ArrayList<>();\n dependencyList.add(getTypeOrClassName(typeDef));\n dependencyList.add(member.tsymbol.name.value);\n dlog.error(typeDef.getPosition(), DiagnosticErrorCode.CYCLIC_TYPE_REFERENCE, dependencyList);\n return symTable.semanticError;\n }\n\n private BErrorType getDistinctErrorType(BLangTypeDefinition 
typeDefinition, BErrorType definedType,\n BSymbol typeDefSymbol) {\n BErrorType definedErrorType = definedType;\n \n \n \n if (definedErrorType.tsymbol != typeDefSymbol) {\n BTypeSymbol typeSymbol = new BTypeSymbol(SymTag.TYPE_DEF, typeDefSymbol.flags, typeDefSymbol.name,\n typeDefSymbol.pkgID, null, typeDefSymbol.owner, typeDefSymbol.pos, typeDefSymbol.origin);\n BErrorType bErrorType = new BErrorType(typeSymbol);\n typeSymbol.type = bErrorType;\n bErrorType.detailType = definedErrorType.detailType;\n typeDefSymbol.type = bErrorType;\n definedErrorType = bErrorType;\n }\n boolean isPublicType = typeDefinition.flagSet.contains(Flag.PUBLIC);\n definedErrorType.typeIdSet = calculateTypeIdSet(typeDefinition, isPublicType, definedType.typeIdSet);\n return definedErrorType;\n }\n\n private BTypeIdSet calculateTypeIdSet(BLangTypeDefinition typeDefinition, boolean isPublicType,\n BTypeIdSet secondary) {\n String name = typeDefinition.flagSet.contains(Flag.ANONYMOUS)\n ? anonymousModelHelper.getNextAnonymousTypeId(env.enclPkg.packageID)\n : typeDefinition.getName().value;\n\n return BTypeIdSet.from(env.enclPkg.packageID, name, isPublicType, secondary);\n }\n\n private boolean isDistinctFlagPresent(BLangTypeDefinition typeDefinition) {\n if (typeDefinition.typeNode.flagSet.contains(Flag.DISTINCT)) {\n return true;\n }\n\n return false;\n }\n\n private void handleLangLibTypes(BLangTypeDefinition typeDefinition) {\n\n \n for (BLangAnnotationAttachment attachment : typeDefinition.annAttachments) {\n if (attachment.annotationName.value.equals(Names.ANNOTATION_TYPE_PARAM.value)) {\n BSymbol typeDefSymbol = typeDefinition.symbol;\n typeDefSymbol.type = typeParamAnalyzer.createTypeParam(typeDefSymbol.type, typeDefSymbol.name);\n typeDefSymbol.flags |= Flags.TYPE_PARAM;\n break;\n } else if (attachment.annotationName.value.equals(Names.ANNOTATION_BUILTIN_SUBTYPE.value)) {\n \n BType type = symTable.getLangLibSubType(typeDefinition.name.value);\n typeDefinition.symbol.type = type;\n 
typeDefinition.symbol.flags |= type.tsymbol.flags;\n ((BTypeDefinitionSymbol) typeDefinition.symbol).referenceType.tsymbol.flags |= type.tsymbol.flags;\n ((BTypeDefinitionSymbol) typeDefinition.symbol).referenceType.referredType = type;\n typeDefinition.setBType(type);\n typeDefinition.typeNode.setBType(type);\n typeDefinition.isBuiltinTypeDef = true;\n break;\n }\n throw new IllegalStateException(\"Not supported annotation attachment at:\" + attachment.pos);\n }\n defineSymbol(typeDefinition.name.pos, typeDefinition.symbol);\n }\n\n \n \n \n \n private long getPublicFlagResetingMask(Set flagSet, BLangType typeNode) {\n boolean isAnonType =\n typeNode instanceof BLangStructureTypeNode && ((BLangStructureTypeNode) typeNode).isAnonymous;\n if (flagSet.contains(Flag.PUBLIC) || isAnonType) {\n return Long.MAX_VALUE;\n } else {\n return ~Flags.PUBLIC;\n }\n }\n\n @Override\n public void visit(BLangService serviceNode) {\n defineNode(serviceNode.serviceVariable, env);\n\n Name generatedServiceName = names.fromString(\"service$\" + serviceNode.serviceClass.symbol.name.value);\n BType type = serviceNode.serviceClass.typeRefs.isEmpty() ? 
null : serviceNode.serviceClass.typeRefs.get(0)\n .getBType();\n BServiceSymbol serviceSymbol = new BServiceSymbol((BClassSymbol) serviceNode.serviceClass.symbol,\n Flags.asMask(serviceNode.flagSet), generatedServiceName,\n env.enclPkg.symbol.pkgID, type, env.enclPkg.symbol,\n serviceNode.pos, SOURCE);\n serviceNode.symbol = serviceSymbol;\n\n if (!serviceNode.absoluteResourcePath.isEmpty()) {\n if (\"/\".equals(serviceNode.absoluteResourcePath.get(0).getValue())) {\n serviceSymbol.setAbsResourcePath(Collections.emptyList());\n } else {\n List list = new ArrayList<>(serviceNode.absoluteResourcePath.size());\n for (IdentifierNode identifierNode : serviceNode.absoluteResourcePath) {\n list.add(identifierNode.getValue());\n }\n serviceSymbol.setAbsResourcePath(list);\n }\n }\n\n if (serviceNode.serviceNameLiteral != null) {\n serviceSymbol.setAttachPointStringLiteral(serviceNode.serviceNameLiteral.value.toString());\n }\n\n env.scope.define(serviceSymbol.name, serviceSymbol);\n }\n\n @Override\n public void visit(BLangResourceFunction funcNode) {\n boolean validAttachedFunc = validateFuncReceiver(funcNode);\n\n if (PackageID.isLangLibPackageID(env.enclPkg.symbol.pkgID)) {\n funcNode.flagSet.add(Flag.LANG_LIB);\n }\n\n BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),\n getFuncSymbolName(funcNode), getFuncSymbolOriginalName(funcNode),\n env.enclPkg.symbol.pkgID, null, env.scope.owner,\n funcNode.hasBody(), funcNode.name.pos, SOURCE);\n funcSymbol.source = funcNode.pos.lineRange().filePath();\n funcSymbol.markdownDocumentation = getMarkdownDocAttachment(funcNode.markdownDocumentationAttachment);\n SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);\n defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);\n funcNode.setBType(funcSymbol.type);\n\n if (isDeprecated(funcNode.annAttachments)) {\n funcSymbol.flags |= Flags.DEPRECATED;\n }\n \n if (funcNode.receiver != null) {\n 
defineAttachedFunctions(funcNode, funcSymbol, invokableEnv, validAttachedFunc);\n }\n }\n\n @Override\n public void visit(BLangFunction funcNode) {\n boolean validAttachedFunc = validateFuncReceiver(funcNode);\n boolean remoteFlagSetOnNode = Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.REMOTE);\n\n if (!funcNode.attachedFunction && Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.PRIVATE)) {\n dlog.error(funcNode.pos, DiagnosticErrorCode.PRIVATE_FUNCTION_VISIBILITY, funcNode.name);\n }\n\n if (funcNode.receiver == null && !funcNode.attachedFunction && remoteFlagSetOnNode) {\n dlog.error(funcNode.pos, DiagnosticErrorCode.REMOTE_IN_NON_OBJECT_FUNCTION, funcNode.name.value);\n }\n\n if (PackageID.isLangLibPackageID(env.enclPkg.symbol.pkgID)) {\n funcNode.flagSet.add(Flag.LANG_LIB);\n }\n\n Location symbolPos = funcNode.flagSet.contains(Flag.LAMBDA) ?\n symTable.builtinPos : funcNode.name.pos;\n BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),\n getFuncSymbolName(funcNode),\n getFuncSymbolOriginalName(funcNode),\n env.enclPkg.symbol.pkgID, null, env.scope.owner,\n funcNode.hasBody(), symbolPos,\n getOrigin(funcNode.name.value));\n funcSymbol.source = funcNode.pos.lineRange().filePath();\n funcSymbol.markdownDocumentation = getMarkdownDocAttachment(funcNode.markdownDocumentationAttachment);\n SymbolEnv invokableEnv;\n NodeKind previousNodeKind = env.node.getKind();\n if (previousNodeKind == NodeKind.CLASS_DEFN) {\n invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope,\n fieldsRemovedEnv(env, ((BLangClassDefinition) env.node).fields));\n } else if (previousNodeKind == NodeKind.OBJECT_TYPE) {\n invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope,\n fieldsRemovedEnv(env, ((BLangObjectTypeNode) env.node).fields));\n } else {\n invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);\n }\n defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);\n 
funcNode.setBType(funcSymbol.type);\n\n \n if (Symbols.isFlagOn(funcSymbol.flags, Flags.LAMBDA)) {\n funcSymbol.origin = VIRTUAL;\n }\n\n if (isDeprecated(funcNode.annAttachments)) {\n funcSymbol.flags |= Flags.DEPRECATED;\n }\n \n if (funcNode.receiver != null) {\n defineAttachedFunctions(funcNode, funcSymbol, invokableEnv, validAttachedFunc);\n }\n }\n\n private SymbolEnv fieldsRemovedEnv(SymbolEnv currentEnv, List fields) {\n if (fields.isEmpty()) {\n return currentEnv;\n }\n Scope currentScope = currentEnv.scope;\n Scope newScope = new Scope(currentScope.owner);\n newScope.entries.putAll(currentScope.entries);\n Map entries = newScope.entries;\n for (BLangSimpleVariable field : fields) {\n entries.remove(Names.fromString(field.name.value));\n }\n SymbolEnv newEnv = new SymbolEnv(currentEnv.node, newScope);\n currentEnv.copyTo(newEnv, currentEnv.enclEnv);\n return newEnv;\n }\n\n private boolean isDeprecated(List annAttachments) {\n for (BLangAnnotationAttachment annotationAttachment : annAttachments) {\n if (annotationAttachment.annotationName.getValue().equals(DEPRECATION_ANNOTATION)) {\n return true;\n }\n }\n return false;\n }\n\n @Override\n public void visit(BLangConstant constant) {\n BType staticType;\n if (constant.typeNode != null) {\n staticType = symResolver.resolveTypeNode(constant.typeNode, env);\n if (staticType == symTable.noType) {\n constant.symbol = getConstantSymbol(constant);\n \n if (!this.unresolvedTypes.contains(constant)) {\n this.unresolvedTypes.add(constant);\n }\n return;\n }\n } else {\n staticType = symTable.semanticError;\n }\n BConstantSymbol constantSymbol = getConstantSymbol(constant);\n constant.symbol = constantSymbol;\n\n NodeKind nodeKind = constant.expr.getKind();\n if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {\n if (constant.typeNode != null) {\n BType referredType = Types.getReferredType(staticType);\n if (types.isValidLiteral((BLangLiteral) constant.expr, referredType)) {\n \n \n \n \n 
BLangFiniteTypeNode finiteType = (BLangFiniteTypeNode) constant.associatedTypeDefinition.typeNode;\n BLangExpression valueSpaceExpr = finiteType.valueSpace.iterator().next();\n valueSpaceExpr.setBType(referredType);\n defineNode(constant.associatedTypeDefinition, env);\n\n constantSymbol.type = constant.associatedTypeDefinition.symbol.type;\n constantSymbol.literalType = referredType;\n } else {\n \n \n defineNode(constant.associatedTypeDefinition, env);\n constantSymbol.type = staticType;\n constantSymbol.literalType = constant.expr.getBType();\n }\n } else {\n \n \n defineNode(constant.associatedTypeDefinition, env);\n constantSymbol.type = constant.associatedTypeDefinition.symbol.type;\n constantSymbol.literalType = constant.expr.getBType();\n }\n if (constantSymbol.type.tag != TypeTags.TYPEREFDESC) {\n constantSymbol.type.tsymbol.flags |= constant.associatedTypeDefinition.symbol.flags;\n }\n\n } else if (nodeKind == NodeKind.UNARY_EXPR && constant.typeNode == null &&\n types.isLiteralInUnaryAllowed((BLangUnaryExpr) constant.expr)) {\n \n \n \n\n BLangUnaryExpr unaryConstant = (BLangUnaryExpr) constant.expr;\n \n BLangNumericLiteral literal = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();\n Types.setValueOfNumericLiteral(literal, unaryConstant);\n literal.isConstant = true;\n literal.setBType(unaryConstant.expr.getBType());\n ((BLangFiniteTypeNode) constant.getAssociatedTypeDefinition().getTypeNode()).valueSpace.set(0, literal);\n\n defineNode(constant.associatedTypeDefinition, env);\n constantSymbol.type = constant.associatedTypeDefinition.symbol.type;\n constantSymbol.literalType = unaryConstant.expr.getBType();\n } else if (constant.typeNode != null) {\n constantSymbol.type = constantSymbol.literalType = staticType;\n }\n constantSymbol.markdownDocumentation = getMarkdownDocAttachment(constant.markdownDocumentationAttachment);\n if (isDeprecated(constant.annAttachments)) {\n constantSymbol.flags |= Flags.DEPRECATED;\n }\n \n if 
(!symResolver.checkForUniqueSymbol(constant.name.pos, env, constantSymbol)) {\n return;\n }\n\n if (constant.symbol.name == Names.IGNORE) {\n \n return;\n }\n \n env.scope.define(constantSymbol.name, constantSymbol);\n }\n\n private BConstantSymbol getConstantSymbol(BLangConstant constant) {\n \n Name name = names.fromIdNode(constant.name);\n PackageID pkgID = env.enclPkg.symbol.pkgID;\n return new BConstantSymbol(Flags.asMask(constant.flagSet), name, names.originalNameFromIdNode(constant.name),\n pkgID, symTable.semanticError, symTable.noType, env.scope.owner,\n constant.name.pos, getOrigin(name));\n }\n\n @Override\n public void visit(BLangSimpleVariable varNode) {\n \n if (varNode.getBType() == null) {\n if (varNode.typeNode != null) {\n varNode.setBType(symResolver.resolveTypeNode(varNode.typeNode, env));\n } else {\n varNode.setBType(symTable.noType);\n }\n }\n\n Name varName = names.fromIdNode(varNode.name);\n Name varOrigName = names.originalNameFromIdNode(varNode.name);\n if (varName == Names.IGNORE || varNode.symbol != null) {\n return;\n }\n\n BVarSymbol varSymbol = defineVarSymbol(varNode.name.pos, varNode.flagSet, varNode.getBType(), varName,\n varOrigName, env, varNode.internal);\n if (isDeprecated(varNode.annAttachments)) {\n varSymbol.flags |= Flags.DEPRECATED;\n }\n\n \n if (varSymbol.type == symTable.semanticError && varSymbol.state == DiagnosticState.VALID) {\n varSymbol.state = DiagnosticState.UNKNOWN_TYPE;\n }\n\n varSymbol.markdownDocumentation = getMarkdownDocAttachment(varNode.markdownDocumentationAttachment);\n varNode.symbol = varSymbol;\n if (varNode.symbol.type.tsymbol != null && Symbols.isFlagOn(varNode.symbol.type.tsymbol.flags, Flags.CLIENT)) {\n varSymbol.tag = SymTag.ENDPOINT;\n }\n\n if (Types.getReferredType(varSymbol.type).tag == TypeTags.FUTURE\n && ((BFutureType) Types.getReferredType(varSymbol.type)).workerDerivative) {\n Iterator lambdaFunctions = env.enclPkg.lambdaFunctions.iterator();\n while (lambdaFunctions.hasNext()) {\n 
BLangLambdaFunction lambdaFunction = lambdaFunctions.next();\n \n \n \n BLangInvokableNode enclInvokable = lambdaFunction.capturedClosureEnv.enclInvokable;\n if (lambdaFunctions.hasNext() && enclInvokable != null && varSymbol.owner == enclInvokable.symbol) {\n lambdaFunction.capturedClosureEnv.scope.define(varSymbol.name, varSymbol);\n }\n }\n }\n\n if (Types.getReferredType(varSymbol.type).tag == TypeTags.INVOKABLE) {\n BInvokableSymbol symbol = (BInvokableSymbol) varSymbol;\n BTypeSymbol typeSymbol = Types.getReferredType(varSymbol.type).tsymbol;\n BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) typeSymbol;\n symbol.params = tsymbol.params == null ? null : new ArrayList<>(tsymbol.params);\n symbol.restParam = tsymbol.restParam;\n symbol.retType = tsymbol.returnType;\n }\n\n if ((env.scope.owner.tag & SymTag.RECORD) != SymTag.RECORD && !varNode.flagSet.contains(Flag.NEVER_ALLOWED) &&\n (env.scope.owner.tag & SymTag.TUPLE_TYPE) != SymTag.TUPLE_TYPE &&\n types.isNeverTypeOrStructureTypeWithARequiredNeverMember(varSymbol.type)) {\n \n \n \n if (varNode.flagSet.contains(Flag.REQUIRED_PARAM) || varNode.flagSet.contains(Flag.DEFAULTABLE_PARAM)) {\n dlog.error(varNode.pos, DiagnosticErrorCode.NEVER_TYPE_NOT_ALLOWED_FOR_REQUIRED_DEFAULTABLE_PARAMS);\n } else {\n if ((env.scope.owner.tag & SymTag.OBJECT) == SymTag.OBJECT) {\n dlog.error(varNode.pos, DiagnosticErrorCode.NEVER_TYPED_OBJECT_FIELD_NOT_ALLOWED);\n } else {\n dlog.error(varNode.pos, DiagnosticErrorCode.NEVER_TYPED_VAR_DEF_NOT_ALLOWED);\n }\n }\n }\n }\n\n @Override\n public void visit(BLangTupleVariable varNode) {\n if (varNode.isDeclaredWithVar) {\n varNode.symbol =\n defineVarSymbol(varNode.pos, varNode.flagSet, symTable.noType,\n names.fromString(anonymousModelHelper.getNextTupleVarKey(env.enclPkg.packageID)),\n env, true);\n \n List memberVariables = new ArrayList<>(varNode.memberVariables);\n if (varNode.restVariable != null) {\n memberVariables.add(varNode.restVariable);\n }\n for (int i = 0; i < 
memberVariables.size(); i++) {\n BLangVariable memberVar = memberVariables.get(i);\n memberVar.isDeclaredWithVar = true;\n defineNode(memberVar, env);\n }\n return;\n }\n if (varNode.getBType() == null) {\n varNode.setBType(symResolver.resolveTypeNode(varNode.typeNode, env));\n }\n \n if (!(checkTypeAndVarCountConsistency(varNode, env))) {\n varNode.setBType(symTable.semanticError);\n return;\n }\n }\n\n boolean checkTypeAndVarCountConsistency(BLangTupleVariable var, SymbolEnv env) {\n if (var.symbol == null) {\n Name varName = names.fromString(anonymousModelHelper.getNextTupleVarKey(env.enclPkg.packageID));\n var.symbol = defineVarSymbol(var.pos, var.flagSet, var.getBType(), varName, env, true);\n }\n\n return checkTypeAndVarCountConsistency(var, null, env);\n }\n\n boolean checkTypeAndVarCountConsistency(BLangTupleVariable varNode, BTupleType tupleTypeNode,\n SymbolEnv env) {\n if (tupleTypeNode == null) {\n /*\n This switch block will resolve the tuple type of the tuple variable.\n For example consider the following - [int, string]|[boolean, float] [a, b] = foo();\n Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows:\n Type of 'a' will be (int | boolean) while the type of 'b' will be (string | float).\n Consider anydata (a, b) = foo();\n Here, the type of 'a'and type of 'b' will be both anydata.\n */\n BType bType = varNode.getBType();\n BType referredType = Types.getEffectiveType(Types.getReferredType(bType));\n switch (referredType.tag) {\n case TypeTags.UNION:\n Set unionType = types.expandAndGetMemberTypesRecursive(referredType);\n List possibleTypes = new ArrayList<>();\n for (BType type : unionType) {\n if (!(TypeTags.TUPLE == type.tag &&\n checkMemVarCountMatchWithMemTypeCount(varNode, (BTupleType) type)) &&\n TypeTags.ANY != type.tag && TypeTags.ANYDATA != type.tag &&\n (TypeTags.ARRAY != type.tag || ((BArrayType) type).state == BArrayState.OPEN)) {\n continue;\n }\n possibleTypes.add(type);\n }\n if 
(possibleTypes.isEmpty()) {\n \n if (varNode.isDeclaredWithVar) {\n dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_LIST_BINDING_PATTERN);\n return false;\n }\n dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_LIST_BINDING_PATTERN_DECL, bType);\n return false;\n }\n\n if (possibleTypes.size() > 1) {\n List members = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n LinkedHashSet memberTypes = new LinkedHashSet<>();\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.TUPLE) {\n memberTypes.add(((BTupleType) possibleType).getTupleTypes().get(i));\n } else if (possibleType.tag == TypeTags.ARRAY) {\n memberTypes.add(((BArrayType) possibleType).eType);\n } else {\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(referredType);\n members.add(new BTupleMember(referredType, varSymbol));\n }\n }\n\n if (memberTypes.size() > 1) {\n BType type = BUnionType.create(null, memberTypes);\n BVarSymbol varSymbol = new BVarSymbol(type.flags, null, null, type, null,\n null, null);\n members.add(new BTupleMember(type, varSymbol));\n } else {\n memberTypes.forEach(m ->\n members.add(new BTupleMember(m,\n Symbols.createVarSymbolForTupleMember(m))));\n }\n }\n tupleTypeNode = new BTupleType(members);\n tupleTypeNode.restType = getPossibleRestTypeForUnion(varNode, possibleTypes);\n break;\n }\n\n if (possibleTypes.get(0).tag == TypeTags.TUPLE) {\n tupleTypeNode = (BTupleType) possibleTypes.get(0);\n tupleTypeNode.restType = getPossibleRestTypeForUnion(varNode, possibleTypes);\n break;\n }\n\n List members = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n BType type = possibleTypes.get(0);\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(type);\n members.add(new BTupleMember(type, varSymbol));\n }\n tupleTypeNode = new BTupleType(members);\n tupleTypeNode.restType = getPossibleRestTypeForUnion(varNode, possibleTypes);\n break;\n case TypeTags.ANY:\n case 
TypeTags.ANYDATA:\n List memberTupleTypes = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(referredType);\n memberTupleTypes.add(new BTupleMember(referredType, varSymbol));\n }\n tupleTypeNode = new BTupleType(memberTupleTypes);\n if (varNode.restVariable != null) {\n tupleTypeNode.restType = referredType;\n }\n break;\n case TypeTags.TUPLE:\n tupleTypeNode = (BTupleType) referredType;\n break;\n case TypeTags.ARRAY:\n List tupleTypes = new ArrayList<>();\n BArrayType arrayType = (BArrayType) referredType;\n tupleTypeNode = new BTupleType(tupleTypes);\n BType eType = arrayType.eType;\n for (int i = 0; i < arrayType.size; i++) {\n BType type = arrayType.eType;\n BVarSymbol varSymbol = Symbols.createVarSymbolForTupleMember(type);\n tupleTypes.add(new BTupleMember(type, varSymbol));\n\n }\n if (varNode.restVariable != null) {\n tupleTypeNode.restType = eType;\n }\n break;\n default:\n dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_LIST_BINDING_PATTERN_DECL, bType);\n return false;\n }\n }\n\n if (!checkMemVarCountMatchWithMemTypeCount(varNode, tupleTypeNode)) {\n dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_LIST_BINDING_PATTERN);\n return false;\n }\n\n int ignoredCount = 0;\n int i = 0;\n BType type;\n List tupleMemberTypes = tupleTypeNode.getTupleTypes();\n for (BLangVariable var : varNode.memberVariables) {\n type = tupleMemberTypes.get(i);\n i++;\n if (var.getKind() == NodeKind.VARIABLE) {\n \n BLangSimpleVariable simpleVar = (BLangSimpleVariable) var;\n Name varName = names.fromIdNode(simpleVar.name);\n if (varName == Names.IGNORE) {\n ignoredCount++;\n simpleVar.setBType(symTable.anyType);\n if (!types.isAssignable(type, symTable.anyType)) {\n dlog.error(varNode.pos, DiagnosticErrorCode.WILD_CARD_BINDING_PATTERN_ONLY_SUPPORTS_TYPE_ANY);\n }\n continue;\n }\n }\n defineMemberNode(var, env, type);\n }\n\n if (varNode.restVariable != null) {\n List 
tupleMembers = tupleTypeNode.getMembers();\n int tupleNodeMemCount = tupleMembers.size();\n int varNodeMemCount = varNode.memberVariables.size();\n BType restType = tupleTypeNode.restType;\n List members = new ArrayList<>();\n if (varNodeMemCount < tupleNodeMemCount) {\n for (int j = varNodeMemCount; j < tupleNodeMemCount; j++) {\n members.add(tupleMembers.get(j));\n }\n }\n if (!members.isEmpty()) {\n BTupleType restTupleType = new BTupleType(members);\n restTupleType.restType = restType;\n type = restTupleType;\n } else {\n type = restType != null ? new BArrayType(restType) : null;\n }\n defineMemberNode(varNode.restVariable, env, type);\n }\n\n if (!varNode.memberVariables.isEmpty() && ignoredCount == varNode.memberVariables.size()\n && varNode.restVariable == null) {\n dlog.error(varNode.pos, DiagnosticErrorCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);\n return false;\n }\n return true;\n }\n\n private BType getPossibleRestTypeForUnion(BLangTupleVariable varNode, List possibleTypes) {\n if (varNode.restVariable == null) {\n return null;\n }\n LinkedHashSet memberRestTypes = new LinkedHashSet<>();\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.TUPLE) {\n BTupleType tupleType = (BTupleType) possibleType;\n List tupleMemberTypes = tupleType.getTupleTypes();\n for (int j = varNode.memberVariables.size(); j < tupleMemberTypes.size();\n j++) {\n memberRestTypes.add(tupleMemberTypes.get(j));\n }\n if (tupleType.restType != null) {\n memberRestTypes.add(tupleType.restType);\n }\n } else if (possibleType.tag == TypeTags.ARRAY) {\n memberRestTypes.add(((BArrayType) possibleType).eType);\n } else {\n memberRestTypes.add(possibleType);\n }\n }\n if (!memberRestTypes.isEmpty()) {\n return memberRestTypes.size() > 1 ? 
BUnionType.create(null, memberRestTypes) :\n memberRestTypes.iterator().next();\n } else {\n return varNode.getBType();\n }\n }\n\n private boolean checkMemVarCountMatchWithMemTypeCount(BLangTupleVariable varNode, BTupleType tupleTypeNode) {\n int memberVarsSize = varNode.memberVariables.size();\n BLangVariable restVariable = varNode.restVariable;\n int tupleTypesSize = tupleTypeNode.getMembers().size();\n if (memberVarsSize > tupleTypesSize) {\n return false;\n }\n return restVariable != null ||\n (tupleTypesSize == memberVarsSize && tupleTypeNode.restType == null);\n }\n\n @Override\n public void visit(BLangRecordVariable recordVar) {\n if (recordVar.isDeclaredWithVar) {\n recordVar.symbol =\n defineVarSymbol(recordVar.pos, recordVar.flagSet, symTable.noType,\n names.fromString(anonymousModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),\n env, true);\n \n for (BLangRecordVariable.BLangRecordVariableKeyValue variable : recordVar.variableList) {\n BLangVariable value = variable.getValue();\n value.isDeclaredWithVar = true;\n defineNode(value, env);\n }\n\n BLangSimpleVariable restParam = (BLangSimpleVariable) recordVar.restParam;\n if (restParam != null) {\n restParam.isDeclaredWithVar = true;\n defineNode(restParam, env);\n }\n return;\n }\n\n if (recordVar.getBType() == null) {\n recordVar.setBType(symResolver.resolveTypeNode(recordVar.typeNode, env));\n }\n \n if (!(symbolEnterAndValidateRecordVariable(recordVar, env))) {\n recordVar.setBType(symTable.semanticError);\n return;\n }\n }\n\n boolean symbolEnterAndValidateRecordVariable(BLangRecordVariable var, SymbolEnv env) {\n if (var.symbol == null) {\n Name varName = names.fromString(anonymousModelHelper.getNextRecordVarKey(env.enclPkg.packageID));\n var.symbol = defineVarSymbol(var.pos, var.flagSet, var.getBType(), varName, env, true);\n }\n\n return validateRecordVariable(var, env);\n }\n\n boolean validateRecordVariable(BLangRecordVariable recordVar, SymbolEnv env) {\n BType recordType = 
Types.getEffectiveType(Types.getReferredType(recordVar.getBType()));\n BRecordType recordVarType;\n /*\n This switch block will resolve the record type of the record variable.\n For example consider the following -\n type Foo record {int a, boolean b};\n type Bar record {string a, float b};\n Foo|Bar {a, b} = foo();\n Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows:\n Type of 'a' will be a union of the types of field 'a' in both Foo and Bar.\n i.e. type of 'a' is (int | string) and type of 'b' is (boolean | float).\n Consider anydata {a, b} = foo();\n Here, the type of 'a'and type of 'b' will be both anydata.\n */\n switch (recordType.tag) {\n case TypeTags.UNION:\n BUnionType unionType = (BUnionType) recordType;\n Set bTypes = types.expandAndGetMemberTypesRecursive(unionType);\n List possibleTypes = bTypes.stream()\n .filter(rec -> doesRecordContainKeys(rec, recordVar.variableList, recordVar.restParam != null))\n .collect(Collectors.toList());\n\n if (possibleTypes.isEmpty()) {\n dlog.error(recordVar.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, recordType);\n return false;\n }\n\n if (possibleTypes.size() > 1) {\n recordVarType = populatePossibleFields(recordVar, possibleTypes, env);\n break;\n }\n\n if (possibleTypes.get(0).tag == TypeTags.RECORD) {\n recordVarType = (BRecordType) possibleTypes.get(0);\n break;\n }\n\n if (possibleTypes.get(0).tag == TypeTags.MAP) {\n recordVarType = createSameTypedFieldsRecordType(recordVar,\n ((BMapType) possibleTypes.get(0)).constraint, env);\n break;\n }\n\n recordVarType = createSameTypedFieldsRecordType(recordVar, possibleTypes.get(0), env);\n break;\n case TypeTags.RECORD:\n recordVarType = (BRecordType) recordType;\n break;\n case TypeTags.MAP:\n recordVarType = createSameTypedFieldsRecordType(recordVar,\n ((BMapType) recordType).constraint, env);\n break;\n default:\n dlog.error(recordVar.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, recordType);\n return 
false;\n }\n\n return defineVariableList(recordVar, recordVarType, env);\n }\n\n private BRecordType populatePossibleFields(BLangRecordVariable recordVar, List possibleTypes,\n SymbolEnv env) {\n BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS,\n names.fromString(ANONYMOUS_RECORD_NAME),\n env.enclPkg.symbol.pkgID, null,\n env.scope.owner, recordVar.pos, SOURCE);\n BRecordType recordVarType = (BRecordType) symTable.recordType;\n\n List mappedFields = recordVar.variableList.stream().map(varKeyValue -> varKeyValue.getKey().value)\n .collect(Collectors.toList());\n LinkedHashMap fields = populateAndGetPossibleFieldsForRecVar(recordVar.pos, possibleTypes,\n mappedFields, recordSymbol, env);\n\n if (recordVar.restParam != null) {\n recordVarType.restFieldType = createRestFieldFromPossibleTypes(recordVar.pos, env, possibleTypes,\n fields, recordSymbol);\n }\n recordVarType.tsymbol = recordSymbol;\n recordVarType.fields = fields;\n recordSymbol.type = recordVarType;\n return recordVarType;\n }\n\n private BType createRestFieldFromPossibleTypes(Location pos, SymbolEnv env, List possibleTypes,\n LinkedHashMap boundedFields, BSymbol recordSymbol) {\n LinkedHashSet restFieldMemberTypes = new LinkedHashSet<>();\n List> possibleRecordFieldMapList = new ArrayList<>();\n\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.RECORD) {\n BRecordType recordType = (BRecordType) possibleType;\n possibleRecordFieldMapList.add(recordType.fields);\n restFieldMemberTypes.add(recordType.restFieldType);\n } else if (possibleType.tag == TypeTags.MAP) {\n restFieldMemberTypes.add(((BMapType) possibleType).constraint);\n } else {\n restFieldMemberTypes.add(possibleType);\n }\n }\n\n BType restFieldType = restFieldMemberTypes.size() > 1 ?\n BUnionType.create(null, restFieldMemberTypes) :\n restFieldMemberTypes.iterator().next();\n\n if (!possibleRecordFieldMapList.isEmpty()) {\n List intersectionFields = 
getIntersectionFields(possibleRecordFieldMapList);\n LinkedHashMap unmappedMembers = populateAndGetPossibleFieldsForRecVar(pos,\n possibleTypes, intersectionFields, recordSymbol, env);\n\n LinkedHashMap optionalFields = new LinkedHashMap<>() {{\n possibleRecordFieldMapList.forEach(map -> putAll(map));\n }};\n\n intersectionFields.forEach(optionalFields::remove);\n boundedFields.keySet().forEach(unmappedMembers::remove);\n\n for (BField field : optionalFields.values()) {\n field.symbol.flags = setSymbolAsOptional(field.symbol.flags);\n }\n unmappedMembers.putAll(optionalFields);\n\n BRecordType restRecord = new BRecordType(null);\n restRecord.fields = unmappedMembers;\n restRecord.restFieldType = restFieldType;\n restFieldType = restRecord;\n }\n\n return restFieldType;\n }\n\n private List getIntersectionFields(List> fieldList) {\n LinkedHashMap intersectionMap = fieldList.get(0);\n HashSet intersectionSet = new HashSet<>(intersectionMap.keySet());\n\n for (int i = 1; i < fieldList.size(); i++) {\n LinkedHashMap map = fieldList.get(i);\n HashSet set = new HashSet<>(map.keySet());\n intersectionSet.retainAll(set);\n }\n\n return new ArrayList<>(intersectionSet);\n }\n\n /**\n * This method will resolve field types based on a list of possible types.\n * When a record variable has multiple possible assignable types, each field will be a union of the relevant\n * possible types field type.\n *\n * @param pos line number information of the source file\n * @param possibleTypes list of possible types\n * @param fieldNames fields types to be resolved\n * @param recordSymbol symbol of the record type to be used in creating fields\n * @param env environment to define the symbol\n * @return the list of fields\n */\n private LinkedHashMap populateAndGetPossibleFieldsForRecVar(Location pos, List possibleTypes,\n List fieldNames,\n BSymbol recordSymbol, SymbolEnv env) {\n LinkedHashMap fields = new LinkedHashMap<>();\n for (String fieldName : fieldNames) {\n LinkedHashSet 
memberTypes = new LinkedHashSet<>();\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.RECORD) {\n BRecordType possibleRecordType = (BRecordType) possibleType;\n\n if (possibleRecordType.fields.containsKey(fieldName)) {\n BField field = possibleRecordType.fields.get(fieldName);\n if (Symbols.isOptional(field.symbol)) {\n memberTypes.add(symTable.nilType);\n }\n memberTypes.add(field.type);\n } else {\n memberTypes.add(possibleRecordType.restFieldType);\n memberTypes.add(symTable.nilType);\n }\n\n continue;\n }\n\n if (possibleType.tag == TypeTags.MAP) {\n BMapType possibleMapType = (BMapType) possibleType;\n memberTypes.add(possibleMapType.constraint);\n continue;\n }\n memberTypes.add(possibleType); \n }\n\n BType fieldType = memberTypes.size() > 1 ?\n BUnionType.create(null, memberTypes) : memberTypes.iterator().next();\n BField field = new BField(names.fromString(fieldName), pos,\n new BVarSymbol(0, names.fromString(fieldName), env.enclPkg.symbol.pkgID,\n fieldType, recordSymbol, pos, SOURCE));\n fields.put(field.name.value, field);\n }\n return fields;\n }" }, { "comment": "@hemikak WDYT of using `anyMatch(predicate)` instead? 
IMO it'll be more convenient for the above context, if we don't need the exact count.", "method_body": "public static CompileResult compile(String sourceFilePath) {\n Path sourcePath = Paths.get(sourceFilePath);\n String sourceFileName = sourcePath.getFileName().toString();\n Path sourceRoot = testSourcesDirectory.resolve(sourcePath.getParent());\n\n Path projectPath = Paths.get(sourceRoot.toString(), sourceFileName);\n Project project = ProjectLoader.loadProject(projectPath);\n\n Package currentPackage = project.currentPackage();\n PackageCompilation packageCompilation = currentPackage.getCompilation();\n JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(packageCompilation, JdkVersion.JAVA_11);\n long errorCount = jBallerinaBackend.diagnostics().stream().filter(diagnostic ->\n diagnostic.diagnosticInfo().severity() == DiagnosticSeverity.ERROR).count();\n if (errorCount > 0) {\n return new CompileResult(currentPackage, jBallerinaBackend.diagnostics());\n }\n\n Path jarTargetPath = jarTargetPath(currentPackage);\n jBallerinaBackend.emit(JBallerinaBackend.OutputType.JAR, jarTargetPath);\n\n CompileResult compileResult = new CompileResult(currentPackage, jBallerinaBackend.diagnostics(), jarTargetPath);\n invokeModuleInit(compileResult);\n return compileResult;\n }", "target_code": "long errorCount = jBallerinaBackend.diagnostics().stream().filter(diagnostic ->", "method_body_after": "public static CompileResult compile(String sourceFilePath) {\n Path sourcePath = Paths.get(sourceFilePath);\n String sourceFileName = sourcePath.getFileName().toString();\n Path sourceRoot = testSourcesDirectory.resolve(sourcePath.getParent());\n\n Path projectPath = Paths.get(sourceRoot.toString(), sourceFileName);\n Project project = ProjectLoader.loadProject(projectPath);\n\n Package currentPackage = project.currentPackage();\n PackageCompilation packageCompilation = currentPackage.getCompilation();\n JBallerinaBackend jBallerinaBackend = 
JBallerinaBackend.from(packageCompilation, JdkVersion.JAVA_11);\n boolean containErrors = jBallerinaBackend.diagnostics().stream()\n .anyMatch(diagnostic -> diagnostic.diagnosticInfo().severity() == DiagnosticSeverity.ERROR);\n if (containErrors) {\n return new CompileResult(currentPackage, jBallerinaBackend.diagnostics());\n }\n\n Path jarTargetPath = jarTargetPath(currentPackage);\n jBallerinaBackend.emit(JBallerinaBackend.OutputType.JAR, jarTargetPath);\n\n CompileResult compileResult = new CompileResult(currentPackage, jBallerinaBackend.diagnostics(), jarTargetPath);\n invokeModuleInit(compileResult);\n return compileResult;\n }", "context_before": "class BCompileUtil {\n\n private static Path testSourcesDirectory = Paths.get(\"src/test/resources\").toAbsolutePath().normalize();\n private static Path testBuildDirectory = Paths.get(\"build\").toAbsolutePath().normalize();\n\n \n\n public static CompileResult compileAndCacheBalo(String sourceFilePath) {\n Path sourcePath = Paths.get(sourceFilePath);\n String sourceFileName = sourcePath.getFileName().toString();\n Path sourceRoot = testSourcesDirectory.resolve(sourcePath.getParent());\n\n Path projectPath = Paths.get(sourceRoot.toString(), sourceFileName);\n Project project = ProjectLoader.loadProject(projectPath);\n\n if (isSingleFileProject(project)) {\n throw new RuntimeException(\"single file project is given for compilation at \" + project.sourceRoot());\n }\n\n Package currentPackage = project.currentPackage();\n PackageCompilation packageCompilation = currentPackage.getCompilation();\n JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(packageCompilation, JdkVersion.JAVA_11);\n long errorCount = jBallerinaBackend.diagnostics().stream().filter(diagnostic ->\n diagnostic.diagnosticInfo().severity() == DiagnosticSeverity.ERROR).count();\n if (errorCount > 0) {\n return new CompileResult(currentPackage, jBallerinaBackend.diagnostics());\n }\n\n Path jarCachePath = jarCachePath(currentPackage);\n 
jBallerinaBackend.emit(JBallerinaBackend.OutputType.JAR, jarCachePath);\n\n Path birCachePath = birCachePath(currentPackage);\n jBallerinaBackend.emit(JBallerinaBackend.OutputType.BIR, birCachePath);\n\n Path baloCachePath = baloCachePath(currentPackage);\n jBallerinaBackend.emit(JBallerinaBackend.OutputType.BALO, baloCachePath);\n\n CompileResult compileResult = new CompileResult(currentPackage, jBallerinaBackend.diagnostics(), jarCachePath);\n invokeModuleInit(compileResult);\n return compileResult;\n }\n\n private static void invokeModuleInit(CompileResult compileResult) {\n if (compileResult.getDiagnostics().length > 0) {\n return;\n }\n\n try {\n BRunUtil.runInit(compileResult);\n } catch (ClassNotFoundException e) {\n throw new RuntimeException(\"error while invoking init method of \" + compileResult.projectSourceRoot(), e);\n }\n }\n\n private static Path jarTargetPath(Package pkg) {\n try {\n Target target = new Target(testBuildDirectory);\n Path jarTargetPath = target.getJarCachePath();\n\n if (isSingleFileProject(pkg.project())) {\n Module defaultModule = pkg.getDefaultModule();\n DocumentId documentId = defaultModule.documentIds().iterator().next();\n String documentName = defaultModule.document(documentId).name();\n String executableName = FileUtils.geFileNameWithoutExtension(Paths.get(documentName));\n if (executableName == null) {\n throw new RuntimeException(\"cannot identify executable name for \" + defaultModule.moduleName());\n }\n jarTargetPath = jarTargetPath.resolve(executableName).toAbsolutePath().normalize();\n } else {\n jarTargetPath = jarTargetPath.resolve(pkg.packageOrg().toString()).resolve(pkg.packageName().value()).\n resolve(pkg.packageVersion().version().toString());\n }\n\n Files.createDirectories(jarTargetPath);\n return jarTargetPath;\n } catch (IOException e) {\n throw new RuntimeException(\"error while creating the jar cache directory at \" + testBuildDirectory, e);\n }\n }\n\n private static boolean isSingleFileProject(Project 
project) {\n return project instanceof SingleFileProject;\n }\n\n private static Path jarCachePath(Package pkg) {\n try {\n Path cache = cachePathForPackage(pkg);\n Path jarCache = cache.resolve(\"jar\");\n Files.createDirectories(jarCache);\n return jarCache;\n } catch (IOException e) {\n throw new RuntimeException(\"error while creating the jar cache directory at \" + testBuildDirectory, e);\n }\n }\n\n private static Path birCachePath(Package pkg) {\n try {\n Path cache = cachePathForPackage(pkg);\n Path birCache = cache.resolve(\"bir\");\n Files.createDirectories(birCache);\n\n return birCache;\n } catch (IOException e) {\n throw new RuntimeException(\"error while creating the bir cache directory at \" + testBuildDirectory, e);\n }\n }\n\n private static Path cachePathForPackage(Package pkg) throws IOException {\n Path distributionCache = testBuildDirectory.resolve(DIST_CACHE_DIRECTORY);\n Path cache = distributionCache.resolve(\"cache\")\n .resolve(pkg.packageOrg().toString())\n .resolve(pkg.packageName().value())\n .resolve(pkg.packageVersion().version().toString());\n Files.createDirectories(cache);\n\n return cache;\n }\n\n private static Path baloCachePath(Package pkg) {\n try {\n Path distributionCache = testBuildDirectory.resolve(DIST_CACHE_DIRECTORY);\n Path balos = distributionCache.resolve(\"balo\")\n .resolve(pkg.packageOrg().toString())\n .resolve(pkg.packageName().value())\n .resolve(pkg.packageVersion().version().toString());\n Files.createDirectories(balos);\n\n String baloName = ProjectUtils.getBaloName(pkg);\n return balos.resolve(baloName);\n } catch (IOException e) {\n throw new RuntimeException(\"error while creating the balo distribution cache directory at \" +\n testBuildDirectory, e);\n }\n }\n}", "context_after": "class BCompileUtil {\n\n private static Path testSourcesDirectory = Paths.get(\"src/test/resources\").toAbsolutePath().normalize();\n private static Path testBuildDirectory = 
Paths.get(\"build\").toAbsolutePath().normalize();\n\n \n\n public static CompileResult compileAndCacheBalo(String sourceFilePath) {\n Path sourcePath = Paths.get(sourceFilePath);\n String sourceFileName = sourcePath.getFileName().toString();\n Path sourceRoot = testSourcesDirectory.resolve(sourcePath.getParent());\n\n Path projectPath = Paths.get(sourceRoot.toString(), sourceFileName);\n Project project = ProjectLoader.loadProject(projectPath);\n\n if (isSingleFileProject(project)) {\n throw new RuntimeException(\"single file project is given for compilation at \" + project.sourceRoot());\n }\n\n Package currentPackage = project.currentPackage();\n PackageCompilation packageCompilation = currentPackage.getCompilation();\n JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(packageCompilation, JdkVersion.JAVA_11);\n boolean containErrors = jBallerinaBackend.diagnostics().stream()\n .anyMatch(diagnostic -> diagnostic.diagnosticInfo().severity() == DiagnosticSeverity.ERROR);\n if (containErrors) {\n return new CompileResult(currentPackage, jBallerinaBackend.diagnostics());\n }\n\n Path jarCachePath = jarCachePath(currentPackage);\n jBallerinaBackend.emit(JBallerinaBackend.OutputType.JAR, jarCachePath);\n\n Path birCachePath = birCachePath(currentPackage);\n jBallerinaBackend.emit(JBallerinaBackend.OutputType.BIR, birCachePath);\n\n Path baloCachePath = baloCachePath(currentPackage);\n jBallerinaBackend.emit(JBallerinaBackend.OutputType.BALO, baloCachePath);\n\n CompileResult compileResult = new CompileResult(currentPackage, jBallerinaBackend.diagnostics(), jarCachePath);\n invokeModuleInit(compileResult);\n return compileResult;\n }\n\n private static void invokeModuleInit(CompileResult compileResult) {\n if (compileResult.getDiagnostics().length > 0) {\n return;\n }\n\n try {\n BRunUtil.runInit(compileResult);\n } catch (ClassNotFoundException e) {\n throw new RuntimeException(\"error while invoking init method of \" + compileResult.projectSourceRoot(), 
e);\n }\n }\n\n private static Path jarTargetPath(Package pkg) {\n try {\n Target target = new Target(testBuildDirectory);\n Path jarTargetPath = target.getJarCachePath();\n\n if (isSingleFileProject(pkg.project())) {\n Module defaultModule = pkg.getDefaultModule();\n DocumentId documentId = defaultModule.documentIds().iterator().next();\n String documentName = defaultModule.document(documentId).name();\n String executableName = FileUtils.geFileNameWithoutExtension(Paths.get(documentName));\n if (executableName == null) {\n throw new RuntimeException(\"cannot identify executable name for \" + defaultModule.moduleName());\n }\n jarTargetPath = jarTargetPath.resolve(executableName).toAbsolutePath().normalize();\n } else {\n jarTargetPath = jarTargetPath.resolve(pkg.packageOrg().toString()).resolve(pkg.packageName().value()).\n resolve(pkg.packageVersion().version().toString());\n }\n\n Files.createDirectories(jarTargetPath);\n return jarTargetPath;\n } catch (IOException e) {\n throw new RuntimeException(\"error while creating the jar cache directory at \" + testBuildDirectory, e);\n }\n }\n\n private static boolean isSingleFileProject(Project project) {\n return project instanceof SingleFileProject;\n }\n\n private static Path jarCachePath(Package pkg) {\n try {\n Path cache = cachePathForPackage(pkg);\n Path jarCache = cache.resolve(\"jar\");\n Files.createDirectories(jarCache);\n return jarCache;\n } catch (IOException e) {\n throw new RuntimeException(\"error while creating the jar cache directory at \" + testBuildDirectory, e);\n }\n }\n\n private static Path birCachePath(Package pkg) {\n try {\n Path cache = cachePathForPackage(pkg);\n Path birCache = cache.resolve(\"bir\");\n Files.createDirectories(birCache);\n\n return birCache;\n } catch (IOException e) {\n throw new RuntimeException(\"error while creating the bir cache directory at \" + testBuildDirectory, e);\n }\n }\n\n private static Path cachePathForPackage(Package pkg) throws IOException {\n Path 
distributionCache = testBuildDirectory.resolve(DIST_CACHE_DIRECTORY);\n Path cache = distributionCache.resolve(\"cache\")\n .resolve(pkg.packageOrg().toString())\n .resolve(pkg.packageName().value())\n .resolve(pkg.packageVersion().version().toString());\n Files.createDirectories(cache);\n\n return cache;\n }\n\n private static Path baloCachePath(Package pkg) {\n try {\n Path distributionCache = testBuildDirectory.resolve(DIST_CACHE_DIRECTORY);\n Path balos = distributionCache.resolve(\"balo\")\n .resolve(pkg.packageOrg().toString())\n .resolve(pkg.packageName().value())\n .resolve(pkg.packageVersion().version().toString());\n Files.createDirectories(balos);\n\n String baloName = ProjectUtils.getBaloName(pkg);\n return balos.resolve(baloName);\n } catch (IOException e) {\n throw new RuntimeException(\"error while creating the balo distribution cache directory at \" +\n testBuildDirectory, e);\n }\n }\n}" }, { "comment": "Shouldn't this be an AND expression here? Since both the `namespacePrefix` and the `asKeyword` should not be null.", "method_body": "public XMLNamespaceDeclarationNode transform(XMLNamespaceDeclarationNode xMLNamespaceDeclarationNode) {\n Token xmlnsKeyword = getToken(xMLNamespaceDeclarationNode.xmlnsKeyword());\n ExpressionNode namespaceuri = this.modifyNode(xMLNamespaceDeclarationNode.namespaceuri());\n Token asKeyword = getToken(xMLNamespaceDeclarationNode.asKeyword().orElse(null));\n IdentifierToken namespacePrefix = this.modifyNode(xMLNamespaceDeclarationNode.namespacePrefix().orElse(null));\n Token semicolonToken = getToken(xMLNamespaceDeclarationNode.semicolonToken());\n if (asKeyword != null || namespacePrefix != null) {\n xMLNamespaceDeclarationNode = xMLNamespaceDeclarationNode.modify()\n .withNamespacePrefix(namespacePrefix)\n .withAsKeyword(formatToken(asKeyword, 1, 1, 0, 0))\n .apply();\n }\n return xMLNamespaceDeclarationNode.modify()\n .withNamespaceuri(namespaceuri)\n .withXmlnsKeyword(formatToken(xmlnsKeyword, 3, 1, 0, 0))\n 
.withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1))\n .apply();\n }", "target_code": "if (asKeyword != null || namespacePrefix != null) {", "method_body_after": "public XMLNamespaceDeclarationNode transform(XMLNamespaceDeclarationNode xMLNamespaceDeclarationNode) {\n Token xmlnsKeyword = getToken(xMLNamespaceDeclarationNode.xmlnsKeyword());\n ExpressionNode namespaceuri = this.modifyNode(xMLNamespaceDeclarationNode.namespaceuri());\n Token asKeyword = getToken(xMLNamespaceDeclarationNode.asKeyword().orElse(null));\n IdentifierToken namespacePrefix = this.modifyNode(xMLNamespaceDeclarationNode.namespacePrefix().orElse(null));\n Token semicolonToken = getToken(xMLNamespaceDeclarationNode.semicolonToken());\n int startColumn = getStartColumn(xMLNamespaceDeclarationNode, xMLNamespaceDeclarationNode.kind(), true);\n\n if (asKeyword != null) {\n xMLNamespaceDeclarationNode = xMLNamespaceDeclarationNode.modify()\n .withAsKeyword(formatToken(asKeyword, 1, 1, 0, 0))\n .apply();\n }\n if (namespacePrefix != null) {\n xMLNamespaceDeclarationNode = xMLNamespaceDeclarationNode.modify()\n .withNamespacePrefix(namespacePrefix)\n .apply();\n }\n\n return xMLNamespaceDeclarationNode.modify()\n .withNamespaceuri(namespaceuri)\n .withXmlnsKeyword(formatToken(xmlnsKeyword, startColumn, 1, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1))\n .apply();\n }", "context_before": "class FormattingTreeModifier extends TreeModifier {\n\n private FormattingOptions formattingOptions;\n private LineRange lineRange;\n\n @Override\n public ImportDeclarationNode transform(ImportDeclarationNode importDeclarationNode) {\n if (!isInLineRange(importDeclarationNode)) {\n return importDeclarationNode;\n }\n Token importKeyword = getToken(importDeclarationNode.importKeyword());\n Token semicolon = getToken(importDeclarationNode.semicolon());\n SeparatedNodeList moduleNames = this.modifySeparatedNodeList(\n importDeclarationNode.moduleName());\n ImportOrgNameNode orgName = 
this.modifyNode(importDeclarationNode.orgName().orElse(null));\n ImportPrefixNode prefix = this.modifyNode(importDeclarationNode.prefix().orElse(null));\n ImportVersionNode version = this.modifyNode(importDeclarationNode.version().orElse(null));\n if (orgName != null) {\n importDeclarationNode = importDeclarationNode.modify()\n .withOrgName(orgName).apply();\n }\n if (prefix != null) {\n importDeclarationNode = importDeclarationNode.modify()\n .withPrefix(prefix).apply();\n }\n if (version != null) {\n importDeclarationNode = importDeclarationNode.modify()\n .withVersion(version).apply();\n }\n return importDeclarationNode.modify()\n .withImportKeyword(formatToken(importKeyword, 0, 0, 0, 0))\n .withModuleName(moduleNames)\n .withSemicolon(formatToken(semicolon, 0, 0, 0, 1))\n .apply();\n }\n\n @Override\n public ImportOrgNameNode transform(ImportOrgNameNode importOrgNameNode) {\n if (!isInLineRange(importOrgNameNode)) {\n return importOrgNameNode;\n }\n Token orgName = getToken(importOrgNameNode.orgName());\n Token slashToken = getToken(importOrgNameNode.slashToken());\n return importOrgNameNode.modify()\n .withOrgName(formatToken(orgName, 1, 0, 0, 0))\n .withSlashToken(formatToken(slashToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ImportPrefixNode transform(ImportPrefixNode importPrefixNode) {\n if (!isInLineRange(importPrefixNode)) {\n return importPrefixNode;\n }\n Token asKeyword = getToken(importPrefixNode.asKeyword());\n Token prefix = getToken(importPrefixNode.prefix());\n return importPrefixNode.modify()\n .withAsKeyword(formatToken(asKeyword, 1, 0, 0, 0))\n .withPrefix(formatToken(prefix, 1, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ImportVersionNode transform(ImportVersionNode importVersionNode) {\n if (!isInLineRange(importVersionNode)) {\n return importVersionNode;\n }\n Token versionKeyword = getToken(importVersionNode.versionKeyword());\n SeparatedNodeList versionNumber = 
this.modifySeparatedNodeList(importVersionNode.versionNumber());\n return importVersionNode.modify()\n .withVersionKeyword(formatToken(versionKeyword, 1, 1, 0, 0))\n .withVersionNumber(versionNumber)\n .apply();\n }\n\n @Override\n public IdentifierToken transform(IdentifierToken identifier) {\n if (!isInLineRange(identifier)) {\n return identifier;\n }\n Token identifierToken = getToken(identifier);\n return (IdentifierToken) formatToken(identifierToken, 0, 0, 0, 0);\n }\n\n @Override\n public FunctionDefinitionNode transform(FunctionDefinitionNode functionDefinitionNode) {\n if (!isInLineRange(functionDefinitionNode)) {\n return functionDefinitionNode;\n }\n MetadataNode metadata = this.modifyNode(functionDefinitionNode.metadata().orElse(null));\n NodeList qualifierList = this.modifyNodeList(functionDefinitionNode.qualifierList());\n Token functionKeyword = getToken(functionDefinitionNode.functionKeyword());\n Token functionName = getToken(functionDefinitionNode.functionName());\n FunctionSignatureNode functionSignatureNode = this.modifyNode(functionDefinitionNode.functionSignature());\n FunctionBodyNode functionBodyNode = this.modifyNode(functionDefinitionNode.functionBody());\n if (metadata != null) {\n functionDefinitionNode = functionDefinitionNode.modify()\n .withMetadata(metadata).apply();\n }\n return functionDefinitionNode.modify()\n .withFunctionKeyword(formatToken(functionKeyword, 0, 0, 0, 0))\n .withFunctionName((IdentifierToken) formatToken(functionName, 1, 0, 0, 0))\n .withFunctionSignature(functionSignatureNode)\n .withQualifierList(qualifierList)\n .withFunctionBody(functionBodyNode)\n .apply();\n }\n\n @Override\n public FunctionSignatureNode transform(FunctionSignatureNode functionSignatureNode) {\n if (!isInLineRange(functionSignatureNode)) {\n return functionSignatureNode;\n }\n Token openPara = getToken(functionSignatureNode.openParenToken());\n Token closePara = getToken(functionSignatureNode.closeParenToken());\n SeparatedNodeList parameters 
= this.modifySeparatedNodeList(functionSignatureNode.parameters());\n ReturnTypeDescriptorNode returnTypeDesc = this.modifyNode(functionSignatureNode.returnTypeDesc().orElse(null));\n if (returnTypeDesc != null) {\n functionSignatureNode = functionSignatureNode.modify()\n .withReturnTypeDesc(returnTypeDesc).apply();\n }\n return functionSignatureNode.modify()\n .withOpenParenToken(formatToken(openPara, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closePara, 0, 0, 0, 0))\n .withParameters(parameters)\n .apply();\n }\n\n @Override\n public ReturnTypeDescriptorNode transform(ReturnTypeDescriptorNode returnTypeDescriptorNode) {\n if (!isInLineRange(returnTypeDescriptorNode)) {\n return returnTypeDescriptorNode;\n }\n Token returnsKeyword = getToken(returnTypeDescriptorNode.returnsKeyword());\n NodeList annotations = this.modifyNodeList(returnTypeDescriptorNode.annotations());\n Node type = this.modifyNode(returnTypeDescriptorNode.type());\n return returnTypeDescriptorNode.modify()\n .withAnnotations(annotations)\n .withReturnsKeyword(formatToken(returnsKeyword, 1, 1, 0, 0))\n .withType(type)\n .apply();\n }\n\n @Override\n public OptionalTypeDescriptorNode transform(OptionalTypeDescriptorNode optionalTypeDescriptorNode) {\n if (!isInLineRange(optionalTypeDescriptorNode)) {\n return optionalTypeDescriptorNode;\n }\n Node typeDescriptor = this.modifyNode(optionalTypeDescriptorNode.typeDescriptor());\n Token questionMarkToken = getToken(optionalTypeDescriptorNode.questionMarkToken());\n return optionalTypeDescriptorNode.modify()\n .withTypeDescriptor(typeDescriptor)\n .withQuestionMarkToken(formatToken(questionMarkToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RequiredParameterNode transform(RequiredParameterNode requiredParameterNode) {\n if (!isInLineRange(requiredParameterNode)) {\n return requiredParameterNode;\n }\n Token paramName = getToken(requiredParameterNode.paramName().orElse(null));\n NodeList annotations = 
this.modifyNodeList(requiredParameterNode.annotations());\n Node typeName = this.modifyNode(requiredParameterNode.typeName());\n if (paramName != null) {\n requiredParameterNode = requiredParameterNode.modify()\n .withParamName(formatToken(paramName, 1, 0, 0, 0)).apply();\n }\n return requiredParameterNode.modify()\n .withAnnotations(annotations)\n .withTypeName(typeName)\n .apply();\n }\n\n @Override\n public BuiltinSimpleNameReferenceNode transform(BuiltinSimpleNameReferenceNode builtinSimpleNameReferenceNode) {\n if (!isInLineRange(builtinSimpleNameReferenceNode)) {\n return builtinSimpleNameReferenceNode;\n }\n int startCol = getStartColumn(builtinSimpleNameReferenceNode, builtinSimpleNameReferenceNode.kind(), true);\n Token name = getToken(builtinSimpleNameReferenceNode.name());\n return builtinSimpleNameReferenceNode.modify()\n .withName(formatToken(name, startCol, 0, 0, 0))\n .apply();\n }\n\n @Override\n public FunctionBodyBlockNode transform(FunctionBodyBlockNode functionBodyBlockNode) {\n if (!isInLineRange(functionBodyBlockNode)) {\n return functionBodyBlockNode;\n }\n int startColumn = getStartColumn(functionBodyBlockNode, functionBodyBlockNode.kind(), false);\n Token functionBodyOpenBrace = getToken(functionBodyBlockNode.openBraceToken());\n Token functionBodyCloseBrace = getToken(functionBodyBlockNode.closeBraceToken());\n NodeList statements = this.modifyNodeList(functionBodyBlockNode.statements());\n NamedWorkerDeclarator namedWorkerDeclarator =\n this.modifyNode(functionBodyBlockNode.namedWorkerDeclarator().orElse(null));\n if (namedWorkerDeclarator != null) {\n functionBodyBlockNode = functionBodyBlockNode.modify()\n .withNamedWorkerDeclarator(namedWorkerDeclarator).apply();\n }\n return functionBodyBlockNode.modify()\n .withOpenBraceToken(formatToken(functionBodyOpenBrace, 1, 0, 0, 1))\n .withCloseBraceToken(formatToken(functionBodyCloseBrace, startColumn, 0, 0, 1))\n .withStatements(statements)\n .apply();\n }\n\n @Override\n public 
ExpressionStatementNode transform(ExpressionStatementNode expressionStatementNode) {\n if (!isInLineRange(expressionStatementNode)) {\n return expressionStatementNode;\n }\n ExpressionNode expression = this.modifyNode(expressionStatementNode.expression());\n Token semicolonToken = expressionStatementNode.semicolonToken();\n return expressionStatementNode.modify()\n .withExpression(expression)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public FunctionCallExpressionNode transform(FunctionCallExpressionNode functionCallExpressionNode) {\n if (!isInLineRange(functionCallExpressionNode)) {\n return functionCallExpressionNode;\n }\n NameReferenceNode functionName = this.modifyNode(functionCallExpressionNode.functionName());\n Token functionCallOpenPara = getToken(functionCallExpressionNode.openParenToken());\n Token functionCallClosePara = getToken(functionCallExpressionNode.closeParenToken());\n SeparatedNodeList arguments = this.modifySeparatedNodeList(functionCallExpressionNode\n .arguments());\n return functionCallExpressionNode.modify()\n .withFunctionName(functionName)\n .withOpenParenToken(formatToken(functionCallOpenPara, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(functionCallClosePara, 0, 0, 0, 0))\n .withArguments(arguments)\n .apply();\n }\n\n @Override\n public QualifiedNameReferenceNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) {\n if (!isInLineRange(qualifiedNameReferenceNode)) {\n return qualifiedNameReferenceNode;\n }\n int startCol = getStartColumn(qualifiedNameReferenceNode, qualifiedNameReferenceNode.kind(), false);\n Token modulePrefix = getToken(qualifiedNameReferenceNode.modulePrefix());\n Token identifier = getToken(qualifiedNameReferenceNode.identifier());\n Token colon = getToken((Token) qualifiedNameReferenceNode.colon());\n return qualifiedNameReferenceNode.modify()\n .withModulePrefix(formatToken(modulePrefix, startCol, 0, 0, 0))\n .withIdentifier((IdentifierToken) 
formatToken(identifier, 0, 0, 0, 0))\n .withColon(formatToken(colon, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public PositionalArgumentNode transform(PositionalArgumentNode positionalArgumentNode) {\n if (!isInLineRange(positionalArgumentNode)) {\n return positionalArgumentNode;\n }\n ExpressionNode expression = this.modifyNode(positionalArgumentNode.expression());\n return positionalArgumentNode.modify()\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public BasicLiteralNode transform(BasicLiteralNode basicLiteralNode) {\n if (!isInLineRange(basicLiteralNode)) {\n return basicLiteralNode;\n }\n Token literalToken = getToken(basicLiteralNode.literalToken());\n return basicLiteralNode.modify()\n .withLiteralToken(formatToken(literalToken, 0, 0, 0, 0))\n .apply();\n }\n\n\n @Override\n public ServiceDeclarationNode transform(ServiceDeclarationNode serviceDeclarationNode) {\n if (!isInLineRange(serviceDeclarationNode)) {\n return serviceDeclarationNode;\n }\n Token serviceKeyword = getToken(serviceDeclarationNode.serviceKeyword());\n IdentifierToken serviceName = (IdentifierToken) getToken(serviceDeclarationNode.serviceName());\n Token onKeyword = getToken(serviceDeclarationNode.onKeyword());\n MetadataNode metadata = this.modifyNode(serviceDeclarationNode.metadata().orElse(null));\n SeparatedNodeList expressions =\n this.modifySeparatedNodeList(serviceDeclarationNode.expressions());\n Node serviceBody = this.modifyNode(serviceDeclarationNode.serviceBody());\n if (metadata != null) {\n serviceDeclarationNode = serviceDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return serviceDeclarationNode.modify()\n .withServiceKeyword(formatToken(serviceKeyword, 0, 0, 1, 0))\n .withServiceName((IdentifierToken) formatToken(serviceName, 1, 0, 0, 0))\n .withOnKeyword(formatToken(onKeyword, 1, 0, 0, 0))\n .withExpressions(expressions)\n .withServiceBody(serviceBody)\n .apply();\n }\n\n @Override\n public ServiceBodyNode transform(ServiceBodyNode 
serviceBodyNode) {\n if (!isInLineRange(serviceBodyNode)) {\n return serviceBodyNode;\n }\n Token openBraceToken = getToken(serviceBodyNode.openBraceToken());\n Token closeBraceToken = getToken(serviceBodyNode.closeBraceToken());\n NodeList resources = this.modifyNodeList(serviceBodyNode.resources());\n return serviceBodyNode.modify()\n .withOpenBraceToken(formatToken(openBraceToken, 1, 0, 0, 1))\n .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 1))\n .withResources(resources)\n .apply();\n }\n\n @Override\n public ExplicitNewExpressionNode transform(ExplicitNewExpressionNode explicitNewExpressionNode) {\n if (!isInLineRange(explicitNewExpressionNode)) {\n return explicitNewExpressionNode;\n }\n Token newKeywordToken = getToken(explicitNewExpressionNode.newKeyword());\n TypeDescriptorNode typeDescriptorNode = this.modifyNode(explicitNewExpressionNode.typeDescriptor());\n return explicitNewExpressionNode.modify()\n .withNewKeyword(formatToken(newKeywordToken, 1, 1, 0, 0))\n .withParenthesizedArgList(modifyNode(explicitNewExpressionNode.parenthesizedArgList()))\n .withTypeDescriptor(typeDescriptorNode)\n .apply();\n }\n\n @Override\n public ParenthesizedArgList transform(ParenthesizedArgList parenthesizedArgList) {\n if (!isInLineRange(parenthesizedArgList)) {\n return parenthesizedArgList;\n }\n Token openParenToken = getToken(parenthesizedArgList.openParenToken());\n Token closeParenToken = getToken(parenthesizedArgList.closeParenToken());\n SeparatedNodeList arguments = this.modifySeparatedNodeList(parenthesizedArgList\n .arguments());\n return parenthesizedArgList.modify()\n .withArguments(arguments)\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public VariableDeclarationNode transform(VariableDeclarationNode variableDeclarationNode) {\n if (!isInLineRange(variableDeclarationNode)) {\n return variableDeclarationNode;\n }\n Token 
semicolonToken = getToken(variableDeclarationNode.semicolonToken());\n Token equalToken = getToken(variableDeclarationNode.equalsToken().orElse(null));\n Token finalToken = getToken(variableDeclarationNode.finalKeyword().orElse(null));\n ExpressionNode initializer = this.modifyNode(variableDeclarationNode.initializer().orElse(null));\n NodeList annotationNodes = this.modifyNodeList(variableDeclarationNode.annotations());\n TypedBindingPatternNode typedBindingPatternNode = this.modifyNode(\n variableDeclarationNode.typedBindingPattern());\n if (equalToken != null) {\n variableDeclarationNode = variableDeclarationNode.modify()\n .withEqualsToken(formatToken(equalToken, 1, 1, 0, 0)).apply();\n }\n if (finalToken != null) {\n variableDeclarationNode = variableDeclarationNode.modify()\n .withFinalKeyword(formatToken(finalToken, 0, 0, 0, 0)).apply();\n }\n if (initializer != null) {\n variableDeclarationNode = variableDeclarationNode.modify()\n .withInitializer(initializer).apply();\n }\n return variableDeclarationNode.modify()\n .withAnnotations(annotationNodes)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1))\n .withTypedBindingPattern(typedBindingPatternNode)\n .apply();\n }\n\n @Override\n public TypedBindingPatternNode transform(TypedBindingPatternNode typedBindingPatternNode) {\n if (!isInLineRange(typedBindingPatternNode)) {\n return typedBindingPatternNode;\n }\n BindingPatternNode bindingPatternNode = this.modifyNode(typedBindingPatternNode.bindingPattern());\n TypeDescriptorNode typeDescriptorNode = this.modifyNode(typedBindingPatternNode.typeDescriptor());\n return typedBindingPatternNode.modify()\n .withBindingPattern(bindingPatternNode)\n .withTypeDescriptor(typeDescriptorNode)\n .apply();\n }\n\n @Override\n public CaptureBindingPatternNode transform(CaptureBindingPatternNode captureBindingPatternNode) {\n if (!isInLineRange(captureBindingPatternNode)) {\n return captureBindingPatternNode;\n }\n Token variableName = 
getToken(captureBindingPatternNode.variableName());\n return captureBindingPatternNode.modify()\n .withVariableName(formatToken(variableName, 1, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ListBindingPatternNode transform(ListBindingPatternNode listBindingPatternNode) {\n if (!isInLineRange(listBindingPatternNode)) {\n return listBindingPatternNode;\n }\n SeparatedNodeList bindingPatternNodes = this.modifySeparatedNodeList(\n listBindingPatternNode.bindingPatterns());\n Token openBracket = getToken(listBindingPatternNode.openBracket());\n Token closeBracket = getToken(listBindingPatternNode.closeBracket());\n RestBindingPatternNode restBindingPattern =\n this.modifyNode(listBindingPatternNode.restBindingPattern().orElse(null));\n if (restBindingPattern != null) {\n listBindingPatternNode = listBindingPatternNode.modify()\n .withRestBindingPattern(restBindingPattern).apply();\n }\n return listBindingPatternNode.modify()\n .withBindingPatterns(bindingPatternNodes)\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public MappingBindingPatternNode transform(MappingBindingPatternNode mappingBindingPatternNode) {\n if (!isInLineRange(mappingBindingPatternNode)) {\n return mappingBindingPatternNode;\n }\n Token openBraceToken = getToken(mappingBindingPatternNode.openBrace());\n Token closeBraceToken = getToken(mappingBindingPatternNode.closeBrace());\n SeparatedNodeList fieldBindingPatternNodes =\n this.modifySeparatedNodeList(mappingBindingPatternNode.fieldBindingPatterns());\n RestBindingPatternNode restBindingPattern =\n this.modifyNode(mappingBindingPatternNode.restBindingPattern().orElse(null));\n if (restBindingPattern != null) {\n mappingBindingPatternNode = mappingBindingPatternNode.modify()\n .withRestBindingPattern(restBindingPattern).apply();\n }\n return mappingBindingPatternNode.modify()\n .withOpenBrace(formatToken(openBraceToken, 1, 0, 0, 1))\n 
.withCloseBrace(formatToken(closeBraceToken, 0, 0, 1, 0))\n .withFieldBindingPatterns(fieldBindingPatternNodes)\n .apply();\n }\n\n @Override\n public FieldBindingPatternFullNode transform(FieldBindingPatternFullNode fieldBindingPatternFullNode) {\n if (!isInLineRange(fieldBindingPatternFullNode)) {\n return fieldBindingPatternFullNode;\n }\n Token colon = getToken(fieldBindingPatternFullNode.colon());\n BindingPatternNode bindingPatternNode = this.modifyNode(fieldBindingPatternFullNode.bindingPattern());\n SimpleNameReferenceNode variableName = this.modifyNode(fieldBindingPatternFullNode.variableName());\n return fieldBindingPatternFullNode.modify()\n .withBindingPattern(bindingPatternNode)\n .withColon(formatToken(colon, 0, 0, 0, 0))\n .withVariableName(variableName)\n .apply();\n }\n\n @Override\n public FieldBindingPatternVarnameNode transform(FieldBindingPatternVarnameNode fieldBindingPatternVarnameNode) {\n if (!isInLineRange(fieldBindingPatternVarnameNode)) {\n return fieldBindingPatternVarnameNode;\n }\n SimpleNameReferenceNode variableName = this.modifyNode(fieldBindingPatternVarnameNode.variableName());\n return fieldBindingPatternVarnameNode.modify()\n .withVariableName(variableName)\n .apply();\n }\n\n @Override\n public RestBindingPatternNode transform(RestBindingPatternNode restBindingPatternNode) {\n if (!isInLineRange(restBindingPatternNode)) {\n return restBindingPatternNode;\n }\n Token ellipsisToken = getToken(restBindingPatternNode.ellipsisToken());\n SimpleNameReferenceNode variableName = restBindingPatternNode.variableName();\n return restBindingPatternNode.modify()\n .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))\n .withVariableName(variableName)\n .apply();\n }\n\n @Override\n public RemoteMethodCallActionNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) {\n if (!isInLineRange(remoteMethodCallActionNode)) {\n return remoteMethodCallActionNode;\n }\n Token openParenToken = 
getToken(remoteMethodCallActionNode.openParenToken());\n Token closeParenToken = getToken(remoteMethodCallActionNode.closeParenToken());\n Token rightArrowToken = getToken(remoteMethodCallActionNode.rightArrowToken());\n SeparatedNodeList arguments = this.modifySeparatedNodeList(remoteMethodCallActionNode\n .arguments());\n ExpressionNode expression = this.modifyNode(remoteMethodCallActionNode.expression());\n SimpleNameReferenceNode methodName = this.modifyNode(remoteMethodCallActionNode.methodName());\n return remoteMethodCallActionNode.modify()\n .withArguments(arguments)\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .withExpression(expression)\n .withMethodName(methodName)\n .withRightArrowToken(formatToken(rightArrowToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public SimpleNameReferenceNode transform(SimpleNameReferenceNode simpleNameReferenceNode) {\n if (!isInLineRange(simpleNameReferenceNode)) {\n return simpleNameReferenceNode;\n }\n Token name = getToken(simpleNameReferenceNode.name());\n return simpleNameReferenceNode.modify()\n .withName(formatToken(name, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public IfElseStatementNode transform(IfElseStatementNode ifElseStatementNode) {\n if (!isInLineRange(ifElseStatementNode)) {\n return ifElseStatementNode;\n }\n BlockStatementNode ifBody = this.modifyNode(ifElseStatementNode.ifBody());\n ExpressionNode condition = this.modifyNode(ifElseStatementNode.condition());\n Token ifKeyword = getToken(ifElseStatementNode.ifKeyword());\n Node elseBody = this.modifyNode(ifElseStatementNode.elseBody().orElse(null));\n\n int startColumn = 1;\n if (ifElseStatementNode.parent().kind() != SyntaxKind.ELSE_BLOCK) {\n startColumn = getStartColumn(ifElseStatementNode, ifElseStatementNode.kind(), true);\n }\n if (elseBody != null) {\n ifElseStatementNode = ifElseStatementNode.modify()\n .withElseBody(elseBody).apply();\n }\n return 
ifElseStatementNode.modify()\n .withIfKeyword(formatToken(ifKeyword, startColumn, 0, 0, 0))\n .withIfBody(ifBody)\n .withCondition(condition)\n .apply();\n }\n\n @Override\n public ElseBlockNode transform(ElseBlockNode elseBlockNode) {\n if (!isInLineRange(elseBlockNode)) {\n return elseBlockNode;\n }\n Token elseKeyword = getToken(elseBlockNode.elseKeyword());\n StatementNode elseBody = this.modifyNode(elseBlockNode.elseBody());\n return elseBlockNode.modify()\n .withElseKeyword(formatToken(elseKeyword, 1, 0, 0, 0))\n .withElseBody(elseBody)\n .apply();\n }\n\n @Override\n public BracedExpressionNode transform(BracedExpressionNode bracedExpressionNode) {\n if (!isInLineRange(bracedExpressionNode)) {\n return bracedExpressionNode;\n }\n Token openParen = getToken(bracedExpressionNode.openParen());\n Token closeParen = getToken(bracedExpressionNode.closeParen());\n ExpressionNode expression = this.modifyNode(bracedExpressionNode.expression());\n return bracedExpressionNode.modify()\n .withOpenParen(formatToken(openParen, 1, 0, 0, 0))\n .withCloseParen(formatToken(closeParen, 0, 0, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public TypeTestExpressionNode transform(TypeTestExpressionNode typeTestExpressionNode) {\n if (!isInLineRange(typeTestExpressionNode)) {\n return typeTestExpressionNode;\n }\n ExpressionNode expression = this.modifyNode(typeTestExpressionNode.expression());\n Node typeDescriptor = this.modifyNode(typeTestExpressionNode.typeDescriptor());\n Token isToken = getToken(typeTestExpressionNode.isKeyword());\n return typeTestExpressionNode.modify()\n .withExpression(expression)\n .withIsKeyword(formatToken(isToken, 1, 1, 0, 0))\n .withTypeDescriptor(typeDescriptor)\n .apply();\n }\n\n @Override\n public ErrorTypeDescriptorNode transform(ErrorTypeDescriptorNode errorTypeDescriptorNode) {\n if (!isInLineRange(errorTypeDescriptorNode)) {\n return errorTypeDescriptorNode;\n }\n Token errorKeywordToken = 
getToken(errorTypeDescriptorNode.errorKeywordToken());\n ErrorTypeParamsNode errorTypeParamsNode =\n this.modifyNode(errorTypeDescriptorNode.errorTypeParamsNode().orElse(null));\n if (errorTypeParamsNode != null) {\n errorTypeDescriptorNode = errorTypeDescriptorNode.modify()\n .withErrorTypeParamsNode(errorTypeParamsNode).apply();\n }\n return errorTypeDescriptorNode.modify()\n .withErrorKeywordToken(formatToken(errorKeywordToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ModuleVariableDeclarationNode transform(ModuleVariableDeclarationNode moduleVariableDeclarationNode) {\n if (!isInLineRange(moduleVariableDeclarationNode)) {\n return moduleVariableDeclarationNode;\n }\n Token equalsToken = getToken(moduleVariableDeclarationNode.equalsToken());\n Token semicolonToken = getToken(moduleVariableDeclarationNode.semicolonToken());\n Token finalKeyword = getToken(moduleVariableDeclarationNode.finalKeyword().orElse(null));\n MetadataNode metadata = this.modifyNode(moduleVariableDeclarationNode.metadata().orElse(null));\n ExpressionNode initializer = this.modifyNode(moduleVariableDeclarationNode.initializer());\n if (metadata != null) {\n moduleVariableDeclarationNode = moduleVariableDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n if (finalKeyword != null) {\n moduleVariableDeclarationNode = moduleVariableDeclarationNode.modify()\n .withFinalKeyword(formatToken(finalKeyword, 0, 1, 0, 0)).apply();\n }\n return moduleVariableDeclarationNode.modify()\n .withTypedBindingPattern(this.modifyNode(moduleVariableDeclarationNode.typedBindingPattern()))\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withInitializer(initializer)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 2))\n .apply();\n }\n\n @Override\n public ConstantDeclarationNode transform(ConstantDeclarationNode constantDeclarationNode) {\n if (!isInLineRange(constantDeclarationNode)) {\n return constantDeclarationNode;\n }\n Token constKeyword = 
getToken(constantDeclarationNode.constKeyword());\n Token variableName = getToken(constantDeclarationNode.variableName());\n Token equalsToken = getToken(constantDeclarationNode.equalsToken());\n Token semicolonToken = getToken(constantDeclarationNode.semicolonToken());\n Token visibilityQualifier = getToken(constantDeclarationNode.visibilityQualifier().orElse(null));\n Node initializer = this.modifyNode(constantDeclarationNode.initializer());\n MetadataNode metadata = this.modifyNode(constantDeclarationNode.metadata().orElse(null));\n TypeDescriptorNode typeDescriptorNode = this.modifyNode(constantDeclarationNode.typeDescriptor().orElse(null));\n if (metadata != null) {\n constantDeclarationNode = constantDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return constantDeclarationNode.modify()\n .withVisibilityQualifier(formatToken(visibilityQualifier, 1, 1, 0, 0))\n .withConstKeyword(formatToken(constKeyword, 1, 1, 0, 0))\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withInitializer(initializer)\n .withSemicolonToken(formatToken(semicolonToken, 1, 1, 0, 1))\n .withTypeDescriptor(typeDescriptorNode)\n .withVariableName(variableName)\n .apply();\n }\n\n @Override\n public MetadataNode transform(MetadataNode metadataNode) {\n if (!isInLineRange(metadataNode)) {\n return metadataNode;\n }\n NodeList annotations = this.modifyNodeList(metadataNode.annotations());\n Node documentationString = metadataNode.documentationString().orElse(null);\n if (documentationString != null) {\n metadataNode = metadataNode.modify()\n .withDocumentationString(this.modifyNode(documentationString)).apply();\n }\n return metadataNode.modify()\n .withAnnotations(annotations)\n .apply();\n }\n\n @Override\n public BlockStatementNode transform(BlockStatementNode blockStatementNode) {\n if (!isInLineRange(blockStatementNode)) {\n return blockStatementNode;\n }\n int startColumn = getStartColumn(blockStatementNode, blockStatementNode.kind(), false);\n Token 
openBraceToken = getToken(blockStatementNode.openBraceToken());\n Token closeBraceToken = getToken(blockStatementNode.closeBraceToken());\n NodeList statements = this.modifyNodeList(blockStatementNode.statements());\n\n int trailingNewLines = 1;\n if (blockStatementNode.parent() != null && blockStatementNode.parent().kind() == SyntaxKind.IF_ELSE_STATEMENT) {\n IfElseStatementNode ifElseStatementNode = (IfElseStatementNode) blockStatementNode.parent();\n if (ifElseStatementNode.elseBody().isPresent()) {\n trailingNewLines = 0;\n }\n }\n return blockStatementNode.modify()\n .withOpenBraceToken(formatToken(openBraceToken, 1, 0, 0, 1))\n .withCloseBraceToken(formatToken(closeBraceToken, startColumn, 0, 0, trailingNewLines))\n .withStatements(statements)\n .apply();\n }\n\n @Override\n public MappingConstructorExpressionNode transform(\n MappingConstructorExpressionNode mappingConstructorExpressionNode) {\n if (!isInLineRange(mappingConstructorExpressionNode)) {\n return mappingConstructorExpressionNode;\n }\n int startColumn = getStartColumn(mappingConstructorExpressionNode, mappingConstructorExpressionNode.kind(),\n false);\n Token openBrace = getToken(mappingConstructorExpressionNode.openBrace());\n Token closeBrace = getToken(mappingConstructorExpressionNode.closeBrace());\n SeparatedNodeList fields = this.modifySeparatedNodeList(\n mappingConstructorExpressionNode.fields());\n return mappingConstructorExpressionNode.modify()\n .withOpenBrace(formatToken(openBrace, 0, 0, 0, 1))\n .withCloseBrace(formatToken(closeBrace, startColumn, 0, 1, 0))\n .withFields(fields)\n .apply();\n }\n\n @Override\n public ListenerDeclarationNode transform(ListenerDeclarationNode listenerDeclarationNode) {\n if (!isInLineRange(listenerDeclarationNode)) {\n return listenerDeclarationNode;\n }\n Token equalsToken = getToken(listenerDeclarationNode.equalsToken());\n Token variableName = getToken(listenerDeclarationNode.variableName());\n Token semicolonToken = 
getToken(listenerDeclarationNode.semicolonToken());\n Token listenerKeyword = getToken(listenerDeclarationNode.listenerKeyword());\n Token visibilityQualifier = getToken(listenerDeclarationNode.visibilityQualifier().orElse(null));\n Node initializer = this.modifyNode(listenerDeclarationNode.initializer());\n MetadataNode metadata = this.modifyNode(listenerDeclarationNode.metadata().orElse(null));\n Node typeDescriptor = this.modifyNode(listenerDeclarationNode.typeDescriptor());\n if (visibilityQualifier != null) {\n listenerDeclarationNode = listenerDeclarationNode.modify()\n .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 0, 0, 0)).apply();\n }\n if (metadata != null) {\n listenerDeclarationNode = listenerDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return listenerDeclarationNode.modify()\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withInitializer(initializer)\n .withListenerKeyword(formatToken(listenerKeyword, 0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1))\n .withTypeDescriptor(typeDescriptor)\n .withVariableName(formatToken(variableName, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public SpecificFieldNode transform(SpecificFieldNode specificFieldNode) {\n if (!isInLineRange(specificFieldNode)) {\n return specificFieldNode;\n }\n int startColumn = getStartColumn(specificFieldNode, specificFieldNode.kind(), true);\n Token fieldName = getToken((Token) specificFieldNode.fieldName());\n Token readOnlyKeyword = specificFieldNode.readonlyKeyword().orElse(null);\n Token colon = getToken(specificFieldNode.colon().orElse(null));\n ExpressionNode expressionNode = this.modifyNode(specificFieldNode.valueExpr().orElse(null));\n if (readOnlyKeyword != null) {\n specificFieldNode = specificFieldNode.modify()\n .withReadonlyKeyword(formatToken(readOnlyKeyword, 0, 0, 0, 0)).apply();\n }\n return specificFieldNode.modify()\n .withFieldName(formatToken(fieldName, startColumn, 0, 0, 0))\n 
.withColon(formatToken(colon, 0, 1, 0, 0))\n .withValueExpr(expressionNode)\n .apply();\n }\n\n @Override\n public BinaryExpressionNode transform(BinaryExpressionNode binaryExpressionNode) {\n if (!isInLineRange(binaryExpressionNode)) {\n return binaryExpressionNode;\n }\n Node lhsExpr = this.modifyNode(binaryExpressionNode.lhsExpr());\n Node rhsExpr = this.modifyNode(binaryExpressionNode.rhsExpr());\n Token operator = getToken(binaryExpressionNode.operator());\n return binaryExpressionNode.modify()\n .withLhsExpr(lhsExpr)\n .withRhsExpr(rhsExpr)\n .withOperator(formatToken(operator, 1, 1, 0, 0))\n .apply();\n }\n\n @Override\n public ArrayTypeDescriptorNode transform(ArrayTypeDescriptorNode arrayTypeDescriptorNode) {\n if (!isInLineRange(arrayTypeDescriptorNode)) {\n return arrayTypeDescriptorNode;\n }\n Node arrayLength = arrayTypeDescriptorNode.arrayLength().orElse(null);\n Token openBracket = getToken(arrayTypeDescriptorNode.openBracket());\n Token closeBracket = getToken(arrayTypeDescriptorNode.closeBracket());\n TypeDescriptorNode memberTypeDesc = this.modifyNode(arrayTypeDescriptorNode.memberTypeDesc());\n if (arrayLength != null) {\n arrayTypeDescriptorNode = arrayTypeDescriptorNode.modify()\n .withArrayLength(this.modifyNode(arrayLength)).apply();\n }\n return arrayTypeDescriptorNode.modify()\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .withMemberTypeDesc(memberTypeDesc)\n .apply();\n }\n\n @Override\n public AssignmentStatementNode transform(AssignmentStatementNode assignmentStatementNode) {\n if (!isInLineRange(assignmentStatementNode)) {\n return assignmentStatementNode;\n }\n Node varRef = this.modifyNode(assignmentStatementNode.varRef());\n ExpressionNode expression = this.modifyNode(assignmentStatementNode.expression());\n Token equalsToken = getToken(assignmentStatementNode.equalsToken());\n Token semicolonToken = getToken(assignmentStatementNode.semicolonToken());\n return 
assignmentStatementNode.modify()\n .withVarRef(varRef)\n .withExpression(expression)\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1))\n .apply();\n }\n\n @Override\n public IndexedExpressionNode transform(IndexedExpressionNode indexedExpressionNode) {\n if (!isInLineRange(indexedExpressionNode)) {\n return indexedExpressionNode;\n }\n SeparatedNodeList keyExpression = this.modifySeparatedNodeList(\n indexedExpressionNode.keyExpression());\n ExpressionNode containerExpression = this.modifyNode(indexedExpressionNode.containerExpression());\n Token openBracket = getToken(indexedExpressionNode.openBracket());\n Token closeBracket = getToken(indexedExpressionNode.closeBracket());\n return indexedExpressionNode.modify()\n .withKeyExpression(keyExpression)\n .withContainerExpression(containerExpression)\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public CheckExpressionNode transform(CheckExpressionNode checkExpressionNode) {\n if (!isInLineRange(checkExpressionNode)) {\n return checkExpressionNode;\n }\n int startColumn = getStartColumn(checkExpressionNode, checkExpressionNode.kind(), false);\n Token checkKeyword = getToken(checkExpressionNode.checkKeyword());\n ExpressionNode expressionNode = this.modifyNode(checkExpressionNode.expression());\n return checkExpressionNode.modify()\n .withCheckKeyword(formatToken(checkKeyword, startColumn, 1, 0, 0))\n .withExpression(expressionNode)\n .apply();\n }\n\n @Override\n public WhileStatementNode transform(WhileStatementNode whileStatementNode) {\n if (!isInLineRange(whileStatementNode)) {\n return whileStatementNode;\n }\n int startColumn = getStartColumn(whileStatementNode, whileStatementNode.kind(), true);\n Token whileKeyword = getToken(whileStatementNode.whileKeyword());\n ExpressionNode condition = this.modifyNode(whileStatementNode.condition());\n 
BlockStatementNode whileBody = this.modifyNode(whileStatementNode.whileBody());\n return whileStatementNode.modify()\n .withWhileKeyword(formatToken(whileKeyword, startColumn, 0, 0, 0))\n .withCondition(condition)\n .withWhileBody(whileBody)\n .apply();\n }\n\n @Override\n public ReturnStatementNode transform(ReturnStatementNode returnStatementNode) {\n if (!isInLineRange(returnStatementNode)) {\n return returnStatementNode;\n }\n int startColumn = getStartColumn(returnStatementNode, returnStatementNode.kind(), true);\n Token returnKeyword = getToken(returnStatementNode.returnKeyword());\n ExpressionNode expressionNode = returnStatementNode.expression().orElse(null);\n Token semicolonToken = getToken(returnStatementNode.semicolonToken());\n if (expressionNode != null) {\n returnStatementNode = returnStatementNode.modify()\n .withExpression(this.modifyNode(expressionNode)).apply();\n }\n return returnStatementNode.modify()\n .withReturnKeyword(formatToken(returnKeyword, startColumn, 1, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1))\n .apply();\n }\n\n @Override\n public MethodCallExpressionNode transform(MethodCallExpressionNode methodCallExpressionNode) {\n if (!isInLineRange(methodCallExpressionNode)) {\n return methodCallExpressionNode;\n }\n SeparatedNodeList arguments = this.modifySeparatedNodeList(methodCallExpressionNode\n .arguments());\n Token openParenToken = getToken(methodCallExpressionNode.openParenToken());\n Token closeParenToken = getToken(methodCallExpressionNode.closeParenToken());\n Token dotToken = getToken(methodCallExpressionNode.dotToken());\n ExpressionNode expression = this.modifyNode(methodCallExpressionNode.expression());\n NameReferenceNode methodName = this.modifyNode(methodCallExpressionNode.methodName());\n return methodCallExpressionNode.modify()\n .withArguments(arguments)\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n 
.withDotToken(formatToken(dotToken, 0, 0, 0, 0))\n .withExpression(expression)\n .withMethodName(methodName)\n .apply();\n }\n\n @Override\n public NilLiteralNode transform(NilLiteralNode nilLiteralNode) {\n Token openParenToken = getToken(nilLiteralNode.openParenToken());\n Token closeParenToken = getToken(nilLiteralNode.closeParenToken());\n return nilLiteralNode.modify()\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public NilTypeDescriptorNode transform(NilTypeDescriptorNode nilTypeDescriptorNode) {\n Token openParenToken = getToken(nilTypeDescriptorNode.openParenToken());\n Token closeParenToken = getToken(nilTypeDescriptorNode.closeParenToken());\n return nilTypeDescriptorNode.modify()\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public UnionTypeDescriptorNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) {\n TypeDescriptorNode leftTypeDesc = this.modifyNode(unionTypeDescriptorNode.leftTypeDesc());\n Token pipeToken = getToken(unionTypeDescriptorNode.pipeToken());\n TypeDescriptorNode rightTypeDesc = this.modifyNode(unionTypeDescriptorNode.rightTypeDesc());\n return unionTypeDescriptorNode.modify()\n .withLeftTypeDesc(leftTypeDesc)\n .withPipeToken(pipeToken)\n .withRightTypeDesc(rightTypeDesc)\n .apply();\n }\n\n @Override\n \n\n @Override\n public ModuleXMLNamespaceDeclarationNode transform(\n ModuleXMLNamespaceDeclarationNode moduleXMLNamespaceDeclarationNode) {\n Token xmlnsKeyword = getToken(moduleXMLNamespaceDeclarationNode.xmlnsKeyword());\n ExpressionNode namespaceuri = this.modifyNode(moduleXMLNamespaceDeclarationNode.namespaceuri());\n Token asKeyword = getToken(moduleXMLNamespaceDeclarationNode.asKeyword());\n IdentifierToken namespacePrefix = 
this.modifyNode(moduleXMLNamespaceDeclarationNode.namespacePrefix());\n Token semicolonToken = getToken(moduleXMLNamespaceDeclarationNode.semicolonToken());\n return moduleXMLNamespaceDeclarationNode.modify()\n .withNamespacePrefix(namespacePrefix)\n .withNamespaceuri(namespaceuri)\n .withXmlnsKeyword(formatToken(xmlnsKeyword, 0, 0, 0, 0))\n .withAsKeyword(formatToken(asKeyword, 0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XmlTypeDescriptorNode transform(XmlTypeDescriptorNode xmlTypeDescriptorNode) {\n int startColumn = getStartColumn(xmlTypeDescriptorNode, xmlTypeDescriptorNode.kind(), true);\n Token xmlKeywordToken = getToken(xmlTypeDescriptorNode.xmlKeywordToken());\n TypeParameterNode xmlTypeParamsNode = this.modifyNode(xmlTypeDescriptorNode.xmlTypeParamsNode().orElse(null));\n if (xmlTypeParamsNode != null) {\n xmlTypeDescriptorNode = xmlTypeDescriptorNode.modify()\n .withXmlTypeParamsNode(xmlTypeParamsNode).apply();\n }\n return xmlTypeDescriptorNode.modify()\n .withXmlKeywordToken(formatToken(xmlKeywordToken, startColumn, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLElementNode transform(XMLElementNode xMLElementNode) {\n XMLStartTagNode startTag = this.modifyNode(xMLElementNode.startTag());\n NodeList content = modifyNodeList(xMLElementNode.content());\n XMLEndTagNode endTag = this.modifyNode(xMLElementNode.endTag());\n return xMLElementNode.modify()\n .withStartTag(startTag)\n .withEndTag(endTag)\n .withContent(content)\n .apply();\n }\n\n @Override\n public XMLStartTagNode transform(XMLStartTagNode xMLStartTagNode) {\n Token ltToken = getToken(xMLStartTagNode.ltToken());\n XMLNameNode name = this.modifyNode(xMLStartTagNode.name());\n NodeList attributes = modifyNodeList(xMLStartTagNode.attributes());\n Token getToken = getToken(xMLStartTagNode.getToken());\n return xMLStartTagNode.modify()\n .withName(name)\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n 
.withAttributes(attributes)\n .withGetToken(formatToken(getToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLEndTagNode transform(XMLEndTagNode xMLEndTagNode) {\n Token ltToken = getToken(xMLEndTagNode.ltToken());\n Token slashToken = getToken(xMLEndTagNode.slashToken());\n XMLNameNode name = this.modifyNode(xMLEndTagNode.name());\n Token getToken = getToken(xMLEndTagNode.getToken());\n return xMLEndTagNode.modify()\n .withName(name)\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withSlashToken(formatToken(slashToken, 0, 0, 0, 0))\n .withGetToken(formatToken(getToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLSimpleNameNode transform(XMLSimpleNameNode xMLSimpleNameNode) {\n Token name = getToken(xMLSimpleNameNode.name());\n if (xMLSimpleNameNode.parent().kind() == SyntaxKind.XML_PI &&\n ((XMLProcessingInstruction) xMLSimpleNameNode.parent()).data() != null) {\n return xMLSimpleNameNode.modify()\n .withName(formatToken(name, 0, 1, 0, 0))\n .apply();\n }\n return xMLSimpleNameNode.modify()\n .withName(formatToken(name, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLQualifiedNameNode transform(XMLQualifiedNameNode xMLQualifiedNameNode) {\n XMLSimpleNameNode prefix = this.modifyNode(xMLQualifiedNameNode.prefix());\n Token colon = getToken(xMLQualifiedNameNode.colon());\n XMLSimpleNameNode name = this.modifyNode(xMLQualifiedNameNode.name());\n return xMLQualifiedNameNode.modify()\n .withPrefix(prefix)\n .withName(name)\n .withColon(formatToken(colon, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLEmptyElementNode transform(XMLEmptyElementNode xMLEmptyElementNode) {\n Token ltToken = getToken(xMLEmptyElementNode.ltToken());\n XMLNameNode name = this.modifyNode(xMLEmptyElementNode.name());\n NodeList attributes = this.modifyNodeList(xMLEmptyElementNode.attributes());\n Token slashToken = getToken(xMLEmptyElementNode.slashToken());\n Token getToken = getToken(xMLEmptyElementNode.getToken());\n return 
xMLEmptyElementNode.modify()\n .withName(name)\n .withAttributes(attributes)\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withSlashToken(formatToken(slashToken, 0, 0, 0, 0))\n .withGetToken(formatToken(getToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLTextNode transform(XMLTextNode xMLTextNode) {\n Token content = getToken(xMLTextNode.content());\n return xMLTextNode.modify()\n .withContent(formatToken(content, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLAttributeNode transform(XMLAttributeNode xMLAttributeNode) {\n XMLNameNode attributeName = this.modifyNode(xMLAttributeNode.attributeName());\n Token equalToken = getToken(xMLAttributeNode.equalToken());\n XMLAttributeValue value = this.modifyNode(xMLAttributeNode.value());\n return xMLAttributeNode.modify()\n .withValue(value)\n .withAttributeName(attributeName)\n .withEqualToken(formatToken(equalToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLAttributeValue transform(XMLAttributeValue xMLAttributeValue) {\n Token startQuote = getToken(xMLAttributeValue.startQuote());\n NodeList value = this.modifyNodeList(xMLAttributeValue.value());\n Token endQuote = getToken(xMLAttributeValue.endQuote());\n return xMLAttributeValue.modify()\n .withStartQuote(formatToken(startQuote, 0, 0, 0, 0))\n .withValue(value)\n .withEndQuote(formatToken(endQuote, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLComment transform(XMLComment xMLComment) {\n Token commentStart = getToken(xMLComment.commentStart());\n NodeList content = this.modifyNodeList(xMLComment.content());\n Token commentEnd = getToken(xMLComment.commentEnd());\n return xMLComment.modify()\n .withCommentStart(formatToken(commentStart, 0, 0, 0, 0))\n .withContent(content)\n .withCommentEnd(formatToken(commentEnd, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLProcessingInstruction transform(XMLProcessingInstruction xMLProcessingInstruction) {\n Token piStart = getToken(xMLProcessingInstruction.piStart());\n 
XMLNameNode target = this.modifyNode(xMLProcessingInstruction.target());\n NodeList data = this.modifyNodeList(xMLProcessingInstruction.data());\n Token piEnd = getToken(xMLProcessingInstruction.piEnd());\n return xMLProcessingInstruction.modify()\n .withTarget(target)\n .withPiStart(formatToken(piStart, 0, 0, 0, 0))\n .withData(data)\n .withPiEnd(formatToken(piEnd, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLFilterExpressionNode transform(XMLFilterExpressionNode xMLFilterExpressionNode) {\n ExpressionNode expression = this.modifyNode(xMLFilterExpressionNode.expression());\n XMLNamePatternChainingNode xmlPatternChain = this.modifyNode(xMLFilterExpressionNode.xmlPatternChain());\n return xMLFilterExpressionNode.modify()\n .withExpression(expression)\n .withXmlPatternChain(xmlPatternChain)\n .apply();\n }\n\n @Override\n public XMLStepExpressionNode transform(XMLStepExpressionNode xMLStepExpressionNode) {\n ExpressionNode expression = this.modifyNode(xMLStepExpressionNode.expression());\n Node xmlStepStart = this.modifyNode(xMLStepExpressionNode.xmlStepStart());\n return xMLStepExpressionNode.modify()\n .withExpression(expression)\n .withXmlStepStart(xmlStepStart)\n .apply();\n }\n\n @Override\n public XMLNamePatternChainingNode transform(XMLNamePatternChainingNode xMLNamePatternChainingNode) {\n Token startToken = getToken(xMLNamePatternChainingNode.startToken());\n SeparatedNodeList xmlNamePattern = modifySeparatedNodeList(xMLNamePatternChainingNode.xmlNamePattern());\n Token gtToken = getToken(xMLNamePatternChainingNode.gtToken());\n return xMLNamePatternChainingNode.modify()\n .withStartToken(formatToken(startToken, 0, 0, 0, 0))\n .withXmlNamePattern(xmlNamePattern)\n .withGtToken(formatToken(gtToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLAtomicNamePatternNode transform(XMLAtomicNamePatternNode xMLAtomicNamePatternNode) {\n Token prefix = getToken(xMLAtomicNamePatternNode.prefix());\n Token colon = 
getToken(xMLAtomicNamePatternNode.colon());\n Token name = getToken(xMLAtomicNamePatternNode.name());\n return xMLAtomicNamePatternNode.modify()\n .withPrefix(formatToken(prefix, 0, 0, 0, 0))\n .withColon(formatToken(colon, 0, 0, 0, 0))\n .withName(formatToken(name, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TemplateExpressionNode transform(TemplateExpressionNode templateExpressionNode) {\n Token type = getToken(templateExpressionNode.type().orElse(null));\n Token startBacktick = getToken(templateExpressionNode.startBacktick());\n NodeList content = modifyNodeList(templateExpressionNode.content());\n Token endBacktick = getToken(templateExpressionNode.endBacktick());\n return templateExpressionNode.modify()\n .withStartBacktick(formatToken(startBacktick, 1, 0, 0, 0))\n .withContent(content)\n .withType(formatToken(type, 0, 0, 0, 0))\n .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ByteArrayLiteralNode transform(ByteArrayLiteralNode byteArrayLiteralNode) {\n Token type = getToken(byteArrayLiteralNode.type());\n Token startBacktick = getToken(byteArrayLiteralNode.startBacktick());\n Token content = getToken(byteArrayLiteralNode.content().orElse(null));\n Token endBacktick = getToken(byteArrayLiteralNode.endBacktick());\n if (content != null) {\n byteArrayLiteralNode = byteArrayLiteralNode.modify()\n .withContent(formatToken(content, 0, 0, 0, 0)).apply();\n }\n return byteArrayLiteralNode.modify()\n .withType(formatToken(type, 0, 0, 0, 0))\n .withStartBacktick(formatToken(startBacktick, 0, 0, 0, 0))\n .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ListConstructorExpressionNode transform(ListConstructorExpressionNode listConstructorExpressionNode) {\n Token openBracket = getToken(listConstructorExpressionNode.openBracket());\n SeparatedNodeList expressions = this.modifySeparatedNodeList(listConstructorExpressionNode.expressions());\n Token closeBracket = 
getToken(listConstructorExpressionNode.closeBracket());\n return listConstructorExpressionNode.modify()\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withExpressions(expressions)\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TypeReferenceNode transform(TypeReferenceNode typeReferenceNode) {\n Token asteriskToken = getToken(typeReferenceNode.asteriskToken());\n Node typeName = this.modifyNode(typeReferenceNode.typeName());\n Token semicolonToken = getToken(typeReferenceNode.semicolonToken());\n return typeReferenceNode.modify()\n .withTypeName(typeName)\n .withAsteriskToken(formatToken(asteriskToken, 0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TupleTypeDescriptorNode transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) {\n int startCol = getStartColumn(tupleTypeDescriptorNode, tupleTypeDescriptorNode.kind(), true);\n Token openBracketToken = getToken(tupleTypeDescriptorNode.openBracketToken());\n SeparatedNodeList memberTypeDesc = this.modifySeparatedNodeList(tupleTypeDescriptorNode.memberTypeDesc());\n Token closeBracketToken = getToken(tupleTypeDescriptorNode.closeBracketToken());\n return tupleTypeDescriptorNode.modify()\n .withOpenBracketToken(formatToken(openBracketToken, startCol, 0, 0, 0))\n .withMemberTypeDesc(memberTypeDesc)\n .withCloseBracketToken(formatToken(closeBracketToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public MappingMatchPatternNode transform(MappingMatchPatternNode mappingMatchPatternNode) {\n Token openBraceToken = getToken(mappingMatchPatternNode.openBraceToken());\n SeparatedNodeList fieldMatchPatterns =\n this.modifySeparatedNodeList(mappingMatchPatternNode.fieldMatchPatterns());\n RestMatchPatternNode restMatchPattern =\n this.modifyNode(mappingMatchPatternNode.restMatchPattern().orElse(null));\n Token closeBraceToken = getToken(mappingMatchPatternNode.closeBraceToken());\n if 
(restMatchPattern != null) {\n mappingMatchPatternNode = mappingMatchPatternNode.modify()\n .withRestMatchPattern(restMatchPattern).apply();\n }\n return mappingMatchPatternNode.modify()\n .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0))\n .withFieldMatchPatterns(fieldMatchPatterns)\n .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ParameterizedTypeDescriptorNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescriptorNode) {\n int startCol = getStartColumn(parameterizedTypeDescriptorNode, parameterizedTypeDescriptorNode.kind(), true);\n Token parameterizedType = getToken(parameterizedTypeDescriptorNode.parameterizedType());\n TypeParameterNode typeParameter = this.modifyNode(parameterizedTypeDescriptorNode.typeParameter());\n return parameterizedTypeDescriptorNode.modify()\n .withParameterizedType(formatToken(parameterizedType, startCol, 0, 0, 0))\n .withTypeParameter(typeParameter)\n .apply();\n }\n\n @Override\n public TypeParameterNode transform(TypeParameterNode typeParameterNode) {\n Token ltToken = getToken(typeParameterNode.ltToken());\n TypeDescriptorNode typeNode = this.modifyNode(typeParameterNode.typeNode());\n Token gtToken = getToken(typeParameterNode.gtToken());\n return typeParameterNode.modify()\n .withTypeNode(typeNode)\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withGtToken(formatToken(gtToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public StartActionNode transform(StartActionNode startActionNode) {\n if (!isInLineRange(startActionNode)) {\n return startActionNode;\n }\n NodeList annotations = this.modifyNodeList(startActionNode.annotations());\n Token startKeyword = getToken(startActionNode.startKeyword());\n ExpressionNode expression = this.modifyNode(startActionNode.expression());\n return startActionNode.modify()\n .withAnnotations(annotations)\n .withStartKeyword(formatToken(startKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n 
@Override\n public FlushActionNode transform(FlushActionNode flushActionNode) {\n if (!isInLineRange(flushActionNode)) {\n return flushActionNode;\n }\n Token flushKeyword = getToken(flushActionNode.flushKeyword());\n NameReferenceNode peerWorker = this.modifyNode(flushActionNode.peerWorker());\n return flushActionNode.modify()\n .withFlushKeyword(formatToken(flushKeyword, 0, 1, 0, 0))\n .withPeerWorker(peerWorker)\n .apply();\n }\n\n @Override\n public NamedWorkerDeclarationNode transform(NamedWorkerDeclarationNode namedWorkerDeclarationNode) {\n if (!isInLineRange(namedWorkerDeclarationNode)) {\n return namedWorkerDeclarationNode;\n }\n NodeList annotations = this.modifyNodeList(namedWorkerDeclarationNode.annotations());\n Token workerKeyword = getToken(namedWorkerDeclarationNode.workerKeyword());\n IdentifierToken workerName = this.modifyNode(namedWorkerDeclarationNode.workerName());\n Node returnTypeDesc =\n this.modifyNode(namedWorkerDeclarationNode.returnTypeDesc().orElse(null));\n BlockStatementNode workerBody = this.modifyNode(namedWorkerDeclarationNode.workerBody());\n if (returnTypeDesc != null) {\n namedWorkerDeclarationNode = namedWorkerDeclarationNode.modify()\n .withReturnTypeDesc(returnTypeDesc).apply();\n }\n return namedWorkerDeclarationNode.modify()\n .withAnnotations(annotations)\n .withWorkerKeyword(formatToken(workerKeyword, 0, 0, 0, 0))\n .withWorkerName(workerName)\n .withWorkerBody(workerBody)\n .apply();\n }\n\n @Override\n public TypeDefinitionNode transform(TypeDefinitionNode typeDefinitionNode) {\n if (!isInLineRange(typeDefinitionNode)) {\n return typeDefinitionNode;\n }\n MetadataNode metadata = this.modifyNode(typeDefinitionNode.metadata().orElse(null));\n Token visibilityQualifier = getToken(typeDefinitionNode.visibilityQualifier().orElse(null));\n Token typeKeyword = getToken(typeDefinitionNode.typeKeyword());\n Token typeName = getToken(typeDefinitionNode.typeName());\n Node typeDescriptor = 
this.modifyNode(typeDefinitionNode.typeDescriptor());\n Token semicolonToken = this.modifyToken(typeDefinitionNode.semicolonToken());\n if (metadata != null) {\n typeDefinitionNode = typeDefinitionNode.modify()\n .withMetadata(metadata).apply();\n }\n if (visibilityQualifier != null) {\n typeDefinitionNode = typeDefinitionNode.modify()\n .withVisibilityQualifier(formatToken(visibilityQualifier, 1, 1, 0, 0)).apply();\n }\n return typeDefinitionNode.modify()\n .withTypeKeyword(formatToken(typeKeyword, 1, 1, 0, 0))\n .withTypeName(formatToken(typeName, 1, 1, 0, 0))\n .withTypeDescriptor(typeDescriptor)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public CompoundAssignmentStatementNode transform(CompoundAssignmentStatementNode compoundAssignmentStatementNode) {\n if (!isInLineRange(compoundAssignmentStatementNode)) {\n return compoundAssignmentStatementNode;\n }\n ExpressionNode lhsExpression = this.modifyNode(compoundAssignmentStatementNode.lhsExpression());\n Token binaryOperator = getToken(compoundAssignmentStatementNode.binaryOperator());\n Token equalsToken = getToken(compoundAssignmentStatementNode.equalsToken());\n ExpressionNode rhsExpression = this.modifyNode(compoundAssignmentStatementNode.rhsExpression());\n Token semicolonToken = getToken(compoundAssignmentStatementNode.semicolonToken());\n return compoundAssignmentStatementNode.modify()\n .withLhsExpression(lhsExpression)\n .withBinaryOperator(formatToken(binaryOperator, 1, 1, 0, 0))\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withRhsExpression(rhsExpression)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public BreakStatementNode transform(BreakStatementNode breakStatementNode) {\n if (!isInLineRange(breakStatementNode)) {\n return breakStatementNode;\n }\n Token breakToken = getToken(breakStatementNode.breakToken());\n Token semicolonToken = getToken(breakStatementNode.semicolonToken());\n 
return breakStatementNode.modify()\n .withBreakToken(formatToken(breakToken, 0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ContinueStatementNode transform(ContinueStatementNode continueStatementNode) {\n if (!isInLineRange(continueStatementNode)) {\n return continueStatementNode;\n }\n Token continueToken = getToken(continueStatementNode.continueToken());\n Token semicolonToken = getToken(continueStatementNode.semicolonToken());\n return continueStatementNode.modify()\n .withContinueToken(formatToken(continueToken, 0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ExternalFunctionBodyNode transform(ExternalFunctionBodyNode externalFunctionBodyNode) {\n if (!isInLineRange(externalFunctionBodyNode)) {\n return externalFunctionBodyNode;\n }\n Token equalsToken = getToken(externalFunctionBodyNode.equalsToken());\n NodeList annotations = this.modifyNodeList(externalFunctionBodyNode.annotations());\n Token externalKeyword = getToken(externalFunctionBodyNode.externalKeyword());\n Token semicolonToken = getToken(externalFunctionBodyNode.semicolonToken());\n return externalFunctionBodyNode.modify()\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withAnnotations(annotations)\n .withExternalKeyword(formatToken(externalKeyword, 1, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 1, 0, 0, 0))\n .apply();\n }\n\n @Override\n public PanicStatementNode transform(PanicStatementNode panicStatementNode) {\n if (!isInLineRange(panicStatementNode)) {\n return panicStatementNode;\n }\n Token panicKeyword = getToken(panicStatementNode.panicKeyword());\n ExpressionNode expression = this.modifyNode(panicStatementNode.expression());\n Token semicolonToken = getToken(panicStatementNode.semicolonToken());\n return panicStatementNode.modify()\n .withPanicKeyword(formatToken(panicKeyword, 1, 1, 0, 0))\n .withExpression(expression)\n 
.withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public LocalTypeDefinitionStatementNode transform(\n LocalTypeDefinitionStatementNode localTypeDefinitionStatementNode) {\n if (!isInLineRange(localTypeDefinitionStatementNode)) {\n return localTypeDefinitionStatementNode;\n }\n NodeList annotations = this.modifyNodeList(localTypeDefinitionStatementNode.annotations());\n Token typeKeyword = getToken(localTypeDefinitionStatementNode.typeKeyword());\n Node typeName = this.modifyNode(localTypeDefinitionStatementNode.typeName());\n Node typeDescriptor = this.modifyNode(localTypeDefinitionStatementNode.typeDescriptor());\n Token semicolonToken = getToken(localTypeDefinitionStatementNode.semicolonToken());\n return localTypeDefinitionStatementNode.modify()\n .withAnnotations(annotations)\n .withTypeKeyword(formatToken(typeKeyword, 0, 1, 0, 0))\n .withTypeName(typeName)\n .withTypeDescriptor(typeDescriptor)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public LockStatementNode transform(LockStatementNode lockStatementNode) {\n if (!isInLineRange(lockStatementNode)) {\n return lockStatementNode;\n }\n Token lockKeyword = getToken(lockStatementNode.lockKeyword());\n StatementNode blockStatement = this.modifyNode(lockStatementNode.blockStatement());\n return lockStatementNode.modify()\n .withLockKeyword(formatToken(lockKeyword, 0, 1, 0, 0))\n .withBlockStatement(blockStatement)\n .apply();\n }\n\n @Override\n public ForkStatementNode transform(ForkStatementNode forkStatementNode) {\n if (!isInLineRange(forkStatementNode)) {\n return forkStatementNode;\n }\n Token forkKeyword = getToken(forkStatementNode.forkKeyword());\n Token openBraceToken = getToken(forkStatementNode.openBraceToken());\n NodeList namedWorkerDeclarations =\n this.modifyNodeList(forkStatementNode.namedWorkerDeclarations());\n Token closeBraceToken = getToken(forkStatementNode.closeBraceToken());\n return 
forkStatementNode.modify()\n .withForkKeyword(formatToken(forkKeyword, 1, 1, 0, 0))\n .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0))\n .withNamedWorkerDeclarations(namedWorkerDeclarations)\n .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ForEachStatementNode transform(ForEachStatementNode forEachStatementNode) {\n if (!isInLineRange(forEachStatementNode)) {\n return forEachStatementNode;\n }\n Token forEachKeyword = getToken(forEachStatementNode.forEachKeyword());\n TypedBindingPatternNode typedBindingPattern = this.modifyNode(forEachStatementNode.typedBindingPattern());\n Token inKeyword = getToken(forEachStatementNode.inKeyword());\n Node actionOrExpressionNode = this.modifyNode(forEachStatementNode.actionOrExpressionNode());\n StatementNode blockStatement = this.modifyNode(forEachStatementNode.blockStatement());\n return forEachStatementNode.modify()\n .withForEachKeyword(formatToken(forEachKeyword, 0, 1, 0, 0))\n .withTypedBindingPattern(typedBindingPattern)\n .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0))\n .withActionOrExpressionNode(actionOrExpressionNode)\n .withBlockStatement(blockStatement)\n .apply();\n }\n\n @Override\n public FailExpressionNode transform(FailExpressionNode failExpressionNode) {\n if (!isInLineRange(failExpressionNode)) {\n return failExpressionNode;\n }\n Token failKeyword = getToken(failExpressionNode.failKeyword());\n ExpressionNode expression = this.modifyNode(failExpressionNode.expression());\n return failExpressionNode.modify()\n .withFailKeyword(formatToken(failKeyword, 0, 0, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public FieldAccessExpressionNode transform(FieldAccessExpressionNode fieldAccessExpressionNode) {\n if (!isInLineRange(fieldAccessExpressionNode)) {\n return fieldAccessExpressionNode;\n }\n ExpressionNode expression = this.modifyNode(fieldAccessExpressionNode.expression());\n Token dotToken = 
getToken(fieldAccessExpressionNode.dotToken());\n NameReferenceNode fieldName = this.modifyNode(fieldAccessExpressionNode.fieldName());\n return fieldAccessExpressionNode.modify()\n .withExpression(expression)\n .withDotToken(formatToken(dotToken, 0, 0, 0, 0))\n .withFieldName(fieldName)\n .apply();\n }\n\n @Override\n public TypeofExpressionNode transform(TypeofExpressionNode typeofExpressionNode) {\n if (!isInLineRange(typeofExpressionNode)) {\n return typeofExpressionNode;\n }\n Token typeofKeyword = getToken(typeofExpressionNode.typeofKeyword());\n ExpressionNode expression = this.modifyNode(typeofExpressionNode.expression());\n return typeofExpressionNode.modify()\n .withTypeofKeyword(formatToken(typeofKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public UnaryExpressionNode transform(UnaryExpressionNode unaryExpressionNode) {\n if (!isInLineRange(unaryExpressionNode)) {\n return unaryExpressionNode;\n }\n Token unaryOperator = getToken(unaryExpressionNode.unaryOperator());\n ExpressionNode expression = this.modifyNode(unaryExpressionNode.expression());\n return unaryExpressionNode.modify()\n .withUnaryOperator(formatToken(unaryOperator, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public ComputedNameFieldNode transform(ComputedNameFieldNode computedNameFieldNode) {\n if (!isInLineRange(computedNameFieldNode)) {\n return computedNameFieldNode;\n }\n Token openBracket = getToken(computedNameFieldNode.openBracket());\n ExpressionNode fieldNameExpr = this.modifyNode(computedNameFieldNode.fieldNameExpr());\n Token closeBracket = getToken(computedNameFieldNode.closeBracket());\n Token colonToken = getToken(computedNameFieldNode.colonToken());\n ExpressionNode valueExpr = this.modifyNode(computedNameFieldNode.valueExpr());\n return computedNameFieldNode.modify()\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withFieldNameExpr(fieldNameExpr)\n .withCloseBracket(formatToken(closeBracket, 
0, 0, 0, 0))\n .withColonToken(formatToken(colonToken, 1, 1, 0, 0))\n .withValueExpr(valueExpr)\n .apply();\n }\n\n @Override\n public DefaultableParameterNode transform(DefaultableParameterNode defaultableParameterNode) {\n if (!isInLineRange(defaultableParameterNode)) {\n return defaultableParameterNode;\n }\n NodeList annotations = this.modifyNodeList(defaultableParameterNode.annotations());\n Node typeName = this.modifyNode(defaultableParameterNode.typeName());\n Token paramName = getToken(defaultableParameterNode.paramName().orElse(null));\n Token equalsToken = getToken(defaultableParameterNode.equalsToken());\n Node expression = this.modifyNode(defaultableParameterNode.expression());\n if (paramName != null) {\n defaultableParameterNode = defaultableParameterNode.modify()\n .withParamName(formatToken(paramName, 1, 1, 0, 0)).apply();\n }\n return defaultableParameterNode.modify()\n .withAnnotations(annotations)\n .withTypeName(typeName)\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public RestParameterNode transform(RestParameterNode restParameterNode) {\n if (!isInLineRange(restParameterNode)) {\n return restParameterNode;\n }\n NodeList annotations = this.modifyNodeList(restParameterNode.annotations());\n Node typeName = this.modifyNode(restParameterNode.typeName());\n Token ellipsisToken = getToken(restParameterNode.ellipsisToken());\n Token paramName = getToken(restParameterNode.paramName().orElse(null));\n if (paramName != null) {\n restParameterNode = restParameterNode.modify()\n .withParamName(formatToken(paramName, 1, 1, 0, 0)).apply();\n }\n return restParameterNode.modify()\n .withAnnotations(annotations)\n .withTypeName(typeName)\n .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public SpreadFieldNode transform(SpreadFieldNode spreadFieldNode) {\n if (!isInLineRange(spreadFieldNode)) {\n return spreadFieldNode;\n }\n Token ellipsis = 
getToken(spreadFieldNode.ellipsis());\n ExpressionNode valueExpr = this.modifyNode(spreadFieldNode.valueExpr());\n return spreadFieldNode.modify()\n .withEllipsis(formatToken(ellipsis, 0, 0, 0, 0))\n .withValueExpr(valueExpr)\n .apply();\n }\n\n @Override\n public NamedArgumentNode transform(NamedArgumentNode namedArgumentNode) {\n if (!isInLineRange(namedArgumentNode)) {\n return namedArgumentNode;\n }\n SimpleNameReferenceNode argumentName = this.modifyNode(namedArgumentNode.argumentName());\n Token equalsToken = getToken(namedArgumentNode.equalsToken());\n ExpressionNode expression = this.modifyNode(namedArgumentNode.expression());\n return namedArgumentNode.modify()\n .withArgumentName(argumentName)\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public RestArgumentNode transform(RestArgumentNode restArgumentNode) {\n if (!isInLineRange(restArgumentNode)) {\n return restArgumentNode;\n }\n Token ellipsis = getToken(restArgumentNode.ellipsis());\n ExpressionNode expression = this.modifyNode(restArgumentNode.expression());\n return restArgumentNode.modify()\n .withEllipsis(formatToken(ellipsis, 0, 0, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public ObjectTypeDescriptorNode transform(ObjectTypeDescriptorNode objectTypeDescriptorNode) {\n if (!isInLineRange(objectTypeDescriptorNode)) {\n return objectTypeDescriptorNode;\n }\n NodeList objectTypeQualifiers = this.modifyNodeList(objectTypeDescriptorNode.objectTypeQualifiers());\n Token objectKeyword = getToken(objectTypeDescriptorNode.objectKeyword());\n Token openBrace = getToken(objectTypeDescriptorNode.openBrace());\n NodeList members = this.modifyNodeList(objectTypeDescriptorNode.members());\n Token closeBrace = getToken(objectTypeDescriptorNode.closeBrace());\n return objectTypeDescriptorNode.modify()\n .withObjectTypeQualifiers(objectTypeQualifiers)\n .withObjectKeyword(formatToken(objectKeyword, 0, 1, 1, 0))\n 
.withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))\n .withMembers(members)\n .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RecordTypeDescriptorNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {\n if (!isInLineRange(recordTypeDescriptorNode)) {\n return recordTypeDescriptorNode;\n }\n Token recordKeyword = getToken(recordTypeDescriptorNode.recordKeyword());\n Token bodyStartDelimiter = getToken(recordTypeDescriptorNode.bodyStartDelimiter());\n NodeList fields = this.modifyNodeList(recordTypeDescriptorNode.fields());\n RecordRestDescriptorNode recordRestDescriptor =\n modifyNode(recordTypeDescriptorNode.recordRestDescriptor().orElse(null));\n Token bodyEndDelimiter = getToken(recordTypeDescriptorNode.bodyEndDelimiter());\n if (recordRestDescriptor != null) {\n recordTypeDescriptorNode = recordTypeDescriptorNode.modify()\n .withRecordRestDescriptor(recordRestDescriptor).apply();\n }\n return recordTypeDescriptorNode.modify()\n .withRecordKeyword(formatToken(recordKeyword, 0, 1, 0, 0))\n .withBodyStartDelimiter(formatToken(bodyStartDelimiter, 0, 0, 0, 0))\n .withFields(fields)\n .withBodyEndDelimiter(formatToken(bodyEndDelimiter, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ObjectFieldNode transform(ObjectFieldNode objectFieldNode) {\n if (!isInLineRange(objectFieldNode)) {\n return objectFieldNode;\n }\n MetadataNode metadata = this.modifyNode(objectFieldNode.metadata().orElse(null));\n Token visibilityQualifier = getToken(objectFieldNode.visibilityQualifier().orElse(null));\n Token readonlyKeyword = getToken(objectFieldNode.readonlyKeyword().orElse(null));\n Node typeName = this.modifyNode(objectFieldNode.typeName());\n Token fieldName = getToken(objectFieldNode.fieldName());\n Token equalsToken = getToken(objectFieldNode.equalsToken().orElse(null));\n ExpressionNode expression = this.modifyNode(objectFieldNode.expression().orElse(null));\n Token semicolonToken = 
getToken(objectFieldNode.semicolonToken());\n if (metadata != null) {\n objectFieldNode = objectFieldNode.modify()\n .withMetadata(metadata).apply();\n }\n if (visibilityQualifier != null) {\n objectFieldNode = objectFieldNode.modify()\n .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 1, 0, 0)).apply();\n }\n if (readonlyKeyword != null) {\n objectFieldNode = objectFieldNode.modify()\n .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();\n }\n return objectFieldNode.modify()\n .withTypeName(typeName)\n .withFieldName(formatToken(fieldName, 1, 1, 0, 0))\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withExpression(expression)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RecordFieldNode transform(RecordFieldNode recordFieldNode) {\n if (!isInLineRange(recordFieldNode)) {\n return recordFieldNode;\n }\n MetadataNode metadata = this.modifyNode(recordFieldNode.metadata().orElse(null));\n Token readonlyKeyword = getToken(recordFieldNode.readonlyKeyword().orElse(null));\n Node typeName = this.modifyNode(recordFieldNode.typeName());\n Token fieldName = getToken(recordFieldNode.fieldName());\n Token questionMarkToken = getToken(recordFieldNode.questionMarkToken().orElse(null));\n Token semicolonToken = getToken(recordFieldNode.semicolonToken());\n if (metadata != null) {\n recordFieldNode = recordFieldNode.modify()\n .withMetadata(metadata).apply();\n }\n if (readonlyKeyword != null) {\n recordFieldNode = recordFieldNode.modify()\n .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();\n }\n if (questionMarkToken != null) {\n recordFieldNode = recordFieldNode.modify()\n .withQuestionMarkToken(formatToken(questionMarkToken, 1, 1, 0, 0)).apply();\n }\n return recordFieldNode.modify()\n .withTypeName(typeName)\n .withFieldName(formatToken(fieldName, 0, 1, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n 
public RecordFieldWithDefaultValueNode transform(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) {\n if (!isInLineRange(recordFieldWithDefaultValueNode)) {\n return recordFieldWithDefaultValueNode;\n }\n MetadataNode metadata = this.modifyNode(recordFieldWithDefaultValueNode.metadata().orElse(null));\n Token readonlyKeyword = getToken(recordFieldWithDefaultValueNode.readonlyKeyword().orElse(null));\n Node typeName = this.modifyNode(recordFieldWithDefaultValueNode.typeName());\n Token fieldName = getToken(recordFieldWithDefaultValueNode.fieldName());\n Token equalsToken = getToken(recordFieldWithDefaultValueNode.equalsToken());\n ExpressionNode expression = this.modifyNode(recordFieldWithDefaultValueNode.expression());\n Token semicolonToken = getToken(recordFieldWithDefaultValueNode.semicolonToken());\n if (metadata != null) {\n recordFieldWithDefaultValueNode = recordFieldWithDefaultValueNode.modify()\n .withMetadata(metadata).apply();\n }\n if (readonlyKeyword != null) {\n recordFieldWithDefaultValueNode = recordFieldWithDefaultValueNode.modify()\n .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();\n }\n return recordFieldWithDefaultValueNode.modify()\n .withTypeName(typeName)\n .withFieldName(formatToken(fieldName, 1, 1, 0, 0))\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withExpression(expression)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RecordRestDescriptorNode transform(RecordRestDescriptorNode recordRestDescriptorNode) {\n if (!isInLineRange(recordRestDescriptorNode)) {\n return recordRestDescriptorNode;\n }\n Node typeName = this.modifyNode(recordRestDescriptorNode.typeName());\n Token ellipsisToken = getToken(recordRestDescriptorNode.ellipsisToken());\n Token semicolonToken = getToken(recordRestDescriptorNode.semicolonToken());\n return recordRestDescriptorNode.modify()\n .withTypeName(typeName)\n .withEllipsisToken(formatToken(ellipsisToken, 
0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public AnnotationNode transform(AnnotationNode annotationNode) {\n if (!isInLineRange(annotationNode)) {\n return annotationNode;\n }\n Token atToken = getToken(annotationNode.atToken());\n Node annotReference = this.modifyNode(annotationNode.annotReference());\n MappingConstructorExpressionNode annotValue = this.modifyNode(annotationNode.annotValue().orElse(null));\n if (annotValue != null) {\n annotationNode = annotationNode.modify()\n .withAnnotValue(annotValue).apply();\n }\n return annotationNode.modify()\n .withAtToken(formatToken(atToken, 1, 1, 0, 0))\n .withAnnotReference(annotReference)\n .apply();\n }\n\n @Override\n public AnnotationDeclarationNode transform(AnnotationDeclarationNode annotationDeclarationNode) {\n if (!isInLineRange(annotationDeclarationNode)) {\n return annotationDeclarationNode;\n }\n MetadataNode metadata = this.modifyNode(annotationDeclarationNode.metadata().orElse(null));\n Token visibilityQualifier = getToken(annotationDeclarationNode.visibilityQualifier());\n Token constKeyword = getToken(annotationDeclarationNode.constKeyword());\n Token annotationKeyword = getToken(annotationDeclarationNode.annotationKeyword());\n Node typeDescriptor = this.modifyNode(annotationDeclarationNode.typeDescriptor());\n Token annotationTag = getToken(annotationDeclarationNode.annotationTag());\n Token onKeyword = getToken(annotationDeclarationNode.onKeyword());\n SeparatedNodeList attachPoints = this.modifySeparatedNodeList(annotationDeclarationNode.attachPoints());\n Token semicolonToken = getToken(annotationDeclarationNode.semicolonToken());\n if (metadata != null) {\n annotationDeclarationNode = annotationDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return annotationDeclarationNode.modify()\n .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 1, 0, 0))\n .withConstKeyword(formatToken(constKeyword, 1, 1, 0, 0))\n 
.withAnnotationKeyword(formatToken(annotationKeyword, 0, 0, 0, 0))\n .withTypeDescriptor(typeDescriptor)\n .withAnnotationTag(formatToken(annotationTag, 0, 0, 0, 0))\n .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0))\n .withAttachPoints(attachPoints)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public AnnotationAttachPointNode transform(AnnotationAttachPointNode annotationAttachPointNode) {\n if (!isInLineRange(annotationAttachPointNode)) {\n return annotationAttachPointNode;\n }\n Token sourceKeyword = getToken(annotationAttachPointNode.sourceKeyword());\n Token firstIdent = getToken(annotationAttachPointNode.firstIdent());\n Token secondIdent = getToken(annotationAttachPointNode.secondIdent());\n return annotationAttachPointNode.modify()\n .withSourceKeyword(formatToken(sourceKeyword, 0, 1, 0, 0))\n .withFirstIdent(formatToken(firstIdent, 0, 0, 0, 0))\n .withSecondIdent(formatToken(secondIdent, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public NamedWorkerDeclarator transform(NamedWorkerDeclarator namedWorkerDeclarator) {\n if (!isInLineRange(namedWorkerDeclarator)) {\n return namedWorkerDeclarator;\n }\n NodeList workerInitStatements =\n this.modifyNodeList(namedWorkerDeclarator.workerInitStatements());\n NodeList namedWorkerDeclarations =\n this.modifyNodeList(namedWorkerDeclarator.namedWorkerDeclarations());\n return namedWorkerDeclarator.modify()\n .withNamedWorkerDeclarations(namedWorkerDeclarations)\n .withWorkerInitStatements(workerInitStatements)\n .apply();\n }\n\n @Override\n public TrapExpressionNode transform(TrapExpressionNode trapExpressionNode) {\n if (!isInLineRange(trapExpressionNode)) {\n return trapExpressionNode;\n }\n Token trapKeyword = getToken(trapExpressionNode.trapKeyword());\n ExpressionNode expression = this.modifyNode(trapExpressionNode.expression());\n return trapExpressionNode.modify()\n .withTrapKeyword(formatToken(trapKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n 
.apply();\n }\n\n @Override\n public TypeCastExpressionNode transform(TypeCastExpressionNode typeCastExpressionNode) {\n if (!isInLineRange(typeCastExpressionNode)) {\n return typeCastExpressionNode;\n }\n Token ltToken = getToken(typeCastExpressionNode.ltToken());\n TypeCastParamNode typeCastParam = this.modifyNode(typeCastExpressionNode.typeCastParam());\n Token gtToken = getToken(typeCastExpressionNode.gtToken());\n ExpressionNode expression = this.modifyNode(typeCastExpressionNode.expression());\n return typeCastExpressionNode.modify()\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withTypeCastParam(typeCastParam)\n .withGtToken(formatToken(gtToken, 0, 0, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public TypeCastParamNode transform(TypeCastParamNode typeCastParamNode) {\n if (!isInLineRange(typeCastParamNode)) {\n return typeCastParamNode;\n }\n NodeList annotations = this.modifyNodeList(typeCastParamNode.annotations());\n Node type = this.modifyNode(typeCastParamNode.type());\n return typeCastParamNode.modify()\n .withAnnotations(annotations)\n .withType(type)\n .apply();\n }\n\n @Override\n public TableConstructorExpressionNode transform(TableConstructorExpressionNode tableConstructorExpressionNode) {\n if (!isInLineRange(tableConstructorExpressionNode)) {\n return tableConstructorExpressionNode;\n }\n Token tableKeyword = getToken(tableConstructorExpressionNode.tableKeyword());\n KeySpecifierNode keySpecifier = this.modifyNode(tableConstructorExpressionNode.keySpecifier().orElse(null));\n Token openBracket = getToken(tableConstructorExpressionNode.openBracket());\n SeparatedNodeList mappingConstructors =\n this.modifySeparatedNodeList(tableConstructorExpressionNode.mappingConstructors());\n Token closeBracket = this.modifyToken(tableConstructorExpressionNode.closeBracket());\n return tableConstructorExpressionNode.modify()\n .withTableKeyword(formatToken(tableKeyword, 0, 1, 0, 0))\n .withKeySpecifier(keySpecifier)\n 
.withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withMappingConstructors(mappingConstructors)\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public KeySpecifierNode transform(KeySpecifierNode keySpecifierNode) {\n if (!isInLineRange(keySpecifierNode)) {\n return keySpecifierNode;\n }\n Token keyKeyword = getToken(keySpecifierNode.keyKeyword());\n Token openParenToken = getToken(keySpecifierNode.openParenToken());\n SeparatedNodeList fieldNames = this.modifySeparatedNodeList(keySpecifierNode.fieldNames());\n Token closeParenToken = getToken(keySpecifierNode.closeParenToken());\n return keySpecifierNode.modify()\n .withKeyKeyword(formatToken(keyKeyword, 0, 1, 0, 0))\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withFieldNames(fieldNames)\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ErrorTypeParamsNode transform(ErrorTypeParamsNode errorTypeParamsNode) {\n if (!isInLineRange(errorTypeParamsNode)) {\n return errorTypeParamsNode;\n }\n Token ltToken = getToken(errorTypeParamsNode.ltToken());\n Node parameter = this.modifyNode(errorTypeParamsNode.parameter());\n Token gtToken = getToken(errorTypeParamsNode.gtToken());\n return errorTypeParamsNode.modify()\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withParameter(parameter)\n .withGtToken(formatToken(gtToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public StreamTypeDescriptorNode transform(StreamTypeDescriptorNode streamTypeDescriptorNode) {\n if (!isInLineRange(streamTypeDescriptorNode)) {\n return streamTypeDescriptorNode;\n }\n Token streamKeywordToken = getToken(streamTypeDescriptorNode.streamKeywordToken());\n Node streamTypeParamsNode = this.modifyNode(streamTypeDescriptorNode.streamTypeParamsNode().orElse(null));\n if (streamTypeParamsNode != null) {\n streamTypeDescriptorNode = streamTypeDescriptorNode.modify()\n .withStreamTypeParamsNode(streamTypeParamsNode).apply();\n 
}\n return streamTypeDescriptorNode.modify()\n .withStreamKeywordToken(formatToken(streamKeywordToken, 0, 1, 0, 0))\n .apply();\n }\n\n @Override\n public StreamTypeParamsNode transform(StreamTypeParamsNode streamTypeParamsNode) {\n if (!isInLineRange(streamTypeParamsNode)) {\n return streamTypeParamsNode;\n }\n Token ltToken = getToken(streamTypeParamsNode.ltToken());\n Node leftTypeDescNode = this.modifyNode(streamTypeParamsNode.leftTypeDescNode());\n Token commaToken = getToken(streamTypeParamsNode.commaToken().orElse(null));\n Node rightTypeDescNode = this.modifyNode(streamTypeParamsNode.rightTypeDescNode().orElse(null));\n Token gtToken = getToken(streamTypeParamsNode.gtToken());\n if (commaToken != null) {\n streamTypeParamsNode = streamTypeParamsNode.modify()\n .withCommaToken(formatToken(commaToken, 0, 1, 0, 0)).apply();\n }\n if (rightTypeDescNode != null) {\n streamTypeParamsNode = streamTypeParamsNode.modify()\n .withRightTypeDescNode(rightTypeDescNode).apply();\n }\n return streamTypeParamsNode.modify()\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withLeftTypeDescNode(leftTypeDescNode)\n .withGtToken(formatToken(gtToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TypedescTypeDescriptorNode transform(TypedescTypeDescriptorNode typedescTypeDescriptorNode) {\n if (!isInLineRange(typedescTypeDescriptorNode)) {\n return typedescTypeDescriptorNode;\n }\n Token typedescKeywordToken = this.modifyToken(typedescTypeDescriptorNode.typedescKeywordToken());\n TypeParameterNode typedescTypeParamsNode =\n this.modifyNode(typedescTypeDescriptorNode.typedescTypeParamsNode().orElse(null));\n if (typedescTypeParamsNode != null) {\n typedescTypeDescriptorNode = typedescTypeDescriptorNode.modify()\n .withTypedescTypeParamsNode(typedescTypeParamsNode).apply();\n }\n return typedescTypeDescriptorNode.modify()\n .withTypedescKeywordToken(formatToken(typedescKeywordToken, 0, 1, 0, 0))\n .apply();\n }\n\n @Override\n public LetExpressionNode 
transform(LetExpressionNode letExpressionNode) {\n if (!isInLineRange(letExpressionNode)) {\n return letExpressionNode;\n }\n Token letKeyword = getToken(letExpressionNode.letKeyword());\n SeparatedNodeList letVarDeclarations =\n this.modifySeparatedNodeList(letExpressionNode.letVarDeclarations());\n Token inKeyword = getToken(letExpressionNode.inKeyword());\n ExpressionNode expression = this.modifyNode(letExpressionNode.expression());\n return letExpressionNode.modify()\n .withLetKeyword(formatToken(letKeyword, 0, 1, 0, 0))\n .withLetVarDeclarations(letVarDeclarations)\n .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public LetVariableDeclarationNode transform(LetVariableDeclarationNode letVariableDeclarationNode) {\n if (!isInLineRange(letVariableDeclarationNode)) {\n return letVariableDeclarationNode;\n }\n NodeList annotations = this.modifyNodeList(letVariableDeclarationNode.annotations());\n TypedBindingPatternNode typedBindingPattern = this.modifyNode(letVariableDeclarationNode.typedBindingPattern());\n Token equalsToken = getToken(letVariableDeclarationNode.equalsToken());\n ExpressionNode expression = this.modifyNode(letVariableDeclarationNode.expression());\n return letVariableDeclarationNode.modify()\n .withAnnotations(annotations)\n .withTypedBindingPattern(typedBindingPattern)\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public InterpolationNode transform(InterpolationNode interpolationNode) {\n if (!isInLineRange(interpolationNode)) {\n return interpolationNode;\n }\n Token interpolationStartToken = getToken(interpolationNode.interpolationStartToken());\n ExpressionNode expression = this.modifyNode(interpolationNode.expression());\n Token interpolationEndToken = getToken(interpolationNode.interpolationEndToken());\n return interpolationNode.modify()\n 
.withInterpolationStartToken(formatToken(interpolationStartToken, 0, 0, 0, 0))\n .withExpression(expression)\n .withInterpolationEndToken(formatToken(interpolationEndToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TableTypeDescriptorNode transform(TableTypeDescriptorNode tableTypeDescriptorNode) {\n if (!isInLineRange(tableTypeDescriptorNode)) {\n return tableTypeDescriptorNode;\n }\n Token tableKeywordToken = getToken(tableTypeDescriptorNode.tableKeywordToken());\n Node rowTypeParameterNode = this.modifyNode(tableTypeDescriptorNode.rowTypeParameterNode());\n Node keyConstraintNode = this.modifyNode(tableTypeDescriptorNode.keyConstraintNode());\n return tableTypeDescriptorNode.modify()\n .withTableKeywordToken(formatToken(tableKeywordToken, 0, 1, 0, 0))\n .withRowTypeParameterNode(rowTypeParameterNode)\n .withKeyConstraintNode(keyConstraintNode)\n .apply();\n }\n\n @Override\n public KeyTypeConstraintNode transform(KeyTypeConstraintNode keyTypeConstraintNode) {\n if (!isInLineRange(keyTypeConstraintNode)) {\n return keyTypeConstraintNode;\n }\n Token keyKeywordToken = getToken(keyTypeConstraintNode.keyKeywordToken());\n Node typeParameterNode = this.modifyNode(keyTypeConstraintNode.typeParameterNode());\n return keyTypeConstraintNode.modify()\n .withKeyKeywordToken(formatToken(keyKeywordToken, 0, 1, 0, 0))\n .withTypeParameterNode(typeParameterNode)\n .apply();\n }\n\n @Override\n public FunctionTypeDescriptorNode transform(FunctionTypeDescriptorNode functionTypeDescriptorNode) {\n if (!isInLineRange(functionTypeDescriptorNode)) {\n return functionTypeDescriptorNode;\n }\n Token functionKeyword = getToken(functionTypeDescriptorNode.functionKeyword());\n FunctionSignatureNode functionSignature = this.modifyNode(functionTypeDescriptorNode.functionSignature());\n return functionTypeDescriptorNode.modify()\n .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0))\n .withFunctionSignature(functionSignature)\n .apply();\n }\n\n @Override\n public 
ExplicitAnonymousFunctionExpressionNode transform(\n ExplicitAnonymousFunctionExpressionNode explicitAnonymousFunctionExpressionNode) {\n if (!isInLineRange(explicitAnonymousFunctionExpressionNode)) {\n return explicitAnonymousFunctionExpressionNode;\n }\n NodeList annotations =\n this.modifyNodeList(explicitAnonymousFunctionExpressionNode.annotations());\n Token functionKeyword = getToken(explicitAnonymousFunctionExpressionNode.functionKeyword());\n FunctionSignatureNode functionSignature =\n this.modifyNode(explicitAnonymousFunctionExpressionNode.functionSignature());\n FunctionBodyNode functionBody = this.modifyNode(explicitAnonymousFunctionExpressionNode.functionBody());\n return explicitAnonymousFunctionExpressionNode.modify()\n .withAnnotations(annotations)\n .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0))\n .withFunctionSignature(functionSignature)\n .withFunctionBody(functionBody)\n .apply();\n }\n\n @Override\n public ExpressionFunctionBodyNode transform(ExpressionFunctionBodyNode expressionFunctionBodyNode) {\n if (!isInLineRange(expressionFunctionBodyNode)) {\n return expressionFunctionBodyNode;\n }\n Token rightDoubleArrow = getToken(expressionFunctionBodyNode.rightDoubleArrow());\n ExpressionNode expression = this.modifyNode(expressionFunctionBodyNode.expression());\n Token semicolon = this.modifyToken(expressionFunctionBodyNode.semicolon().orElse(null));\n if (semicolon != null) {\n expressionFunctionBodyNode = expressionFunctionBodyNode.modify()\n .withSemicolon(formatToken(semicolon, 0, 0, 0, 0)).apply();\n }\n return expressionFunctionBodyNode.modify()\n .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public ParenthesisedTypeDescriptorNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) {\n if (!isInLineRange(parenthesisedTypeDescriptorNode)) {\n return parenthesisedTypeDescriptorNode;\n }\n Token openParenToken = 
getToken(parenthesisedTypeDescriptorNode.openParenToken());\n TypeDescriptorNode typedesc = this.modifyNode(parenthesisedTypeDescriptorNode.typedesc());\n Token closeParenToken = getToken(parenthesisedTypeDescriptorNode.closeParenToken());\n return parenthesisedTypeDescriptorNode.modify()\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withTypedesc(typedesc)\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ImplicitNewExpressionNode transform(ImplicitNewExpressionNode implicitNewExpressionNode) {\n if (!isInLineRange(implicitNewExpressionNode)) {\n return implicitNewExpressionNode;\n }\n Token newKeyword = getToken(implicitNewExpressionNode.newKeyword());\n ParenthesizedArgList parenthesizedArgList =\n this.modifyNode(implicitNewExpressionNode.parenthesizedArgList().orElse(null));\n if (parenthesizedArgList != null) {\n implicitNewExpressionNode = implicitNewExpressionNode.modify()\n .withParenthesizedArgList(parenthesizedArgList).apply();\n }\n return implicitNewExpressionNode.modify()\n .withNewKeyword(formatToken(newKeyword, 0, 1, 0, 0))\n .apply();\n }\n\n @Override\n public QueryConstructTypeNode transform(QueryConstructTypeNode queryConstructTypeNode) {\n if (!isInLineRange(queryConstructTypeNode)) {\n return queryConstructTypeNode;\n }\n Token keyword = getToken(queryConstructTypeNode.keyword());\n KeySpecifierNode keySpecifier = this.modifyNode(queryConstructTypeNode.keySpecifier().orElse(null));\n if (keySpecifier != null) {\n queryConstructTypeNode = queryConstructTypeNode.modify()\n .withKeySpecifier(keySpecifier).apply();\n }\n return queryConstructTypeNode.modify()\n .withKeyword(formatToken(keyword, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public FromClauseNode transform(FromClauseNode fromClauseNode) {\n if (!isInLineRange(fromClauseNode)) {\n return fromClauseNode;\n }\n Token fromKeyword = getToken(fromClauseNode.fromKeyword());\n TypedBindingPatternNode typedBindingPattern 
= this.modifyNode(fromClauseNode.typedBindingPattern());\n Token inKeyword = getToken(fromClauseNode.inKeyword());\n ExpressionNode expression = this.modifyNode(fromClauseNode.expression());\n return fromClauseNode.modify()\n .withFromKeyword(formatToken(fromKeyword, 1, 1, 0, 0))\n .withTypedBindingPattern(typedBindingPattern)\n .withInKeyword(formatToken(inKeyword, 0, 0, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public WhereClauseNode transform(WhereClauseNode whereClauseNode) {\n if (!isInLineRange(whereClauseNode)) {\n return whereClauseNode;\n }\n Token whereKeyword = getToken(whereClauseNode.whereKeyword());\n ExpressionNode expression = this.modifyNode(whereClauseNode.expression());\n return whereClauseNode.modify()\n .withWhereKeyword(formatToken(whereKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public LetClauseNode transform(LetClauseNode letClauseNode) {\n if (!isInLineRange(letClauseNode)) {\n return letClauseNode;\n }\n Token letKeyword = getToken(letClauseNode.letKeyword());\n SeparatedNodeList letVarDeclarations =\n this.modifySeparatedNodeList(letClauseNode.letVarDeclarations());\n return letClauseNode.modify()\n .withLetKeyword(formatToken(letKeyword, 0, 1, 0, 0))\n .withLetVarDeclarations(letVarDeclarations)\n .apply();\n }\n\n @Override\n public QueryPipelineNode transform(QueryPipelineNode queryPipelineNode) {\n if (!isInLineRange(queryPipelineNode)) {\n return queryPipelineNode;\n }\n FromClauseNode fromClause = this.modifyNode(queryPipelineNode.fromClause());\n NodeList intermediateClauses = this.modifyNodeList(queryPipelineNode.intermediateClauses());\n return queryPipelineNode.modify()\n .withFromClause(fromClause)\n .withIntermediateClauses(intermediateClauses)\n .apply();\n }\n\n @Override\n public SelectClauseNode transform(SelectClauseNode selectClauseNode) {\n if (!isInLineRange(selectClauseNode)) {\n return selectClauseNode;\n }\n Token selectKeyword = 
getToken(selectClauseNode.selectKeyword());\n ExpressionNode expression = this.modifyNode(selectClauseNode.expression());\n return selectClauseNode.modify()\n .withSelectKeyword(formatToken(selectKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public QueryExpressionNode transform(QueryExpressionNode queryExpressionNode) {\n if (!isInLineRange(queryExpressionNode)) {\n return queryExpressionNode;\n }\n QueryConstructTypeNode queryConstructType =\n this.modifyNode(queryExpressionNode.queryConstructType().orElse(null));\n QueryPipelineNode queryPipeline = this.modifyNode(queryExpressionNode.queryPipeline());\n SelectClauseNode selectClause = this.modifyNode(queryExpressionNode.selectClause());\n OnConflictClauseNode onConflictClause = this.modifyNode(queryExpressionNode.onConflictClause().orElse(null));\n LimitClauseNode limitClause = this.modifyNode(queryExpressionNode.limitClause().orElse(null));\n if (queryConstructType != null) {\n queryExpressionNode = queryExpressionNode.modify()\n .withQueryConstructType(queryConstructType).apply();\n }\n if (onConflictClause != null) {\n queryExpressionNode = queryExpressionNode.modify()\n .withOnConflictClause(onConflictClause).apply();\n }\n if (limitClause != null) {\n queryExpressionNode = queryExpressionNode.modify()\n .withLimitClause(limitClause).apply();\n }\n return queryExpressionNode.modify()\n .withQueryPipeline(queryPipeline)\n .withSelectClause(selectClause)\n .apply();\n }\n\n @Override\n public IntersectionTypeDescriptorNode transform(IntersectionTypeDescriptorNode intersectionTypeDescriptorNode) {\n if (!isInLineRange(intersectionTypeDescriptorNode)) {\n return intersectionTypeDescriptorNode;\n }\n Node leftTypeDesc = this.modifyNode(intersectionTypeDescriptorNode.leftTypeDesc());\n Token bitwiseAndToken = getToken(intersectionTypeDescriptorNode.bitwiseAndToken());\n Node rightTypeDesc = this.modifyNode(intersectionTypeDescriptorNode.rightTypeDesc());\n return 
intersectionTypeDescriptorNode.modify()\n .withLeftTypeDesc(leftTypeDesc)\n .withBitwiseAndToken(formatToken(bitwiseAndToken, 1, 1, 0, 0))\n .withRightTypeDesc(rightTypeDesc)\n .apply();\n }\n\n @Override\n public ImplicitAnonymousFunctionParameters transform(\n ImplicitAnonymousFunctionParameters implicitAnonymousFunctionParameters) {\n if (!isInLineRange(implicitAnonymousFunctionParameters)) {\n return implicitAnonymousFunctionParameters;\n }\n Token openParenToken = getToken(implicitAnonymousFunctionParameters.openParenToken());\n SeparatedNodeList parameters =\n this.modifySeparatedNodeList(implicitAnonymousFunctionParameters.parameters());\n Token closeParenToken = getToken(implicitAnonymousFunctionParameters.closeParenToken());\n return implicitAnonymousFunctionParameters.modify()\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withParameters(parameters)\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ImplicitAnonymousFunctionExpressionNode transform(\n ImplicitAnonymousFunctionExpressionNode implicitAnonymousFunctionExpressionNode) {\n if (!isInLineRange(implicitAnonymousFunctionExpressionNode)) {\n return implicitAnonymousFunctionExpressionNode;\n }\n Node params = this.modifyNode(implicitAnonymousFunctionExpressionNode.params());\n Token rightDoubleArrow = getToken(implicitAnonymousFunctionExpressionNode.rightDoubleArrow());\n ExpressionNode expression = this.modifyNode(implicitAnonymousFunctionExpressionNode.expression());\n return implicitAnonymousFunctionExpressionNode.modify()\n .withParams(params)\n .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public SingletonTypeDescriptorNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {\n if (!isInLineRange(singletonTypeDescriptorNode)) {\n return singletonTypeDescriptorNode;\n }\n ExpressionNode simpleContExprNode = 
this.modifyNode(singletonTypeDescriptorNode.simpleContExprNode());\n return singletonTypeDescriptorNode.modify()\n .withSimpleContExprNode(simpleContExprNode)\n .apply();\n }\n\n @Override\n public MethodDeclarationNode transform(MethodDeclarationNode methodDeclarationNode) {\n if (!isInLineRange(methodDeclarationNode)) {\n return methodDeclarationNode;\n }\n MetadataNode metadata = this.modifyNode(methodDeclarationNode.metadata().orElse(null));\n NodeList qualifierList = this.modifyNodeList(methodDeclarationNode.qualifierList());\n Token functionKeyword = getToken(methodDeclarationNode.functionKeyword());\n IdentifierToken methodName = this.modifyNode(methodDeclarationNode.methodName());\n FunctionSignatureNode methodSignature = this.modifyNode(methodDeclarationNode.methodSignature());\n Token semicolon = getToken(methodDeclarationNode.semicolon());\n if (metadata != null) {\n methodDeclarationNode = methodDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return methodDeclarationNode.modify()\n .withQualifierList(qualifierList)\n .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0))\n .withMethodName(methodName)\n .withMethodSignature(methodSignature)\n .withSemicolon(formatToken(semicolon, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public WildcardBindingPatternNode transform(WildcardBindingPatternNode wildcardBindingPatternNode) {\n if (!isInLineRange(wildcardBindingPatternNode)) {\n return wildcardBindingPatternNode;\n }\n Token underscoreToken = getToken(wildcardBindingPatternNode.underscoreToken());\n return wildcardBindingPatternNode.modify()\n .withUnderscoreToken(formatToken(underscoreToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ErrorBindingPatternNode transform(ErrorBindingPatternNode errorBindingPatternNode) {\n if (!isInLineRange(errorBindingPatternNode)) {\n return errorBindingPatternNode;\n }\n Token errorKeyword = getToken(errorBindingPatternNode.errorKeyword());\n Node typeReference = 
this.modifyNode(errorBindingPatternNode.typeReference().orElse(null));\n Token openParenthesis = getToken(errorBindingPatternNode.openParenthesis());\n SeparatedNodeList argListBindingPatterns =\n this.modifySeparatedNodeList(errorBindingPatternNode.argListBindingPatterns());\n Token closeParenthesis = getToken(errorBindingPatternNode.closeParenthesis());\n return errorBindingPatternNode.modify()\n .withErrorKeyword(formatToken(errorKeyword, 0, 1, 0, 0))\n .withTypeReference(typeReference)\n .withOpenParenthesis(formatToken(openParenthesis, 0, 0, 0, 0))\n .withArgListBindingPatterns(argListBindingPatterns)\n .withCloseParenthesis(formatToken(closeParenthesis, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public NamedArgBindingPatternNode transform(NamedArgBindingPatternNode namedArgBindingPatternNode) {\n if (!isInLineRange(namedArgBindingPatternNode)) {\n return namedArgBindingPatternNode;\n }\n IdentifierToken argName = this.modifyNode(namedArgBindingPatternNode.argName());\n Token equalsToken = getToken(namedArgBindingPatternNode.equalsToken());\n BindingPatternNode bindingPattern = this.modifyNode(namedArgBindingPatternNode.bindingPattern());\n return namedArgBindingPatternNode.modify()\n .withArgName(argName)\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withBindingPattern(bindingPattern)\n .apply();\n }\n\n @Override\n public AsyncSendActionNode transform(AsyncSendActionNode asyncSendActionNode) {\n if (!isInLineRange(asyncSendActionNode)) {\n return asyncSendActionNode;\n }\n ExpressionNode expression = this.modifyNode(asyncSendActionNode.expression());\n Token rightArrowToken = getToken(asyncSendActionNode.rightArrowToken());\n SimpleNameReferenceNode peerWorker = this.modifyNode(asyncSendActionNode.peerWorker());\n return asyncSendActionNode.modify()\n .withExpression(expression)\n .withRightArrowToken(formatToken(rightArrowToken, 1, 1, 0, 0))\n .withPeerWorker(peerWorker)\n .apply();\n }\n\n @Override\n public SyncSendActionNode 
transform(SyncSendActionNode syncSendActionNode) {\n if (!isInLineRange(syncSendActionNode)) {\n return syncSendActionNode;\n }\n ExpressionNode expression = this.modifyNode(syncSendActionNode.expression());\n Token syncSendToken = getToken(syncSendActionNode.syncSendToken());\n SimpleNameReferenceNode peerWorker = this.modifyNode(syncSendActionNode.peerWorker());\n return syncSendActionNode.modify()\n .withExpression(expression)\n .withSyncSendToken(formatToken(syncSendToken, 1, 1, 0, 0))\n .withPeerWorker(peerWorker)\n .apply();\n }\n\n @Override\n public ReceiveActionNode transform(ReceiveActionNode receiveActionNode) {\n if (!isInLineRange(receiveActionNode)) {\n return receiveActionNode;\n }\n Token leftArrow = getToken(receiveActionNode.leftArrow());\n SimpleNameReferenceNode receiveWorkers = this.modifyNode(receiveActionNode.receiveWorkers());\n return receiveActionNode.modify()\n .withLeftArrow(formatToken(leftArrow, 1, 1, 0, 0))\n .withReceiveWorkers(receiveWorkers)\n .apply();\n }\n\n @Override\n public ReceiveFieldsNode transform(ReceiveFieldsNode receiveFieldsNode) {\n if (!isInLineRange(receiveFieldsNode)) {\n return receiveFieldsNode;\n }\n Token openBrace = getToken(receiveFieldsNode.openBrace());\n SeparatedNodeList receiveFields =\n this.modifySeparatedNodeList(receiveFieldsNode.receiveFields());\n Token closeBrace = getToken(receiveFieldsNode.closeBrace());\n return receiveFieldsNode.modify()\n .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))\n .withReceiveFields(receiveFields)\n .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RestDescriptorNode transform(RestDescriptorNode restDescriptorNode) {\n if (!isInLineRange(restDescriptorNode)) {\n return restDescriptorNode;\n }\n TypeDescriptorNode typeDescriptor = this.modifyNode(restDescriptorNode.typeDescriptor());\n Token ellipsisToken = getToken(restDescriptorNode.ellipsisToken());\n return restDescriptorNode.modify()\n 
.withTypeDescriptor(typeDescriptor)\n .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public DoubleGTTokenNode transform(DoubleGTTokenNode doubleGTTokenNode) {\n if (!isInLineRange(doubleGTTokenNode)) {\n return doubleGTTokenNode;\n }\n Token openGTToken = getToken(doubleGTTokenNode.openGTToken());\n Token endGTToken = getToken(doubleGTTokenNode.endGTToken());\n return doubleGTTokenNode.modify()\n .withOpenGTToken(formatToken(openGTToken, 0, 0, 0, 0))\n .withEndGTToken(formatToken(endGTToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TrippleGTTokenNode transform(TrippleGTTokenNode trippleGTTokenNode) {\n if (!isInLineRange(trippleGTTokenNode)) {\n return trippleGTTokenNode;\n }\n Token openGTToken = getToken(trippleGTTokenNode.openGTToken());\n Token middleGTToken = getToken(trippleGTTokenNode.middleGTToken());\n Token endGTToken = getToken(trippleGTTokenNode.endGTToken());\n return trippleGTTokenNode.modify()\n .withOpenGTToken(formatToken(openGTToken, 0, 0, 0, 0))\n .withMiddleGTToken(formatToken(middleGTToken, 0, 0, 0, 0))\n .withEndGTToken(formatToken(endGTToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public WaitActionNode transform(WaitActionNode waitActionNode) {\n if (!isInLineRange(waitActionNode)) {\n return waitActionNode;\n }\n Token waitKeyword = getToken(waitActionNode.waitKeyword());\n Node waitFutureExpr = this.modifyNode(waitActionNode.waitFutureExpr());\n return waitActionNode.modify()\n .withWaitKeyword(formatToken(waitKeyword, 1, 1, 0, 0))\n .withWaitFutureExpr(waitFutureExpr)\n .apply();\n }\n\n @Override\n public WaitFieldsListNode transform(WaitFieldsListNode waitFieldsListNode) {\n if (!isInLineRange(waitFieldsListNode)) {\n return waitFieldsListNode;\n }\n Token openBrace = getToken(waitFieldsListNode.openBrace());\n SeparatedNodeList waitFields = this.modifySeparatedNodeList(waitFieldsListNode.waitFields());\n Token closeBrace = getToken(waitFieldsListNode.closeBrace());\n return 
waitFieldsListNode.modify()\n .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))\n .withWaitFields(waitFields)\n .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public WaitFieldNode transform(WaitFieldNode waitFieldNode) {\n if (!isInLineRange(waitFieldNode)) {\n return waitFieldNode;\n }\n SimpleNameReferenceNode fieldName = this.modifyNode(waitFieldNode.fieldName());\n Token colon = getToken(waitFieldNode.colon());\n ExpressionNode waitFutureExpr = this.modifyNode(waitFieldNode.waitFutureExpr());\n return waitFieldNode.modify()\n .withFieldName(fieldName)\n .withColon(formatToken(colon, 1, 1, 0, 0))\n .withWaitFutureExpr(waitFutureExpr)\n .apply();\n }\n\n @Override\n public AnnotAccessExpressionNode transform(AnnotAccessExpressionNode annotAccessExpressionNode) {\n if (!isInLineRange(annotAccessExpressionNode)) {\n return annotAccessExpressionNode;\n }\n ExpressionNode expression = this.modifyNode(annotAccessExpressionNode.expression());\n Token annotChainingToken = getToken(annotAccessExpressionNode.annotChainingToken());\n NameReferenceNode annotTagReference = this.modifyNode(annotAccessExpressionNode.annotTagReference());\n return annotAccessExpressionNode.modify()\n .withExpression(expression)\n .withAnnotChainingToken(formatToken(annotChainingToken, 0, 0, 0, 0))\n .withAnnotTagReference(annotTagReference)\n .apply();\n }\n\n @Override\n public QueryActionNode transform(QueryActionNode queryActionNode) {\n if (!isInLineRange(queryActionNode)) {\n return queryActionNode;\n }\n QueryPipelineNode queryPipeline = this.modifyNode(queryActionNode.queryPipeline());\n Token doKeyword = getToken(queryActionNode.doKeyword());\n BlockStatementNode blockStatement = this.modifyNode(queryActionNode.blockStatement());\n LimitClauseNode limitClause = this.modifyNode(queryActionNode.limitClause().orElse(null));\n if (limitClause != null) {\n queryActionNode = queryActionNode.modify()\n .withLimitClause(limitClause).apply();\n }\n return 
queryActionNode.modify()\n .withQueryPipeline(queryPipeline)\n .withDoKeyword(formatToken(doKeyword, 1, 1, 0, 0))\n .withBlockStatement(blockStatement)\n .apply();\n }\n\n @Override\n public OptionalFieldAccessExpressionNode transform(\n OptionalFieldAccessExpressionNode optionalFieldAccessExpressionNode) {\n if (!isInLineRange(optionalFieldAccessExpressionNode)) {\n return optionalFieldAccessExpressionNode;\n }\n ExpressionNode expression = this.modifyNode(optionalFieldAccessExpressionNode.expression());\n Token optionalChainingToken = getToken(optionalFieldAccessExpressionNode.optionalChainingToken());\n NameReferenceNode fieldName = this.modifyNode(optionalFieldAccessExpressionNode.fieldName());\n return optionalFieldAccessExpressionNode.modify()\n .withExpression(expression)\n .withOptionalChainingToken(formatToken(optionalChainingToken, 0, 0, 0, 0))\n .withFieldName(fieldName)\n .apply();\n }\n\n @Override\n public ConditionalExpressionNode transform(ConditionalExpressionNode conditionalExpressionNode) {\n if (!isInLineRange(conditionalExpressionNode)) {\n return conditionalExpressionNode;\n }\n ExpressionNode lhsExpression = this.modifyNode(conditionalExpressionNode.lhsExpression());\n Token questionMarkToken = getToken(conditionalExpressionNode.questionMarkToken());\n ExpressionNode middleExpression = this.modifyNode(conditionalExpressionNode.middleExpression());\n Token colonToken = getToken(conditionalExpressionNode.colonToken());\n ExpressionNode endExpression = this.modifyNode(conditionalExpressionNode.endExpression());\n return conditionalExpressionNode.modify()\n .withLhsExpression(lhsExpression)\n .withQuestionMarkToken(formatToken(questionMarkToken, 1, 1, 0, 0))\n .withMiddleExpression(middleExpression)\n .withColonToken(formatToken(colonToken, 1, 1, 0, 0))\n .withEndExpression(endExpression)\n .apply();\n }\n\n @Override\n public EnumDeclarationNode transform(EnumDeclarationNode enumDeclarationNode) {\n if (!isInLineRange(enumDeclarationNode)) {\n 
return enumDeclarationNode;\n }\n MetadataNode metadata = this.modifyNode(enumDeclarationNode.metadata().orElse(null));\n Token qualifier = getToken(enumDeclarationNode.qualifier());\n Token enumKeywordToken = getToken(enumDeclarationNode.enumKeywordToken());\n IdentifierToken identifier = this.modifyNode(enumDeclarationNode.identifier());\n Token openBraceToken = getToken(enumDeclarationNode.openBraceToken());\n SeparatedNodeList enumMemberList = this.modifySeparatedNodeList(enumDeclarationNode.enumMemberList());\n Token closeBraceToken = getToken(enumDeclarationNode.closeBraceToken());\n if (metadata != null) {\n enumDeclarationNode = enumDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return enumDeclarationNode.modify()\n .withQualifier(formatToken(qualifier, 1, 1, 0, 0))\n .withEnumKeywordToken(formatToken(enumKeywordToken, 0, 1, 0, 0))\n .withIdentifier(identifier)\n .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0))\n .withEnumMemberList(enumMemberList)\n .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public EnumMemberNode transform(EnumMemberNode enumMemberNode) {\n if (!isInLineRange(enumMemberNode)) {\n return enumMemberNode;\n }\n MetadataNode metadata = this.modifyNode(enumMemberNode.metadata().orElse(null));\n IdentifierToken identifier = this.modifyNode(enumMemberNode.identifier());\n Token equalToken = getToken(enumMemberNode.equalToken().orElse(null));\n ExpressionNode constExprNode = this.modifyNode(enumMemberNode.constExprNode().orElse(null));\n if (metadata != null) {\n enumMemberNode = enumMemberNode.modify()\n .withMetadata(metadata).apply();\n }\n return enumMemberNode.modify()\n .withEqualToken(formatToken(equalToken, 1, 1, 0, 0))\n .withIdentifier(identifier)\n .withConstExprNode(constExprNode)\n .apply();\n }\n\n @Override\n public TransactionStatementNode transform(TransactionStatementNode transactionStatementNode) {\n if (!isInLineRange(transactionStatementNode)) 
{\n return transactionStatementNode;\n }\n Token transactionKeyword = getToken(transactionStatementNode.transactionKeyword());\n BlockStatementNode blockStatement = this.modifyNode(transactionStatementNode.blockStatement());\n return transactionStatementNode.modify()\n .withTransactionKeyword(formatToken(transactionKeyword, 1, 1, 0, 0))\n .withBlockStatement(blockStatement)\n .apply();\n }\n\n @Override\n public RollbackStatementNode transform(RollbackStatementNode rollbackStatementNode) {\n if (!isInLineRange(rollbackStatementNode)) {\n return rollbackStatementNode;\n }\n Token rollbackKeyword = getToken(rollbackStatementNode.rollbackKeyword());\n ExpressionNode expression = this.modifyNode(rollbackStatementNode.expression().orElse(null));\n Token semicolon = getToken(rollbackStatementNode.semicolon());\n if (expression != null) {\n rollbackStatementNode = rollbackStatementNode.modify()\n .withExpression(expression).apply();\n }\n return rollbackStatementNode.modify()\n .withRollbackKeyword(formatToken(rollbackKeyword, 1, 1, 0, 0))\n .withSemicolon(formatToken(semicolon, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RetryStatementNode transform(RetryStatementNode retryStatementNode) {\n if (!isInLineRange(retryStatementNode)) {\n return retryStatementNode;\n }\n Token retryKeyword = getToken(retryStatementNode.retryKeyword());\n TypeParameterNode typeParameter = this.modifyNode(retryStatementNode.typeParameter().orElse(null));\n ParenthesizedArgList arguments = this.modifyNode(retryStatementNode.arguments().orElse(null));\n StatementNode retryBody = this.modifyNode(retryStatementNode.retryBody());\n if (typeParameter != null) {\n retryStatementNode = retryStatementNode.modify()\n .withTypeParameter(typeParameter).apply();\n }\n if (arguments != null) {\n retryStatementNode = retryStatementNode.modify()\n .withArguments(arguments).apply();\n }\n return retryStatementNode.modify()\n .withRetryKeyword(formatToken(retryKeyword, 1, 1, 0, 0))\n 
.withRetryBody(retryBody)\n .apply();\n }\n\n @Override\n public CommitActionNode transform(CommitActionNode commitActionNode) {\n if (!isInLineRange(commitActionNode)) {\n return commitActionNode;\n }\n Token commitKeyword = getToken(commitActionNode.commitKeyword());\n return commitActionNode.modify()\n .withCommitKeyword(formatToken(commitKeyword, 1, 1, 0, 0))\n .apply();\n }\n\n @Override\n public TransactionalExpressionNode transform(TransactionalExpressionNode transactionalExpressionNode) {\n if (!isInLineRange(transactionalExpressionNode)) {\n return transactionalExpressionNode;\n }\n Token transactionalKeyword = getToken(transactionalExpressionNode.transactionalKeyword());\n return transactionalExpressionNode.modify()\n .withTransactionalKeyword(formatToken(transactionalKeyword, 1, 1, 0, 0))\n .apply();\n }\n\n @Override\n public ServiceConstructorExpressionNode transform(\n ServiceConstructorExpressionNode serviceConstructorExpressionNode) {\n if (!isInLineRange(serviceConstructorExpressionNode)) {\n return serviceConstructorExpressionNode;\n }\n NodeList annotations = this.modifyNodeList(serviceConstructorExpressionNode.annotations());\n Token serviceKeyword = getToken(serviceConstructorExpressionNode.serviceKeyword());\n Node serviceBody = this.modifyNode(serviceConstructorExpressionNode.serviceBody());\n return serviceConstructorExpressionNode.modify()\n .withAnnotations(annotations)\n .withServiceKeyword(formatToken(serviceKeyword, 1, 1, 0, 0))\n .withServiceBody(serviceBody)\n .apply();\n }\n\n @Override\n public TypeReferenceTypeDescNode transform(TypeReferenceTypeDescNode typeReferenceTypeDescNode) {\n if (!isInLineRange(typeReferenceTypeDescNode)) {\n return typeReferenceTypeDescNode;\n }\n NameReferenceNode typeRef = this.modifyNode(typeReferenceTypeDescNode.typeRef());\n return typeReferenceTypeDescNode.modify()\n .withTypeRef(typeRef)\n .apply();\n }\n\n @Override\n public MatchStatementNode transform(MatchStatementNode matchStatementNode) {\n 
if (!isInLineRange(matchStatementNode)) {\n return matchStatementNode;\n }\n Token matchKeyword = getToken(matchStatementNode.matchKeyword());\n ExpressionNode condition = this.modifyNode(matchStatementNode.condition());\n Token openBrace = getToken(matchStatementNode.openBrace());\n NodeList matchClauses = this.modifyNodeList(matchStatementNode.matchClauses());\n Token closeBrace = getToken(matchStatementNode.closeBrace());\n return matchStatementNode.modify()\n .withMatchKeyword(formatToken(matchKeyword, 1, 1, 0, 0))\n .withCondition(condition)\n .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))\n .withMatchClauses(matchClauses)\n .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public MatchClauseNode transform(MatchClauseNode matchClauseNode) {\n if (!isInLineRange(matchClauseNode)) {\n return matchClauseNode;\n }\n SeparatedNodeList matchPatterns = this.modifySeparatedNodeList(matchClauseNode.matchPatterns());\n MatchGuardNode matchGuard = this.modifyNode(matchClauseNode.matchGuard().orElse(null));\n Token rightDoubleArrow = getToken(matchClauseNode.rightDoubleArrow());\n BlockStatementNode blockStatement = this.modifyNode(matchClauseNode.blockStatement());\n if (matchGuard != null) {\n matchClauseNode = matchClauseNode.modify()\n .withMatchGuard(matchGuard).apply();\n }\n return matchClauseNode.modify()\n .withMatchPatterns(matchPatterns)\n .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0))\n .withBlockStatement(blockStatement)\n .apply();\n }\n\n @Override\n public MatchGuardNode transform(MatchGuardNode matchGuardNode) {\n if (!isInLineRange(matchGuardNode)) {\n return matchGuardNode;\n }\n Token ifKeyword = getToken(matchGuardNode.ifKeyword());\n ExpressionNode expression = this.modifyNode(matchGuardNode.expression());\n return matchGuardNode.modify()\n .withIfKeyword(formatToken(ifKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public DistinctTypeDescriptorNode 
transform(DistinctTypeDescriptorNode distinctTypeDescriptorNode) {\n if (!isInLineRange(distinctTypeDescriptorNode)) {\n return distinctTypeDescriptorNode;\n }\n Token distinctKeyword = getToken(distinctTypeDescriptorNode.distinctKeyword());\n TypeDescriptorNode typeDescriptor = this.modifyNode(distinctTypeDescriptorNode.typeDescriptor());\n return distinctTypeDescriptorNode.modify()\n .withDistinctKeyword(formatToken(distinctKeyword, 1, 1, 0, 0))\n .withTypeDescriptor(typeDescriptor)\n .apply();\n }\n\n @Override\n public OnConflictClauseNode transform(OnConflictClauseNode onConflictClauseNode) {\n if (!isInLineRange(onConflictClauseNode)) {\n return onConflictClauseNode;\n }\n Token onKeyword = getToken(onConflictClauseNode.onKeyword());\n Token conflictKeyword = getToken(onConflictClauseNode.conflictKeyword());\n ExpressionNode expression = this.modifyNode(onConflictClauseNode.expression());\n return onConflictClauseNode.modify()\n .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0))\n .withConflictKeyword(formatToken(conflictKeyword, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public LimitClauseNode transform(LimitClauseNode limitClauseNode) {\n if (!isInLineRange(limitClauseNode)) {\n return limitClauseNode;\n }\n Token limitKeyword = getToken(limitClauseNode.limitKeyword());\n ExpressionNode expression = this.modifyNode(limitClauseNode.expression());\n return limitClauseNode.modify()\n .withLimitKeyword(formatToken(limitKeyword, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public JoinClauseNode transform(JoinClauseNode joinClauseNode) {\n if (!isInLineRange(joinClauseNode)) {\n return joinClauseNode;\n }\n Token outerKeyword = getToken(joinClauseNode.outerKeyword().orElse(null));\n Token joinKeyword = getToken(joinClauseNode.joinKeyword());\n TypedBindingPatternNode typedBindingPattern = this.modifyNode(joinClauseNode.typedBindingPattern());\n Token inKeyword = getToken(joinClauseNode.inKeyword());\n 
ExpressionNode expression = this.modifyNode(joinClauseNode.expression());\n OnClauseNode onCondition = this.modifyNode(joinClauseNode.onCondition());\n if (outerKeyword != null) {\n joinClauseNode = joinClauseNode.modify()\n .withOuterKeyword(formatToken(outerKeyword, 1, 1, 0, 0)).apply();\n }\n return joinClauseNode.modify()\n .withJoinKeyword(formatToken(joinKeyword, 1, 1, 0, 0))\n .withTypedBindingPattern(typedBindingPattern)\n .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0))\n .withExpression(expression)\n .withOnCondition(onCondition)\n .apply();\n }\n\n @Override\n public OnClauseNode transform(OnClauseNode onClauseNode) {\n if (!isInLineRange(onClauseNode)) {\n return onClauseNode;\n }\n Token onKeyword = getToken(onClauseNode.onKeyword());\n Token equalsKeyword = getToken(onClauseNode.equalsKeyword());\n ExpressionNode lhsExpr = this.modifyNode(onClauseNode.lhsExpression());\n ExpressionNode rhsExpr = this.modifyNode(onClauseNode.rhsExpression());\n return onClauseNode.modify()\n .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0))\n .withLhsExpression(lhsExpr)\n .withEqualsKeyword(formatToken(equalsKeyword, 1, 1, 0, 0))\n .withRhsExpression(rhsExpr)\n .apply();\n }\n\n @Override\n public ListMatchPatternNode transform(ListMatchPatternNode listMatchPatternNode) {\n if (!isInLineRange(listMatchPatternNode)) {\n return listMatchPatternNode;\n }\n Token openBracket = getToken(listMatchPatternNode.openBracket());\n SeparatedNodeList matchPatterns = this.modifySeparatedNodeList(listMatchPatternNode.matchPatterns());\n RestMatchPatternNode restMatchPattern = this.modifyNode(listMatchPatternNode.restMatchPattern().orElse(null));\n Token closeBracket = getToken(listMatchPatternNode.closeBracket());\n return listMatchPatternNode.modify()\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withMatchPatterns(matchPatterns)\n .withRestMatchPattern(restMatchPattern)\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public 
RestMatchPatternNode transform(RestMatchPatternNode restMatchPatternNode) {\n if (!isInLineRange(restMatchPatternNode)) {\n return restMatchPatternNode;\n }\n Token ellipsisToken = getToken(restMatchPatternNode.ellipsisToken());\n Token varKeywordToken = getToken(restMatchPatternNode.varKeywordToken());\n SimpleNameReferenceNode variableName = this.modifyNode(restMatchPatternNode.variableName());\n return restMatchPatternNode.modify()\n .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))\n .withVarKeywordToken(formatToken(varKeywordToken, 1, 1, 0, 0))\n .withVariableName(variableName)\n .apply();\n }\n\n @Override\n public FieldMatchPatternNode transform(FieldMatchPatternNode fieldMatchPatternNode) {\n if (!isInLineRange(fieldMatchPatternNode)) {\n return fieldMatchPatternNode;\n }\n SimpleNameReferenceNode fieldNameNode = this.modifyNode(fieldMatchPatternNode.fieldNameNode());\n Token colonToken = getToken(fieldMatchPatternNode.colonToken());\n Node matchPattern = this.modifyNode(fieldMatchPatternNode.matchPattern());\n return fieldMatchPatternNode.modify()\n .withFieldNameNode(fieldNameNode)\n .withColonToken(formatToken(colonToken, 1, 1, 0, 0))\n .withMatchPattern(matchPattern)\n .apply();\n }\n\n @Override\n public FunctionalMatchPatternNode transform(FunctionalMatchPatternNode functionalMatchPatternNode) {\n if (!isInLineRange(functionalMatchPatternNode)) {\n return functionalMatchPatternNode;\n }\n Node typeRef = this.modifyNode(functionalMatchPatternNode.typeRef());\n Token openParenthesisToken = getToken(functionalMatchPatternNode.openParenthesisToken());\n SeparatedNodeList argListMatchPatternNode =\n this.modifySeparatedNodeList(functionalMatchPatternNode.argListMatchPatternNode());\n Token closeParenthesisToken = getToken(functionalMatchPatternNode.closeParenthesisToken());\n return functionalMatchPatternNode.modify()\n .withTypeRef(typeRef)\n .withOpenParenthesisToken(formatToken(openParenthesisToken, 0, 0, 0, 0))\n 
.withArgListMatchPatternNode(argListMatchPatternNode)\n .withCloseParenthesisToken(formatToken(closeParenthesisToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public NamedArgMatchPatternNode transform(NamedArgMatchPatternNode namedArgMatchPatternNode) {\n if (!isInLineRange(namedArgMatchPatternNode)) {\n return namedArgMatchPatternNode;\n }\n IdentifierToken identifier = this.modifyNode(namedArgMatchPatternNode.identifier());\n Token equalToken = getToken(namedArgMatchPatternNode.equalToken());\n Node matchPattern = this.modifyNode(namedArgMatchPatternNode.matchPattern());\n return namedArgMatchPatternNode.modify()\n .withIdentifier(identifier)\n .withEqualToken(formatToken(equalToken, 1, 1, 0, 0))\n .withMatchPattern(matchPattern)\n .apply();\n }\n\n @Override\n public MarkdownDocumentationNode transform(MarkdownDocumentationNode markdownDocumentationNode) {\n if (!isInLineRange(markdownDocumentationNode)) {\n return markdownDocumentationNode;\n }\n NodeList documentationLines = this.modifyNodeList(markdownDocumentationNode.documentationLines());\n return markdownDocumentationNode.modify()\n .withDocumentationLines(documentationLines)\n .apply();\n }\n\n @Override\n public MarkdownDocumentationLineNode transform(MarkdownDocumentationLineNode markdownDocumentationLineNode) {\n if (!isInLineRange(markdownDocumentationLineNode)) {\n return markdownDocumentationLineNode;\n }\n Token hashToken = getToken(markdownDocumentationLineNode.hashToken());\n NodeList documentElements = this.modifyNodeList(markdownDocumentationLineNode.documentElements());\n return markdownDocumentationLineNode.modify()\n .withDocumentElements(documentElements)\n .withHashToken(formatToken(hashToken, 1, 1, 0, 0))\n .apply();\n }\n\n @Override\n public MarkdownParameterDocumentationLineNode transform(\n MarkdownParameterDocumentationLineNode markdownParameterDocumentationLineNode) {\n if (!isInLineRange(markdownParameterDocumentationLineNode)) {\n return 
markdownParameterDocumentationLineNode;\n }\n Token hashToken = getToken(markdownParameterDocumentationLineNode.hashToken());\n Token plusToken = getToken(markdownParameterDocumentationLineNode.plusToken());\n Token parameterName = getToken(markdownParameterDocumentationLineNode.parameterName());\n Token minusToken = getToken(markdownParameterDocumentationLineNode.minusToken());\n NodeList documentElements =\n this.modifyNodeList(markdownParameterDocumentationLineNode.documentElements());\n return markdownParameterDocumentationLineNode.modify()\n .withHashToken(formatToken(hashToken, 1, 1, 0, 0))\n .withPlusToken(formatToken(plusToken, 1, 1, 0, 0))\n .withParameterName(formatToken(parameterName, 1, 1, 0, 0))\n .withMinusToken(formatToken(minusToken, 1, 1, 0, 0))\n .withDocumentElements(documentElements)\n .apply();\n }\n\n @Override\n public DocumentationReferenceNode transform(DocumentationReferenceNode documentationReferenceNode) {\n if (!isInLineRange(documentationReferenceNode)) {\n return documentationReferenceNode;\n }\n Token referenceType = getToken(documentationReferenceNode.referenceType().orElse(null));\n Token startBacktick = getToken(documentationReferenceNode.startBacktick());\n Node backtickContent = this.modifyNode(documentationReferenceNode.backtickContent());\n Token endBacktick = getToken(documentationReferenceNode.endBacktick());\n if (referenceType != null) {\n documentationReferenceNode = documentationReferenceNode.modify()\n .withReferenceType(referenceType).apply();\n }\n return documentationReferenceNode.modify()\n .withStartBacktick(formatToken(startBacktick, 0, 0, 0, 0))\n .withBacktickContent(backtickContent)\n .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public OrderByClauseNode transform(OrderByClauseNode orderByClauseNode) {\n if (!isInLineRange(orderByClauseNode)) {\n return orderByClauseNode;\n }\n Token orderKeyword = getToken(orderByClauseNode.orderKeyword());\n Token byKeyword = 
getToken(orderByClauseNode.byKeyword());\n SeparatedNodeList orderKey = this.modifySeparatedNodeList(orderByClauseNode.orderKey());\n return orderByClauseNode.modify()\n .withOrderKeyword(formatToken(orderKeyword, 1, 1, 0, 0))\n .withByKeyword(formatToken(byKeyword, 1, 1, 0, 0))\n .withOrderKey(orderKey)\n .apply();\n }\n\n @Override\n public OrderKeyNode transform(OrderKeyNode orderKeyNode) {\n if (!isInLineRange(orderKeyNode)) {\n return orderKeyNode;\n }\n ExpressionNode expression = this.modifyNode(orderKeyNode.expression());\n Token orderDirection = getToken(orderKeyNode.orderDirection().orElse(null));\n if (orderDirection != null) {\n orderKeyNode = orderKeyNode.modify()\n .withOrderDirection(formatToken(orderDirection, 1, 1, 0, 0)).apply();\n }\n return orderKeyNode.modify()\n .withExpression(expression)\n .apply();\n }\n\n /**\n * Update the minutiae and return the token.\n *\n * @param token token\n * @param leadingSpaces leading spaces\n * @param trailingSpaces trailing spaces\n * @param leadingNewLines leading new lines\n * @param trailingNewLines trailing new lines\n * @return updated token\n */\n private Token formatToken(Token token, int leadingSpaces, int trailingSpaces, int leadingNewLines,\n int trailingNewLines) {\n if (token == null) {\n return token;\n }\n MinutiaeList leadingMinutiaeList = token.leadingMinutiae();\n MinutiaeList trailingMinutiaeList = token.trailingMinutiae();\n\n MinutiaeList newLeadingMinutiaeList = modifyMinutiaeList(leadingMinutiaeList, leadingSpaces, leadingNewLines);\n MinutiaeList newTrailingMinutiaeList = modifyMinutiaeList(trailingMinutiaeList, trailingSpaces,\n trailingNewLines);\n\n return token.modify(newLeadingMinutiaeList, newTrailingMinutiaeList);\n }\n\n private MinutiaeList modifyMinutiaeList(MinutiaeList minutiaeList, int spaces, int newLines) {\n Minutiae minutiae = NodeFactory.createWhitespaceMinutiae(getWhiteSpaces(spaces, newLines));\n return minutiaeList.add(minutiae);\n }\n\n private String 
getWhiteSpaces(int column, int newLines) {\n StringBuilder whiteSpaces = new StringBuilder();\n for (int i = 0; i <= (newLines - 1); i++) {\n whiteSpaces.append(\"\\n\");\n }\n for (int i = 0; i <= (column - 1); i++) {\n whiteSpaces.append(\" \");\n }\n\n return whiteSpaces.toString();\n }\n\n /**\n * Initialize the token with empty minutiae lists.\n *\n * @param node node\n * @return token with empty minutiae\n */\n private Token getToken(T node) {\n if (node == null) {\n return node;\n }\n MinutiaeList leadingMinutiaeList = AbstractNodeFactory.createEmptyMinutiaeList();\n MinutiaeList trailingMinutiaeList = AbstractNodeFactory.createEmptyMinutiaeList();\n if (node.containsLeadingMinutiae()) {\n leadingMinutiaeList = getCommentMinutiae(node.leadingMinutiae(), true);\n }\n if (node.containsTrailingMinutiae()) {\n trailingMinutiaeList = getCommentMinutiae(node.trailingMinutiae(), false);\n }\n return node.modify(leadingMinutiaeList, trailingMinutiaeList);\n }\n\n private MinutiaeList getCommentMinutiae(MinutiaeList minutiaeList, boolean isLeading) {\n MinutiaeList minutiaes = AbstractNodeFactory.createEmptyMinutiaeList();\n for (int i = 0; i < minutiaeList.size(); i++) {\n if (minutiaeList.get(i).kind().equals(SyntaxKind.COMMENT_MINUTIAE)) {\n if (i > 0) {\n minutiaes = minutiaes.add(minutiaeList.get(i - 1));\n }\n minutiaes = minutiaes.add(minutiaeList.get(i));\n if ((i + 1) < minutiaeList.size() && isLeading) {\n minutiaes = minutiaes.add(minutiaeList.get(i + 1));\n }\n }\n }\n return minutiaes;\n }\n\n \n private Node getParent(T node, SyntaxKind syntaxKind) {\n Node parent = node.parent();\n if (parent == null) {\n parent = node;\n }\n SyntaxKind parentKind = parent.kind();\n if (parentKind == SyntaxKind.MODULE_VAR_DECL) {\n if (parent.parent() != null && parent.parent().kind() == SyntaxKind.MODULE_PART &&\n syntaxKind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\n return null;\n }\n return parent;\n } else if (parentKind == SyntaxKind.FUNCTION_DEFINITION ||\n 
parentKind == SyntaxKind.IF_ELSE_STATEMENT ||\n parentKind == SyntaxKind.ELSE_BLOCK ||\n parentKind == SyntaxKind.SPECIFIC_FIELD ||\n parentKind == SyntaxKind.WHILE_STATEMENT) {\n return parent;\n } else if (syntaxKind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\n if (parentKind == SyntaxKind.REQUIRED_PARAM ||\n parentKind == SyntaxKind.POSITIONAL_ARG ||\n parentKind == SyntaxKind.BINARY_EXPRESSION ||\n parentKind == SyntaxKind.RETURN_STATEMENT ||\n parentKind == SyntaxKind.LOCAL_VAR_DECL ||\n (parentKind == SyntaxKind.FUNCTION_CALL && parent.parent() != null &&\n parent.parent().kind() == SyntaxKind.ASSIGNMENT_STATEMENT)) {\n return null;\n }\n return getParent(parent, syntaxKind);\n\n } else if (parentKind == SyntaxKind.SERVICE_DECLARATION ||\n parentKind == SyntaxKind.BINARY_EXPRESSION) {\n if (syntaxKind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\n return null;\n }\n return parent;\n } else if (parentKind == SyntaxKind.REQUIRED_PARAM ||\n parentKind == SyntaxKind.RETURN_TYPE_DESCRIPTOR) {\n return null;\n } else if (parent.parent() != null) {\n return getParent(parent, syntaxKind);\n } else {\n return null;\n }\n }\n\n /**\n * Get the node position.\n *\n * @param node node\n * @return node position\n */\n private DiagnosticPos getPosition(Node node) {\n if (node == null) {\n return null;\n }\n LineRange range = node.lineRange();\n LinePosition startPos = range.startLine();\n LinePosition endPos = range.endLine();\n return new DiagnosticPos(null, startPos.line() + 1, endPos.line() + 1,\n startPos.offset(), endPos.offset());\n }\n\n /**\n * return the indented start column.\n *\n * @param node node\n * @param syntaxKind node kind\n * @param addSpaces add spaces or not\n * @return start position\n */\n private int getStartColumn(Node node, SyntaxKind syntaxKind, boolean addSpaces) {\n Node parent = getParent(node, syntaxKind);\n if (parent != null) {\n return getPosition(parent).sCol + (addSpaces ? 
4 : 0);\n }\n return 0;\n }\n\n private boolean isInLineRange(Node node) {\n if (this.lineRange == null) {\n return true;\n }\n int nodeStartLine = node.lineRange().startLine().line();\n int nodeStartOffset = node.lineRange().startLine().offset();\n int nodeEndLine = node.lineRange().endLine().line();\n int nodeEndOffset = node.lineRange().endLine().offset();\n\n int startLine = this.lineRange.startLine().line();\n int startOffset = this.lineRange.startLine().offset();\n int endLine = this.lineRange.endLine().line();\n int endOffset = this.lineRange.endLine().offset();\n\n if (nodeStartLine >= startLine && nodeEndLine <= endLine) {\n if (nodeStartLine == startLine || nodeEndLine == endLine) {\n return nodeStartOffset >= startOffset && nodeEndOffset <= endOffset;\n }\n return true;\n }\n return false;\n }\n\n public FormattingOptions getFormattingOptions() {\n return formattingOptions;\n }\n\n void setFormattingOptions(FormattingOptions formattingOptions) {\n this.formattingOptions = formattingOptions;\n }\n\n void setLineRange(LineRange lineRange) {\n this.lineRange = lineRange;\n }\n}", "context_after": "class FormattingTreeModifier extends TreeModifier {\n\n private FormattingOptions formattingOptions;\n private LineRange lineRange;\n\n @Override\n public ImportDeclarationNode transform(ImportDeclarationNode importDeclarationNode) {\n if (!isInLineRange(importDeclarationNode)) {\n return importDeclarationNode;\n }\n Token importKeyword = getToken(importDeclarationNode.importKeyword());\n Token semicolon = getToken(importDeclarationNode.semicolon());\n SeparatedNodeList moduleNames = this.modifySeparatedNodeList(\n importDeclarationNode.moduleName());\n ImportOrgNameNode orgName = this.modifyNode(importDeclarationNode.orgName().orElse(null));\n ImportPrefixNode prefix = this.modifyNode(importDeclarationNode.prefix().orElse(null));\n ImportVersionNode version = this.modifyNode(importDeclarationNode.version().orElse(null));\n if (orgName != null) {\n 
importDeclarationNode = importDeclarationNode.modify()\n .withOrgName(orgName).apply();\n }\n if (prefix != null) {\n importDeclarationNode = importDeclarationNode.modify()\n .withPrefix(prefix).apply();\n }\n if (version != null) {\n importDeclarationNode = importDeclarationNode.modify()\n .withVersion(version).apply();\n }\n return importDeclarationNode.modify()\n .withImportKeyword(formatToken(importKeyword, 0, 0, 0, 0))\n .withModuleName(moduleNames)\n .withSemicolon(formatToken(semicolon, 0, 0, 0, 1))\n .apply();\n }\n\n @Override\n public ImportOrgNameNode transform(ImportOrgNameNode importOrgNameNode) {\n if (!isInLineRange(importOrgNameNode)) {\n return importOrgNameNode;\n }\n Token orgName = getToken(importOrgNameNode.orgName());\n Token slashToken = getToken(importOrgNameNode.slashToken());\n return importOrgNameNode.modify()\n .withOrgName(formatToken(orgName, 1, 0, 0, 0))\n .withSlashToken(formatToken(slashToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ImportPrefixNode transform(ImportPrefixNode importPrefixNode) {\n if (!isInLineRange(importPrefixNode)) {\n return importPrefixNode;\n }\n Token asKeyword = getToken(importPrefixNode.asKeyword());\n Token prefix = getToken(importPrefixNode.prefix());\n return importPrefixNode.modify()\n .withAsKeyword(formatToken(asKeyword, 1, 0, 0, 0))\n .withPrefix(formatToken(prefix, 1, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ImportVersionNode transform(ImportVersionNode importVersionNode) {\n if (!isInLineRange(importVersionNode)) {\n return importVersionNode;\n }\n Token versionKeyword = getToken(importVersionNode.versionKeyword());\n SeparatedNodeList versionNumber = this.modifySeparatedNodeList(importVersionNode.versionNumber());\n return importVersionNode.modify()\n .withVersionKeyword(formatToken(versionKeyword, 1, 1, 0, 0))\n .withVersionNumber(versionNumber)\n .apply();\n }\n\n @Override\n public IdentifierToken transform(IdentifierToken identifier) {\n if (!isInLineRange(identifier)) {\n 
return identifier;\n }\n Token identifierToken = getToken(identifier);\n return (IdentifierToken) formatToken(identifierToken, 0, 0, 0, 0);\n }\n\n @Override\n public FunctionDefinitionNode transform(FunctionDefinitionNode functionDefinitionNode) {\n if (!isInLineRange(functionDefinitionNode)) {\n return functionDefinitionNode;\n }\n MetadataNode metadata = this.modifyNode(functionDefinitionNode.metadata().orElse(null));\n NodeList qualifierList = this.modifyNodeList(functionDefinitionNode.qualifierList());\n Token functionKeyword = getToken(functionDefinitionNode.functionKeyword());\n Token functionName = getToken(functionDefinitionNode.functionName());\n FunctionSignatureNode functionSignatureNode = this.modifyNode(functionDefinitionNode.functionSignature());\n FunctionBodyNode functionBodyNode = this.modifyNode(functionDefinitionNode.functionBody());\n if (metadata != null) {\n functionDefinitionNode = functionDefinitionNode.modify()\n .withMetadata(metadata).apply();\n }\n return functionDefinitionNode.modify()\n .withFunctionKeyword(formatToken(functionKeyword, 0, 0, 0, 0))\n .withFunctionName((IdentifierToken) formatToken(functionName, 1, 0, 0, 0))\n .withFunctionSignature(functionSignatureNode)\n .withQualifierList(qualifierList)\n .withFunctionBody(functionBodyNode)\n .apply();\n }\n\n @Override\n public FunctionSignatureNode transform(FunctionSignatureNode functionSignatureNode) {\n if (!isInLineRange(functionSignatureNode)) {\n return functionSignatureNode;\n }\n Token openPara = getToken(functionSignatureNode.openParenToken());\n Token closePara = getToken(functionSignatureNode.closeParenToken());\n SeparatedNodeList parameters = this.modifySeparatedNodeList(functionSignatureNode.parameters());\n ReturnTypeDescriptorNode returnTypeDesc = this.modifyNode(functionSignatureNode.returnTypeDesc().orElse(null));\n if (returnTypeDesc != null) {\n functionSignatureNode = functionSignatureNode.modify()\n .withReturnTypeDesc(returnTypeDesc).apply();\n }\n return 
functionSignatureNode.modify()\n .withOpenParenToken(formatToken(openPara, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closePara, 0, 0, 0, 0))\n .withParameters(parameters)\n .apply();\n }\n\n @Override\n public ReturnTypeDescriptorNode transform(ReturnTypeDescriptorNode returnTypeDescriptorNode) {\n if (!isInLineRange(returnTypeDescriptorNode)) {\n return returnTypeDescriptorNode;\n }\n Token returnsKeyword = getToken(returnTypeDescriptorNode.returnsKeyword());\n NodeList annotations = this.modifyNodeList(returnTypeDescriptorNode.annotations());\n Node type = this.modifyNode(returnTypeDescriptorNode.type());\n return returnTypeDescriptorNode.modify()\n .withAnnotations(annotations)\n .withReturnsKeyword(formatToken(returnsKeyword, 1, 1, 0, 0))\n .withType(type)\n .apply();\n }\n\n @Override\n public OptionalTypeDescriptorNode transform(OptionalTypeDescriptorNode optionalTypeDescriptorNode) {\n if (!isInLineRange(optionalTypeDescriptorNode)) {\n return optionalTypeDescriptorNode;\n }\n Node typeDescriptor = this.modifyNode(optionalTypeDescriptorNode.typeDescriptor());\n Token questionMarkToken = getToken(optionalTypeDescriptorNode.questionMarkToken());\n return optionalTypeDescriptorNode.modify()\n .withTypeDescriptor(typeDescriptor)\n .withQuestionMarkToken(formatToken(questionMarkToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RequiredParameterNode transform(RequiredParameterNode requiredParameterNode) {\n if (!isInLineRange(requiredParameterNode)) {\n return requiredParameterNode;\n }\n Token paramName = getToken(requiredParameterNode.paramName().orElse(null));\n NodeList annotations = this.modifyNodeList(requiredParameterNode.annotations());\n Node typeName = this.modifyNode(requiredParameterNode.typeName());\n if (paramName != null) {\n requiredParameterNode = requiredParameterNode.modify()\n .withParamName(formatToken(paramName, 1, 0, 0, 0)).apply();\n }\n return requiredParameterNode.modify()\n .withAnnotations(annotations)\n 
.withTypeName(typeName)\n .apply();\n }\n\n @Override\n public BuiltinSimpleNameReferenceNode transform(BuiltinSimpleNameReferenceNode builtinSimpleNameReferenceNode) {\n if (!isInLineRange(builtinSimpleNameReferenceNode)) {\n return builtinSimpleNameReferenceNode;\n }\n int startCol = getStartColumn(builtinSimpleNameReferenceNode, builtinSimpleNameReferenceNode.kind(), true);\n Token name = getToken(builtinSimpleNameReferenceNode.name());\n return builtinSimpleNameReferenceNode.modify()\n .withName(formatToken(name, startCol, 0, 0, 0))\n .apply();\n }\n\n @Override\n public FunctionBodyBlockNode transform(FunctionBodyBlockNode functionBodyBlockNode) {\n if (!isInLineRange(functionBodyBlockNode)) {\n return functionBodyBlockNode;\n }\n int startColumn = getStartColumn(functionBodyBlockNode, functionBodyBlockNode.kind(), false);\n Token functionBodyOpenBrace = getToken(functionBodyBlockNode.openBraceToken());\n Token functionBodyCloseBrace = getToken(functionBodyBlockNode.closeBraceToken());\n NodeList statements = this.modifyNodeList(functionBodyBlockNode.statements());\n NamedWorkerDeclarator namedWorkerDeclarator =\n this.modifyNode(functionBodyBlockNode.namedWorkerDeclarator().orElse(null));\n if (namedWorkerDeclarator != null) {\n functionBodyBlockNode = functionBodyBlockNode.modify()\n .withNamedWorkerDeclarator(namedWorkerDeclarator).apply();\n }\n return functionBodyBlockNode.modify()\n .withOpenBraceToken(formatToken(functionBodyOpenBrace, 1, 0, 0, 1))\n .withCloseBraceToken(formatToken(functionBodyCloseBrace, startColumn, 0, 0, 1))\n .withStatements(statements)\n .apply();\n }\n\n @Override\n public ExpressionStatementNode transform(ExpressionStatementNode expressionStatementNode) {\n if (!isInLineRange(expressionStatementNode)) {\n return expressionStatementNode;\n }\n ExpressionNode expression = this.modifyNode(expressionStatementNode.expression());\n Token semicolonToken = expressionStatementNode.semicolonToken();\n return 
expressionStatementNode.modify()\n .withExpression(expression)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public FunctionCallExpressionNode transform(FunctionCallExpressionNode functionCallExpressionNode) {\n if (!isInLineRange(functionCallExpressionNode)) {\n return functionCallExpressionNode;\n }\n NameReferenceNode functionName = this.modifyNode(functionCallExpressionNode.functionName());\n Token functionCallOpenPara = getToken(functionCallExpressionNode.openParenToken());\n Token functionCallClosePara = getToken(functionCallExpressionNode.closeParenToken());\n SeparatedNodeList arguments = this.modifySeparatedNodeList(functionCallExpressionNode\n .arguments());\n return functionCallExpressionNode.modify()\n .withFunctionName(functionName)\n .withOpenParenToken(formatToken(functionCallOpenPara, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(functionCallClosePara, 0, 0, 0, 0))\n .withArguments(arguments)\n .apply();\n }\n\n @Override\n public QualifiedNameReferenceNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) {\n if (!isInLineRange(qualifiedNameReferenceNode)) {\n return qualifiedNameReferenceNode;\n }\n int startCol = getStartColumn(qualifiedNameReferenceNode, qualifiedNameReferenceNode.kind(), false);\n Token modulePrefix = getToken(qualifiedNameReferenceNode.modulePrefix());\n Token identifier = getToken(qualifiedNameReferenceNode.identifier());\n Token colon = getToken((Token) qualifiedNameReferenceNode.colon());\n return qualifiedNameReferenceNode.modify()\n .withModulePrefix(formatToken(modulePrefix, startCol, 0, 0, 0))\n .withIdentifier((IdentifierToken) formatToken(identifier, 0, 0, 0, 0))\n .withColon(formatToken(colon, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public PositionalArgumentNode transform(PositionalArgumentNode positionalArgumentNode) {\n if (!isInLineRange(positionalArgumentNode)) {\n return positionalArgumentNode;\n }\n ExpressionNode expression = 
this.modifyNode(positionalArgumentNode.expression());\n return positionalArgumentNode.modify()\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public BasicLiteralNode transform(BasicLiteralNode basicLiteralNode) {\n if (!isInLineRange(basicLiteralNode)) {\n return basicLiteralNode;\n }\n Token literalToken = getToken(basicLiteralNode.literalToken());\n return basicLiteralNode.modify()\n .withLiteralToken(formatToken(literalToken, 0, 0, 0, 0))\n .apply();\n }\n\n\n @Override\n public ServiceDeclarationNode transform(ServiceDeclarationNode serviceDeclarationNode) {\n if (!isInLineRange(serviceDeclarationNode)) {\n return serviceDeclarationNode;\n }\n Token serviceKeyword = getToken(serviceDeclarationNode.serviceKeyword());\n IdentifierToken serviceName = (IdentifierToken) getToken(serviceDeclarationNode.serviceName());\n Token onKeyword = getToken(serviceDeclarationNode.onKeyword());\n MetadataNode metadata = this.modifyNode(serviceDeclarationNode.metadata().orElse(null));\n SeparatedNodeList expressions =\n this.modifySeparatedNodeList(serviceDeclarationNode.expressions());\n Node serviceBody = this.modifyNode(serviceDeclarationNode.serviceBody());\n if (metadata != null) {\n serviceDeclarationNode = serviceDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return serviceDeclarationNode.modify()\n .withServiceKeyword(formatToken(serviceKeyword, 0, 0, 1, 0))\n .withServiceName((IdentifierToken) formatToken(serviceName, 1, 0, 0, 0))\n .withOnKeyword(formatToken(onKeyword, 1, 0, 0, 0))\n .withExpressions(expressions)\n .withServiceBody(serviceBody)\n .apply();\n }\n\n @Override\n public ServiceBodyNode transform(ServiceBodyNode serviceBodyNode) {\n if (!isInLineRange(serviceBodyNode)) {\n return serviceBodyNode;\n }\n Token openBraceToken = getToken(serviceBodyNode.openBraceToken());\n Token closeBraceToken = getToken(serviceBodyNode.closeBraceToken());\n NodeList resources = this.modifyNodeList(serviceBodyNode.resources());\n return 
serviceBodyNode.modify()\n .withOpenBraceToken(formatToken(openBraceToken, 1, 0, 0, 1))\n .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 1))\n .withResources(resources)\n .apply();\n }\n\n @Override\n public ExplicitNewExpressionNode transform(ExplicitNewExpressionNode explicitNewExpressionNode) {\n if (!isInLineRange(explicitNewExpressionNode)) {\n return explicitNewExpressionNode;\n }\n Token newKeywordToken = getToken(explicitNewExpressionNode.newKeyword());\n TypeDescriptorNode typeDescriptorNode = this.modifyNode(explicitNewExpressionNode.typeDescriptor());\n return explicitNewExpressionNode.modify()\n .withNewKeyword(formatToken(newKeywordToken, 1, 1, 0, 0))\n .withParenthesizedArgList(modifyNode(explicitNewExpressionNode.parenthesizedArgList()))\n .withTypeDescriptor(typeDescriptorNode)\n .apply();\n }\n\n @Override\n public ParenthesizedArgList transform(ParenthesizedArgList parenthesizedArgList) {\n if (!isInLineRange(parenthesizedArgList)) {\n return parenthesizedArgList;\n }\n Token openParenToken = getToken(parenthesizedArgList.openParenToken());\n Token closeParenToken = getToken(parenthesizedArgList.closeParenToken());\n SeparatedNodeList arguments = this.modifySeparatedNodeList(parenthesizedArgList\n .arguments());\n return parenthesizedArgList.modify()\n .withArguments(arguments)\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public VariableDeclarationNode transform(VariableDeclarationNode variableDeclarationNode) {\n if (!isInLineRange(variableDeclarationNode)) {\n return variableDeclarationNode;\n }\n Token semicolonToken = getToken(variableDeclarationNode.semicolonToken());\n Token equalToken = getToken(variableDeclarationNode.equalsToken().orElse(null));\n Token finalToken = getToken(variableDeclarationNode.finalKeyword().orElse(null));\n ExpressionNode initializer = 
this.modifyNode(variableDeclarationNode.initializer().orElse(null));\n NodeList annotationNodes = this.modifyNodeList(variableDeclarationNode.annotations());\n TypedBindingPatternNode typedBindingPatternNode = this.modifyNode(\n variableDeclarationNode.typedBindingPattern());\n if (equalToken != null) {\n variableDeclarationNode = variableDeclarationNode.modify()\n .withEqualsToken(formatToken(equalToken, 1, 1, 0, 0)).apply();\n }\n if (finalToken != null) {\n variableDeclarationNode = variableDeclarationNode.modify()\n .withFinalKeyword(formatToken(finalToken, 0, 0, 0, 0)).apply();\n }\n if (initializer != null) {\n variableDeclarationNode = variableDeclarationNode.modify()\n .withInitializer(initializer).apply();\n }\n return variableDeclarationNode.modify()\n .withAnnotations(annotationNodes)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1))\n .withTypedBindingPattern(typedBindingPatternNode)\n .apply();\n }\n\n @Override\n public TypedBindingPatternNode transform(TypedBindingPatternNode typedBindingPatternNode) {\n if (!isInLineRange(typedBindingPatternNode)) {\n return typedBindingPatternNode;\n }\n BindingPatternNode bindingPatternNode = this.modifyNode(typedBindingPatternNode.bindingPattern());\n TypeDescriptorNode typeDescriptorNode = this.modifyNode(typedBindingPatternNode.typeDescriptor());\n return typedBindingPatternNode.modify()\n .withBindingPattern(bindingPatternNode)\n .withTypeDescriptor(typeDescriptorNode)\n .apply();\n }\n\n @Override\n public CaptureBindingPatternNode transform(CaptureBindingPatternNode captureBindingPatternNode) {\n if (!isInLineRange(captureBindingPatternNode)) {\n return captureBindingPatternNode;\n }\n Token variableName = getToken(captureBindingPatternNode.variableName());\n return captureBindingPatternNode.modify()\n .withVariableName(formatToken(variableName, 1, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ListBindingPatternNode transform(ListBindingPatternNode listBindingPatternNode) {\n if 
(!isInLineRange(listBindingPatternNode)) {\n return listBindingPatternNode;\n }\n SeparatedNodeList bindingPatternNodes = this.modifySeparatedNodeList(\n listBindingPatternNode.bindingPatterns());\n Token openBracket = getToken(listBindingPatternNode.openBracket());\n Token closeBracket = getToken(listBindingPatternNode.closeBracket());\n RestBindingPatternNode restBindingPattern =\n this.modifyNode(listBindingPatternNode.restBindingPattern().orElse(null));\n if (restBindingPattern != null) {\n listBindingPatternNode = listBindingPatternNode.modify()\n .withRestBindingPattern(restBindingPattern).apply();\n }\n return listBindingPatternNode.modify()\n .withBindingPatterns(bindingPatternNodes)\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public MappingBindingPatternNode transform(MappingBindingPatternNode mappingBindingPatternNode) {\n if (!isInLineRange(mappingBindingPatternNode)) {\n return mappingBindingPatternNode;\n }\n Token openBraceToken = getToken(mappingBindingPatternNode.openBrace());\n Token closeBraceToken = getToken(mappingBindingPatternNode.closeBrace());\n SeparatedNodeList fieldBindingPatternNodes =\n this.modifySeparatedNodeList(mappingBindingPatternNode.fieldBindingPatterns());\n RestBindingPatternNode restBindingPattern =\n this.modifyNode(mappingBindingPatternNode.restBindingPattern().orElse(null));\n if (restBindingPattern != null) {\n mappingBindingPatternNode = mappingBindingPatternNode.modify()\n .withRestBindingPattern(restBindingPattern).apply();\n }\n return mappingBindingPatternNode.modify()\n .withOpenBrace(formatToken(openBraceToken, 1, 0, 0, 1))\n .withCloseBrace(formatToken(closeBraceToken, 0, 0, 1, 0))\n .withFieldBindingPatterns(fieldBindingPatternNodes)\n .apply();\n }\n\n @Override\n public FieldBindingPatternFullNode transform(FieldBindingPatternFullNode fieldBindingPatternFullNode) {\n if 
(!isInLineRange(fieldBindingPatternFullNode)) {\n return fieldBindingPatternFullNode;\n }\n Token colon = getToken(fieldBindingPatternFullNode.colon());\n BindingPatternNode bindingPatternNode = this.modifyNode(fieldBindingPatternFullNode.bindingPattern());\n SimpleNameReferenceNode variableName = this.modifyNode(fieldBindingPatternFullNode.variableName());\n return fieldBindingPatternFullNode.modify()\n .withBindingPattern(bindingPatternNode)\n .withColon(formatToken(colon, 0, 0, 0, 0))\n .withVariableName(variableName)\n .apply();\n }\n\n @Override\n public FieldBindingPatternVarnameNode transform(FieldBindingPatternVarnameNode fieldBindingPatternVarnameNode) {\n if (!isInLineRange(fieldBindingPatternVarnameNode)) {\n return fieldBindingPatternVarnameNode;\n }\n SimpleNameReferenceNode variableName = this.modifyNode(fieldBindingPatternVarnameNode.variableName());\n return fieldBindingPatternVarnameNode.modify()\n .withVariableName(variableName)\n .apply();\n }\n\n @Override\n public RestBindingPatternNode transform(RestBindingPatternNode restBindingPatternNode) {\n if (!isInLineRange(restBindingPatternNode)) {\n return restBindingPatternNode;\n }\n Token ellipsisToken = getToken(restBindingPatternNode.ellipsisToken());\n SimpleNameReferenceNode variableName = restBindingPatternNode.variableName();\n return restBindingPatternNode.modify()\n .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))\n .withVariableName(variableName)\n .apply();\n }\n\n @Override\n public RemoteMethodCallActionNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) {\n if (!isInLineRange(remoteMethodCallActionNode)) {\n return remoteMethodCallActionNode;\n }\n Token openParenToken = getToken(remoteMethodCallActionNode.openParenToken());\n Token closeParenToken = getToken(remoteMethodCallActionNode.closeParenToken());\n Token rightArrowToken = getToken(remoteMethodCallActionNode.rightArrowToken());\n SeparatedNodeList arguments = 
this.modifySeparatedNodeList(remoteMethodCallActionNode\n .arguments());\n ExpressionNode expression = this.modifyNode(remoteMethodCallActionNode.expression());\n SimpleNameReferenceNode methodName = this.modifyNode(remoteMethodCallActionNode.methodName());\n return remoteMethodCallActionNode.modify()\n .withArguments(arguments)\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .withExpression(expression)\n .withMethodName(methodName)\n .withRightArrowToken(formatToken(rightArrowToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public SimpleNameReferenceNode transform(SimpleNameReferenceNode simpleNameReferenceNode) {\n if (!isInLineRange(simpleNameReferenceNode)) {\n return simpleNameReferenceNode;\n }\n Token name = getToken(simpleNameReferenceNode.name());\n return simpleNameReferenceNode.modify()\n .withName(formatToken(name, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public IfElseStatementNode transform(IfElseStatementNode ifElseStatementNode) {\n if (!isInLineRange(ifElseStatementNode)) {\n return ifElseStatementNode;\n }\n BlockStatementNode ifBody = this.modifyNode(ifElseStatementNode.ifBody());\n ExpressionNode condition = this.modifyNode(ifElseStatementNode.condition());\n Token ifKeyword = getToken(ifElseStatementNode.ifKeyword());\n Node elseBody = this.modifyNode(ifElseStatementNode.elseBody().orElse(null));\n\n int startColumn = 1;\n if (ifElseStatementNode.parent().kind() != SyntaxKind.ELSE_BLOCK) {\n startColumn = getStartColumn(ifElseStatementNode, ifElseStatementNode.kind(), true);\n }\n if (elseBody != null) {\n ifElseStatementNode = ifElseStatementNode.modify()\n .withElseBody(elseBody).apply();\n }\n return ifElseStatementNode.modify()\n .withIfKeyword(formatToken(ifKeyword, startColumn, 0, 0, 0))\n .withIfBody(ifBody)\n .withCondition(condition)\n .apply();\n }\n\n @Override\n public ElseBlockNode transform(ElseBlockNode elseBlockNode) {\n if 
(!isInLineRange(elseBlockNode)) {\n return elseBlockNode;\n }\n Token elseKeyword = getToken(elseBlockNode.elseKeyword());\n StatementNode elseBody = this.modifyNode(elseBlockNode.elseBody());\n return elseBlockNode.modify()\n .withElseKeyword(formatToken(elseKeyword, 1, 0, 0, 0))\n .withElseBody(elseBody)\n .apply();\n }\n\n @Override\n public BracedExpressionNode transform(BracedExpressionNode bracedExpressionNode) {\n if (!isInLineRange(bracedExpressionNode)) {\n return bracedExpressionNode;\n }\n Token openParen = getToken(bracedExpressionNode.openParen());\n Token closeParen = getToken(bracedExpressionNode.closeParen());\n ExpressionNode expression = this.modifyNode(bracedExpressionNode.expression());\n return bracedExpressionNode.modify()\n .withOpenParen(formatToken(openParen, 1, 0, 0, 0))\n .withCloseParen(formatToken(closeParen, 0, 0, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public TypeTestExpressionNode transform(TypeTestExpressionNode typeTestExpressionNode) {\n if (!isInLineRange(typeTestExpressionNode)) {\n return typeTestExpressionNode;\n }\n ExpressionNode expression = this.modifyNode(typeTestExpressionNode.expression());\n Node typeDescriptor = this.modifyNode(typeTestExpressionNode.typeDescriptor());\n Token isToken = getToken(typeTestExpressionNode.isKeyword());\n return typeTestExpressionNode.modify()\n .withExpression(expression)\n .withIsKeyword(formatToken(isToken, 1, 1, 0, 0))\n .withTypeDescriptor(typeDescriptor)\n .apply();\n }\n\n @Override\n public ErrorTypeDescriptorNode transform(ErrorTypeDescriptorNode errorTypeDescriptorNode) {\n if (!isInLineRange(errorTypeDescriptorNode)) {\n return errorTypeDescriptorNode;\n }\n Token errorKeywordToken = getToken(errorTypeDescriptorNode.errorKeywordToken());\n ErrorTypeParamsNode errorTypeParamsNode =\n this.modifyNode(errorTypeDescriptorNode.errorTypeParamsNode().orElse(null));\n if (errorTypeParamsNode != null) {\n errorTypeDescriptorNode = 
errorTypeDescriptorNode.modify()\n .withErrorTypeParamsNode(errorTypeParamsNode).apply();\n }\n return errorTypeDescriptorNode.modify()\n .withErrorKeywordToken(formatToken(errorKeywordToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ModuleVariableDeclarationNode transform(ModuleVariableDeclarationNode moduleVariableDeclarationNode) {\n if (!isInLineRange(moduleVariableDeclarationNode)) {\n return moduleVariableDeclarationNode;\n }\n Token equalsToken = getToken(moduleVariableDeclarationNode.equalsToken());\n Token semicolonToken = getToken(moduleVariableDeclarationNode.semicolonToken());\n Token finalKeyword = getToken(moduleVariableDeclarationNode.finalKeyword().orElse(null));\n MetadataNode metadata = this.modifyNode(moduleVariableDeclarationNode.metadata().orElse(null));\n ExpressionNode initializer = this.modifyNode(moduleVariableDeclarationNode.initializer());\n if (metadata != null) {\n moduleVariableDeclarationNode = moduleVariableDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n if (finalKeyword != null) {\n moduleVariableDeclarationNode = moduleVariableDeclarationNode.modify()\n .withFinalKeyword(formatToken(finalKeyword, 0, 1, 0, 0)).apply();\n }\n return moduleVariableDeclarationNode.modify()\n .withTypedBindingPattern(this.modifyNode(moduleVariableDeclarationNode.typedBindingPattern()))\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withInitializer(initializer)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 2))\n .apply();\n }\n\n @Override\n public ConstantDeclarationNode transform(ConstantDeclarationNode constantDeclarationNode) {\n if (!isInLineRange(constantDeclarationNode)) {\n return constantDeclarationNode;\n }\n Token constKeyword = getToken(constantDeclarationNode.constKeyword());\n Token variableName = getToken(constantDeclarationNode.variableName());\n Token equalsToken = getToken(constantDeclarationNode.equalsToken());\n Token semicolonToken = 
getToken(constantDeclarationNode.semicolonToken());\n Token visibilityQualifier = getToken(constantDeclarationNode.visibilityQualifier().orElse(null));\n Node initializer = this.modifyNode(constantDeclarationNode.initializer());\n MetadataNode metadata = this.modifyNode(constantDeclarationNode.metadata().orElse(null));\n TypeDescriptorNode typeDescriptorNode = this.modifyNode(constantDeclarationNode.typeDescriptor().orElse(null));\n if (metadata != null) {\n constantDeclarationNode = constantDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return constantDeclarationNode.modify()\n .withVisibilityQualifier(formatToken(visibilityQualifier, 1, 1, 0, 0))\n .withConstKeyword(formatToken(constKeyword, 1, 1, 0, 0))\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withInitializer(initializer)\n .withSemicolonToken(formatToken(semicolonToken, 1, 1, 0, 1))\n .withTypeDescriptor(typeDescriptorNode)\n .withVariableName(variableName)\n .apply();\n }\n\n @Override\n public MetadataNode transform(MetadataNode metadataNode) {\n if (!isInLineRange(metadataNode)) {\n return metadataNode;\n }\n NodeList annotations = this.modifyNodeList(metadataNode.annotations());\n Node documentationString = metadataNode.documentationString().orElse(null);\n if (documentationString != null) {\n metadataNode = metadataNode.modify()\n .withDocumentationString(this.modifyNode(documentationString)).apply();\n }\n return metadataNode.modify()\n .withAnnotations(annotations)\n .apply();\n }\n\n @Override\n public BlockStatementNode transform(BlockStatementNode blockStatementNode) {\n if (!isInLineRange(blockStatementNode)) {\n return blockStatementNode;\n }\n int startColumn = getStartColumn(blockStatementNode, blockStatementNode.kind(), false);\n Token openBraceToken = getToken(blockStatementNode.openBraceToken());\n Token closeBraceToken = getToken(blockStatementNode.closeBraceToken());\n NodeList statements = this.modifyNodeList(blockStatementNode.statements());\n\n int 
trailingNewLines = 1;\n if (blockStatementNode.parent() != null && blockStatementNode.parent().kind() == SyntaxKind.IF_ELSE_STATEMENT) {\n IfElseStatementNode ifElseStatementNode = (IfElseStatementNode) blockStatementNode.parent();\n if (ifElseStatementNode.elseBody().isPresent()) {\n trailingNewLines = 0;\n }\n }\n return blockStatementNode.modify()\n .withOpenBraceToken(formatToken(openBraceToken, 1, 0, 0, 1))\n .withCloseBraceToken(formatToken(closeBraceToken, startColumn, 0, 0, trailingNewLines))\n .withStatements(statements)\n .apply();\n }\n\n @Override\n public MappingConstructorExpressionNode transform(\n MappingConstructorExpressionNode mappingConstructorExpressionNode) {\n if (!isInLineRange(mappingConstructorExpressionNode)) {\n return mappingConstructorExpressionNode;\n }\n int startColumn = getStartColumn(mappingConstructorExpressionNode, mappingConstructorExpressionNode.kind(),\n false);\n Token openBrace = getToken(mappingConstructorExpressionNode.openBrace());\n Token closeBrace = getToken(mappingConstructorExpressionNode.closeBrace());\n SeparatedNodeList fields = this.modifySeparatedNodeList(\n mappingConstructorExpressionNode.fields());\n return mappingConstructorExpressionNode.modify()\n .withOpenBrace(formatToken(openBrace, 0, 0, 0, 1))\n .withCloseBrace(formatToken(closeBrace, startColumn, 0, 1, 0))\n .withFields(fields)\n .apply();\n }\n\n @Override\n public ListenerDeclarationNode transform(ListenerDeclarationNode listenerDeclarationNode) {\n if (!isInLineRange(listenerDeclarationNode)) {\n return listenerDeclarationNode;\n }\n Token equalsToken = getToken(listenerDeclarationNode.equalsToken());\n Token variableName = getToken(listenerDeclarationNode.variableName());\n Token semicolonToken = getToken(listenerDeclarationNode.semicolonToken());\n Token listenerKeyword = getToken(listenerDeclarationNode.listenerKeyword());\n Token visibilityQualifier = getToken(listenerDeclarationNode.visibilityQualifier().orElse(null));\n Node initializer = 
this.modifyNode(listenerDeclarationNode.initializer());\n MetadataNode metadata = this.modifyNode(listenerDeclarationNode.metadata().orElse(null));\n Node typeDescriptor = this.modifyNode(listenerDeclarationNode.typeDescriptor());\n if (visibilityQualifier != null) {\n listenerDeclarationNode = listenerDeclarationNode.modify()\n .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 0, 0, 0)).apply();\n }\n if (metadata != null) {\n listenerDeclarationNode = listenerDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return listenerDeclarationNode.modify()\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withInitializer(initializer)\n .withListenerKeyword(formatToken(listenerKeyword, 0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1))\n .withTypeDescriptor(typeDescriptor)\n .withVariableName(formatToken(variableName, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public SpecificFieldNode transform(SpecificFieldNode specificFieldNode) {\n if (!isInLineRange(specificFieldNode)) {\n return specificFieldNode;\n }\n int startColumn = getStartColumn(specificFieldNode, specificFieldNode.kind(), true);\n Token fieldName = getToken((Token) specificFieldNode.fieldName());\n Token readOnlyKeyword = specificFieldNode.readonlyKeyword().orElse(null);\n Token colon = getToken(specificFieldNode.colon().orElse(null));\n ExpressionNode expressionNode = this.modifyNode(specificFieldNode.valueExpr().orElse(null));\n if (readOnlyKeyword != null) {\n specificFieldNode = specificFieldNode.modify()\n .withReadonlyKeyword(formatToken(readOnlyKeyword, 0, 0, 0, 0)).apply();\n }\n return specificFieldNode.modify()\n .withFieldName(formatToken(fieldName, startColumn, 0, 0, 0))\n .withColon(formatToken(colon, 0, 1, 0, 0))\n .withValueExpr(expressionNode)\n .apply();\n }\n\n @Override\n public BinaryExpressionNode transform(BinaryExpressionNode binaryExpressionNode) {\n if (!isInLineRange(binaryExpressionNode)) {\n return 
binaryExpressionNode;\n }\n Node lhsExpr = this.modifyNode(binaryExpressionNode.lhsExpr());\n Node rhsExpr = this.modifyNode(binaryExpressionNode.rhsExpr());\n Token operator = getToken(binaryExpressionNode.operator());\n return binaryExpressionNode.modify()\n .withLhsExpr(lhsExpr)\n .withRhsExpr(rhsExpr)\n .withOperator(formatToken(operator, 1, 1, 0, 0))\n .apply();\n }\n\n @Override\n public ArrayTypeDescriptorNode transform(ArrayTypeDescriptorNode arrayTypeDescriptorNode) {\n if (!isInLineRange(arrayTypeDescriptorNode)) {\n return arrayTypeDescriptorNode;\n }\n Node arrayLength = arrayTypeDescriptorNode.arrayLength().orElse(null);\n Token openBracket = getToken(arrayTypeDescriptorNode.openBracket());\n Token closeBracket = getToken(arrayTypeDescriptorNode.closeBracket());\n TypeDescriptorNode memberTypeDesc = this.modifyNode(arrayTypeDescriptorNode.memberTypeDesc());\n if (arrayLength != null) {\n arrayTypeDescriptorNode = arrayTypeDescriptorNode.modify()\n .withArrayLength(this.modifyNode(arrayLength)).apply();\n }\n return arrayTypeDescriptorNode.modify()\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .withMemberTypeDesc(memberTypeDesc)\n .apply();\n }\n\n @Override\n public AssignmentStatementNode transform(AssignmentStatementNode assignmentStatementNode) {\n if (!isInLineRange(assignmentStatementNode)) {\n return assignmentStatementNode;\n }\n Node varRef = this.modifyNode(assignmentStatementNode.varRef());\n ExpressionNode expression = this.modifyNode(assignmentStatementNode.expression());\n Token equalsToken = getToken(assignmentStatementNode.equalsToken());\n Token semicolonToken = getToken(assignmentStatementNode.semicolonToken());\n return assignmentStatementNode.modify()\n .withVarRef(varRef)\n .withExpression(expression)\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1))\n .apply();\n }\n\n @Override\n public 
IndexedExpressionNode transform(IndexedExpressionNode indexedExpressionNode) {\n if (!isInLineRange(indexedExpressionNode)) {\n return indexedExpressionNode;\n }\n SeparatedNodeList keyExpression = this.modifySeparatedNodeList(\n indexedExpressionNode.keyExpression());\n ExpressionNode containerExpression = this.modifyNode(indexedExpressionNode.containerExpression());\n Token openBracket = getToken(indexedExpressionNode.openBracket());\n Token closeBracket = getToken(indexedExpressionNode.closeBracket());\n return indexedExpressionNode.modify()\n .withKeyExpression(keyExpression)\n .withContainerExpression(containerExpression)\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public CheckExpressionNode transform(CheckExpressionNode checkExpressionNode) {\n if (!isInLineRange(checkExpressionNode)) {\n return checkExpressionNode;\n }\n int startColumn = getStartColumn(checkExpressionNode, checkExpressionNode.kind(), false);\n Token checkKeyword = getToken(checkExpressionNode.checkKeyword());\n ExpressionNode expressionNode = this.modifyNode(checkExpressionNode.expression());\n return checkExpressionNode.modify()\n .withCheckKeyword(formatToken(checkKeyword, startColumn, 1, 0, 0))\n .withExpression(expressionNode)\n .apply();\n }\n\n @Override\n public WhileStatementNode transform(WhileStatementNode whileStatementNode) {\n if (!isInLineRange(whileStatementNode)) {\n return whileStatementNode;\n }\n int startColumn = getStartColumn(whileStatementNode, whileStatementNode.kind(), true);\n Token whileKeyword = getToken(whileStatementNode.whileKeyword());\n ExpressionNode condition = this.modifyNode(whileStatementNode.condition());\n BlockStatementNode whileBody = this.modifyNode(whileStatementNode.whileBody());\n return whileStatementNode.modify()\n .withWhileKeyword(formatToken(whileKeyword, startColumn, 0, 0, 0))\n .withCondition(condition)\n .withWhileBody(whileBody)\n 
.apply();\n }\n\n @Override\n public ReturnStatementNode transform(ReturnStatementNode returnStatementNode) {\n if (!isInLineRange(returnStatementNode)) {\n return returnStatementNode;\n }\n int startColumn = getStartColumn(returnStatementNode, returnStatementNode.kind(), true);\n Token returnKeyword = getToken(returnStatementNode.returnKeyword());\n ExpressionNode expressionNode = returnStatementNode.expression().orElse(null);\n Token semicolonToken = getToken(returnStatementNode.semicolonToken());\n if (expressionNode != null) {\n returnStatementNode = returnStatementNode.modify()\n .withExpression(this.modifyNode(expressionNode)).apply();\n }\n return returnStatementNode.modify()\n .withReturnKeyword(formatToken(returnKeyword, startColumn, 1, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1))\n .apply();\n }\n\n @Override\n public MethodCallExpressionNode transform(MethodCallExpressionNode methodCallExpressionNode) {\n if (!isInLineRange(methodCallExpressionNode)) {\n return methodCallExpressionNode;\n }\n SeparatedNodeList arguments = this.modifySeparatedNodeList(methodCallExpressionNode\n .arguments());\n Token openParenToken = getToken(methodCallExpressionNode.openParenToken());\n Token closeParenToken = getToken(methodCallExpressionNode.closeParenToken());\n Token dotToken = getToken(methodCallExpressionNode.dotToken());\n ExpressionNode expression = this.modifyNode(methodCallExpressionNode.expression());\n NameReferenceNode methodName = this.modifyNode(methodCallExpressionNode.methodName());\n return methodCallExpressionNode.modify()\n .withArguments(arguments)\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .withDotToken(formatToken(dotToken, 0, 0, 0, 0))\n .withExpression(expression)\n .withMethodName(methodName)\n .apply();\n }\n\n @Override\n public NilLiteralNode transform(NilLiteralNode nilLiteralNode) {\n Token openParenToken = 
getToken(nilLiteralNode.openParenToken());\n Token closeParenToken = getToken(nilLiteralNode.closeParenToken());\n return nilLiteralNode.modify()\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public NilTypeDescriptorNode transform(NilTypeDescriptorNode nilTypeDescriptorNode) {\n Token openParenToken = getToken(nilTypeDescriptorNode.openParenToken());\n Token closeParenToken = getToken(nilTypeDescriptorNode.closeParenToken());\n return nilTypeDescriptorNode.modify()\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public UnionTypeDescriptorNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) {\n TypeDescriptorNode leftTypeDesc = this.modifyNode(unionTypeDescriptorNode.leftTypeDesc());\n Token pipeToken = getToken(unionTypeDescriptorNode.pipeToken());\n TypeDescriptorNode rightTypeDesc = this.modifyNode(unionTypeDescriptorNode.rightTypeDesc());\n return unionTypeDescriptorNode.modify()\n .withLeftTypeDesc(leftTypeDesc)\n .withPipeToken(pipeToken)\n .withRightTypeDesc(rightTypeDesc)\n .apply();\n }\n\n @Override\n \n\n @Override\n public ModuleXMLNamespaceDeclarationNode transform(\n ModuleXMLNamespaceDeclarationNode moduleXMLNamespaceDeclarationNode) {\n Token xmlnsKeyword = getToken(moduleXMLNamespaceDeclarationNode.xmlnsKeyword());\n ExpressionNode namespaceuri = this.modifyNode(moduleXMLNamespaceDeclarationNode.namespaceuri());\n Token asKeyword = getToken(moduleXMLNamespaceDeclarationNode.asKeyword());\n IdentifierToken namespacePrefix = this.modifyNode(moduleXMLNamespaceDeclarationNode.namespacePrefix());\n Token semicolonToken = getToken(moduleXMLNamespaceDeclarationNode.semicolonToken());\n return moduleXMLNamespaceDeclarationNode.modify()\n .withNamespacePrefix(namespacePrefix)\n .withNamespaceuri(namespaceuri)\n 
.withXmlnsKeyword(formatToken(xmlnsKeyword, 0, 0, 0, 0))\n .withAsKeyword(formatToken(asKeyword, 0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XmlTypeDescriptorNode transform(XmlTypeDescriptorNode xmlTypeDescriptorNode) {\n int startColumn = getStartColumn(xmlTypeDescriptorNode, xmlTypeDescriptorNode.kind(), true);\n Token xmlKeywordToken = getToken(xmlTypeDescriptorNode.xmlKeywordToken());\n TypeParameterNode xmlTypeParamsNode = this.modifyNode(xmlTypeDescriptorNode.xmlTypeParamsNode().orElse(null));\n if (xmlTypeParamsNode != null) {\n xmlTypeDescriptorNode = xmlTypeDescriptorNode.modify()\n .withXmlTypeParamsNode(xmlTypeParamsNode).apply();\n }\n return xmlTypeDescriptorNode.modify()\n .withXmlKeywordToken(formatToken(xmlKeywordToken, startColumn, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLElementNode transform(XMLElementNode xMLElementNode) {\n XMLStartTagNode startTag = this.modifyNode(xMLElementNode.startTag());\n NodeList content = modifyNodeList(xMLElementNode.content());\n XMLEndTagNode endTag = this.modifyNode(xMLElementNode.endTag());\n return xMLElementNode.modify()\n .withStartTag(startTag)\n .withEndTag(endTag)\n .withContent(content)\n .apply();\n }\n\n @Override\n public XMLStartTagNode transform(XMLStartTagNode xMLStartTagNode) {\n Token ltToken = getToken(xMLStartTagNode.ltToken());\n XMLNameNode name = this.modifyNode(xMLStartTagNode.name());\n NodeList attributes = modifyNodeList(xMLStartTagNode.attributes());\n Token getToken = getToken(xMLStartTagNode.getToken());\n return xMLStartTagNode.modify()\n .withName(name)\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withAttributes(attributes)\n .withGetToken(formatToken(getToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLEndTagNode transform(XMLEndTagNode xMLEndTagNode) {\n Token ltToken = getToken(xMLEndTagNode.ltToken());\n Token slashToken = getToken(xMLEndTagNode.slashToken());\n XMLNameNode name = 
this.modifyNode(xMLEndTagNode.name());\n Token getToken = getToken(xMLEndTagNode.getToken());\n return xMLEndTagNode.modify()\n .withName(name)\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withSlashToken(formatToken(slashToken, 0, 0, 0, 0))\n .withGetToken(formatToken(getToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLSimpleNameNode transform(XMLSimpleNameNode xMLSimpleNameNode) {\n Token name = getToken(xMLSimpleNameNode.name());\n if (xMLSimpleNameNode.parent().kind() == SyntaxKind.XML_PI &&\n ((XMLProcessingInstruction) xMLSimpleNameNode.parent()).data() != null) {\n return xMLSimpleNameNode.modify()\n .withName(formatToken(name, 0, 1, 0, 0))\n .apply();\n }\n return xMLSimpleNameNode.modify()\n .withName(formatToken(name, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLQualifiedNameNode transform(XMLQualifiedNameNode xMLQualifiedNameNode) {\n XMLSimpleNameNode prefix = this.modifyNode(xMLQualifiedNameNode.prefix());\n Token colon = getToken(xMLQualifiedNameNode.colon());\n XMLSimpleNameNode name = this.modifyNode(xMLQualifiedNameNode.name());\n return xMLQualifiedNameNode.modify()\n .withPrefix(prefix)\n .withName(name)\n .withColon(formatToken(colon, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLEmptyElementNode transform(XMLEmptyElementNode xMLEmptyElementNode) {\n Token ltToken = getToken(xMLEmptyElementNode.ltToken());\n XMLNameNode name = this.modifyNode(xMLEmptyElementNode.name());\n NodeList attributes = this.modifyNodeList(xMLEmptyElementNode.attributes());\n Token slashToken = getToken(xMLEmptyElementNode.slashToken());\n Token getToken = getToken(xMLEmptyElementNode.getToken());\n return xMLEmptyElementNode.modify()\n .withName(name)\n .withAttributes(attributes)\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withSlashToken(formatToken(slashToken, 0, 0, 0, 0))\n .withGetToken(formatToken(getToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLTextNode transform(XMLTextNode xMLTextNode) {\n Token 
content = getToken(xMLTextNode.content());\n return xMLTextNode.modify()\n .withContent(formatToken(content, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLAttributeNode transform(XMLAttributeNode xMLAttributeNode) {\n XMLNameNode attributeName = this.modifyNode(xMLAttributeNode.attributeName());\n Token equalToken = getToken(xMLAttributeNode.equalToken());\n XMLAttributeValue value = this.modifyNode(xMLAttributeNode.value());\n return xMLAttributeNode.modify()\n .withValue(value)\n .withAttributeName(attributeName)\n .withEqualToken(formatToken(equalToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLAttributeValue transform(XMLAttributeValue xMLAttributeValue) {\n Token startQuote = getToken(xMLAttributeValue.startQuote());\n NodeList value = this.modifyNodeList(xMLAttributeValue.value());\n Token endQuote = getToken(xMLAttributeValue.endQuote());\n return xMLAttributeValue.modify()\n .withStartQuote(formatToken(startQuote, 0, 0, 0, 0))\n .withValue(value)\n .withEndQuote(formatToken(endQuote, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLComment transform(XMLComment xMLComment) {\n Token commentStart = getToken(xMLComment.commentStart());\n NodeList content = this.modifyNodeList(xMLComment.content());\n Token commentEnd = getToken(xMLComment.commentEnd());\n return xMLComment.modify()\n .withCommentStart(formatToken(commentStart, 0, 0, 0, 0))\n .withContent(content)\n .withCommentEnd(formatToken(commentEnd, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLProcessingInstruction transform(XMLProcessingInstruction xMLProcessingInstruction) {\n Token piStart = getToken(xMLProcessingInstruction.piStart());\n XMLNameNode target = this.modifyNode(xMLProcessingInstruction.target());\n NodeList data = this.modifyNodeList(xMLProcessingInstruction.data());\n Token piEnd = getToken(xMLProcessingInstruction.piEnd());\n return xMLProcessingInstruction.modify()\n .withTarget(target)\n .withPiStart(formatToken(piStart, 0, 0, 0, 0))\n 
.withData(data)\n .withPiEnd(formatToken(piEnd, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLFilterExpressionNode transform(XMLFilterExpressionNode xMLFilterExpressionNode) {\n ExpressionNode expression = this.modifyNode(xMLFilterExpressionNode.expression());\n XMLNamePatternChainingNode xmlPatternChain = this.modifyNode(xMLFilterExpressionNode.xmlPatternChain());\n return xMLFilterExpressionNode.modify()\n .withExpression(expression)\n .withXmlPatternChain(xmlPatternChain)\n .apply();\n }\n\n @Override\n public XMLStepExpressionNode transform(XMLStepExpressionNode xMLStepExpressionNode) {\n ExpressionNode expression = this.modifyNode(xMLStepExpressionNode.expression());\n Node xmlStepStart = this.modifyNode(xMLStepExpressionNode.xmlStepStart());\n return xMLStepExpressionNode.modify()\n .withExpression(expression)\n .withXmlStepStart(xmlStepStart)\n .apply();\n }\n\n @Override\n public XMLNamePatternChainingNode transform(XMLNamePatternChainingNode xMLNamePatternChainingNode) {\n Token startToken = getToken(xMLNamePatternChainingNode.startToken());\n SeparatedNodeList xmlNamePattern = modifySeparatedNodeList(xMLNamePatternChainingNode.xmlNamePattern());\n Token gtToken = getToken(xMLNamePatternChainingNode.gtToken());\n return xMLNamePatternChainingNode.modify()\n .withStartToken(formatToken(startToken, 0, 0, 0, 0))\n .withXmlNamePattern(xmlNamePattern)\n .withGtToken(formatToken(gtToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public XMLAtomicNamePatternNode transform(XMLAtomicNamePatternNode xMLAtomicNamePatternNode) {\n Token prefix = getToken(xMLAtomicNamePatternNode.prefix());\n Token colon = getToken(xMLAtomicNamePatternNode.colon());\n Token name = getToken(xMLAtomicNamePatternNode.name());\n return xMLAtomicNamePatternNode.modify()\n .withPrefix(formatToken(prefix, 0, 0, 0, 0))\n .withColon(formatToken(colon, 0, 0, 0, 0))\n .withName(formatToken(name, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TemplateExpressionNode 
transform(TemplateExpressionNode templateExpressionNode) {\n Token type = getToken(templateExpressionNode.type().orElse(null));\n Token startBacktick = getToken(templateExpressionNode.startBacktick());\n NodeList content = modifyNodeList(templateExpressionNode.content());\n Token endBacktick = getToken(templateExpressionNode.endBacktick());\n return templateExpressionNode.modify()\n .withStartBacktick(formatToken(startBacktick, 1, 0, 0, 0))\n .withContent(content)\n .withType(formatToken(type, 0, 0, 0, 0))\n .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ByteArrayLiteralNode transform(ByteArrayLiteralNode byteArrayLiteralNode) {\n Token type = getToken(byteArrayLiteralNode.type());\n Token startBacktick = getToken(byteArrayLiteralNode.startBacktick());\n Token content = getToken(byteArrayLiteralNode.content().orElse(null));\n Token endBacktick = getToken(byteArrayLiteralNode.endBacktick());\n if (content != null) {\n byteArrayLiteralNode = byteArrayLiteralNode.modify()\n .withContent(formatToken(content, 0, 0, 0, 0)).apply();\n }\n return byteArrayLiteralNode.modify()\n .withType(formatToken(type, 0, 0, 0, 0))\n .withStartBacktick(formatToken(startBacktick, 0, 0, 0, 0))\n .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ListConstructorExpressionNode transform(ListConstructorExpressionNode listConstructorExpressionNode) {\n Token openBracket = getToken(listConstructorExpressionNode.openBracket());\n SeparatedNodeList expressions = this.modifySeparatedNodeList(listConstructorExpressionNode.expressions());\n Token closeBracket = getToken(listConstructorExpressionNode.closeBracket());\n return listConstructorExpressionNode.modify()\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withExpressions(expressions)\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TypeReferenceNode transform(TypeReferenceNode typeReferenceNode) 
{\n Token asteriskToken = getToken(typeReferenceNode.asteriskToken());\n Node typeName = this.modifyNode(typeReferenceNode.typeName());\n Token semicolonToken = getToken(typeReferenceNode.semicolonToken());\n return typeReferenceNode.modify()\n .withTypeName(typeName)\n .withAsteriskToken(formatToken(asteriskToken, 0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TupleTypeDescriptorNode transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) {\n int startCol = getStartColumn(tupleTypeDescriptorNode, tupleTypeDescriptorNode.kind(), true);\n Token openBracketToken = getToken(tupleTypeDescriptorNode.openBracketToken());\n SeparatedNodeList memberTypeDesc = this.modifySeparatedNodeList(tupleTypeDescriptorNode.memberTypeDesc());\n Token closeBracketToken = getToken(tupleTypeDescriptorNode.closeBracketToken());\n return tupleTypeDescriptorNode.modify()\n .withOpenBracketToken(formatToken(openBracketToken, startCol, 0, 0, 0))\n .withMemberTypeDesc(memberTypeDesc)\n .withCloseBracketToken(formatToken(closeBracketToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public MappingMatchPatternNode transform(MappingMatchPatternNode mappingMatchPatternNode) {\n Token openBraceToken = getToken(mappingMatchPatternNode.openBraceToken());\n SeparatedNodeList fieldMatchPatterns =\n this.modifySeparatedNodeList(mappingMatchPatternNode.fieldMatchPatterns());\n RestMatchPatternNode restMatchPattern =\n this.modifyNode(mappingMatchPatternNode.restMatchPattern().orElse(null));\n Token closeBraceToken = getToken(mappingMatchPatternNode.closeBraceToken());\n if (restMatchPattern != null) {\n mappingMatchPatternNode = mappingMatchPatternNode.modify()\n .withRestMatchPattern(restMatchPattern).apply();\n }\n return mappingMatchPatternNode.modify()\n .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0))\n .withFieldMatchPatterns(fieldMatchPatterns)\n .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0))\n 
.apply();\n }\n\n @Override\n public ParameterizedTypeDescriptorNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescriptorNode) {\n int startCol = getStartColumn(parameterizedTypeDescriptorNode, parameterizedTypeDescriptorNode.kind(), true);\n Token parameterizedType = getToken(parameterizedTypeDescriptorNode.parameterizedType());\n TypeParameterNode typeParameter = this.modifyNode(parameterizedTypeDescriptorNode.typeParameter());\n return parameterizedTypeDescriptorNode.modify()\n .withParameterizedType(formatToken(parameterizedType, startCol, 0, 0, 0))\n .withTypeParameter(typeParameter)\n .apply();\n }\n\n @Override\n public TypeParameterNode transform(TypeParameterNode typeParameterNode) {\n Token ltToken = getToken(typeParameterNode.ltToken());\n TypeDescriptorNode typeNode = this.modifyNode(typeParameterNode.typeNode());\n Token gtToken = getToken(typeParameterNode.gtToken());\n return typeParameterNode.modify()\n .withTypeNode(typeNode)\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withGtToken(formatToken(gtToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public StartActionNode transform(StartActionNode startActionNode) {\n if (!isInLineRange(startActionNode)) {\n return startActionNode;\n }\n NodeList annotations = this.modifyNodeList(startActionNode.annotations());\n Token startKeyword = getToken(startActionNode.startKeyword());\n ExpressionNode expression = this.modifyNode(startActionNode.expression());\n return startActionNode.modify()\n .withAnnotations(annotations)\n .withStartKeyword(formatToken(startKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public FlushActionNode transform(FlushActionNode flushActionNode) {\n if (!isInLineRange(flushActionNode)) {\n return flushActionNode;\n }\n Token flushKeyword = getToken(flushActionNode.flushKeyword());\n NameReferenceNode peerWorker = this.modifyNode(flushActionNode.peerWorker());\n return flushActionNode.modify()\n 
.withFlushKeyword(formatToken(flushKeyword, 0, 1, 0, 0))\n .withPeerWorker(peerWorker)\n .apply();\n }\n\n @Override\n public NamedWorkerDeclarationNode transform(NamedWorkerDeclarationNode namedWorkerDeclarationNode) {\n if (!isInLineRange(namedWorkerDeclarationNode)) {\n return namedWorkerDeclarationNode;\n }\n NodeList annotations = this.modifyNodeList(namedWorkerDeclarationNode.annotations());\n Token workerKeyword = getToken(namedWorkerDeclarationNode.workerKeyword());\n IdentifierToken workerName = this.modifyNode(namedWorkerDeclarationNode.workerName());\n Node returnTypeDesc =\n this.modifyNode(namedWorkerDeclarationNode.returnTypeDesc().orElse(null));\n BlockStatementNode workerBody = this.modifyNode(namedWorkerDeclarationNode.workerBody());\n if (returnTypeDesc != null) {\n namedWorkerDeclarationNode = namedWorkerDeclarationNode.modify()\n .withReturnTypeDesc(returnTypeDesc).apply();\n }\n return namedWorkerDeclarationNode.modify()\n .withAnnotations(annotations)\n .withWorkerKeyword(formatToken(workerKeyword, 0, 0, 0, 0))\n .withWorkerName(workerName)\n .withWorkerBody(workerBody)\n .apply();\n }\n\n @Override\n public TypeDefinitionNode transform(TypeDefinitionNode typeDefinitionNode) {\n if (!isInLineRange(typeDefinitionNode)) {\n return typeDefinitionNode;\n }\n MetadataNode metadata = this.modifyNode(typeDefinitionNode.metadata().orElse(null));\n Token visibilityQualifier = getToken(typeDefinitionNode.visibilityQualifier().orElse(null));\n Token typeKeyword = getToken(typeDefinitionNode.typeKeyword());\n Token typeName = getToken(typeDefinitionNode.typeName());\n Node typeDescriptor = this.modifyNode(typeDefinitionNode.typeDescriptor());\n Token semicolonToken = this.modifyToken(typeDefinitionNode.semicolonToken());\n if (metadata != null) {\n typeDefinitionNode = typeDefinitionNode.modify()\n .withMetadata(metadata).apply();\n }\n if (visibilityQualifier != null) {\n typeDefinitionNode = typeDefinitionNode.modify()\n 
.withVisibilityQualifier(formatToken(visibilityQualifier, 1, 1, 0, 0)).apply();\n }\n return typeDefinitionNode.modify()\n .withTypeKeyword(formatToken(typeKeyword, 1, 1, 0, 0))\n .withTypeName(formatToken(typeName, 1, 1, 0, 0))\n .withTypeDescriptor(typeDescriptor)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public CompoundAssignmentStatementNode transform(CompoundAssignmentStatementNode compoundAssignmentStatementNode) {\n if (!isInLineRange(compoundAssignmentStatementNode)) {\n return compoundAssignmentStatementNode;\n }\n ExpressionNode lhsExpression = this.modifyNode(compoundAssignmentStatementNode.lhsExpression());\n Token binaryOperator = getToken(compoundAssignmentStatementNode.binaryOperator());\n Token equalsToken = getToken(compoundAssignmentStatementNode.equalsToken());\n ExpressionNode rhsExpression = this.modifyNode(compoundAssignmentStatementNode.rhsExpression());\n Token semicolonToken = getToken(compoundAssignmentStatementNode.semicolonToken());\n return compoundAssignmentStatementNode.modify()\n .withLhsExpression(lhsExpression)\n .withBinaryOperator(formatToken(binaryOperator, 1, 1, 0, 0))\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withRhsExpression(rhsExpression)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public BreakStatementNode transform(BreakStatementNode breakStatementNode) {\n if (!isInLineRange(breakStatementNode)) {\n return breakStatementNode;\n }\n Token breakToken = getToken(breakStatementNode.breakToken());\n Token semicolonToken = getToken(breakStatementNode.semicolonToken());\n return breakStatementNode.modify()\n .withBreakToken(formatToken(breakToken, 0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ContinueStatementNode transform(ContinueStatementNode continueStatementNode) {\n if (!isInLineRange(continueStatementNode)) {\n return 
continueStatementNode;\n }\n Token continueToken = getToken(continueStatementNode.continueToken());\n Token semicolonToken = getToken(continueStatementNode.semicolonToken());\n return continueStatementNode.modify()\n .withContinueToken(formatToken(continueToken, 0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ExternalFunctionBodyNode transform(ExternalFunctionBodyNode externalFunctionBodyNode) {\n if (!isInLineRange(externalFunctionBodyNode)) {\n return externalFunctionBodyNode;\n }\n Token equalsToken = getToken(externalFunctionBodyNode.equalsToken());\n NodeList annotations = this.modifyNodeList(externalFunctionBodyNode.annotations());\n Token externalKeyword = getToken(externalFunctionBodyNode.externalKeyword());\n Token semicolonToken = getToken(externalFunctionBodyNode.semicolonToken());\n return externalFunctionBodyNode.modify()\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withAnnotations(annotations)\n .withExternalKeyword(formatToken(externalKeyword, 1, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 1, 0, 0, 0))\n .apply();\n }\n\n @Override\n public PanicStatementNode transform(PanicStatementNode panicStatementNode) {\n if (!isInLineRange(panicStatementNode)) {\n return panicStatementNode;\n }\n Token panicKeyword = getToken(panicStatementNode.panicKeyword());\n ExpressionNode expression = this.modifyNode(panicStatementNode.expression());\n Token semicolonToken = getToken(panicStatementNode.semicolonToken());\n return panicStatementNode.modify()\n .withPanicKeyword(formatToken(panicKeyword, 1, 1, 0, 0))\n .withExpression(expression)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public LocalTypeDefinitionStatementNode transform(\n LocalTypeDefinitionStatementNode localTypeDefinitionStatementNode) {\n if (!isInLineRange(localTypeDefinitionStatementNode)) {\n return localTypeDefinitionStatementNode;\n }\n NodeList 
annotations = this.modifyNodeList(localTypeDefinitionStatementNode.annotations());\n Token typeKeyword = getToken(localTypeDefinitionStatementNode.typeKeyword());\n Node typeName = this.modifyNode(localTypeDefinitionStatementNode.typeName());\n Node typeDescriptor = this.modifyNode(localTypeDefinitionStatementNode.typeDescriptor());\n Token semicolonToken = getToken(localTypeDefinitionStatementNode.semicolonToken());\n return localTypeDefinitionStatementNode.modify()\n .withAnnotations(annotations)\n .withTypeKeyword(formatToken(typeKeyword, 0, 1, 0, 0))\n .withTypeName(typeName)\n .withTypeDescriptor(typeDescriptor)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public LockStatementNode transform(LockStatementNode lockStatementNode) {\n if (!isInLineRange(lockStatementNode)) {\n return lockStatementNode;\n }\n Token lockKeyword = getToken(lockStatementNode.lockKeyword());\n StatementNode blockStatement = this.modifyNode(lockStatementNode.blockStatement());\n return lockStatementNode.modify()\n .withLockKeyword(formatToken(lockKeyword, 0, 1, 0, 0))\n .withBlockStatement(blockStatement)\n .apply();\n }\n\n @Override\n public ForkStatementNode transform(ForkStatementNode forkStatementNode) {\n if (!isInLineRange(forkStatementNode)) {\n return forkStatementNode;\n }\n Token forkKeyword = getToken(forkStatementNode.forkKeyword());\n Token openBraceToken = getToken(forkStatementNode.openBraceToken());\n NodeList namedWorkerDeclarations =\n this.modifyNodeList(forkStatementNode.namedWorkerDeclarations());\n Token closeBraceToken = getToken(forkStatementNode.closeBraceToken());\n return forkStatementNode.modify()\n .withForkKeyword(formatToken(forkKeyword, 1, 1, 0, 0))\n .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0))\n .withNamedWorkerDeclarations(namedWorkerDeclarations)\n .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ForEachStatementNode 
transform(ForEachStatementNode forEachStatementNode) {\n if (!isInLineRange(forEachStatementNode)) {\n return forEachStatementNode;\n }\n Token forEachKeyword = getToken(forEachStatementNode.forEachKeyword());\n TypedBindingPatternNode typedBindingPattern = this.modifyNode(forEachStatementNode.typedBindingPattern());\n Token inKeyword = getToken(forEachStatementNode.inKeyword());\n Node actionOrExpressionNode = this.modifyNode(forEachStatementNode.actionOrExpressionNode());\n StatementNode blockStatement = this.modifyNode(forEachStatementNode.blockStatement());\n return forEachStatementNode.modify()\n .withForEachKeyword(formatToken(forEachKeyword, 0, 1, 0, 0))\n .withTypedBindingPattern(typedBindingPattern)\n .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0))\n .withActionOrExpressionNode(actionOrExpressionNode)\n .withBlockStatement(blockStatement)\n .apply();\n }\n\n @Override\n public FailExpressionNode transform(FailExpressionNode failExpressionNode) {\n if (!isInLineRange(failExpressionNode)) {\n return failExpressionNode;\n }\n Token failKeyword = getToken(failExpressionNode.failKeyword());\n ExpressionNode expression = this.modifyNode(failExpressionNode.expression());\n return failExpressionNode.modify()\n .withFailKeyword(formatToken(failKeyword, 0, 0, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public FieldAccessExpressionNode transform(FieldAccessExpressionNode fieldAccessExpressionNode) {\n if (!isInLineRange(fieldAccessExpressionNode)) {\n return fieldAccessExpressionNode;\n }\n ExpressionNode expression = this.modifyNode(fieldAccessExpressionNode.expression());\n Token dotToken = getToken(fieldAccessExpressionNode.dotToken());\n NameReferenceNode fieldName = this.modifyNode(fieldAccessExpressionNode.fieldName());\n return fieldAccessExpressionNode.modify()\n .withExpression(expression)\n .withDotToken(formatToken(dotToken, 0, 0, 0, 0))\n .withFieldName(fieldName)\n .apply();\n }\n\n @Override\n public TypeofExpressionNode 
transform(TypeofExpressionNode typeofExpressionNode) {\n if (!isInLineRange(typeofExpressionNode)) {\n return typeofExpressionNode;\n }\n Token typeofKeyword = getToken(typeofExpressionNode.typeofKeyword());\n ExpressionNode expression = this.modifyNode(typeofExpressionNode.expression());\n return typeofExpressionNode.modify()\n .withTypeofKeyword(formatToken(typeofKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public UnaryExpressionNode transform(UnaryExpressionNode unaryExpressionNode) {\n if (!isInLineRange(unaryExpressionNode)) {\n return unaryExpressionNode;\n }\n Token unaryOperator = getToken(unaryExpressionNode.unaryOperator());\n ExpressionNode expression = this.modifyNode(unaryExpressionNode.expression());\n return unaryExpressionNode.modify()\n .withUnaryOperator(formatToken(unaryOperator, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public ComputedNameFieldNode transform(ComputedNameFieldNode computedNameFieldNode) {\n if (!isInLineRange(computedNameFieldNode)) {\n return computedNameFieldNode;\n }\n Token openBracket = getToken(computedNameFieldNode.openBracket());\n ExpressionNode fieldNameExpr = this.modifyNode(computedNameFieldNode.fieldNameExpr());\n Token closeBracket = getToken(computedNameFieldNode.closeBracket());\n Token colonToken = getToken(computedNameFieldNode.colonToken());\n ExpressionNode valueExpr = this.modifyNode(computedNameFieldNode.valueExpr());\n return computedNameFieldNode.modify()\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withFieldNameExpr(fieldNameExpr)\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .withColonToken(formatToken(colonToken, 1, 1, 0, 0))\n .withValueExpr(valueExpr)\n .apply();\n }\n\n @Override\n public DefaultableParameterNode transform(DefaultableParameterNode defaultableParameterNode) {\n if (!isInLineRange(defaultableParameterNode)) {\n return defaultableParameterNode;\n }\n NodeList annotations = 
this.modifyNodeList(defaultableParameterNode.annotations());\n Node typeName = this.modifyNode(defaultableParameterNode.typeName());\n Token paramName = getToken(defaultableParameterNode.paramName().orElse(null));\n Token equalsToken = getToken(defaultableParameterNode.equalsToken());\n Node expression = this.modifyNode(defaultableParameterNode.expression());\n if (paramName != null) {\n defaultableParameterNode = defaultableParameterNode.modify()\n .withParamName(formatToken(paramName, 1, 1, 0, 0)).apply();\n }\n return defaultableParameterNode.modify()\n .withAnnotations(annotations)\n .withTypeName(typeName)\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public RestParameterNode transform(RestParameterNode restParameterNode) {\n if (!isInLineRange(restParameterNode)) {\n return restParameterNode;\n }\n NodeList annotations = this.modifyNodeList(restParameterNode.annotations());\n Node typeName = this.modifyNode(restParameterNode.typeName());\n Token ellipsisToken = getToken(restParameterNode.ellipsisToken());\n Token paramName = getToken(restParameterNode.paramName().orElse(null));\n if (paramName != null) {\n restParameterNode = restParameterNode.modify()\n .withParamName(formatToken(paramName, 1, 1, 0, 0)).apply();\n }\n return restParameterNode.modify()\n .withAnnotations(annotations)\n .withTypeName(typeName)\n .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public SpreadFieldNode transform(SpreadFieldNode spreadFieldNode) {\n if (!isInLineRange(spreadFieldNode)) {\n return spreadFieldNode;\n }\n Token ellipsis = getToken(spreadFieldNode.ellipsis());\n ExpressionNode valueExpr = this.modifyNode(spreadFieldNode.valueExpr());\n return spreadFieldNode.modify()\n .withEllipsis(formatToken(ellipsis, 0, 0, 0, 0))\n .withValueExpr(valueExpr)\n .apply();\n }\n\n @Override\n public NamedArgumentNode transform(NamedArgumentNode namedArgumentNode) {\n if 
(!isInLineRange(namedArgumentNode)) {\n return namedArgumentNode;\n }\n SimpleNameReferenceNode argumentName = this.modifyNode(namedArgumentNode.argumentName());\n Token equalsToken = getToken(namedArgumentNode.equalsToken());\n ExpressionNode expression = this.modifyNode(namedArgumentNode.expression());\n return namedArgumentNode.modify()\n .withArgumentName(argumentName)\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public RestArgumentNode transform(RestArgumentNode restArgumentNode) {\n if (!isInLineRange(restArgumentNode)) {\n return restArgumentNode;\n }\n Token ellipsis = getToken(restArgumentNode.ellipsis());\n ExpressionNode expression = this.modifyNode(restArgumentNode.expression());\n return restArgumentNode.modify()\n .withEllipsis(formatToken(ellipsis, 0, 0, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public ObjectTypeDescriptorNode transform(ObjectTypeDescriptorNode objectTypeDescriptorNode) {\n if (!isInLineRange(objectTypeDescriptorNode)) {\n return objectTypeDescriptorNode;\n }\n NodeList objectTypeQualifiers = this.modifyNodeList(objectTypeDescriptorNode.objectTypeQualifiers());\n Token objectKeyword = getToken(objectTypeDescriptorNode.objectKeyword());\n Token openBrace = getToken(objectTypeDescriptorNode.openBrace());\n NodeList members = this.modifyNodeList(objectTypeDescriptorNode.members());\n Token closeBrace = getToken(objectTypeDescriptorNode.closeBrace());\n return objectTypeDescriptorNode.modify()\n .withObjectTypeQualifiers(objectTypeQualifiers)\n .withObjectKeyword(formatToken(objectKeyword, 0, 1, 1, 0))\n .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))\n .withMembers(members)\n .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RecordTypeDescriptorNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {\n if (!isInLineRange(recordTypeDescriptorNode)) {\n return 
recordTypeDescriptorNode;\n }\n Token recordKeyword = getToken(recordTypeDescriptorNode.recordKeyword());\n Token bodyStartDelimiter = getToken(recordTypeDescriptorNode.bodyStartDelimiter());\n NodeList fields = this.modifyNodeList(recordTypeDescriptorNode.fields());\n RecordRestDescriptorNode recordRestDescriptor =\n modifyNode(recordTypeDescriptorNode.recordRestDescriptor().orElse(null));\n Token bodyEndDelimiter = getToken(recordTypeDescriptorNode.bodyEndDelimiter());\n if (recordRestDescriptor != null) {\n recordTypeDescriptorNode = recordTypeDescriptorNode.modify()\n .withRecordRestDescriptor(recordRestDescriptor).apply();\n }\n return recordTypeDescriptorNode.modify()\n .withRecordKeyword(formatToken(recordKeyword, 0, 1, 0, 0))\n .withBodyStartDelimiter(formatToken(bodyStartDelimiter, 0, 0, 0, 0))\n .withFields(fields)\n .withBodyEndDelimiter(formatToken(bodyEndDelimiter, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ObjectFieldNode transform(ObjectFieldNode objectFieldNode) {\n if (!isInLineRange(objectFieldNode)) {\n return objectFieldNode;\n }\n MetadataNode metadata = this.modifyNode(objectFieldNode.metadata().orElse(null));\n Token visibilityQualifier = getToken(objectFieldNode.visibilityQualifier().orElse(null));\n Token readonlyKeyword = getToken(objectFieldNode.readonlyKeyword().orElse(null));\n Node typeName = this.modifyNode(objectFieldNode.typeName());\n Token fieldName = getToken(objectFieldNode.fieldName());\n Token equalsToken = getToken(objectFieldNode.equalsToken().orElse(null));\n ExpressionNode expression = this.modifyNode(objectFieldNode.expression().orElse(null));\n Token semicolonToken = getToken(objectFieldNode.semicolonToken());\n if (metadata != null) {\n objectFieldNode = objectFieldNode.modify()\n .withMetadata(metadata).apply();\n }\n if (visibilityQualifier != null) {\n objectFieldNode = objectFieldNode.modify()\n .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 1, 0, 0)).apply();\n }\n if (readonlyKeyword != 
null) {\n objectFieldNode = objectFieldNode.modify()\n .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();\n }\n return objectFieldNode.modify()\n .withTypeName(typeName)\n .withFieldName(formatToken(fieldName, 1, 1, 0, 0))\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withExpression(expression)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RecordFieldNode transform(RecordFieldNode recordFieldNode) {\n if (!isInLineRange(recordFieldNode)) {\n return recordFieldNode;\n }\n MetadataNode metadata = this.modifyNode(recordFieldNode.metadata().orElse(null));\n Token readonlyKeyword = getToken(recordFieldNode.readonlyKeyword().orElse(null));\n Node typeName = this.modifyNode(recordFieldNode.typeName());\n Token fieldName = getToken(recordFieldNode.fieldName());\n Token questionMarkToken = getToken(recordFieldNode.questionMarkToken().orElse(null));\n Token semicolonToken = getToken(recordFieldNode.semicolonToken());\n if (metadata != null) {\n recordFieldNode = recordFieldNode.modify()\n .withMetadata(metadata).apply();\n }\n if (readonlyKeyword != null) {\n recordFieldNode = recordFieldNode.modify()\n .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();\n }\n if (questionMarkToken != null) {\n recordFieldNode = recordFieldNode.modify()\n .withQuestionMarkToken(formatToken(questionMarkToken, 1, 1, 0, 0)).apply();\n }\n return recordFieldNode.modify()\n .withTypeName(typeName)\n .withFieldName(formatToken(fieldName, 0, 1, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RecordFieldWithDefaultValueNode transform(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) {\n if (!isInLineRange(recordFieldWithDefaultValueNode)) {\n return recordFieldWithDefaultValueNode;\n }\n MetadataNode metadata = this.modifyNode(recordFieldWithDefaultValueNode.metadata().orElse(null));\n Token readonlyKeyword = 
getToken(recordFieldWithDefaultValueNode.readonlyKeyword().orElse(null));\n Node typeName = this.modifyNode(recordFieldWithDefaultValueNode.typeName());\n Token fieldName = getToken(recordFieldWithDefaultValueNode.fieldName());\n Token equalsToken = getToken(recordFieldWithDefaultValueNode.equalsToken());\n ExpressionNode expression = this.modifyNode(recordFieldWithDefaultValueNode.expression());\n Token semicolonToken = getToken(recordFieldWithDefaultValueNode.semicolonToken());\n if (metadata != null) {\n recordFieldWithDefaultValueNode = recordFieldWithDefaultValueNode.modify()\n .withMetadata(metadata).apply();\n }\n if (readonlyKeyword != null) {\n recordFieldWithDefaultValueNode = recordFieldWithDefaultValueNode.modify()\n .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();\n }\n return recordFieldWithDefaultValueNode.modify()\n .withTypeName(typeName)\n .withFieldName(formatToken(fieldName, 1, 1, 0, 0))\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withExpression(expression)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RecordRestDescriptorNode transform(RecordRestDescriptorNode recordRestDescriptorNode) {\n if (!isInLineRange(recordRestDescriptorNode)) {\n return recordRestDescriptorNode;\n }\n Node typeName = this.modifyNode(recordRestDescriptorNode.typeName());\n Token ellipsisToken = getToken(recordRestDescriptorNode.ellipsisToken());\n Token semicolonToken = getToken(recordRestDescriptorNode.semicolonToken());\n return recordRestDescriptorNode.modify()\n .withTypeName(typeName)\n .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public AnnotationNode transform(AnnotationNode annotationNode) {\n if (!isInLineRange(annotationNode)) {\n return annotationNode;\n }\n Token atToken = getToken(annotationNode.atToken());\n Node annotReference = 
this.modifyNode(annotationNode.annotReference());\n MappingConstructorExpressionNode annotValue = this.modifyNode(annotationNode.annotValue().orElse(null));\n if (annotValue != null) {\n annotationNode = annotationNode.modify()\n .withAnnotValue(annotValue).apply();\n }\n return annotationNode.modify()\n .withAtToken(formatToken(atToken, 1, 1, 0, 0))\n .withAnnotReference(annotReference)\n .apply();\n }\n\n @Override\n public AnnotationDeclarationNode transform(AnnotationDeclarationNode annotationDeclarationNode) {\n if (!isInLineRange(annotationDeclarationNode)) {\n return annotationDeclarationNode;\n }\n MetadataNode metadata = this.modifyNode(annotationDeclarationNode.metadata().orElse(null));\n Token visibilityQualifier = getToken(annotationDeclarationNode.visibilityQualifier());\n Token constKeyword = getToken(annotationDeclarationNode.constKeyword());\n Token annotationKeyword = getToken(annotationDeclarationNode.annotationKeyword());\n Node typeDescriptor = this.modifyNode(annotationDeclarationNode.typeDescriptor());\n Token annotationTag = getToken(annotationDeclarationNode.annotationTag());\n Token onKeyword = getToken(annotationDeclarationNode.onKeyword());\n SeparatedNodeList attachPoints = this.modifySeparatedNodeList(annotationDeclarationNode.attachPoints());\n Token semicolonToken = getToken(annotationDeclarationNode.semicolonToken());\n if (metadata != null) {\n annotationDeclarationNode = annotationDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return annotationDeclarationNode.modify()\n .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 1, 0, 0))\n .withConstKeyword(formatToken(constKeyword, 1, 1, 0, 0))\n .withAnnotationKeyword(formatToken(annotationKeyword, 0, 0, 0, 0))\n .withTypeDescriptor(typeDescriptor)\n .withAnnotationTag(formatToken(annotationTag, 0, 0, 0, 0))\n .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0))\n .withAttachPoints(attachPoints)\n .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))\n 
.apply();\n }\n\n @Override\n public AnnotationAttachPointNode transform(AnnotationAttachPointNode annotationAttachPointNode) {\n if (!isInLineRange(annotationAttachPointNode)) {\n return annotationAttachPointNode;\n }\n Token sourceKeyword = getToken(annotationAttachPointNode.sourceKeyword());\n Token firstIdent = getToken(annotationAttachPointNode.firstIdent());\n Token secondIdent = getToken(annotationAttachPointNode.secondIdent());\n return annotationAttachPointNode.modify()\n .withSourceKeyword(formatToken(sourceKeyword, 0, 1, 0, 0))\n .withFirstIdent(formatToken(firstIdent, 0, 0, 0, 0))\n .withSecondIdent(formatToken(secondIdent, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public NamedWorkerDeclarator transform(NamedWorkerDeclarator namedWorkerDeclarator) {\n if (!isInLineRange(namedWorkerDeclarator)) {\n return namedWorkerDeclarator;\n }\n NodeList workerInitStatements =\n this.modifyNodeList(namedWorkerDeclarator.workerInitStatements());\n NodeList namedWorkerDeclarations =\n this.modifyNodeList(namedWorkerDeclarator.namedWorkerDeclarations());\n return namedWorkerDeclarator.modify()\n .withNamedWorkerDeclarations(namedWorkerDeclarations)\n .withWorkerInitStatements(workerInitStatements)\n .apply();\n }\n\n @Override\n public TrapExpressionNode transform(TrapExpressionNode trapExpressionNode) {\n if (!isInLineRange(trapExpressionNode)) {\n return trapExpressionNode;\n }\n Token trapKeyword = getToken(trapExpressionNode.trapKeyword());\n ExpressionNode expression = this.modifyNode(trapExpressionNode.expression());\n return trapExpressionNode.modify()\n .withTrapKeyword(formatToken(trapKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public TypeCastExpressionNode transform(TypeCastExpressionNode typeCastExpressionNode) {\n if (!isInLineRange(typeCastExpressionNode)) {\n return typeCastExpressionNode;\n }\n Token ltToken = getToken(typeCastExpressionNode.ltToken());\n TypeCastParamNode typeCastParam = 
this.modifyNode(typeCastExpressionNode.typeCastParam());\n Token gtToken = getToken(typeCastExpressionNode.gtToken());\n ExpressionNode expression = this.modifyNode(typeCastExpressionNode.expression());\n return typeCastExpressionNode.modify()\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withTypeCastParam(typeCastParam)\n .withGtToken(formatToken(gtToken, 0, 0, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public TypeCastParamNode transform(TypeCastParamNode typeCastParamNode) {\n if (!isInLineRange(typeCastParamNode)) {\n return typeCastParamNode;\n }\n NodeList annotations = this.modifyNodeList(typeCastParamNode.annotations());\n Node type = this.modifyNode(typeCastParamNode.type());\n return typeCastParamNode.modify()\n .withAnnotations(annotations)\n .withType(type)\n .apply();\n }\n\n @Override\n public TableConstructorExpressionNode transform(TableConstructorExpressionNode tableConstructorExpressionNode) {\n if (!isInLineRange(tableConstructorExpressionNode)) {\n return tableConstructorExpressionNode;\n }\n Token tableKeyword = getToken(tableConstructorExpressionNode.tableKeyword());\n KeySpecifierNode keySpecifier = this.modifyNode(tableConstructorExpressionNode.keySpecifier().orElse(null));\n Token openBracket = getToken(tableConstructorExpressionNode.openBracket());\n SeparatedNodeList mappingConstructors =\n this.modifySeparatedNodeList(tableConstructorExpressionNode.mappingConstructors());\n Token closeBracket = this.modifyToken(tableConstructorExpressionNode.closeBracket());\n return tableConstructorExpressionNode.modify()\n .withTableKeyword(formatToken(tableKeyword, 0, 1, 0, 0))\n .withKeySpecifier(keySpecifier)\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withMappingConstructors(mappingConstructors)\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public KeySpecifierNode transform(KeySpecifierNode keySpecifierNode) {\n if (!isInLineRange(keySpecifierNode)) {\n 
return keySpecifierNode;\n }\n Token keyKeyword = getToken(keySpecifierNode.keyKeyword());\n Token openParenToken = getToken(keySpecifierNode.openParenToken());\n SeparatedNodeList fieldNames = this.modifySeparatedNodeList(keySpecifierNode.fieldNames());\n Token closeParenToken = getToken(keySpecifierNode.closeParenToken());\n return keySpecifierNode.modify()\n .withKeyKeyword(formatToken(keyKeyword, 0, 1, 0, 0))\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withFieldNames(fieldNames)\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ErrorTypeParamsNode transform(ErrorTypeParamsNode errorTypeParamsNode) {\n if (!isInLineRange(errorTypeParamsNode)) {\n return errorTypeParamsNode;\n }\n Token ltToken = getToken(errorTypeParamsNode.ltToken());\n Node parameter = this.modifyNode(errorTypeParamsNode.parameter());\n Token gtToken = getToken(errorTypeParamsNode.gtToken());\n return errorTypeParamsNode.modify()\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withParameter(parameter)\n .withGtToken(formatToken(gtToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public StreamTypeDescriptorNode transform(StreamTypeDescriptorNode streamTypeDescriptorNode) {\n if (!isInLineRange(streamTypeDescriptorNode)) {\n return streamTypeDescriptorNode;\n }\n Token streamKeywordToken = getToken(streamTypeDescriptorNode.streamKeywordToken());\n Node streamTypeParamsNode = this.modifyNode(streamTypeDescriptorNode.streamTypeParamsNode().orElse(null));\n if (streamTypeParamsNode != null) {\n streamTypeDescriptorNode = streamTypeDescriptorNode.modify()\n .withStreamTypeParamsNode(streamTypeParamsNode).apply();\n }\n return streamTypeDescriptorNode.modify()\n .withStreamKeywordToken(formatToken(streamKeywordToken, 0, 1, 0, 0))\n .apply();\n }\n\n @Override\n public StreamTypeParamsNode transform(StreamTypeParamsNode streamTypeParamsNode) {\n if (!isInLineRange(streamTypeParamsNode)) {\n return streamTypeParamsNode;\n 
}\n Token ltToken = getToken(streamTypeParamsNode.ltToken());\n Node leftTypeDescNode = this.modifyNode(streamTypeParamsNode.leftTypeDescNode());\n Token commaToken = getToken(streamTypeParamsNode.commaToken().orElse(null));\n Node rightTypeDescNode = this.modifyNode(streamTypeParamsNode.rightTypeDescNode().orElse(null));\n Token gtToken = getToken(streamTypeParamsNode.gtToken());\n if (commaToken != null) {\n streamTypeParamsNode = streamTypeParamsNode.modify()\n .withCommaToken(formatToken(commaToken, 0, 1, 0, 0)).apply();\n }\n if (rightTypeDescNode != null) {\n streamTypeParamsNode = streamTypeParamsNode.modify()\n .withRightTypeDescNode(rightTypeDescNode).apply();\n }\n return streamTypeParamsNode.modify()\n .withLtToken(formatToken(ltToken, 0, 0, 0, 0))\n .withLeftTypeDescNode(leftTypeDescNode)\n .withGtToken(formatToken(gtToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TypedescTypeDescriptorNode transform(TypedescTypeDescriptorNode typedescTypeDescriptorNode) {\n if (!isInLineRange(typedescTypeDescriptorNode)) {\n return typedescTypeDescriptorNode;\n }\n Token typedescKeywordToken = this.modifyToken(typedescTypeDescriptorNode.typedescKeywordToken());\n TypeParameterNode typedescTypeParamsNode =\n this.modifyNode(typedescTypeDescriptorNode.typedescTypeParamsNode().orElse(null));\n if (typedescTypeParamsNode != null) {\n typedescTypeDescriptorNode = typedescTypeDescriptorNode.modify()\n .withTypedescTypeParamsNode(typedescTypeParamsNode).apply();\n }\n return typedescTypeDescriptorNode.modify()\n .withTypedescKeywordToken(formatToken(typedescKeywordToken, 0, 1, 0, 0))\n .apply();\n }\n\n @Override\n public LetExpressionNode transform(LetExpressionNode letExpressionNode) {\n if (!isInLineRange(letExpressionNode)) {\n return letExpressionNode;\n }\n Token letKeyword = getToken(letExpressionNode.letKeyword());\n SeparatedNodeList letVarDeclarations =\n this.modifySeparatedNodeList(letExpressionNode.letVarDeclarations());\n Token inKeyword = 
getToken(letExpressionNode.inKeyword());\n ExpressionNode expression = this.modifyNode(letExpressionNode.expression());\n return letExpressionNode.modify()\n .withLetKeyword(formatToken(letKeyword, 0, 1, 0, 0))\n .withLetVarDeclarations(letVarDeclarations)\n .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public LetVariableDeclarationNode transform(LetVariableDeclarationNode letVariableDeclarationNode) {\n if (!isInLineRange(letVariableDeclarationNode)) {\n return letVariableDeclarationNode;\n }\n NodeList annotations = this.modifyNodeList(letVariableDeclarationNode.annotations());\n TypedBindingPatternNode typedBindingPattern = this.modifyNode(letVariableDeclarationNode.typedBindingPattern());\n Token equalsToken = getToken(letVariableDeclarationNode.equalsToken());\n ExpressionNode expression = this.modifyNode(letVariableDeclarationNode.expression());\n return letVariableDeclarationNode.modify()\n .withAnnotations(annotations)\n .withTypedBindingPattern(typedBindingPattern)\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public InterpolationNode transform(InterpolationNode interpolationNode) {\n if (!isInLineRange(interpolationNode)) {\n return interpolationNode;\n }\n Token interpolationStartToken = getToken(interpolationNode.interpolationStartToken());\n ExpressionNode expression = this.modifyNode(interpolationNode.expression());\n Token interpolationEndToken = getToken(interpolationNode.interpolationEndToken());\n return interpolationNode.modify()\n .withInterpolationStartToken(formatToken(interpolationStartToken, 0, 0, 0, 0))\n .withExpression(expression)\n .withInterpolationEndToken(formatToken(interpolationEndToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TableTypeDescriptorNode transform(TableTypeDescriptorNode tableTypeDescriptorNode) {\n if (!isInLineRange(tableTypeDescriptorNode)) {\n return 
tableTypeDescriptorNode;\n }\n Token tableKeywordToken = getToken(tableTypeDescriptorNode.tableKeywordToken());\n Node rowTypeParameterNode = this.modifyNode(tableTypeDescriptorNode.rowTypeParameterNode());\n Node keyConstraintNode = this.modifyNode(tableTypeDescriptorNode.keyConstraintNode());\n return tableTypeDescriptorNode.modify()\n .withTableKeywordToken(formatToken(tableKeywordToken, 0, 1, 0, 0))\n .withRowTypeParameterNode(rowTypeParameterNode)\n .withKeyConstraintNode(keyConstraintNode)\n .apply();\n }\n\n @Override\n public KeyTypeConstraintNode transform(KeyTypeConstraintNode keyTypeConstraintNode) {\n if (!isInLineRange(keyTypeConstraintNode)) {\n return keyTypeConstraintNode;\n }\n Token keyKeywordToken = getToken(keyTypeConstraintNode.keyKeywordToken());\n Node typeParameterNode = this.modifyNode(keyTypeConstraintNode.typeParameterNode());\n return keyTypeConstraintNode.modify()\n .withKeyKeywordToken(formatToken(keyKeywordToken, 0, 1, 0, 0))\n .withTypeParameterNode(typeParameterNode)\n .apply();\n }\n\n @Override\n public FunctionTypeDescriptorNode transform(FunctionTypeDescriptorNode functionTypeDescriptorNode) {\n if (!isInLineRange(functionTypeDescriptorNode)) {\n return functionTypeDescriptorNode;\n }\n Token functionKeyword = getToken(functionTypeDescriptorNode.functionKeyword());\n FunctionSignatureNode functionSignature = this.modifyNode(functionTypeDescriptorNode.functionSignature());\n return functionTypeDescriptorNode.modify()\n .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0))\n .withFunctionSignature(functionSignature)\n .apply();\n }\n\n @Override\n public ExplicitAnonymousFunctionExpressionNode transform(\n ExplicitAnonymousFunctionExpressionNode explicitAnonymousFunctionExpressionNode) {\n if (!isInLineRange(explicitAnonymousFunctionExpressionNode)) {\n return explicitAnonymousFunctionExpressionNode;\n }\n NodeList annotations =\n this.modifyNodeList(explicitAnonymousFunctionExpressionNode.annotations());\n Token 
functionKeyword = getToken(explicitAnonymousFunctionExpressionNode.functionKeyword());
        FunctionSignatureNode functionSignature =
                this.modifyNode(explicitAnonymousFunctionExpressionNode.functionSignature());
        FunctionBodyNode functionBody = this.modifyNode(explicitAnonymousFunctionExpressionNode.functionBody());
        // Rebuild with formatted children; formatToken args presumably are
        // (leadingSpaces, trailingSpaces, leadingNewlines, trailingNewlines) — TODO confirm
        // against formatToken's definition (outside this chunk).
        return explicitAnonymousFunctionExpressionNode.modify()
                .withAnnotations(annotations)
                .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0))
                .withFunctionSignature(functionSignature)
                .withFunctionBody(functionBody)
                .apply();
    }

    /**
     * Format an expression function body ({@code => expr;}): a space on each side of
     * {@code =>}, and the optional trailing semicolon (when present) with no padding.
     */
    @Override
    public ExpressionFunctionBodyNode transform(ExpressionFunctionBodyNode expressionFunctionBodyNode) {
        if (!isInLineRange(expressionFunctionBodyNode)) {
            return expressionFunctionBodyNode;
        }
        Token rightDoubleArrow = getToken(expressionFunctionBodyNode.rightDoubleArrow());
        ExpressionNode expression = this.modifyNode(expressionFunctionBodyNode.expression());
        // semicolon is optional; apply it only when present (null otherwise).
        Token semicolon = this.modifyToken(expressionFunctionBodyNode.semicolon().orElse(null));
        if (semicolon != null) {
            expressionFunctionBodyNode = expressionFunctionBodyNode.modify()
                    .withSemicolon(formatToken(semicolon, 0, 0, 0, 0)).apply();
        }
        return expressionFunctionBodyNode.modify()
                .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0))
                .withExpression(expression)
                .apply();
    }

    /**
     * Format a parenthesised type descriptor ({@code (T)}): parentheses hug the
     * inner type descriptor with no padding.
     */
    @Override
    public ParenthesisedTypeDescriptorNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) {
        if (!isInLineRange(parenthesisedTypeDescriptorNode)) {
            return parenthesisedTypeDescriptorNode;
        }
        Token openParenToken = getToken(parenthesisedTypeDescriptorNode.openParenToken());
        TypeDescriptorNode typedesc = this.modifyNode(parenthesisedTypeDescriptorNode.typedesc());
        Token closeParenToken = getToken(parenthesisedTypeDescriptorNode.closeParenToken());
        return parenthesisedTypeDescriptorNode.modify()
                .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))
                .withTypedesc(typedesc)
.withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ImplicitNewExpressionNode transform(ImplicitNewExpressionNode implicitNewExpressionNode) {\n if (!isInLineRange(implicitNewExpressionNode)) {\n return implicitNewExpressionNode;\n }\n Token newKeyword = getToken(implicitNewExpressionNode.newKeyword());\n ParenthesizedArgList parenthesizedArgList =\n this.modifyNode(implicitNewExpressionNode.parenthesizedArgList().orElse(null));\n if (parenthesizedArgList != null) {\n implicitNewExpressionNode = implicitNewExpressionNode.modify()\n .withParenthesizedArgList(parenthesizedArgList).apply();\n }\n return implicitNewExpressionNode.modify()\n .withNewKeyword(formatToken(newKeyword, 0, 1, 0, 0))\n .apply();\n }\n\n @Override\n public QueryConstructTypeNode transform(QueryConstructTypeNode queryConstructTypeNode) {\n if (!isInLineRange(queryConstructTypeNode)) {\n return queryConstructTypeNode;\n }\n Token keyword = getToken(queryConstructTypeNode.keyword());\n KeySpecifierNode keySpecifier = this.modifyNode(queryConstructTypeNode.keySpecifier().orElse(null));\n if (keySpecifier != null) {\n queryConstructTypeNode = queryConstructTypeNode.modify()\n .withKeySpecifier(keySpecifier).apply();\n }\n return queryConstructTypeNode.modify()\n .withKeyword(formatToken(keyword, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public FromClauseNode transform(FromClauseNode fromClauseNode) {\n if (!isInLineRange(fromClauseNode)) {\n return fromClauseNode;\n }\n Token fromKeyword = getToken(fromClauseNode.fromKeyword());\n TypedBindingPatternNode typedBindingPattern = this.modifyNode(fromClauseNode.typedBindingPattern());\n Token inKeyword = getToken(fromClauseNode.inKeyword());\n ExpressionNode expression = this.modifyNode(fromClauseNode.expression());\n return fromClauseNode.modify()\n .withFromKeyword(formatToken(fromKeyword, 1, 1, 0, 0))\n .withTypedBindingPattern(typedBindingPattern)\n .withInKeyword(formatToken(inKeyword, 0, 0, 0, 
0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public WhereClauseNode transform(WhereClauseNode whereClauseNode) {\n if (!isInLineRange(whereClauseNode)) {\n return whereClauseNode;\n }\n Token whereKeyword = getToken(whereClauseNode.whereKeyword());\n ExpressionNode expression = this.modifyNode(whereClauseNode.expression());\n return whereClauseNode.modify()\n .withWhereKeyword(formatToken(whereKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public LetClauseNode transform(LetClauseNode letClauseNode) {\n if (!isInLineRange(letClauseNode)) {\n return letClauseNode;\n }\n Token letKeyword = getToken(letClauseNode.letKeyword());\n SeparatedNodeList letVarDeclarations =\n this.modifySeparatedNodeList(letClauseNode.letVarDeclarations());\n return letClauseNode.modify()\n .withLetKeyword(formatToken(letKeyword, 0, 1, 0, 0))\n .withLetVarDeclarations(letVarDeclarations)\n .apply();\n }\n\n @Override\n public QueryPipelineNode transform(QueryPipelineNode queryPipelineNode) {\n if (!isInLineRange(queryPipelineNode)) {\n return queryPipelineNode;\n }\n FromClauseNode fromClause = this.modifyNode(queryPipelineNode.fromClause());\n NodeList intermediateClauses = this.modifyNodeList(queryPipelineNode.intermediateClauses());\n return queryPipelineNode.modify()\n .withFromClause(fromClause)\n .withIntermediateClauses(intermediateClauses)\n .apply();\n }\n\n @Override\n public SelectClauseNode transform(SelectClauseNode selectClauseNode) {\n if (!isInLineRange(selectClauseNode)) {\n return selectClauseNode;\n }\n Token selectKeyword = getToken(selectClauseNode.selectKeyword());\n ExpressionNode expression = this.modifyNode(selectClauseNode.expression());\n return selectClauseNode.modify()\n .withSelectKeyword(formatToken(selectKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public QueryExpressionNode transform(QueryExpressionNode queryExpressionNode) {\n if 
(!isInLineRange(queryExpressionNode)) {\n return queryExpressionNode;\n }\n QueryConstructTypeNode queryConstructType =\n this.modifyNode(queryExpressionNode.queryConstructType().orElse(null));\n QueryPipelineNode queryPipeline = this.modifyNode(queryExpressionNode.queryPipeline());\n SelectClauseNode selectClause = this.modifyNode(queryExpressionNode.selectClause());\n OnConflictClauseNode onConflictClause = this.modifyNode(queryExpressionNode.onConflictClause().orElse(null));\n LimitClauseNode limitClause = this.modifyNode(queryExpressionNode.limitClause().orElse(null));\n if (queryConstructType != null) {\n queryExpressionNode = queryExpressionNode.modify()\n .withQueryConstructType(queryConstructType).apply();\n }\n if (onConflictClause != null) {\n queryExpressionNode = queryExpressionNode.modify()\n .withOnConflictClause(onConflictClause).apply();\n }\n if (limitClause != null) {\n queryExpressionNode = queryExpressionNode.modify()\n .withLimitClause(limitClause).apply();\n }\n return queryExpressionNode.modify()\n .withQueryPipeline(queryPipeline)\n .withSelectClause(selectClause)\n .apply();\n }\n\n @Override\n public IntersectionTypeDescriptorNode transform(IntersectionTypeDescriptorNode intersectionTypeDescriptorNode) {\n if (!isInLineRange(intersectionTypeDescriptorNode)) {\n return intersectionTypeDescriptorNode;\n }\n Node leftTypeDesc = this.modifyNode(intersectionTypeDescriptorNode.leftTypeDesc());\n Token bitwiseAndToken = getToken(intersectionTypeDescriptorNode.bitwiseAndToken());\n Node rightTypeDesc = this.modifyNode(intersectionTypeDescriptorNode.rightTypeDesc());\n return intersectionTypeDescriptorNode.modify()\n .withLeftTypeDesc(leftTypeDesc)\n .withBitwiseAndToken(formatToken(bitwiseAndToken, 1, 1, 0, 0))\n .withRightTypeDesc(rightTypeDesc)\n .apply();\n }\n\n @Override\n public ImplicitAnonymousFunctionParameters transform(\n ImplicitAnonymousFunctionParameters implicitAnonymousFunctionParameters) {\n if 
(!isInLineRange(implicitAnonymousFunctionParameters)) {\n return implicitAnonymousFunctionParameters;\n }\n Token openParenToken = getToken(implicitAnonymousFunctionParameters.openParenToken());\n SeparatedNodeList parameters =\n this.modifySeparatedNodeList(implicitAnonymousFunctionParameters.parameters());\n Token closeParenToken = getToken(implicitAnonymousFunctionParameters.closeParenToken());\n return implicitAnonymousFunctionParameters.modify()\n .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))\n .withParameters(parameters)\n .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ImplicitAnonymousFunctionExpressionNode transform(\n ImplicitAnonymousFunctionExpressionNode implicitAnonymousFunctionExpressionNode) {\n if (!isInLineRange(implicitAnonymousFunctionExpressionNode)) {\n return implicitAnonymousFunctionExpressionNode;\n }\n Node params = this.modifyNode(implicitAnonymousFunctionExpressionNode.params());\n Token rightDoubleArrow = getToken(implicitAnonymousFunctionExpressionNode.rightDoubleArrow());\n ExpressionNode expression = this.modifyNode(implicitAnonymousFunctionExpressionNode.expression());\n return implicitAnonymousFunctionExpressionNode.modify()\n .withParams(params)\n .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public SingletonTypeDescriptorNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {\n if (!isInLineRange(singletonTypeDescriptorNode)) {\n return singletonTypeDescriptorNode;\n }\n ExpressionNode simpleContExprNode = this.modifyNode(singletonTypeDescriptorNode.simpleContExprNode());\n return singletonTypeDescriptorNode.modify()\n .withSimpleContExprNode(simpleContExprNode)\n .apply();\n }\n\n @Override\n public MethodDeclarationNode transform(MethodDeclarationNode methodDeclarationNode) {\n if (!isInLineRange(methodDeclarationNode)) {\n return methodDeclarationNode;\n }\n 
MetadataNode metadata = this.modifyNode(methodDeclarationNode.metadata().orElse(null));\n NodeList qualifierList = this.modifyNodeList(methodDeclarationNode.qualifierList());\n Token functionKeyword = getToken(methodDeclarationNode.functionKeyword());\n IdentifierToken methodName = this.modifyNode(methodDeclarationNode.methodName());\n FunctionSignatureNode methodSignature = this.modifyNode(methodDeclarationNode.methodSignature());\n Token semicolon = getToken(methodDeclarationNode.semicolon());\n if (metadata != null) {\n methodDeclarationNode = methodDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return methodDeclarationNode.modify()\n .withQualifierList(qualifierList)\n .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0))\n .withMethodName(methodName)\n .withMethodSignature(methodSignature)\n .withSemicolon(formatToken(semicolon, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public WildcardBindingPatternNode transform(WildcardBindingPatternNode wildcardBindingPatternNode) {\n if (!isInLineRange(wildcardBindingPatternNode)) {\n return wildcardBindingPatternNode;\n }\n Token underscoreToken = getToken(wildcardBindingPatternNode.underscoreToken());\n return wildcardBindingPatternNode.modify()\n .withUnderscoreToken(formatToken(underscoreToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public ErrorBindingPatternNode transform(ErrorBindingPatternNode errorBindingPatternNode) {\n if (!isInLineRange(errorBindingPatternNode)) {\n return errorBindingPatternNode;\n }\n Token errorKeyword = getToken(errorBindingPatternNode.errorKeyword());\n Node typeReference = this.modifyNode(errorBindingPatternNode.typeReference().orElse(null));\n Token openParenthesis = getToken(errorBindingPatternNode.openParenthesis());\n SeparatedNodeList argListBindingPatterns =\n this.modifySeparatedNodeList(errorBindingPatternNode.argListBindingPatterns());\n Token closeParenthesis = getToken(errorBindingPatternNode.closeParenthesis());\n return 
errorBindingPatternNode.modify()\n .withErrorKeyword(formatToken(errorKeyword, 0, 1, 0, 0))\n .withTypeReference(typeReference)\n .withOpenParenthesis(formatToken(openParenthesis, 0, 0, 0, 0))\n .withArgListBindingPatterns(argListBindingPatterns)\n .withCloseParenthesis(formatToken(closeParenthesis, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public NamedArgBindingPatternNode transform(NamedArgBindingPatternNode namedArgBindingPatternNode) {\n if (!isInLineRange(namedArgBindingPatternNode)) {\n return namedArgBindingPatternNode;\n }\n IdentifierToken argName = this.modifyNode(namedArgBindingPatternNode.argName());\n Token equalsToken = getToken(namedArgBindingPatternNode.equalsToken());\n BindingPatternNode bindingPattern = this.modifyNode(namedArgBindingPatternNode.bindingPattern());\n return namedArgBindingPatternNode.modify()\n .withArgName(argName)\n .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))\n .withBindingPattern(bindingPattern)\n .apply();\n }\n\n @Override\n public AsyncSendActionNode transform(AsyncSendActionNode asyncSendActionNode) {\n if (!isInLineRange(asyncSendActionNode)) {\n return asyncSendActionNode;\n }\n ExpressionNode expression = this.modifyNode(asyncSendActionNode.expression());\n Token rightArrowToken = getToken(asyncSendActionNode.rightArrowToken());\n SimpleNameReferenceNode peerWorker = this.modifyNode(asyncSendActionNode.peerWorker());\n return asyncSendActionNode.modify()\n .withExpression(expression)\n .withRightArrowToken(formatToken(rightArrowToken, 1, 1, 0, 0))\n .withPeerWorker(peerWorker)\n .apply();\n }\n\n @Override\n public SyncSendActionNode transform(SyncSendActionNode syncSendActionNode) {\n if (!isInLineRange(syncSendActionNode)) {\n return syncSendActionNode;\n }\n ExpressionNode expression = this.modifyNode(syncSendActionNode.expression());\n Token syncSendToken = getToken(syncSendActionNode.syncSendToken());\n SimpleNameReferenceNode peerWorker = this.modifyNode(syncSendActionNode.peerWorker());\n return 
syncSendActionNode.modify()\n .withExpression(expression)\n .withSyncSendToken(formatToken(syncSendToken, 1, 1, 0, 0))\n .withPeerWorker(peerWorker)\n .apply();\n }\n\n @Override\n public ReceiveActionNode transform(ReceiveActionNode receiveActionNode) {\n if (!isInLineRange(receiveActionNode)) {\n return receiveActionNode;\n }\n Token leftArrow = getToken(receiveActionNode.leftArrow());\n SimpleNameReferenceNode receiveWorkers = this.modifyNode(receiveActionNode.receiveWorkers());\n return receiveActionNode.modify()\n .withLeftArrow(formatToken(leftArrow, 1, 1, 0, 0))\n .withReceiveWorkers(receiveWorkers)\n .apply();\n }\n\n @Override\n public ReceiveFieldsNode transform(ReceiveFieldsNode receiveFieldsNode) {\n if (!isInLineRange(receiveFieldsNode)) {\n return receiveFieldsNode;\n }\n Token openBrace = getToken(receiveFieldsNode.openBrace());\n SeparatedNodeList receiveFields =\n this.modifySeparatedNodeList(receiveFieldsNode.receiveFields());\n Token closeBrace = getToken(receiveFieldsNode.closeBrace());\n return receiveFieldsNode.modify()\n .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))\n .withReceiveFields(receiveFields)\n .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RestDescriptorNode transform(RestDescriptorNode restDescriptorNode) {\n if (!isInLineRange(restDescriptorNode)) {\n return restDescriptorNode;\n }\n TypeDescriptorNode typeDescriptor = this.modifyNode(restDescriptorNode.typeDescriptor());\n Token ellipsisToken = getToken(restDescriptorNode.ellipsisToken());\n return restDescriptorNode.modify()\n .withTypeDescriptor(typeDescriptor)\n .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public DoubleGTTokenNode transform(DoubleGTTokenNode doubleGTTokenNode) {\n if (!isInLineRange(doubleGTTokenNode)) {\n return doubleGTTokenNode;\n }\n Token openGTToken = getToken(doubleGTTokenNode.openGTToken());\n Token endGTToken = getToken(doubleGTTokenNode.endGTToken());\n 
return doubleGTTokenNode.modify()\n .withOpenGTToken(formatToken(openGTToken, 0, 0, 0, 0))\n .withEndGTToken(formatToken(endGTToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public TrippleGTTokenNode transform(TrippleGTTokenNode trippleGTTokenNode) {\n if (!isInLineRange(trippleGTTokenNode)) {\n return trippleGTTokenNode;\n }\n Token openGTToken = getToken(trippleGTTokenNode.openGTToken());\n Token middleGTToken = getToken(trippleGTTokenNode.middleGTToken());\n Token endGTToken = getToken(trippleGTTokenNode.endGTToken());\n return trippleGTTokenNode.modify()\n .withOpenGTToken(formatToken(openGTToken, 0, 0, 0, 0))\n .withMiddleGTToken(formatToken(middleGTToken, 0, 0, 0, 0))\n .withEndGTToken(formatToken(endGTToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public WaitActionNode transform(WaitActionNode waitActionNode) {\n if (!isInLineRange(waitActionNode)) {\n return waitActionNode;\n }\n Token waitKeyword = getToken(waitActionNode.waitKeyword());\n Node waitFutureExpr = this.modifyNode(waitActionNode.waitFutureExpr());\n return waitActionNode.modify()\n .withWaitKeyword(formatToken(waitKeyword, 1, 1, 0, 0))\n .withWaitFutureExpr(waitFutureExpr)\n .apply();\n }\n\n @Override\n public WaitFieldsListNode transform(WaitFieldsListNode waitFieldsListNode) {\n if (!isInLineRange(waitFieldsListNode)) {\n return waitFieldsListNode;\n }\n Token openBrace = getToken(waitFieldsListNode.openBrace());\n SeparatedNodeList waitFields = this.modifySeparatedNodeList(waitFieldsListNode.waitFields());\n Token closeBrace = getToken(waitFieldsListNode.closeBrace());\n return waitFieldsListNode.modify()\n .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))\n .withWaitFields(waitFields)\n .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public WaitFieldNode transform(WaitFieldNode waitFieldNode) {\n if (!isInLineRange(waitFieldNode)) {\n return waitFieldNode;\n }\n SimpleNameReferenceNode fieldName = this.modifyNode(waitFieldNode.fieldName());\n 
Token colon = getToken(waitFieldNode.colon());\n ExpressionNode waitFutureExpr = this.modifyNode(waitFieldNode.waitFutureExpr());\n return waitFieldNode.modify()\n .withFieldName(fieldName)\n .withColon(formatToken(colon, 1, 1, 0, 0))\n .withWaitFutureExpr(waitFutureExpr)\n .apply();\n }\n\n @Override\n public AnnotAccessExpressionNode transform(AnnotAccessExpressionNode annotAccessExpressionNode) {\n if (!isInLineRange(annotAccessExpressionNode)) {\n return annotAccessExpressionNode;\n }\n ExpressionNode expression = this.modifyNode(annotAccessExpressionNode.expression());\n Token annotChainingToken = getToken(annotAccessExpressionNode.annotChainingToken());\n NameReferenceNode annotTagReference = this.modifyNode(annotAccessExpressionNode.annotTagReference());\n return annotAccessExpressionNode.modify()\n .withExpression(expression)\n .withAnnotChainingToken(formatToken(annotChainingToken, 0, 0, 0, 0))\n .withAnnotTagReference(annotTagReference)\n .apply();\n }\n\n @Override\n public QueryActionNode transform(QueryActionNode queryActionNode) {\n if (!isInLineRange(queryActionNode)) {\n return queryActionNode;\n }\n QueryPipelineNode queryPipeline = this.modifyNode(queryActionNode.queryPipeline());\n Token doKeyword = getToken(queryActionNode.doKeyword());\n BlockStatementNode blockStatement = this.modifyNode(queryActionNode.blockStatement());\n LimitClauseNode limitClause = this.modifyNode(queryActionNode.limitClause().orElse(null));\n if (limitClause != null) {\n queryActionNode = queryActionNode.modify()\n .withLimitClause(limitClause).apply();\n }\n return queryActionNode.modify()\n .withQueryPipeline(queryPipeline)\n .withDoKeyword(formatToken(doKeyword, 1, 1, 0, 0))\n .withBlockStatement(blockStatement)\n .apply();\n }\n\n @Override\n public OptionalFieldAccessExpressionNode transform(\n OptionalFieldAccessExpressionNode optionalFieldAccessExpressionNode) {\n if (!isInLineRange(optionalFieldAccessExpressionNode)) {\n return 
optionalFieldAccessExpressionNode;\n }\n ExpressionNode expression = this.modifyNode(optionalFieldAccessExpressionNode.expression());\n Token optionalChainingToken = getToken(optionalFieldAccessExpressionNode.optionalChainingToken());\n NameReferenceNode fieldName = this.modifyNode(optionalFieldAccessExpressionNode.fieldName());\n return optionalFieldAccessExpressionNode.modify()\n .withExpression(expression)\n .withOptionalChainingToken(formatToken(optionalChainingToken, 0, 0, 0, 0))\n .withFieldName(fieldName)\n .apply();\n }\n\n @Override\n public ConditionalExpressionNode transform(ConditionalExpressionNode conditionalExpressionNode) {\n if (!isInLineRange(conditionalExpressionNode)) {\n return conditionalExpressionNode;\n }\n ExpressionNode lhsExpression = this.modifyNode(conditionalExpressionNode.lhsExpression());\n Token questionMarkToken = getToken(conditionalExpressionNode.questionMarkToken());\n ExpressionNode middleExpression = this.modifyNode(conditionalExpressionNode.middleExpression());\n Token colonToken = getToken(conditionalExpressionNode.colonToken());\n ExpressionNode endExpression = this.modifyNode(conditionalExpressionNode.endExpression());\n return conditionalExpressionNode.modify()\n .withLhsExpression(lhsExpression)\n .withQuestionMarkToken(formatToken(questionMarkToken, 1, 1, 0, 0))\n .withMiddleExpression(middleExpression)\n .withColonToken(formatToken(colonToken, 1, 1, 0, 0))\n .withEndExpression(endExpression)\n .apply();\n }\n\n @Override\n public EnumDeclarationNode transform(EnumDeclarationNode enumDeclarationNode) {\n if (!isInLineRange(enumDeclarationNode)) {\n return enumDeclarationNode;\n }\n MetadataNode metadata = this.modifyNode(enumDeclarationNode.metadata().orElse(null));\n Token qualifier = getToken(enumDeclarationNode.qualifier());\n Token enumKeywordToken = getToken(enumDeclarationNode.enumKeywordToken());\n IdentifierToken identifier = this.modifyNode(enumDeclarationNode.identifier());\n Token openBraceToken = 
getToken(enumDeclarationNode.openBraceToken());\n SeparatedNodeList enumMemberList = this.modifySeparatedNodeList(enumDeclarationNode.enumMemberList());\n Token closeBraceToken = getToken(enumDeclarationNode.closeBraceToken());\n if (metadata != null) {\n enumDeclarationNode = enumDeclarationNode.modify()\n .withMetadata(metadata).apply();\n }\n return enumDeclarationNode.modify()\n .withQualifier(formatToken(qualifier, 1, 1, 0, 0))\n .withEnumKeywordToken(formatToken(enumKeywordToken, 0, 1, 0, 0))\n .withIdentifier(identifier)\n .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0))\n .withEnumMemberList(enumMemberList)\n .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public EnumMemberNode transform(EnumMemberNode enumMemberNode) {\n if (!isInLineRange(enumMemberNode)) {\n return enumMemberNode;\n }\n MetadataNode metadata = this.modifyNode(enumMemberNode.metadata().orElse(null));\n IdentifierToken identifier = this.modifyNode(enumMemberNode.identifier());\n Token equalToken = getToken(enumMemberNode.equalToken().orElse(null));\n ExpressionNode constExprNode = this.modifyNode(enumMemberNode.constExprNode().orElse(null));\n if (metadata != null) {\n enumMemberNode = enumMemberNode.modify()\n .withMetadata(metadata).apply();\n }\n return enumMemberNode.modify()\n .withEqualToken(formatToken(equalToken, 1, 1, 0, 0))\n .withIdentifier(identifier)\n .withConstExprNode(constExprNode)\n .apply();\n }\n\n @Override\n public TransactionStatementNode transform(TransactionStatementNode transactionStatementNode) {\n if (!isInLineRange(transactionStatementNode)) {\n return transactionStatementNode;\n }\n Token transactionKeyword = getToken(transactionStatementNode.transactionKeyword());\n BlockStatementNode blockStatement = this.modifyNode(transactionStatementNode.blockStatement());\n return transactionStatementNode.modify()\n .withTransactionKeyword(formatToken(transactionKeyword, 1, 1, 0, 0))\n 
.withBlockStatement(blockStatement)\n .apply();\n }\n\n @Override\n public RollbackStatementNode transform(RollbackStatementNode rollbackStatementNode) {\n if (!isInLineRange(rollbackStatementNode)) {\n return rollbackStatementNode;\n }\n Token rollbackKeyword = getToken(rollbackStatementNode.rollbackKeyword());\n ExpressionNode expression = this.modifyNode(rollbackStatementNode.expression().orElse(null));\n Token semicolon = getToken(rollbackStatementNode.semicolon());\n if (expression != null) {\n rollbackStatementNode = rollbackStatementNode.modify()\n .withExpression(expression).apply();\n }\n return rollbackStatementNode.modify()\n .withRollbackKeyword(formatToken(rollbackKeyword, 1, 1, 0, 0))\n .withSemicolon(formatToken(semicolon, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RetryStatementNode transform(RetryStatementNode retryStatementNode) {\n if (!isInLineRange(retryStatementNode)) {\n return retryStatementNode;\n }\n Token retryKeyword = getToken(retryStatementNode.retryKeyword());\n TypeParameterNode typeParameter = this.modifyNode(retryStatementNode.typeParameter().orElse(null));\n ParenthesizedArgList arguments = this.modifyNode(retryStatementNode.arguments().orElse(null));\n StatementNode retryBody = this.modifyNode(retryStatementNode.retryBody());\n if (typeParameter != null) {\n retryStatementNode = retryStatementNode.modify()\n .withTypeParameter(typeParameter).apply();\n }\n if (arguments != null) {\n retryStatementNode = retryStatementNode.modify()\n .withArguments(arguments).apply();\n }\n return retryStatementNode.modify()\n .withRetryKeyword(formatToken(retryKeyword, 1, 1, 0, 0))\n .withRetryBody(retryBody)\n .apply();\n }\n\n @Override\n public CommitActionNode transform(CommitActionNode commitActionNode) {\n if (!isInLineRange(commitActionNode)) {\n return commitActionNode;\n }\n Token commitKeyword = getToken(commitActionNode.commitKeyword());\n return commitActionNode.modify()\n .withCommitKeyword(formatToken(commitKeyword, 1, 1, 
0, 0))\n .apply();\n }\n\n @Override\n public TransactionalExpressionNode transform(TransactionalExpressionNode transactionalExpressionNode) {\n if (!isInLineRange(transactionalExpressionNode)) {\n return transactionalExpressionNode;\n }\n Token transactionalKeyword = getToken(transactionalExpressionNode.transactionalKeyword());\n return transactionalExpressionNode.modify()\n .withTransactionalKeyword(formatToken(transactionalKeyword, 1, 1, 0, 0))\n .apply();\n }\n\n @Override\n public ServiceConstructorExpressionNode transform(\n ServiceConstructorExpressionNode serviceConstructorExpressionNode) {\n if (!isInLineRange(serviceConstructorExpressionNode)) {\n return serviceConstructorExpressionNode;\n }\n NodeList annotations = this.modifyNodeList(serviceConstructorExpressionNode.annotations());\n Token serviceKeyword = getToken(serviceConstructorExpressionNode.serviceKeyword());\n Node serviceBody = this.modifyNode(serviceConstructorExpressionNode.serviceBody());\n return serviceConstructorExpressionNode.modify()\n .withAnnotations(annotations)\n .withServiceKeyword(formatToken(serviceKeyword, 1, 1, 0, 0))\n .withServiceBody(serviceBody)\n .apply();\n }\n\n @Override\n public TypeReferenceTypeDescNode transform(TypeReferenceTypeDescNode typeReferenceTypeDescNode) {\n if (!isInLineRange(typeReferenceTypeDescNode)) {\n return typeReferenceTypeDescNode;\n }\n NameReferenceNode typeRef = this.modifyNode(typeReferenceTypeDescNode.typeRef());\n return typeReferenceTypeDescNode.modify()\n .withTypeRef(typeRef)\n .apply();\n }\n\n @Override\n public MatchStatementNode transform(MatchStatementNode matchStatementNode) {\n if (!isInLineRange(matchStatementNode)) {\n return matchStatementNode;\n }\n Token matchKeyword = getToken(matchStatementNode.matchKeyword());\n ExpressionNode condition = this.modifyNode(matchStatementNode.condition());\n Token openBrace = getToken(matchStatementNode.openBrace());\n NodeList matchClauses = 
this.modifyNodeList(matchStatementNode.matchClauses());\n Token closeBrace = getToken(matchStatementNode.closeBrace());\n return matchStatementNode.modify()\n .withMatchKeyword(formatToken(matchKeyword, 1, 1, 0, 0))\n .withCondition(condition)\n .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))\n .withMatchClauses(matchClauses)\n .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public MatchClauseNode transform(MatchClauseNode matchClauseNode) {\n if (!isInLineRange(matchClauseNode)) {\n return matchClauseNode;\n }\n SeparatedNodeList matchPatterns = this.modifySeparatedNodeList(matchClauseNode.matchPatterns());\n MatchGuardNode matchGuard = this.modifyNode(matchClauseNode.matchGuard().orElse(null));\n Token rightDoubleArrow = getToken(matchClauseNode.rightDoubleArrow());\n BlockStatementNode blockStatement = this.modifyNode(matchClauseNode.blockStatement());\n if (matchGuard != null) {\n matchClauseNode = matchClauseNode.modify()\n .withMatchGuard(matchGuard).apply();\n }\n return matchClauseNode.modify()\n .withMatchPatterns(matchPatterns)\n .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0))\n .withBlockStatement(blockStatement)\n .apply();\n }\n\n @Override\n public MatchGuardNode transform(MatchGuardNode matchGuardNode) {\n if (!isInLineRange(matchGuardNode)) {\n return matchGuardNode;\n }\n Token ifKeyword = getToken(matchGuardNode.ifKeyword());\n ExpressionNode expression = this.modifyNode(matchGuardNode.expression());\n return matchGuardNode.modify()\n .withIfKeyword(formatToken(ifKeyword, 0, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public DistinctTypeDescriptorNode transform(DistinctTypeDescriptorNode distinctTypeDescriptorNode) {\n if (!isInLineRange(distinctTypeDescriptorNode)) {\n return distinctTypeDescriptorNode;\n }\n Token distinctKeyword = getToken(distinctTypeDescriptorNode.distinctKeyword());\n TypeDescriptorNode typeDescriptor = 
this.modifyNode(distinctTypeDescriptorNode.typeDescriptor());\n return distinctTypeDescriptorNode.modify()\n .withDistinctKeyword(formatToken(distinctKeyword, 1, 1, 0, 0))\n .withTypeDescriptor(typeDescriptor)\n .apply();\n }\n\n @Override\n public OnConflictClauseNode transform(OnConflictClauseNode onConflictClauseNode) {\n if (!isInLineRange(onConflictClauseNode)) {\n return onConflictClauseNode;\n }\n Token onKeyword = getToken(onConflictClauseNode.onKeyword());\n Token conflictKeyword = getToken(onConflictClauseNode.conflictKeyword());\n ExpressionNode expression = this.modifyNode(onConflictClauseNode.expression());\n return onConflictClauseNode.modify()\n .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0))\n .withConflictKeyword(formatToken(conflictKeyword, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public LimitClauseNode transform(LimitClauseNode limitClauseNode) {\n if (!isInLineRange(limitClauseNode)) {\n return limitClauseNode;\n }\n Token limitKeyword = getToken(limitClauseNode.limitKeyword());\n ExpressionNode expression = this.modifyNode(limitClauseNode.expression());\n return limitClauseNode.modify()\n .withLimitKeyword(formatToken(limitKeyword, 1, 1, 0, 0))\n .withExpression(expression)\n .apply();\n }\n\n @Override\n public JoinClauseNode transform(JoinClauseNode joinClauseNode) {\n if (!isInLineRange(joinClauseNode)) {\n return joinClauseNode;\n }\n Token outerKeyword = getToken(joinClauseNode.outerKeyword().orElse(null));\n Token joinKeyword = getToken(joinClauseNode.joinKeyword());\n TypedBindingPatternNode typedBindingPattern = this.modifyNode(joinClauseNode.typedBindingPattern());\n Token inKeyword = getToken(joinClauseNode.inKeyword());\n ExpressionNode expression = this.modifyNode(joinClauseNode.expression());\n OnClauseNode joinOnCondition = this.modifyNode(joinClauseNode.joinOnCondition());\n if (outerKeyword != null) {\n joinClauseNode = joinClauseNode.modify()\n .withOuterKeyword(formatToken(outerKeyword, 1, 
1, 0, 0)).apply();\n }\n return joinClauseNode.modify()\n .withJoinKeyword(formatToken(joinKeyword, 1, 1, 0, 0))\n .withTypedBindingPattern(typedBindingPattern)\n .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0))\n .withExpression(expression)\n .withJoinOnCondition(joinOnCondition)\n .apply();\n }\n\n @Override\n public OnClauseNode transform(OnClauseNode onClauseNode) {\n if (!isInLineRange(onClauseNode)) {\n return onClauseNode;\n }\n Token onKeyword = getToken(onClauseNode.onKeyword());\n Token equalsKeyword = getToken(onClauseNode.equalsKeyword());\n ExpressionNode lhsExpr = this.modifyNode(onClauseNode.lhsExpression());\n ExpressionNode rhsExpr = this.modifyNode(onClauseNode.rhsExpression());\n return onClauseNode.modify()\n .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0))\n .withLhsExpression(lhsExpr)\n .withEqualsKeyword(formatToken(equalsKeyword, 1, 1, 0, 0))\n .withRhsExpression(rhsExpr)\n .apply();\n }\n\n @Override\n public ListMatchPatternNode transform(ListMatchPatternNode listMatchPatternNode) {\n if (!isInLineRange(listMatchPatternNode)) {\n return listMatchPatternNode;\n }\n Token openBracket = getToken(listMatchPatternNode.openBracket());\n SeparatedNodeList matchPatterns = this.modifySeparatedNodeList(listMatchPatternNode.matchPatterns());\n RestMatchPatternNode restMatchPattern = this.modifyNode(listMatchPatternNode.restMatchPattern().orElse(null));\n Token closeBracket = getToken(listMatchPatternNode.closeBracket());\n return listMatchPatternNode.modify()\n .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))\n .withMatchPatterns(matchPatterns)\n .withRestMatchPattern(restMatchPattern)\n .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public RestMatchPatternNode transform(RestMatchPatternNode restMatchPatternNode) {\n if (!isInLineRange(restMatchPatternNode)) {\n return restMatchPatternNode;\n }\n Token ellipsisToken = getToken(restMatchPatternNode.ellipsisToken());\n Token varKeywordToken = 
getToken(restMatchPatternNode.varKeywordToken());\n SimpleNameReferenceNode variableName = this.modifyNode(restMatchPatternNode.variableName());\n return restMatchPatternNode.modify()\n .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))\n .withVarKeywordToken(formatToken(varKeywordToken, 1, 1, 0, 0))\n .withVariableName(variableName)\n .apply();\n }\n\n @Override\n public FieldMatchPatternNode transform(FieldMatchPatternNode fieldMatchPatternNode) {\n if (!isInLineRange(fieldMatchPatternNode)) {\n return fieldMatchPatternNode;\n }\n SimpleNameReferenceNode fieldNameNode = this.modifyNode(fieldMatchPatternNode.fieldNameNode());\n Token colonToken = getToken(fieldMatchPatternNode.colonToken());\n Node matchPattern = this.modifyNode(fieldMatchPatternNode.matchPattern());\n return fieldMatchPatternNode.modify()\n .withFieldNameNode(fieldNameNode)\n .withColonToken(formatToken(colonToken, 1, 1, 0, 0))\n .withMatchPattern(matchPattern)\n .apply();\n }\n\n @Override\n public FunctionalMatchPatternNode transform(FunctionalMatchPatternNode functionalMatchPatternNode) {\n if (!isInLineRange(functionalMatchPatternNode)) {\n return functionalMatchPatternNode;\n }\n Node typeRef = this.modifyNode(functionalMatchPatternNode.typeRef());\n Token openParenthesisToken = getToken(functionalMatchPatternNode.openParenthesisToken());\n SeparatedNodeList argListMatchPatternNode =\n this.modifySeparatedNodeList(functionalMatchPatternNode.argListMatchPatternNode());\n Token closeParenthesisToken = getToken(functionalMatchPatternNode.closeParenthesisToken());\n return functionalMatchPatternNode.modify()\n .withTypeRef(typeRef)\n .withOpenParenthesisToken(formatToken(openParenthesisToken, 0, 0, 0, 0))\n .withArgListMatchPatternNode(argListMatchPatternNode)\n .withCloseParenthesisToken(formatToken(closeParenthesisToken, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public NamedArgMatchPatternNode transform(NamedArgMatchPatternNode namedArgMatchPatternNode) {\n if 
(!isInLineRange(namedArgMatchPatternNode)) {\n return namedArgMatchPatternNode;\n }\n IdentifierToken identifier = this.modifyNode(namedArgMatchPatternNode.identifier());\n Token equalToken = getToken(namedArgMatchPatternNode.equalToken());\n Node matchPattern = this.modifyNode(namedArgMatchPatternNode.matchPattern());\n return namedArgMatchPatternNode.modify()\n .withIdentifier(identifier)\n .withEqualToken(formatToken(equalToken, 1, 1, 0, 0))\n .withMatchPattern(matchPattern)\n .apply();\n }\n\n @Override\n public MarkdownDocumentationNode transform(MarkdownDocumentationNode markdownDocumentationNode) {\n if (!isInLineRange(markdownDocumentationNode)) {\n return markdownDocumentationNode;\n }\n NodeList documentationLines = this.modifyNodeList(markdownDocumentationNode.documentationLines());\n return markdownDocumentationNode.modify()\n .withDocumentationLines(documentationLines)\n .apply();\n }\n\n @Override\n public MarkdownDocumentationLineNode transform(MarkdownDocumentationLineNode markdownDocumentationLineNode) {\n if (!isInLineRange(markdownDocumentationLineNode)) {\n return markdownDocumentationLineNode;\n }\n Token hashToken = getToken(markdownDocumentationLineNode.hashToken());\n NodeList documentElements = this.modifyNodeList(markdownDocumentationLineNode.documentElements());\n return markdownDocumentationLineNode.modify()\n .withDocumentElements(documentElements)\n .withHashToken(formatToken(hashToken, 1, 1, 0, 0))\n .apply();\n }\n\n @Override\n public MarkdownParameterDocumentationLineNode transform(\n MarkdownParameterDocumentationLineNode markdownParameterDocumentationLineNode) {\n if (!isInLineRange(markdownParameterDocumentationLineNode)) {\n return markdownParameterDocumentationLineNode;\n }\n Token hashToken = getToken(markdownParameterDocumentationLineNode.hashToken());\n Token plusToken = getToken(markdownParameterDocumentationLineNode.plusToken());\n Token parameterName = getToken(markdownParameterDocumentationLineNode.parameterName());\n 
Token minusToken = getToken(markdownParameterDocumentationLineNode.minusToken());\n NodeList documentElements =\n this.modifyNodeList(markdownParameterDocumentationLineNode.documentElements());\n return markdownParameterDocumentationLineNode.modify()\n .withHashToken(formatToken(hashToken, 1, 1, 0, 0))\n .withPlusToken(formatToken(plusToken, 1, 1, 0, 0))\n .withParameterName(formatToken(parameterName, 1, 1, 0, 0))\n .withMinusToken(formatToken(minusToken, 1, 1, 0, 0))\n .withDocumentElements(documentElements)\n .apply();\n }\n\n @Override\n public DocumentationReferenceNode transform(DocumentationReferenceNode documentationReferenceNode) {\n if (!isInLineRange(documentationReferenceNode)) {\n return documentationReferenceNode;\n }\n Token referenceType = getToken(documentationReferenceNode.referenceType().orElse(null));\n Token startBacktick = getToken(documentationReferenceNode.startBacktick());\n Node backtickContent = this.modifyNode(documentationReferenceNode.backtickContent());\n Token endBacktick = getToken(documentationReferenceNode.endBacktick());\n if (referenceType != null) {\n documentationReferenceNode = documentationReferenceNode.modify()\n .withReferenceType(referenceType).apply();\n }\n return documentationReferenceNode.modify()\n .withStartBacktick(formatToken(startBacktick, 0, 0, 0, 0))\n .withBacktickContent(backtickContent)\n .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0))\n .apply();\n }\n\n @Override\n public OrderByClauseNode transform(OrderByClauseNode orderByClauseNode) {\n if (!isInLineRange(orderByClauseNode)) {\n return orderByClauseNode;\n }\n Token orderKeyword = getToken(orderByClauseNode.orderKeyword());\n Token byKeyword = getToken(orderByClauseNode.byKeyword());\n SeparatedNodeList orderKey = this.modifySeparatedNodeList(orderByClauseNode.orderKey());\n return orderByClauseNode.modify()\n .withOrderKeyword(formatToken(orderKeyword, 1, 1, 0, 0))\n .withByKeyword(formatToken(byKeyword, 1, 1, 0, 0))\n .withOrderKey(orderKey)\n 
.apply();\n }\n\n @Override\n public OrderKeyNode transform(OrderKeyNode orderKeyNode) {\n if (!isInLineRange(orderKeyNode)) {\n return orderKeyNode;\n }\n ExpressionNode expression = this.modifyNode(orderKeyNode.expression());\n Token orderDirection = getToken(orderKeyNode.orderDirection().orElse(null));\n if (orderDirection != null) {\n orderKeyNode = orderKeyNode.modify()\n .withOrderDirection(formatToken(orderDirection, 1, 1, 0, 0)).apply();\n }\n return orderKeyNode.modify()\n .withExpression(expression)\n .apply();\n }\n\n /**\n * Update the minutiae and return the token.\n *\n * @param token token\n * @param leadingSpaces leading spaces\n * @param trailingSpaces trailing spaces\n * @param leadingNewLines leading new lines\n * @param trailingNewLines trailing new lines\n * @return updated token\n */\n private Token formatToken(Token token, int leadingSpaces, int trailingSpaces, int leadingNewLines,\n int trailingNewLines) {\n if (token == null) {\n return token;\n }\n MinutiaeList leadingMinutiaeList = token.leadingMinutiae();\n MinutiaeList trailingMinutiaeList = token.trailingMinutiae();\n\n MinutiaeList newLeadingMinutiaeList = modifyMinutiaeList(leadingMinutiaeList, leadingSpaces, leadingNewLines);\n MinutiaeList newTrailingMinutiaeList = modifyMinutiaeList(trailingMinutiaeList, trailingSpaces,\n trailingNewLines);\n\n return token.modify(newLeadingMinutiaeList, newTrailingMinutiaeList);\n }\n\n private MinutiaeList modifyMinutiaeList(MinutiaeList minutiaeList, int spaces, int newLines) {\n Minutiae minutiae = NodeFactory.createWhitespaceMinutiae(getWhiteSpaces(spaces, newLines));\n return minutiaeList.add(minutiae);\n }\n\n private String getWhiteSpaces(int column, int newLines) {\n StringBuilder whiteSpaces = new StringBuilder();\n for (int i = 0; i <= (newLines - 1); i++) {\n whiteSpaces.append(System.getProperty(\"line.separator\"));\n }\n for (int i = 0; i <= (column - 1); i++) {\n whiteSpaces.append(\" \");\n }\n\n return whiteSpaces.toString();\n 
}\n\n /**\n * Initialize the token with empty minutiae lists.\n *\n * @param node node\n * @return token with empty minutiae\n */\n private Token getToken(T node) {\n if (node == null) {\n return node;\n }\n MinutiaeList leadingMinutiaeList = AbstractNodeFactory.createEmptyMinutiaeList();\n MinutiaeList trailingMinutiaeList = AbstractNodeFactory.createEmptyMinutiaeList();\n if (node.containsLeadingMinutiae()) {\n leadingMinutiaeList = getCommentMinutiae(node.leadingMinutiae(), true);\n }\n if (node.containsTrailingMinutiae()) {\n trailingMinutiaeList = getCommentMinutiae(node.trailingMinutiae(), false);\n }\n return node.modify(leadingMinutiaeList, trailingMinutiaeList);\n }\n\n private MinutiaeList getCommentMinutiae(MinutiaeList minutiaeList, boolean isLeading) {\n MinutiaeList minutiaes = AbstractNodeFactory.createEmptyMinutiaeList();\n for (int i = 0; i < minutiaeList.size(); i++) {\n if (minutiaeList.get(i).kind().equals(SyntaxKind.COMMENT_MINUTIAE)) {\n if (i > 0) {\n minutiaes = minutiaes.add(minutiaeList.get(i - 1));\n }\n minutiaes = minutiaes.add(minutiaeList.get(i));\n if ((i + 1) < minutiaeList.size() && isLeading) {\n minutiaes = minutiaes.add(minutiaeList.get(i + 1));\n }\n }\n }\n return minutiaes;\n }\n\n \n private Node getParent(T node, SyntaxKind syntaxKind) {\n Node parent = node.parent();\n if (parent == null) {\n parent = node;\n }\n SyntaxKind parentKind = parent.kind();\n if (parentKind == SyntaxKind.MODULE_VAR_DECL) {\n if (parent.parent() != null && parent.parent().kind() == SyntaxKind.MODULE_PART &&\n syntaxKind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\n return null;\n }\n return parent;\n } else if (parentKind == SyntaxKind.FUNCTION_DEFINITION ||\n parentKind == SyntaxKind.IF_ELSE_STATEMENT ||\n parentKind == SyntaxKind.ELSE_BLOCK ||\n parentKind == SyntaxKind.SPECIFIC_FIELD ||\n parentKind == SyntaxKind.WHILE_STATEMENT) {\n return parent;\n } else if (syntaxKind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\n if (parentKind == 
SyntaxKind.REQUIRED_PARAM ||\n parentKind == SyntaxKind.POSITIONAL_ARG ||\n parentKind == SyntaxKind.BINARY_EXPRESSION ||\n parentKind == SyntaxKind.RETURN_STATEMENT ||\n parentKind == SyntaxKind.LOCAL_VAR_DECL ||\n (parentKind == SyntaxKind.FUNCTION_CALL && parent.parent() != null &&\n parent.parent().kind() == SyntaxKind.ASSIGNMENT_STATEMENT)) {\n return null;\n }\n return getParent(parent, syntaxKind);\n\n } else if (parentKind == SyntaxKind.SERVICE_DECLARATION ||\n parentKind == SyntaxKind.BINARY_EXPRESSION) {\n if (syntaxKind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\n return null;\n }\n return parent;\n } else if (parentKind == SyntaxKind.REQUIRED_PARAM ||\n parentKind == SyntaxKind.RETURN_TYPE_DESCRIPTOR) {\n return null;\n } else if (parent.parent() != null) {\n return getParent(parent, syntaxKind);\n } else {\n return null;\n }\n }\n\n /**\n * Get the node position.\n *\n * @param node node\n * @return node position\n */\n private DiagnosticPos getPosition(Node node) {\n if (node == null) {\n return null;\n }\n LineRange range = node.lineRange();\n LinePosition startPos = range.startLine();\n LinePosition endPos = range.endLine();\n return new DiagnosticPos(null, startPos.line() + 1, endPos.line() + 1,\n startPos.offset(), endPos.offset());\n }\n\n /**\n * return the indented start column.\n *\n * @param node node\n * @param syntaxKind node kind\n * @param addSpaces add spaces or not\n * @return start position\n */\n private int getStartColumn(Node node, SyntaxKind syntaxKind, boolean addSpaces) {\n Node parent = getParent(node, syntaxKind);\n if (parent != null) {\n return getPosition(parent).sCol + (addSpaces ? 
4 : 0);\n }\n return 0;\n }\n\n private boolean isInLineRange(Node node) {\n if (this.lineRange == null) {\n return true;\n }\n int nodeStartLine = node.lineRange().startLine().line();\n int nodeStartOffset = node.lineRange().startLine().offset();\n int nodeEndLine = node.lineRange().endLine().line();\n int nodeEndOffset = node.lineRange().endLine().offset();\n\n int startLine = this.lineRange.startLine().line();\n int startOffset = this.lineRange.startLine().offset();\n int endLine = this.lineRange.endLine().line();\n int endOffset = this.lineRange.endLine().offset();\n\n if (nodeStartLine >= startLine && nodeEndLine <= endLine) {\n if (nodeStartLine == startLine || nodeEndLine == endLine) {\n return nodeStartOffset >= startOffset && nodeEndOffset <= endOffset;\n }\n return true;\n }\n return false;\n }\n\n public FormattingOptions getFormattingOptions() {\n return formattingOptions;\n }\n\n void setFormattingOptions(FormattingOptions formattingOptions) {\n this.formattingOptions = formattingOptions;\n }\n\n void setLineRange(LineRange lineRange) {\n this.lineRange = lineRange;\n }\n}" }, { "comment": "this is fixed size, no need to resize", "method_body": "private Statistics computeOlapScan(LogicalOlapScan olapScan) {\n OlapTable olapTable = olapScan.getTable();\n\n if (olapScan.getSelectedIndexId() != olapScan.getTable().getBaseIndexId() || olapTable instanceof MTMV) {\n \n Optional optStats = cascadesContext.getStatementContext()\n .getStatistics(olapScan.getRelationId());\n if (optStats.isPresent()) {\n double actualRowCount = olapScan.getTable().getRowCountForNereids();\n \n if (actualRowCount > optStats.get().getRowCount()) {\n return optStats.get();\n }\n }\n }\n\n StatisticsBuilder builder = new StatisticsBuilder();\n\n \n if (StatisticConstants.isSystemTable(olapTable) || !FeConstants.enableInternalSchemaDb\n || ConnectContext.get() == null\n || ConnectContext.get().getSessionVariable().internalSession) {\n for (Slot slot : olapScan.getOutput()) {\n 
builder.putColumnStatistics(slot, ColumnStatistic.UNKNOWN);\n }\n setHasUnknownColStatsInStatementContext();\n builder.setRowCount(olapTable.getRowCountForNereids());\n return builder.build();\n }\n\n \n if (ConnectContext.get() == null || !ConnectContext.get().getSessionVariable().enableStats) {\n \n double rowCount = 1;\n for (Slot slot : olapScan.getOutput()) {\n if (isVisibleSlotReference(slot)) {\n ColumnStatistic cache = getColumnStatistic(olapTable, slot.getName(),\n olapScan.getSelectedIndexIdForMV());\n rowCount = Math.max(rowCount, cache.count);\n }\n builder.putColumnStatistics(slot,\n new ColumnStatisticBuilder(ColumnStatistic.UNKNOWN).setCount(rowCount).build());\n }\n setHasUnknownColStatsInStatementContext();\n return builder.setRowCount(rowCount).build();\n }\n\n \n \n List outputSlotReferences = new ArrayList<>();\n for (Slot slot : olapScan.getOutput()) {\n if (isVisibleSlotReference(slot)) {\n outputSlotReferences.add((SlotReference) slot);\n } else {\n builder.putColumnStatistics(slot, ColumnStatistic.UNKNOWN);\n }\n }\n \n if (!olapScan.getSelectedPartitionIds().isEmpty()) {\n double rowCount = getSelectedPartitionRowCount(olapScan);\n \n if (rowCount > 0) {\n List selectedPartitionNames = new ArrayList<>(olapScan.getSelectedPartitionIds().size());\n olapScan.getSelectedPartitionIds().forEach(id -> {\n selectedPartitionNames.add(olapScan.getTable().getPartition(id).getName());\n });\n for (SlotReference slot : outputSlotReferences) {\n ColumnStatistic cache = getColumnStatsFromPartitionCache(olapScan, slot, selectedPartitionNames);\n ColumnStatisticBuilder colStatsBuilder = new ColumnStatisticBuilder(cache);\n adjustColStats(olapScan, slot, colStatsBuilder);\n builder.putColumnStatistics(slot, colStatsBuilder.build());\n rowCount = Math.max(rowCount, colStatsBuilder.getCount());\n }\n checkIfUnknownStatsUsedAsKey(builder);\n return builder.setRowCount(rowCount).build();\n }\n }\n\n \n double rowCount = 
olapScan.getTable().getRowCountForNereids();\n for (SlotReference slot : outputSlotReferences) {\n ColumnStatistic cache = getColumnStatsFromTableCache(olapScan, slot);\n ColumnStatisticBuilder colStatsBuilder = new ColumnStatisticBuilder(cache);\n adjustColStats(olapScan, slot, colStatsBuilder);\n builder.putColumnStatistics(slot, colStatsBuilder.build());\n rowCount = Math.max(rowCount, colStatsBuilder.getCount());\n }\n checkIfUnknownStatsUsedAsKey(builder);\n return builder.setRowCount(rowCount).build();\n }", "target_code": "List selectedPartitionNames = new ArrayList<>(olapScan.getSelectedPartitionIds().size());", "method_body_after": "private Statistics computeOlapScan(LogicalOlapScan olapScan) {\n OlapTable olapTable = olapScan.getTable();\n\n if (olapScan.getSelectedIndexId() != olapScan.getTable().getBaseIndexId() || olapTable instanceof MTMV) {\n \n Optional optStats = cascadesContext.getStatementContext()\n .getStatistics(olapScan.getRelationId());\n if (optStats.isPresent()) {\n double actualRowCount = olapScan.getTable().getRowCountForNereids();\n \n if (actualRowCount > optStats.get().getRowCount()) {\n return optStats.get();\n }\n }\n }\n\n StatisticsBuilder builder = new StatisticsBuilder();\n\n \n if (StatisticConstants.isSystemTable(olapTable) || !FeConstants.enableInternalSchemaDb\n || ConnectContext.get() == null\n || ConnectContext.get().getSessionVariable().internalSession) {\n for (Slot slot : olapScan.getOutput()) {\n builder.putColumnStatistics(slot, ColumnStatistic.UNKNOWN);\n }\n setHasUnknownColStatsInStatementContext();\n builder.setRowCount(olapTable.getRowCountForNereids());\n return builder.build();\n }\n\n \n if (ConnectContext.get() == null || !ConnectContext.get().getSessionVariable().enableStats) {\n \n double rowCount = 1;\n for (Slot slot : olapScan.getOutput()) {\n if (isVisibleSlotReference(slot)) {\n ColumnStatistic cache = getColumnStatistic(olapTable, slot.getName(),\n olapScan.getSelectedIndexIdForMV());\n rowCount = 
Math.max(rowCount, cache.count);\n }\n builder.putColumnStatistics(slot,\n new ColumnStatisticBuilder(ColumnStatistic.UNKNOWN).setCount(rowCount).build());\n }\n setHasUnknownColStatsInStatementContext();\n return builder.setRowCount(rowCount).build();\n }\n\n \n \n List outputSlotReferences = new ArrayList<>();\n for (Slot slot : olapScan.getOutput()) {\n if (isVisibleSlotReference(slot)) {\n outputSlotReferences.add((SlotReference) slot);\n } else {\n builder.putColumnStatistics(slot, ColumnStatistic.UNKNOWN);\n }\n }\n \n if (!olapScan.getSelectedPartitionIds().isEmpty()) {\n double rowCount = getSelectedPartitionRowCount(olapScan);\n \n if (rowCount > 0) {\n List selectedPartitionNames = new ArrayList<>(olapScan.getSelectedPartitionIds().size());\n olapScan.getSelectedPartitionIds().forEach(id -> {\n selectedPartitionNames.add(olapScan.getTable().getPartition(id).getName());\n });\n for (SlotReference slot : outputSlotReferences) {\n ColumnStatistic cache = getColumnStatsFromPartitionCache(olapScan, slot, selectedPartitionNames);\n ColumnStatisticBuilder colStatsBuilder = new ColumnStatisticBuilder(cache);\n adjustColStats(olapScan, slot, colStatsBuilder);\n builder.putColumnStatistics(slot, colStatsBuilder.build());\n rowCount = Math.max(rowCount, colStatsBuilder.getCount());\n }\n checkIfUnknownStatsUsedAsKey(builder);\n return builder.setRowCount(rowCount).build();\n }\n }\n\n \n double rowCount = olapScan.getTable().getRowCountForNereids();\n for (SlotReference slot : outputSlotReferences) {\n ColumnStatistic cache = getColumnStatsFromTableCache(olapScan, slot);\n ColumnStatisticBuilder colStatsBuilder = new ColumnStatisticBuilder(cache);\n adjustColStats(olapScan, slot, colStatsBuilder);\n builder.putColumnStatistics(slot, colStatsBuilder.build());\n rowCount = Math.max(rowCount, colStatsBuilder.getCount());\n }\n checkIfUnknownStatsUsedAsKey(builder);\n return builder.setRowCount(rowCount).build();\n }", "context_before": "class StatsCalculator extends 
DefaultPlanVisitor {\n public static double DEFAULT_AGGREGATE_RATIO = 0.5;\n public static double AGGREGATE_COLUMN_CORRELATION_COEFFICIENT = 0.75;\n public static double DEFAULT_COLUMN_NDV_RATIO = 0.5;\n\n private static final Logger LOG = LogManager.getLogger(StatsCalculator.class);\n private final GroupExpression groupExpression;\n\n private boolean forbidUnknownColStats = false;\n\n private Map totalColumnStatisticMap = new HashMap<>();\n\n private boolean isPlayNereidsDump = false;\n\n private Map totalHistogramMap = new HashMap<>();\n\n private Map cteIdToStats;\n\n private CascadesContext cascadesContext;\n\n private StatsCalculator(GroupExpression groupExpression, boolean forbidUnknownColStats,\n Map columnStatisticMap, boolean isPlayNereidsDump,\n Map cteIdToStats, CascadesContext context) {\n this.groupExpression = groupExpression;\n this.forbidUnknownColStats = forbidUnknownColStats;\n this.totalColumnStatisticMap = columnStatisticMap;\n this.isPlayNereidsDump = isPlayNereidsDump;\n this.cteIdToStats = Objects.requireNonNull(cteIdToStats, \"CTEIdToStats can't be null\");\n this.cascadesContext = context;\n }\n\n public Map getTotalHistogramMap() {\n return totalHistogramMap;\n }\n\n public void setTotalHistogramMap(Map totalHistogramMap) {\n this.totalHistogramMap = totalHistogramMap;\n }\n\n public Map getTotalColumnStatisticMap() {\n return totalColumnStatisticMap;\n }\n\n /**\n * estimate stats\n */\n public static StatsCalculator estimate(GroupExpression groupExpression, boolean forbidUnknownColStats,\n Map columnStatisticMap, boolean isPlayNereidsDump,\n Map cteIdToStats, CascadesContext context) {\n StatsCalculator statsCalculator = new StatsCalculator(\n groupExpression, forbidUnknownColStats, columnStatisticMap, isPlayNereidsDump, cteIdToStats, context);\n statsCalculator.estimate();\n return statsCalculator;\n }\n\n public static StatsCalculator estimate(GroupExpression groupExpression, boolean forbidUnknownColStats,\n Map columnStatisticMap, 
boolean isPlayNereidsDump, CascadesContext context) {\n return StatsCalculator.estimate(groupExpression,\n forbidUnknownColStats,\n columnStatisticMap,\n isPlayNereidsDump,\n new HashMap<>(), context);\n }\n\n \n public static void estimate(GroupExpression groupExpression, CascadesContext context) {\n StatsCalculator statsCalculator = new StatsCalculator(groupExpression, false,\n new HashMap<>(), false, Collections.emptyMap(), context);\n statsCalculator.estimate();\n }\n\n private void estimate() {\n Plan plan = groupExpression.getPlan();\n Statistics newStats = plan.accept(this, null);\n newStats.enforceValid();\n\n \n if (groupExpression.getOwnerGroup().getStatistics() == null) {\n boolean isReliable = groupExpression.getPlan().getExpressions().stream()\n .noneMatch(e -> newStats.isInputSlotsUnknown(e.getInputSlots()));\n groupExpression.getOwnerGroup().setStatsReliable(isReliable);\n groupExpression.getOwnerGroup().setStatistics(newStats);\n groupExpression.setEstOutputRowCount(newStats.getRowCount());\n } else {\n \n \n \n \n \n \n \n \n \n \n \n groupExpression.getOwnerGroup().getStatistics().updateNdv(newStats);\n }\n groupExpression.setStatDerived(true);\n }\n\n @Override\n public Statistics visitLogicalSink(LogicalSink logicalSink, Void context) {\n return groupExpression.childStatistics(0);\n }\n\n @Override\n public Statistics visitLogicalEmptyRelation(LogicalEmptyRelation emptyRelation, Void context) {\n return computeEmptyRelation(emptyRelation);\n }\n\n @Override\n public Statistics visitLogicalLimit(LogicalLimit limit, Void context) {\n return computeLimit(limit);\n }\n\n @Override\n public Statistics visitPhysicalLimit(PhysicalLimit limit, Void context) {\n return computeLimit(limit);\n }\n\n @Override\n public Statistics visitLogicalOneRowRelation(LogicalOneRowRelation oneRowRelation, Void context) {\n return computeOneRowRelation(oneRowRelation.getProjects());\n }\n\n @Override\n public Statistics visitLogicalAggregate(LogicalAggregate aggregate, 
Void context) {\n return computeAggregate(aggregate);\n }\n\n @Override\n public Statistics visitLogicalRepeat(LogicalRepeat repeat, Void context) {\n return computeRepeat(repeat);\n }\n\n @Override\n public Statistics visitLogicalFilter(LogicalFilter filter, Void context) {\n return computeFilter(filter);\n }\n\n /**\n * returns the sum of deltaRowCount for all selected partitions or for the table.\n */\n private long computeDeltaRowCount(OlapScan olapScan, SlotReference slot) {\n AnalysisManager analysisManager = Env.getCurrentEnv().getAnalysisManager();\n TableStatsMeta tableMeta = analysisManager.findTableStatsStatus(olapScan.getTable().getId());\n\n long deltaRowCount = 0;\n if (tableMeta != null) {\n ColStatsMeta colMeta = tableMeta.findColumnStatsMeta(\n olapScan.getTable().getIndexNameById(olapScan.getSelectedIndexId()), slot.getName());\n if (colMeta != null) {\n if (olapScan.getSelectedPartitionIds().isEmpty()) {\n deltaRowCount = tableMeta.updatedRows.get() - colMeta.updatedRows;\n } else {\n \n for (long partitionId : olapScan.getSelectedPartitionIds()) {\n deltaRowCount += tableMeta.partitionUpdateRows.getOrDefault(partitionId, 0L)\n - colMeta.partitionUpdateRows.getOrDefault(partitionId, 0L);\n }\n }\n }\n }\n return deltaRowCount;\n }\n\n private void adjustColStats(CatalogRelation catalogRelation, SlotReference slot,\n ColumnStatisticBuilder builder) {\n if (builder.getAvgSizeByte() <= 0) {\n builder.setAvgSizeByte(slot.getDataType().toCatalogDataType().getSlotSize());\n }\n if (catalogRelation instanceof OlapScan) {\n OlapScan olapScan = (OlapScan) catalogRelation;\n long delta = computeDeltaRowCount(olapScan, slot);\n if (delta > 0) {\n builder.setCount(builder.getCount() + delta);\n \n \n \n \n builder.setMinExpr(null).setMinValue(Double.NEGATIVE_INFINITY)\n .setMaxExpr(null).setMaxValue(Double.POSITIVE_INFINITY);\n }\n }\n }\n\n private ColumnStatistic getColumnStatsFromTableCache(CatalogRelation catalogRelation, SlotReference slot) {\n long 
idxId = -1;\n if (catalogRelation instanceof OlapScan) {\n idxId = ((OlapScan) catalogRelation).getSelectedIndexIdForMV();\n }\n return getColumnStatistic(catalogRelation.getTable(), slot.getName(), idxId);\n }\n\n private ColumnStatistic getColumnStatsFromPartitionCache(CatalogRelation catalogRelation, SlotReference slot,\n List partitionNames) {\n long idxId = -1;\n if (catalogRelation instanceof OlapScan) {\n idxId = ((OlapScan) catalogRelation).getSelectedIndexIdForMV();\n }\n return getColumnStatistic(catalogRelation.getTable(), slot.getName(), idxId, partitionNames);\n }\n\n private long getSelectedPartitionRowCount(OlapScan olapScan) {\n long partRowCountSum = 0;\n for (long id : olapScan.getSelectedPartitionIds()) {\n long partRowCount = olapScan.getTable().getPartition(id).getBaseIndex().getRowCount();\n \n if (partRowCount <= 0) {\n return -1;\n }\n partRowCountSum += partRowCount;\n }\n return partRowCountSum;\n }\n\n private void setHasUnknownColStatsInStatementContext() {\n if (ConnectContext.get() != null && ConnectContext.get().getStatementContext() != null) {\n ConnectContext.get().getStatementContext().setHasUnknownColStats(true);\n }\n }\n\n private void checkIfUnknownStatsUsedAsKey(StatisticsBuilder builder) {\n if (ConnectContext.get() != null && ConnectContext.get().getStatementContext() != null) {\n for (Map.Entry entry : builder.getExpressionColumnStatsEntries()) {\n if (entry.getKey() instanceof SlotReference\n && ConnectContext.get().getStatementContext().isKeySlot((SlotReference) entry.getKey())) {\n if (entry.getValue().isUnKnown) {\n ConnectContext.get().getStatementContext().setHasUnknownColStats(true);\n break;\n }\n }\n }\n }\n }\n\n \n\n @Override\n public Statistics visitLogicalOlapScan(LogicalOlapScan olapScan, Void context) {\n return computeOlapScan(olapScan);\n }\n\n private boolean isVisibleSlotReference(Slot slot) {\n if (slot instanceof SlotReference) {\n Optional colOpt = ((SlotReference) slot).getColumn();\n if 
(colOpt.isPresent()) {\n return colOpt.get().isVisible();\n }\n }\n return false;\n }\n\n @Override\n public Statistics visitLogicalDeferMaterializeOlapScan(LogicalDeferMaterializeOlapScan deferMaterializeOlapScan,\n Void context) {\n return computeOlapScan(deferMaterializeOlapScan.getLogicalOlapScan());\n }\n\n @Override\n public Statistics visitLogicalSchemaScan(LogicalSchemaScan schemaScan, Void context) {\n return computeCatalogRelation(schemaScan);\n }\n\n @Override\n public Statistics visitLogicalFileScan(LogicalFileScan fileScan, Void context) {\n fileScan.getExpressions();\n return computeCatalogRelation(fileScan);\n }\n\n @Override\n public Statistics visitLogicalHudiScan(LogicalHudiScan fileScan, Void context) {\n return computeCatalogRelation(fileScan);\n }\n\n @Override\n public Statistics visitLogicalTVFRelation(LogicalTVFRelation tvfRelation, Void context) {\n return tvfRelation.getFunction().computeStats(tvfRelation.getOutput());\n }\n\n @Override\n public Statistics visitLogicalJdbcScan(LogicalJdbcScan jdbcScan, Void context) {\n jdbcScan.getExpressions();\n return computeCatalogRelation(jdbcScan);\n }\n\n @Override\n public Statistics visitLogicalOdbcScan(LogicalOdbcScan odbcScan, Void context) {\n odbcScan.getExpressions();\n return computeCatalogRelation(odbcScan);\n }\n\n @Override\n public Statistics visitLogicalEsScan(LogicalEsScan esScan, Void context) {\n esScan.getExpressions();\n return computeCatalogRelation(esScan);\n }\n\n @Override\n public Statistics visitLogicalProject(LogicalProject project, Void context) {\n return computeProject(project);\n }\n\n @Override\n public Statistics visitLogicalSort(LogicalSort sort, Void context) {\n return groupExpression.childStatistics(0);\n }\n\n @Override\n public Statistics visitLogicalTopN(LogicalTopN topN, Void context) {\n return computeTopN(topN);\n }\n\n @Override\n public Statistics visitLogicalDeferMaterializeTopN(LogicalDeferMaterializeTopN topN, Void context) {\n return 
computeTopN(topN.getLogicalTopN());\n }\n\n @Override\n public Statistics visitLogicalPartitionTopN(LogicalPartitionTopN partitionTopN, Void context) {\n return computePartitionTopN(partitionTopN);\n }\n\n @Override\n public Statistics visitLogicalJoin(LogicalJoin join, Void context) {\n Statistics joinStats = JoinEstimation.estimate(groupExpression.childStatistics(0),\n groupExpression.childStatistics(1), join);\n joinStats = new StatisticsBuilder(joinStats).setWidthInJoinCluster(\n groupExpression.childStatistics(0).getWidthInJoinCluster()\n + groupExpression.childStatistics(1).getWidthInJoinCluster()).build();\n return joinStats;\n }\n\n @Override\n public Statistics visitLogicalAssertNumRows(\n LogicalAssertNumRows assertNumRows, Void context) {\n return computeAssertNumRows(assertNumRows.getAssertNumRowsElement());\n }\n\n @Override\n public Statistics visitLogicalUnion(\n LogicalUnion union, Void context) {\n return computeUnion(union);\n }\n\n @Override\n public Statistics visitLogicalExcept(\n LogicalExcept except, Void context) {\n return computeExcept(except);\n }\n\n @Override\n public Statistics visitLogicalIntersect(\n LogicalIntersect intersect, Void context) {\n return computeIntersect(intersect);\n }\n\n @Override\n public Statistics visitLogicalGenerate(LogicalGenerate generate, Void context) {\n return computeGenerate(generate);\n }\n\n @Override\n public Statistics visitLogicalWindow(LogicalWindow window, Void context) {\n return computeWindow(window);\n }\n\n @Override\n public Statistics visitPhysicalSink(PhysicalSink physicalSink, Void context) {\n return groupExpression.childStatistics(0);\n }\n\n @Override\n public Statistics visitPhysicalWindow(PhysicalWindow window, Void context) {\n return computeWindow(window);\n }\n\n @Override\n public Statistics visitPhysicalPartitionTopN(PhysicalPartitionTopN partitionTopN, Void context) {\n return computePartitionTopN(partitionTopN);\n }\n\n @Override\n public Statistics 
visitPhysicalEmptyRelation(PhysicalEmptyRelation emptyRelation, Void context) {\n return computeEmptyRelation(emptyRelation);\n }\n\n @Override\n public Statistics visitPhysicalHashAggregate(PhysicalHashAggregate agg, Void context) {\n return computeAggregate(agg);\n }\n\n @Override\n public Statistics visitPhysicalRepeat(PhysicalRepeat repeat, Void context) {\n return computeRepeat(repeat);\n }\n\n @Override\n public Statistics visitPhysicalOneRowRelation(PhysicalOneRowRelation oneRowRelation, Void context) {\n return computeOneRowRelation(oneRowRelation.getProjects());\n }\n\n @Override\n public Statistics visitPhysicalOlapScan(PhysicalOlapScan olapScan, Void context) {\n return computeCatalogRelation(olapScan);\n }\n\n @Override\n public Statistics visitPhysicalDeferMaterializeOlapScan(PhysicalDeferMaterializeOlapScan deferMaterializeOlapScan,\n Void context) {\n return computeCatalogRelation(deferMaterializeOlapScan.getPhysicalOlapScan());\n }\n\n @Override\n public Statistics visitPhysicalSchemaScan(PhysicalSchemaScan schemaScan, Void context) {\n return computeCatalogRelation(schemaScan);\n }\n\n @Override\n public Statistics visitPhysicalFileScan(PhysicalFileScan fileScan, Void context) {\n return computeCatalogRelation(fileScan);\n }\n\n @Override\n public Statistics visitPhysicalStorageLayerAggregate(\n PhysicalStorageLayerAggregate storageLayerAggregate, Void context) {\n return storageLayerAggregate.getRelation().accept(this, context);\n }\n\n @Override\n public Statistics visitPhysicalTVFRelation(PhysicalTVFRelation tvfRelation, Void context) {\n return tvfRelation.getFunction().computeStats(tvfRelation.getOutput());\n }\n\n @Override\n public Statistics visitPhysicalJdbcScan(PhysicalJdbcScan jdbcScan, Void context) {\n return computeCatalogRelation(jdbcScan);\n }\n\n @Override\n public Statistics visitPhysicalOdbcScan(PhysicalOdbcScan odbcScan, Void context) {\n return computeCatalogRelation(odbcScan);\n }\n\n @Override\n public Statistics 
visitPhysicalEsScan(PhysicalEsScan esScan, Void context) {\n return computeCatalogRelation(esScan);\n }\n\n @Override\n public Statistics visitPhysicalQuickSort(PhysicalQuickSort sort, Void context) {\n return groupExpression.childStatistics(0);\n }\n\n @Override\n public Statistics visitPhysicalTopN(PhysicalTopN topN, Void context) {\n return computeTopN(topN);\n }\n\n @Override\n public Statistics visitPhysicalDeferMaterializeTopN(PhysicalDeferMaterializeTopN topN,\n Void context) {\n return computeTopN(topN.getPhysicalTopN());\n }\n\n @Override\n public Statistics visitPhysicalHashJoin(\n PhysicalHashJoin hashJoin, Void context) {\n return JoinEstimation.estimate(groupExpression.childStatistics(0),\n groupExpression.childStatistics(1), hashJoin);\n }\n\n @Override\n public Statistics visitPhysicalNestedLoopJoin(\n PhysicalNestedLoopJoin nestedLoopJoin,\n Void context) {\n return JoinEstimation.estimate(groupExpression.childStatistics(0),\n groupExpression.childStatistics(1), nestedLoopJoin);\n }\n\n \n @Override\n public Statistics visitPhysicalProject(PhysicalProject project, Void context) {\n return computeProject(project);\n }\n\n @Override\n public Statistics visitPhysicalFilter(PhysicalFilter filter, Void context) {\n return computeFilter(filter);\n }\n\n @Override\n public Statistics visitPhysicalDistribute(PhysicalDistribute distribute,\n Void context) {\n return groupExpression.childStatistics(0);\n }\n\n @Override\n public Statistics visitPhysicalAssertNumRows(PhysicalAssertNumRows assertNumRows,\n Void context) {\n return computeAssertNumRows(assertNumRows.getAssertNumRowsElement());\n }\n\n @Override\n public Statistics visitPhysicalUnion(PhysicalUnion union, Void context) {\n return computeUnion(union);\n }\n\n @Override\n public Statistics visitPhysicalExcept(PhysicalExcept except, Void context) {\n return computeExcept(except);\n }\n\n @Override\n public Statistics visitPhysicalIntersect(PhysicalIntersect intersect, Void context) {\n return 
computeIntersect(intersect);\n }\n\n @Override\n public Statistics visitPhysicalGenerate(PhysicalGenerate generate, Void context) {\n return computeGenerate(generate);\n }\n\n private Statistics computeAssertNumRows(AssertNumRowsElement assertNumRowsElement) {\n Statistics statistics = groupExpression.childStatistics(0);\n long newRowCount;\n long rowCount = (long) statistics.getRowCount();\n long desiredNumOfRows = assertNumRowsElement.getDesiredNumOfRows();\n switch (assertNumRowsElement.getAssertion()) {\n case EQ:\n newRowCount = desiredNumOfRows;\n break;\n case GE:\n newRowCount = statistics.getRowCount() >= desiredNumOfRows ? rowCount : desiredNumOfRows;\n break;\n case GT:\n newRowCount = statistics.getRowCount() > desiredNumOfRows ? rowCount : desiredNumOfRows;\n break;\n case LE:\n newRowCount = statistics.getRowCount() <= desiredNumOfRows ? rowCount : desiredNumOfRows;\n break;\n case LT:\n newRowCount = statistics.getRowCount() < desiredNumOfRows ? rowCount : desiredNumOfRows;\n break;\n case NE:\n return statistics;\n default:\n throw new IllegalArgumentException(\"Unknown assertion: \" + assertNumRowsElement.getAssertion());\n }\n Statistics newStatistics = statistics.withRowCountAndEnforceValid(newRowCount);\n return new StatisticsBuilder(newStatistics).setWidthInJoinCluster(1).build();\n }\n\n private Statistics computeFilter(Filter filter) {\n Statistics stats = groupExpression.childStatistics(0);\n if (groupExpression.getFirstChildPlan(OlapScan.class) != null) {\n return new FilterEstimation(true).estimate(filter.getPredicate(), stats);\n }\n if (groupExpression.getFirstChildPlan(Aggregate.class) != null) {\n Aggregate agg = (Aggregate) groupExpression.getFirstChildPlan(Aggregate.class);\n List expressions = agg.getOutputExpressions();\n Set slots = expressions\n .stream()\n .filter(Alias.class::isInstance)\n .filter(s -> ((Alias) s).child().anyMatch(AggregateFunction.class::isInstance))\n 
.map(NamedExpression::toSlot).collect(Collectors.toSet());\n Expression predicate = filter.getPredicate();\n if (predicate.anyMatch(s -> slots.contains(s))) {\n return new FilterEstimation(slots).estimate(filter.getPredicate(), stats);\n }\n } else if (groupExpression.getFirstChildPlan(LogicalJoin.class) != null) {\n LogicalJoin plan = (LogicalJoin) groupExpression.getFirstChildPlan(LogicalJoin.class);\n if (filter instanceof LogicalFilter\n && filter.getConjuncts().stream().anyMatch(e -> e instanceof IsNull)) {\n Statistics isNullStats = computeGeneratedIsNullStats((LogicalJoin) plan, filter);\n if (isNullStats != null) {\n \n stats = isNullStats;\n Set newConjuncts = filter.getConjuncts().stream()\n .filter(e -> !(e instanceof IsNull))\n .collect(Collectors.toSet());\n if (newConjuncts.isEmpty()) {\n return stats;\n } else {\n \n filter = ((LogicalFilter) filter).withConjunctsAndProps(newConjuncts,\n ((LogicalFilter) filter).getGroupExpression(),\n Optional.of(((LogicalFilter) filter).getLogicalProperties()), plan);\n }\n }\n }\n }\n return new FilterEstimation(false).estimate(filter.getPredicate(), stats);\n }\n\n private Statistics computeGeneratedIsNullStats(LogicalJoin join, Filter filter) {\n JoinType joinType = join.getJoinType();\n Plan left = join.left();\n Plan right = join.right();\n if (left == null || right == null\n || ((GroupPlan) left).getGroup() == null || ((GroupPlan) right).getGroup() == null\n || ((GroupPlan) left).getGroup().getStatistics() == null\n || ((GroupPlan) right).getGroup().getStatistics() == null\n || !join.getGroupExpression().isPresent()) {\n return null;\n }\n\n double leftRowCount = ((GroupPlan) left).getGroup().getStatistics().getRowCount();\n double rightRowCount = ((GroupPlan) right).getGroup().getStatistics().getRowCount();\n if (leftRowCount < 0 || Double.isInfinite(leftRowCount)\n || rightRowCount < 0 || Double.isInfinite(rightRowCount)) {\n return null;\n }\n\n Statistics origJoinStats = 
join.getGroupExpression().get().getOwnerGroup().getStatistics();\n\n \n \n if (joinType.isOuterJoin()) {\n boolean leftHasIsNull = false;\n boolean rightHasIsNull = false;\n boolean isLeftOuterJoin = join.getJoinType() == JoinType.LEFT_OUTER_JOIN;\n boolean isRightOuterJoin = join.getJoinType() == JoinType.RIGHT_OUTER_JOIN;\n boolean isFullOuterJoin = join.getJoinType() == JoinType.FULL_OUTER_JOIN;\n\n for (Expression expr : filter.getConjuncts()) {\n if (expr instanceof IsNull) {\n Expression child = ((IsNull) expr).child();\n if (PlanUtils.isColumnRef(child)) {\n LogicalPlan leftChild = (LogicalPlan) join.left();\n LogicalPlan rightChild = (LogicalPlan) join.right();\n leftHasIsNull = PlanUtils.checkSlotFrom(((GroupPlan) leftChild)\n .getGroup().getLogicalExpression().getPlan(), (SlotReference) child);\n rightHasIsNull = PlanUtils.checkSlotFrom(((GroupPlan) rightChild)\n .getGroup().getLogicalExpression().getPlan(), (SlotReference) child);\n }\n }\n }\n\n boolean isLeftAntiLikeJoin = (isLeftOuterJoin && rightHasIsNull) || (isFullOuterJoin && rightHasIsNull);\n boolean isRightAntiLikeJoin = (isRightOuterJoin && leftHasIsNull) || (isFullOuterJoin && leftHasIsNull);\n if (isLeftAntiLikeJoin || isRightAntiLikeJoin) {\n \n Statistics newStats = null;\n if (isLeftAntiLikeJoin) {\n LogicalJoin newJoin = join.withJoinType(JoinType.LEFT_ANTI_JOIN);\n StatsCalculator statsCalculator = new StatsCalculator(join.getGroupExpression().get(),\n false, getTotalColumnStatisticMap(), false,\n cteIdToStats, cascadesContext);\n\n newStats = ((Plan) newJoin).accept(statsCalculator, null);\n } else if (isRightAntiLikeJoin) {\n LogicalJoin newJoin = join.withJoinType(JoinType.RIGHT_ANTI_JOIN);\n StatsCalculator statsCalculator = new StatsCalculator(join.getGroupExpression().get(),\n false, this.getTotalColumnStatisticMap(), false,\n this.cteIdToStats, this.cascadesContext);\n\n newStats = ((Plan) newJoin).accept(statsCalculator, null);\n }\n newStats.enforceValid();\n\n double 
selectivity = Statistics.getValidSelectivity(\n newStats.getRowCount() / (leftRowCount * rightRowCount));\n double newRows = origJoinStats.getRowCount() * selectivity;\n\n newStats.withRowCount(newRows);\n return newStats;\n } else {\n return null;\n }\n } else {\n return null;\n }\n }\n\n private ColumnStatistic getColumnStatistic(TableIf table, String colName, long idxId) {\n ConnectContext connectContext = ConnectContext.get();\n if (connectContext != null && connectContext.getSessionVariable().internalSession) {\n return ColumnStatistic.UNKNOWN;\n }\n long catalogId;\n long dbId;\n try {\n catalogId = table.getDatabase().getCatalog().getId();\n dbId = table.getDatabase().getId();\n } catch (Exception e) {\n \n \n \n if (LOG.isDebugEnabled()) {\n LOG.debug(String.format(\"Fail to get catalog id and db id for table %s\", table.getName()));\n }\n catalogId = -1;\n dbId = -1;\n }\n return Env.getCurrentEnv().getStatisticsCache().getColumnStatistics(\n catalogId, dbId, table.getId(), idxId, colName);\n }\n\n private ColumnStatistic getColumnStatistic(TableIf table, String colName, long idxId, List partitionNames) {\n ConnectContext connectContext = ConnectContext.get();\n if (connectContext != null && connectContext.getSessionVariable().internalSession) {\n return ColumnStatistic.UNKNOWN;\n }\n long catalogId;\n long dbId;\n try {\n catalogId = table.getDatabase().getCatalog().getId();\n dbId = table.getDatabase().getId();\n } catch (Exception e) {\n \n \n \n if (LOG.isDebugEnabled()) {\n LOG.debug(String.format(\"Fail to get catalog id and db id for table %s\", table.getName()));\n }\n catalogId = -1;\n dbId = -1;\n }\n if (isPlayNereidsDump) {\n if (totalColumnStatisticMap.get(table.getName() + colName) != null) {\n return totalColumnStatisticMap.get(table.getName() + colName);\n } else {\n return ColumnStatistic.UNKNOWN;\n }\n } else {\n if (!partitionNames.isEmpty()) {\n PartitionColumnStatisticBuilder builder = new PartitionColumnStatisticBuilder();\n boolean 
hasUnknown = false;\n \n List pColStatsLists = new ArrayList<>(partitionNames.size());\n for (String partitionName : partitionNames) {\n PartitionColumnStatistic pcolStats = Env.getCurrentEnv().getStatisticsCache()\n .getPartitionColumnStatistics(\n catalogId, dbId, table.getId(), idxId, partitionName, colName);\n if (pcolStats.isUnKnown) {\n hasUnknown = true;\n break;\n } else {\n pColStatsLists.add(pcolStats);\n }\n }\n if (!hasUnknown) {\n boolean isFirst = true;\n \n for (PartitionColumnStatistic pcolStats : pColStatsLists) {\n if (isFirst) {\n builder = new PartitionColumnStatisticBuilder(pcolStats);\n isFirst = false;\n } else {\n builder.merge(pcolStats);\n }\n }\n return builder.toColumnStatistics();\n }\n }\n \n return Env.getCurrentEnv().getStatisticsCache().getColumnStatistics(\n catalogId, dbId, table.getId(), idxId, colName);\n }\n }\n\n /**\n * compute stats for catalogRelations except OlapScan\n */\n private Statistics computeCatalogRelation(CatalogRelation catalogRelation) {\n StatisticsBuilder builder = new StatisticsBuilder();\n \n if (!FeConstants.enableInternalSchemaDb\n || ConnectContext.get() == null\n || ConnectContext.get().getSessionVariable().internalSession) {\n builder.setRowCount(catalogRelation.getTable().getRowCountForNereids());\n for (Slot slot : catalogRelation.getOutput()) {\n builder.putColumnStatistics(slot, ColumnStatistic.UNKNOWN);\n }\n setHasUnknownColStatsInStatementContext();\n return builder.build();\n }\n\n List output = catalogRelation.getOutput();\n ImmutableSet.Builder slotSetBuilder = ImmutableSet.builderWithExpectedSize(output.size());\n for (Slot slot : output) {\n if (slot instanceof SlotReference) {\n slotSetBuilder.add((SlotReference) slot);\n }\n }\n Set slotSet = slotSetBuilder.build();\n\n double rowCount = catalogRelation.getTable().getRowCountForNereids();\n for (SlotReference slot : slotSet) {\n ColumnStatistic cache = getColumnStatsFromTableCache(catalogRelation, slot);\n ColumnStatisticBuilder 
colStatsBuilder = new ColumnStatisticBuilder(cache);\n adjustColStats(catalogRelation, slot, colStatsBuilder);\n rowCount = Math.max(rowCount, colStatsBuilder.getCount());\n builder.putColumnStatistics(slot, colStatsBuilder.build());\n }\n checkIfUnknownStatsUsedAsKey(builder);\n return builder.build();\n }\n\n private Statistics computeTopN(TopN topN) {\n Statistics stats = groupExpression.childStatistics(0);\n return stats.withRowCountAndEnforceValid(Math.min(stats.getRowCount(), topN.getLimit()));\n }\n\n private Statistics computePartitionTopN(PartitionTopN partitionTopN) {\n Statistics childStats = groupExpression.childStatistics(0);\n double rowCount = childStats.getRowCount();\n List partitionKeys = partitionTopN.getPartitionKeys();\n if (!partitionTopN.hasGlobalLimit() && !partitionKeys.isEmpty()) {\n \n \n List partitionByKeyStats = partitionKeys.stream()\n .map(partitionKey -> {\n ColumnStatistic partitionKeyStats = childStats.findColumnStatistics(partitionKey);\n if (partitionKeyStats == null) {\n partitionKeyStats = new ExpressionEstimation().visit(partitionKey, childStats);\n }\n return partitionKeyStats;\n })\n .filter(s -> !s.isUnKnown)\n .collect(Collectors.toList());\n if (partitionByKeyStats.isEmpty()) {\n \n rowCount = rowCount * DEFAULT_COLUMN_NDV_RATIO;\n } else {\n rowCount = Math.min(rowCount, partitionByKeyStats.stream().map(s -> s.ndv)\n .max(Double::compare).get() * partitionTopN.getPartitionLimit());\n }\n } else {\n rowCount = Math.min(rowCount, partitionTopN.getPartitionLimit());\n }\n \n \n \n return childStats.withRowCountAndEnforceValid(rowCount);\n }\n\n private Statistics computeLimit(Limit limit) {\n Statistics stats = groupExpression.childStatistics(0);\n return stats.withRowCountAndEnforceValid(Math.min(stats.getRowCount(), limit.getLimit()));\n }\n\n private double estimateGroupByRowCount(List groupByExpressions, Statistics childStats) {\n double rowCount = 1;\n \n \n \n if (groupByExpressions.isEmpty()) {\n return 1;\n }\n 
List groupByNdvs = new ArrayList<>();\n for (Expression groupByExpr : groupByExpressions) {\n ColumnStatistic colStats = childStats.findColumnStatistics(groupByExpr);\n if (colStats == null) {\n colStats = ExpressionEstimation.estimate(groupByExpr, childStats);\n }\n if (colStats.isUnKnown()) {\n rowCount = childStats.getRowCount() * DEFAULT_AGGREGATE_RATIO;\n rowCount = Math.max(1, rowCount);\n rowCount = Math.min(rowCount, childStats.getRowCount());\n return rowCount;\n }\n double ndv = colStats.ndv;\n groupByNdvs.add(ndv);\n }\n groupByNdvs.sort(Collections.reverseOrder());\n\n rowCount = groupByNdvs.get(0);\n for (int groupByIndex = 1; groupByIndex < groupByExpressions.size(); ++groupByIndex) {\n rowCount *= Math.max(1, groupByNdvs.get(groupByIndex) * Math.pow(\n AGGREGATE_COLUMN_CORRELATION_COEFFICIENT, groupByIndex + 1D));\n if (rowCount > childStats.getRowCount()) {\n rowCount = childStats.getRowCount();\n break;\n }\n }\n rowCount = Math.max(1, rowCount);\n rowCount = Math.min(rowCount, childStats.getRowCount());\n return rowCount;\n }\n\n private Statistics computeAggregate(Aggregate aggregate) {\n List groupByExpressions = aggregate.getGroupByExpressions();\n Statistics childStats = groupExpression.childStatistics(0);\n double rowCount = estimateGroupByRowCount(groupByExpressions, childStats);\n Map slotToColumnStats = Maps.newHashMap();\n List outputExpressions = aggregate.getOutputExpressions();\n \n \n double factor = childStats.getRowCount() / rowCount;\n for (NamedExpression outputExpression : outputExpressions) {\n ColumnStatistic columnStat = ExpressionEstimation.estimate(outputExpression, childStats);\n ColumnStatisticBuilder builder = new ColumnStatisticBuilder(columnStat);\n builder.setMinValue(columnStat.minValue / factor);\n builder.setMaxValue(columnStat.maxValue / factor);\n if (columnStat.ndv > rowCount) {\n builder.setNdv(rowCount);\n }\n builder.setDataSize(rowCount * outputExpression.getDataType().width());\n 
slotToColumnStats.put(outputExpression.toSlot(), columnStat);\n }\n return new Statistics(rowCount, 1, slotToColumnStats);\n \n }\n\n private Statistics computeRepeat(Repeat repeat) {\n Statistics childStats = groupExpression.childStatistics(0);\n Map slotIdToColumnStats = childStats.columnStatistics();\n int groupingSetNum = repeat.getGroupingSets().size();\n double rowCount = childStats.getRowCount();\n Map columnStatisticMap = slotIdToColumnStats.entrySet()\n .stream().map(kv -> {\n ColumnStatistic stats = kv.getValue();\n ColumnStatisticBuilder columnStatisticBuilder = new ColumnStatisticBuilder(stats);\n columnStatisticBuilder\n .setCount(stats.count < 0 ? stats.count : stats.count * groupingSetNum)\n .setNumNulls(stats.numNulls < 0 ? stats.numNulls : stats.numNulls * groupingSetNum)\n .setDataSize(stats.dataSize < 0 ? stats.dataSize : stats.dataSize * groupingSetNum);\n return Pair.of(kv.getKey(), columnStatisticBuilder.build());\n }).collect(Collectors.toMap(Pair::key, Pair::value, (item1, item2) -> item1));\n return new Statistics(rowCount < 0 ? 
rowCount : rowCount * groupingSetNum, 1, columnStatisticMap);\n }\n\n private Statistics computeProject(Project project) {\n List projections = project.getProjects();\n Statistics childStats = groupExpression.childStatistics(0);\n Map columnsStats = projections.stream().map(projection -> {\n ColumnStatistic columnStatistic = ExpressionEstimation.estimate(projection, childStats);\n return new SimpleEntry<>(projection.toSlot(), columnStatistic);\n }).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (item1, item2) -> item1));\n return new Statistics(childStats.getRowCount(), childStats.getWidthInJoinCluster(), columnsStats);\n }\n\n private Statistics computeOneRowRelation(List projects) {\n Map columnStatsMap = projects.stream()\n .map(project -> {\n ColumnStatistic statistic = new ColumnStatisticBuilder().setNdv(1).build();\n \n return Pair.of(project.toSlot(), statistic);\n })\n .collect(Collectors.toMap(Pair::key, Pair::value, (item1, item2) -> item1));\n int rowCount = 1;\n return new Statistics(rowCount, 1, columnStatsMap);\n }\n\n private Statistics computeEmptyRelation(EmptyRelation emptyRelation) {\n Map columnStatsMap = emptyRelation.getProjects()\n .stream()\n .map(project -> {\n ColumnStatisticBuilder columnStat = new ColumnStatisticBuilder()\n .setNdv(0)\n .setNumNulls(0)\n .setAvgSizeByte(0);\n return Pair.of(project.toSlot(), columnStat.build());\n })\n .collect(Collectors.toMap(Pair::key, Pair::value, (item1, item2) -> item1));\n int rowCount = 0;\n return new Statistics(rowCount, 1, columnStatsMap);\n }\n\n private Statistics computeUnion(Union union) {\n \n List head;\n Statistics headStats;\n List> childOutputs = Lists.newArrayList(union.getRegularChildrenOutputs());\n List childStats =\n groupExpression.children().stream().map(Group::getStatistics).collect(Collectors.toList());\n\n if (!union.getConstantExprsList().isEmpty()) {\n childOutputs.addAll(union.getConstantExprsList().stream()\n .map(l -> 
l.stream().map(NamedExpression::toSlot)\n .map(SlotReference.class::cast)\n .collect(Collectors.toList()))\n .collect(Collectors.toList()));\n childStats.addAll(union.getConstantExprsList().stream()\n .map(this::computeOneRowRelation)\n .collect(Collectors.toList()));\n }\n\n head = childOutputs.get(0);\n headStats = childStats.get(0);\n\n StatisticsBuilder statisticsBuilder = new StatisticsBuilder();\n List unionOutput = union.getOutputs();\n for (int i = 0; i < head.size(); i++) {\n double leftRowCount = headStats.getRowCount();\n Slot headSlot = head.get(i);\n for (int j = 1; j < childOutputs.size(); j++) {\n Slot slot = childOutputs.get(j).get(i);\n ColumnStatistic rightStatistic = childStats.get(j).findColumnStatistics(slot);\n double rightRowCount = childStats.get(j).getRowCount();\n ColumnStatistic estimatedColumnStatistics\n = unionColumn(headStats.findColumnStatistics(headSlot),\n headStats.getRowCount(), rightStatistic, rightRowCount, headSlot.getDataType());\n headStats.addColumnStats(headSlot, estimatedColumnStatistics);\n leftRowCount += childStats.get(j).getRowCount();\n }\n statisticsBuilder.setRowCount(leftRowCount);\n statisticsBuilder.putColumnStatistics(unionOutput.get(i), headStats.findColumnStatistics(headSlot));\n }\n return statisticsBuilder.setWidthInJoinCluster(1).build();\n }\n\n private Statistics computeExcept(SetOperation setOperation) {\n Statistics leftStats = groupExpression.childStatistics(0);\n List operatorOutput = setOperation.getOutputs();\n List childSlots = setOperation.getRegularChildOutput(0);\n StatisticsBuilder statisticsBuilder = new StatisticsBuilder();\n for (int i = 0; i < operatorOutput.size(); i++) {\n ColumnStatistic columnStatistic = leftStats.findColumnStatistics(childSlots.get(i));\n statisticsBuilder.putColumnStatistics(operatorOutput.get(i), columnStatistic);\n }\n statisticsBuilder.setRowCount(leftStats.getRowCount());\n return statisticsBuilder.setWidthInJoinCluster(1).build();\n }\n\n private Statistics 
computeIntersect(SetOperation setOperation) {\n Statistics leftChildStats = groupExpression.childStatistics(0);\n double rowCount = leftChildStats.getRowCount();\n for (int i = 1; i < setOperation.getArity(); ++i) {\n rowCount = Math.min(rowCount, groupExpression.childStatistics(i).getRowCount());\n }\n double minProd = Double.POSITIVE_INFINITY;\n for (Group group : groupExpression.children()) {\n Statistics statistics = group.getStatistics();\n double prod = 1.0;\n for (ColumnStatistic columnStatistic : statistics.columnStatistics().values()) {\n prod *= columnStatistic.ndv;\n }\n if (minProd < prod) {\n minProd = prod;\n }\n }\n rowCount = Math.min(rowCount, minProd);\n List outputs = setOperation.getOutputs();\n List leftChildOutputs = setOperation.getRegularChildOutput(0);\n for (int i = 0; i < outputs.size(); i++) {\n leftChildStats.addColumnStats(outputs.get(i),\n leftChildStats.findColumnStatistics(leftChildOutputs.get(i)));\n }\n return new StatisticsBuilder(leftChildStats.withRowCountAndEnforceValid(rowCount))\n .setWidthInJoinCluster(1).build();\n }\n\n private Statistics computeGenerate(Generate generate) {\n Statistics stats = groupExpression.childStatistics(0);\n int statsFactor = ConnectContext.get().getSessionVariable().generateStatsFactor;\n double count = stats.getRowCount() * generate.getGeneratorOutput().size() * statsFactor;\n Map columnStatsMap = Maps.newHashMap();\n for (Map.Entry entry : stats.columnStatistics().entrySet()) {\n ColumnStatistic columnStatistic = new ColumnStatisticBuilder(entry.getValue()).setCount(count).build();\n columnStatsMap.put(entry.getKey(), columnStatistic);\n }\n for (Slot output : generate.getGeneratorOutput()) {\n ColumnStatistic columnStatistic = new ColumnStatisticBuilder()\n .setCount(count)\n .setMinValue(Double.NEGATIVE_INFINITY)\n .setMaxValue(Double.POSITIVE_INFINITY)\n .setNdv(count)\n .setNumNulls(0)\n .setAvgSizeByte(output.getDataType().width())\n .build();\n columnStatsMap.put(output, 
columnStatistic);\n }\n return new Statistics(count, 1, columnStatsMap);\n }\n\n private Statistics computeWindow(Window windowOperator) {\n Statistics childStats = groupExpression.childStatistics(0);\n Map childColumnStats = childStats.columnStatistics();\n Map columnStatisticMap = windowOperator.getWindowExpressions().stream()\n .map(expr -> {\n Preconditions.checkArgument(expr instanceof Alias\n && expr.child(0) instanceof WindowExpression,\n \"need WindowExpression, but we meet \" + expr);\n WindowExpression windExpr = (WindowExpression) expr.child(0);\n ColumnStatisticBuilder colStatsBuilder = new ColumnStatisticBuilder();\n colStatsBuilder.setCount(childStats.getRowCount())\n .setOriginal(null);\n\n Double partitionCount = windExpr.getPartitionKeys().stream().map(key -> {\n ColumnStatistic keyStats = childStats.findColumnStatistics(key);\n if (keyStats == null) {\n keyStats = new ExpressionEstimation().visit(key, childStats);\n }\n return keyStats;\n })\n .filter(columnStatistic -> !columnStatistic.isUnKnown)\n .map(colStats -> colStats.ndv).max(Double::compare)\n .orElseGet(() -> -1.0);\n\n if (partitionCount == -1.0) {\n \n colStatsBuilder.setCount(childStats.getRowCount())\n .setNdv(1)\n .setMinValue(Double.NEGATIVE_INFINITY)\n .setMaxValue(Double.POSITIVE_INFINITY);\n } else {\n partitionCount = Math.max(1, partitionCount);\n if (windExpr.getFunction() instanceof AggregateFunction) {\n if (windExpr.getFunction() instanceof Count) {\n colStatsBuilder.setNdv(1)\n .setMinValue(0)\n .setMinExpr(new IntLiteral(0))\n .setMaxValue(childStats.getRowCount())\n .setMaxExpr(new IntLiteral((long) childStats.getRowCount()));\n } else if (windExpr.getFunction() instanceof Min\n || windExpr.getFunction() instanceof Max) {\n Expression minmaxChild = windExpr.getFunction().child(0);\n ColumnStatistic minChildStats = new ExpressionEstimation()\n .visit(minmaxChild, childStats);\n colStatsBuilder.setNdv(1)\n .setMinValue(minChildStats.minValue)\n 
.setMinExpr(minChildStats.minExpr)\n .setMaxValue(minChildStats.maxValue)\n .setMaxExpr(minChildStats.maxExpr);\n } else {\n \n colStatsBuilder.setNdv(1).setMinValue(Double.NEGATIVE_INFINITY)\n .setMaxValue(Double.POSITIVE_INFINITY);\n }\n } else {\n \n colStatsBuilder.setNdv(childStats.getRowCount() / partitionCount)\n .setMinValue(0)\n .setMinExpr(new IntLiteral(0))\n .setMaxValue(childStats.getRowCount())\n .setMaxExpr(new IntLiteral((long) childStats.getRowCount()));\n }\n }\n return Pair.of(expr.toSlot(), colStatsBuilder.build());\n }).collect(Collectors.toMap(Pair::key, Pair::value, (item1, item2) -> item1));\n columnStatisticMap.putAll(childColumnStats);\n return new Statistics(childStats.getRowCount(), 1, columnStatisticMap);\n }\n\n private ColumnStatistic unionColumn(ColumnStatistic leftStats, double leftRowCount, ColumnStatistic rightStats,\n double rightRowCount, DataType dataType) {\n ColumnStatisticBuilder columnStatisticBuilder = new ColumnStatisticBuilder();\n columnStatisticBuilder.setMaxValue(Math.max(leftStats.maxValue, rightStats.maxValue));\n columnStatisticBuilder.setMinValue(Math.min(leftStats.minValue, rightStats.minValue));\n StatisticRange leftRange = StatisticRange.from(leftStats, dataType);\n StatisticRange rightRange = StatisticRange.from(rightStats, dataType);\n StatisticRange newRange = leftRange.union(rightRange);\n double newRowCount = leftRowCount + rightRowCount;\n double leftSize = (leftRowCount - leftStats.numNulls) * leftStats.avgSizeByte;\n double rightSize = (rightRowCount - rightStats.numNulls) * rightStats.avgSizeByte;\n double newNullFraction = (leftStats.numNulls + rightStats.numNulls) / StatsMathUtil.maxNonNaN(1, newRowCount);\n double newNonNullRowCount = newRowCount * (1 - newNullFraction);\n\n double newAverageRowSize = newNonNullRowCount == 0 ? 
0 : (leftSize + rightSize) / newNonNullRowCount;\n columnStatisticBuilder.setMinValue(newRange.getLow())\n .setMaxValue(newRange.getHigh())\n .setNdv(newRange.getDistinctValues())\n .setNumNulls(leftStats.numNulls + rightStats.numNulls)\n .setAvgSizeByte(newAverageRowSize);\n return columnStatisticBuilder.build();\n }\n\n @Override\n public Statistics visitLogicalCTEProducer(LogicalCTEProducer cteProducer, Void context) {\n StatisticsBuilder builder = new StatisticsBuilder(groupExpression.childStatistics(0));\n Statistics statistics = builder.setWidthInJoinCluster(1).build();\n cteIdToStats.put(cteProducer.getCteId(), statistics);\n return statistics;\n }\n\n @Override\n public Statistics visitLogicalCTEConsumer(LogicalCTEConsumer cteConsumer, Void context) {\n CTEId cteId = cteConsumer.getCteId();\n cascadesContext.addCTEConsumerGroup(cteConsumer.getCteId(), groupExpression.getOwnerGroup(),\n cteConsumer.getProducerToConsumerOutputMap());\n Statistics prodStats = cteIdToStats.get(cteId);\n Preconditions.checkArgument(prodStats != null, String.format(\"Stats for CTE: %s not found\", cteId));\n Statistics consumerStats = new Statistics(prodStats.getRowCount(), 1, new HashMap<>());\n for (Slot slot : cteConsumer.getOutput()) {\n Slot prodSlot = cteConsumer.getProducerSlot(slot);\n ColumnStatistic colStats = prodStats.columnStatistics().get(prodSlot);\n if (colStats == null) {\n continue;\n }\n consumerStats.addColumnStats(slot, colStats);\n }\n return consumerStats;\n }\n\n @Override\n public Statistics visitLogicalCTEAnchor(LogicalCTEAnchor cteAnchor, Void context) {\n return groupExpression.childStatistics(1);\n }\n\n @Override\n public Statistics visitPhysicalCTEProducer(PhysicalCTEProducer cteProducer,\n Void context) {\n Statistics statistics = new StatisticsBuilder(groupExpression.childStatistics(0))\n .setWidthInJoinCluster(1).build();\n cteIdToStats.put(cteProducer.getCteId(), statistics);\n cascadesContext.updateConsumerStats(cteProducer.getCteId(), 
statistics);\n return statistics;\n }\n\n @Override\n public Statistics visitPhysicalCTEConsumer(PhysicalCTEConsumer cteConsumer, Void context) {\n cascadesContext.addCTEConsumerGroup(cteConsumer.getCteId(), groupExpression.getOwnerGroup(),\n cteConsumer.getProducerToConsumerSlotMap());\n CTEId cteId = cteConsumer.getCteId();\n Statistics prodStats = cteIdToStats.get(cteId);\n if (prodStats == null) {\n prodStats = groupExpression.getOwnerGroup().getStatistics();\n }\n Preconditions.checkArgument(prodStats != null, String.format(\"Stats for CTE: %s not found\", cteId));\n Statistics consumerStats = new Statistics(prodStats.getRowCount(), 1, new HashMap<>());\n for (Slot slot : cteConsumer.getOutput()) {\n Slot prodSlot = cteConsumer.getProducerSlot(slot);\n ColumnStatistic colStats = prodStats.columnStatistics().get(prodSlot);\n if (colStats == null) {\n continue;\n }\n consumerStats.addColumnStats(slot, colStats);\n }\n return consumerStats;\n }\n\n @Override\n public Statistics visitPhysicalCTEAnchor(\n PhysicalCTEAnchor cteAnchor, Void context) {\n return groupExpression.childStatistics(1);\n }\n}", "context_after": "class StatsCalculator extends DefaultPlanVisitor {\n public static double DEFAULT_AGGREGATE_RATIO = 0.5;\n public static double AGGREGATE_COLUMN_CORRELATION_COEFFICIENT = 0.75;\n public static double DEFAULT_COLUMN_NDV_RATIO = 0.5;\n\n private static final Logger LOG = LogManager.getLogger(StatsCalculator.class);\n private final GroupExpression groupExpression;\n\n private boolean forbidUnknownColStats = false;\n\n private Map totalColumnStatisticMap = new HashMap<>();\n\n private boolean isPlayNereidsDump = false;\n\n private Map totalHistogramMap = new HashMap<>();\n\n private Map cteIdToStats;\n\n private CascadesContext cascadesContext;\n\n private StatsCalculator(GroupExpression groupExpression, boolean forbidUnknownColStats,\n Map columnStatisticMap, boolean isPlayNereidsDump,\n Map cteIdToStats, CascadesContext context) {\n 
this.groupExpression = groupExpression;\n this.forbidUnknownColStats = forbidUnknownColStats;\n this.totalColumnStatisticMap = columnStatisticMap;\n this.isPlayNereidsDump = isPlayNereidsDump;\n this.cteIdToStats = Objects.requireNonNull(cteIdToStats, \"CTEIdToStats can't be null\");\n this.cascadesContext = context;\n }\n\n public Map getTotalHistogramMap() {\n return totalHistogramMap;\n }\n\n public void setTotalHistogramMap(Map totalHistogramMap) {\n this.totalHistogramMap = totalHistogramMap;\n }\n\n public Map getTotalColumnStatisticMap() {\n return totalColumnStatisticMap;\n }\n\n /**\n * estimate stats\n */\n public static StatsCalculator estimate(GroupExpression groupExpression, boolean forbidUnknownColStats,\n Map columnStatisticMap, boolean isPlayNereidsDump,\n Map cteIdToStats, CascadesContext context) {\n StatsCalculator statsCalculator = new StatsCalculator(\n groupExpression, forbidUnknownColStats, columnStatisticMap, isPlayNereidsDump, cteIdToStats, context);\n statsCalculator.estimate();\n return statsCalculator;\n }\n\n public static StatsCalculator estimate(GroupExpression groupExpression, boolean forbidUnknownColStats,\n Map columnStatisticMap, boolean isPlayNereidsDump, CascadesContext context) {\n return StatsCalculator.estimate(groupExpression,\n forbidUnknownColStats,\n columnStatisticMap,\n isPlayNereidsDump,\n new HashMap<>(), context);\n }\n\n \n public static void estimate(GroupExpression groupExpression, CascadesContext context) {\n StatsCalculator statsCalculator = new StatsCalculator(groupExpression, false,\n new HashMap<>(), false, Collections.emptyMap(), context);\n statsCalculator.estimate();\n }\n\n private void estimate() {\n Plan plan = groupExpression.getPlan();\n Statistics newStats = plan.accept(this, null);\n newStats.enforceValid();\n\n \n if (groupExpression.getOwnerGroup().getStatistics() == null) {\n boolean isReliable = groupExpression.getPlan().getExpressions().stream()\n .noneMatch(e -> 
newStats.isInputSlotsUnknown(e.getInputSlots()));\n groupExpression.getOwnerGroup().setStatsReliable(isReliable);\n groupExpression.getOwnerGroup().setStatistics(newStats);\n groupExpression.setEstOutputRowCount(newStats.getRowCount());\n } else {\n \n \n \n \n \n \n \n \n \n \n \n groupExpression.getOwnerGroup().getStatistics().updateNdv(newStats);\n }\n groupExpression.setStatDerived(true);\n }\n\n @Override\n public Statistics visitLogicalSink(LogicalSink logicalSink, Void context) {\n return groupExpression.childStatistics(0);\n }\n\n @Override\n public Statistics visitLogicalEmptyRelation(LogicalEmptyRelation emptyRelation, Void context) {\n return computeEmptyRelation(emptyRelation);\n }\n\n @Override\n public Statistics visitLogicalLimit(LogicalLimit limit, Void context) {\n return computeLimit(limit);\n }\n\n @Override\n public Statistics visitPhysicalLimit(PhysicalLimit limit, Void context) {\n return computeLimit(limit);\n }\n\n @Override\n public Statistics visitLogicalOneRowRelation(LogicalOneRowRelation oneRowRelation, Void context) {\n return computeOneRowRelation(oneRowRelation.getProjects());\n }\n\n @Override\n public Statistics visitLogicalAggregate(LogicalAggregate aggregate, Void context) {\n return computeAggregate(aggregate);\n }\n\n @Override\n public Statistics visitLogicalRepeat(LogicalRepeat repeat, Void context) {\n return computeRepeat(repeat);\n }\n\n @Override\n public Statistics visitLogicalFilter(LogicalFilter filter, Void context) {\n return computeFilter(filter);\n }\n\n /**\n * returns the sum of deltaRowCount for all selected partitions or for the table.\n */\n private long computeDeltaRowCount(OlapScan olapScan, SlotReference slot) {\n AnalysisManager analysisManager = Env.getCurrentEnv().getAnalysisManager();\n TableStatsMeta tableMeta = analysisManager.findTableStatsStatus(olapScan.getTable().getId());\n\n long deltaRowCount = 0;\n if (tableMeta != null) {\n ColStatsMeta colMeta = tableMeta.findColumnStatsMeta(\n 
olapScan.getTable().getIndexNameById(olapScan.getSelectedIndexId()), slot.getName());\n if (colMeta != null) {\n if (olapScan.getSelectedPartitionIds().isEmpty()) {\n deltaRowCount = tableMeta.updatedRows.get() - colMeta.updatedRows;\n } else {\n \n for (long partitionId : olapScan.getSelectedPartitionIds()) {\n deltaRowCount += tableMeta.partitionUpdateRows.getOrDefault(partitionId, 0L)\n - colMeta.partitionUpdateRows.getOrDefault(partitionId, 0L);\n }\n }\n }\n }\n return deltaRowCount;\n }\n\n private void adjustColStats(CatalogRelation catalogRelation, SlotReference slot,\n ColumnStatisticBuilder builder) {\n if (builder.getAvgSizeByte() <= 0) {\n builder.setAvgSizeByte(slot.getDataType().toCatalogDataType().getSlotSize());\n }\n if (catalogRelation instanceof OlapScan) {\n OlapScan olapScan = (OlapScan) catalogRelation;\n long delta = computeDeltaRowCount(olapScan, slot);\n if (delta > 0) {\n builder.setCount(builder.getCount() + delta);\n \n \n \n \n builder.setMinExpr(null).setMinValue(Double.NEGATIVE_INFINITY)\n .setMaxExpr(null).setMaxValue(Double.POSITIVE_INFINITY);\n }\n }\n }\n\n private ColumnStatistic getColumnStatsFromTableCache(CatalogRelation catalogRelation, SlotReference slot) {\n long idxId = -1;\n if (catalogRelation instanceof OlapScan) {\n idxId = ((OlapScan) catalogRelation).getSelectedIndexIdForMV();\n }\n return getColumnStatistic(catalogRelation.getTable(), slot.getName(), idxId);\n }\n\n private ColumnStatistic getColumnStatsFromPartitionCache(CatalogRelation catalogRelation, SlotReference slot,\n List partitionNames) {\n long idxId = -1;\n if (catalogRelation instanceof OlapScan) {\n idxId = ((OlapScan) catalogRelation).getSelectedIndexIdForMV();\n }\n return getColumnStatistic(catalogRelation.getTable(), slot.getName(), idxId, partitionNames);\n }\n\n private long getSelectedPartitionRowCount(OlapScan olapScan) {\n long partRowCountSum = 0;\n for (long id : olapScan.getSelectedPartitionIds()) {\n long partRowCount = 
olapScan.getTable().getPartition(id).getBaseIndex().getRowCount();\n \n if (partRowCount <= 0) {\n return -1;\n }\n partRowCountSum += partRowCount;\n }\n return partRowCountSum;\n }\n\n private void setHasUnknownColStatsInStatementContext() {\n if (ConnectContext.get() != null && ConnectContext.get().getStatementContext() != null) {\n ConnectContext.get().getStatementContext().setHasUnknownColStats(true);\n }\n }\n\n private void checkIfUnknownStatsUsedAsKey(StatisticsBuilder builder) {\n if (ConnectContext.get() != null && ConnectContext.get().getStatementContext() != null) {\n for (Map.Entry entry : builder.getExpressionColumnStatsEntries()) {\n if (entry.getKey() instanceof SlotReference\n && ConnectContext.get().getStatementContext().isKeySlot((SlotReference) entry.getKey())) {\n if (entry.getValue().isUnKnown) {\n ConnectContext.get().getStatementContext().setHasUnknownColStats(true);\n break;\n }\n }\n }\n }\n }\n\n \n\n @Override\n public Statistics visitLogicalOlapScan(LogicalOlapScan olapScan, Void context) {\n return computeOlapScan(olapScan);\n }\n\n private boolean isVisibleSlotReference(Slot slot) {\n if (slot instanceof SlotReference) {\n Optional colOpt = ((SlotReference) slot).getColumn();\n if (colOpt.isPresent()) {\n return colOpt.get().isVisible();\n }\n }\n return false;\n }\n\n @Override\n public Statistics visitLogicalDeferMaterializeOlapScan(LogicalDeferMaterializeOlapScan deferMaterializeOlapScan,\n Void context) {\n return computeOlapScan(deferMaterializeOlapScan.getLogicalOlapScan());\n }\n\n @Override\n public Statistics visitLogicalSchemaScan(LogicalSchemaScan schemaScan, Void context) {\n return computeCatalogRelation(schemaScan);\n }\n\n @Override\n public Statistics visitLogicalFileScan(LogicalFileScan fileScan, Void context) {\n fileScan.getExpressions();\n return computeCatalogRelation(fileScan);\n }\n\n @Override\n public Statistics visitLogicalHudiScan(LogicalHudiScan fileScan, Void context) {\n return 
computeCatalogRelation(fileScan);\n }\n\n @Override\n public Statistics visitLogicalTVFRelation(LogicalTVFRelation tvfRelation, Void context) {\n return tvfRelation.getFunction().computeStats(tvfRelation.getOutput());\n }\n\n @Override\n public Statistics visitLogicalJdbcScan(LogicalJdbcScan jdbcScan, Void context) {\n jdbcScan.getExpressions();\n return computeCatalogRelation(jdbcScan);\n }\n\n @Override\n public Statistics visitLogicalOdbcScan(LogicalOdbcScan odbcScan, Void context) {\n odbcScan.getExpressions();\n return computeCatalogRelation(odbcScan);\n }\n\n @Override\n public Statistics visitLogicalEsScan(LogicalEsScan esScan, Void context) {\n esScan.getExpressions();\n return computeCatalogRelation(esScan);\n }\n\n @Override\n public Statistics visitLogicalProject(LogicalProject project, Void context) {\n return computeProject(project);\n }\n\n @Override\n public Statistics visitLogicalSort(LogicalSort sort, Void context) {\n return groupExpression.childStatistics(0);\n }\n\n @Override\n public Statistics visitLogicalTopN(LogicalTopN topN, Void context) {\n return computeTopN(topN);\n }\n\n @Override\n public Statistics visitLogicalDeferMaterializeTopN(LogicalDeferMaterializeTopN topN, Void context) {\n return computeTopN(topN.getLogicalTopN());\n }\n\n @Override\n public Statistics visitLogicalPartitionTopN(LogicalPartitionTopN partitionTopN, Void context) {\n return computePartitionTopN(partitionTopN);\n }\n\n @Override\n public Statistics visitLogicalJoin(LogicalJoin join, Void context) {\n Statistics joinStats = JoinEstimation.estimate(groupExpression.childStatistics(0),\n groupExpression.childStatistics(1), join);\n joinStats = new StatisticsBuilder(joinStats).setWidthInJoinCluster(\n groupExpression.childStatistics(0).getWidthInJoinCluster()\n + groupExpression.childStatistics(1).getWidthInJoinCluster()).build();\n return joinStats;\n }\n\n @Override\n public Statistics visitLogicalAssertNumRows(\n LogicalAssertNumRows assertNumRows, Void context) 
{\n return computeAssertNumRows(assertNumRows.getAssertNumRowsElement());\n }\n\n @Override\n public Statistics visitLogicalUnion(\n LogicalUnion union, Void context) {\n return computeUnion(union);\n }\n\n @Override\n public Statistics visitLogicalExcept(\n LogicalExcept except, Void context) {\n return computeExcept(except);\n }\n\n @Override\n public Statistics visitLogicalIntersect(\n LogicalIntersect intersect, Void context) {\n return computeIntersect(intersect);\n }\n\n @Override\n public Statistics visitLogicalGenerate(LogicalGenerate generate, Void context) {\n return computeGenerate(generate);\n }\n\n @Override\n public Statistics visitLogicalWindow(LogicalWindow window, Void context) {\n return computeWindow(window);\n }\n\n @Override\n public Statistics visitPhysicalSink(PhysicalSink physicalSink, Void context) {\n return groupExpression.childStatistics(0);\n }\n\n @Override\n public Statistics visitPhysicalWindow(PhysicalWindow window, Void context) {\n return computeWindow(window);\n }\n\n @Override\n public Statistics visitPhysicalPartitionTopN(PhysicalPartitionTopN partitionTopN, Void context) {\n return computePartitionTopN(partitionTopN);\n }\n\n @Override\n public Statistics visitPhysicalEmptyRelation(PhysicalEmptyRelation emptyRelation, Void context) {\n return computeEmptyRelation(emptyRelation);\n }\n\n @Override\n public Statistics visitPhysicalHashAggregate(PhysicalHashAggregate agg, Void context) {\n return computeAggregate(agg);\n }\n\n @Override\n public Statistics visitPhysicalRepeat(PhysicalRepeat repeat, Void context) {\n return computeRepeat(repeat);\n }\n\n @Override\n public Statistics visitPhysicalOneRowRelation(PhysicalOneRowRelation oneRowRelation, Void context) {\n return computeOneRowRelation(oneRowRelation.getProjects());\n }\n\n @Override\n public Statistics visitPhysicalOlapScan(PhysicalOlapScan olapScan, Void context) {\n return computeCatalogRelation(olapScan);\n }\n\n @Override\n public Statistics 
visitPhysicalDeferMaterializeOlapScan(PhysicalDeferMaterializeOlapScan deferMaterializeOlapScan,\n Void context) {\n return computeCatalogRelation(deferMaterializeOlapScan.getPhysicalOlapScan());\n }\n\n @Override\n public Statistics visitPhysicalSchemaScan(PhysicalSchemaScan schemaScan, Void context) {\n return computeCatalogRelation(schemaScan);\n }\n\n @Override\n public Statistics visitPhysicalFileScan(PhysicalFileScan fileScan, Void context) {\n return computeCatalogRelation(fileScan);\n }\n\n @Override\n public Statistics visitPhysicalStorageLayerAggregate(\n PhysicalStorageLayerAggregate storageLayerAggregate, Void context) {\n return storageLayerAggregate.getRelation().accept(this, context);\n }\n\n @Override\n public Statistics visitPhysicalTVFRelation(PhysicalTVFRelation tvfRelation, Void context) {\n return tvfRelation.getFunction().computeStats(tvfRelation.getOutput());\n }\n\n @Override\n public Statistics visitPhysicalJdbcScan(PhysicalJdbcScan jdbcScan, Void context) {\n return computeCatalogRelation(jdbcScan);\n }\n\n @Override\n public Statistics visitPhysicalOdbcScan(PhysicalOdbcScan odbcScan, Void context) {\n return computeCatalogRelation(odbcScan);\n }\n\n @Override\n public Statistics visitPhysicalEsScan(PhysicalEsScan esScan, Void context) {\n return computeCatalogRelation(esScan);\n }\n\n @Override\n public Statistics visitPhysicalQuickSort(PhysicalQuickSort sort, Void context) {\n return groupExpression.childStatistics(0);\n }\n\n @Override\n public Statistics visitPhysicalTopN(PhysicalTopN topN, Void context) {\n return computeTopN(topN);\n }\n\n @Override\n public Statistics visitPhysicalDeferMaterializeTopN(PhysicalDeferMaterializeTopN topN,\n Void context) {\n return computeTopN(topN.getPhysicalTopN());\n }\n\n @Override\n public Statistics visitPhysicalHashJoin(\n PhysicalHashJoin hashJoin, Void context) {\n return JoinEstimation.estimate(groupExpression.childStatistics(0),\n groupExpression.childStatistics(1), hashJoin);\n }\n\n 
@Override\n public Statistics visitPhysicalNestedLoopJoin(\n PhysicalNestedLoopJoin nestedLoopJoin,\n Void context) {\n return JoinEstimation.estimate(groupExpression.childStatistics(0),\n groupExpression.childStatistics(1), nestedLoopJoin);\n }\n\n \n @Override\n public Statistics visitPhysicalProject(PhysicalProject project, Void context) {\n return computeProject(project);\n }\n\n @Override\n public Statistics visitPhysicalFilter(PhysicalFilter filter, Void context) {\n return computeFilter(filter);\n }\n\n @Override\n public Statistics visitPhysicalDistribute(PhysicalDistribute distribute,\n Void context) {\n return groupExpression.childStatistics(0);\n }\n\n @Override\n public Statistics visitPhysicalAssertNumRows(PhysicalAssertNumRows assertNumRows,\n Void context) {\n return computeAssertNumRows(assertNumRows.getAssertNumRowsElement());\n }\n\n @Override\n public Statistics visitPhysicalUnion(PhysicalUnion union, Void context) {\n return computeUnion(union);\n }\n\n @Override\n public Statistics visitPhysicalExcept(PhysicalExcept except, Void context) {\n return computeExcept(except);\n }\n\n @Override\n public Statistics visitPhysicalIntersect(PhysicalIntersect intersect, Void context) {\n return computeIntersect(intersect);\n }\n\n @Override\n public Statistics visitPhysicalGenerate(PhysicalGenerate generate, Void context) {\n return computeGenerate(generate);\n }\n\n private Statistics computeAssertNumRows(AssertNumRowsElement assertNumRowsElement) {\n Statistics statistics = groupExpression.childStatistics(0);\n long newRowCount;\n long rowCount = (long) statistics.getRowCount();\n long desiredNumOfRows = assertNumRowsElement.getDesiredNumOfRows();\n switch (assertNumRowsElement.getAssertion()) {\n case EQ:\n newRowCount = desiredNumOfRows;\n break;\n case GE:\n newRowCount = statistics.getRowCount() >= desiredNumOfRows ? rowCount : desiredNumOfRows;\n break;\n case GT:\n newRowCount = statistics.getRowCount() > desiredNumOfRows ? 
rowCount : desiredNumOfRows;\n break;\n case LE:\n newRowCount = statistics.getRowCount() <= desiredNumOfRows ? rowCount : desiredNumOfRows;\n break;\n case LT:\n newRowCount = statistics.getRowCount() < desiredNumOfRows ? rowCount : desiredNumOfRows;\n break;\n case NE:\n return statistics;\n default:\n throw new IllegalArgumentException(\"Unknown assertion: \" + assertNumRowsElement.getAssertion());\n }\n Statistics newStatistics = statistics.withRowCountAndEnforceValid(newRowCount);\n return new StatisticsBuilder(newStatistics).setWidthInJoinCluster(1).build();\n }\n\n private Statistics computeFilter(Filter filter) {\n Statistics stats = groupExpression.childStatistics(0);\n if (groupExpression.getFirstChildPlan(OlapScan.class) != null) {\n return new FilterEstimation(true).estimate(filter.getPredicate(), stats);\n }\n if (groupExpression.getFirstChildPlan(Aggregate.class) != null) {\n Aggregate agg = (Aggregate) groupExpression.getFirstChildPlan(Aggregate.class);\n List expressions = agg.getOutputExpressions();\n Set slots = expressions\n .stream()\n .filter(Alias.class::isInstance)\n .filter(s -> ((Alias) s).child().anyMatch(AggregateFunction.class::isInstance))\n .map(NamedExpression::toSlot).collect(Collectors.toSet());\n Expression predicate = filter.getPredicate();\n if (predicate.anyMatch(s -> slots.contains(s))) {\n return new FilterEstimation(slots).estimate(filter.getPredicate(), stats);\n }\n } else if (groupExpression.getFirstChildPlan(LogicalJoin.class) != null) {\n LogicalJoin plan = (LogicalJoin) groupExpression.getFirstChildPlan(LogicalJoin.class);\n if (filter instanceof LogicalFilter\n && filter.getConjuncts().stream().anyMatch(e -> e instanceof IsNull)) {\n Statistics isNullStats = computeGeneratedIsNullStats((LogicalJoin) plan, filter);\n if (isNullStats != null) {\n \n stats = isNullStats;\n Set newConjuncts = filter.getConjuncts().stream()\n .filter(e -> !(e instanceof IsNull))\n .collect(Collectors.toSet());\n if (newConjuncts.isEmpty()) 
{\n return stats;\n } else {\n \n filter = ((LogicalFilter) filter).withConjunctsAndProps(newConjuncts,\n ((LogicalFilter) filter).getGroupExpression(),\n Optional.of(((LogicalFilter) filter).getLogicalProperties()), plan);\n }\n }\n }\n }\n return new FilterEstimation(false).estimate(filter.getPredicate(), stats);\n }\n\n private Statistics computeGeneratedIsNullStats(LogicalJoin join, Filter filter) {\n JoinType joinType = join.getJoinType();\n Plan left = join.left();\n Plan right = join.right();\n if (left == null || right == null\n || ((GroupPlan) left).getGroup() == null || ((GroupPlan) right).getGroup() == null\n || ((GroupPlan) left).getGroup().getStatistics() == null\n || ((GroupPlan) right).getGroup().getStatistics() == null\n || !join.getGroupExpression().isPresent()) {\n return null;\n }\n\n double leftRowCount = ((GroupPlan) left).getGroup().getStatistics().getRowCount();\n double rightRowCount = ((GroupPlan) right).getGroup().getStatistics().getRowCount();\n if (leftRowCount < 0 || Double.isInfinite(leftRowCount)\n || rightRowCount < 0 || Double.isInfinite(rightRowCount)) {\n return null;\n }\n\n Statistics origJoinStats = join.getGroupExpression().get().getOwnerGroup().getStatistics();\n\n \n \n if (joinType.isOuterJoin()) {\n boolean leftHasIsNull = false;\n boolean rightHasIsNull = false;\n boolean isLeftOuterJoin = join.getJoinType() == JoinType.LEFT_OUTER_JOIN;\n boolean isRightOuterJoin = join.getJoinType() == JoinType.RIGHT_OUTER_JOIN;\n boolean isFullOuterJoin = join.getJoinType() == JoinType.FULL_OUTER_JOIN;\n\n for (Expression expr : filter.getConjuncts()) {\n if (expr instanceof IsNull) {\n Expression child = ((IsNull) expr).child();\n if (PlanUtils.isColumnRef(child)) {\n LogicalPlan leftChild = (LogicalPlan) join.left();\n LogicalPlan rightChild = (LogicalPlan) join.right();\n leftHasIsNull = PlanUtils.checkSlotFrom(((GroupPlan) leftChild)\n .getGroup().getLogicalExpression().getPlan(), (SlotReference) child);\n rightHasIsNull = 
PlanUtils.checkSlotFrom(((GroupPlan) rightChild)\n .getGroup().getLogicalExpression().getPlan(), (SlotReference) child);\n }\n }\n }\n\n boolean isLeftAntiLikeJoin = (isLeftOuterJoin && rightHasIsNull) || (isFullOuterJoin && rightHasIsNull);\n boolean isRightAntiLikeJoin = (isRightOuterJoin && leftHasIsNull) || (isFullOuterJoin && leftHasIsNull);\n if (isLeftAntiLikeJoin || isRightAntiLikeJoin) {\n \n Statistics newStats = null;\n if (isLeftAntiLikeJoin) {\n LogicalJoin newJoin = join.withJoinType(JoinType.LEFT_ANTI_JOIN);\n StatsCalculator statsCalculator = new StatsCalculator(join.getGroupExpression().get(),\n false, getTotalColumnStatisticMap(), false,\n cteIdToStats, cascadesContext);\n\n newStats = ((Plan) newJoin).accept(statsCalculator, null);\n } else if (isRightAntiLikeJoin) {\n LogicalJoin newJoin = join.withJoinType(JoinType.RIGHT_ANTI_JOIN);\n StatsCalculator statsCalculator = new StatsCalculator(join.getGroupExpression().get(),\n false, this.getTotalColumnStatisticMap(), false,\n this.cteIdToStats, this.cascadesContext);\n\n newStats = ((Plan) newJoin).accept(statsCalculator, null);\n }\n newStats.enforceValid();\n\n double selectivity = Statistics.getValidSelectivity(\n newStats.getRowCount() / (leftRowCount * rightRowCount));\n double newRows = origJoinStats.getRowCount() * selectivity;\n\n newStats.withRowCount(newRows);\n return newStats;\n } else {\n return null;\n }\n } else {\n return null;\n }\n }\n\n private ColumnStatistic getColumnStatistic(TableIf table, String colName, long idxId) {\n ConnectContext connectContext = ConnectContext.get();\n if (connectContext != null && connectContext.getSessionVariable().internalSession) {\n return ColumnStatistic.UNKNOWN;\n }\n long catalogId;\n long dbId;\n try {\n catalogId = table.getDatabase().getCatalog().getId();\n dbId = table.getDatabase().getId();\n } catch (Exception e) {\n \n \n \n if (LOG.isDebugEnabled()) {\n LOG.debug(String.format(\"Fail to get catalog id and db id for table %s\", 
table.getName()));\n }\n catalogId = -1;\n dbId = -1;\n }\n return Env.getCurrentEnv().getStatisticsCache().getColumnStatistics(\n catalogId, dbId, table.getId(), idxId, colName);\n }\n\n private ColumnStatistic getColumnStatistic(TableIf table, String colName, long idxId, List partitionNames) {\n ConnectContext connectContext = ConnectContext.get();\n if (connectContext != null && connectContext.getSessionVariable().internalSession) {\n return ColumnStatistic.UNKNOWN;\n }\n long catalogId;\n long dbId;\n try {\n catalogId = table.getDatabase().getCatalog().getId();\n dbId = table.getDatabase().getId();\n } catch (Exception e) {\n \n \n \n if (LOG.isDebugEnabled()) {\n LOG.debug(String.format(\"Fail to get catalog id and db id for table %s\", table.getName()));\n }\n catalogId = -1;\n dbId = -1;\n }\n if (isPlayNereidsDump) {\n if (totalColumnStatisticMap.get(table.getName() + colName) != null) {\n return totalColumnStatisticMap.get(table.getName() + colName);\n } else {\n return ColumnStatistic.UNKNOWN;\n }\n } else {\n if (!partitionNames.isEmpty()) {\n PartitionColumnStatisticBuilder builder = new PartitionColumnStatisticBuilder();\n boolean hasUnknown = false;\n \n List pColStatsLists = new ArrayList<>(partitionNames.size());\n for (String partitionName : partitionNames) {\n PartitionColumnStatistic pcolStats = Env.getCurrentEnv().getStatisticsCache()\n .getPartitionColumnStatistics(\n catalogId, dbId, table.getId(), idxId, partitionName, colName);\n if (pcolStats.isUnKnown) {\n hasUnknown = true;\n break;\n } else {\n pColStatsLists.add(pcolStats);\n }\n }\n if (!hasUnknown) {\n boolean isFirst = true;\n \n for (PartitionColumnStatistic pcolStats : pColStatsLists) {\n if (isFirst) {\n builder = new PartitionColumnStatisticBuilder(pcolStats);\n isFirst = false;\n } else {\n builder.merge(pcolStats);\n }\n }\n return builder.toColumnStatistics();\n }\n }\n \n return Env.getCurrentEnv().getStatisticsCache().getColumnStatistics(\n catalogId, dbId, table.getId(), 
idxId, colName);\n }\n }\n\n /**\n * compute stats for catalogRelations except OlapScan\n */\n private Statistics computeCatalogRelation(CatalogRelation catalogRelation) {\n StatisticsBuilder builder = new StatisticsBuilder();\n \n if (!FeConstants.enableInternalSchemaDb\n || ConnectContext.get() == null\n || ConnectContext.get().getSessionVariable().internalSession) {\n builder.setRowCount(catalogRelation.getTable().getRowCountForNereids());\n for (Slot slot : catalogRelation.getOutput()) {\n builder.putColumnStatistics(slot, ColumnStatistic.UNKNOWN);\n }\n setHasUnknownColStatsInStatementContext();\n return builder.build();\n }\n\n List output = catalogRelation.getOutput();\n ImmutableSet.Builder slotSetBuilder = ImmutableSet.builderWithExpectedSize(output.size());\n for (Slot slot : output) {\n if (slot instanceof SlotReference) {\n slotSetBuilder.add((SlotReference) slot);\n }\n }\n Set slotSet = slotSetBuilder.build();\n\n double rowCount = catalogRelation.getTable().getRowCountForNereids();\n for (SlotReference slot : slotSet) {\n ColumnStatistic cache = getColumnStatsFromTableCache(catalogRelation, slot);\n ColumnStatisticBuilder colStatsBuilder = new ColumnStatisticBuilder(cache);\n adjustColStats(catalogRelation, slot, colStatsBuilder);\n rowCount = Math.max(rowCount, colStatsBuilder.getCount());\n builder.putColumnStatistics(slot, colStatsBuilder.build());\n }\n checkIfUnknownStatsUsedAsKey(builder);\n return builder.build();\n }\n\n private Statistics computeTopN(TopN topN) {\n Statistics stats = groupExpression.childStatistics(0);\n return stats.withRowCountAndEnforceValid(Math.min(stats.getRowCount(), topN.getLimit()));\n }\n\n private Statistics computePartitionTopN(PartitionTopN partitionTopN) {\n Statistics childStats = groupExpression.childStatistics(0);\n double rowCount = childStats.getRowCount();\n List partitionKeys = partitionTopN.getPartitionKeys();\n if (!partitionTopN.hasGlobalLimit() && !partitionKeys.isEmpty()) {\n \n \n List 
partitionByKeyStats = partitionKeys.stream()\n .map(partitionKey -> {\n ColumnStatistic partitionKeyStats = childStats.findColumnStatistics(partitionKey);\n if (partitionKeyStats == null) {\n partitionKeyStats = new ExpressionEstimation().visit(partitionKey, childStats);\n }\n return partitionKeyStats;\n })\n .filter(s -> !s.isUnKnown)\n .collect(Collectors.toList());\n if (partitionByKeyStats.isEmpty()) {\n \n rowCount = rowCount * DEFAULT_COLUMN_NDV_RATIO;\n } else {\n rowCount = Math.min(rowCount, partitionByKeyStats.stream().map(s -> s.ndv)\n .max(Double::compare).get() * partitionTopN.getPartitionLimit());\n }\n } else {\n rowCount = Math.min(rowCount, partitionTopN.getPartitionLimit());\n }\n \n \n \n return childStats.withRowCountAndEnforceValid(rowCount);\n }\n\n private Statistics computeLimit(Limit limit) {\n Statistics stats = groupExpression.childStatistics(0);\n return stats.withRowCountAndEnforceValid(Math.min(stats.getRowCount(), limit.getLimit()));\n }\n\n private double estimateGroupByRowCount(List groupByExpressions, Statistics childStats) {\n double rowCount = 1;\n \n \n \n if (groupByExpressions.isEmpty()) {\n return 1;\n }\n List groupByNdvs = new ArrayList<>();\n for (Expression groupByExpr : groupByExpressions) {\n ColumnStatistic colStats = childStats.findColumnStatistics(groupByExpr);\n if (colStats == null) {\n colStats = ExpressionEstimation.estimate(groupByExpr, childStats);\n }\n if (colStats.isUnKnown()) {\n rowCount = childStats.getRowCount() * DEFAULT_AGGREGATE_RATIO;\n rowCount = Math.max(1, rowCount);\n rowCount = Math.min(rowCount, childStats.getRowCount());\n return rowCount;\n }\n double ndv = colStats.ndv;\n groupByNdvs.add(ndv);\n }\n groupByNdvs.sort(Collections.reverseOrder());\n\n rowCount = groupByNdvs.get(0);\n for (int groupByIndex = 1; groupByIndex < groupByExpressions.size(); ++groupByIndex) {\n rowCount *= Math.max(1, groupByNdvs.get(groupByIndex) * Math.pow(\n AGGREGATE_COLUMN_CORRELATION_COEFFICIENT, groupByIndex + 
1D));\n if (rowCount > childStats.getRowCount()) {\n rowCount = childStats.getRowCount();\n break;\n }\n }\n rowCount = Math.max(1, rowCount);\n rowCount = Math.min(rowCount, childStats.getRowCount());\n return rowCount;\n }\n\n private Statistics computeAggregate(Aggregate aggregate) {\n List groupByExpressions = aggregate.getGroupByExpressions();\n Statistics childStats = groupExpression.childStatistics(0);\n double rowCount = estimateGroupByRowCount(groupByExpressions, childStats);\n Map slotToColumnStats = Maps.newHashMap();\n List outputExpressions = aggregate.getOutputExpressions();\n \n \n double factor = childStats.getRowCount() / rowCount;\n for (NamedExpression outputExpression : outputExpressions) {\n ColumnStatistic columnStat = ExpressionEstimation.estimate(outputExpression, childStats);\n ColumnStatisticBuilder builder = new ColumnStatisticBuilder(columnStat);\n builder.setMinValue(columnStat.minValue / factor);\n builder.setMaxValue(columnStat.maxValue / factor);\n if (columnStat.ndv > rowCount) {\n builder.setNdv(rowCount);\n }\n builder.setDataSize(rowCount * outputExpression.getDataType().width());\n slotToColumnStats.put(outputExpression.toSlot(), columnStat);\n }\n return new Statistics(rowCount, 1, slotToColumnStats);\n \n }\n\n private Statistics computeRepeat(Repeat repeat) {\n Statistics childStats = groupExpression.childStatistics(0);\n Map slotIdToColumnStats = childStats.columnStatistics();\n int groupingSetNum = repeat.getGroupingSets().size();\n double rowCount = childStats.getRowCount();\n Map columnStatisticMap = slotIdToColumnStats.entrySet()\n .stream().map(kv -> {\n ColumnStatistic stats = kv.getValue();\n ColumnStatisticBuilder columnStatisticBuilder = new ColumnStatisticBuilder(stats);\n columnStatisticBuilder\n .setCount(stats.count < 0 ? stats.count : stats.count * groupingSetNum)\n .setNumNulls(stats.numNulls < 0 ? stats.numNulls : stats.numNulls * groupingSetNum)\n .setDataSize(stats.dataSize < 0 ? 
stats.dataSize : stats.dataSize * groupingSetNum);\n return Pair.of(kv.getKey(), columnStatisticBuilder.build());\n }).collect(Collectors.toMap(Pair::key, Pair::value, (item1, item2) -> item1));\n return new Statistics(rowCount < 0 ? rowCount : rowCount * groupingSetNum, 1, columnStatisticMap);\n }\n\n private Statistics computeProject(Project project) {\n List projections = project.getProjects();\n Statistics childStats = groupExpression.childStatistics(0);\n Map columnsStats = projections.stream().map(projection -> {\n ColumnStatistic columnStatistic = ExpressionEstimation.estimate(projection, childStats);\n return new SimpleEntry<>(projection.toSlot(), columnStatistic);\n }).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (item1, item2) -> item1));\n return new Statistics(childStats.getRowCount(), childStats.getWidthInJoinCluster(), columnsStats);\n }\n\n private Statistics computeOneRowRelation(List projects) {\n Map columnStatsMap = projects.stream()\n .map(project -> {\n ColumnStatistic statistic = new ColumnStatisticBuilder().setNdv(1).build();\n \n return Pair.of(project.toSlot(), statistic);\n })\n .collect(Collectors.toMap(Pair::key, Pair::value, (item1, item2) -> item1));\n int rowCount = 1;\n return new Statistics(rowCount, 1, columnStatsMap);\n }\n\n private Statistics computeEmptyRelation(EmptyRelation emptyRelation) {\n Map columnStatsMap = emptyRelation.getProjects()\n .stream()\n .map(project -> {\n ColumnStatisticBuilder columnStat = new ColumnStatisticBuilder()\n .setNdv(0)\n .setNumNulls(0)\n .setAvgSizeByte(0);\n return Pair.of(project.toSlot(), columnStat.build());\n })\n .collect(Collectors.toMap(Pair::key, Pair::value, (item1, item2) -> item1));\n int rowCount = 0;\n return new Statistics(rowCount, 1, columnStatsMap);\n }\n\n private Statistics computeUnion(Union union) {\n \n List head;\n Statistics headStats;\n List> childOutputs = Lists.newArrayList(union.getRegularChildrenOutputs());\n List childStats =\n 
groupExpression.children().stream().map(Group::getStatistics).collect(Collectors.toList());\n\n if (!union.getConstantExprsList().isEmpty()) {\n childOutputs.addAll(union.getConstantExprsList().stream()\n .map(l -> l.stream().map(NamedExpression::toSlot)\n .map(SlotReference.class::cast)\n .collect(Collectors.toList()))\n .collect(Collectors.toList()));\n childStats.addAll(union.getConstantExprsList().stream()\n .map(this::computeOneRowRelation)\n .collect(Collectors.toList()));\n }\n\n head = childOutputs.get(0);\n headStats = childStats.get(0);\n\n StatisticsBuilder statisticsBuilder = new StatisticsBuilder();\n List unionOutput = union.getOutputs();\n for (int i = 0; i < head.size(); i++) {\n double leftRowCount = headStats.getRowCount();\n Slot headSlot = head.get(i);\n for (int j = 1; j < childOutputs.size(); j++) {\n Slot slot = childOutputs.get(j).get(i);\n ColumnStatistic rightStatistic = childStats.get(j).findColumnStatistics(slot);\n double rightRowCount = childStats.get(j).getRowCount();\n ColumnStatistic estimatedColumnStatistics\n = unionColumn(headStats.findColumnStatistics(headSlot),\n headStats.getRowCount(), rightStatistic, rightRowCount, headSlot.getDataType());\n headStats.addColumnStats(headSlot, estimatedColumnStatistics);\n leftRowCount += childStats.get(j).getRowCount();\n }\n statisticsBuilder.setRowCount(leftRowCount);\n statisticsBuilder.putColumnStatistics(unionOutput.get(i), headStats.findColumnStatistics(headSlot));\n }\n return statisticsBuilder.setWidthInJoinCluster(1).build();\n }\n\n private Statistics computeExcept(SetOperation setOperation) {\n Statistics leftStats = groupExpression.childStatistics(0);\n List operatorOutput = setOperation.getOutputs();\n List childSlots = setOperation.getRegularChildOutput(0);\n StatisticsBuilder statisticsBuilder = new StatisticsBuilder();\n for (int i = 0; i < operatorOutput.size(); i++) {\n ColumnStatistic columnStatistic = leftStats.findColumnStatistics(childSlots.get(i));\n 
statisticsBuilder.putColumnStatistics(operatorOutput.get(i), columnStatistic);\n }\n statisticsBuilder.setRowCount(leftStats.getRowCount());\n return statisticsBuilder.setWidthInJoinCluster(1).build();\n }\n\n private Statistics computeIntersect(SetOperation setOperation) {\n Statistics leftChildStats = groupExpression.childStatistics(0);\n double rowCount = leftChildStats.getRowCount();\n for (int i = 1; i < setOperation.getArity(); ++i) {\n rowCount = Math.min(rowCount, groupExpression.childStatistics(i).getRowCount());\n }\n double minProd = Double.POSITIVE_INFINITY;\n for (Group group : groupExpression.children()) {\n Statistics statistics = group.getStatistics();\n double prod = 1.0;\n for (ColumnStatistic columnStatistic : statistics.columnStatistics().values()) {\n prod *= columnStatistic.ndv;\n }\n if (minProd < prod) {\n minProd = prod;\n }\n }\n rowCount = Math.min(rowCount, minProd);\n List outputs = setOperation.getOutputs();\n List leftChildOutputs = setOperation.getRegularChildOutput(0);\n for (int i = 0; i < outputs.size(); i++) {\n leftChildStats.addColumnStats(outputs.get(i),\n leftChildStats.findColumnStatistics(leftChildOutputs.get(i)));\n }\n return new StatisticsBuilder(leftChildStats.withRowCountAndEnforceValid(rowCount))\n .setWidthInJoinCluster(1).build();\n }\n\n private Statistics computeGenerate(Generate generate) {\n Statistics stats = groupExpression.childStatistics(0);\n int statsFactor = ConnectContext.get().getSessionVariable().generateStatsFactor;\n double count = stats.getRowCount() * generate.getGeneratorOutput().size() * statsFactor;\n Map columnStatsMap = Maps.newHashMap();\n for (Map.Entry entry : stats.columnStatistics().entrySet()) {\n ColumnStatistic columnStatistic = new ColumnStatisticBuilder(entry.getValue()).setCount(count).build();\n columnStatsMap.put(entry.getKey(), columnStatistic);\n }\n for (Slot output : generate.getGeneratorOutput()) {\n ColumnStatistic columnStatistic = new ColumnStatisticBuilder()\n 
.setCount(count)\n .setMinValue(Double.NEGATIVE_INFINITY)\n .setMaxValue(Double.POSITIVE_INFINITY)\n .setNdv(count)\n .setNumNulls(0)\n .setAvgSizeByte(output.getDataType().width())\n .build();\n columnStatsMap.put(output, columnStatistic);\n }\n return new Statistics(count, 1, columnStatsMap);\n }\n\n private Statistics computeWindow(Window windowOperator) {\n Statistics childStats = groupExpression.childStatistics(0);\n Map childColumnStats = childStats.columnStatistics();\n Map columnStatisticMap = windowOperator.getWindowExpressions().stream()\n .map(expr -> {\n Preconditions.checkArgument(expr instanceof Alias\n && expr.child(0) instanceof WindowExpression,\n \"need WindowExpression, but we meet \" + expr);\n WindowExpression windExpr = (WindowExpression) expr.child(0);\n ColumnStatisticBuilder colStatsBuilder = new ColumnStatisticBuilder();\n colStatsBuilder.setCount(childStats.getRowCount())\n .setOriginal(null);\n\n Double partitionCount = windExpr.getPartitionKeys().stream().map(key -> {\n ColumnStatistic keyStats = childStats.findColumnStatistics(key);\n if (keyStats == null) {\n keyStats = new ExpressionEstimation().visit(key, childStats);\n }\n return keyStats;\n })\n .filter(columnStatistic -> !columnStatistic.isUnKnown)\n .map(colStats -> colStats.ndv).max(Double::compare)\n .orElseGet(() -> -1.0);\n\n if (partitionCount == -1.0) {\n \n colStatsBuilder.setCount(childStats.getRowCount())\n .setNdv(1)\n .setMinValue(Double.NEGATIVE_INFINITY)\n .setMaxValue(Double.POSITIVE_INFINITY);\n } else {\n partitionCount = Math.max(1, partitionCount);\n if (windExpr.getFunction() instanceof AggregateFunction) {\n if (windExpr.getFunction() instanceof Count) {\n colStatsBuilder.setNdv(1)\n .setMinValue(0)\n .setMinExpr(new IntLiteral(0))\n .setMaxValue(childStats.getRowCount())\n .setMaxExpr(new IntLiteral((long) childStats.getRowCount()));\n } else if (windExpr.getFunction() instanceof Min\n || windExpr.getFunction() instanceof Max) {\n Expression minmaxChild = 
windExpr.getFunction().child(0);\n ColumnStatistic minChildStats = new ExpressionEstimation()\n .visit(minmaxChild, childStats);\n colStatsBuilder.setNdv(1)\n .setMinValue(minChildStats.minValue)\n .setMinExpr(minChildStats.minExpr)\n .setMaxValue(minChildStats.maxValue)\n .setMaxExpr(minChildStats.maxExpr);\n } else {\n \n colStatsBuilder.setNdv(1).setMinValue(Double.NEGATIVE_INFINITY)\n .setMaxValue(Double.POSITIVE_INFINITY);\n }\n } else {\n \n colStatsBuilder.setNdv(childStats.getRowCount() / partitionCount)\n .setMinValue(0)\n .setMinExpr(new IntLiteral(0))\n .setMaxValue(childStats.getRowCount())\n .setMaxExpr(new IntLiteral((long) childStats.getRowCount()));\n }\n }\n return Pair.of(expr.toSlot(), colStatsBuilder.build());\n }).collect(Collectors.toMap(Pair::key, Pair::value, (item1, item2) -> item1));\n columnStatisticMap.putAll(childColumnStats);\n return new Statistics(childStats.getRowCount(), 1, columnStatisticMap);\n }\n\n private ColumnStatistic unionColumn(ColumnStatistic leftStats, double leftRowCount, ColumnStatistic rightStats,\n double rightRowCount, DataType dataType) {\n ColumnStatisticBuilder columnStatisticBuilder = new ColumnStatisticBuilder();\n columnStatisticBuilder.setMaxValue(Math.max(leftStats.maxValue, rightStats.maxValue));\n columnStatisticBuilder.setMinValue(Math.min(leftStats.minValue, rightStats.minValue));\n StatisticRange leftRange = StatisticRange.from(leftStats, dataType);\n StatisticRange rightRange = StatisticRange.from(rightStats, dataType);\n StatisticRange newRange = leftRange.union(rightRange);\n double newRowCount = leftRowCount + rightRowCount;\n double leftSize = (leftRowCount - leftStats.numNulls) * leftStats.avgSizeByte;\n double rightSize = (rightRowCount - rightStats.numNulls) * rightStats.avgSizeByte;\n double newNullFraction = (leftStats.numNulls + rightStats.numNulls) / StatsMathUtil.maxNonNaN(1, newRowCount);\n double newNonNullRowCount = newRowCount * (1 - newNullFraction);\n\n double newAverageRowSize = 
newNonNullRowCount == 0 ? 0 : (leftSize + rightSize) / newNonNullRowCount;\n columnStatisticBuilder.setMinValue(newRange.getLow())\n .setMaxValue(newRange.getHigh())\n .setNdv(newRange.getDistinctValues())\n .setNumNulls(leftStats.numNulls + rightStats.numNulls)\n .setAvgSizeByte(newAverageRowSize);\n return columnStatisticBuilder.build();\n }\n\n @Override\n public Statistics visitLogicalCTEProducer(LogicalCTEProducer cteProducer, Void context) {\n StatisticsBuilder builder = new StatisticsBuilder(groupExpression.childStatistics(0));\n Statistics statistics = builder.setWidthInJoinCluster(1).build();\n cteIdToStats.put(cteProducer.getCteId(), statistics);\n return statistics;\n }\n\n @Override\n public Statistics visitLogicalCTEConsumer(LogicalCTEConsumer cteConsumer, Void context) {\n CTEId cteId = cteConsumer.getCteId();\n cascadesContext.addCTEConsumerGroup(cteConsumer.getCteId(), groupExpression.getOwnerGroup(),\n cteConsumer.getProducerToConsumerOutputMap());\n Statistics prodStats = cteIdToStats.get(cteId);\n Preconditions.checkArgument(prodStats != null, String.format(\"Stats for CTE: %s not found\", cteId));\n Statistics consumerStats = new Statistics(prodStats.getRowCount(), 1, new HashMap<>());\n for (Slot slot : cteConsumer.getOutput()) {\n Slot prodSlot = cteConsumer.getProducerSlot(slot);\n ColumnStatistic colStats = prodStats.columnStatistics().get(prodSlot);\n if (colStats == null) {\n continue;\n }\n consumerStats.addColumnStats(slot, colStats);\n }\n return consumerStats;\n }\n\n @Override\n public Statistics visitLogicalCTEAnchor(LogicalCTEAnchor cteAnchor, Void context) {\n return groupExpression.childStatistics(1);\n }\n\n @Override\n public Statistics visitPhysicalCTEProducer(PhysicalCTEProducer cteProducer,\n Void context) {\n Statistics statistics = new StatisticsBuilder(groupExpression.childStatistics(0))\n .setWidthInJoinCluster(1).build();\n cteIdToStats.put(cteProducer.getCteId(), statistics);\n 
cascadesContext.updateConsumerStats(cteProducer.getCteId(), statistics);\n return statistics;\n }\n\n @Override\n public Statistics visitPhysicalCTEConsumer(PhysicalCTEConsumer cteConsumer, Void context) {\n cascadesContext.addCTEConsumerGroup(cteConsumer.getCteId(), groupExpression.getOwnerGroup(),\n cteConsumer.getProducerToConsumerSlotMap());\n CTEId cteId = cteConsumer.getCteId();\n Statistics prodStats = cteIdToStats.get(cteId);\n if (prodStats == null) {\n prodStats = groupExpression.getOwnerGroup().getStatistics();\n }\n Preconditions.checkArgument(prodStats != null, String.format(\"Stats for CTE: %s not found\", cteId));\n Statistics consumerStats = new Statistics(prodStats.getRowCount(), 1, new HashMap<>());\n for (Slot slot : cteConsumer.getOutput()) {\n Slot prodSlot = cteConsumer.getProducerSlot(slot);\n ColumnStatistic colStats = prodStats.columnStatistics().get(prodSlot);\n if (colStats == null) {\n continue;\n }\n consumerStats.addColumnStats(slot, colStats);\n }\n return consumerStats;\n }\n\n @Override\n public Statistics visitPhysicalCTEAnchor(\n PhysicalCTEAnchor cteAnchor, Void context) {\n return groupExpression.childStatistics(1);\n }\n}" }, { "comment": "Why use `AtomicBoolean`? 
Does some race conditions exist?", "method_body": "private LogicalPlan replaceSortExpression(LogicalSort sort, Map sMap) {\n List orderKeys = sort.getOrderKeys();\n AtomicBoolean changed = new AtomicBoolean(false);\n List newKeys = orderKeys.stream().map(k -> {\n Expression newExpr = ExpressionUtils.replace(k.getExpr(), sMap);\n if (newExpr != k.getExpr()) {\n changed.set(true);\n }\n return new OrderKey(newExpr, k.isAsc(), k.isNullFirst());\n }).collect(Collectors.toList());\n if (changed.get()) {\n return new LogicalSort<>(newKeys, sort.child());\n } else {\n return sort;\n }\n }", "target_code": "AtomicBoolean changed = new AtomicBoolean(false);", "method_body_after": "private LogicalPlan replaceSortExpression(LogicalSort sort, Map sMap) {\n List orderKeys = sort.getOrderKeys();\n AtomicBoolean changed = new AtomicBoolean(false);\n List newKeys = orderKeys.stream().map(k -> {\n Expression newExpr = ExpressionUtils.replace(k.getExpr(), sMap);\n if (newExpr != k.getExpr()) {\n changed.set(true);\n }\n return new OrderKey(newExpr, k.isAsc(), k.isNullFirst());\n }).collect(Collectors.toList());\n if (changed.get()) {\n return new LogicalSort<>(newKeys, sort.child());\n } else {\n return sort;\n }\n }", "context_before": "class ReplaceExpressionByChildOutput implements AnalysisRuleFactory {\n @Override\n public List buildRules() {\n return ImmutableList.builder()\n .add(RuleType.REPLACE_SORT_EXPRESSION_BY_CHILD_OUTPUT.build(\n logicalSort(logicalProject()).then(sort -> {\n LogicalProject project = sort.child();\n Map sMap = Maps.newHashMap();\n project.getProjects().stream()\n .filter(Alias.class::isInstance)\n .map(Alias.class::cast)\n .forEach(p -> sMap.put(p.child(), p.toSlot()));\n return replaceSortExpression(sort, sMap);\n })\n ))\n .add(RuleType.REPLACE_SORT_EXPRESSION_BY_CHILD_OUTPUT.build(\n logicalSort(logicalAggregate()).then(sort -> {\n LogicalAggregate aggregate = sort.child();\n Map sMap = Maps.newHashMap();\n aggregate.getOutputExpressions().stream()\n 
.filter(Alias.class::isInstance)\n .map(Alias.class::cast)\n .forEach(p -> sMap.put(p.child(), p.toSlot()));\n return replaceSortExpression(sort, sMap);\n })\n )).add(RuleType.REPLACE_SORT_EXPRESSION_BY_CHILD_OUTPUT.build(\n logicalSort(logicalHaving(logicalAggregate())).then(sort -> {\n LogicalAggregate aggregate = sort.child().child();\n Map sMap = Maps.newHashMap();\n aggregate.getOutputExpressions().stream()\n .filter(Alias.class::isInstance)\n .map(Alias.class::cast)\n .forEach(p -> sMap.put(p.child(), p.toSlot()));\n return replaceSortExpression(sort, sMap);\n })\n ))\n .build();\n }\n\n \n}", "context_after": "class ReplaceExpressionByChildOutput implements AnalysisRuleFactory {\n @Override\n public List buildRules() {\n return ImmutableList.builder()\n .add(RuleType.REPLACE_SORT_EXPRESSION_BY_CHILD_OUTPUT.build(\n logicalSort(logicalProject()).then(sort -> {\n LogicalProject project = sort.child();\n Map sMap = Maps.newHashMap();\n project.getProjects().stream()\n .filter(Alias.class::isInstance)\n .map(Alias.class::cast)\n .forEach(p -> sMap.put(p.child(), p.toSlot()));\n return replaceSortExpression(sort, sMap);\n })\n ))\n .add(RuleType.REPLACE_SORT_EXPRESSION_BY_CHILD_OUTPUT.build(\n logicalSort(logicalAggregate()).then(sort -> {\n LogicalAggregate aggregate = sort.child();\n Map sMap = Maps.newHashMap();\n aggregate.getOutputExpressions().stream()\n .filter(Alias.class::isInstance)\n .map(Alias.class::cast)\n .forEach(p -> sMap.put(p.child(), p.toSlot()));\n return replaceSortExpression(sort, sMap);\n })\n )).add(RuleType.REPLACE_SORT_EXPRESSION_BY_CHILD_OUTPUT.build(\n logicalSort(logicalHaving(logicalAggregate())).then(sort -> {\n LogicalAggregate aggregate = sort.child().child();\n Map sMap = Maps.newHashMap();\n aggregate.getOutputExpressions().stream()\n .filter(Alias.class::isInstance)\n .map(Alias.class::cast)\n .forEach(p -> sMap.put(p.child(), p.toSlot()));\n return replaceSortExpression(sort, sMap);\n })\n ))\n .build();\n }\n\n \n}" }, { 
"comment": "`parallelism2` is the task number for before rescale, and `2 * parallelism2` is the task number for after rescale. Ideally, we should wait for all tasks after rescale to become running. That's exactly what FLINK-34336 fix. > Why does it increase the hang possibility? When the rescale with cooldown=30s, the rescale action will be executed after 30s. When the `waitForRunningTasks` is called, the job is still running with old parallelism, so the `waitForRunningTasks` can be done. When we disable the scaling cooldown, the rescale may happen immediately. When it happens, the job will be running with `2 * parallelism2`, so `waitForRunningTasks` with old parallelism2 will be hang forever. > And why did we merge it into master? IIUC, we should wait for all tasks after rescale to become running. But these 2 callers wait for all tasks before rescale. These 2 wait are unexpected, so I think it's a separate bug regardless of we disable/enable cooldown. It means we should wait for all tasks after rescale even if we enable cooldown=30s. That's why I create a separate JIRA (FLINK-34336) to follow it. Why do I merge it into master? 1. I think `Disable the scaling cooldown to speed up the test` and FLINK-34336 are separate 2. I guess `Disable the scaling cooldown to speed up the test` may be cause AutoRescalingITCase hang, so I prepare https://github.com/apache/flink/pull/24248 in advanced. It's easy to be merged to fix the hang. > Why do we merge it then now? 1. `Disable the scaling cooldown to speed up the test` is a minor improvement for test instead of bugfix. So it's not necessary for 1.19 IIUC. 2. 1.19 will be released soon, I'm afraid to affect this release. So I merge it in master(1.20) first. 3. After FLINK-34226 is fixed, this minor improvement doesn't have any negative impact(I didn't notice any), so merging it may be make sense. 
WDYT?", "method_body": "public void testCheckpointRescalingNonPartitionedStateCausesException() throws Exception {\n final int parallelism = totalSlots / 2;\n final int parallelism2 = totalSlots;\n final int maxParallelism = 13;\n\n ClusterClient client = cluster.getClusterClient();\n\n try {\n JobGraph jobGraph =\n createJobGraphWithOperatorState(\n parallelism, maxParallelism, OperatorCheckpointMethod.NON_PARTITIONED);\n \n StateSourceBase.canFinishLatch = new CountDownLatch(1);\n\n final JobID jobID = jobGraph.getJobID();\n\n client.submitJob(jobGraph).get();\n\n \n waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);\n \n StateSourceBase.workStartedLatch.await();\n\n waitForNewCheckpoint(jobID, cluster.getMiniCluster());\n\n JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();\n for (JobVertex vertex : jobGraph.getVertices()) {\n builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);\n }\n\n restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();\n\n waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);\n waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);\n\n StateSourceBase.canFinishLatch.countDown();\n\n client.requestJobResult(jobID).get();\n } catch (JobExecutionException exception) {\n if (!(exception.getCause() instanceof IllegalStateException)) {\n throw exception;\n }\n }\n }", "target_code": "waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);", "method_body_after": "public void testCheckpointRescalingNonPartitionedStateCausesException() throws Exception {\n final int parallelism = totalSlots / 2;\n final int parallelism2 = totalSlots;\n final int maxParallelism = 13;\n\n ClusterClient client = cluster.getClusterClient();\n\n try {\n JobGraph jobGraph =\n createJobGraphWithOperatorState(\n parallelism, maxParallelism, OperatorCheckpointMethod.NON_PARTITIONED);\n \n StateSourceBase.canFinishLatch = new 
CountDownLatch(1);\n\n final JobID jobID = jobGraph.getJobID();\n\n client.submitJob(jobGraph).get();\n\n \n waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);\n \n StateSourceBase.workStartedLatch.await();\n\n waitForNewCheckpoint(jobID, cluster.getMiniCluster());\n\n JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();\n for (JobVertex vertex : jobGraph.getVertices()) {\n builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);\n }\n\n restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();\n\n waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);\n waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);\n\n StateSourceBase.canFinishLatch.countDown();\n\n client.requestJobResult(jobID).get();\n } catch (JobExecutionException exception) {\n if (!(exception.getCause() instanceof IllegalStateException)) {\n throw exception;\n }\n }\n }", "context_before": "class AutoRescalingITCase extends TestLogger {\n\n @ClassRule\n public static final TestExecutorResource EXECUTOR_RESOURCE =\n TestingUtils.defaultExecutorResource();\n\n private static final int numTaskManagers = 2;\n private static final int slotsPerTaskManager = 2;\n private static final int totalSlots = numTaskManagers * slotsPerTaskManager;\n\n @Parameterized.Parameters(name = \"backend = {0}, buffersPerChannel = {1}\")\n public static Collection data() {\n return Arrays.asList(\n new Object[][] {\n {\"rocksdb\", 0}, {\"rocksdb\", 2}, {\"filesystem\", 0}, {\"filesystem\", 2}\n });\n }\n\n public AutoRescalingITCase(String backend, int buffersPerChannel) {\n this.backend = backend;\n this.buffersPerChannel = buffersPerChannel;\n }\n\n private final String backend;\n\n private final int buffersPerChannel;\n\n private String currentBackend = null;\n\n enum OperatorCheckpointMethod {\n NON_PARTITIONED,\n CHECKPOINTED_FUNCTION,\n CHECKPOINTED_FUNCTION_BROADCAST,\n LIST_CHECKPOINTED\n }\n\n 
private static MiniClusterWithClientResource cluster;\n private static RestClusterClient restClusterClient;\n\n @ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Before\n public void setup() throws Exception {\n \n if (!Objects.equals(currentBackend, backend)) {\n shutDownExistingCluster();\n\n currentBackend = backend;\n\n Configuration config = new Configuration();\n\n final File checkpointDir = temporaryFolder.newFolder();\n final File savepointDir = temporaryFolder.newFolder();\n\n config.set(StateBackendOptions.STATE_BACKEND, currentBackend);\n config.set(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, true);\n config.set(CheckpointingOptions.LOCAL_RECOVERY, true);\n config.set(\n CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir.toURI().toString());\n config.set(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir.toURI().toString());\n config.set(\n NettyShuffleEnvironmentOptions.NETWORK_BUFFERS_PER_CHANNEL, buffersPerChannel);\n\n config.set(JobManagerOptions.SCHEDULER, JobManagerOptions.SchedulerType.Adaptive);\n \n config.set(JobManagerOptions.SCHEDULER_SCALING_INTERVAL_MIN, Duration.ofMillis(0));\n\n \n \n \n config.set(WebOptions.REFRESH_INTERVAL, 50L);\n config.set(JobManagerOptions.SLOT_IDLE_TIMEOUT, 50L);\n\n cluster =\n new MiniClusterWithClientResource(\n new MiniClusterResourceConfiguration.Builder()\n .setConfiguration(config)\n .setNumberTaskManagers(numTaskManagers)\n .setNumberSlotsPerTaskManager(slotsPerTaskManager)\n .build());\n cluster.before();\n restClusterClient = cluster.getRestClusterClient();\n }\n }\n\n @AfterClass\n public static void shutDownExistingCluster() {\n if (cluster != null) {\n cluster.after();\n cluster = null;\n }\n }\n\n @Test\n public void testCheckpointRescalingInKeyedState() throws Exception {\n testCheckpointRescalingKeyedState(false);\n }\n\n @Test\n public void testCheckpointRescalingOutKeyedState() throws Exception {\n testCheckpointRescalingKeyedState(true);\n }\n\n 
/**\n * Tests that a job with purely keyed state can be restarted from a checkpoint with a different\n * parallelism.\n */\n public void testCheckpointRescalingKeyedState(boolean scaleOut) throws Exception {\n final int numberKeys = 42;\n final int numberElements = 1000;\n final int parallelism = scaleOut ? totalSlots / 2 : totalSlots;\n final int parallelism2 = scaleOut ? totalSlots : totalSlots / 2;\n final int maxParallelism = 13;\n\n Duration timeout = Duration.ofMinutes(3);\n Deadline deadline = Deadline.now().plus(timeout);\n\n ClusterClient client = cluster.getClusterClient();\n\n try {\n\n JobGraph jobGraph =\n createJobGraphWithKeyedState(\n new Configuration(),\n parallelism,\n maxParallelism,\n numberKeys,\n numberElements);\n\n final JobID jobID = jobGraph.getJobID();\n\n client.submitJob(jobGraph).get();\n\n SubtaskIndexSource.SOURCE_LATCH.trigger();\n\n \n \n assertTrue(\n SubtaskIndexFlatMapper.workCompletedLatch.await(\n deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS));\n\n \n\n Set> actualResult = CollectionSink.getElementsSet();\n\n Set> expectedResult = new HashSet<>();\n\n for (int key = 0; key < numberKeys; key++) {\n int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);\n\n expectedResult.add(\n Tuple2.of(\n KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(\n maxParallelism, parallelism, keyGroupIndex),\n numberElements * key));\n }\n\n assertEquals(expectedResult, actualResult);\n\n \n CollectionSink.clearElementsSet();\n\n waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);\n\n \n \n \n \n waitForNewCheckpoint(jobID, cluster.getMiniCluster());\n\n SubtaskIndexSource.SOURCE_LATCH.reset();\n\n JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();\n for (JobVertex vertex : jobGraph.getVertices()) {\n builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);\n }\n\n restClusterClient.updateJobResourceRequirements(jobID, 
builder.build()).join();\n\n waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);\n waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);\n\n SubtaskIndexSource.SOURCE_LATCH.trigger();\n\n client.requestJobResult(jobID).get();\n\n Set> actualResult2 = CollectionSink.getElementsSet();\n\n Set> expectedResult2 = new HashSet<>();\n\n for (int key = 0; key < numberKeys; key++) {\n int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);\n expectedResult2.add(\n Tuple2.of(\n KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(\n maxParallelism, parallelism2, keyGroupIndex),\n key * 2 * numberElements));\n }\n\n assertEquals(expectedResult2, actualResult2);\n\n } finally {\n \n CollectionSink.clearElementsSet();\n }\n }\n\n /**\n * Tests that a job cannot be restarted from a checkpoint with a different parallelism if the\n * rescaled operator has non-partitioned state.\n */\n @Test\n \n\n /**\n * Tests that a job with non partitioned state can be restarted from a checkpoint with a\n * different parallelism if the operator with non-partitioned state are not rescaled.\n */\n @Test\n public void testCheckpointRescalingWithKeyedAndNonPartitionedState() throws Exception {\n int numberKeys = 42;\n int numberElements = 1000;\n int parallelism = totalSlots / 2;\n int parallelism2 = totalSlots;\n int maxParallelism = 13;\n\n Duration timeout = Duration.ofMinutes(3);\n Deadline deadline = Deadline.now().plus(timeout);\n\n ClusterClient client = cluster.getClusterClient();\n\n try {\n\n JobGraph jobGraph =\n createJobGraphWithKeyedAndNonPartitionedOperatorState(\n parallelism,\n maxParallelism,\n parallelism,\n numberKeys,\n numberElements,\n numberElements);\n\n final JobID jobID = jobGraph.getJobID();\n\n client.submitJob(jobGraph).get();\n\n SubtaskIndexSource.SOURCE_LATCH.trigger();\n\n \n \n assertTrue(\n SubtaskIndexFlatMapper.workCompletedLatch.await(\n deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS));\n\n \n\n 
Set> actualResult = CollectionSink.getElementsSet();\n\n Set> expectedResult = new HashSet<>();\n\n for (int key = 0; key < numberKeys; key++) {\n int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);\n\n expectedResult.add(\n Tuple2.of(\n KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(\n maxParallelism, parallelism, keyGroupIndex),\n numberElements * key));\n }\n\n assertEquals(expectedResult, actualResult);\n\n \n CollectionSink.clearElementsSet();\n\n waitForNewCheckpoint(jobID, cluster.getMiniCluster());\n\n SubtaskIndexSource.SOURCE_LATCH.reset();\n\n JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();\n for (JobVertex vertex : jobGraph.getVertices()) {\n if (vertex.getMaxParallelism() >= parallelism2) {\n builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);\n } else {\n builder.setParallelismForJobVertex(\n vertex.getID(), vertex.getMaxParallelism(), vertex.getMaxParallelism());\n }\n }\n\n restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();\n\n \n waitForRunningTasks(restClusterClient, jobID, parallelism + parallelism2);\n waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);\n\n SubtaskIndexSource.SOURCE_LATCH.trigger();\n\n client.requestJobResult(jobID).get();\n\n Set> actualResult2 = CollectionSink.getElementsSet();\n\n Set> expectedResult2 = new HashSet<>();\n\n for (int key = 0; key < numberKeys; key++) {\n int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);\n expectedResult2.add(\n Tuple2.of(\n KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(\n maxParallelism, parallelism2, keyGroupIndex),\n key * 2 * numberElements));\n }\n\n assertEquals(expectedResult2, actualResult2);\n\n } finally {\n \n CollectionSink.clearElementsSet();\n }\n }\n\n @Test\n public void testCheckpointRescalingInPartitionedOperatorState() throws Exception {\n testCheckpointRescalingPartitionedOperatorState(\n 
false, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION);\n }\n\n @Test\n public void testCheckpointRescalingOutPartitionedOperatorState() throws Exception {\n testCheckpointRescalingPartitionedOperatorState(\n true, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION);\n }\n\n @Test\n public void testCheckpointRescalingInBroadcastOperatorState() throws Exception {\n testCheckpointRescalingPartitionedOperatorState(\n false, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST);\n }\n\n @Test\n public void testCheckpointRescalingOutBroadcastOperatorState() throws Exception {\n testCheckpointRescalingPartitionedOperatorState(\n true, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST);\n }\n\n /** Tests rescaling of partitioned operator state. */\n public void testCheckpointRescalingPartitionedOperatorState(\n boolean scaleOut, OperatorCheckpointMethod checkpointMethod) throws Exception {\n final int parallelism = scaleOut ? totalSlots : totalSlots / 2;\n final int parallelism2 = scaleOut ? 
totalSlots / 2 : totalSlots;\n final int maxParallelism = 13;\n\n ClusterClient client = cluster.getClusterClient();\n\n int counterSize = Math.max(parallelism, parallelism2);\n\n if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION\n || checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST) {\n PartitionedStateSource.checkCorrectSnapshot = new int[counterSize];\n PartitionedStateSource.checkCorrectRestore = new int[counterSize];\n PartitionedStateSource.checkCorrectSnapshots.clear();\n } else {\n throw new UnsupportedOperationException(\"Unsupported method:\" + checkpointMethod);\n }\n\n JobGraph jobGraph =\n createJobGraphWithOperatorState(parallelism, maxParallelism, checkpointMethod);\n \n StateSourceBase.canFinishLatch = new CountDownLatch(1);\n\n final JobID jobID = jobGraph.getJobID();\n\n client.submitJob(jobGraph).get();\n\n \n waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);\n \n StateSourceBase.workStartedLatch.await();\n\n waitForNewCheckpoint(jobID, cluster.getMiniCluster());\n\n JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();\n for (JobVertex vertex : jobGraph.getVertices()) {\n builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);\n }\n\n restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();\n\n waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);\n waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);\n\n StateSourceBase.canFinishLatch.countDown();\n\n client.requestJobResult(jobID).get();\n\n int sumExp = 0;\n int sumAct = 0;\n\n if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION) {\n for (int c : PartitionedStateSource.checkCorrectSnapshot) {\n sumExp += c;\n }\n\n for (int c : PartitionedStateSource.checkCorrectRestore) {\n sumAct += c;\n }\n } else {\n for (int c : PartitionedStateSource.checkCorrectSnapshot) {\n sumExp += c;\n }\n\n for (int c : 
PartitionedStateSource.checkCorrectRestore) {\n sumAct += c;\n }\n\n sumExp *= parallelism2;\n }\n\n assertEquals(sumExp, sumAct);\n }\n\n \n\n private static void configureCheckpointing(CheckpointConfig config) {\n config.setCheckpointInterval(100);\n config.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);\n config.enableUnalignedCheckpoints(true);\n }\n\n private static JobGraph createJobGraphWithOperatorState(\n int parallelism, int maxParallelism, OperatorCheckpointMethod checkpointMethod) {\n\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n configureCheckpointing(env.getCheckpointConfig());\n env.setParallelism(parallelism);\n env.getConfig().setMaxParallelism(maxParallelism);\n env.setRestartStrategy(RestartStrategies.noRestart());\n\n StateSourceBase.workStartedLatch = new CountDownLatch(parallelism);\n\n SourceFunction src;\n\n switch (checkpointMethod) {\n case CHECKPOINTED_FUNCTION:\n src = new PartitionedStateSource(false);\n break;\n case CHECKPOINTED_FUNCTION_BROADCAST:\n src = new PartitionedStateSource(true);\n break;\n case NON_PARTITIONED:\n src = new NonPartitionedStateSource();\n break;\n default:\n throw new IllegalArgumentException(checkpointMethod.name());\n }\n\n DataStream input = env.addSource(src);\n\n input.sinkTo(new DiscardingSink<>());\n\n return env.getStreamGraph().getJobGraph();\n }\n\n public static JobGraph createJobGraphWithKeyedState(\n Configuration configuration,\n int parallelism,\n int maxParallelism,\n int numberKeys,\n int numberElements) {\n StreamExecutionEnvironment env =\n StreamExecutionEnvironment.getExecutionEnvironment(configuration);\n env.setParallelism(parallelism);\n if (0 < maxParallelism) {\n env.getConfig().setMaxParallelism(maxParallelism);\n }\n\n configureCheckpointing(env.getCheckpointConfig());\n env.setRestartStrategy(RestartStrategies.noRestart());\n env.getConfig().setUseSnapshotCompression(true);\n\n DataStream input =\n env.addSource(new 
SubtaskIndexSource(numberKeys, numberElements, parallelism))\n .keyBy(\n new KeySelector() {\n private static final long serialVersionUID =\n -7952298871120320940L;\n\n @Override\n public Integer getKey(Integer value) {\n return value;\n }\n });\n\n SubtaskIndexFlatMapper.workCompletedLatch = new CountDownLatch(numberKeys);\n\n DataStream> result =\n input.flatMap(new SubtaskIndexFlatMapper(numberElements));\n\n result.addSink(new CollectionSink<>());\n\n return env.getStreamGraph().getJobGraph();\n }\n\n private static JobGraph createJobGraphWithKeyedAndNonPartitionedOperatorState(\n int parallelism,\n int maxParallelism,\n int fixedParallelism,\n int numberKeys,\n int numberElements,\n int numberElementsAfterRestart) {\n\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n env.setParallelism(parallelism);\n env.getConfig().setMaxParallelism(maxParallelism);\n configureCheckpointing(env.getCheckpointConfig());\n env.setRestartStrategy(RestartStrategies.noRestart());\n\n DataStream input =\n env.addSource(\n new SubtaskIndexNonPartitionedStateSource(\n numberKeys,\n numberElements,\n numberElementsAfterRestart,\n parallelism))\n .setParallelism(fixedParallelism)\n .setMaxParallelism(fixedParallelism)\n .keyBy(\n new KeySelector() {\n private static final long serialVersionUID =\n -7952298871120320940L;\n\n @Override\n public Integer getKey(Integer value) {\n return value;\n }\n });\n\n SubtaskIndexFlatMapper.workCompletedLatch = new CountDownLatch(numberKeys);\n\n DataStream> result =\n input.flatMap(new SubtaskIndexFlatMapper(numberElements));\n\n result.addSink(new CollectionSink<>());\n\n return env.getStreamGraph().getJobGraph();\n }\n\n private static class SubtaskIndexSource extends RichParallelSourceFunction {\n\n private static final long serialVersionUID = -400066323594122516L;\n\n private final int numberKeys;\n\n private final int originalParallelism;\n protected int numberElements;\n\n protected int counter = 0;\n\n 
private boolean running = true;\n\n private static final OneShotLatch SOURCE_LATCH = new OneShotLatch();\n\n SubtaskIndexSource(int numberKeys, int numberElements, int originalParallelism) {\n this.numberKeys = numberKeys;\n this.numberElements = numberElements;\n this.originalParallelism = originalParallelism;\n }\n\n @Override\n public void run(SourceContext ctx) throws Exception {\n RuntimeContext runtimeContext = getRuntimeContext();\n final int subtaskIndex = runtimeContext.getTaskInfo().getIndexOfThisSubtask();\n\n boolean isRestartedOrRescaled =\n runtimeContext.getTaskInfo().getNumberOfParallelSubtasks()\n != originalParallelism\n || runtimeContext.getTaskInfo().getAttemptNumber() > 0;\n while (running) {\n SOURCE_LATCH.await();\n if (counter < numberElements) {\n synchronized (ctx.getCheckpointLock()) {\n for (int value = subtaskIndex;\n value < numberKeys;\n value +=\n runtimeContext\n .getTaskInfo()\n .getNumberOfParallelSubtasks()) {\n ctx.collect(value);\n }\n\n counter++;\n }\n } else {\n if (isRestartedOrRescaled) {\n running = false;\n } else {\n Thread.sleep(100);\n }\n }\n }\n }\n\n @Override\n public void cancel() {\n running = false;\n }\n }\n\n private static class SubtaskIndexNonPartitionedStateSource extends SubtaskIndexSource\n implements ListCheckpointed {\n\n private static final long serialVersionUID = 8388073059042040203L;\n private final int numElementsAfterRestart;\n\n SubtaskIndexNonPartitionedStateSource(\n int numberKeys,\n int numberElements,\n int numElementsAfterRestart,\n int originalParallelism) {\n super(numberKeys, numberElements, originalParallelism);\n this.numElementsAfterRestart = numElementsAfterRestart;\n }\n\n @Override\n public List snapshotState(long checkpointId, long timestamp) {\n return Collections.singletonList(this.counter);\n }\n\n @Override\n public void restoreState(List state) {\n if (state.size() != 1) {\n throw new RuntimeException(\n \"Test failed due to unexpected recovered state size \" + 
state.size());\n }\n this.counter = state.get(0);\n this.numberElements += numElementsAfterRestart;\n }\n }\n\n private static class SubtaskIndexFlatMapper\n extends RichFlatMapFunction>\n implements CheckpointedFunction {\n\n private static final long serialVersionUID = 5273172591283191348L;\n\n private static CountDownLatch workCompletedLatch = new CountDownLatch(1);\n\n private transient ValueState counter;\n private transient ValueState sum;\n\n private final int numberElements;\n\n SubtaskIndexFlatMapper(int numberElements) {\n this.numberElements = numberElements;\n }\n\n @Override\n public void flatMap(Integer value, Collector> out)\n throws Exception {\n\n int count = counter.value() + 1;\n counter.update(count);\n\n int s = sum.value() + value;\n sum.update(s);\n\n if (count % numberElements == 0) {\n out.collect(\n Tuple2.of(getRuntimeContext().getTaskInfo().getIndexOfThisSubtask(), s));\n workCompletedLatch.countDown();\n }\n }\n\n @Override\n public void snapshotState(FunctionSnapshotContext context) {\n \n }\n\n @Override\n public void initializeState(FunctionInitializationContext context) {\n counter =\n context.getKeyedStateStore()\n .getState(new ValueStateDescriptor<>(\"counter\", Integer.class, 0));\n sum =\n context.getKeyedStateStore()\n .getState(new ValueStateDescriptor<>(\"sum\", Integer.class, 0));\n }\n }\n\n private static class CollectionSink implements SinkFunction {\n\n private static final Set elements =\n Collections.newSetFromMap(new ConcurrentHashMap<>());\n\n private static final long serialVersionUID = -1652452958040267745L;\n\n public static Set getElementsSet() {\n return (Set) elements;\n }\n\n public static void clearElementsSet() {\n elements.clear();\n }\n\n @Override\n public void invoke(IN value) {\n elements.add(value);\n }\n }\n\n private static class StateSourceBase extends RichParallelSourceFunction {\n\n private static final long serialVersionUID = 7512206069681177940L;\n private static CountDownLatch workStartedLatch 
= new CountDownLatch(1);\n private static CountDownLatch canFinishLatch = new CountDownLatch(0);\n\n protected volatile int counter = 0;\n protected volatile boolean running = true;\n\n @Override\n public void run(SourceContext ctx) throws Exception {\n while (running) {\n synchronized (ctx.getCheckpointLock()) {\n ++counter;\n ctx.collect(1);\n }\n\n Thread.sleep(2);\n\n if (counter == 10) {\n workStartedLatch.countDown();\n }\n\n if (counter >= 500) {\n break;\n }\n }\n\n canFinishLatch.await();\n }\n\n @Override\n public void cancel() {\n running = false;\n }\n }\n\n private static class NonPartitionedStateSource extends StateSourceBase\n implements ListCheckpointed {\n\n private static final long serialVersionUID = -8108185918123186841L;\n\n @Override\n public List snapshotState(long checkpointId, long timestamp) {\n return Collections.singletonList(this.counter);\n }\n\n @Override\n public void restoreState(List state) {\n if (!state.isEmpty()) {\n this.counter = state.get(0);\n }\n }\n }\n\n private static class PartitionedStateSource extends StateSourceBase\n implements CheckpointedFunction {\n\n private static final long serialVersionUID = -359715965103593462L;\n private static final int NUM_PARTITIONS = 7;\n\n private transient ListState counterPartitions;\n private final boolean broadcast;\n\n private static final ConcurrentHashMap checkCorrectSnapshots =\n new ConcurrentHashMap<>();\n private static int[] checkCorrectSnapshot;\n private static int[] checkCorrectRestore;\n\n public PartitionedStateSource(boolean broadcast) {\n this.broadcast = broadcast;\n }\n\n @Override\n public void snapshotState(FunctionSnapshotContext context) throws Exception {\n\n if (getRuntimeContext().getTaskInfo().getAttemptNumber() == 0) {\n int[] snapshot =\n checkCorrectSnapshots.computeIfAbsent(\n context.getCheckpointId(),\n (x) -> new int[checkCorrectRestore.length]);\n snapshot[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] = counter;\n }\n\n 
counterPartitions.clear();\n\n int div = counter / NUM_PARTITIONS;\n int mod = counter % NUM_PARTITIONS;\n\n for (int i = 0; i < NUM_PARTITIONS; ++i) {\n int partitionValue = div;\n if (mod > 0) {\n --mod;\n ++partitionValue;\n }\n counterPartitions.add(partitionValue);\n }\n }\n\n @Override\n public void initializeState(FunctionInitializationContext context) throws Exception {\n if (broadcast) {\n this.counterPartitions =\n context.getOperatorStateStore()\n .getUnionListState(\n new ListStateDescriptor<>(\n \"counter_partitions\", IntSerializer.INSTANCE));\n } else {\n this.counterPartitions =\n context.getOperatorStateStore()\n .getListState(\n new ListStateDescriptor<>(\n \"counter_partitions\", IntSerializer.INSTANCE));\n }\n\n if (context.isRestored()) {\n for (int v : counterPartitions.get()) {\n counter += v;\n }\n checkCorrectRestore[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] =\n counter;\n context.getRestoredCheckpointId()\n .ifPresent((id) -> checkCorrectSnapshot = checkCorrectSnapshots.get(id));\n }\n }\n }\n}", "context_after": "class AutoRescalingITCase extends TestLogger {\n\n @ClassRule\n public static final TestExecutorResource EXECUTOR_RESOURCE =\n TestingUtils.defaultExecutorResource();\n\n private static final int numTaskManagers = 2;\n private static final int slotsPerTaskManager = 2;\n private static final int totalSlots = numTaskManagers * slotsPerTaskManager;\n\n @Parameterized.Parameters(name = \"backend = {0}, buffersPerChannel = {1}\")\n public static Collection data() {\n return Arrays.asList(\n new Object[][] {\n {\"rocksdb\", 0}, {\"rocksdb\", 2}, {\"filesystem\", 0}, {\"filesystem\", 2}\n });\n }\n\n public AutoRescalingITCase(String backend, int buffersPerChannel) {\n this.backend = backend;\n this.buffersPerChannel = buffersPerChannel;\n }\n\n private final String backend;\n\n private final int buffersPerChannel;\n\n private String currentBackend = null;\n\n enum OperatorCheckpointMethod {\n NON_PARTITIONED,\n 
CHECKPOINTED_FUNCTION,\n CHECKPOINTED_FUNCTION_BROADCAST,\n LIST_CHECKPOINTED\n }\n\n private static MiniClusterWithClientResource cluster;\n private static RestClusterClient restClusterClient;\n\n @ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Before\n public void setup() throws Exception {\n \n if (!Objects.equals(currentBackend, backend)) {\n shutDownExistingCluster();\n\n currentBackend = backend;\n\n Configuration config = new Configuration();\n\n final File checkpointDir = temporaryFolder.newFolder();\n final File savepointDir = temporaryFolder.newFolder();\n\n config.set(StateBackendOptions.STATE_BACKEND, currentBackend);\n config.set(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, true);\n config.set(CheckpointingOptions.LOCAL_RECOVERY, true);\n config.set(\n CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir.toURI().toString());\n config.set(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir.toURI().toString());\n config.set(\n NettyShuffleEnvironmentOptions.NETWORK_BUFFERS_PER_CHANNEL, buffersPerChannel);\n\n config.set(JobManagerOptions.SCHEDULER, JobManagerOptions.SchedulerType.Adaptive);\n \n config.set(JobManagerOptions.SCHEDULER_SCALING_INTERVAL_MIN, Duration.ofMillis(0));\n\n \n \n \n config.set(WebOptions.REFRESH_INTERVAL, 50L);\n config.set(JobManagerOptions.SLOT_IDLE_TIMEOUT, 50L);\n\n cluster =\n new MiniClusterWithClientResource(\n new MiniClusterResourceConfiguration.Builder()\n .setConfiguration(config)\n .setNumberTaskManagers(numTaskManagers)\n .setNumberSlotsPerTaskManager(slotsPerTaskManager)\n .build());\n cluster.before();\n restClusterClient = cluster.getRestClusterClient();\n }\n }\n\n @AfterClass\n public static void shutDownExistingCluster() {\n if (cluster != null) {\n cluster.after();\n cluster = null;\n }\n }\n\n @Test\n public void testCheckpointRescalingInKeyedState() throws Exception {\n testCheckpointRescalingKeyedState(false);\n }\n\n @Test\n public void 
testCheckpointRescalingOutKeyedState() throws Exception {\n testCheckpointRescalingKeyedState(true);\n }\n\n /**\n * Tests that a job with purely keyed state can be restarted from a checkpoint with a different\n * parallelism.\n */\n public void testCheckpointRescalingKeyedState(boolean scaleOut) throws Exception {\n final int numberKeys = 42;\n final int numberElements = 1000;\n final int parallelism = scaleOut ? totalSlots / 2 : totalSlots;\n final int parallelism2 = scaleOut ? totalSlots : totalSlots / 2;\n final int maxParallelism = 13;\n\n Duration timeout = Duration.ofMinutes(3);\n Deadline deadline = Deadline.now().plus(timeout);\n\n ClusterClient client = cluster.getClusterClient();\n\n try {\n\n JobGraph jobGraph =\n createJobGraphWithKeyedState(\n new Configuration(),\n parallelism,\n maxParallelism,\n numberKeys,\n numberElements);\n\n final JobID jobID = jobGraph.getJobID();\n\n client.submitJob(jobGraph).get();\n\n SubtaskIndexSource.SOURCE_LATCH.trigger();\n\n \n \n assertTrue(\n SubtaskIndexFlatMapper.workCompletedLatch.await(\n deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS));\n\n \n\n Set> actualResult = CollectionSink.getElementsSet();\n\n Set> expectedResult = new HashSet<>();\n\n for (int key = 0; key < numberKeys; key++) {\n int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);\n\n expectedResult.add(\n Tuple2.of(\n KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(\n maxParallelism, parallelism, keyGroupIndex),\n numberElements * key));\n }\n\n assertEquals(expectedResult, actualResult);\n\n \n CollectionSink.clearElementsSet();\n\n waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);\n\n \n \n \n \n waitForNewCheckpoint(jobID, cluster.getMiniCluster());\n\n SubtaskIndexSource.SOURCE_LATCH.reset();\n\n JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();\n for (JobVertex vertex : jobGraph.getVertices()) {\n 
builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);\n }\n\n restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();\n\n waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);\n waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);\n\n SubtaskIndexSource.SOURCE_LATCH.trigger();\n\n client.requestJobResult(jobID).get();\n\n Set> actualResult2 = CollectionSink.getElementsSet();\n\n Set> expectedResult2 = new HashSet<>();\n\n for (int key = 0; key < numberKeys; key++) {\n int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);\n expectedResult2.add(\n Tuple2.of(\n KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(\n maxParallelism, parallelism2, keyGroupIndex),\n key * 2 * numberElements));\n }\n\n assertEquals(expectedResult2, actualResult2);\n\n } finally {\n \n CollectionSink.clearElementsSet();\n }\n }\n\n /**\n * Tests that a job cannot be restarted from a checkpoint with a different parallelism if the\n * rescaled operator has non-partitioned state.\n */\n @Test\n \n\n /**\n * Tests that a job with non partitioned state can be restarted from a checkpoint with a\n * different parallelism if the operator with non-partitioned state are not rescaled.\n */\n @Test\n public void testCheckpointRescalingWithKeyedAndNonPartitionedState() throws Exception {\n int numberKeys = 42;\n int numberElements = 1000;\n int parallelism = totalSlots / 2;\n int parallelism2 = totalSlots;\n int maxParallelism = 13;\n\n Duration timeout = Duration.ofMinutes(3);\n Deadline deadline = Deadline.now().plus(timeout);\n\n ClusterClient client = cluster.getClusterClient();\n\n try {\n\n JobGraph jobGraph =\n createJobGraphWithKeyedAndNonPartitionedOperatorState(\n parallelism,\n maxParallelism,\n parallelism,\n numberKeys,\n numberElements,\n numberElements);\n\n final JobID jobID = jobGraph.getJobID();\n\n client.submitJob(jobGraph).get();\n\n 
SubtaskIndexSource.SOURCE_LATCH.trigger();\n\n \n \n assertTrue(\n SubtaskIndexFlatMapper.workCompletedLatch.await(\n deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS));\n\n \n\n Set> actualResult = CollectionSink.getElementsSet();\n\n Set> expectedResult = new HashSet<>();\n\n for (int key = 0; key < numberKeys; key++) {\n int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);\n\n expectedResult.add(\n Tuple2.of(\n KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(\n maxParallelism, parallelism, keyGroupIndex),\n numberElements * key));\n }\n\n assertEquals(expectedResult, actualResult);\n\n \n CollectionSink.clearElementsSet();\n\n waitForNewCheckpoint(jobID, cluster.getMiniCluster());\n\n SubtaskIndexSource.SOURCE_LATCH.reset();\n\n JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();\n for (JobVertex vertex : jobGraph.getVertices()) {\n if (vertex.getMaxParallelism() >= parallelism2) {\n builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);\n } else {\n builder.setParallelismForJobVertex(\n vertex.getID(), vertex.getMaxParallelism(), vertex.getMaxParallelism());\n }\n }\n\n restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();\n\n \n waitForRunningTasks(restClusterClient, jobID, parallelism + parallelism2);\n waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);\n\n SubtaskIndexSource.SOURCE_LATCH.trigger();\n\n client.requestJobResult(jobID).get();\n\n Set> actualResult2 = CollectionSink.getElementsSet();\n\n Set> expectedResult2 = new HashSet<>();\n\n for (int key = 0; key < numberKeys; key++) {\n int keyGroupIndex = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);\n expectedResult2.add(\n Tuple2.of(\n KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(\n maxParallelism, parallelism2, keyGroupIndex),\n key * 2 * numberElements));\n }\n\n assertEquals(expectedResult2, actualResult2);\n\n } finally {\n \n 
CollectionSink.clearElementsSet();\n }\n }\n\n @Test\n public void testCheckpointRescalingInPartitionedOperatorState() throws Exception {\n testCheckpointRescalingPartitionedOperatorState(\n false, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION);\n }\n\n @Test\n public void testCheckpointRescalingOutPartitionedOperatorState() throws Exception {\n testCheckpointRescalingPartitionedOperatorState(\n true, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION);\n }\n\n @Test\n public void testCheckpointRescalingInBroadcastOperatorState() throws Exception {\n testCheckpointRescalingPartitionedOperatorState(\n false, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST);\n }\n\n @Test\n public void testCheckpointRescalingOutBroadcastOperatorState() throws Exception {\n testCheckpointRescalingPartitionedOperatorState(\n true, OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST);\n }\n\n /** Tests rescaling of partitioned operator state. */\n public void testCheckpointRescalingPartitionedOperatorState(\n boolean scaleOut, OperatorCheckpointMethod checkpointMethod) throws Exception {\n final int parallelism = scaleOut ? totalSlots : totalSlots / 2;\n final int parallelism2 = scaleOut ? 
totalSlots / 2 : totalSlots;\n final int maxParallelism = 13;\n\n ClusterClient client = cluster.getClusterClient();\n\n int counterSize = Math.max(parallelism, parallelism2);\n\n if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION\n || checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST) {\n PartitionedStateSource.checkCorrectSnapshot = new int[counterSize];\n PartitionedStateSource.checkCorrectRestore = new int[counterSize];\n PartitionedStateSource.checkCorrectSnapshots.clear();\n } else {\n throw new UnsupportedOperationException(\"Unsupported method:\" + checkpointMethod);\n }\n\n JobGraph jobGraph =\n createJobGraphWithOperatorState(parallelism, maxParallelism, checkpointMethod);\n \n StateSourceBase.canFinishLatch = new CountDownLatch(1);\n\n final JobID jobID = jobGraph.getJobID();\n\n client.submitJob(jobGraph).get();\n\n \n waitForAllTaskRunning(cluster.getMiniCluster(), jobGraph.getJobID(), false);\n \n StateSourceBase.workStartedLatch.await();\n\n waitForNewCheckpoint(jobID, cluster.getMiniCluster());\n\n JobResourceRequirements.Builder builder = JobResourceRequirements.newBuilder();\n for (JobVertex vertex : jobGraph.getVertices()) {\n builder.setParallelismForJobVertex(vertex.getID(), parallelism2, parallelism2);\n }\n\n restClusterClient.updateJobResourceRequirements(jobID, builder.build()).join();\n\n waitForRunningTasks(restClusterClient, jobID, 2 * parallelism2);\n waitForAvailableSlots(restClusterClient, totalSlots - parallelism2);\n\n StateSourceBase.canFinishLatch.countDown();\n\n client.requestJobResult(jobID).get();\n\n int sumExp = 0;\n int sumAct = 0;\n\n if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION) {\n for (int c : PartitionedStateSource.checkCorrectSnapshot) {\n sumExp += c;\n }\n\n for (int c : PartitionedStateSource.checkCorrectRestore) {\n sumAct += c;\n }\n } else {\n for (int c : PartitionedStateSource.checkCorrectSnapshot) {\n sumExp += c;\n }\n\n for (int c : 
PartitionedStateSource.checkCorrectRestore) {\n sumAct += c;\n }\n\n sumExp *= parallelism2;\n }\n\n assertEquals(sumExp, sumAct);\n }\n\n \n\n private static void configureCheckpointing(CheckpointConfig config) {\n config.setCheckpointInterval(100);\n config.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);\n config.enableUnalignedCheckpoints(true);\n }\n\n private static JobGraph createJobGraphWithOperatorState(\n int parallelism, int maxParallelism, OperatorCheckpointMethod checkpointMethod) {\n\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n configureCheckpointing(env.getCheckpointConfig());\n env.setParallelism(parallelism);\n env.getConfig().setMaxParallelism(maxParallelism);\n env.setRestartStrategy(RestartStrategies.noRestart());\n\n StateSourceBase.workStartedLatch = new CountDownLatch(parallelism);\n\n SourceFunction src;\n\n switch (checkpointMethod) {\n case CHECKPOINTED_FUNCTION:\n src = new PartitionedStateSource(false);\n break;\n case CHECKPOINTED_FUNCTION_BROADCAST:\n src = new PartitionedStateSource(true);\n break;\n case NON_PARTITIONED:\n src = new NonPartitionedStateSource();\n break;\n default:\n throw new IllegalArgumentException(checkpointMethod.name());\n }\n\n DataStream input = env.addSource(src);\n\n input.sinkTo(new DiscardingSink<>());\n\n return env.getStreamGraph().getJobGraph();\n }\n\n public static JobGraph createJobGraphWithKeyedState(\n Configuration configuration,\n int parallelism,\n int maxParallelism,\n int numberKeys,\n int numberElements) {\n StreamExecutionEnvironment env =\n StreamExecutionEnvironment.getExecutionEnvironment(configuration);\n env.setParallelism(parallelism);\n if (0 < maxParallelism) {\n env.getConfig().setMaxParallelism(maxParallelism);\n }\n\n configureCheckpointing(env.getCheckpointConfig());\n env.setRestartStrategy(RestartStrategies.noRestart());\n env.getConfig().setUseSnapshotCompression(true);\n\n DataStream input =\n env.addSource(new 
SubtaskIndexSource(numberKeys, numberElements, parallelism))\n .keyBy(\n new KeySelector() {\n private static final long serialVersionUID =\n -7952298871120320940L;\n\n @Override\n public Integer getKey(Integer value) {\n return value;\n }\n });\n\n SubtaskIndexFlatMapper.workCompletedLatch = new CountDownLatch(numberKeys);\n\n DataStream> result =\n input.flatMap(new SubtaskIndexFlatMapper(numberElements));\n\n result.addSink(new CollectionSink<>());\n\n return env.getStreamGraph().getJobGraph();\n }\n\n private static JobGraph createJobGraphWithKeyedAndNonPartitionedOperatorState(\n int parallelism,\n int maxParallelism,\n int fixedParallelism,\n int numberKeys,\n int numberElements,\n int numberElementsAfterRestart) {\n\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n env.setParallelism(parallelism);\n env.getConfig().setMaxParallelism(maxParallelism);\n configureCheckpointing(env.getCheckpointConfig());\n env.setRestartStrategy(RestartStrategies.noRestart());\n\n DataStream input =\n env.addSource(\n new SubtaskIndexNonPartitionedStateSource(\n numberKeys,\n numberElements,\n numberElementsAfterRestart,\n parallelism))\n .setParallelism(fixedParallelism)\n .setMaxParallelism(fixedParallelism)\n .keyBy(\n new KeySelector() {\n private static final long serialVersionUID =\n -7952298871120320940L;\n\n @Override\n public Integer getKey(Integer value) {\n return value;\n }\n });\n\n SubtaskIndexFlatMapper.workCompletedLatch = new CountDownLatch(numberKeys);\n\n DataStream> result =\n input.flatMap(new SubtaskIndexFlatMapper(numberElements));\n\n result.addSink(new CollectionSink<>());\n\n return env.getStreamGraph().getJobGraph();\n }\n\n private static class SubtaskIndexSource extends RichParallelSourceFunction {\n\n private static final long serialVersionUID = -400066323594122516L;\n\n private final int numberKeys;\n\n private final int originalParallelism;\n protected int numberElements;\n\n protected int counter = 0;\n\n 
private boolean running = true;\n\n private static final OneShotLatch SOURCE_LATCH = new OneShotLatch();\n\n SubtaskIndexSource(int numberKeys, int numberElements, int originalParallelism) {\n this.numberKeys = numberKeys;\n this.numberElements = numberElements;\n this.originalParallelism = originalParallelism;\n }\n\n @Override\n public void run(SourceContext ctx) throws Exception {\n RuntimeContext runtimeContext = getRuntimeContext();\n final int subtaskIndex = runtimeContext.getTaskInfo().getIndexOfThisSubtask();\n\n boolean isRestartedOrRescaled =\n runtimeContext.getTaskInfo().getNumberOfParallelSubtasks()\n != originalParallelism\n || runtimeContext.getTaskInfo().getAttemptNumber() > 0;\n while (running) {\n SOURCE_LATCH.await();\n if (counter < numberElements) {\n synchronized (ctx.getCheckpointLock()) {\n for (int value = subtaskIndex;\n value < numberKeys;\n value +=\n runtimeContext\n .getTaskInfo()\n .getNumberOfParallelSubtasks()) {\n ctx.collect(value);\n }\n\n counter++;\n }\n } else {\n if (isRestartedOrRescaled) {\n running = false;\n } else {\n Thread.sleep(100);\n }\n }\n }\n }\n\n @Override\n public void cancel() {\n running = false;\n }\n }\n\n private static class SubtaskIndexNonPartitionedStateSource extends SubtaskIndexSource\n implements ListCheckpointed {\n\n private static final long serialVersionUID = 8388073059042040203L;\n private final int numElementsAfterRestart;\n\n SubtaskIndexNonPartitionedStateSource(\n int numberKeys,\n int numberElements,\n int numElementsAfterRestart,\n int originalParallelism) {\n super(numberKeys, numberElements, originalParallelism);\n this.numElementsAfterRestart = numElementsAfterRestart;\n }\n\n @Override\n public List snapshotState(long checkpointId, long timestamp) {\n return Collections.singletonList(this.counter);\n }\n\n @Override\n public void restoreState(List state) {\n if (state.size() != 1) {\n throw new RuntimeException(\n \"Test failed due to unexpected recovered state size \" + 
state.size());\n }\n this.counter = state.get(0);\n this.numberElements += numElementsAfterRestart;\n }\n }\n\n private static class SubtaskIndexFlatMapper\n extends RichFlatMapFunction>\n implements CheckpointedFunction {\n\n private static final long serialVersionUID = 5273172591283191348L;\n\n private static CountDownLatch workCompletedLatch = new CountDownLatch(1);\n\n private transient ValueState counter;\n private transient ValueState sum;\n\n private final int numberElements;\n\n SubtaskIndexFlatMapper(int numberElements) {\n this.numberElements = numberElements;\n }\n\n @Override\n public void flatMap(Integer value, Collector> out)\n throws Exception {\n\n int count = counter.value() + 1;\n counter.update(count);\n\n int s = sum.value() + value;\n sum.update(s);\n\n if (count % numberElements == 0) {\n out.collect(\n Tuple2.of(getRuntimeContext().getTaskInfo().getIndexOfThisSubtask(), s));\n workCompletedLatch.countDown();\n }\n }\n\n @Override\n public void snapshotState(FunctionSnapshotContext context) {\n \n }\n\n @Override\n public void initializeState(FunctionInitializationContext context) {\n counter =\n context.getKeyedStateStore()\n .getState(new ValueStateDescriptor<>(\"counter\", Integer.class, 0));\n sum =\n context.getKeyedStateStore()\n .getState(new ValueStateDescriptor<>(\"sum\", Integer.class, 0));\n }\n }\n\n private static class CollectionSink implements SinkFunction {\n\n private static final Set elements =\n Collections.newSetFromMap(new ConcurrentHashMap<>());\n\n private static final long serialVersionUID = -1652452958040267745L;\n\n public static Set getElementsSet() {\n return (Set) elements;\n }\n\n public static void clearElementsSet() {\n elements.clear();\n }\n\n @Override\n public void invoke(IN value) {\n elements.add(value);\n }\n }\n\n private static class StateSourceBase extends RichParallelSourceFunction {\n\n private static final long serialVersionUID = 7512206069681177940L;\n private static CountDownLatch workStartedLatch 
= new CountDownLatch(1);\n private static CountDownLatch canFinishLatch = new CountDownLatch(0);\n\n protected volatile int counter = 0;\n protected volatile boolean running = true;\n\n @Override\n public void run(SourceContext ctx) throws Exception {\n while (running) {\n synchronized (ctx.getCheckpointLock()) {\n ++counter;\n ctx.collect(1);\n }\n\n Thread.sleep(2);\n\n if (counter == 10) {\n workStartedLatch.countDown();\n }\n\n if (counter >= 500) {\n break;\n }\n }\n\n canFinishLatch.await();\n }\n\n @Override\n public void cancel() {\n running = false;\n }\n }\n\n private static class NonPartitionedStateSource extends StateSourceBase\n implements ListCheckpointed {\n\n private static final long serialVersionUID = -8108185918123186841L;\n\n @Override\n public List snapshotState(long checkpointId, long timestamp) {\n return Collections.singletonList(this.counter);\n }\n\n @Override\n public void restoreState(List state) {\n if (!state.isEmpty()) {\n this.counter = state.get(0);\n }\n }\n }\n\n private static class PartitionedStateSource extends StateSourceBase\n implements CheckpointedFunction {\n\n private static final long serialVersionUID = -359715965103593462L;\n private static final int NUM_PARTITIONS = 7;\n\n private transient ListState counterPartitions;\n private final boolean broadcast;\n\n private static final ConcurrentHashMap checkCorrectSnapshots =\n new ConcurrentHashMap<>();\n private static int[] checkCorrectSnapshot;\n private static int[] checkCorrectRestore;\n\n public PartitionedStateSource(boolean broadcast) {\n this.broadcast = broadcast;\n }\n\n @Override\n public void snapshotState(FunctionSnapshotContext context) throws Exception {\n\n if (getRuntimeContext().getTaskInfo().getAttemptNumber() == 0) {\n int[] snapshot =\n checkCorrectSnapshots.computeIfAbsent(\n context.getCheckpointId(),\n (x) -> new int[checkCorrectRestore.length]);\n snapshot[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] = counter;\n }\n\n 
counterPartitions.clear();\n\n int div = counter / NUM_PARTITIONS;\n int mod = counter % NUM_PARTITIONS;\n\n for (int i = 0; i < NUM_PARTITIONS; ++i) {\n int partitionValue = div;\n if (mod > 0) {\n --mod;\n ++partitionValue;\n }\n counterPartitions.add(partitionValue);\n }\n }\n\n @Override\n public void initializeState(FunctionInitializationContext context) throws Exception {\n if (broadcast) {\n this.counterPartitions =\n context.getOperatorStateStore()\n .getUnionListState(\n new ListStateDescriptor<>(\n \"counter_partitions\", IntSerializer.INSTANCE));\n } else {\n this.counterPartitions =\n context.getOperatorStateStore()\n .getListState(\n new ListStateDescriptor<>(\n \"counter_partitions\", IntSerializer.INSTANCE));\n }\n\n if (context.isRestored()) {\n for (int v : counterPartitions.get()) {\n counter += v;\n }\n checkCorrectRestore[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] =\n counter;\n context.getRestoredCheckpointId()\n .ifPresent((id) -> checkCorrectSnapshot = checkCorrectSnapshots.get(id));\n }\n }\n }\n}" }, { "comment": "Is this guaranteed to be non-null and non-empty?", "method_body": "private String getPotentialSetterName() {\n String fieldName = declaringField.getName();\n\n return \"set\" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1);\n }", "target_code": "String fieldName = declaringField.getName();", "method_body_after": "private String getPotentialSetterName() {\n String fieldName = declaringField.getName();\n\n return \"set\" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1);\n }", "context_before": "class %s to set header collection.\", potentialSetterName,\n deserializedHeaders.getClass().getSimpleName());\n return true;\n }\n\n return false;\n } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {\n return false;\n }", "context_after": "class %s to set header collection.\", potentialSetterName,\n 
deserializedHeaders.getClass().getSimpleName());\n return true;\n }\n\n return false;\n } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {\n return false;\n }" }, { "comment": "That's a good question. Maybe we should use `org.quartz.Scheduler.isInStandbyMode()` but in that case we should modify the behavior of `QuartzScheduler.pause()` and `QuartzScheduler.resume()`. Currently, we call `pauseAll()/resumeAll()` but we should probably call `standby()/start()` instead. @machi1990 WDYT?", "method_body": "public void testNoSchedulerInvocations() throws InterruptedException {\n \n assertFalse(Jobs.LATCH.await(5, TimeUnit.SECONDS));\n }", "target_code": "assertFalse(Jobs.LATCH.await(5, TimeUnit.SECONDS));", "method_body_after": "public void testNoSchedulerInvocations() throws InterruptedException {\n assertFalse(quartzScheduler.isRunning());\n }", "context_before": "class DisabledSchedulerTest {\n\n @RegisterExtension\n static final QuarkusUnitTest test = new QuarkusUnitTest()\n .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)\n .addClasses(Jobs.class)\n .addAsResource(new StringAsset(\"quarkus.quartz.enabled=false\"),\n \"application.properties\"));\n\n @Test\n \n\n static class Jobs {\n static final CountDownLatch LATCH = new CountDownLatch(1);\n\n @Scheduled(every = \"1s\")\n void checkEverySecond() {\n LATCH.countDown();\n }\n }\n}", "context_after": "class DisabledSchedulerTest {\n\n @Inject\n Scheduler quartzScheduler;\n\n @RegisterExtension\n static final QuarkusUnitTest test = new QuarkusUnitTest()\n .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)\n .addClasses(Jobs.class)\n .addAsResource(new StringAsset(\"quarkus.scheduler.enabled=false\"),\n \"application.properties\"));\n\n @Test\n \n\n static class Jobs {\n @Scheduled(every = \"1s\")\n void checkEverySecond() {\n \n }\n }\n}" }, { "comment": "We should use Junit5 `assertThat(xxx).isEqualTo()` clause.", "method_body": "public void 
testAdapt() throws Exception {\n KeyedStateBackend keyedStateBackend = new TestKeyedStateBackend();\n AsyncKeyedStateBackendAdaptor adaptor =\n new AsyncKeyedStateBackendAdaptor(keyedStateBackend);\n StateDescriptor descriptor =\n new ValueStateDescriptor<>(\"testState\", BasicTypeInfo.INT_TYPE_INFO);\n\n org.apache.flink.api.common.state.v2.ValueState valueState =\n (org.apache.flink.api.common.state.v2.ValueState)\n adaptor.createState(null, null, descriptor);\n\n \n valueState.clear();\n valueState.update(10);\n assertEquals(10, valueState.value().intValue());\n\n \n valueState\n .asyncClear()\n .thenAccept(\n clear -> {\n valueState\n .asyncUpdate(20)\n .thenAccept(\n update -> {\n assertEquals(20, valueState.value().intValue());\n });\n });\n }", "target_code": "assertEquals(10, valueState.value().intValue());", "method_body_after": "public void testAdapt() throws Exception {\n KeyedStateBackend keyedStateBackend = new TestKeyedStateBackend();\n AsyncKeyedStateBackendAdaptor adaptor =\n new AsyncKeyedStateBackendAdaptor<>(keyedStateBackend);\n StateDescriptor descriptor =\n new ValueStateDescriptor<>(\"testState\", BasicTypeInfo.INT_TYPE_INFO);\n\n org.apache.flink.api.common.state.v2.ValueState valueState =\n adaptor.createState(\n VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, descriptor);\n\n \n valueState.clear();\n assertThat(valueState.value()).isNull();\n valueState.update(10);\n assertThat(valueState.value()).isEqualTo(10);\n\n \n valueState\n .asyncClear()\n .thenAccept(\n clear -> {\n assertThat(valueState.value()).isNull();\n valueState\n .asyncUpdate(20)\n .thenCompose(\n empty -> {\n assertThat(valueState.value()).isEqualTo(20);\n return valueState.asyncValue();\n })\n .thenAccept(\n value -> {\n assertThat(value).isEqualTo(20);\n });\n });\n }", "context_before": "class AsyncKeyedStateBackendAdaptorTest {\n\n @Test\n \n\n static class TestValueState implements ValueState {\n private Integer value;\n\n TestValueState() {\n this.value = 
null;\n }\n\n @Override\n public Integer value() throws IOException {\n return value;\n }\n\n @Override\n public void update(Integer value) throws IOException {\n this.value = value;\n }\n\n @Override\n public void clear() {\n this.value = null;\n }\n }\n\n static class TestKeyedStateBackend implements KeyedStateBackend {\n\n @Override\n public void setCurrentKey(Integer newKey) {}\n\n @Override\n public Integer getCurrentKey() {\n return 0;\n }\n\n @Override\n public TypeSerializer getKeySerializer() {\n return null;\n }\n\n @Override\n public void applyToAllKeys(\n N namespace,\n TypeSerializer namespaceSerializer,\n org.apache.flink.api.common.state.StateDescriptor stateDescriptor,\n KeyedStateFunction function)\n throws Exception {}\n\n @Override\n public Stream getKeys(String state, N namespace) {\n return Stream.empty();\n }\n\n @Override\n public Stream> getKeysAndNamespaces(String state) {\n return Stream.empty();\n }\n\n @Override\n public S getOrCreateKeyedState(\n TypeSerializer namespaceSerializer,\n org.apache.flink.api.common.state.StateDescriptor stateDescriptor)\n throws Exception {\n switch (stateDescriptor.getType()) {\n case VALUE:\n return (S) new TestValueState();\n default:\n throw new IllegalArgumentException(\n \"Unsupported state type: \" + stateDescriptor.getType());\n }\n }\n\n @Override\n public S getPartitionedState(\n N namespace,\n TypeSerializer namespaceSerializer,\n org.apache.flink.api.common.state.StateDescriptor stateDescriptor)\n throws Exception {\n return null;\n }\n\n @Override\n public void dispose() {}\n\n @Override\n public void registerKeySelectionListener(KeySelectionListener listener) {}\n\n @Override\n public boolean deregisterKeySelectionListener(KeySelectionListener listener) {\n return false;\n }\n\n @NotNull\n @Override\n public IS createOrUpdateInternalState(\n @NotNull TypeSerializer namespaceSerializer,\n @NotNull org.apache.flink.api.common.state.StateDescriptor stateDesc,\n @NotNull\n 
StateSnapshotTransformer.StateSnapshotTransformFactory\n snapshotTransformFactory)\n throws Exception {\n return null;\n }\n\n @NotNull\n @Override\n public & Keyed>\n KeyGroupedInternalPriorityQueue create(\n @NotNull String stateName,\n @NotNull TypeSerializer byteOrderedElementSerializer) {\n return null;\n }\n }\n}", "context_after": "class AsyncKeyedStateBackendAdaptorTest {\n\n @Test\n \n\n static class TestValueState implements ValueState {\n private Integer value;\n\n TestValueState() {\n this.value = null;\n }\n\n @Override\n public Integer value() throws IOException {\n return value;\n }\n\n @Override\n public void update(Integer value) throws IOException {\n this.value = value;\n }\n\n @Override\n public void clear() {\n this.value = null;\n }\n }\n\n static class TestKeyedStateBackend implements KeyedStateBackend {\n\n @Override\n public void setCurrentKey(Integer newKey) {}\n\n @Override\n public Integer getCurrentKey() {\n return 0;\n }\n\n @Override\n public TypeSerializer getKeySerializer() {\n return null;\n }\n\n @Override\n public void applyToAllKeys(\n N namespace,\n TypeSerializer namespaceSerializer,\n org.apache.flink.api.common.state.StateDescriptor stateDescriptor,\n KeyedStateFunction function)\n throws Exception {}\n\n @Override\n public Stream getKeys(String state, N namespace) {\n return Stream.empty();\n }\n\n @Override\n public Stream> getKeysAndNamespaces(String state) {\n return Stream.empty();\n }\n\n @Override\n public S getOrCreateKeyedState(\n TypeSerializer namespaceSerializer,\n org.apache.flink.api.common.state.StateDescriptor stateDescriptor)\n throws Exception {\n switch (stateDescriptor.getType()) {\n case VALUE:\n return (S) new TestValueState();\n default:\n throw new IllegalArgumentException(\n \"Unsupported state type: \" + stateDescriptor.getType());\n }\n }\n\n @Override\n public S getPartitionedState(\n N namespace,\n TypeSerializer namespaceSerializer,\n org.apache.flink.api.common.state.StateDescriptor 
stateDescriptor)\n throws Exception {\n return null;\n }\n\n @Override\n public void dispose() {}\n\n @Override\n public void registerKeySelectionListener(KeySelectionListener listener) {}\n\n @Override\n public boolean deregisterKeySelectionListener(KeySelectionListener listener) {\n return false;\n }\n\n @Nonnull\n @Override\n public IS createOrUpdateInternalState(\n @Nonnull TypeSerializer namespaceSerializer,\n @Nonnull org.apache.flink.api.common.state.StateDescriptor stateDesc,\n @Nonnull\n StateSnapshotTransformer.StateSnapshotTransformFactory\n snapshotTransformFactory)\n throws Exception {\n return null;\n }\n\n @Nonnull\n @Override\n public & Keyed>\n KeyGroupedInternalPriorityQueue create(\n @Nonnull String stateName,\n @Nonnull TypeSerializer byteOrderedElementSerializer) {\n return null;\n }\n }\n}" }, { "comment": "Is this ip equals with Leader's IP all the time? Is it possible that getMasterIp return null?", "method_body": "public static void getFrontendsInfo(Catalog catalog, List> infos) {\n String masterIp = Catalog.getCurrentCatalog().getMasterIp();\n\n \n List allFe = catalog.getHaProtocol().getElectableNodes(true /* include leader */);\n allFe.addAll(catalog.getHaProtocol().getObserverNodes());\n List> allFeHosts = convertToHostPortPair(allFe);\n List> helperNodes = catalog.getHelperNodes();\n\n for (Frontend fe : catalog.getFrontends(null /* all */)) {\n\n List info = new ArrayList();\n info.add(fe.getNodeName());\n info.add(fe.getHost());\n\n info.add(FrontendOptions.getHostnameByIp(fe.getHost()));\n info.add(Integer.toString(fe.getEditLogPort()));\n info.add(Integer.toString(Config.http_port));\n\n if (fe.getHost().equals(catalog.getSelfNode().first)) {\n info.add(Integer.toString(Config.query_port));\n info.add(Integer.toString(Config.rpc_port));\n } else {\n info.add(Integer.toString(fe.getQueryPort()));\n info.add(Integer.toString(fe.getRpcPort()));\n }\n\n info.add(fe.getRole().name());\n 
info.add(String.valueOf(fe.getHost().equals(masterIp));\n\n info.add(Integer.toString(catalog.getClusterId()));\n info.add(String.valueOf(isJoin(allFeHosts, fe)));\n\n if (fe.getHost().equals(catalog.getSelfNode().first)) {\n info.add(\"true\");\n info.add(Long.toString(catalog.getEditLog().getMaxJournalId()));\n } else {\n info.add(String.valueOf(fe.isAlive()));\n info.add(Long.toString(fe.getReplayedJournalId()));\n }\n info.add(TimeUtils.longToTimeString(fe.getLastUpdateTime()));\n\n info.add(String.valueOf(isHelperNode(helperNodes, fe)));\n\n info.add(fe.getHeartbeatErrMsg());\n\n infos.add(info);\n }\n }", "target_code": "String masterIp = Catalog.getCurrentCatalog().getMasterIp();", "method_body_after": "public static void getFrontendsInfo(Catalog catalog, List> infos) {\n String masterIp = Catalog.getCurrentCatalog().getMasterIp();\n if (masterIp == null) {\n masterIp = \"\";\n }\n\n \n List allFe = catalog.getHaProtocol().getElectableNodes(true /* include leader */);\n allFe.addAll(catalog.getHaProtocol().getObserverNodes());\n List> allFeHosts = convertToHostPortPair(allFe);\n List> helperNodes = catalog.getHelperNodes();\n\n for (Frontend fe : catalog.getFrontends(null /* all */)) {\n\n List info = new ArrayList();\n info.add(fe.getNodeName());\n info.add(fe.getHost());\n\n info.add(FrontendOptions.getHostnameByIp(fe.getHost()));\n info.add(Integer.toString(fe.getEditLogPort()));\n info.add(Integer.toString(Config.http_port));\n\n if (fe.getHost().equals(catalog.getSelfNode().first)) {\n info.add(Integer.toString(Config.query_port));\n info.add(Integer.toString(Config.rpc_port));\n } else {\n info.add(Integer.toString(fe.getQueryPort()));\n info.add(Integer.toString(fe.getRpcPort()));\n }\n\n info.add(fe.getRole().name());\n info.add(String.valueOf(fe.getHost().equals(masterIp)));\n\n info.add(Integer.toString(catalog.getClusterId()));\n info.add(String.valueOf(isJoin(allFeHosts, fe)));\n\n if (fe.getHost().equals(catalog.getSelfNode().first)) {\n 
info.add(\"true\");\n info.add(Long.toString(catalog.getEditLog().getMaxJournalId()));\n } else {\n info.add(String.valueOf(fe.isAlive()));\n info.add(Long.toString(fe.getReplayedJournalId()));\n }\n info.add(TimeUtils.longToTimeString(fe.getLastUpdateTime()));\n\n info.add(String.valueOf(isHelperNode(helperNodes, fe)));\n\n info.add(fe.getHeartbeatErrMsg());\n\n infos.add(info);\n }\n }", "context_before": "class FrontendsProcNode implements ProcNodeInterface {\n private static final Logger LOG = LogManager.getLogger(FrontendsProcNode.class);\n\n public static final ImmutableList TITLE_NAMES = new ImmutableList.Builder()\n .add(\"Name\").add(\"IP\").add(\"HostName\").add(\"EditLogPort\").add(\"HttpPort\").add(\"QueryPort\").add(\"RpcPort\")\n .add(\"Role\").add(\"IsMaster\").add(\"ClusterId\").add(\"Join\").add(\"Alive\")\n .add(\"ReplayedJournalId\").add(\"LastHeartbeat\").add(\"IsHelper\").add(\"ErrMsg\")\n .build();\n\n public static final int HOSTNAME_INDEX = 2;\n\n private Catalog catalog;\n\n public FrontendsProcNode(Catalog catalog) {\n this.catalog = catalog;\n }\n\n @Override\n public ProcResult fetchResult() {\n BaseProcResult result = new BaseProcResult();\n result.setNames(TITLE_NAMES);\n\n List> infos = Lists.newArrayList();\n\n getFrontendsInfo(catalog, infos);\n\n for (List info : infos) {\n result.addRow(info);\n }\n\n return result;\n }\n\n \n\n private static boolean isHelperNode(List> helperNodes, Frontend fe) {\n return helperNodes.stream().anyMatch(p -> p.first.equals(fe.getHost()) && p.second == fe.getEditLogPort());\n }\n\n private static boolean isJoin(List> allFeHosts, Frontend fe) {\n for (Pair pair : allFeHosts) {\n if (fe.getHost().equals(pair.first) && fe.getEditLogPort() == pair.second) {\n return true;\n }\n }\n return false;\n }\n\n private static List> convertToHostPortPair(List addrs) {\n List> hostPortPair = Lists.newArrayList();\n for (InetSocketAddress addr : addrs) {\n 
hostPortPair.add(Pair.create(addr.getAddress().getHostAddress(), addr.getPort()));\n }\n return hostPortPair;\n }\n}", "context_after": "class FrontendsProcNode implements ProcNodeInterface {\n private static final Logger LOG = LogManager.getLogger(FrontendsProcNode.class);\n\n public static final ImmutableList TITLE_NAMES = new ImmutableList.Builder()\n .add(\"Name\").add(\"IP\").add(\"HostName\").add(\"EditLogPort\").add(\"HttpPort\").add(\"QueryPort\").add(\"RpcPort\")\n .add(\"Role\").add(\"IsMaster\").add(\"ClusterId\").add(\"Join\").add(\"Alive\")\n .add(\"ReplayedJournalId\").add(\"LastHeartbeat\").add(\"IsHelper\").add(\"ErrMsg\")\n .build();\n\n public static final int HOSTNAME_INDEX = 2;\n\n private Catalog catalog;\n\n public FrontendsProcNode(Catalog catalog) {\n this.catalog = catalog;\n }\n\n @Override\n public ProcResult fetchResult() {\n BaseProcResult result = new BaseProcResult();\n result.setNames(TITLE_NAMES);\n\n List> infos = Lists.newArrayList();\n\n getFrontendsInfo(catalog, infos);\n\n for (List info : infos) {\n result.addRow(info);\n }\n\n return result;\n }\n\n \n\n private static boolean isHelperNode(List> helperNodes, Frontend fe) {\n return helperNodes.stream().anyMatch(p -> p.first.equals(fe.getHost()) && p.second == fe.getEditLogPort());\n }\n\n private static boolean isJoin(List> allFeHosts, Frontend fe) {\n for (Pair pair : allFeHosts) {\n if (fe.getHost().equals(pair.first) && fe.getEditLogPort() == pair.second) {\n return true;\n }\n }\n return false;\n }\n\n private static List> convertToHostPortPair(List addrs) {\n List> hostPortPair = Lists.newArrayList();\n for (InetSocketAddress addr : addrs) {\n hostPortPair.add(Pair.create(addr.getAddress().getHostAddress(), addr.getPort()));\n }\n return hostPortPair;\n }\n}" }, { "comment": "Shall we add java docs to this public method", "method_body": "protected static MapValue getPathParamOrderMap(AttachedFunction resource) {\n Object annotation = 
resource.getAnnotation(HTTP_PACKAGE_PATH, ANN_NAME_PARAM_ORDER_CONFIG);\n return annotation == null ? new MapValueImpl() :\n (MapValue) ((MapValue) annotation).get(ANN_FIELD_PATH_PARAM_ORDER);\n }", "target_code": "return annotation == null ? new MapValueImpl() :", "method_body_after": "protected static MapValue getPathParamOrderMap(AttachedFunction resource) {\n Object annotation = resource.getAnnotation(HTTP_PACKAGE_PATH, ANN_NAME_PARAM_ORDER_CONFIG);\n return annotation == null ? new MapValueImpl() :\n (MapValue) ((MapValue) annotation).get(ANN_FIELD_PATH_PARAM_ORDER);\n }", "context_before": "class HttpResource {\n\n private static final Logger log = LoggerFactory.getLogger(HttpResource.class);\n\n private static final String METHODS_FIELD = \"methods\";\n private static final String PATH_FIELD = \"path\";\n private static final String BODY_FIELD = \"body\";\n private static final String CONSUMES_FIELD = \"consumes\";\n private static final String PRODUCES_FIELD = \"produces\";\n private static final String CORS_FIELD = \"cors\";\n private static final String TRANSACTION_INFECTABLE_FIELD = \"transactionInfectable\";\n\n private AttachedFunction balResource;\n private List methods;\n private String path;\n private String entityBodyAttribute;\n private List consumes;\n private List produces;\n private List producesSubTypes;\n private CorsHeaders corsHeaders;\n private SignatureParams signatureParams;\n private HttpService parentService;\n private boolean transactionInfectable = true; \n private boolean interruptible;\n\n private boolean transactionAnnotated = false;\n\n protected HttpResource(AttachedFunction resource, HttpService parentService) {\n this.balResource = resource;\n this.parentService = parentService;\n this.producesSubTypes = new ArrayList<>();\n }\n\n public boolean isTransactionAnnotated() {\n return transactionAnnotated;\n }\n\n public String getName() {\n return balResource.getName();\n }\n\n public String getServiceName() {\n return 
balResource.parent.getName();\n }\n\n public SignatureParams getSignatureParams() {\n return signatureParams;\n }\n\n public HttpService getParentService() {\n return parentService;\n }\n\n public AttachedFunction getBalResource() {\n return balResource;\n }\n\n public List getMethods() {\n return methods;\n }\n\n public void setMethods(List methods) {\n this.methods = methods;\n }\n\n public String getPath() {\n return path;\n }\n\n public void setPath(String resourcePath) {\n if (resourcePath == null || resourcePath.isEmpty()) {\n log.debug(\"Path not specified in the Resource instance, using default sub path\");\n path = balResource.getName();\n } else {\n path = resourcePath;\n }\n }\n\n public List getConsumes() {\n return consumes;\n }\n\n public void setConsumes(List consumes) {\n this.consumes = consumes;\n }\n\n public List getProduces() {\n return produces;\n }\n\n public void setProduces(List produces) {\n this.produces = produces;\n\n if (produces != null) {\n List subAttributeValues = produces.stream()\n .map(mediaType -> mediaType.trim().substring(0, mediaType.indexOf('/')))\n .distinct()\n .collect(Collectors.toList());\n setProducesSubTypes(subAttributeValues);\n }\n }\n\n public List getProducesSubTypes() {\n return producesSubTypes;\n }\n\n public void setProducesSubTypes(List producesSubTypes) {\n this.producesSubTypes = producesSubTypes;\n }\n\n public CorsHeaders getCorsHeaders() {\n return corsHeaders;\n }\n\n public void setCorsHeaders(CorsHeaders corsHeaders) {\n this.corsHeaders = corsHeaders;\n }\n\n public boolean isTransactionInfectable() {\n return transactionInfectable;\n }\n\n public void setTransactionInfectable(boolean transactionInfectable) {\n this.transactionInfectable = transactionInfectable;\n }\n\n public boolean isInterruptible() {\n return interruptible;\n }\n\n public void setInterruptible(boolean interruptible) {\n this.interruptible = interruptible;\n }\n\n public String getEntityBodyAttributeValue() {\n return 
entityBodyAttribute;\n }\n\n public void setEntityBodyAttributeValue(String entityBodyAttribute) {\n this.entityBodyAttribute = entityBodyAttribute;\n }\n\n public static HttpResource buildHttpResource(AttachedFunction resource, HttpService httpService) {\n HttpResource httpResource = new HttpResource(resource, httpService);\n MapValue resourceConfigAnnotation = getResourceConfigAnnotation(resource);\n httpResource.setInterruptible(httpService.isInterruptible() || hasInterruptibleAnnotation(resource));\n\n setupTransactionAnnotations(resource, httpResource);\n if (checkConfigAnnotationAvailability(resourceConfigAnnotation)) {\n httpResource.setPath(resourceConfigAnnotation.getStringValue(PATH_FIELD));\n httpResource.setMethods(\n getAsStringList(resourceConfigAnnotation.getArrayValue(METHODS_FIELD).getStringArray()));\n httpResource.setConsumes(\n getAsStringList(resourceConfigAnnotation.getArrayValue(CONSUMES_FIELD).getStringArray()));\n httpResource.setProduces(\n getAsStringList(resourceConfigAnnotation.getArrayValue(PRODUCES_FIELD).getStringArray()));\n httpResource.setEntityBodyAttributeValue(resourceConfigAnnotation.getStringValue(BODY_FIELD));\n httpResource.setCorsHeaders(CorsHeaders.buildCorsHeaders(resourceConfigAnnotation.getMapValue(CORS_FIELD)));\n httpResource\n .setTransactionInfectable(resourceConfigAnnotation.getBooleanValue(TRANSACTION_INFECTABLE_FIELD));\n\n processResourceCors(httpResource, httpService);\n httpResource.prepareAndValidateSignatureParams();\n return httpResource;\n }\n\n if (log.isDebugEnabled()) {\n log.debug(\"resourceConfig not specified in the Resource instance, using default sub path\");\n }\n httpResource.setPath(resource.getName());\n httpResource.prepareAndValidateSignatureParams();\n return httpResource;\n }\n\n private static void setupTransactionAnnotations(AttachedFunction resource, HttpResource httpResource) {\n MapValue transactionConfigAnnotation = HttpUtil.getTransactionConfigAnnotation(resource,\n 
TransactionConstants.TRANSACTION_PACKAGE_PATH);\n if (transactionConfigAnnotation != null) {\n httpResource.transactionAnnotated = true;\n }\n }\n\n public static MapValue getResourceConfigAnnotation(AttachedFunction resource) {\n return (MapValue) resource.getAnnotation(HTTP_PACKAGE_PATH, ANN_NAME_RESOURCE_CONFIG);\n }\n\n \n\n private static boolean hasInterruptibleAnnotation(AttachedFunction resource) {\n return resource.getAnnotation(PACKAGE_BALLERINA_BUILTIN, ANN_NAME_INTERRUPTIBLE) != null;\n }\n\n private static List getAsStringList(Object[] values) {\n if (values == null) {\n return null;\n }\n List valuesList = new ArrayList<>();\n for (Object val : values) {\n valuesList.add(val.toString().trim());\n }\n return !valuesList.isEmpty() ? valuesList : null;\n }\n\n private static void processResourceCors(HttpResource resource, HttpService service) {\n CorsHeaders corsHeaders = resource.getCorsHeaders();\n if (!corsHeaders.isAvailable()) {\n \n resource.setCorsHeaders(service.getCorsHeaders());\n return;\n }\n\n if (corsHeaders.getAllowOrigins() == null) {\n corsHeaders.setAllowOrigins(Stream.of(\"*\").collect(Collectors.toList()));\n }\n\n if (corsHeaders.getAllowMethods() != null) {\n return;\n }\n\n if (resource.getMethods() != null) {\n corsHeaders.setAllowMethods(resource.getMethods());\n return;\n }\n corsHeaders.setAllowMethods(DispatcherUtil.addAllMethods());\n }\n\n private void prepareAndValidateSignatureParams() {\n signatureParams = new SignatureParams(this);\n signatureParams.validate();\n }\n\n public List getParamTypes() {\n List paramTypes = new ArrayList<>();\n paramTypes.addAll(Arrays.asList(this.balResource.getParameterType()));\n return paramTypes;\n }\n}", "context_after": "class HttpResource {\n\n private static final Logger log = LoggerFactory.getLogger(HttpResource.class);\n\n private static final String METHODS_FIELD = \"methods\";\n private static final String PATH_FIELD = \"path\";\n private static final String BODY_FIELD = 
\"body\";\n private static final String CONSUMES_FIELD = \"consumes\";\n private static final String PRODUCES_FIELD = \"produces\";\n private static final String CORS_FIELD = \"cors\";\n private static final String TRANSACTION_INFECTABLE_FIELD = \"transactionInfectable\";\n\n private AttachedFunction balResource;\n private List methods;\n private String path;\n private String entityBodyAttribute;\n private List consumes;\n private List produces;\n private List producesSubTypes;\n private CorsHeaders corsHeaders;\n private SignatureParams signatureParams;\n private HttpService parentService;\n private boolean transactionInfectable = true; \n private boolean interruptible;\n\n private boolean transactionAnnotated = false;\n\n protected HttpResource(AttachedFunction resource, HttpService parentService) {\n this.balResource = resource;\n this.parentService = parentService;\n this.producesSubTypes = new ArrayList<>();\n }\n\n public boolean isTransactionAnnotated() {\n return transactionAnnotated;\n }\n\n public String getName() {\n return balResource.getName();\n }\n\n public String getServiceName() {\n return balResource.parent.getName();\n }\n\n public SignatureParams getSignatureParams() {\n return signatureParams;\n }\n\n public HttpService getParentService() {\n return parentService;\n }\n\n public AttachedFunction getBalResource() {\n return balResource;\n }\n\n public List getMethods() {\n return methods;\n }\n\n public void setMethods(List methods) {\n this.methods = methods;\n }\n\n public String getPath() {\n return path;\n }\n\n public void setPath(String resourcePath) {\n if (resourcePath == null || resourcePath.isEmpty()) {\n log.debug(\"Path not specified in the Resource instance, using default sub path\");\n path = balResource.getName();\n } else {\n path = resourcePath;\n }\n }\n\n public List getConsumes() {\n return consumes;\n }\n\n public void setConsumes(List consumes) {\n this.consumes = consumes;\n }\n\n public List getProduces() {\n return 
produces;\n }\n\n public void setProduces(List produces) {\n this.produces = produces;\n\n if (produces != null) {\n List subAttributeValues = produces.stream()\n .map(mediaType -> mediaType.trim().substring(0, mediaType.indexOf('/')))\n .distinct()\n .collect(Collectors.toList());\n setProducesSubTypes(subAttributeValues);\n }\n }\n\n public List getProducesSubTypes() {\n return producesSubTypes;\n }\n\n public void setProducesSubTypes(List producesSubTypes) {\n this.producesSubTypes = producesSubTypes;\n }\n\n public CorsHeaders getCorsHeaders() {\n return corsHeaders;\n }\n\n public void setCorsHeaders(CorsHeaders corsHeaders) {\n this.corsHeaders = corsHeaders;\n }\n\n public boolean isTransactionInfectable() {\n return transactionInfectable;\n }\n\n public void setTransactionInfectable(boolean transactionInfectable) {\n this.transactionInfectable = transactionInfectable;\n }\n\n public boolean isInterruptible() {\n return interruptible;\n }\n\n public void setInterruptible(boolean interruptible) {\n this.interruptible = interruptible;\n }\n\n public String getEntityBodyAttributeValue() {\n return entityBodyAttribute;\n }\n\n public void setEntityBodyAttributeValue(String entityBodyAttribute) {\n this.entityBodyAttribute = entityBodyAttribute;\n }\n\n public static HttpResource buildHttpResource(AttachedFunction resource, HttpService httpService) {\n HttpResource httpResource = new HttpResource(resource, httpService);\n MapValue resourceConfigAnnotation = getResourceConfigAnnotation(resource);\n httpResource.setInterruptible(httpService.isInterruptible() || hasInterruptibleAnnotation(resource));\n\n setupTransactionAnnotations(resource, httpResource);\n if (checkConfigAnnotationAvailability(resourceConfigAnnotation)) {\n httpResource.setPath(resourceConfigAnnotation.getStringValue(PATH_FIELD));\n httpResource.setMethods(\n getAsStringList(resourceConfigAnnotation.getArrayValue(METHODS_FIELD).getStringArray()));\n httpResource.setConsumes(\n 
getAsStringList(resourceConfigAnnotation.getArrayValue(CONSUMES_FIELD).getStringArray()));\n httpResource.setProduces(\n getAsStringList(resourceConfigAnnotation.getArrayValue(PRODUCES_FIELD).getStringArray()));\n httpResource.setEntityBodyAttributeValue(resourceConfigAnnotation.getStringValue(BODY_FIELD));\n httpResource.setCorsHeaders(CorsHeaders.buildCorsHeaders(resourceConfigAnnotation.getMapValue(CORS_FIELD)));\n httpResource\n .setTransactionInfectable(resourceConfigAnnotation.getBooleanValue(TRANSACTION_INFECTABLE_FIELD));\n\n processResourceCors(httpResource, httpService);\n httpResource.prepareAndValidateSignatureParams();\n return httpResource;\n }\n\n if (log.isDebugEnabled()) {\n log.debug(\"resourceConfig not specified in the Resource instance, using default sub path\");\n }\n httpResource.setPath(resource.getName());\n httpResource.prepareAndValidateSignatureParams();\n return httpResource;\n }\n\n private static void setupTransactionAnnotations(AttachedFunction resource, HttpResource httpResource) {\n MapValue transactionConfigAnnotation = HttpUtil.getTransactionConfigAnnotation(resource,\n TransactionConstants.TRANSACTION_PACKAGE_PATH);\n if (transactionConfigAnnotation != null) {\n httpResource.transactionAnnotated = true;\n }\n }\n\n /**\n * Get the `MapValue` resource configuration of the given resource.\n *\n * @param resource The resource\n * @return the resource configuration of the given resource\n */\n public static MapValue getResourceConfigAnnotation(AttachedFunction resource) {\n return (MapValue) resource.getAnnotation(HTTP_PACKAGE_PATH, ANN_NAME_RESOURCE_CONFIG);\n }\n\n \n\n private static boolean hasInterruptibleAnnotation(AttachedFunction resource) {\n return resource.getAnnotation(PACKAGE_BALLERINA_BUILTIN, ANN_NAME_INTERRUPTIBLE) != null;\n }\n\n private static List getAsStringList(Object[] values) {\n if (values == null) {\n return null;\n }\n List valuesList = new ArrayList<>();\n for (Object val : values) {\n 
valuesList.add(val.toString().trim());\n }\n return !valuesList.isEmpty() ? valuesList : null;\n }\n\n private static void processResourceCors(HttpResource resource, HttpService service) {\n CorsHeaders corsHeaders = resource.getCorsHeaders();\n if (!corsHeaders.isAvailable()) {\n \n resource.setCorsHeaders(service.getCorsHeaders());\n return;\n }\n\n if (corsHeaders.getAllowOrigins() == null) {\n corsHeaders.setAllowOrigins(Stream.of(\"*\").collect(Collectors.toList()));\n }\n\n if (corsHeaders.getAllowMethods() != null) {\n return;\n }\n\n if (resource.getMethods() != null) {\n corsHeaders.setAllowMethods(resource.getMethods());\n return;\n }\n corsHeaders.setAllowMethods(DispatcherUtil.addAllMethods());\n }\n\n private void prepareAndValidateSignatureParams() {\n signatureParams = new SignatureParams(this);\n signatureParams.validate();\n }\n\n public List getParamTypes() {\n List paramTypes = new ArrayList<>();\n paramTypes.addAll(Arrays.asList(this.balResource.getParameterType()));\n return paramTypes;\n }\n}" }, { "comment": "i think, maybe could simplify to ```java new DeleteFromUsingCommand(nameParts, tableAlias, isTempPart, partitions, logicalQuery, Optional.empty()).run(ctx, executor); ```", "method_body": "public void run(ConnectContext ctx, StmtExecutor executor) throws Exception {\n LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext());\n updateSessionVariableForDelete(ctx.getSessionVariable());\n NereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext());\n planner.plan(logicalPlanAdapter, ctx.getSessionVariable().toThrift());\n executor.setPlanner(planner);\n executor.checkBlockRules();\n \n if (planner.getPhysicalPlan() instanceof PhysicalEmptyRelation) {\n Env.getCurrentEnv()\n .getDeleteHandler().processEmptyRelation(ctx.getState());\n return;\n }\n Optional> optFilter = (planner.getPhysicalPlan()\n .>collect(PhysicalFilter.class::isInstance)).stream()\n .findAny();\n Optional optScan = 
(planner.getPhysicalPlan()\n .collect(PhysicalOlapScan.class::isInstance)).stream()\n .findAny();\n Optional optRelation = (logicalQuery\n .collect(UnboundRelation.class::isInstance)).stream()\n .findAny();\n Preconditions.checkArgument(optFilter.isPresent(), \"delete command must contain filter\");\n Preconditions.checkArgument(optScan.isPresent(), \"delete command could be only used on olap table\");\n Preconditions.checkArgument(optRelation.isPresent(), \"delete command could be only used on olap table\");\n PhysicalOlapScan scan = optScan.get();\n UnboundRelation relation = optRelation.get();\n PhysicalFilter filter = optFilter.get();\n\n if (!Env.getCurrentEnv().getAccessManager()\n .checkTblPriv(ConnectContext.get(), scan.getDatabase().getCatalog().getName(),\n scan.getDatabase().getFullName(),\n scan.getTable().getName(), PrivPredicate.LOAD)) {\n String message = ErrorCode.ERR_TABLEACCESS_DENIED_ERROR.formatErrorMsg(\"LOAD\",\n ConnectContext.get().getQualifiedUser(), ConnectContext.get().getRemoteIP(),\n scan.getDatabase().getFullName() + \": \" + scan.getTable().getName());\n throw new AnalysisException(message);\n }\n\n \n OlapTable olapTable = scan.getTable();\n Set columns = olapTable.getFullSchema().stream().map(Column::getName).collect(Collectors.toSet());\n try {\n Plan plan = planner.getPhysicalPlan();\n checkSubQuery(plan);\n for (Expression conjunct : filter.getConjuncts()) {\n conjunct.collect(SlotReference.class::isInstance)\n .forEach(s -> checkColumn(columns, s, olapTable));\n checkPredicate(conjunct);\n }\n } catch (Exception e) {\n try {\n new DeleteFromUsingCommand(nameParts, tableAlias, isTempPart, partitions,\n logicalQuery, Optional.empty()).run(ctx, executor);\n return;\n } catch (Exception e2) {\n throw e;\n }\n }\n\n if (olapTable.getKeysType() == KeysType.UNIQUE_KEYS && olapTable.getEnableUniqueKeyMergeOnWrite()) {\n EqualTo deleteSignEqualTo =\n (EqualTo) new NereidsParser().parseExpression(\"__DORIS_DELETE_SIGN__ = 1\");\n 
UpdateCommand updateCommand = new UpdateCommand(this.nameParts, this.tableAlias,\n Collections.singletonList(deleteSignEqualTo), this.logicalQuery, Optional.empty());\n updateCommand.setDeleteCommand(true);\n updateCommand.run(ctx, executor);\n return;\n }\n\n \n List predicates = planner.getScanNodes().get(0).getConjuncts().stream()\n .filter(c -> {\n \n List slotRefs = Lists.newArrayList();\n c.collect(SlotRef.class::isInstance, slotRefs);\n return slotRefs.stream().map(SlotRef.class::cast)\n .noneMatch(s -> Column.DELETE_SIGN.equalsIgnoreCase(s.getColumnName()));\n })\n .map(c -> {\n if (c instanceof Predicate) {\n return (Predicate) c;\n } else {\n throw new AnalysisException(\"non predicate in filter: \" + c.toSql());\n }\n }).collect(Collectors.toList());\n if (predicates.isEmpty()) {\n \n \n throw new AnalysisException(\"delete all rows is forbidden temporary.\");\n }\n Env.getCurrentEnv()\n .getDeleteHandler()\n .process((Database) scan.getDatabase(), scan.getTable(),\n Lists.newArrayList(relation.getPartNames()), predicates, ctx.getState());\n }", "target_code": "updateCommand.run(ctx, executor);", "method_body_after": "public void run(ConnectContext ctx, StmtExecutor executor) throws Exception {\n LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext());\n updateSessionVariableForDelete(ctx.getSessionVariable());\n NereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext());\n planner.plan(logicalPlanAdapter, ctx.getSessionVariable().toThrift());\n executor.setPlanner(planner);\n executor.checkBlockRules();\n \n if (planner.getPhysicalPlan() instanceof PhysicalEmptyRelation) {\n Env.getCurrentEnv()\n .getDeleteHandler().processEmptyRelation(ctx.getState());\n return;\n }\n Optional> optFilter = (planner.getPhysicalPlan()\n .>collect(PhysicalFilter.class::isInstance)).stream()\n .findAny();\n Optional optScan = (planner.getPhysicalPlan()\n .collect(PhysicalOlapScan.class::isInstance)).stream()\n 
.findAny();\n Optional optRelation = (logicalQuery\n .collect(UnboundRelation.class::isInstance)).stream()\n .findAny();\n Preconditions.checkArgument(optFilter.isPresent(), \"delete command must contain filter\");\n Preconditions.checkArgument(optScan.isPresent(), \"delete command could be only used on olap table\");\n Preconditions.checkArgument(optRelation.isPresent(), \"delete command could be only used on olap table\");\n PhysicalOlapScan scan = optScan.get();\n UnboundRelation relation = optRelation.get();\n PhysicalFilter filter = optFilter.get();\n\n if (!Env.getCurrentEnv().getAccessManager()\n .checkTblPriv(ConnectContext.get(), scan.getDatabase().getCatalog().getName(),\n scan.getDatabase().getFullName(),\n scan.getTable().getName(), PrivPredicate.LOAD)) {\n String message = ErrorCode.ERR_TABLEACCESS_DENIED_ERROR.formatErrorMsg(\"LOAD\",\n ConnectContext.get().getQualifiedUser(), ConnectContext.get().getRemoteIP(),\n scan.getDatabase().getFullName() + \": \" + scan.getTable().getName());\n throw new AnalysisException(message);\n }\n\n \n OlapTable olapTable = scan.getTable();\n Set columns = olapTable.getFullSchema().stream().map(Column::getName).collect(Collectors.toSet());\n try {\n Plan plan = planner.getPhysicalPlan();\n checkSubQuery(plan);\n for (Expression conjunct : filter.getConjuncts()) {\n conjunct.collect(SlotReference.class::isInstance)\n .forEach(s -> checkColumn(columns, s, olapTable));\n checkPredicate(conjunct);\n }\n } catch (Exception e) {\n try {\n new DeleteFromUsingCommand(nameParts, tableAlias, isTempPart, partitions,\n logicalQuery, Optional.empty()).run(ctx, executor);\n return;\n } catch (Exception e2) {\n throw e;\n }\n }\n\n if (olapTable.getKeysType() == KeysType.UNIQUE_KEYS && olapTable.getEnableUniqueKeyMergeOnWrite()\n && !olapTable.getEnableDeleteOnDeletePredicate()) {\n new DeleteFromUsingCommand(nameParts, tableAlias, isTempPart, partitions,\n logicalQuery, Optional.empty()).run(ctx, executor);\n return;\n }\n\n \n List 
predicates = planner.getScanNodes().get(0).getConjuncts().stream()\n .filter(c -> {\n \n List slotRefs = Lists.newArrayList();\n c.collect(SlotRef.class::isInstance, slotRefs);\n return slotRefs.stream().map(SlotRef.class::cast)\n .noneMatch(s -> Column.DELETE_SIGN.equalsIgnoreCase(s.getColumnName()));\n })\n .map(c -> {\n if (c instanceof Predicate) {\n return (Predicate) c;\n } else {\n throw new AnalysisException(\"non predicate in filter: \" + c.toSql());\n }\n }).collect(Collectors.toList());\n if (predicates.isEmpty()) {\n \n \n throw new AnalysisException(\"delete all rows is forbidden temporary.\");\n }\n Env.getCurrentEnv()\n .getDeleteHandler()\n .process((Database) scan.getDatabase(), scan.getTable(),\n Lists.newArrayList(relation.getPartNames()), predicates, ctx.getState());\n }", "context_before": "class DeleteFromCommand extends Command implements ForwardWithSync {\n\n private final List nameParts;\n private final String tableAlias;\n private final boolean isTempPart;\n private final List partitions;\n private final LogicalPlan logicalQuery;\n\n /**\n * constructor\n */\n public DeleteFromCommand(List nameParts, String tableAlias,\n boolean isTempPart, List partitions, LogicalPlan logicalQuery) {\n super(PlanType.DELETE_COMMAND);\n this.nameParts = Utils.copyRequiredList(nameParts);\n this.tableAlias = tableAlias;\n this.isTempPart = isTempPart;\n this.partitions = Utils.copyRequiredList(partitions);\n this.logicalQuery = logicalQuery;\n }\n\n @Override\n \n\n private void updateSessionVariableForDelete(SessionVariable sessionVariable) {\n sessionVariable.setIsSingleSetVar(true);\n try {\n \n VariableMgr.setVar(sessionVariable,\n new SetVar(SessionVariable.FORBID_UNKNOWN_COLUMN_STATS, new StringLiteral(\"false\")));\n \n List disableRules = Lists.newArrayList(\n RuleType.ELIMINATE_NOT_NULL.name(), RuleType.INFER_FILTER_NOT_NULL.name());\n disableRules.addAll(sessionVariable.getDisableNereidsRuleNames());\n VariableMgr.setVar(sessionVariable,\n new 
SetVar(SessionVariable.DISABLE_NEREIDS_RULES,\n new StringLiteral(StringUtils.join(disableRules, \",\"))));\n } catch (Exception e) {\n throw new AnalysisException(\"set session variable by delete from command failed\", e);\n }\n }\n\n private void checkColumn(Set tableColumns, SlotReference slotReference, OlapTable table) {\n \n if (!slotReference.getColumn().isPresent()) {\n throw new AnalysisException(\"\");\n }\n Column column = slotReference.getColumn().get();\n\n if (Column.DELETE_SIGN.equalsIgnoreCase(column.getName())) {\n return;\n }\n \n if (Column.isShadowColumn(column.getName())) {\n throw new AnalysisException(\"Can not apply delete condition to shadow column \" + column.getName());\n }\n \n String shadowName = Column.getShadowName(column.getName());\n if (tableColumns.contains(shadowName)) {\n throw new AnalysisException(String.format(\"Column '%s' is under\"\n + \" schema change operation. Do not allow delete operation\", shadowName));\n }\n \n \n \n \n if (!column.getType().isScalarType()\n || (column.getType().isOnlyMetricType() && !column.getType().isJsonbType())) {\n throw new AnalysisException(String.format(\"Can not apply delete condition to column type: \"\n + column.getType()));\n }\n \n if (slotReference.getDataType().isFloatLikeType()) {\n throw new AnalysisException(\"Column[\" + column.getName() + \"] type is float or double.\");\n }\n \n if (!column.isKey()) {\n if (table.getKeysType() == KeysType.AGG_KEYS) {\n throw new AnalysisException(\"delete predicate on value column only supports Unique table with\"\n + \" merge-on-write enabled and Duplicate table, but \" + \"Table[\" + table.getName()\n + \"] is an Aggregate table.\");\n } else if (table.getKeysType() == KeysType.UNIQUE_KEYS && !table.getEnableUniqueKeyMergeOnWrite()) {\n throw new AnalysisException(\"delete predicate on value column only supports Unique table with\"\n + \" merge-on-write enabled and Duplicate table, but \" + \"Table[\" + table.getName()\n + \"] is an unique 
table without merge-on-write.\");\n }\n }\n\n for (String indexName : table.getIndexNameToId().keySet()) {\n MaterializedIndexMeta meta = table.getIndexMetaByIndexId(table.getIndexIdByName(indexName));\n Set columns = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);\n meta.getSchema().stream()\n .map(col -> org.apache.doris.analysis.CreateMaterializedViewStmt.mvColumnBreaker(col.getName()))\n .forEach(name -> columns.add(name));\n if (!columns.contains(column.getName())) {\n throw new AnalysisException(\"Column[\" + column.getName() + \"] not exist in index \" + indexName\n + \". maybe you need drop the corresponding materialized-view.\");\n }\n }\n }\n\n private void checkSubQuery(Plan plan) {\n while (true) {\n if (!(plan instanceof PhysicalDistribute\n || plan instanceof PhysicalOlapScan\n || plan instanceof PhysicalProject\n || plan instanceof PhysicalFilter)) {\n throw new AnalysisException(\"Where clause only supports compound predicate,\"\n + \" binary predicate, is_null predicate or in predicate.\");\n }\n if (plan instanceof PhysicalOlapScan) {\n break;\n }\n plan = ((PhysicalUnary) plan).child();\n }\n }\n\n private void checkComparisonPredicate(ComparisonPredicate cp) {\n if (!(cp.left() instanceof SlotReference)) {\n throw new AnalysisException(\n \"Left expr of binary predicate should be column name, predicate: \" + cp.toSql()\n + \", left expr type:\" + cp.left().getDataType());\n }\n if (!(cp.right() instanceof Literal)) {\n throw new AnalysisException(\n \"Right expr of binary predicate should be value, predicate: \" + cp.toSql()\n + \", right expr type:\" + cp.right().getDataType());\n }\n }\n\n private void checkIsNull(IsNull isNull) {\n if (!(isNull.child() instanceof SlotReference)) {\n throw new AnalysisException(\n \"Child expr of is_null predicate should be column name, predicate: \" + isNull.toSql());\n }\n }\n\n private void checkInPredicate(InPredicate in) {\n if (!(in.getCompareExpr() instanceof SlotReference)) {\n throw new 
AnalysisException(\n \"Left expr of in predicate should be column name, predicate: \" + in.toSql()\n + \", left expr type:\" + in.getCompareExpr().getDataType());\n }\n int maxAllowedInElementNumOfDelete = Config.max_allowed_in_element_num_of_delete;\n if (in.getOptions().size() > maxAllowedInElementNumOfDelete) {\n throw new AnalysisException(\"Element num of in predicate should not be more than \"\n + maxAllowedInElementNumOfDelete);\n }\n for (Expression option : in.getOptions()) {\n if (!(option instanceof Literal)) {\n throw new AnalysisException(\"Child of in predicate should be value, but get \" + option);\n }\n }\n }\n\n private void checkPredicate(Expression predicate) {\n if (predicate instanceof And) {\n checkPredicate(((And) predicate).left());\n checkPredicate(((And) predicate).right());\n } else if (predicate instanceof ComparisonPredicate) {\n checkComparisonPredicate((ComparisonPredicate) predicate);\n } else if (predicate instanceof IsNull) {\n checkIsNull((IsNull) predicate);\n } else if (predicate instanceof Not) {\n Expression child = ((Not) predicate).child();\n if (child instanceof IsNull) {\n checkIsNull((IsNull) child);\n } else if (child instanceof ComparisonPredicate) {\n checkComparisonPredicate((ComparisonPredicate) child);\n } else if (child instanceof InPredicate) {\n checkInPredicate((InPredicate) child);\n } else {\n throw new AnalysisException(\"Where clause only supports compound predicate,\"\n + \" binary predicate, is_null predicate or in predicate. But we meet \"\n + child.toSql());\n }\n } else if (predicate instanceof InPredicate) {\n checkInPredicate((InPredicate) predicate);\n } else {\n throw new AnalysisException(\"Where clause only supports compound predicate,\"\n + \" binary predicate, is_null predicate or in predicate. 
But we meet \"\n + predicate.toSql());\n }\n }\n\n @Override\n public R accept(PlanVisitor visitor, C context) {\n return visitor.visitDeleteFromCommand(this, context);\n }\n}", "context_after": "class DeleteFromCommand extends Command implements ForwardWithSync {\n\n private final List nameParts;\n private final String tableAlias;\n private final boolean isTempPart;\n private final List partitions;\n private final LogicalPlan logicalQuery;\n\n /**\n * constructor\n */\n public DeleteFromCommand(List nameParts, String tableAlias,\n boolean isTempPart, List partitions, LogicalPlan logicalQuery) {\n super(PlanType.DELETE_COMMAND);\n this.nameParts = Utils.copyRequiredList(nameParts);\n this.tableAlias = tableAlias;\n this.isTempPart = isTempPart;\n this.partitions = Utils.copyRequiredList(partitions);\n this.logicalQuery = logicalQuery;\n }\n\n @Override\n \n\n private void updateSessionVariableForDelete(SessionVariable sessionVariable) {\n sessionVariable.setIsSingleSetVar(true);\n try {\n \n VariableMgr.setVar(sessionVariable,\n new SetVar(SessionVariable.FORBID_UNKNOWN_COLUMN_STATS, new StringLiteral(\"false\")));\n \n List disableRules = Lists.newArrayList(\n RuleType.ELIMINATE_NOT_NULL.name(), RuleType.INFER_FILTER_NOT_NULL.name());\n disableRules.addAll(sessionVariable.getDisableNereidsRuleNames());\n VariableMgr.setVar(sessionVariable,\n new SetVar(SessionVariable.DISABLE_NEREIDS_RULES,\n new StringLiteral(StringUtils.join(disableRules, \",\"))));\n } catch (Exception e) {\n throw new AnalysisException(\"set session variable by delete from command failed\", e);\n }\n }\n\n private void checkColumn(Set tableColumns, SlotReference slotReference, OlapTable table) {\n \n if (!slotReference.getColumn().isPresent()) {\n throw new AnalysisException(\"\");\n }\n Column column = slotReference.getColumn().get();\n\n if (Column.DELETE_SIGN.equalsIgnoreCase(column.getName())) {\n return;\n }\n \n if (Column.isShadowColumn(column.getName())) {\n throw new 
AnalysisException(\"Can not apply delete condition to shadow column \" + column.getName());\n }\n \n String shadowName = Column.getShadowName(column.getName());\n if (tableColumns.contains(shadowName)) {\n throw new AnalysisException(String.format(\"Column '%s' is under\"\n + \" schema change operation. Do not allow delete operation\", shadowName));\n }\n \n \n \n \n if (!column.getType().isScalarType()\n || (column.getType().isOnlyMetricType() && !column.getType().isJsonbType())) {\n throw new AnalysisException(String.format(\"Can not apply delete condition to column type: \"\n + column.getType()));\n }\n \n if (slotReference.getDataType().isFloatLikeType()) {\n throw new AnalysisException(\"Column[\" + column.getName() + \"] type is float or double.\");\n }\n \n if (!column.isKey()) {\n if (table.getKeysType() == KeysType.AGG_KEYS) {\n throw new AnalysisException(\"delete predicate on value column only supports Unique table with\"\n + \" merge-on-write enabled and Duplicate table, but \" + \"Table[\" + table.getName()\n + \"] is an Aggregate table.\");\n } else if (table.getKeysType() == KeysType.UNIQUE_KEYS && !table.getEnableUniqueKeyMergeOnWrite()) {\n throw new AnalysisException(\"delete predicate on value column only supports Unique table with\"\n + \" merge-on-write enabled and Duplicate table, but \" + \"Table[\" + table.getName()\n + \"] is an unique table without merge-on-write.\");\n }\n }\n\n for (String indexName : table.getIndexNameToId().keySet()) {\n MaterializedIndexMeta meta = table.getIndexMetaByIndexId(table.getIndexIdByName(indexName));\n Set columns = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);\n meta.getSchema().stream()\n .map(col -> org.apache.doris.analysis.CreateMaterializedViewStmt.mvColumnBreaker(col.getName()))\n .forEach(name -> columns.add(name));\n if (!columns.contains(column.getName())) {\n throw new AnalysisException(\"Column[\" + column.getName() + \"] not exist in index \" + indexName\n + \". 
maybe you need drop the corresponding materialized-view.\");\n }\n }\n }\n\n private void checkSubQuery(Plan plan) {\n while (true) {\n if (!(plan instanceof PhysicalDistribute\n || plan instanceof PhysicalOlapScan\n || plan instanceof PhysicalProject\n || plan instanceof PhysicalFilter)) {\n throw new AnalysisException(\"Where clause only supports compound predicate,\"\n + \" binary predicate, is_null predicate or in predicate.\");\n }\n if (plan instanceof PhysicalOlapScan) {\n break;\n }\n plan = ((PhysicalUnary) plan).child();\n }\n }\n\n private void checkComparisonPredicate(ComparisonPredicate cp) {\n if (!(cp.left() instanceof SlotReference)) {\n throw new AnalysisException(\n \"Left expr of binary predicate should be column name, predicate: \" + cp.toSql()\n + \", left expr type:\" + cp.left().getDataType());\n }\n if (!(cp.right() instanceof Literal)) {\n throw new AnalysisException(\n \"Right expr of binary predicate should be value, predicate: \" + cp.toSql()\n + \", right expr type:\" + cp.right().getDataType());\n }\n }\n\n private void checkIsNull(IsNull isNull) {\n if (!(isNull.child() instanceof SlotReference)) {\n throw new AnalysisException(\n \"Child expr of is_null predicate should be column name, predicate: \" + isNull.toSql());\n }\n }\n\n private void checkInPredicate(InPredicate in) {\n if (!(in.getCompareExpr() instanceof SlotReference)) {\n throw new AnalysisException(\n \"Left expr of in predicate should be column name, predicate: \" + in.toSql()\n + \", left expr type:\" + in.getCompareExpr().getDataType());\n }\n int maxAllowedInElementNumOfDelete = Config.max_allowed_in_element_num_of_delete;\n if (in.getOptions().size() > maxAllowedInElementNumOfDelete) {\n throw new AnalysisException(\"Element num of in predicate should not be more than \"\n + maxAllowedInElementNumOfDelete);\n }\n for (Expression option : in.getOptions()) {\n if (!(option instanceof Literal)) {\n throw new AnalysisException(\"Child of in predicate should be value, 
but get \" + option);\n }\n }\n }\n\n private void checkPredicate(Expression predicate) {\n if (predicate instanceof And) {\n checkPredicate(((And) predicate).left());\n checkPredicate(((And) predicate).right());\n } else if (predicate instanceof ComparisonPredicate) {\n checkComparisonPredicate((ComparisonPredicate) predicate);\n } else if (predicate instanceof IsNull) {\n checkIsNull((IsNull) predicate);\n } else if (predicate instanceof Not) {\n Expression child = ((Not) predicate).child();\n if (child instanceof IsNull) {\n checkIsNull((IsNull) child);\n } else if (child instanceof ComparisonPredicate) {\n checkComparisonPredicate((ComparisonPredicate) child);\n } else if (child instanceof InPredicate) {\n checkInPredicate((InPredicate) child);\n } else {\n throw new AnalysisException(\"Where clause only supports compound predicate,\"\n + \" binary predicate, is_null predicate or in predicate. But we meet \"\n + child.toSql());\n }\n } else if (predicate instanceof InPredicate) {\n checkInPredicate((InPredicate) predicate);\n } else {\n throw new AnalysisException(\"Where clause only supports compound predicate,\"\n + \" binary predicate, is_null predicate or in predicate. But we meet \"\n + predicate.toSql());\n }\n }\n\n @Override\n public R accept(PlanVisitor visitor, C context) {\n return visitor.visitDeleteFromCommand(this, context);\n }\n}" }, { "comment": "Could you please move such logic to the lines since no data in a partition is just a special case for it. 
```java if (partitionColumnSize > 0) { xxx } ```", "method_body": "public void commitPartitions() throws Exception {\n FileSystem fs = factory.create(tmpPath.toUri());\n List taskPaths = listTaskTemporaryPaths(fs, tmpPath);\n if (taskPaths.isEmpty() && !staticPartitions.isEmpty()) {\n commitEmptyPartition(fs);\n return;\n }\n try (PartitionLoader loader = new PartitionLoader(overwrite, fs, metaStoreFactory)) {\n if (partitionColumnSize > 0) {\n for (Map.Entry, List> entry :\n collectPartSpecToPaths(fs, taskPaths, partitionColumnSize).entrySet()) {\n loader.loadPartition(entry.getKey(), entry.getValue());\n }\n } else {\n loader.loadNonPartition(taskPaths);\n }\n } finally {\n for (Path taskPath : taskPaths) {\n fs.delete(taskPath, true);\n }\n }\n }", "target_code": "if (taskPaths.isEmpty() && !staticPartitions.isEmpty()) {", "method_body_after": "public void commitPartitions() throws Exception {\n FileSystem fs = factory.create(tmpPath.toUri());\n List taskPaths = listTaskTemporaryPaths(fs, tmpPath);\n\n try (PartitionLoader loader =\n new PartitionLoader(overwrite, fs, metaStoreFactory, isToLocal)) {\n if (partitionColumnSize > 0) {\n if (taskPaths.isEmpty() && !staticPartitions.isEmpty()) {\n loader.loadEmptyPartition(this.staticPartitions);\n } else {\n for (Map.Entry, List> entry :\n collectPartSpecToPaths(fs, taskPaths, partitionColumnSize).entrySet()) {\n loader.loadPartition(entry.getKey(), entry.getValue());\n }\n }\n } else {\n loader.loadNonPartition(taskPaths);\n }\n } finally {\n for (Path taskPath : taskPaths) {\n fs.delete(taskPath, true);\n }\n }\n }", "context_before": "class FileSystemCommitter implements Serializable {\n\n private static final long serialVersionUID = 1L;\n\n private final FileSystemFactory factory;\n private final TableMetaStoreFactory metaStoreFactory;\n private final boolean overwrite;\n private final Path tmpPath;\n private final int partitionColumnSize;\n private final LinkedHashMap staticPartitions;\n\n 
FileSystemCommitter(\n FileSystemFactory factory,\n TableMetaStoreFactory metaStoreFactory,\n boolean overwrite,\n Path tmpPath,\n int partitionColumnSize,\n LinkedHashMap staticPartitions) {\n this.factory = factory;\n this.metaStoreFactory = metaStoreFactory;\n this.overwrite = overwrite;\n this.tmpPath = tmpPath;\n this.partitionColumnSize = partitionColumnSize;\n this.staticPartitions = staticPartitions;\n }\n\n /** For committing job's output after successful batch job completion. */\n \n\n private void commitEmptyPartition(FileSystem fs) throws Exception {\n try (PartitionLoader loader = new PartitionLoader(overwrite, fs, metaStoreFactory)) {\n loader.loadEmptyPartition(this.staticPartitions);\n }\n }\n}", "context_after": "class FileSystemCommitter implements Serializable {\n\n private static final long serialVersionUID = 1L;\n\n private final FileSystemFactory factory;\n private final TableMetaStoreFactory metaStoreFactory;\n private final boolean overwrite;\n private final boolean isToLocal;\n private final Path tmpPath;\n private final int partitionColumnSize;\n private final LinkedHashMap staticPartitions;\n\n FileSystemCommitter(\n FileSystemFactory factory,\n TableMetaStoreFactory metaStoreFactory,\n boolean overwrite,\n Path tmpPath,\n int partitionColumnSize,\n boolean isToLocal,\n LinkedHashMap staticPartitions) {\n this.factory = factory;\n this.metaStoreFactory = metaStoreFactory;\n this.overwrite = overwrite;\n this.tmpPath = tmpPath;\n this.partitionColumnSize = partitionColumnSize;\n this.isToLocal = isToLocal;\n this.staticPartitions = staticPartitions;\n }\n\n /** For committing job's output after successful batch job completion. 
*/\n \n}" }, { "comment": "If columnSet.getString (\"REMARKS\") is null, is this a safe behavior?", "method_body": "public List convertToSRTable(ResultSet columnSet) throws SQLException {\n List fullSchema = Lists.newArrayList();\n while (columnSet.next()) {\n Type type = convertColumnType(columnSet.getInt(\"DATA_TYPE\"),\n columnSet.getString(\"TYPE_NAME\"),\n columnSet.getInt(\"COLUMN_SIZE\"),\n columnSet.getInt(\"DECIMAL_DIGITS\"));\n fullSchema.add(new Column(columnSet.getString(\"COLUMN_NAME\"), type,\n columnSet.getString(\"IS_NULLABLE\").equals(\"YES\"),\n columnSet.getString(\"REMARKS\")));\n }\n return fullSchema;\n }", "target_code": "columnSet.getString(\"REMARKS\")));", "method_body_after": "public List convertToSRTable(ResultSet columnSet) throws SQLException {\n List fullSchema = Lists.newArrayList();\n while (columnSet.next()) {\n Type type = convertColumnType(columnSet.getInt(\"DATA_TYPE\"),\n columnSet.getString(\"TYPE_NAME\"),\n columnSet.getInt(\"COLUMN_SIZE\"),\n columnSet.getInt(\"DECIMAL_DIGITS\"));\n\n String comment = \"\";\n \n try {\n if (columnSet.getString(\"REMARKS\") != null) {\n comment = columnSet.getString(\"REMARKS\");\n }\n } catch (SQLException ignored) { }\n\n fullSchema.add(new Column(columnSet.getString(\"COLUMN_NAME\"), type,\n columnSet.getString(\"IS_NULLABLE\").equals(\"YES\"), comment));\n }\n return fullSchema;\n }", "context_before": "class JDBCSchemaResolver {\n\n boolean supportPartitionInformation = false;\n\n public Collection listSchemas(Connection connection) {\n try (ResultSet resultSet = connection.getMetaData().getSchemas()) {\n ImmutableSet.Builder schemaNames = ImmutableSet.builder();\n while (resultSet.next()) {\n String schemaName = resultSet.getString(\"TABLE_SCHEM\");\n \n if (!schemaName.equalsIgnoreCase(\"information_schema\")) {\n schemaNames.add(schemaName);\n }\n }\n return schemaNames.build();\n } catch (SQLException e) {\n throw new StarRocksConnectorException(e.getMessage());\n }\n }\n\n public 
ResultSet getTables(Connection connection, String dbName) throws SQLException {\n return connection.getMetaData().getTables(dbName, null, null,\n new String[] {\"TABLE\", \"VIEW\"});\n }\n\n public ResultSet getColumns(Connection connection, String dbName, String tblName) throws SQLException {\n return connection.getMetaData().getColumns(dbName, null, tblName, \"%\");\n }\n\n public Table getTable(long id, String name, List schema, String dbName,\n String catalogName, Map properties) throws DdlException {\n return new JDBCTable(id, name, schema, dbName, catalogName, properties);\n }\n\n public Table getTable(long id, String name, List schema, List partitionColumns, String dbName,\n String catalogName, Map properties) throws DdlException {\n return new JDBCTable(id, name, schema, partitionColumns, dbName, catalogName, properties);\n }\n\n public List listPartitionNames(Connection connection, String databaseName, String tableName) {\n return Lists.newArrayList();\n }\n\n public List listPartitionColumns(Connection connection, String databaseName, String tableName) {\n return Lists.newArrayList();\n }\n\n public List getPartitions(Connection connection, Table table) {\n return Lists.newArrayList();\n }\n\n \n\n public Type convertColumnType(int dataType, String typeName, int columnSize, int digits) throws SQLException {\n throw new SQLException(\"should not arrival here\");\n }\n\n public boolean checkAndSetSupportPartitionInformation(Connection connection) {\n return false;\n\n }\n\n public boolean isSupportPartitionInformation() {\n return supportPartitionInformation;\n }\n\n}", "context_after": "class JDBCSchemaResolver {\n\n boolean supportPartitionInformation = false;\n\n public Collection listSchemas(Connection connection) {\n try (ResultSet resultSet = connection.getMetaData().getSchemas()) {\n ImmutableSet.Builder schemaNames = ImmutableSet.builder();\n while (resultSet.next()) {\n String schemaName = resultSet.getString(\"TABLE_SCHEM\");\n \n if 
(!schemaName.equalsIgnoreCase(\"information_schema\")) {\n schemaNames.add(schemaName);\n }\n }\n return schemaNames.build();\n } catch (SQLException e) {\n throw new StarRocksConnectorException(e.getMessage());\n }\n }\n\n public ResultSet getTables(Connection connection, String dbName) throws SQLException {\n return connection.getMetaData().getTables(dbName, null, null,\n new String[] {\"TABLE\", \"VIEW\"});\n }\n\n public ResultSet getColumns(Connection connection, String dbName, String tblName) throws SQLException {\n return connection.getMetaData().getColumns(dbName, null, tblName, \"%\");\n }\n\n public Table getTable(long id, String name, List schema, String dbName,\n String catalogName, Map properties) throws DdlException {\n return new JDBCTable(id, name, schema, dbName, catalogName, properties);\n }\n\n public Table getTable(long id, String name, List schema, List partitionColumns, String dbName,\n String catalogName, Map properties) throws DdlException {\n return new JDBCTable(id, name, schema, partitionColumns, dbName, catalogName, properties);\n }\n\n public List listPartitionNames(Connection connection, String databaseName, String tableName) {\n return Lists.newArrayList();\n }\n\n public List listPartitionColumns(Connection connection, String databaseName, String tableName) {\n return Lists.newArrayList();\n }\n\n public List getPartitions(Connection connection, Table table) {\n return Lists.newArrayList();\n }\n\n \n\n public Type convertColumnType(int dataType, String typeName, int columnSize, int digits) throws SQLException {\n throw new SQLException(\"should not arrival here\");\n }\n\n public boolean checkAndSetSupportPartitionInformation(Connection connection) {\n return false;\n\n }\n\n public boolean isSupportPartitionInformation() {\n return supportPartitionInformation;\n }\n\n}" }, { "comment": "add a white space `\"interrupted...` => `\" interrupted...`", "method_body": "private void loop() throws Exception {\n\t\twhile (!wasClosed) 
{\n\t\t\ttry {\n\t\t\t\tdispatcher.dispatch(deque.take());\n\t\t\t} catch (InterruptedException e) {\n\t\t\t\tif (!wasClosed) {\n\t\t\t\t\tLOG.debug(taskName + \"interrupted while waiting for a request (continue waiting)\", e);\n\t\t\t\t} else {\n\t\t\t\t\tThread.currentThread().interrupt();\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}", "target_code": "LOG.debug(taskName + \"interrupted while waiting for a request (continue waiting)\", e);", "method_body_after": "private void loop() throws Exception {\n\t\twhile (!wasClosed) {\n\t\t\ttry {\n\t\t\t\tdispatcher.dispatch(deque.take());\n\t\t\t} catch (InterruptedException e) {\n\t\t\t\tif (!wasClosed) {\n\t\t\t\t\tLOG.debug(taskName + \" interrupted while waiting for a request (continue waiting)\", e);\n\t\t\t\t} else {\n\t\t\t\t\tThread.currentThread().interrupt();\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}", "context_before": "class ChannelStateWriteRequestExecutorImpl implements ChannelStateWriteRequestExecutor {\n\n\tprivate static final Logger LOG = LoggerFactory.getLogger(ChannelStateWriteRequestExecutorImpl.class);\n\tprivate static final int DEFAULT_HANDOVER_CAPACITY = 10_000;\n\n\tprivate final ChannelStateWriteRequestDispatcher dispatcher;\n\tprivate final BlockingDeque deque;\n\tprivate final Thread thread;\n\tprivate volatile Exception thrown = null;\n\tprivate volatile boolean wasClosed = false;\n\tprivate final String taskName;\n\n\tChannelStateWriteRequestExecutorImpl(String taskName, ChannelStateWriteRequestDispatcher dispatcher) {\n\t\tthis(taskName, new LinkedBlockingDeque<>(DEFAULT_HANDOVER_CAPACITY), dispatcher);\n\t}\n\n\tChannelStateWriteRequestExecutorImpl(\n\t\t\tString taskName,\n\t\t\tBlockingDeque deque,\n\t\t\tChannelStateWriteRequestDispatcher dispatcher) {\n\t\tthis.taskName = taskName;\n\t\tthis.dispatcher = dispatcher;\n\t\tthis.deque = deque;\n\t\tthis.thread = new Thread(this::run, \"Channel state writer \" + taskName);\n\t\tthis.thread.setDaemon(true);\n\t}\n\n\t@VisibleForTesting\n\tvoid run() {\n\t\ttry 
{\n\t\t\tloop();\n\t\t} catch (Exception ex) {\n\t\t\tthrown = ex;\n\t\t} finally {\n\t\t\tcleanupRequests();\n\t\t\tdispatcher.fail(thrown == null ? new CancellationException() : thrown);\n\t\t}\n\t\tLOG.debug(\"{} loop terminated\", taskName);\n\t}\n\n\t\n\n\tprivate void cleanupRequests() {\n\t\tThrowable cause = thrown == null ? new CancellationException() : thrown;\n\t\tList drained = new ArrayList<>();\n\t\tdeque.drainTo(drained);\n\t\tLOG.info(\"{} discarding {} drained requests\", taskName, drained.size());\n\t\tfor (ChannelStateWriteRequest request : drained) {\n\t\t\trequest.cancel(cause);\n\t\t}\n\t}\n\n\t@Override\n\tpublic void start() throws IllegalStateException {\n\t\tthis.thread.start();\n\t}\n\n\t@Override\n\tpublic void submit(ChannelStateWriteRequest request) throws Exception {\n\t\tsubmitInternal(request, () -> deque.add(request));\n\t}\n\n\t@Override\n\tpublic void submitPriority(ChannelStateWriteRequest request) throws Exception {\n\t\tsubmitInternal(request, () -> deque.addFirst(request));\n\t}\n\n\tprivate void submitInternal(ChannelStateWriteRequest request, RunnableWithException action) throws Exception {\n\t\ttry {\n\t\t\taction.run();\n\t\t} catch (Exception ex) {\n\t\t\trequest.cancel(ex);\n\t\t\tthrow ex;\n\t\t}\n\t\tensureRunning();\n\t}\n\n\tprivate void ensureRunning() throws Exception {\n\t\t\n\t\t\n\t\tif (wasClosed || !thread.isAlive()) {\n\t\t\tcleanupRequests();\n\t\t\tthrow ExceptionUtils.firstOrSuppressed(new IllegalStateException(\"not running\"), thrown);\n\t\t}\n\t}\n\n\t@Override\n\tpublic void close() throws IOException {\n\t\twasClosed = true;\n\t\twhile (thread.isAlive()) {\n\t\t\tthread.interrupt();\n\t\t\ttry {\n\t\t\t\tthread.join();\n\t\t\t} catch (InterruptedException e) {\n\t\t\t\tif (!thread.isAlive()) {\n\t\t\t\t\tThread.currentThread().interrupt();\n\t\t\t\t}\n\t\t\t\tLOG.debug(taskName + \" interrupted while waiting for the writer thread to die\", e);\n\t\t\t}\n\t\t}\n\t\tif (thrown != null) {\n\t\t\tthrow 
new IOException(thrown);\n\t\t}\n\t}\n\n\t@VisibleForTesting\n\tThread getThread() {\n\t\treturn thread;\n\t}\n}", "context_after": "class ChannelStateWriteRequestExecutorImpl implements ChannelStateWriteRequestExecutor {\n\n\tprivate static final Logger LOG = LoggerFactory.getLogger(ChannelStateWriteRequestExecutorImpl.class);\n\tprivate static final int DEFAULT_HANDOVER_CAPACITY = 10_000;\n\n\tprivate final ChannelStateWriteRequestDispatcher dispatcher;\n\tprivate final BlockingDeque deque;\n\tprivate final Thread thread;\n\tprivate volatile Exception thrown = null;\n\tprivate volatile boolean wasClosed = false;\n\tprivate final String taskName;\n\n\tChannelStateWriteRequestExecutorImpl(String taskName, ChannelStateWriteRequestDispatcher dispatcher) {\n\t\tthis(taskName, dispatcher, new LinkedBlockingDeque<>(DEFAULT_HANDOVER_CAPACITY));\n\t}\n\n\tChannelStateWriteRequestExecutorImpl(\n\t\t\tString taskName,\n\t\t\tChannelStateWriteRequestDispatcher dispatcher,\n\t\t\tBlockingDeque deque) {\n\t\tthis.taskName = taskName;\n\t\tthis.dispatcher = dispatcher;\n\t\tthis.deque = deque;\n\t\tthis.thread = new Thread(this::run, \"Channel state writer \" + taskName);\n\t\tthis.thread.setDaemon(true);\n\t}\n\n\t@VisibleForTesting\n\tvoid run() {\n\t\ttry {\n\t\t\tloop();\n\t\t} catch (Exception ex) {\n\t\t\tthrown = ex;\n\t\t} finally {\n\t\t\ttry {\n\t\t\t\tcloseAll(\n\t\t\t\t\tthis::cleanupRequests,\n\t\t\t\t\t() -> dispatcher.fail(thrown == null ? new CancellationException() : thrown)\n\t\t\t\t);\n\t\t\t} catch (Exception e) {\n\t\t\t\t\n\t\t\t\tthrown = ExceptionUtils.firstOrSuppressed(e, thrown);\n\t\t\t}\n\t\t}\n\t\tLOG.debug(\"{} loop terminated\", taskName);\n\t}\n\n\t\n\n\tprivate void cleanupRequests() throws Exception {\n\t\tThrowable cause = thrown == null ? 
new CancellationException() : thrown;\n\t\tList drained = new ArrayList<>();\n\t\tdeque.drainTo(drained);\n\t\tLOG.info(\"{} discarding {} drained requests\", taskName, drained.size());\n\t\tcloseAll(drained.stream().map(request -> () -> request.cancel(cause)).collect(Collectors.toList()));\n\t}\n\n\t@Override\n\tpublic void start() throws IllegalStateException {\n\t\tthis.thread.start();\n\t}\n\n\t@Override\n\tpublic void submit(ChannelStateWriteRequest request) throws Exception {\n\t\tsubmitInternal(request, () -> deque.add(request));\n\t}\n\n\t@Override\n\tpublic void submitPriority(ChannelStateWriteRequest request) throws Exception {\n\t\tsubmitInternal(request, () -> deque.addFirst(request));\n\t}\n\n\tprivate void submitInternal(ChannelStateWriteRequest request, RunnableWithException action) throws Exception {\n\t\ttry {\n\t\t\taction.run();\n\t\t} catch (Exception ex) {\n\t\t\trequest.cancel(ex);\n\t\t\tthrow ex;\n\t\t}\n\t\tensureRunning();\n\t}\n\n\tprivate void ensureRunning() throws Exception {\n\t\t\n\t\t\n\t\tif (wasClosed || !thread.isAlive()) {\n\t\t\tcleanupRequests();\n\t\t\tthrow ExceptionUtils.firstOrSuppressed(new IllegalStateException(\"not running\"), thrown);\n\t\t}\n\t}\n\n\t@Override\n\tpublic void close() throws IOException {\n\t\twasClosed = true;\n\t\twhile (thread.isAlive()) {\n\t\t\tthread.interrupt();\n\t\t\ttry {\n\t\t\t\tthread.join();\n\t\t\t} catch (InterruptedException e) {\n\t\t\t\tif (!thread.isAlive()) {\n\t\t\t\t\tThread.currentThread().interrupt();\n\t\t\t\t}\n\t\t\t\tLOG.debug(taskName + \" interrupted while waiting for the writer thread to die\", e);\n\t\t\t}\n\t\t}\n\t\tif (thrown != null) {\n\t\t\tthrow new IOException(thrown);\n\t\t}\n\t}\n\n\t@VisibleForTesting\n\tThread getThread() {\n\t\treturn thread;\n\t}\n}" }, { "comment": "This is not new code, copied from the old Dev UI, just move from a POST handler to the Processor. 
Happy to change this, but maybe in a follow up PR ?", "method_body": "private List sourcePackagesForRoot(Path langPath) {\n if (!Files.exists(langPath)) {\n return Collections.emptyList();\n }\n File[] rootFiles = langPath.toFile().listFiles();\n List rootPackages = new ArrayList<>(1);\n if (rootFiles != null) {\n for (File rootFile : rootFiles) {\n if (rootFile.isDirectory()) {\n rootPackages.add(rootFile.toPath());\n }\n }\n }\n if (rootPackages.isEmpty()) {\n return List.of(\"\");\n }\n List result = new ArrayList<>(rootPackages.size());\n for (Path rootPackage : rootPackages) {\n List paths = new ArrayList<>();\n SimpleFileVisitor simpleFileVisitor = new DetectPackageFileVisitor(paths);\n try {\n Files.walkFileTree(rootPackage, simpleFileVisitor);\n if (paths.isEmpty()) {\n continue;\n }\n String commonPath = commonPath(paths);\n String rootPackageStr = commonPath.replace(langPath.toAbsolutePath().toString(), \"\")\n .replace(File.separator, \".\");\n if (rootPackageStr.startsWith(\".\")) {\n rootPackageStr = rootPackageStr.substring(1);\n }\n if (rootPackageStr.endsWith(\".\")) {\n rootPackageStr = rootPackageStr.substring(0, rootPackageStr.length() - 1);\n }\n result.add(rootPackageStr);\n } catch (IOException e) {\n log.debug(\"Unable to determine the sources directories\", e);\n \n }\n }\n return result;\n }", "target_code": "return result;", "method_body_after": "private List sourcePackagesForRoot(Path langPath) {\n if (!Files.exists(langPath)) {\n return Collections.emptyList();\n }\n File[] rootFiles = langPath.toFile().listFiles();\n List rootPackages = new ArrayList<>(1);\n if (rootFiles != null) {\n for (File rootFile : rootFiles) {\n if (rootFile.isDirectory()) {\n rootPackages.add(rootFile.toPath());\n }\n }\n }\n if (rootPackages.isEmpty()) {\n return List.of(\"\");\n }\n List result = new ArrayList<>(rootPackages.size());\n for (Path rootPackage : rootPackages) {\n List paths = new ArrayList<>();\n SimpleFileVisitor simpleFileVisitor = new 
DetectPackageFileVisitor(paths);\n try {\n Files.walkFileTree(rootPackage, simpleFileVisitor);\n if (paths.isEmpty()) {\n continue;\n }\n String commonPath = commonPath(paths);\n String rootPackageStr = commonPath.replace(langPath.toAbsolutePath().toString(), \"\")\n .replace(File.separator, \".\");\n if (rootPackageStr.startsWith(\".\")) {\n rootPackageStr = rootPackageStr.substring(1);\n }\n if (rootPackageStr.endsWith(\".\")) {\n rootPackageStr = rootPackageStr.substring(0, rootPackageStr.length() - 1);\n }\n result.add(rootPackageStr);\n } catch (IOException e) {\n log.debug(\"Unable to determine the sources directories\", e);\n \n }\n }\n return result;\n }", "context_before": "class BuildTimeContentProcessor {\n private static final Logger log = Logger.getLogger(BuildTimeContentProcessor.class);\n\n private static final String SLASH = \"/\";\n private static final String DEV_UI = \"dev-ui\";\n private static final String BUILD_TIME_PATH = \"dev-ui-templates/build-time\";\n\n final Config config = ConfigProvider.getConfig();\n\n /**\n * Here we create references to internal dev ui files so that they can be imported by ref.\n * This will be merged into the final importmap\n */\n @BuildStep(onlyIf = IsDevelopment.class)\n InternalImportMapBuildItem createKnownInternalImportMap(NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) {\n\n String contextRoot = nonApplicationRootPathBuildItem.getNonApplicationRootPath() + DEV_UI + SLASH;\n\n InternalImportMapBuildItem internalImportMapBuildItem = new InternalImportMapBuildItem();\n\n internalImportMapBuildItem.add(\"devui/\", contextRoot);\n \n internalImportMapBuildItem.add(\"qwc/\", contextRoot + \"qwc/\");\n internalImportMapBuildItem.add(\"qwc-no-data\", contextRoot + \"qwc/qwc-no-data.js\");\n internalImportMapBuildItem.add(\"qwc-hot-reload-element\", contextRoot + \"qwc/qwc-hot-reload-element.js\");\n internalImportMapBuildItem.add(\"qwc-server-log\", contextRoot + \"qwc/qwc-server-log.js\");\n 
internalImportMapBuildItem.add(\"qwc-extension-link\", contextRoot + \"qwc/qwc-extension-link.js\");\n \n internalImportMapBuildItem.add(\"qui/\", contextRoot + \"qui/\");\n internalImportMapBuildItem.add(\"qui-card\", contextRoot + \"qui/qui-card.js\");\n\n internalImportMapBuildItem.add(\"qui-badge\", contextRoot + \"qui/qui-badge.js\");\n internalImportMapBuildItem.add(\"qui-alert\", contextRoot + \"qui/qui-alert.js\");\n internalImportMapBuildItem.add(\"qui-code-block\", contextRoot + \"qui/qui-code-block.js\");\n internalImportMapBuildItem.add(\"qui-ide-link\", contextRoot + \"qui/qui-ide-link.js\");\n\n \n internalImportMapBuildItem.add(\"echarts/\", contextRoot + \"echarts/\");\n internalImportMapBuildItem.add(\"echarts-gauge-grade\", contextRoot + \"echarts/echarts-gauge-grade.js\");\n internalImportMapBuildItem.add(\"echarts-pie\", contextRoot + \"echarts/echarts-pie.js\");\n internalImportMapBuildItem.add(\"echarts-horizontal-stacked-bar\",\n contextRoot + \"echarts/echarts-horizontal-stacked-bar.js\");\n\n \n internalImportMapBuildItem.add(\"icon/\", contextRoot + \"icon/\");\n \n internalImportMapBuildItem.add(\"controller/\", contextRoot + \"controller/\");\n internalImportMapBuildItem.add(\"log-controller\", contextRoot + \"controller/log-controller.js\");\n internalImportMapBuildItem.add(\"storage-controller\", contextRoot + \"controller/storage-controller.js\");\n internalImportMapBuildItem.add(\"router-controller\", contextRoot + \"controller/router-controller.js\");\n internalImportMapBuildItem.add(\"notifier\", contextRoot + \"controller/notifier.js\");\n internalImportMapBuildItem.add(\"jsonrpc\", contextRoot + \"controller/jsonrpc.js\");\n \n internalImportMapBuildItem.add(\"state/\", contextRoot + \"state/\");\n internalImportMapBuildItem.add(\"theme-state\", contextRoot + \"state/theme-state.js\");\n internalImportMapBuildItem.add(\"connection-state\", contextRoot + \"state/connection-state.js\");\n 
internalImportMapBuildItem.add(\"devui-state\", contextRoot + \"state/devui-state.js\");\n\n return internalImportMapBuildItem;\n }\n\n /**\n * Here we map all the pages (as defined by the extensions) build time data\n *\n * @param pageBuildItems\n * @param buildTimeConstProducer\n */\n @BuildStep(onlyIf = IsDevelopment.class)\n void mapPageBuildTimeData(List cards,\n List menus,\n List footers,\n CurateOutcomeBuildItem curateOutcomeBuildItem,\n BuildProducer buildTimeConstProducer) {\n\n for (CardPageBuildItem card : cards) {\n String extensionPathName = card.getExtensionPathName(curateOutcomeBuildItem);\n Map buildTimeData = getBuildTimeDataForCard(curateOutcomeBuildItem, card);\n if (!buildTimeData.isEmpty()) {\n buildTimeConstProducer.produce(\n new BuildTimeConstBuildItem(extensionPathName, buildTimeData));\n }\n }\n for (MenuPageBuildItem menu : menus) {\n String extensionPathName = menu.getExtensionPathName(curateOutcomeBuildItem);\n Map buildTimeData = getBuildTimeDataForPage(menu);\n if (!buildTimeData.isEmpty()) {\n buildTimeConstProducer.produce(\n new BuildTimeConstBuildItem(extensionPathName, buildTimeData));\n }\n }\n for (FooterPageBuildItem footer : footers) {\n String extensionPathName = footer.getExtensionPathName(curateOutcomeBuildItem);\n Map buildTimeData = getBuildTimeDataForPage(footer);\n if (!buildTimeData.isEmpty()) {\n buildTimeConstProducer.produce(\n new BuildTimeConstBuildItem(extensionPathName, buildTimeData));\n }\n }\n }\n\n private Map getBuildTimeDataForPage(AbstractPageBuildItem pageBuildItem) {\n Map m = new HashMap<>();\n if (pageBuildItem.hasBuildTimeData()) {\n m.putAll(pageBuildItem.getBuildTimeData());\n }\n return m;\n }\n\n private Map getBuildTimeDataForCard(CurateOutcomeBuildItem curateOutcomeBuildItem,\n CardPageBuildItem pageBuildItem) {\n Map m = getBuildTimeDataForPage(pageBuildItem);\n\n if (pageBuildItem.getOptionalCard().isPresent()) {\n \n List pages = new ArrayList<>();\n List pageBuilders = 
pageBuildItem.getPages();\n for (PageBuilder pageBuilder : pageBuilders) {\n String path = pageBuildItem.getExtensionPathName(curateOutcomeBuildItem);\n pageBuilder.namespace(path);\n pageBuilder.extension(path);\n pages.add(pageBuilder.build());\n }\n\n m.put(\"pages\", pages);\n }\n return m;\n }\n\n /**\n * Here we find all build time data and make then available via a const\n *\n * js components can import the const with \"import {constName} from '{ext}-data';\"\n *\n * @param pageBuildItems\n * @param quteTemplateProducer\n * @param internalImportMapProducer\n */\n @BuildStep(onlyIf = IsDevelopment.class)\n void createBuildTimeConstJsTemplate(CurateOutcomeBuildItem curateOutcomeBuildItem,\n NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,\n List buildTimeConstBuildItems,\n BuildProducer quteTemplateProducer,\n BuildProducer internalImportMapProducer) {\n\n String contextRoot = nonApplicationRootPathBuildItem.getNonApplicationRootPath() + DEV_UI + SLASH;\n\n QuteTemplateBuildItem quteTemplateBuildItem = new QuteTemplateBuildItem(\n QuteTemplateBuildItem.DEV_UI);\n\n InternalImportMapBuildItem internalImportMapBuildItem = new InternalImportMapBuildItem();\n\n for (BuildTimeConstBuildItem buildTimeConstBuildItem : buildTimeConstBuildItems) {\n Map data = new HashMap<>();\n if (buildTimeConstBuildItem.hasBuildTimeData()) {\n for (Map.Entry pageData : buildTimeConstBuildItem.getBuildTimeData().entrySet()) {\n try {\n String key = pageData.getKey();\n String value = DatabindCodec.prettyMapper().writeValueAsString(pageData.getValue());\n data.put(key, value);\n } catch (JsonProcessingException ex) {\n log.error(\"Could not create Json Data for Dev UI page\", ex);\n }\n }\n }\n if (!data.isEmpty()) {\n Map qutedata = new HashMap<>();\n qutedata.put(\"buildTimeData\", data);\n\n String ref = buildTimeConstBuildItem.getExtensionPathName(curateOutcomeBuildItem) + \"-data\";\n String file = ref + \".js\";\n quteTemplateBuildItem.add(\"build-time-data.js\", 
file, qutedata);\n internalImportMapBuildItem.add(ref, contextRoot + file);\n }\n }\n\n quteTemplateProducer.produce(quteTemplateBuildItem);\n internalImportMapProducer.produce(internalImportMapBuildItem);\n }\n\n /**\n * Here we find all the mvnpm jars\n */\n @BuildStep(onlyIf = IsDevelopment.class)\n void gatherMvnpmJars(BuildProducer mvnpmProducer, CurateOutcomeBuildItem curateOutcomeBuildItem) {\n Set mvnpmJars = new HashSet<>();\n ClassLoader tccl = Thread.currentThread().getContextClassLoader();\n try {\n Enumeration jarsWithImportMaps = tccl.getResources(Location.IMPORTMAP_PATH);\n Set jarUrls = new HashSet<>(Collections.list(jarsWithImportMaps));\n for (URL jarUrl : jarUrls) {\n final JarURLConnection connection = (JarURLConnection) jarUrl.openConnection();\n mvnpmJars.add(connection.getJarFileURL());\n }\n mvnpmProducer.produce(new MvnpmBuildItem(mvnpmJars));\n } catch (IOException ex) {\n throw new UncheckedIOException(ex);\n }\n }\n\n /**\n * Here we create index.html\n * We aggregate all import maps into one\n * This includes import maps from 3rd party libs from mvnpm.org and internal ones defined above\n *\n * @return The QuteTemplate Build item that will create the end result\n */\n @BuildStep(onlyIf = IsDevelopment.class)\n QuteTemplateBuildItem createIndexHtmlTemplate(\n MvnpmBuildItem mvnpmBuildItem,\n ThemeVarsBuildItem themeVarsBuildItem,\n NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,\n List internalImportMapBuildItems) {\n QuteTemplateBuildItem quteTemplateBuildItem = new QuteTemplateBuildItem(\n QuteTemplateBuildItem.DEV_UI);\n\n Aggregator aggregator = new Aggregator(mvnpmBuildItem.getMvnpmJars());\n\n for (InternalImportMapBuildItem importMapBuildItem : internalImportMapBuildItems) {\n Map importMap = importMapBuildItem.getImportMap();\n aggregator.addMappings(importMap);\n }\n String importmap = aggregator.aggregateAsJson(nonApplicationRootPathBuildItem.getNonApplicationRootPath());\n aggregator.reset();\n\n String 
themeVars = themeVarsBuildItem.getTemplateValue();\n String nonApplicationRoot = nonApplicationRootPathBuildItem.getNonApplicationRootPath();\n String contextRoot = nonApplicationRoot + DEV_UI + SLASH;\n\n Map data = Map.of(\n \"nonApplicationRoot\", nonApplicationRoot,\n \"contextRoot\", contextRoot,\n \"importmap\", importmap,\n \"themeVars\", themeVars);\n\n quteTemplateBuildItem.add(\"index.html\", data);\n\n return quteTemplateBuildItem;\n }\n\n \n @BuildStep(onlyIf = IsDevelopment.class)\n void loadAllBuildTimeTemplates(BuildProducer buildTimeContentProducer,\n List templates) {\n ClassLoader cl = Thread.currentThread().getContextClassLoader();\n for (QuteTemplateBuildItem template : templates) {\n\n List contentPerExtension = new ArrayList<>();\n\n List templatesWithData = template.getTemplateDatas();\n for (QuteTemplateBuildItem.TemplateData e : templatesWithData) {\n\n String templateName = e.getTemplateName(); \n Map data = e.getData();\n String resourceName = BUILD_TIME_PATH + SLASH + templateName;\n String fileName = e.getFileName();\n \n try (InputStream templateStream = cl.getResourceAsStream(resourceName)) {\n if (templateStream != null) {\n byte[] templateContent = IoUtil.readBytes(templateStream);\n \n DevUIContent content = DevUIContent.builder()\n .fileName(fileName)\n .template(templateContent)\n .addData(data)\n .build();\n contentPerExtension.add(content);\n }\n } catch (IOException ioe) {\n throw new UncheckedIOException(\"An error occurred while processing \" + resourceName, ioe);\n }\n }\n buildTimeContentProducer.produce(new StaticContentBuildItem(\n StaticContentBuildItem.DEV_UI, contentPerExtension));\n }\n }\n\n /**\n * Creates json data that is available in Javascript\n */\n @BuildStep(onlyIf = IsDevelopment.class)\n void createBuildTimeData(BuildProducer buildTimeConstProducer,\n BuildProducer themeVarsProducer,\n List internalPages,\n ExtensionsBuildItem extensionsBuildItem,\n NonApplicationRootPathBuildItem 
nonApplicationRootPathBuildItem,\n LaunchModeBuildItem launchModeBuildItem,\n Optional effectiveIdeBuildItem) {\n\n BuildTimeConstBuildItem internalBuildTimeData = new BuildTimeConstBuildItem(AbstractDevUIBuildItem.DEV_UI);\n\n addThemeBuildTimeData(internalBuildTimeData, themeVarsProducer);\n addMenuSectionBuildTimeData(internalBuildTimeData, internalPages, extensionsBuildItem);\n addFooterTabBuildTimeData(internalBuildTimeData, extensionsBuildItem);\n addVersionInfoBuildTimeData(internalBuildTimeData, nonApplicationRootPathBuildItem);\n addIdeBuildTimeData(internalBuildTimeData, effectiveIdeBuildItem, launchModeBuildItem);\n buildTimeConstProducer.produce(internalBuildTimeData);\n }\n\n private void addThemeBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,\n BuildProducer themeVarsProducer) {\n \n Map> themes = new HashMap<>();\n Map dark = new HashMap<>();\n Map light = new HashMap<>();\n\n computeColors(themes, dark, light);\n\n internalBuildTimeData.addBuildTimeData(\"themes\", themes);\n\n \n themeVarsProducer.produce(new ThemeVarsBuildItem(light.keySet(), QUARKUS_BLUE.toString()));\n }\n\n private void addMenuSectionBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,\n List internalPages,\n ExtensionsBuildItem extensionsBuildItem) {\n \n @SuppressWarnings(\"unchecked\")\n List sectionMenu = new ArrayList();\n Collections.sort(internalPages, (t, t1) -> {\n return ((Integer) t.getPosition()).compareTo(t1.getPosition());\n });\n\n for (InternalPageBuildItem internalPageBuildItem : internalPages) {\n List pages = internalPageBuildItem.getPages();\n for (Page page : pages) {\n sectionMenu.add(page);\n }\n internalBuildTimeData.addAllBuildTimeData(internalPageBuildItem.getBuildTimeData());\n }\n\n \n for (Extension e : extensionsBuildItem.getSectionMenuExtensions()) {\n List pagesFromExtension = e.getMenuPages();\n sectionMenu.addAll(pagesFromExtension);\n }\n\n internalBuildTimeData.addBuildTimeData(\"menuItems\", sectionMenu);\n }\n\n private 
void addFooterTabBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,\n ExtensionsBuildItem extensionsBuildItem) {\n \n @SuppressWarnings(\"unchecked\")\n List footerTabs = new ArrayList();\n Page serverLog = Page.webComponentPageBuilder().internal()\n .namespace(\"devui-logstream\")\n .title(\"Server\")\n .icon(\"font-awesome-solid:server\")\n .componentLink(\"qwc-server-log.js\").build();\n footerTabs.add(serverLog);\n\n Page testLog = Page.webComponentPageBuilder().internal()\n .namespace(\"devui-continuous-testing\")\n .title(\"Testing\")\n .icon(\"font-awesome-solid:flask-vial\")\n .componentLink(\"qwc-test-log.js\").build();\n footerTabs.add(testLog);\n\n \n if (Version.getVersion().equalsIgnoreCase(\"999-SNAPSHOT\")) {\n Page devUiLog = Page.webComponentPageBuilder().internal()\n .namespace(\"devui-jsonrpcstream\")\n .title(\"Dev UI\")\n .icon(\"font-awesome-solid:satellite-dish\")\n .componentLink(\"qwc-jsonrpc-messages.js\").build();\n footerTabs.add(devUiLog);\n }\n \n for (Extension e : extensionsBuildItem.getFooterTabsExtensions()) {\n List tabsFromExtension = e.getFooterPages();\n footerTabs.addAll(tabsFromExtension);\n }\n\n internalBuildTimeData.addBuildTimeData(\"footerTabs\", footerTabs);\n internalBuildTimeData.addBuildTimeData(\"loggerLevels\", LEVELS);\n }\n\n private void addVersionInfoBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,\n NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) {\n \n String contextRoot = nonApplicationRootPathBuildItem.getNonApplicationRootPath() + DEV_UI + SLASH;\n Map applicationInfo = new HashMap<>();\n applicationInfo.put(\"contextRoot\", contextRoot);\n applicationInfo.put(\"quarkusVersion\", Version.getVersion());\n applicationInfo.put(\"applicationName\", config.getOptionalValue(\"quarkus.application.name\", String.class).orElse(\"\"));\n applicationInfo.put(\"applicationVersion\",\n config.getOptionalValue(\"quarkus.application.version\", String.class).orElse(\"\"));\n 
internalBuildTimeData.addBuildTimeData(\"applicationInfo\", applicationInfo);\n }\n\n private void addIdeBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,\n Optional effectiveIdeBuildItem,\n LaunchModeBuildItem launchModeBuildItem) {\n\n Map ideInfo = new HashMap<>();\n boolean disable = launchModeBuildItem.getDevModeType().orElse(DevModeType.LOCAL) != DevModeType.LOCAL;\n ideInfo.put(\"disable\", disable);\n if (effectiveIdeBuildItem.isPresent()) {\n EffectiveIdeBuildItem eibi = effectiveIdeBuildItem.get();\n if (!disable) {\n \n Ide ide = eibi.getIde();\n ideInfo.put(\"ideName\", ide.name());\n ideInfo.put(\"idePackages\", getAllUserPackages());\n }\n }\n internalBuildTimeData.addBuildTimeData(\"ideInfo\", ideInfo);\n }\n\n private List getAllUserPackages() {\n List sourcesDir = DevConsoleManager.getHotReplacementContext().getSourcesDir();\n List packages = new ArrayList<>();\n\n for (Path sourcePaths : sourcesDir) {\n packages.addAll(sourcePackagesForRoot(sourcePaths));\n }\n return packages;\n }\n\n /**\n * Return the most general packages used in the application\n *

\n * TODO: this likely covers almost all typical use cases, but probably needs some tweaks for extreme corner cases\n */\n \n\n private String commonPath(List paths) {\n String commonPath = \"\";\n List dirs = new ArrayList<>(paths.size());\n for (int i = 0; i < paths.size(); i++) {\n dirs.add(i, paths.get(i).split(Pattern.quote(File.separator)));\n }\n for (int j = 0; j < dirs.get(0).length; j++) {\n String thisDir = dirs.get(0)[j]; \n boolean allMatched = true;\n for (int i = 1; i < dirs.size() && allMatched; i++) { \n if (dirs.get(i).length < j) { \n allMatched = false;\n break;\n }\n allMatched = dirs.get(i)[j].equals(thisDir); \n }\n if (allMatched) {\n commonPath += thisDir + File.separator;\n } else {\n break;\n }\n }\n return commonPath;\n }\n\n private static final List LEVELS = List.of(\n OFF.getName(),\n SEVERE.getName(),\n ERROR.getName(),\n FATAL.getName(),\n WARNING.getName(),\n WARN.getName(),\n INFO.getName(),\n DEBUG.getName(),\n TRACE.getName(),\n CONFIG.getName(),\n FINE.getName(),\n FINER.getName(),\n FINEST.getName(),\n ALL.getName());\n\n private static void computeColors(Map> themes, Map dark,\n Map light) {\n \n light.put(\"--quarkus-blue\", QUARKUS_BLUE.toString());\n dark.put(\"--quarkus-blue\", QUARKUS_BLUE.toString());\n\n light.put(\"--quarkus-red\", QUARKUS_RED.toString());\n dark.put(\"--quarkus-red\", QUARKUS_RED.toString());\n\n light.put(\"--quarkus-center\", QUARKUS_DARK.toString());\n dark.put(\"--quarkus-center\", QUARKUS_LIGHT.toString());\n\n \n\n \n light.put(\"--lumo-base-color\", Color.from(0, 100, 100).toString());\n dark.put(\"--lumo-base-color\", Color.from(210, 10, 23).toString());\n\n \n light.put(\"--lumo-contrast-5pct\", Color.from(214, 61, 25, 0.05).toString());\n dark.put(\"--lumo-contrast-5pct\", Color.from(214, 65, 85, 0.06).toString());\n light.put(\"--lumo-contrast-10pct\", Color.from(214, 57, 24, 0.1).toString());\n dark.put(\"--lumo-contrast-10pct\", Color.from(214, 60, 80, 0.14).toString());\n 
light.put(\"--lumo-contrast-20pct\", Color.from(214, 53, 23, 0.16).toString());\n dark.put(\"--lumo-contrast-20pct\", Color.from(214, 64, 82, 0.23).toString());\n light.put(\"--lumo-contrast-30pct\", Color.from(214, 50, 22, 0.26).toString());\n dark.put(\"--lumo-contrast-30pct\", Color.from(214, 69, 84, 0.32).toString());\n light.put(\"--lumo-contrast-40pct\", Color.from(214, 47, 21, 0.38).toString());\n dark.put(\"--lumo-contrast-40pct\", Color.from(214, 73, 86, 0.41).toString());\n light.put(\"--lumo-contrast-50pct\", Color.from(214, 45, 20, 0.52).toString());\n dark.put(\"--lumo-contrast-50pct\", Color.from(214, 78, 88, 0.50).toString());\n light.put(\"--lumo-contrast-60pct\", Color.from(214, 43, 19, 0.6).toString());\n dark.put(\"--lumo-contrast-60pct\", Color.from(214, 82, 90, 0.6).toString());\n light.put(\"--lumo-contrast-70pct\", Color.from(214, 42, 18, 0.69).toString());\n dark.put(\"--lumo-contrast-70pct\", Color.from(214, 87, 92, 0.7).toString());\n light.put(\"--lumo-contrast-80pct\", Color.from(214, 41, 17, 0.83).toString());\n dark.put(\"--lumo-contrast-80pct\", Color.from(214, 91, 94, 0.8).toString());\n light.put(\"--lumo-contrast-90pct\", Color.from(214, 40, 16, 0.94).toString());\n dark.put(\"--lumo-contrast-90pct\", Color.from(214, 96, 96, 0.9).toString());\n light.put(\"--lumo-contrast\", Color.from(214, 35, 15).toString());\n dark.put(\"--lumo-contrast\", Color.from(214, 100, 98).toString());\n\n \n light.put(\"--lumo-primary-color-10pct\", Color.from(214, 100, 60, 0.13).toString());\n dark.put(\"--lumo-primary-color-10pct\", Color.from(214, 90, 63, 0.1).toString());\n light.put(\"--lumo-primary-color-50pct\", Color.from(QUARKUS_BLUE, 0.76).toString());\n dark.put(\"--lumo-primary-color-50pct\", Color.from(QUARKUS_BLUE, 0.5).toString());\n light.put(\"--lumo-primary-color\", QUARKUS_BLUE.toString());\n dark.put(\"--lumo-primary-color\", QUARKUS_BLUE.toString());\n light.put(\"--lumo-primary-text-color\", QUARKUS_BLUE.toString());\n 
dark.put(\"--lumo-primary-text-color\", QUARKUS_BLUE.toString());\n light.put(\"--lumo-primary-contrast-color\", Color.from(0, 100, 100).toString());\n dark.put(\"--lumo-primary-contrast-color\", Color.from(0, 100, 100).toString());\n\n \n light.put(\"--lumo-error-color-10pct\", Color.from(3, 85, 49, 0.1).toString());\n dark.put(\"--lumo-error-color-10pct\", Color.from(3, 90, 63, 0.1).toString());\n light.put(\"--lumo-error-color-50pct\", Color.from(3, 85, 49, 0.5).toString());\n dark.put(\"--lumo-error-color-50pct\", Color.from(3, 90, 63, 0.5).toString());\n light.put(\"--lumo-error-color\", Color.from(3, 85, 48).toString());\n dark.put(\"--lumo-error-color\", Color.from(3, 90, 63).toString());\n light.put(\"--lumo-error-text-color\", Color.from(3, 89, 42).toString());\n dark.put(\"--lumo-error-text-color\", Color.from(3, 100, 67).toString());\n light.put(\"--lumo-error-contrast-color\", Color.from(0, 100, 100).toString());\n dark.put(\"--lumo-error-contrast-color\", Color.from(0, 100, 100).toString());\n\n \n light.put(\"--lumo-warning-color-10pct\", Color.from(30, 100, 50, 0.1).toString());\n dark.put(\"--lumo-warning-color-10pct\", Color.from(30, 100, 50, 0.1).toString());\n light.put(\"--lumo-warning-color-50pct\", Color.from(30, 100, 50, 0.5).toString());\n dark.put(\"--lumo-warning-color-50pct\", Color.from(30, 100, 50, 0.5).toString());\n light.put(\"--lumo-warning-color\", Color.from(30, 100, 50).toString());\n dark.put(\"--lumo-warning-color\", Color.from(30, 100, 50).toString());\n light.put(\"--lumo-warning-text-color\", Color.from(30, 89, 42).toString());\n dark.put(\"--lumo-warning-text-color\", Color.from(30, 100, 67).toString());\n light.put(\"--lumo-warning-contrast-color\", Color.from(0, 100, 100).toString());\n dark.put(\"--lumo-warning-contrast-color\", Color.from(0, 100, 100).toString());\n\n \n light.put(\"--lumo-success-color-10pct\", Color.from(145, 72, 31, 0.1).toString());\n dark.put(\"--lumo-success-color-10pct\", Color.from(145, 65, 42, 
0.1).toString());\n light.put(\"--lumo-success-color-50pct\", Color.from(145, 72, 31, 0.5).toString());\n dark.put(\"--lumo-success-color-50pct\", Color.from(145, 65, 42, 0.5).toString());\n light.put(\"--lumo-success-color\", Color.from(145, 72, 30).toString());\n dark.put(\"--lumo-success-color\", Color.from(145, 65, 42).toString());\n light.put(\"--lumo-success-text-color\", Color.from(145, 85, 25).toString());\n dark.put(\"--lumo-success-text-color\", Color.from(145, 85, 47).toString());\n light.put(\"--lumo-success-contrast-color\", Color.from(0, 100, 100).toString());\n dark.put(\"--lumo-success-contrast-color\", Color.from(0, 100, 100).toString());\n\n \n light.put(\"--lumo-header-text-color\", Color.from(214, 35, 15).toString());\n dark.put(\"--lumo-header-text-color\", Color.from(214, 100, 98).toString());\n light.put(\"--lumo-body-text-color\", Color.from(214, 40, 16, 0.94).toString());\n dark.put(\"--lumo-body-text-color\", Color.from(214, 96, 96, 0.9).toString());\n light.put(\"--lumo-secondary-text-color\", Color.from(214, 42, 18, 0.69).toString());\n dark.put(\"--lumo-secondary-text-color\", Color.from(214, 87, 92, 0.7).toString());\n light.put(\"--lumo-tertiary-text-color\", Color.from(214, 45, 20, 0.52).toString());\n dark.put(\"--lumo-tertiary-text-color\", Color.from(214, 78, 88, 0.5).toString());\n light.put(\"--lumo-disabled-text-color\", Color.from(214, 50, 22, 0.26).toString());\n dark.put(\"--lumo-disabled-text-color\", Color.from(214, 69, 84, 0.32).toString());\n\n themes.put(\"dark\", dark);\n themes.put(\"light\", light);\n }\n\n private static final Color QUARKUS_BLUE = Color.from(211, 63, 54);\n private static final Color QUARKUS_RED = Color.from(343, 100, 50);\n private static final Color QUARKUS_DARK = Color.from(180, 36, 5);\n private static final Color QUARKUS_LIGHT = Color.from(0, 0, 90);\n\n /**\n * This represents a HSLA color\n * see https:\n */\n static class Color {\n private int hue; \n private int saturation; \n private int 
lightness; \n private double alpha; \n\n private Color(int hue, int saturation, int lightness, double alpha) {\n if (hue < 0 || hue > 360) {\n throw new RuntimeException(\n \"Invalid hue, number needs to be between 0 and 360. Defines a degree on the color wheel\");\n }\n this.hue = hue;\n\n if (saturation < 0 || saturation > 100) {\n throw new RuntimeException(\n \"Invalid saturation, number needs to be between 0 and 100. 0% is a shade of gray and 100% is the full color (full saturation)\");\n }\n this.saturation = saturation;\n\n if (lightness < 0 || lightness > 100) {\n throw new RuntimeException(\n \"Invalid lightness, number needs to be between 0 and 100. 0% is black, 50% is normal, and 100% is white\");\n }\n this.lightness = lightness;\n\n if (alpha < 0 || alpha > 1) {\n throw new RuntimeException(\n \"Invalid alpha, number needs to be between 0 and 1. 0 is fully transparent, 1 is not transparent at all\");\n }\n this.alpha = alpha;\n }\n\n @Override\n public String toString() {\n return \"hsla(\" + this.hue + \", \" + this.saturation + \"%, \" + this.lightness + \"%, \" + this.alpha + \")\";\n }\n\n static Color from(Color color, double alpha) {\n return new Color(color.hue, color.saturation, color.lightness, alpha);\n }\n\n static Color from(int hue, int saturation, int lightness) {\n return new Color(hue, saturation, lightness, 1);\n }\n\n static Color from(int hue, int saturation, int lightness, double alpha) {\n return new Color(hue, saturation, lightness, alpha);\n }\n }\n\n private static class DetectPackageFileVisitor extends SimpleFileVisitor {\n private final List paths;\n\n public DetectPackageFileVisitor(List paths) {\n this.paths = paths;\n }\n\n @Override\n public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) {\n boolean hasRegularFiles = false;\n File[] files = dir.toFile().listFiles();\n if (files != null) {\n for (File file : files) {\n if (file.isFile()) {\n hasRegularFiles = true;\n break;\n }\n }\n }\n if 
(hasRegularFiles) {\n paths.add(dir.toAbsolutePath().toString());\n }\n return FileVisitResult.CONTINUE;\n }\n }\n}", "context_after": "class BuildTimeContentProcessor {\n private static final Logger log = Logger.getLogger(BuildTimeContentProcessor.class);\n\n private static final String SLASH = \"/\";\n private static final String DEV_UI = \"dev-ui\";\n private static final String BUILD_TIME_PATH = \"dev-ui-templates/build-time\";\n\n final Config config = ConfigProvider.getConfig();\n\n /**\n * Here we create references to internal dev ui files so that they can be imported by ref.\n * This will be merged into the final importmap\n */\n @BuildStep(onlyIf = IsDevelopment.class)\n InternalImportMapBuildItem createKnownInternalImportMap(NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) {\n\n String contextRoot = nonApplicationRootPathBuildItem.getNonApplicationRootPath() + DEV_UI + SLASH;\n\n InternalImportMapBuildItem internalImportMapBuildItem = new InternalImportMapBuildItem();\n\n internalImportMapBuildItem.add(\"devui/\", contextRoot);\n \n internalImportMapBuildItem.add(\"qwc/\", contextRoot + \"qwc/\");\n internalImportMapBuildItem.add(\"qwc-no-data\", contextRoot + \"qwc/qwc-no-data.js\");\n internalImportMapBuildItem.add(\"qwc-hot-reload-element\", contextRoot + \"qwc/qwc-hot-reload-element.js\");\n internalImportMapBuildItem.add(\"qwc-server-log\", contextRoot + \"qwc/qwc-server-log.js\");\n internalImportMapBuildItem.add(\"qwc-extension-link\", contextRoot + \"qwc/qwc-extension-link.js\");\n \n internalImportMapBuildItem.add(\"qui/\", contextRoot + \"qui/\");\n internalImportMapBuildItem.add(\"qui-card\", contextRoot + \"qui/qui-card.js\");\n\n internalImportMapBuildItem.add(\"qui-badge\", contextRoot + \"qui/qui-badge.js\");\n internalImportMapBuildItem.add(\"qui-alert\", contextRoot + \"qui/qui-alert.js\");\n internalImportMapBuildItem.add(\"qui-code-block\", contextRoot + \"qui/qui-code-block.js\");\n 
internalImportMapBuildItem.add(\"qui-ide-link\", contextRoot + \"qui/qui-ide-link.js\");\n\n \n internalImportMapBuildItem.add(\"echarts/\", contextRoot + \"echarts/\");\n internalImportMapBuildItem.add(\"echarts-gauge-grade\", contextRoot + \"echarts/echarts-gauge-grade.js\");\n internalImportMapBuildItem.add(\"echarts-pie\", contextRoot + \"echarts/echarts-pie.js\");\n internalImportMapBuildItem.add(\"echarts-horizontal-stacked-bar\",\n contextRoot + \"echarts/echarts-horizontal-stacked-bar.js\");\n\n \n internalImportMapBuildItem.add(\"icon/\", contextRoot + \"icon/\");\n \n internalImportMapBuildItem.add(\"controller/\", contextRoot + \"controller/\");\n internalImportMapBuildItem.add(\"log-controller\", contextRoot + \"controller/log-controller.js\");\n internalImportMapBuildItem.add(\"storage-controller\", contextRoot + \"controller/storage-controller.js\");\n internalImportMapBuildItem.add(\"router-controller\", contextRoot + \"controller/router-controller.js\");\n internalImportMapBuildItem.add(\"notifier\", contextRoot + \"controller/notifier.js\");\n internalImportMapBuildItem.add(\"jsonrpc\", contextRoot + \"controller/jsonrpc.js\");\n \n internalImportMapBuildItem.add(\"state/\", contextRoot + \"state/\");\n internalImportMapBuildItem.add(\"theme-state\", contextRoot + \"state/theme-state.js\");\n internalImportMapBuildItem.add(\"connection-state\", contextRoot + \"state/connection-state.js\");\n internalImportMapBuildItem.add(\"devui-state\", contextRoot + \"state/devui-state.js\");\n\n return internalImportMapBuildItem;\n }\n\n /**\n * Here we map all the pages (as defined by the extensions) build time data\n *\n * @param pageBuildItems\n * @param buildTimeConstProducer\n */\n @BuildStep(onlyIf = IsDevelopment.class)\n void mapPageBuildTimeData(List cards,\n List menus,\n List footers,\n CurateOutcomeBuildItem curateOutcomeBuildItem,\n BuildProducer buildTimeConstProducer) {\n\n for (CardPageBuildItem card : cards) {\n String extensionPathName = 
card.getExtensionPathName(curateOutcomeBuildItem);\n Map buildTimeData = getBuildTimeDataForCard(curateOutcomeBuildItem, card);\n if (!buildTimeData.isEmpty()) {\n buildTimeConstProducer.produce(\n new BuildTimeConstBuildItem(extensionPathName, buildTimeData));\n }\n }\n for (MenuPageBuildItem menu : menus) {\n String extensionPathName = menu.getExtensionPathName(curateOutcomeBuildItem);\n Map buildTimeData = getBuildTimeDataForPage(menu);\n if (!buildTimeData.isEmpty()) {\n buildTimeConstProducer.produce(\n new BuildTimeConstBuildItem(extensionPathName, buildTimeData));\n }\n }\n for (FooterPageBuildItem footer : footers) {\n String extensionPathName = footer.getExtensionPathName(curateOutcomeBuildItem);\n Map buildTimeData = getBuildTimeDataForPage(footer);\n if (!buildTimeData.isEmpty()) {\n buildTimeConstProducer.produce(\n new BuildTimeConstBuildItem(extensionPathName, buildTimeData));\n }\n }\n }\n\n private Map getBuildTimeDataForPage(AbstractPageBuildItem pageBuildItem) {\n Map m = new HashMap<>();\n if (pageBuildItem.hasBuildTimeData()) {\n m.putAll(pageBuildItem.getBuildTimeData());\n }\n return m;\n }\n\n private Map getBuildTimeDataForCard(CurateOutcomeBuildItem curateOutcomeBuildItem,\n CardPageBuildItem pageBuildItem) {\n Map m = getBuildTimeDataForPage(pageBuildItem);\n\n if (pageBuildItem.getOptionalCard().isPresent()) {\n \n List pages = new ArrayList<>();\n List pageBuilders = pageBuildItem.getPages();\n for (PageBuilder pageBuilder : pageBuilders) {\n String path = pageBuildItem.getExtensionPathName(curateOutcomeBuildItem);\n pageBuilder.namespace(path);\n pageBuilder.extension(path);\n pages.add(pageBuilder.build());\n }\n\n m.put(\"pages\", pages);\n }\n return m;\n }\n\n /**\n * Here we find all build time data and make then available via a const\n *\n * js components can import the const with \"import {constName} from '{ext}-data';\"\n *\n * @param pageBuildItems\n * @param quteTemplateProducer\n * @param internalImportMapProducer\n */\n 
@BuildStep(onlyIf = IsDevelopment.class)\n void createBuildTimeConstJsTemplate(CurateOutcomeBuildItem curateOutcomeBuildItem,\n NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,\n List buildTimeConstBuildItems,\n BuildProducer quteTemplateProducer,\n BuildProducer internalImportMapProducer) {\n\n String contextRoot = nonApplicationRootPathBuildItem.getNonApplicationRootPath() + DEV_UI + SLASH;\n\n QuteTemplateBuildItem quteTemplateBuildItem = new QuteTemplateBuildItem(\n QuteTemplateBuildItem.DEV_UI);\n\n InternalImportMapBuildItem internalImportMapBuildItem = new InternalImportMapBuildItem();\n\n for (BuildTimeConstBuildItem buildTimeConstBuildItem : buildTimeConstBuildItems) {\n Map data = new HashMap<>();\n if (buildTimeConstBuildItem.hasBuildTimeData()) {\n for (Map.Entry pageData : buildTimeConstBuildItem.getBuildTimeData().entrySet()) {\n try {\n String key = pageData.getKey();\n String value = DatabindCodec.prettyMapper().writeValueAsString(pageData.getValue());\n data.put(key, value);\n } catch (JsonProcessingException ex) {\n log.error(\"Could not create Json Data for Dev UI page\", ex);\n }\n }\n }\n if (!data.isEmpty()) {\n Map qutedata = new HashMap<>();\n qutedata.put(\"buildTimeData\", data);\n\n String ref = buildTimeConstBuildItem.getExtensionPathName(curateOutcomeBuildItem) + \"-data\";\n String file = ref + \".js\";\n quteTemplateBuildItem.add(\"build-time-data.js\", file, qutedata);\n internalImportMapBuildItem.add(ref, contextRoot + file);\n }\n }\n\n quteTemplateProducer.produce(quteTemplateBuildItem);\n internalImportMapProducer.produce(internalImportMapBuildItem);\n }\n\n /**\n * Here we find all the mvnpm jars\n */\n @BuildStep(onlyIf = IsDevelopment.class)\n void gatherMvnpmJars(BuildProducer mvnpmProducer, CurateOutcomeBuildItem curateOutcomeBuildItem) {\n Set mvnpmJars = new HashSet<>();\n ClassLoader tccl = Thread.currentThread().getContextClassLoader();\n try {\n Enumeration jarsWithImportMaps = 
tccl.getResources(Location.IMPORTMAP_PATH);\n while (jarsWithImportMaps.hasMoreElements()) {\n URL jarUrl = jarsWithImportMaps.nextElement();\n final JarURLConnection connection = (JarURLConnection) jarUrl.openConnection();\n mvnpmJars.add(connection.getJarFileURL());\n }\n mvnpmProducer.produce(new MvnpmBuildItem(mvnpmJars));\n } catch (IOException ex) {\n throw new UncheckedIOException(ex);\n }\n }\n\n /**\n * Here we create index.html\n * We aggregate all import maps into one\n * This includes import maps from 3rd party libs from mvnpm.org and internal ones defined above\n *\n * @return The QuteTemplate Build item that will create the end result\n */\n @BuildStep(onlyIf = IsDevelopment.class)\n QuteTemplateBuildItem createIndexHtmlTemplate(\n MvnpmBuildItem mvnpmBuildItem,\n ThemeVarsBuildItem themeVarsBuildItem,\n NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,\n List internalImportMapBuildItems) {\n QuteTemplateBuildItem quteTemplateBuildItem = new QuteTemplateBuildItem(\n QuteTemplateBuildItem.DEV_UI);\n\n Aggregator aggregator = new Aggregator(mvnpmBuildItem.getMvnpmJars());\n\n for (InternalImportMapBuildItem importMapBuildItem : internalImportMapBuildItems) {\n Map importMap = importMapBuildItem.getImportMap();\n aggregator.addMappings(importMap);\n }\n String importmap = aggregator.aggregateAsJson(nonApplicationRootPathBuildItem.getNonApplicationRootPath());\n aggregator.reset();\n\n String themeVars = themeVarsBuildItem.getTemplateValue();\n String nonApplicationRoot = nonApplicationRootPathBuildItem.getNonApplicationRootPath();\n String contextRoot = nonApplicationRoot + DEV_UI + SLASH;\n\n Map data = Map.of(\n \"nonApplicationRoot\", nonApplicationRoot,\n \"contextRoot\", contextRoot,\n \"importmap\", importmap,\n \"themeVars\", themeVars);\n\n quteTemplateBuildItem.add(\"index.html\", data);\n\n return quteTemplateBuildItem;\n }\n\n \n @BuildStep(onlyIf = IsDevelopment.class)\n void loadAllBuildTimeTemplates(BuildProducer 
buildTimeContentProducer,\n List templates) {\n ClassLoader cl = Thread.currentThread().getContextClassLoader();\n for (QuteTemplateBuildItem template : templates) {\n\n List contentPerExtension = new ArrayList<>();\n\n List templatesWithData = template.getTemplateDatas();\n for (QuteTemplateBuildItem.TemplateData e : templatesWithData) {\n\n String templateName = e.getTemplateName(); \n Map data = e.getData();\n String resourceName = BUILD_TIME_PATH + SLASH + templateName;\n String fileName = e.getFileName();\n \n try (InputStream templateStream = cl.getResourceAsStream(resourceName)) {\n if (templateStream != null) {\n byte[] templateContent = IoUtil.readBytes(templateStream);\n \n DevUIContent content = DevUIContent.builder()\n .fileName(fileName)\n .template(templateContent)\n .addData(data)\n .build();\n contentPerExtension.add(content);\n }\n } catch (IOException ioe) {\n throw new UncheckedIOException(\"An error occurred while processing \" + resourceName, ioe);\n }\n }\n buildTimeContentProducer.produce(new StaticContentBuildItem(\n StaticContentBuildItem.DEV_UI, contentPerExtension));\n }\n }\n\n /**\n * Creates json data that is available in Javascript\n */\n @BuildStep(onlyIf = IsDevelopment.class)\n void createBuildTimeData(BuildProducer buildTimeConstProducer,\n BuildProducer themeVarsProducer,\n List internalPages,\n ExtensionsBuildItem extensionsBuildItem,\n NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,\n LaunchModeBuildItem launchModeBuildItem,\n Optional effectiveIdeBuildItem) {\n\n BuildTimeConstBuildItem internalBuildTimeData = new BuildTimeConstBuildItem(AbstractDevUIBuildItem.DEV_UI);\n\n addThemeBuildTimeData(internalBuildTimeData, themeVarsProducer);\n addMenuSectionBuildTimeData(internalBuildTimeData, internalPages, extensionsBuildItem);\n addFooterTabBuildTimeData(internalBuildTimeData, extensionsBuildItem);\n addVersionInfoBuildTimeData(internalBuildTimeData, nonApplicationRootPathBuildItem);\n 
addIdeBuildTimeData(internalBuildTimeData, effectiveIdeBuildItem, launchModeBuildItem);\n buildTimeConstProducer.produce(internalBuildTimeData);\n }\n\n private void addThemeBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,\n BuildProducer themeVarsProducer) {\n \n Map> themes = new HashMap<>();\n Map dark = new HashMap<>();\n Map light = new HashMap<>();\n\n computeColors(themes, dark, light);\n\n internalBuildTimeData.addBuildTimeData(\"themes\", themes);\n\n \n themeVarsProducer.produce(new ThemeVarsBuildItem(light.keySet(), QUARKUS_BLUE.toString()));\n }\n\n private void addMenuSectionBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,\n List internalPages,\n ExtensionsBuildItem extensionsBuildItem) {\n \n @SuppressWarnings(\"unchecked\")\n List sectionMenu = new ArrayList();\n Collections.sort(internalPages, (t, t1) -> {\n return ((Integer) t.getPosition()).compareTo(t1.getPosition());\n });\n\n for (InternalPageBuildItem internalPageBuildItem : internalPages) {\n List pages = internalPageBuildItem.getPages();\n for (Page page : pages) {\n sectionMenu.add(page);\n }\n internalBuildTimeData.addAllBuildTimeData(internalPageBuildItem.getBuildTimeData());\n }\n\n \n for (Extension e : extensionsBuildItem.getSectionMenuExtensions()) {\n List pagesFromExtension = e.getMenuPages();\n sectionMenu.addAll(pagesFromExtension);\n }\n\n internalBuildTimeData.addBuildTimeData(\"menuItems\", sectionMenu);\n }\n\n private void addFooterTabBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,\n ExtensionsBuildItem extensionsBuildItem) {\n \n @SuppressWarnings(\"unchecked\")\n List footerTabs = new ArrayList();\n Page serverLog = Page.webComponentPageBuilder().internal()\n .namespace(\"devui-logstream\")\n .title(\"Server\")\n .icon(\"font-awesome-solid:server\")\n .componentLink(\"qwc-server-log.js\").build();\n footerTabs.add(serverLog);\n\n Page testLog = Page.webComponentPageBuilder().internal()\n .namespace(\"devui-continuous-testing\")\n 
.title(\"Testing\")\n .icon(\"font-awesome-solid:flask-vial\")\n .componentLink(\"qwc-test-log.js\").build();\n footerTabs.add(testLog);\n\n \n if (Version.getVersion().equalsIgnoreCase(\"999-SNAPSHOT\")) {\n Page devUiLog = Page.webComponentPageBuilder().internal()\n .namespace(\"devui-jsonrpcstream\")\n .title(\"Dev UI\")\n .icon(\"font-awesome-solid:satellite-dish\")\n .componentLink(\"qwc-jsonrpc-messages.js\").build();\n footerTabs.add(devUiLog);\n }\n \n for (Extension e : extensionsBuildItem.getFooterTabsExtensions()) {\n List tabsFromExtension = e.getFooterPages();\n footerTabs.addAll(tabsFromExtension);\n }\n\n internalBuildTimeData.addBuildTimeData(\"footerTabs\", footerTabs);\n internalBuildTimeData.addBuildTimeData(\"loggerLevels\", LEVELS);\n }\n\n private void addVersionInfoBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,\n NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) {\n \n String contextRoot = nonApplicationRootPathBuildItem.getNonApplicationRootPath() + DEV_UI + SLASH;\n Map applicationInfo = new HashMap<>();\n applicationInfo.put(\"contextRoot\", contextRoot);\n applicationInfo.put(\"quarkusVersion\", Version.getVersion());\n applicationInfo.put(\"applicationName\", config.getOptionalValue(\"quarkus.application.name\", String.class).orElse(\"\"));\n applicationInfo.put(\"applicationVersion\",\n config.getOptionalValue(\"quarkus.application.version\", String.class).orElse(\"\"));\n internalBuildTimeData.addBuildTimeData(\"applicationInfo\", applicationInfo);\n }\n\n private void addIdeBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,\n Optional effectiveIdeBuildItem,\n LaunchModeBuildItem launchModeBuildItem) {\n\n Map ideInfo = new HashMap<>();\n boolean disable = launchModeBuildItem.getDevModeType().orElse(DevModeType.LOCAL) != DevModeType.LOCAL;\n ideInfo.put(\"disable\", disable);\n if (effectiveIdeBuildItem.isPresent()) {\n EffectiveIdeBuildItem eibi = effectiveIdeBuildItem.get();\n if (!disable) {\n 
\n Ide ide = eibi.getIde();\n ideInfo.put(\"ideName\", ide.name());\n ideInfo.put(\"idePackages\", getAllUserPackages());\n }\n }\n internalBuildTimeData.addBuildTimeData(\"ideInfo\", ideInfo);\n }\n\n private List getAllUserPackages() {\n List sourcesDir = DevConsoleManager.getHotReplacementContext().getSourcesDir();\n List packages = new ArrayList<>();\n\n for (Path sourcePaths : sourcesDir) {\n packages.addAll(sourcePackagesForRoot(sourcePaths));\n }\n return packages;\n }\n\n /**\n * Return the most general packages used in the application\n *

\n * TODO: this likely covers almost all typical use cases, but probably needs some tweaks for extreme corner cases\n */\n \n\n private String commonPath(List paths) {\n String commonPath = \"\";\n List dirs = new ArrayList<>(paths.size());\n for (int i = 0; i < paths.size(); i++) {\n dirs.add(i, paths.get(i).split(Pattern.quote(File.separator)));\n }\n for (int j = 0; j < dirs.get(0).length; j++) {\n String thisDir = dirs.get(0)[j]; \n boolean allMatched = true;\n for (int i = 1; i < dirs.size() && allMatched; i++) { \n if (dirs.get(i).length < j) { \n allMatched = false;\n break;\n }\n allMatched = dirs.get(i)[j].equals(thisDir); \n }\n if (allMatched) {\n commonPath += thisDir + File.separator;\n } else {\n break;\n }\n }\n return commonPath;\n }\n\n private static final List LEVELS = List.of(\n OFF.getName(),\n SEVERE.getName(),\n ERROR.getName(),\n FATAL.getName(),\n WARNING.getName(),\n WARN.getName(),\n INFO.getName(),\n DEBUG.getName(),\n TRACE.getName(),\n CONFIG.getName(),\n FINE.getName(),\n FINER.getName(),\n FINEST.getName(),\n ALL.getName());\n\n private static void computeColors(Map> themes, Map dark,\n Map light) {\n \n light.put(\"--quarkus-blue\", QUARKUS_BLUE.toString());\n dark.put(\"--quarkus-blue\", QUARKUS_BLUE.toString());\n\n light.put(\"--quarkus-red\", QUARKUS_RED.toString());\n dark.put(\"--quarkus-red\", QUARKUS_RED.toString());\n\n light.put(\"--quarkus-center\", QUARKUS_DARK.toString());\n dark.put(\"--quarkus-center\", QUARKUS_LIGHT.toString());\n\n \n\n \n light.put(\"--lumo-base-color\", Color.from(0, 100, 100).toString());\n dark.put(\"--lumo-base-color\", Color.from(210, 10, 23).toString());\n\n \n light.put(\"--lumo-contrast-5pct\", Color.from(214, 61, 25, 0.05).toString());\n dark.put(\"--lumo-contrast-5pct\", Color.from(214, 65, 85, 0.06).toString());\n light.put(\"--lumo-contrast-10pct\", Color.from(214, 57, 24, 0.1).toString());\n dark.put(\"--lumo-contrast-10pct\", Color.from(214, 60, 80, 0.14).toString());\n 
light.put(\"--lumo-contrast-20pct\", Color.from(214, 53, 23, 0.16).toString());\n dark.put(\"--lumo-contrast-20pct\", Color.from(214, 64, 82, 0.23).toString());\n light.put(\"--lumo-contrast-30pct\", Color.from(214, 50, 22, 0.26).toString());\n dark.put(\"--lumo-contrast-30pct\", Color.from(214, 69, 84, 0.32).toString());\n light.put(\"--lumo-contrast-40pct\", Color.from(214, 47, 21, 0.38).toString());\n dark.put(\"--lumo-contrast-40pct\", Color.from(214, 73, 86, 0.41).toString());\n light.put(\"--lumo-contrast-50pct\", Color.from(214, 45, 20, 0.52).toString());\n dark.put(\"--lumo-contrast-50pct\", Color.from(214, 78, 88, 0.50).toString());\n light.put(\"--lumo-contrast-60pct\", Color.from(214, 43, 19, 0.6).toString());\n dark.put(\"--lumo-contrast-60pct\", Color.from(214, 82, 90, 0.6).toString());\n light.put(\"--lumo-contrast-70pct\", Color.from(214, 42, 18, 0.69).toString());\n dark.put(\"--lumo-contrast-70pct\", Color.from(214, 87, 92, 0.7).toString());\n light.put(\"--lumo-contrast-80pct\", Color.from(214, 41, 17, 0.83).toString());\n dark.put(\"--lumo-contrast-80pct\", Color.from(214, 91, 94, 0.8).toString());\n light.put(\"--lumo-contrast-90pct\", Color.from(214, 40, 16, 0.94).toString());\n dark.put(\"--lumo-contrast-90pct\", Color.from(214, 96, 96, 0.9).toString());\n light.put(\"--lumo-contrast\", Color.from(214, 35, 15).toString());\n dark.put(\"--lumo-contrast\", Color.from(214, 100, 98).toString());\n\n \n light.put(\"--lumo-primary-color-10pct\", Color.from(214, 100, 60, 0.13).toString());\n dark.put(\"--lumo-primary-color-10pct\", Color.from(214, 90, 63, 0.1).toString());\n light.put(\"--lumo-primary-color-50pct\", Color.from(QUARKUS_BLUE, 0.76).toString());\n dark.put(\"--lumo-primary-color-50pct\", Color.from(QUARKUS_BLUE, 0.5).toString());\n light.put(\"--lumo-primary-color\", QUARKUS_BLUE.toString());\n dark.put(\"--lumo-primary-color\", QUARKUS_BLUE.toString());\n light.put(\"--lumo-primary-text-color\", QUARKUS_BLUE.toString());\n 
dark.put(\"--lumo-primary-text-color\", QUARKUS_BLUE.toString());\n light.put(\"--lumo-primary-contrast-color\", Color.from(0, 100, 100).toString());\n dark.put(\"--lumo-primary-contrast-color\", Color.from(0, 100, 100).toString());\n\n \n light.put(\"--lumo-error-color-10pct\", Color.from(3, 85, 49, 0.1).toString());\n dark.put(\"--lumo-error-color-10pct\", Color.from(3, 90, 63, 0.1).toString());\n light.put(\"--lumo-error-color-50pct\", Color.from(3, 85, 49, 0.5).toString());\n dark.put(\"--lumo-error-color-50pct\", Color.from(3, 90, 63, 0.5).toString());\n light.put(\"--lumo-error-color\", Color.from(3, 85, 48).toString());\n dark.put(\"--lumo-error-color\", Color.from(3, 90, 63).toString());\n light.put(\"--lumo-error-text-color\", Color.from(3, 89, 42).toString());\n dark.put(\"--lumo-error-text-color\", Color.from(3, 100, 67).toString());\n light.put(\"--lumo-error-contrast-color\", Color.from(0, 100, 100).toString());\n dark.put(\"--lumo-error-contrast-color\", Color.from(0, 100, 100).toString());\n\n \n light.put(\"--lumo-warning-color-10pct\", Color.from(30, 100, 50, 0.1).toString());\n dark.put(\"--lumo-warning-color-10pct\", Color.from(30, 100, 50, 0.1).toString());\n light.put(\"--lumo-warning-color-50pct\", Color.from(30, 100, 50, 0.5).toString());\n dark.put(\"--lumo-warning-color-50pct\", Color.from(30, 100, 50, 0.5).toString());\n light.put(\"--lumo-warning-color\", Color.from(30, 100, 50).toString());\n dark.put(\"--lumo-warning-color\", Color.from(30, 100, 50).toString());\n light.put(\"--lumo-warning-text-color\", Color.from(30, 89, 42).toString());\n dark.put(\"--lumo-warning-text-color\", Color.from(30, 100, 67).toString());\n light.put(\"--lumo-warning-contrast-color\", Color.from(0, 100, 100).toString());\n dark.put(\"--lumo-warning-contrast-color\", Color.from(0, 100, 100).toString());\n\n \n light.put(\"--lumo-success-color-10pct\", Color.from(145, 72, 31, 0.1).toString());\n dark.put(\"--lumo-success-color-10pct\", Color.from(145, 65, 42, 
0.1).toString());\n light.put(\"--lumo-success-color-50pct\", Color.from(145, 72, 31, 0.5).toString());\n dark.put(\"--lumo-success-color-50pct\", Color.from(145, 65, 42, 0.5).toString());\n light.put(\"--lumo-success-color\", Color.from(145, 72, 30).toString());\n dark.put(\"--lumo-success-color\", Color.from(145, 65, 42).toString());\n light.put(\"--lumo-success-text-color\", Color.from(145, 85, 25).toString());\n dark.put(\"--lumo-success-text-color\", Color.from(145, 85, 47).toString());\n light.put(\"--lumo-success-contrast-color\", Color.from(0, 100, 100).toString());\n dark.put(\"--lumo-success-contrast-color\", Color.from(0, 100, 100).toString());\n\n \n light.put(\"--lumo-header-text-color\", Color.from(214, 35, 15).toString());\n dark.put(\"--lumo-header-text-color\", Color.from(214, 100, 98).toString());\n light.put(\"--lumo-body-text-color\", Color.from(214, 40, 16, 0.94).toString());\n dark.put(\"--lumo-body-text-color\", Color.from(214, 96, 96, 0.9).toString());\n light.put(\"--lumo-secondary-text-color\", Color.from(214, 42, 18, 0.69).toString());\n dark.put(\"--lumo-secondary-text-color\", Color.from(214, 87, 92, 0.7).toString());\n light.put(\"--lumo-tertiary-text-color\", Color.from(214, 45, 20, 0.52).toString());\n dark.put(\"--lumo-tertiary-text-color\", Color.from(214, 78, 88, 0.5).toString());\n light.put(\"--lumo-disabled-text-color\", Color.from(214, 50, 22, 0.26).toString());\n dark.put(\"--lumo-disabled-text-color\", Color.from(214, 69, 84, 0.32).toString());\n\n themes.put(\"dark\", dark);\n themes.put(\"light\", light);\n }\n\n private static final Color QUARKUS_BLUE = Color.from(211, 63, 54);\n private static final Color QUARKUS_RED = Color.from(343, 100, 50);\n private static final Color QUARKUS_DARK = Color.from(180, 36, 5);\n private static final Color QUARKUS_LIGHT = Color.from(0, 0, 90);\n\n /**\n * This represents a HSLA color\n * see https:\n */\n static class Color {\n private int hue; \n private int saturation; \n private int 
lightness; \n private double alpha; \n\n private Color(int hue, int saturation, int lightness, double alpha) {\n if (hue < 0 || hue > 360) {\n throw new RuntimeException(\n \"Invalid hue, number needs to be between 0 and 360. Defines a degree on the color wheel\");\n }\n this.hue = hue;\n\n if (saturation < 0 || saturation > 100) {\n throw new RuntimeException(\n \"Invalid saturation, number needs to be between 0 and 100. 0% is a shade of gray and 100% is the full color (full saturation)\");\n }\n this.saturation = saturation;\n\n if (lightness < 0 || lightness > 100) {\n throw new RuntimeException(\n \"Invalid lightness, number needs to be between 0 and 100. 0% is black, 50% is normal, and 100% is white\");\n }\n this.lightness = lightness;\n\n if (alpha < 0 || alpha > 1) {\n throw new RuntimeException(\n \"Invalid alpha, number needs to be between 0 and 1. 0 is fully transparent, 1 is not transparent at all\");\n }\n this.alpha = alpha;\n }\n\n @Override\n public String toString() {\n return \"hsla(\" + this.hue + \", \" + this.saturation + \"%, \" + this.lightness + \"%, \" + this.alpha + \")\";\n }\n\n static Color from(Color color, double alpha) {\n return new Color(color.hue, color.saturation, color.lightness, alpha);\n }\n\n static Color from(int hue, int saturation, int lightness) {\n return new Color(hue, saturation, lightness, 1);\n }\n\n static Color from(int hue, int saturation, int lightness, double alpha) {\n return new Color(hue, saturation, lightness, alpha);\n }\n }\n\n private static class DetectPackageFileVisitor extends SimpleFileVisitor {\n private final List paths;\n\n public DetectPackageFileVisitor(List paths) {\n this.paths = paths;\n }\n\n @Override\n public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) {\n boolean hasRegularFiles = false;\n File[] files = dir.toFile().listFiles();\n if (files != null) {\n for (File file : files) {\n if (file.isFile()) {\n hasRegularFiles = true;\n break;\n }\n }\n }\n if 
(hasRegularFiles) {\n paths.add(dir.toAbsolutePath().toString());\n }\n return FileVisitResult.CONTINUE;\n }\n }\n}" }, { "comment": "Do we need a new field btw? Haven't we come across the requirement to identify if we are in a match statement before?", "method_body": "private boolean validateErrorVariable(BLangErrorVariable errorVariable) {\n BErrorType errorType;\n switch (errorVariable.type.tag) {\n case TypeTags.UNION:\n BUnionType unionType = ((BUnionType) errorVariable.type);\n List possibleTypes = unionType.getMemberTypes().stream()\n .filter(type -> TypeTags.ERROR == type.tag)\n .map(BErrorType.class::cast)\n .collect(Collectors.toList());\n if (possibleTypes.isEmpty()) {\n dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type);\n return false;\n }\n if (possibleTypes.size() > 1) {\n LinkedHashSet detailType = new LinkedHashSet<>();\n for (BErrorType possibleErrType : possibleTypes) {\n detailType.add(possibleErrType.detailType);\n }\n BType errorDetailType = detailType.size() > 1\n ? 
BUnionType.create(null, detailType)\n : detailType.iterator().next();\n errorType = new BErrorType(null, symTable.stringType,\n errorDetailType);\n } else {\n errorType = possibleTypes.get(0);\n }\n break;\n case TypeTags.ERROR:\n errorType = (BErrorType) errorVariable.type;\n break;\n default:\n dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type);\n return false;\n }\n errorVariable.type = errorType;\n boolean isReasonIgnored = false;\n BLangSimpleVariable reasonVariable = errorVariable.reason;\n if (Names.IGNORE == names.fromIdNode(reasonVariable.name)) {\n reasonVariable.type = symTable.noType;\n isReasonIgnored = true;\n } else {\n errorVariable.reason.type = errorType.reasonType;\n errorVariable.reason.accept(this);\n }\n\n if (errorVariable.detail == null || (errorVariable.detail.isEmpty()\n && !isRestDetailBindingAvailable(errorVariable))) {\n if (isReasonIgnored) {\n dlog.error(errorVariable.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);\n return false;\n }\n return true;\n }\n\n if (errorType.detailType.getKind() == TypeKind.RECORD) {\n BRecordType recordType = (BRecordType) errorType.detailType;\n Map fieldMap = recordType.fields.stream()\n .collect(Collectors.toMap(f -> f.name.value, f -> f));\n\n for (BLangErrorVariable.BLangErrorDetailEntry errorDetailEntry : errorVariable.detail) {\n String entryName = errorDetailEntry.key.getValue();\n BField entryField = fieldMap.get(entryName);\n\n BLangVariable boundVar = errorDetailEntry.valueBindingPattern;\n if (entryField != null) {\n if ((entryField.symbol.flags & Flags.OPTIONAL) == Flags.OPTIONAL) {\n boundVar.type = BUnionType.create(null, entryField.type, symTable.nilType);\n } else {\n boundVar.type = entryField.type;\n }\n } else {\n if (recordType.sealed) {\n dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type);\n boundVar.type = symTable.semanticError;\n return false;\n } else {\n boundVar.type = 
BUnionType.create(null, recordType.restFieldType, symTable.nilType);\n }\n }\n\n boolean isIgnoredVar = boundVar.getKind() == NodeKind.VARIABLE\n && ((BLangSimpleVariable) boundVar).name.value.equals(Names.IGNORE.value);\n if (!isIgnoredVar) {\n boundVar.accept(this);\n }\n }\n\n if (isRestDetailBindingAvailable(errorVariable)) {\n BTypeSymbol typeSymbol = createTypeSymbol(SymTag.TYPE);\n BMapType restType = new BMapType(TypeTags.MAP, recordType.restFieldType, typeSymbol);\n typeSymbol.type = restType;\n errorVariable.restDetail.type = restType;\n errorVariable.restDetail.accept(this);\n }\n\n if (isReasonSpecified(errorVariable)\n && !errorVariable.reasonVarPrefixAvailable\n && errorVariable.reasonMatchConst == null\n && errorVariable.isInMatchStmt) {\n\n BSymbol reasonConst = symResolver.lookupSymbol(\n this.env.enclEnv, names.fromString(errorVariable.reason.name.value), SymTag.CONSTANT);\n if (reasonConst == symTable.notFoundSymbol) {\n dlog.error(errorVariable.reason.pos, DiagnosticCode.INVALID_ERROR_REASON_BINDING_PATTERN,\n errorVariable.reason.name);\n } else {\n dlog.error(errorVariable.reason.pos, DiagnosticCode.UNSUPPORTED_ERROR_REASON_CONST_MATCH);\n }\n return false;\n }\n return true;\n\n } else if (errorType.detailType.getKind() == TypeKind.UNION) {\n BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.ERROR,\n env.enclPkg.packageID, symTable.errorType, env.scope.owner);\n \n errorVariable.type = new BErrorType(errorTypeSymbol, symTable.stringType, symTable.detailType);\n return validateErrorVariable(errorVariable);\n }\n\n if (isRestDetailBindingAvailable(errorVariable)) {\n \n errorVariable.restDetail.type = symTable.detailType;\n errorVariable.restDetail.accept(this);\n }\n return true;\n }", "target_code": "&& errorVariable.isInMatchStmt) {", "method_body_after": "private boolean validateErrorVariable(BLangErrorVariable errorVariable) {\n BErrorType errorType;\n switch (errorVariable.type.tag) {\n case 
TypeTags.UNION:\n BUnionType unionType = ((BUnionType) errorVariable.type);\n List possibleTypes = unionType.getMemberTypes().stream()\n .filter(type -> TypeTags.ERROR == type.tag)\n .map(BErrorType.class::cast)\n .collect(Collectors.toList());\n if (possibleTypes.isEmpty()) {\n dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type);\n return false;\n }\n if (possibleTypes.size() > 1) {\n LinkedHashSet detailType = new LinkedHashSet<>();\n for (BErrorType possibleErrType : possibleTypes) {\n detailType.add(possibleErrType.detailType);\n }\n BType errorDetailType = detailType.size() > 1\n ? BUnionType.create(null, detailType)\n : detailType.iterator().next();\n errorType = new BErrorType(null, symTable.stringType,\n errorDetailType);\n } else {\n errorType = possibleTypes.get(0);\n }\n break;\n case TypeTags.ERROR:\n errorType = (BErrorType) errorVariable.type;\n break;\n default:\n dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type);\n return false;\n }\n errorVariable.type = errorType;\n boolean isReasonIgnored = false;\n BLangSimpleVariable reasonVariable = errorVariable.reason;\n if (Names.IGNORE == names.fromIdNode(reasonVariable.name)) {\n reasonVariable.type = symTable.noType;\n isReasonIgnored = true;\n } else {\n errorVariable.reason.type = errorType.reasonType;\n errorVariable.reason.accept(this);\n }\n\n if (errorVariable.detail == null || (errorVariable.detail.isEmpty()\n && !isRestDetailBindingAvailable(errorVariable))) {\n if (isReasonIgnored) {\n dlog.error(errorVariable.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);\n return false;\n }\n return true;\n }\n\n if (errorType.detailType.getKind() == TypeKind.RECORD) {\n return validateErrorVariable(errorVariable, errorType);\n } else if (errorType.detailType.getKind() == TypeKind.UNION) {\n BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.ERROR,\n env.enclPkg.packageID, 
symTable.errorType, env.scope.owner);\n \n errorVariable.type = new BErrorType(errorTypeSymbol, symTable.stringType, symTable.detailType);\n return validateErrorVariable(errorVariable);\n }\n\n if (isRestDetailBindingAvailable(errorVariable)) {\n \n errorVariable.restDetail.type = symTable.detailType;\n errorVariable.restDetail.accept(this);\n }\n return true;\n }", "context_before": "class SemanticAnalyzer extends BLangNodeVisitor {\n\n private static final CompilerContext.Key SYMBOL_ANALYZER_KEY =\n new CompilerContext.Key<>();\n private static final String ANONYMOUS_RECORD_NAME = \"anonymous-record\";\n private static final String NULL_LITERAL = \"null\";\n private static final String LEFT_BRACE = \"{\";\n private static final String RIGHT_BRACE = \"}\";\n private static final String SPACE = \" \";\n public static final String COLON = \":\";\n\n private SymbolTable symTable;\n private SymbolEnter symbolEnter;\n private Names names;\n private SymbolResolver symResolver;\n private TypeChecker typeChecker;\n private Types types;\n private StreamsQuerySemanticAnalyzer streamsQuerySemanticAnalyzer;\n private BLangDiagnosticLog dlog;\n private TypeNarrower typeNarrower;\n private ConstantAnalyzer constantAnalyzer;\n private ConstantValueResolver constantValueResolver;\n\n private SymbolEnv env;\n private BType expType;\n private DiagnosticCode diagCode;\n private BType resType;\n\n \n \n private Stack prevEnvs = new Stack<>();\n\n public static SemanticAnalyzer getInstance(CompilerContext context) {\n SemanticAnalyzer semAnalyzer = context.get(SYMBOL_ANALYZER_KEY);\n if (semAnalyzer == null) {\n semAnalyzer = new SemanticAnalyzer(context);\n }\n\n return semAnalyzer;\n }\n\n public SemanticAnalyzer(CompilerContext context) {\n context.put(SYMBOL_ANALYZER_KEY, this);\n\n this.symTable = SymbolTable.getInstance(context);\n this.symbolEnter = SymbolEnter.getInstance(context);\n this.names = Names.getInstance(context);\n this.symResolver = 
SymbolResolver.getInstance(context);\n this.typeChecker = TypeChecker.getInstance(context);\n this.types = Types.getInstance(context);\n this.streamsQuerySemanticAnalyzer = StreamsQuerySemanticAnalyzer.getInstance(context);\n this.dlog = BLangDiagnosticLog.getInstance(context);\n this.typeNarrower = TypeNarrower.getInstance(context);\n this.constantAnalyzer = ConstantAnalyzer.getInstance(context);\n this.constantValueResolver = ConstantValueResolver.getInstance(context);\n }\n\n public BLangPackage analyze(BLangPackage pkgNode) {\n pkgNode.accept(this);\n return pkgNode;\n }\n\n\n \n\n public void visit(BLangPackage pkgNode) {\n if (pkgNode.completedPhases.contains(CompilerPhase.TYPE_CHECK)) {\n return;\n }\n SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);\n\n \n pkgNode.topLevelNodes.stream().filter(pkgLevelNode -> pkgLevelNode.getKind() == NodeKind.CONSTANT)\n .forEach(constant -> analyzeDef((BLangNode) constant, pkgEnv));\n this.constantValueResolver.resolve(pkgNode.constants);\n\n pkgNode.topLevelNodes.stream().filter(pkgLevelNode -> pkgLevelNode.getKind() != NodeKind.CONSTANT)\n .filter(pkgLevelNode -> !(pkgLevelNode.getKind() == NodeKind.FUNCTION\n && ((BLangFunction) pkgLevelNode).flagSet.contains(Flag.LAMBDA)))\n .forEach(topLevelNode -> analyzeDef((BLangNode) topLevelNode, pkgEnv));\n\n while (pkgNode.lambdaFunctions.peek() != null) {\n BLangLambdaFunction lambdaFunction = pkgNode.lambdaFunctions.poll();\n BLangFunction function = lambdaFunction.function;\n lambdaFunction.type = function.symbol.type;\n analyzeDef(lambdaFunction.function, lambdaFunction.cachedEnv);\n }\n\n pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage));\n pkgNode.completedPhases.add(CompilerPhase.TYPE_CHECK);\n }\n\n public void visit(BLangXMLNS xmlnsNode) {\n xmlnsNode.type = symTable.stringType;\n\n \n \n if (xmlnsNode.symbol == null) {\n symbolEnter.defineNode(xmlnsNode, env);\n }\n\n 
typeChecker.checkExpr(xmlnsNode.namespaceURI, env, symTable.stringType);\n }\n\n public void visit(BLangXMLNSStatement xmlnsStmtNode) {\n analyzeNode(xmlnsStmtNode.xmlnsDecl, env);\n }\n\n public void visit(BLangFunction funcNode) {\n SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);\n \n funcNode.symbol.params.forEach(param -> param.flags |= Flags.FUNCTION_FINAL);\n\n if (!funcNode.flagSet.contains(Flag.WORKER)) {\n \n funcNode.annAttachments.forEach(annotationAttachment -> {\n if (Symbols.isFlagOn(funcNode.symbol.flags, Flags.RESOURCE)) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.RESOURCE);\n } else if (funcNode.attachedFunction) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT_METHOD);\n }\n annotationAttachment.attachPoints.add(AttachPoint.Point.FUNCTION);\n this.analyzeDef(annotationAttachment, funcEnv);\n });\n validateAnnotationAttachmentCount(funcNode.annAttachments);\n }\n\n if (funcNode.returnTypeNode != null) {\n funcNode.returnTypeAnnAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.RETURN);\n this.analyzeDef(annotationAttachment, funcEnv);\n });\n validateAnnotationAttachmentCount(funcNode.returnTypeAnnAttachments);\n }\n\n if (Symbols.isNative(funcNode.symbol)) {\n funcNode.externalAnnAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.EXTERNAL);\n this.analyzeDef(annotationAttachment, funcEnv);\n });\n validateAnnotationAttachmentCount(funcNode.externalAnnAttachments);\n }\n\n for (BLangSimpleVariable param : funcNode.requiredParams) {\n symbolEnter.defineExistingVarSymbolInEnv(param.symbol, funcNode.clonedEnv);\n this.analyzeDef(param, funcNode.clonedEnv);\n }\n if (funcNode.restParam != null) {\n symbolEnter.defineExistingVarSymbolInEnv(funcNode.restParam.symbol, funcNode.clonedEnv);\n this.analyzeDef(funcNode.restParam, funcNode.clonedEnv);\n }\n\n 
validateObjectAttachedFunction(funcNode);\n\n \n if (Symbols.isNative(funcNode.symbol) || funcNode.interfaceFunction) {\n if (funcNode.body != null) {\n dlog.error(funcNode.pos, DiagnosticCode.EXTERN_FUNCTION_CANNOT_HAVE_BODY, funcNode.name);\n }\n return;\n }\n\n if (funcNode.body != null) {\n analyzeStmt(funcNode.body, funcEnv);\n }\n\n if (funcNode.anonForkName != null) {\n funcNode.symbol.enclForkName = funcNode.anonForkName;\n }\n\n this.processWorkers(funcNode, funcEnv);\n }\n\n private void processWorkers(BLangInvokableNode invNode, SymbolEnv invEnv) {\n if (invNode.workers.size() > 0) {\n invEnv.scope.entries.putAll(invNode.body.scope.entries);\n invNode.workers.forEach(e -> this.symbolEnter.defineNode(e, invEnv));\n invNode.workers.forEach(e -> analyzeNode(e, invEnv));\n }\n }\n\n @Override\n public void visit(BLangTypeDefinition typeDefinition) {\n if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE\n || typeDefinition.typeNode.getKind() == NodeKind.RECORD_TYPE\n || typeDefinition.typeNode.getKind() == NodeKind.ERROR_TYPE\n || typeDefinition.typeNode.getKind() == NodeKind.FINITE_TYPE_NODE) {\n analyzeDef(typeDefinition.typeNode, env);\n }\n\n typeDefinition.annAttachments.forEach(annotationAttachment -> {\n if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT);\n }\n annotationAttachment.attachPoints.add(AttachPoint.Point.TYPE);\n\n annotationAttachment.accept(this);\n });\n validateAnnotationAttachmentCount(typeDefinition.annAttachments);\n }\n\n public void visit(BLangTypeConversionExpr conversionExpr) {\n conversionExpr.annAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.TYPE);\n if (conversionExpr.typeNode.getKind() == NodeKind.OBJECT_TYPE) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT);\n }\n\n annotationAttachment.accept(this);\n });\n 
validateAnnotationAttachmentCount(conversionExpr.annAttachments);\n }\n\n @Override\n public void visit(BLangFiniteTypeNode finiteTypeNode) {\n finiteTypeNode.valueSpace.forEach(val -> {\n if (val.type.tag == TypeTags.NIL && NULL_LITERAL.equals(((BLangLiteral) val).originalValue)) {\n dlog.error(val.pos, DiagnosticCode.INVALID_USE_OF_NULL_LITERAL);\n }\n });\n }\n\n @Override\n public void visit(BLangObjectTypeNode objectTypeNode) {\n SymbolEnv objectEnv = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, env);\n\n boolean isAbstract = objectTypeNode.flagSet.contains(Flag.ABSTRACT);\n objectTypeNode.fields.forEach(field -> {\n analyzeDef(field, objectEnv);\n if (isAbstract) {\n if (field.flagSet.contains(Flag.PRIVATE)) {\n this.dlog.error(field.pos, DiagnosticCode.PRIVATE_FIELD_ABSTRACT_OBJECT, field.symbol.name);\n }\n\n if (field.expr != null) {\n this.dlog.error(field.expr.pos, DiagnosticCode.FIELD_WITH_DEFAULT_VALUE_ABSTRACT_OBJECT);\n }\n }\n });\n\n \n objectTypeNode.functions.forEach(func -> {\n analyzeDef(func, env);\n if (isAbstract && func.flagSet.contains(Flag.PRIVATE)) {\n this.dlog.error(func.pos, DiagnosticCode.PRIVATE_FUNC_ABSTRACT_OBJECT, func.name,\n objectTypeNode.symbol.name);\n }\n if (isAbstract && func.flagSet.contains(Flag.NATIVE)) {\n this.dlog.error(func.pos, DiagnosticCode.EXTERN_FUNC_ABSTRACT_OBJECT, func.name,\n objectTypeNode.symbol.name);\n }\n if (func.flagSet.contains(Flag.RESOURCE) && func.flagSet.contains(Flag.NATIVE)) {\n this.dlog.error(func.pos, DiagnosticCode.RESOURCE_FUNCTION_CANNOT_BE_EXTERN, func.name);\n }\n });\n\n \n ((BObjectTypeSymbol) objectTypeNode.symbol).referencedFunctions\n .forEach(func -> validateReferencedFunction(objectTypeNode.pos, func, env));\n\n if (objectTypeNode.initFunction == null) {\n return;\n }\n\n if (objectTypeNode.initFunction.flagSet.contains(Flag.PRIVATE)) {\n this.dlog.error(objectTypeNode.initFunction.pos, DiagnosticCode.PRIVATE_OBJECT_CONSTRUCTOR,\n 
objectTypeNode.symbol.name);\n return;\n }\n\n if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {\n this.dlog.error(objectTypeNode.initFunction.pos, DiagnosticCode.ABSTRACT_OBJECT_CONSTRUCTOR,\n objectTypeNode.symbol.name);\n return;\n }\n\n if (objectTypeNode.initFunction.flagSet.contains(Flag.NATIVE)) {\n this.dlog.error(objectTypeNode.initFunction.pos, DiagnosticCode.OBJECT_INIT_FUNCTION_CANNOT_BE_EXTERN,\n objectTypeNode.symbol.name);\n return;\n }\n\n analyzeDef(objectTypeNode.initFunction, env);\n }\n\n @Override\n public void visit(BLangRecordTypeNode recordTypeNode) {\n SymbolEnv recordEnv = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, env);\n recordTypeNode.fields.forEach(field -> analyzeDef(field, recordEnv));\n analyzeDef(recordTypeNode.initFunction, recordEnv);\n validateDefaultable(recordTypeNode);\n }\n\n @Override\n public void visit(BLangErrorType errorType) {\n BType reasonType = getReasonType(errorType);\n\n if (!types.isAssignable(reasonType, symTable.stringType)) {\n dlog.error(errorType.reasonType.pos, DiagnosticCode.INVALID_ERROR_REASON_TYPE, reasonType);\n } else if (errorType.reasonType != null) {\n validateModuleQualifiedReasons(errorType.reasonType.pos, reasonType);\n }\n\n if (errorType.detailType == null) {\n return;\n }\n\n BType detailType = errorType.detailType.type;\n if (!types.isValidErrorDetailType(detailType)) {\n dlog.error(errorType.detailType.pos, DiagnosticCode.INVALID_ERROR_DETAIL_TYPE, detailType,\n symTable.detailType);\n }\n }\n\n private BType getReasonType(BLangErrorType errorType) {\n \n if (errorType.reasonType == null) {\n return symTable.stringType;\n }\n return errorType.reasonType.type;\n }\n\n private void validateModuleQualifiedReasons(DiagnosticPos pos, BType reasonType) {\n switch (reasonType.tag) {\n case TypeTags.STRING:\n return;\n case TypeTags.FINITE:\n BFiniteType finiteType = (BFiniteType) reasonType;\n for (BLangExpression expr : finiteType.valueSpace) {\n 
validateModuleQualifiedReason(pos, (String) ((BLangLiteral) expr).value);\n }\n return;\n case TypeTags.UNION:\n ((BUnionType) reasonType).getMemberTypes().forEach(type -> validateModuleQualifiedReasons(pos, type));\n }\n }\n\n private void validateModuleQualifiedReason(DiagnosticPos pos, String reason) {\n if (!reason.startsWith(LEFT_BRACE)) {\n return;\n }\n\n PackageID currentPackageId = env.enclPkg.packageID;\n if (currentPackageId.isUnnamed || reason.contains(SPACE) ||\n !reason.startsWith(LEFT_BRACE.concat(currentPackageId.toString().split(COLON)[0])\n .concat(RIGHT_BRACE))) {\n dlog.warning(pos, DiagnosticCode.NON_MODULE_QUALIFIED_ERROR_REASON, reason);\n }\n }\n\n public void visit(BLangAnnotation annotationNode) {\n annotationNode.annAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.ANNOTATION);\n annotationAttachment.accept(this);\n });\n validateAnnotationAttachmentCount(annotationNode.annAttachments);\n }\n\n public void visit(BLangAnnotationAttachment annAttachmentNode) {\n BSymbol symbol = this.symResolver.resolveAnnotation(annAttachmentNode.pos, env,\n names.fromString(annAttachmentNode.pkgAlias.getValue()),\n names.fromString(annAttachmentNode.getAnnotationName().getValue()));\n if (symbol == this.symTable.notFoundSymbol) {\n this.dlog.error(annAttachmentNode.pos, DiagnosticCode.UNDEFINED_ANNOTATION,\n annAttachmentNode.getAnnotationName().getValue());\n return;\n }\n \n BAnnotationSymbol annotationSymbol = (BAnnotationSymbol) symbol;\n annAttachmentNode.annotationSymbol = annotationSymbol;\n if (annotationSymbol.maskedPoints > 0 &&\n !Symbols.isAttachPointPresent(annotationSymbol.maskedPoints,\n AttachPoints.asMask(annAttachmentNode.attachPoints))) {\n String msg = annAttachmentNode.attachPoints.stream()\n .map(point -> point.name().toLowerCase())\n .collect(Collectors.joining(\", \"));\n this.dlog.error(annAttachmentNode.pos, DiagnosticCode.ANNOTATION_NOT_ALLOWED, annotationSymbol, msg);\n }\n 
\n validateAnnotationAttachmentExpr(annAttachmentNode, annotationSymbol);\n }\n\n public void visit(BLangSimpleVariable varNode) {\n\n if (varNode.isDeclaredWithVar) {\n validateWorkerAnnAttachments(varNode.expr);\n handleDeclaredWithVar(varNode);\n transferForkFlag(varNode);\n return;\n }\n\n int ownerSymTag = env.scope.owner.tag;\n if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {\n \n \n if (varNode.symbol == null) {\n symbolEnter.defineNode(varNode, env);\n varNode.annAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.VAR);\n annotationAttachment.accept(this);\n });\n } else {\n varNode.annAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.PARAMETER);\n annotationAttachment.accept(this);\n });\n }\n } else {\n varNode.annAttachments.forEach(annotationAttachment -> {\n if (Symbols.isFlagOn(varNode.symbol.flags, Flags.LISTENER)) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.LISTENER);\n } else if (Symbols.isFlagOn(varNode.symbol.flags, Flags.SERVICE)) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.SERVICE);\n } else {\n annotationAttachment.attachPoints.add(AttachPoint.Point.VAR);\n }\n annotationAttachment.accept(this);\n });\n }\n validateAnnotationAttachmentCount(varNode.annAttachments);\n\n validateWorkerAnnAttachments(varNode.expr);\n\n if (isIgnoredOrEmpty(varNode)) {\n \n varNode.symbol = new BVarSymbol(0, Names.IGNORE, env.enclPkg.packageID, symTable.anyType, env.scope.owner);\n }\n\n BType lhsType = varNode.symbol.type;\n varNode.type = lhsType;\n\n \n BLangExpression rhsExpr = varNode.expr;\n if (rhsExpr == null) {\n if (lhsType.tag == TypeTags.ARRAY && typeChecker.isArrayOpenSealedType((BArrayType) lhsType)) {\n dlog.error(varNode.pos, DiagnosticCode.SEALED_ARRAY_TYPE_NOT_INITIALIZED);\n }\n return;\n }\n\n \n \n \n SymbolEnv varInitEnv = SymbolEnv.createVarInitEnv(varNode, env, varNode.symbol);\n\n 
typeChecker.checkExpr(rhsExpr, varInitEnv, lhsType);\n if (Symbols.isFlagOn(varNode.symbol.flags, Flags.LISTENER) &&\n !types.checkListenerCompatibility(varNode.symbol.type)) {\n dlog.error(varNode.pos, DiagnosticCode.INVALID_LISTENER_VARIABLE, varNode.name);\n }\n\n transferForkFlag(varNode);\n }\n\n private void transferForkFlag(BLangSimpleVariable varNode) {\n \n if (varNode.expr != null && varNode.expr.getKind() == NodeKind.INVOCATION\n && varNode.flagSet.contains(Flag.WORKER)) {\n\n BLangInvocation expr = (BLangInvocation) varNode.expr;\n if (expr.name.value.startsWith(\"0\") && (expr.symbol.flags & Flags.FORKED) == Flags.FORKED) {\n varNode.symbol.flags |= Flags.FORKED;\n }\n }\n }\n\n /**\n * Validate annotation attachment of the `start` action or workers.\n *\n * @param expr expression to be validated.\n */\n private void validateWorkerAnnAttachments(BLangExpression expr) {\n if (expr != null && expr.getKind() == NodeKind.INVOCATION && ((BLangInvocation) expr).async) {\n ((BLangInvocation) expr).annAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.WORKER);\n annotationAttachment.accept(this);\n });\n validateAnnotationAttachmentCount(((BLangInvocation) expr).annAttachments);\n }\n }\n\n public void visit(BLangRecordVariable varNode) {\n\n if (varNode.isDeclaredWithVar) {\n handleDeclaredWithVar(varNode);\n return;\n }\n\n if (varNode.type == null) {\n varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);\n }\n\n if (!validateRecordVariable(varNode)) {\n varNode.type = symTable.semanticError;\n return;\n }\n\n symbolEnter.defineNode(varNode, env);\n\n if (varNode.expr == null) {\n \n return;\n }\n\n typeChecker.checkExpr(varNode.expr, env, varNode.type);\n\n }\n\n public void visit(BLangTupleVariable varNode) {\n\n if (varNode.isDeclaredWithVar) {\n expType = resolveTupleType(varNode);\n handleDeclaredWithVar(varNode);\n return;\n }\n\n if (varNode.type == null) {\n varNode.type = 
symResolver.resolveTypeNode(varNode.typeNode, env);\n }\n\n if (!(checkTypeAndVarCountConsistency(varNode))) {\n varNode.type = symTable.semanticError;\n return;\n }\n\n symbolEnter.defineNode(varNode, env);\n\n if (varNode.expr == null) {\n \n return;\n }\n\n typeChecker.checkExpr(varNode.expr, env, varNode.type);\n }\n\n private BType resolveTupleType(BLangTupleVariable varNode) {\n List memberTypes = new ArrayList<>(varNode.memberVariables.size());\n for (BLangVariable memberVariable : varNode.memberVariables) {\n if (memberVariable.getKind() == NodeKind.TUPLE_VARIABLE) {\n memberTypes.add(resolveTupleType((BLangTupleVariable) memberVariable));\n } else {\n memberTypes.add(symTable.noType);\n }\n }\n return new BTupleType(memberTypes);\n }\n\n public void visit(BLangErrorVariable varNode) {\n \n if (varNode.isDeclaredWithVar) {\n handleDeclaredWithVar(varNode);\n return;\n }\n\n if (varNode.type == null) {\n varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);\n }\n\n \n \n \n if (!varNode.reasonVarPrefixAvailable && varNode.type == null) {\n BErrorType errorType = new BErrorType(varNode.type.tsymbol, null, null);\n\n if (varNode.type.tag == TypeTags.UNION) {\n Set members = types.expandAndGetMemberTypesRecursive(varNode.type);\n List errorMembers = members.stream()\n .filter(m -> m.tag == TypeTags.ERROR)\n .map(m -> (BErrorType) m)\n .collect(Collectors.toList());\n\n if (errorMembers.isEmpty()) {\n dlog.error(varNode.pos, DiagnosticCode.INVALID_ERROR_MATCH_PATTERN);\n return;\n } else if (errorMembers.size() == 1) {\n errorType.detailType = errorMembers.get(0).detailType;\n errorType.reasonType = errorMembers.get(0).reasonType;\n } else {\n errorType.detailType = symTable.detailType;\n errorType.reasonType = symTable.stringType;\n }\n varNode.type = errorType;\n } else if (varNode.type.tag == TypeTags.ERROR) {\n errorType.detailType = ((BErrorType) varNode.type).detailType;\n }\n\n \n \n \n if (varNode.reasonMatchConst != null) {\n BTypeSymbol 
reasonConstTypeSymbol = new BTypeSymbol(SymTag.FINITE_TYPE,\n Flags.PUBLIC, names.fromString(\"\"), this.env.enclPkg.packageID, null, this.env.scope.owner);\n varNode.reasonMatchConst.type = symTable.stringType;\n typeChecker.checkExpr(varNode.reasonMatchConst, env);\n\n LinkedHashSet members = new LinkedHashSet<>();\n members.add(varNode.reasonMatchConst);\n errorType.reasonType = new BFiniteType(reasonConstTypeSymbol, members);\n } else {\n errorType.reasonType = symTable.stringType;\n }\n }\n if (!validateErrorVariable(varNode)) {\n varNode.type = symTable.semanticError;\n return;\n }\n symbolEnter.defineNode(varNode, env);\n if (varNode.expr == null) {\n \n return;\n }\n typeChecker.checkExpr(varNode.expr, env, varNode.type);\n\n }\n\n private void handleDeclaredWithVar(BLangVariable variable) {\n BLangExpression varRefExpr = variable.expr;\n BType rhsType = typeChecker.checkExpr(varRefExpr, this.env, expType);\n\n switch (variable.getKind()) {\n case VARIABLE:\n if (!validateVariableDefinition(varRefExpr)) {\n rhsType = symTable.semanticError;\n }\n\n BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable;\n\n Name varName = names.fromIdNode(simpleVariable.name);\n if (varName == Names.IGNORE) {\n dlog.error(simpleVariable.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);\n return;\n }\n\n simpleVariable.type = rhsType;\n\n int ownerSymTag = env.scope.owner.tag;\n if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {\n \n \n if (simpleVariable.symbol == null) {\n symbolEnter.defineNode(simpleVariable, env);\n }\n }\n\n \n \n simpleVariable.symbol.type = rhsType;\n break;\n case TUPLE_VARIABLE:\n if (variable.isDeclaredWithVar && variable.expr.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) {\n dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_LITERAL_FOR_TYPE, \"tuple binding pattern\");\n variable.type = symTable.semanticError;\n return;\n }\n if (TypeTags.TUPLE != rhsType.tag) {\n dlog.error(varRefExpr.pos, 
DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_TUPLE_VAR, rhsType);\n variable.type = symTable.semanticError;\n return;\n }\n\n BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;\n tupleVariable.type = rhsType;\n\n if (!(checkTypeAndVarCountConsistency(tupleVariable))) {\n tupleVariable.type = symTable.semanticError;\n return;\n }\n\n symbolEnter.defineNode(tupleVariable, env);\n\n break;\n case RECORD_VARIABLE:\n if (TypeTags.RECORD != rhsType.tag && TypeTags.MAP != rhsType.tag && TypeTags.JSON != rhsType.tag) {\n dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_RECORD_VAR, rhsType);\n variable.type = symTable.semanticError;\n }\n\n BLangRecordVariable recordVariable = (BLangRecordVariable) variable;\n recordVariable.type = rhsType;\n\n if (!validateRecordVariable(recordVariable)) {\n recordVariable.type = symTable.semanticError;\n }\n break;\n case ERROR_VARIABLE:\n if (TypeTags.ERROR != rhsType.tag) {\n dlog.error(variable.expr.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_ERROR_VAR, rhsType);\n variable.type = symTable.semanticError;\n return;\n }\n BLangErrorVariable errorVariable = (BLangErrorVariable) variable;\n errorVariable.type = rhsType;\n if (!validateErrorVariable(errorVariable)) {\n errorVariable.type = symTable.semanticError;\n return;\n }\n symbolEnter.defineNode(errorVariable, env);\n break;\n }\n }\n\n private void handleDeclaredVarInForeach(BLangVariable variable, BType rhsType, SymbolEnv blockEnv) {\n switch (variable.getKind()) {\n case VARIABLE:\n BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable;\n Name varName = names.fromIdNode(simpleVariable.name);\n if (varName == Names.IGNORE) {\n dlog.error(simpleVariable.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED);\n return;\n }\n\n simpleVariable.type = rhsType;\n\n int ownerSymTag = blockEnv.scope.owner.tag;\n if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {\n \n \n if (simpleVariable.symbol == null) {\n 
symbolEnter.defineNode(simpleVariable, blockEnv);\n }\n }\n recursivelySetFinalFlag(simpleVariable);\n break;\n case TUPLE_VARIABLE:\n BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;\n if (TypeTags.TUPLE != rhsType.tag && TypeTags.UNION != rhsType.tag) {\n dlog.error(variable.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_TUPLE_VAR, rhsType);\n recursivelyDefineVariables(tupleVariable, blockEnv);\n return;\n }\n\n tupleVariable.type = rhsType;\n\n if (rhsType.tag == TypeTags.TUPLE && !(checkTypeAndVarCountConsistency(tupleVariable,\n (BTupleType) tupleVariable.type, blockEnv))) {\n return;\n }\n\n if (rhsType.tag == TypeTags.UNION && !(checkTypeAndVarCountConsistency(tupleVariable, null,\n blockEnv))) {\n return;\n }\n\n symbolEnter.defineNode(tupleVariable, blockEnv);\n recursivelySetFinalFlag(tupleVariable);\n break;\n case RECORD_VARIABLE:\n BLangRecordVariable recordVariable = (BLangRecordVariable) variable;\n recordVariable.type = rhsType;\n validateRecordVariable(recordVariable, blockEnv);\n recursivelySetFinalFlag(recordVariable);\n break;\n case ERROR_VARIABLE:\n BLangErrorVariable errorVariable = (BLangErrorVariable) variable;\n if (TypeTags.ERROR != rhsType.tag) {\n dlog.error(variable.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_ERROR_VAR, rhsType);\n recursivelyDefineVariables(errorVariable, blockEnv);\n return;\n }\n errorVariable.type = rhsType;\n validateErrorVariable(errorVariable);\n recursivelySetFinalFlag(errorVariable);\n break;\n }\n }\n\n private void recursivelyDefineVariables(BLangVariable variable, SymbolEnv blockEnv) {\n switch (variable.getKind()) {\n case VARIABLE:\n Name name = names.fromIdNode(((BLangSimpleVariable) variable).name);\n if (name == Names.IGNORE) {\n return;\n }\n variable.type = symTable.semanticError;\n symbolEnter.defineVarSymbol(variable.pos, variable.flagSet, variable.type, name, blockEnv);\n break;\n case TUPLE_VARIABLE:\n ((BLangTupleVariable) variable).memberVariables.forEach(memberVariable 
->\n recursivelyDefineVariables(memberVariable, blockEnv));\n break;\n case RECORD_VARIABLE:\n ((BLangRecordVariable) variable).variableList.forEach(value ->\n recursivelyDefineVariables(value.valueBindingPattern, blockEnv));\n break;\n }\n }\n\n private void recursivelySetFinalFlag(BLangVariable variable) {\n if (variable == null) {\n return;\n }\n\n switch (variable.getKind()) {\n case VARIABLE:\n if (variable.symbol == null) {\n return;\n }\n variable.symbol.flags |= Flags.FINAL;\n break;\n case TUPLE_VARIABLE:\n BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;\n tupleVariable.memberVariables.forEach(this::recursivelySetFinalFlag);\n recursivelySetFinalFlag(tupleVariable.restVariable);\n break;\n case RECORD_VARIABLE:\n BLangRecordVariable recordVariable = (BLangRecordVariable) variable;\n recordVariable.variableList.forEach(value -> recursivelySetFinalFlag(value.valueBindingPattern));\n recursivelySetFinalFlag((BLangVariable) recordVariable.restParam);\n break;\n case ERROR_VARIABLE:\n BLangErrorVariable errorVariable = (BLangErrorVariable) variable;\n recursivelySetFinalFlag(errorVariable.reason);\n recursivelySetFinalFlag(errorVariable.restDetail);\n errorVariable.detail.forEach(bLangErrorDetailEntry ->\n recursivelySetFinalFlag(bLangErrorDetailEntry.valueBindingPattern));\n break;\n }\n }\n\n private boolean checkTypeAndVarCountConsistency(BLangTupleVariable varNode) {\n return checkTypeAndVarCountConsistency(varNode, null, env);\n }\n\n private boolean checkTypeAndVarCountConsistency(BLangTupleVariable varNode, BTupleType tupleTypeNode,\n SymbolEnv env) {\n\n if (tupleTypeNode == null) {\n /*\n This switch block will resolve the tuple type of the tuple variable.\n For example consider the following - [int, string]|[boolean, float] [a, b] = foo();\n Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows:\n Type of 'a' will be (int | boolean) while the type of 'b' will be (string | float).\n Consider anydata 
(a, b) = foo();\n Here, the type of 'a'and type of 'b' will be both anydata.\n */\n switch (varNode.type.tag) {\n case TypeTags.UNION:\n Set unionType = types.expandAndGetMemberTypesRecursive(varNode.type);\n List possibleTypes = unionType.stream()\n .filter(type -> {\n if (TypeTags.TUPLE == type.tag &&\n (varNode.memberVariables.size() == ((BTupleType) type).tupleTypes.size())) {\n return true;\n }\n return TypeTags.ANY == type.tag || TypeTags.ANYDATA == type.tag;\n })\n .collect(Collectors.toList());\n\n if (possibleTypes.isEmpty()) {\n dlog.error(varNode.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_TUPLE_VAR, varNode.type);\n return false;\n }\n\n if (possibleTypes.size() > 1) {\n List memberTupleTypes = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n LinkedHashSet memberTypes = new LinkedHashSet<>();\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.TUPLE) {\n memberTypes.add(((BTupleType) possibleType).tupleTypes.get(i));\n } else {\n memberTupleTypes.add(varNode.type);\n }\n }\n\n if (memberTypes.size() > 1) {\n memberTupleTypes.add(BUnionType.create(null, memberTypes));\n } else {\n memberTupleTypes.addAll(memberTypes);\n }\n }\n tupleTypeNode = new BTupleType(memberTupleTypes);\n break;\n }\n\n if (possibleTypes.get(0).tag == TypeTags.TUPLE) {\n tupleTypeNode = (BTupleType) possibleTypes.get(0);\n break;\n }\n\n List memberTypes = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n memberTypes.add(possibleTypes.get(0));\n }\n tupleTypeNode = new BTupleType(memberTypes);\n break;\n case TypeTags.ANY:\n case TypeTags.ANYDATA:\n List memberTupleTypes = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n memberTupleTypes.add(varNode.type);\n }\n tupleTypeNode = new BTupleType(memberTupleTypes);\n if (varNode.restVariable != null) {\n tupleTypeNode.restType = varNode.type;\n }\n break;\n case TypeTags.TUPLE:\n tupleTypeNode = 
(BTupleType) varNode.type;\n break;\n default:\n dlog.error(varNode.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_TUPLE_VAR, varNode.type);\n return false;\n }\n }\n\n if (tupleTypeNode.tupleTypes.size() != varNode.memberVariables.size()\n || (tupleTypeNode.restType == null && varNode.restVariable != null)\n || (tupleTypeNode.restType != null && varNode.restVariable == null)) {\n dlog.error(varNode.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN);\n return false;\n }\n\n int ignoredCount = 0;\n List memberVariables = new ArrayList<>(varNode.memberVariables);\n if (varNode.restVariable != null) {\n memberVariables.add(varNode.restVariable);\n }\n for (int i = 0; i < memberVariables.size(); i++) {\n BLangVariable var = memberVariables.get(i);\n BType type = (i <= tupleTypeNode.tupleTypes.size() - 1) ? tupleTypeNode.tupleTypes.get(i) :\n new BArrayType(tupleTypeNode.restType);\n if (var.getKind() == NodeKind.VARIABLE) {\n \n BLangSimpleVariable simpleVar = (BLangSimpleVariable) var;\n Name varName = names.fromIdNode(simpleVar.name);\n if (varName == Names.IGNORE) {\n ignoredCount++;\n simpleVar.type = symTable.anyType;\n types.checkType(varNode.pos, type, simpleVar.type,\n DiagnosticCode.INCOMPATIBLE_TYPES);\n continue;\n }\n }\n var.type = type;\n analyzeNode(var, env);\n }\n\n if (!varNode.memberVariables.isEmpty() && ignoredCount == varNode.memberVariables.size()\n && varNode.restVariable == null) {\n dlog.error(varNode.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);\n return false;\n }\n return true;\n }\n\n private boolean validateRecordVariable(BLangRecordVariable recordVar) {\n return validateRecordVariable(recordVar, env);\n }\n\n private boolean validateRecordVariable(BLangRecordVariable recordVar, SymbolEnv env) {\n BRecordType recordVarType;\n /*\n This switch block will resolve the record type of the record variable.\n For example consider the following -\n type Foo record {int a, boolean b};\n type Bar record {string a, float b};\n Foo|Bar {a, 
b} = foo();\n Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows:\n Type of 'a' will be a union of the types of field 'a' in both Foo and Bar.\n i.e. type of 'a' is (int | string) and type of 'b' is (boolean | float).\n Consider anydata {a, b} = foo();\n Here, the type of 'a'and type of 'b' will be both anydata.\n */\n switch (recordVar.type.tag) {\n case TypeTags.UNION:\n BUnionType unionType = (BUnionType) recordVar.type;\n Set bTypes = types.expandAndGetMemberTypesRecursive(unionType);\n List possibleTypes = bTypes.stream()\n .filter(rec -> doesRecordContainKeys(rec, recordVar.variableList, recordVar.restParam != null))\n .collect(Collectors.toList());\n\n if (possibleTypes.isEmpty()) {\n dlog.error(recordVar.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, recordVar.type);\n return false;\n }\n\n if (possibleTypes.size() > 1) {\n BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0,\n names.fromString(ANONYMOUS_RECORD_NAME), env.enclPkg.symbol.pkgID, null, env.scope.owner);\n recordVarType = (BRecordType) symTable.recordType;\n\n List fields = populateAndGetPossibleFieldsForRecVar(recordVar, possibleTypes, recordSymbol);\n\n if (recordVar.restParam != null) {\n LinkedHashSet memberTypes = possibleTypes.stream()\n .map(possibleType -> {\n if (possibleType.tag == TypeTags.RECORD) {\n return ((BRecordType) possibleType).restFieldType;\n } else if (possibleType.tag == TypeTags.MAP) {\n return ((BMapType) possibleType).constraint;\n } else {\n return possibleType;\n }\n })\n .collect(Collectors.toCollection(LinkedHashSet::new));\n recordVarType.restFieldType = memberTypes.size() > 1 ?\n BUnionType.create(null, memberTypes) :\n memberTypes.iterator().next();\n }\n recordVarType.tsymbol = recordSymbol;\n recordVarType.fields = fields;\n recordSymbol.type = recordVarType;\n break;\n }\n\n if (possibleTypes.get(0).tag == TypeTags.RECORD) {\n recordVarType = (BRecordType) possibleTypes.get(0);\n break;\n }\n\n if 
(possibleTypes.get(0).tag == TypeTags.MAP) {\n recordVarType = createSameTypedFieldsRecordType(recordVar,\n ((BMapType) possibleTypes.get(0)).constraint);\n break;\n }\n\n recordVarType = createSameTypedFieldsRecordType(recordVar, possibleTypes.get(0));\n break;\n case TypeTags.RECORD:\n recordVarType = (BRecordType) recordVar.type;\n break;\n case TypeTags.MAP:\n recordVarType = createSameTypedFieldsRecordType(recordVar, ((BMapType) recordVar.type).constraint);\n break;\n case TypeTags.ANY:\n case TypeTags.ANYDATA:\n recordVarType = createSameTypedFieldsRecordType(recordVar, recordVar.type);\n break;\n default:\n dlog.error(recordVar.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, recordVar.type);\n return false;\n }\n\n Map recordVarTypeFields = recordVarType.fields.stream()\n .collect(Collectors.toMap(field -> field.getName().getValue(), field -> field));\n\n boolean validRecord = true;\n int ignoredCount = 0;\n for (BLangRecordVariableKeyValue variable : recordVar.variableList) {\n \n \n if (names.fromIdNode(variable.getKey()) == Names.IGNORE) {\n dlog.error(recordVar.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED);\n continue;\n }\n\n BLangVariable value = variable.getValue();\n if (value.getKind() == NodeKind.VARIABLE) {\n \n BLangSimpleVariable simpleVar = (BLangSimpleVariable) value;\n Name varName = names.fromIdNode(simpleVar.name);\n if (varName == Names.IGNORE) {\n ignoredCount++;\n simpleVar.type = symTable.anyType;\n if (!recordVarTypeFields.containsKey(variable.getKey().getValue())) {\n continue;\n }\n types.checkType(variable.valueBindingPattern.pos,\n recordVarTypeFields.get((variable.getKey().getValue())).type, simpleVar.type,\n DiagnosticCode.INCOMPATIBLE_TYPES);\n continue;\n }\n }\n if (!recordVarTypeFields.containsKey(variable.getKey().getValue())) {\n if (recordVarType.sealed) {\n validRecord = false;\n dlog.error(recordVar.pos, DiagnosticCode.INVALID_FIELD_IN_RECORD_BINDING_PATTERN,\n variable.getKey().getValue(), recordVar.type);\n } else 
{\n BType restType;\n if (recordVarType.restFieldType.tag == TypeTags.ANYDATA ||\n recordVarType.restFieldType.tag == TypeTags.ANY) {\n restType = recordVarType.restFieldType;\n } else {\n restType = BUnionType.create(null, recordVarType.restFieldType, symTable.nilType);\n }\n value.type = restType;\n value.accept(this);\n }\n continue;\n }\n\n value.type = recordVarTypeFields.get((variable.getKey().getValue())).type;\n value.accept(this);\n }\n\n if (!recordVar.variableList.isEmpty() && ignoredCount == recordVar.variableList.size()\n && recordVar.restParam == null) {\n dlog.error(recordVar.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);\n return false;\n }\n\n if (recordVar.restParam != null) {\n ((BLangVariable) recordVar.restParam).type = getRestParamType(recordVarType);\n symbolEnter.defineNode((BLangNode) recordVar.restParam, env);\n }\n\n return validRecord;\n }\n\n \n\n private boolean isReasonSpecified(BLangErrorVariable errorVariable) {\n return !isIgnoredOrEmpty(errorVariable.reason);\n }\n\n private boolean isIgnoredOrEmpty(BLangSimpleVariable varNode) {\n return varNode.name.value.equals(Names.IGNORE.value) || varNode.name.value.equals(\"\");\n }\n\n private boolean isRestDetailBindingAvailable(BLangErrorVariable errorVariable) {\n return errorVariable.restDetail != null &&\n !errorVariable.restDetail.name.value.equals(Names.IGNORE.value);\n }\n\n private BTypeSymbol createTypeSymbol(int type) {\n return new BTypeSymbol(type, Flags.PUBLIC, Names.EMPTY, env.enclPkg.packageID,\n null, env.scope.owner);\n }\n\n /**\n * This method will resolve field types based on a list of possible types.\n * When a record variable has multiple possible assignable types, each field will be a union of the relevant\n * possible types field type.\n *\n * @param recordVar record variable whose fields types are to be resolved\n * @param possibleTypes list of possible types\n * @param recordSymbol symbol of the record type to be used in creating fields\n * @return the 
list of fields\n */\n private List populateAndGetPossibleFieldsForRecVar(BLangRecordVariable recordVar, List possibleTypes,\n BRecordTypeSymbol recordSymbol) {\n List fields = new ArrayList<>();\n for (BLangRecordVariableKeyValue bLangRecordVariableKeyValue : recordVar.variableList) {\n String fieldName = bLangRecordVariableKeyValue.key.value;\n LinkedHashSet memberTypes = new LinkedHashSet<>();\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.RECORD) {\n BRecordType possibleRecordType = (BRecordType) possibleType;\n Optional optionalField = possibleRecordType.fields.stream()\n .filter(field -> field.getName().getValue().equals(fieldName))\n .findFirst();\n if (optionalField.isPresent()) {\n BField bField = optionalField.get();\n if (Symbols.isOptional(bField.symbol)) {\n memberTypes.add(symTable.nilType);\n }\n memberTypes.add(bField.type);\n } else {\n memberTypes.add(possibleRecordType.restFieldType);\n memberTypes.add(symTable.nilType);\n }\n continue;\n }\n if (possibleType.tag == TypeTags.MAP) {\n BMapType possibleMapType = (BMapType) possibleType;\n memberTypes.add(possibleMapType.constraint);\n continue;\n }\n memberTypes.add(possibleType); \n }\n\n BType fieldType = memberTypes.size() > 1 ?\n BUnionType.create(null, memberTypes) : memberTypes.iterator().next();\n fields.add(new BField(names.fromString(fieldName), recordVar.pos,\n new BVarSymbol(0, names.fromString(fieldName), env.enclPkg.symbol.pkgID,\n fieldType, recordSymbol)));\n }\n return fields;\n }", "context_after": "class SemanticAnalyzer extends BLangNodeVisitor {\n\n private static final CompilerContext.Key SYMBOL_ANALYZER_KEY =\n new CompilerContext.Key<>();\n private static final String ANONYMOUS_RECORD_NAME = \"anonymous-record\";\n private static final String NULL_LITERAL = \"null\";\n private static final String LEFT_BRACE = \"{\";\n private static final String RIGHT_BRACE = \"}\";\n private static final String SPACE = \" \";\n public static final String COLON 
= \":\";\n\n private SymbolTable symTable;\n private SymbolEnter symbolEnter;\n private Names names;\n private SymbolResolver symResolver;\n private TypeChecker typeChecker;\n private Types types;\n private StreamsQuerySemanticAnalyzer streamsQuerySemanticAnalyzer;\n private BLangDiagnosticLog dlog;\n private TypeNarrower typeNarrower;\n private ConstantAnalyzer constantAnalyzer;\n private ConstantValueResolver constantValueResolver;\n\n private SymbolEnv env;\n private BType expType;\n private DiagnosticCode diagCode;\n private BType resType;\n\n \n \n private Stack prevEnvs = new Stack<>();\n\n public static SemanticAnalyzer getInstance(CompilerContext context) {\n SemanticAnalyzer semAnalyzer = context.get(SYMBOL_ANALYZER_KEY);\n if (semAnalyzer == null) {\n semAnalyzer = new SemanticAnalyzer(context);\n }\n\n return semAnalyzer;\n }\n\n public SemanticAnalyzer(CompilerContext context) {\n context.put(SYMBOL_ANALYZER_KEY, this);\n\n this.symTable = SymbolTable.getInstance(context);\n this.symbolEnter = SymbolEnter.getInstance(context);\n this.names = Names.getInstance(context);\n this.symResolver = SymbolResolver.getInstance(context);\n this.typeChecker = TypeChecker.getInstance(context);\n this.types = Types.getInstance(context);\n this.streamsQuerySemanticAnalyzer = StreamsQuerySemanticAnalyzer.getInstance(context);\n this.dlog = BLangDiagnosticLog.getInstance(context);\n this.typeNarrower = TypeNarrower.getInstance(context);\n this.constantAnalyzer = ConstantAnalyzer.getInstance(context);\n this.constantValueResolver = ConstantValueResolver.getInstance(context);\n }\n\n public BLangPackage analyze(BLangPackage pkgNode) {\n pkgNode.accept(this);\n return pkgNode;\n }\n\n\n \n\n public void visit(BLangPackage pkgNode) {\n if (pkgNode.completedPhases.contains(CompilerPhase.TYPE_CHECK)) {\n return;\n }\n SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);\n\n \n pkgNode.topLevelNodes.stream().filter(pkgLevelNode -> pkgLevelNode.getKind() == 
NodeKind.CONSTANT)\n .forEach(constant -> analyzeDef((BLangNode) constant, pkgEnv));\n this.constantValueResolver.resolve(pkgNode.constants);\n\n pkgNode.topLevelNodes.stream().filter(pkgLevelNode -> pkgLevelNode.getKind() != NodeKind.CONSTANT)\n .filter(pkgLevelNode -> !(pkgLevelNode.getKind() == NodeKind.FUNCTION\n && ((BLangFunction) pkgLevelNode).flagSet.contains(Flag.LAMBDA)))\n .forEach(topLevelNode -> analyzeDef((BLangNode) topLevelNode, pkgEnv));\n\n while (pkgNode.lambdaFunctions.peek() != null) {\n BLangLambdaFunction lambdaFunction = pkgNode.lambdaFunctions.poll();\n BLangFunction function = lambdaFunction.function;\n lambdaFunction.type = function.symbol.type;\n analyzeDef(lambdaFunction.function, lambdaFunction.cachedEnv);\n }\n\n pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage));\n pkgNode.completedPhases.add(CompilerPhase.TYPE_CHECK);\n }\n\n public void visit(BLangXMLNS xmlnsNode) {\n xmlnsNode.type = symTable.stringType;\n\n \n \n if (xmlnsNode.symbol == null) {\n symbolEnter.defineNode(xmlnsNode, env);\n }\n\n typeChecker.checkExpr(xmlnsNode.namespaceURI, env, symTable.stringType);\n }\n\n public void visit(BLangXMLNSStatement xmlnsStmtNode) {\n analyzeNode(xmlnsStmtNode.xmlnsDecl, env);\n }\n\n public void visit(BLangFunction funcNode) {\n SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);\n \n funcNode.symbol.params.forEach(param -> param.flags |= Flags.FUNCTION_FINAL);\n\n if (!funcNode.flagSet.contains(Flag.WORKER)) {\n \n funcNode.annAttachments.forEach(annotationAttachment -> {\n if (Symbols.isFlagOn(funcNode.symbol.flags, Flags.RESOURCE)) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.RESOURCE);\n } else if (funcNode.attachedFunction) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT_METHOD);\n }\n annotationAttachment.attachPoints.add(AttachPoint.Point.FUNCTION);\n this.analyzeDef(annotationAttachment, funcEnv);\n });\n 
validateAnnotationAttachmentCount(funcNode.annAttachments);\n }\n\n if (funcNode.returnTypeNode != null) {\n funcNode.returnTypeAnnAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.RETURN);\n this.analyzeDef(annotationAttachment, funcEnv);\n });\n validateAnnotationAttachmentCount(funcNode.returnTypeAnnAttachments);\n }\n\n if (Symbols.isNative(funcNode.symbol)) {\n funcNode.externalAnnAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.EXTERNAL);\n this.analyzeDef(annotationAttachment, funcEnv);\n });\n validateAnnotationAttachmentCount(funcNode.externalAnnAttachments);\n }\n\n for (BLangSimpleVariable param : funcNode.requiredParams) {\n symbolEnter.defineExistingVarSymbolInEnv(param.symbol, funcNode.clonedEnv);\n this.analyzeDef(param, funcNode.clonedEnv);\n }\n if (funcNode.restParam != null) {\n symbolEnter.defineExistingVarSymbolInEnv(funcNode.restParam.symbol, funcNode.clonedEnv);\n this.analyzeDef(funcNode.restParam, funcNode.clonedEnv);\n }\n\n validateObjectAttachedFunction(funcNode);\n\n \n if (Symbols.isNative(funcNode.symbol) || funcNode.interfaceFunction) {\n if (funcNode.body != null) {\n dlog.error(funcNode.pos, DiagnosticCode.EXTERN_FUNCTION_CANNOT_HAVE_BODY, funcNode.name);\n }\n return;\n }\n\n if (funcNode.body != null) {\n analyzeStmt(funcNode.body, funcEnv);\n }\n\n if (funcNode.anonForkName != null) {\n funcNode.symbol.enclForkName = funcNode.anonForkName;\n }\n\n this.processWorkers(funcNode, funcEnv);\n }\n\n private void processWorkers(BLangInvokableNode invNode, SymbolEnv invEnv) {\n if (invNode.workers.size() > 0) {\n invEnv.scope.entries.putAll(invNode.body.scope.entries);\n invNode.workers.forEach(e -> this.symbolEnter.defineNode(e, invEnv));\n invNode.workers.forEach(e -> analyzeNode(e, invEnv));\n }\n }\n\n @Override\n public void visit(BLangTypeDefinition typeDefinition) {\n if (typeDefinition.typeNode.getKind() == 
NodeKind.OBJECT_TYPE\n || typeDefinition.typeNode.getKind() == NodeKind.RECORD_TYPE\n || typeDefinition.typeNode.getKind() == NodeKind.ERROR_TYPE\n || typeDefinition.typeNode.getKind() == NodeKind.FINITE_TYPE_NODE) {\n analyzeDef(typeDefinition.typeNode, env);\n }\n\n typeDefinition.annAttachments.forEach(annotationAttachment -> {\n if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT);\n }\n annotationAttachment.attachPoints.add(AttachPoint.Point.TYPE);\n\n annotationAttachment.accept(this);\n });\n validateAnnotationAttachmentCount(typeDefinition.annAttachments);\n }\n\n public void visit(BLangTypeConversionExpr conversionExpr) {\n conversionExpr.annAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.TYPE);\n if (conversionExpr.typeNode.getKind() == NodeKind.OBJECT_TYPE) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT);\n }\n\n annotationAttachment.accept(this);\n });\n validateAnnotationAttachmentCount(conversionExpr.annAttachments);\n }\n\n @Override\n public void visit(BLangFiniteTypeNode finiteTypeNode) {\n finiteTypeNode.valueSpace.forEach(val -> {\n if (val.type.tag == TypeTags.NIL && NULL_LITERAL.equals(((BLangLiteral) val).originalValue)) {\n dlog.error(val.pos, DiagnosticCode.INVALID_USE_OF_NULL_LITERAL);\n }\n });\n }\n\n @Override\n public void visit(BLangObjectTypeNode objectTypeNode) {\n SymbolEnv objectEnv = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, env);\n\n boolean isAbstract = objectTypeNode.flagSet.contains(Flag.ABSTRACT);\n objectTypeNode.fields.forEach(field -> {\n analyzeDef(field, objectEnv);\n if (isAbstract) {\n if (field.flagSet.contains(Flag.PRIVATE)) {\n this.dlog.error(field.pos, DiagnosticCode.PRIVATE_FIELD_ABSTRACT_OBJECT, field.symbol.name);\n }\n\n if (field.expr != null) {\n this.dlog.error(field.expr.pos, 
DiagnosticCode.FIELD_WITH_DEFAULT_VALUE_ABSTRACT_OBJECT);\n }\n }\n });\n\n \n objectTypeNode.functions.forEach(func -> {\n analyzeDef(func, env);\n if (isAbstract && func.flagSet.contains(Flag.PRIVATE)) {\n this.dlog.error(func.pos, DiagnosticCode.PRIVATE_FUNC_ABSTRACT_OBJECT, func.name,\n objectTypeNode.symbol.name);\n }\n if (isAbstract && func.flagSet.contains(Flag.NATIVE)) {\n this.dlog.error(func.pos, DiagnosticCode.EXTERN_FUNC_ABSTRACT_OBJECT, func.name,\n objectTypeNode.symbol.name);\n }\n if (func.flagSet.contains(Flag.RESOURCE) && func.flagSet.contains(Flag.NATIVE)) {\n this.dlog.error(func.pos, DiagnosticCode.RESOURCE_FUNCTION_CANNOT_BE_EXTERN, func.name);\n }\n });\n\n \n ((BObjectTypeSymbol) objectTypeNode.symbol).referencedFunctions\n .forEach(func -> validateReferencedFunction(objectTypeNode.pos, func, env));\n\n if (objectTypeNode.initFunction == null) {\n return;\n }\n\n if (objectTypeNode.initFunction.flagSet.contains(Flag.PRIVATE)) {\n this.dlog.error(objectTypeNode.initFunction.pos, DiagnosticCode.PRIVATE_OBJECT_CONSTRUCTOR,\n objectTypeNode.symbol.name);\n return;\n }\n\n if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {\n this.dlog.error(objectTypeNode.initFunction.pos, DiagnosticCode.ABSTRACT_OBJECT_CONSTRUCTOR,\n objectTypeNode.symbol.name);\n return;\n }\n\n if (objectTypeNode.initFunction.flagSet.contains(Flag.NATIVE)) {\n this.dlog.error(objectTypeNode.initFunction.pos, DiagnosticCode.OBJECT_INIT_FUNCTION_CANNOT_BE_EXTERN,\n objectTypeNode.symbol.name);\n return;\n }\n\n analyzeDef(objectTypeNode.initFunction, env);\n }\n\n @Override\n public void visit(BLangRecordTypeNode recordTypeNode) {\n SymbolEnv recordEnv = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, env);\n recordTypeNode.fields.forEach(field -> analyzeDef(field, recordEnv));\n analyzeDef(recordTypeNode.initFunction, recordEnv);\n validateDefaultable(recordTypeNode);\n }\n\n @Override\n public void visit(BLangErrorType errorType) {\n BType reasonType = 
getReasonType(errorType);\n\n if (!types.isAssignable(reasonType, symTable.stringType)) {\n dlog.error(errorType.reasonType.pos, DiagnosticCode.INVALID_ERROR_REASON_TYPE, reasonType);\n } else if (errorType.reasonType != null) {\n validateModuleQualifiedReasons(errorType.reasonType.pos, reasonType);\n }\n\n if (errorType.detailType == null) {\n return;\n }\n\n BType detailType = errorType.detailType.type;\n if (!types.isValidErrorDetailType(detailType)) {\n dlog.error(errorType.detailType.pos, DiagnosticCode.INVALID_ERROR_DETAIL_TYPE, detailType,\n symTable.detailType);\n }\n }\n\n private BType getReasonType(BLangErrorType errorType) {\n \n if (errorType.reasonType == null) {\n return symTable.stringType;\n }\n return errorType.reasonType.type;\n }\n\n private void validateModuleQualifiedReasons(DiagnosticPos pos, BType reasonType) {\n switch (reasonType.tag) {\n case TypeTags.STRING:\n return;\n case TypeTags.FINITE:\n BFiniteType finiteType = (BFiniteType) reasonType;\n for (BLangExpression expr : finiteType.valueSpace) {\n validateModuleQualifiedReason(pos, (String) ((BLangLiteral) expr).value);\n }\n return;\n case TypeTags.UNION:\n ((BUnionType) reasonType).getMemberTypes().forEach(type -> validateModuleQualifiedReasons(pos, type));\n }\n }\n\n private void validateModuleQualifiedReason(DiagnosticPos pos, String reason) {\n if (!reason.startsWith(LEFT_BRACE)) {\n return;\n }\n\n PackageID currentPackageId = env.enclPkg.packageID;\n if (currentPackageId.isUnnamed || reason.contains(SPACE) ||\n !reason.startsWith(LEFT_BRACE.concat(currentPackageId.toString().split(COLON)[0])\n .concat(RIGHT_BRACE))) {\n dlog.warning(pos, DiagnosticCode.NON_MODULE_QUALIFIED_ERROR_REASON, reason);\n }\n }\n\n public void visit(BLangAnnotation annotationNode) {\n annotationNode.annAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.ANNOTATION);\n annotationAttachment.accept(this);\n });\n 
validateAnnotationAttachmentCount(annotationNode.annAttachments);\n }\n\n public void visit(BLangAnnotationAttachment annAttachmentNode) {\n BSymbol symbol = this.symResolver.resolveAnnotation(annAttachmentNode.pos, env,\n names.fromString(annAttachmentNode.pkgAlias.getValue()),\n names.fromString(annAttachmentNode.getAnnotationName().getValue()));\n if (symbol == this.symTable.notFoundSymbol) {\n this.dlog.error(annAttachmentNode.pos, DiagnosticCode.UNDEFINED_ANNOTATION,\n annAttachmentNode.getAnnotationName().getValue());\n return;\n }\n \n BAnnotationSymbol annotationSymbol = (BAnnotationSymbol) symbol;\n annAttachmentNode.annotationSymbol = annotationSymbol;\n if (annotationSymbol.maskedPoints > 0 &&\n !Symbols.isAttachPointPresent(annotationSymbol.maskedPoints,\n AttachPoints.asMask(annAttachmentNode.attachPoints))) {\n String msg = annAttachmentNode.attachPoints.stream()\n .map(point -> point.name().toLowerCase())\n .collect(Collectors.joining(\", \"));\n this.dlog.error(annAttachmentNode.pos, DiagnosticCode.ANNOTATION_NOT_ALLOWED, annotationSymbol, msg);\n }\n \n validateAnnotationAttachmentExpr(annAttachmentNode, annotationSymbol);\n }\n\n public void visit(BLangSimpleVariable varNode) {\n\n if (varNode.isDeclaredWithVar) {\n validateWorkerAnnAttachments(varNode.expr);\n handleDeclaredWithVar(varNode);\n transferForkFlag(varNode);\n return;\n }\n\n int ownerSymTag = env.scope.owner.tag;\n if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {\n \n \n if (varNode.symbol == null) {\n symbolEnter.defineNode(varNode, env);\n varNode.annAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.VAR);\n annotationAttachment.accept(this);\n });\n } else {\n varNode.annAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.PARAMETER);\n annotationAttachment.accept(this);\n });\n }\n } else {\n varNode.annAttachments.forEach(annotationAttachment -> {\n if 
(Symbols.isFlagOn(varNode.symbol.flags, Flags.LISTENER)) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.LISTENER);\n } else if (Symbols.isFlagOn(varNode.symbol.flags, Flags.SERVICE)) {\n annotationAttachment.attachPoints.add(AttachPoint.Point.SERVICE);\n } else {\n annotationAttachment.attachPoints.add(AttachPoint.Point.VAR);\n }\n annotationAttachment.accept(this);\n });\n }\n validateAnnotationAttachmentCount(varNode.annAttachments);\n\n validateWorkerAnnAttachments(varNode.expr);\n\n if (isIgnoredOrEmpty(varNode)) {\n \n varNode.symbol = new BVarSymbol(0, Names.IGNORE, env.enclPkg.packageID, symTable.anyType, env.scope.owner);\n }\n\n BType lhsType = varNode.symbol.type;\n varNode.type = lhsType;\n\n \n BLangExpression rhsExpr = varNode.expr;\n if (rhsExpr == null) {\n if (lhsType.tag == TypeTags.ARRAY && typeChecker.isArrayOpenSealedType((BArrayType) lhsType)) {\n dlog.error(varNode.pos, DiagnosticCode.SEALED_ARRAY_TYPE_NOT_INITIALIZED);\n }\n return;\n }\n\n \n \n \n SymbolEnv varInitEnv = SymbolEnv.createVarInitEnv(varNode, env, varNode.symbol);\n\n typeChecker.checkExpr(rhsExpr, varInitEnv, lhsType);\n if (Symbols.isFlagOn(varNode.symbol.flags, Flags.LISTENER) &&\n !types.checkListenerCompatibility(varNode.symbol.type)) {\n dlog.error(varNode.pos, DiagnosticCode.INVALID_LISTENER_VARIABLE, varNode.name);\n }\n\n transferForkFlag(varNode);\n }\n\n private void transferForkFlag(BLangSimpleVariable varNode) {\n \n if (varNode.expr != null && varNode.expr.getKind() == NodeKind.INVOCATION\n && varNode.flagSet.contains(Flag.WORKER)) {\n\n BLangInvocation expr = (BLangInvocation) varNode.expr;\n if (expr.name.value.startsWith(\"0\") && (expr.symbol.flags & Flags.FORKED) == Flags.FORKED) {\n varNode.symbol.flags |= Flags.FORKED;\n }\n }\n }\n\n /**\n * Validate annotation attachment of the `start` action or workers.\n *\n * @param expr expression to be validated.\n */\n private void validateWorkerAnnAttachments(BLangExpression expr) {\n if (expr != null 
&& expr.getKind() == NodeKind.INVOCATION && ((BLangInvocation) expr).async) {\n ((BLangInvocation) expr).annAttachments.forEach(annotationAttachment -> {\n annotationAttachment.attachPoints.add(AttachPoint.Point.WORKER);\n annotationAttachment.accept(this);\n });\n validateAnnotationAttachmentCount(((BLangInvocation) expr).annAttachments);\n }\n }\n\n public void visit(BLangRecordVariable varNode) {\n\n if (varNode.isDeclaredWithVar) {\n handleDeclaredWithVar(varNode);\n return;\n }\n\n if (varNode.type == null) {\n varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);\n }\n\n if (!validateRecordVariable(varNode)) {\n varNode.type = symTable.semanticError;\n return;\n }\n\n symbolEnter.defineNode(varNode, env);\n\n if (varNode.expr == null) {\n \n return;\n }\n\n typeChecker.checkExpr(varNode.expr, env, varNode.type);\n\n }\n\n public void visit(BLangTupleVariable varNode) {\n\n if (varNode.isDeclaredWithVar) {\n expType = resolveTupleType(varNode);\n handleDeclaredWithVar(varNode);\n return;\n }\n\n if (varNode.type == null) {\n varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);\n }\n\n if (!(checkTypeAndVarCountConsistency(varNode))) {\n varNode.type = symTable.semanticError;\n return;\n }\n\n symbolEnter.defineNode(varNode, env);\n\n if (varNode.expr == null) {\n \n return;\n }\n\n typeChecker.checkExpr(varNode.expr, env, varNode.type);\n }\n\n private BType resolveTupleType(BLangTupleVariable varNode) {\n List memberTypes = new ArrayList<>(varNode.memberVariables.size());\n for (BLangVariable memberVariable : varNode.memberVariables) {\n if (memberVariable.getKind() == NodeKind.TUPLE_VARIABLE) {\n memberTypes.add(resolveTupleType((BLangTupleVariable) memberVariable));\n } else {\n memberTypes.add(symTable.noType);\n }\n }\n return new BTupleType(memberTypes);\n }\n\n public void visit(BLangErrorVariable varNode) {\n \n if (varNode.isDeclaredWithVar) {\n handleDeclaredWithVar(varNode);\n return;\n }\n\n if (varNode.type == null) {\n 
varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);\n }\n\n \n \n \n if (!varNode.reasonVarPrefixAvailable && varNode.type == null) {\n BErrorType errorType = new BErrorType(varNode.type.tsymbol, null, null);\n\n if (varNode.type.tag == TypeTags.UNION) {\n Set members = types.expandAndGetMemberTypesRecursive(varNode.type);\n List errorMembers = members.stream()\n .filter(m -> m.tag == TypeTags.ERROR)\n .map(m -> (BErrorType) m)\n .collect(Collectors.toList());\n\n if (errorMembers.isEmpty()) {\n dlog.error(varNode.pos, DiagnosticCode.INVALID_ERROR_MATCH_PATTERN);\n return;\n } else if (errorMembers.size() == 1) {\n errorType.detailType = errorMembers.get(0).detailType;\n errorType.reasonType = errorMembers.get(0).reasonType;\n } else {\n errorType.detailType = symTable.detailType;\n errorType.reasonType = symTable.stringType;\n }\n varNode.type = errorType;\n } else if (varNode.type.tag == TypeTags.ERROR) {\n errorType.detailType = ((BErrorType) varNode.type).detailType;\n }\n\n \n \n \n if (varNode.reasonMatchConst != null) {\n BTypeSymbol reasonConstTypeSymbol = new BTypeSymbol(SymTag.FINITE_TYPE,\n Flags.PUBLIC, names.fromString(\"\"), this.env.enclPkg.packageID, null, this.env.scope.owner);\n varNode.reasonMatchConst.type = symTable.stringType;\n typeChecker.checkExpr(varNode.reasonMatchConst, env);\n\n LinkedHashSet members = new LinkedHashSet<>();\n members.add(varNode.reasonMatchConst);\n errorType.reasonType = new BFiniteType(reasonConstTypeSymbol, members);\n } else {\n errorType.reasonType = symTable.stringType;\n }\n }\n if (!validateErrorVariable(varNode)) {\n varNode.type = symTable.semanticError;\n return;\n }\n symbolEnter.defineNode(varNode, env);\n if (varNode.expr == null) {\n \n return;\n }\n typeChecker.checkExpr(varNode.expr, env, varNode.type);\n\n }\n\n private void handleDeclaredWithVar(BLangVariable variable) {\n BLangExpression varRefExpr = variable.expr;\n BType rhsType = typeChecker.checkExpr(varRefExpr, this.env, 
expType);\n\n switch (variable.getKind()) {\n case VARIABLE:\n if (!validateVariableDefinition(varRefExpr)) {\n rhsType = symTable.semanticError;\n }\n\n BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable;\n\n Name varName = names.fromIdNode(simpleVariable.name);\n if (varName == Names.IGNORE) {\n dlog.error(simpleVariable.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);\n return;\n }\n\n simpleVariable.type = rhsType;\n\n int ownerSymTag = env.scope.owner.tag;\n if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {\n \n \n if (simpleVariable.symbol == null) {\n symbolEnter.defineNode(simpleVariable, env);\n }\n }\n\n \n \n simpleVariable.symbol.type = rhsType;\n break;\n case TUPLE_VARIABLE:\n if (variable.isDeclaredWithVar && variable.expr.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) {\n dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_LITERAL_FOR_TYPE, \"tuple binding pattern\");\n variable.type = symTable.semanticError;\n return;\n }\n if (TypeTags.TUPLE != rhsType.tag) {\n dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_TUPLE_VAR, rhsType);\n variable.type = symTable.semanticError;\n return;\n }\n\n BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;\n tupleVariable.type = rhsType;\n\n if (!(checkTypeAndVarCountConsistency(tupleVariable))) {\n tupleVariable.type = symTable.semanticError;\n return;\n }\n\n symbolEnter.defineNode(tupleVariable, env);\n\n break;\n case RECORD_VARIABLE:\n if (TypeTags.RECORD != rhsType.tag && TypeTags.MAP != rhsType.tag && TypeTags.JSON != rhsType.tag) {\n dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_RECORD_VAR, rhsType);\n variable.type = symTable.semanticError;\n }\n\n BLangRecordVariable recordVariable = (BLangRecordVariable) variable;\n recordVariable.type = rhsType;\n\n if (!validateRecordVariable(recordVariable)) {\n recordVariable.type = symTable.semanticError;\n }\n break;\n case ERROR_VARIABLE:\n if (TypeTags.ERROR != rhsType.tag) 
{\n dlog.error(variable.expr.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_ERROR_VAR, rhsType);\n variable.type = symTable.semanticError;\n return;\n }\n BLangErrorVariable errorVariable = (BLangErrorVariable) variable;\n errorVariable.type = rhsType;\n if (!validateErrorVariable(errorVariable)) {\n errorVariable.type = symTable.semanticError;\n return;\n }\n symbolEnter.defineNode(errorVariable, env);\n break;\n }\n }\n\n private void handleDeclaredVarInForeach(BLangVariable variable, BType rhsType, SymbolEnv blockEnv) {\n switch (variable.getKind()) {\n case VARIABLE:\n BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable;\n Name varName = names.fromIdNode(simpleVariable.name);\n if (varName == Names.IGNORE) {\n dlog.error(simpleVariable.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED);\n return;\n }\n\n simpleVariable.type = rhsType;\n\n int ownerSymTag = blockEnv.scope.owner.tag;\n if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {\n \n \n if (simpleVariable.symbol == null) {\n symbolEnter.defineNode(simpleVariable, blockEnv);\n }\n }\n recursivelySetFinalFlag(simpleVariable);\n break;\n case TUPLE_VARIABLE:\n BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;\n if (TypeTags.TUPLE != rhsType.tag && TypeTags.UNION != rhsType.tag) {\n dlog.error(variable.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_TUPLE_VAR, rhsType);\n recursivelyDefineVariables(tupleVariable, blockEnv);\n return;\n }\n\n tupleVariable.type = rhsType;\n\n if (rhsType.tag == TypeTags.TUPLE && !(checkTypeAndVarCountConsistency(tupleVariable,\n (BTupleType) tupleVariable.type, blockEnv))) {\n return;\n }\n\n if (rhsType.tag == TypeTags.UNION && !(checkTypeAndVarCountConsistency(tupleVariable, null,\n blockEnv))) {\n return;\n }\n\n symbolEnter.defineNode(tupleVariable, blockEnv);\n recursivelySetFinalFlag(tupleVariable);\n break;\n case RECORD_VARIABLE:\n BLangRecordVariable recordVariable = (BLangRecordVariable) variable;\n recordVariable.type = rhsType;\n 
validateRecordVariable(recordVariable, blockEnv);\n recursivelySetFinalFlag(recordVariable);\n break;\n case ERROR_VARIABLE:\n BLangErrorVariable errorVariable = (BLangErrorVariable) variable;\n if (TypeTags.ERROR != rhsType.tag) {\n dlog.error(variable.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_ERROR_VAR, rhsType);\n recursivelyDefineVariables(errorVariable, blockEnv);\n return;\n }\n errorVariable.type = rhsType;\n validateErrorVariable(errorVariable);\n recursivelySetFinalFlag(errorVariable);\n break;\n }\n }\n\n private void recursivelyDefineVariables(BLangVariable variable, SymbolEnv blockEnv) {\n switch (variable.getKind()) {\n case VARIABLE:\n Name name = names.fromIdNode(((BLangSimpleVariable) variable).name);\n if (name == Names.IGNORE) {\n return;\n }\n variable.type = symTable.semanticError;\n symbolEnter.defineVarSymbol(variable.pos, variable.flagSet, variable.type, name, blockEnv);\n break;\n case TUPLE_VARIABLE:\n ((BLangTupleVariable) variable).memberVariables.forEach(memberVariable ->\n recursivelyDefineVariables(memberVariable, blockEnv));\n break;\n case RECORD_VARIABLE:\n ((BLangRecordVariable) variable).variableList.forEach(value ->\n recursivelyDefineVariables(value.valueBindingPattern, blockEnv));\n break;\n }\n }\n\n private void recursivelySetFinalFlag(BLangVariable variable) {\n if (variable == null) {\n return;\n }\n\n switch (variable.getKind()) {\n case VARIABLE:\n if (variable.symbol == null) {\n return;\n }\n variable.symbol.flags |= Flags.FINAL;\n break;\n case TUPLE_VARIABLE:\n BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;\n tupleVariable.memberVariables.forEach(this::recursivelySetFinalFlag);\n recursivelySetFinalFlag(tupleVariable.restVariable);\n break;\n case RECORD_VARIABLE:\n BLangRecordVariable recordVariable = (BLangRecordVariable) variable;\n recordVariable.variableList.forEach(value -> recursivelySetFinalFlag(value.valueBindingPattern));\n recursivelySetFinalFlag((BLangVariable) 
recordVariable.restParam);\n break;\n case ERROR_VARIABLE:\n BLangErrorVariable errorVariable = (BLangErrorVariable) variable;\n recursivelySetFinalFlag(errorVariable.reason);\n recursivelySetFinalFlag(errorVariable.restDetail);\n errorVariable.detail.forEach(bLangErrorDetailEntry ->\n recursivelySetFinalFlag(bLangErrorDetailEntry.valueBindingPattern));\n break;\n }\n }\n\n private boolean checkTypeAndVarCountConsistency(BLangTupleVariable varNode) {\n return checkTypeAndVarCountConsistency(varNode, null, env);\n }\n\n private boolean checkTypeAndVarCountConsistency(BLangTupleVariable varNode, BTupleType tupleTypeNode,\n SymbolEnv env) {\n\n if (tupleTypeNode == null) {\n /*\n This switch block will resolve the tuple type of the tuple variable.\n For example consider the following - [int, string]|[boolean, float] [a, b] = foo();\n Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows:\n Type of 'a' will be (int | boolean) while the type of 'b' will be (string | float).\n Consider anydata (a, b) = foo();\n Here, the type of 'a'and type of 'b' will be both anydata.\n */\n switch (varNode.type.tag) {\n case TypeTags.UNION:\n Set unionType = types.expandAndGetMemberTypesRecursive(varNode.type);\n List possibleTypes = unionType.stream()\n .filter(type -> {\n if (TypeTags.TUPLE == type.tag &&\n (varNode.memberVariables.size() == ((BTupleType) type).tupleTypes.size())) {\n return true;\n }\n return TypeTags.ANY == type.tag || TypeTags.ANYDATA == type.tag;\n })\n .collect(Collectors.toList());\n\n if (possibleTypes.isEmpty()) {\n dlog.error(varNode.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_TUPLE_VAR, varNode.type);\n return false;\n }\n\n if (possibleTypes.size() > 1) {\n List memberTupleTypes = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n LinkedHashSet memberTypes = new LinkedHashSet<>();\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.TUPLE) {\n 
memberTypes.add(((BTupleType) possibleType).tupleTypes.get(i));\n } else {\n memberTupleTypes.add(varNode.type);\n }\n }\n\n if (memberTypes.size() > 1) {\n memberTupleTypes.add(BUnionType.create(null, memberTypes));\n } else {\n memberTupleTypes.addAll(memberTypes);\n }\n }\n tupleTypeNode = new BTupleType(memberTupleTypes);\n break;\n }\n\n if (possibleTypes.get(0).tag == TypeTags.TUPLE) {\n tupleTypeNode = (BTupleType) possibleTypes.get(0);\n break;\n }\n\n List memberTypes = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n memberTypes.add(possibleTypes.get(0));\n }\n tupleTypeNode = new BTupleType(memberTypes);\n break;\n case TypeTags.ANY:\n case TypeTags.ANYDATA:\n List memberTupleTypes = new ArrayList<>();\n for (int i = 0; i < varNode.memberVariables.size(); i++) {\n memberTupleTypes.add(varNode.type);\n }\n tupleTypeNode = new BTupleType(memberTupleTypes);\n if (varNode.restVariable != null) {\n tupleTypeNode.restType = varNode.type;\n }\n break;\n case TypeTags.TUPLE:\n tupleTypeNode = (BTupleType) varNode.type;\n break;\n default:\n dlog.error(varNode.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_TUPLE_VAR, varNode.type);\n return false;\n }\n }\n\n if (tupleTypeNode.tupleTypes.size() != varNode.memberVariables.size()\n || (tupleTypeNode.restType == null && varNode.restVariable != null)\n || (tupleTypeNode.restType != null && varNode.restVariable == null)) {\n dlog.error(varNode.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN);\n return false;\n }\n\n int ignoredCount = 0;\n List memberVariables = new ArrayList<>(varNode.memberVariables);\n if (varNode.restVariable != null) {\n memberVariables.add(varNode.restVariable);\n }\n for (int i = 0; i < memberVariables.size(); i++) {\n BLangVariable var = memberVariables.get(i);\n BType type = (i <= tupleTypeNode.tupleTypes.size() - 1) ? 
tupleTypeNode.tupleTypes.get(i) :\n new BArrayType(tupleTypeNode.restType);\n if (var.getKind() == NodeKind.VARIABLE) {\n \n BLangSimpleVariable simpleVar = (BLangSimpleVariable) var;\n Name varName = names.fromIdNode(simpleVar.name);\n if (varName == Names.IGNORE) {\n ignoredCount++;\n simpleVar.type = symTable.anyType;\n types.checkType(varNode.pos, type, simpleVar.type,\n DiagnosticCode.INCOMPATIBLE_TYPES);\n continue;\n }\n }\n var.type = type;\n analyzeNode(var, env);\n }\n\n if (!varNode.memberVariables.isEmpty() && ignoredCount == varNode.memberVariables.size()\n && varNode.restVariable == null) {\n dlog.error(varNode.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);\n return false;\n }\n return true;\n }\n\n private boolean validateRecordVariable(BLangRecordVariable recordVar) {\n return validateRecordVariable(recordVar, env);\n }\n\n private boolean validateRecordVariable(BLangRecordVariable recordVar, SymbolEnv env) {\n BRecordType recordVarType;\n /*\n This switch block will resolve the record type of the record variable.\n For example consider the following -\n type Foo record {int a, boolean b};\n type Bar record {string a, float b};\n Foo|Bar {a, b} = foo();\n Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows:\n Type of 'a' will be a union of the types of field 'a' in both Foo and Bar.\n i.e. 
type of 'a' is (int | string) and type of 'b' is (boolean | float).\n Consider anydata {a, b} = foo();\n Here, the type of 'a'and type of 'b' will be both anydata.\n */\n switch (recordVar.type.tag) {\n case TypeTags.UNION:\n BUnionType unionType = (BUnionType) recordVar.type;\n Set bTypes = types.expandAndGetMemberTypesRecursive(unionType);\n List possibleTypes = bTypes.stream()\n .filter(rec -> doesRecordContainKeys(rec, recordVar.variableList, recordVar.restParam != null))\n .collect(Collectors.toList());\n\n if (possibleTypes.isEmpty()) {\n dlog.error(recordVar.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, recordVar.type);\n return false;\n }\n\n if (possibleTypes.size() > 1) {\n BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0,\n names.fromString(ANONYMOUS_RECORD_NAME), env.enclPkg.symbol.pkgID, null, env.scope.owner);\n recordVarType = (BRecordType) symTable.recordType;\n\n List fields = populateAndGetPossibleFieldsForRecVar(recordVar, possibleTypes, recordSymbol);\n\n if (recordVar.restParam != null) {\n LinkedHashSet memberTypes = possibleTypes.stream()\n .map(possibleType -> {\n if (possibleType.tag == TypeTags.RECORD) {\n return ((BRecordType) possibleType).restFieldType;\n } else if (possibleType.tag == TypeTags.MAP) {\n return ((BMapType) possibleType).constraint;\n } else {\n return possibleType;\n }\n })\n .collect(Collectors.toCollection(LinkedHashSet::new));\n recordVarType.restFieldType = memberTypes.size() > 1 ?\n BUnionType.create(null, memberTypes) :\n memberTypes.iterator().next();\n }\n recordVarType.tsymbol = recordSymbol;\n recordVarType.fields = fields;\n recordSymbol.type = recordVarType;\n break;\n }\n\n if (possibleTypes.get(0).tag == TypeTags.RECORD) {\n recordVarType = (BRecordType) possibleTypes.get(0);\n break;\n }\n\n if (possibleTypes.get(0).tag == TypeTags.MAP) {\n recordVarType = createSameTypedFieldsRecordType(recordVar,\n ((BMapType) possibleTypes.get(0)).constraint);\n break;\n }\n\n recordVarType = 
createSameTypedFieldsRecordType(recordVar, possibleTypes.get(0));\n break;\n case TypeTags.RECORD:\n recordVarType = (BRecordType) recordVar.type;\n break;\n case TypeTags.MAP:\n recordVarType = createSameTypedFieldsRecordType(recordVar, ((BMapType) recordVar.type).constraint);\n break;\n case TypeTags.ANY:\n case TypeTags.ANYDATA:\n recordVarType = createSameTypedFieldsRecordType(recordVar, recordVar.type);\n break;\n default:\n dlog.error(recordVar.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, recordVar.type);\n return false;\n }\n\n Map recordVarTypeFields = recordVarType.fields.stream()\n .collect(Collectors.toMap(field -> field.getName().getValue(), field -> field));\n\n boolean validRecord = true;\n int ignoredCount = 0;\n for (BLangRecordVariableKeyValue variable : recordVar.variableList) {\n \n \n if (names.fromIdNode(variable.getKey()) == Names.IGNORE) {\n dlog.error(recordVar.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED);\n continue;\n }\n\n BLangVariable value = variable.getValue();\n if (value.getKind() == NodeKind.VARIABLE) {\n \n BLangSimpleVariable simpleVar = (BLangSimpleVariable) value;\n Name varName = names.fromIdNode(simpleVar.name);\n if (varName == Names.IGNORE) {\n ignoredCount++;\n simpleVar.type = symTable.anyType;\n if (!recordVarTypeFields.containsKey(variable.getKey().getValue())) {\n continue;\n }\n types.checkType(variable.valueBindingPattern.pos,\n recordVarTypeFields.get((variable.getKey().getValue())).type, simpleVar.type,\n DiagnosticCode.INCOMPATIBLE_TYPES);\n continue;\n }\n }\n if (!recordVarTypeFields.containsKey(variable.getKey().getValue())) {\n if (recordVarType.sealed) {\n validRecord = false;\n dlog.error(recordVar.pos, DiagnosticCode.INVALID_FIELD_IN_RECORD_BINDING_PATTERN,\n variable.getKey().getValue(), recordVar.type);\n } else {\n BType restType;\n if (recordVarType.restFieldType.tag == TypeTags.ANYDATA ||\n recordVarType.restFieldType.tag == TypeTags.ANY) {\n restType = recordVarType.restFieldType;\n } else {\n 
restType = BUnionType.create(null, recordVarType.restFieldType, symTable.nilType);\n }\n value.type = restType;\n value.accept(this);\n }\n continue;\n }\n\n value.type = recordVarTypeFields.get((variable.getKey().getValue())).type;\n value.accept(this);\n }\n\n if (!recordVar.variableList.isEmpty() && ignoredCount == recordVar.variableList.size()\n && recordVar.restParam == null) {\n dlog.error(recordVar.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);\n return false;\n }\n\n if (recordVar.restParam != null) {\n ((BLangVariable) recordVar.restParam).type = getRestParamType(recordVarType);\n symbolEnter.defineNode((BLangNode) recordVar.restParam, env);\n }\n\n return validRecord;\n }\n\n \n\n private boolean validateErrorVariable(BLangErrorVariable errorVariable, BErrorType errorType) {\n if (errorVariable.isInMatchStmt\n && !errorVariable.reasonVarPrefixAvailable\n && errorVariable.reasonMatchConst == null\n && isReasonSpecified(errorVariable)) {\n\n BSymbol reasonConst = symResolver.lookupSymbol(\n this.env.enclEnv, names.fromString(errorVariable.reason.name.value), SymTag.CONSTANT);\n if (reasonConst == symTable.notFoundSymbol) {\n dlog.error(errorVariable.reason.pos, DiagnosticCode.INVALID_ERROR_REASON_BINDING_PATTERN,\n errorVariable.reason.name);\n } else {\n dlog.error(errorVariable.reason.pos, DiagnosticCode.UNSUPPORTED_ERROR_REASON_CONST_MATCH);\n }\n return false;\n }\n\n BRecordType recordType = (BRecordType) errorType.detailType;\n Map fieldMap = recordType.fields.stream()\n .collect(Collectors.toMap(f -> f.name.value, f -> f));\n for (BLangErrorVariable.BLangErrorDetailEntry errorDetailEntry : errorVariable.detail) {\n String entryName = errorDetailEntry.key.getValue();\n BField entryField = fieldMap.get(entryName);\n\n BLangVariable boundVar = errorDetailEntry.valueBindingPattern;\n if (entryField != null) {\n if ((entryField.symbol.flags & Flags.OPTIONAL) == Flags.OPTIONAL) {\n boundVar.type = BUnionType.create(null, entryField.type, 
symTable.nilType);\n } else {\n boundVar.type = entryField.type;\n }\n } else {\n if (recordType.sealed) {\n dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type);\n boundVar.type = symTable.semanticError;\n return false;\n } else {\n boundVar.type = BUnionType.create(null, recordType.restFieldType, symTable.nilType);\n }\n }\n\n boolean isIgnoredVar = boundVar.getKind() == NodeKind.VARIABLE\n && ((BLangSimpleVariable) boundVar).name.value.equals(Names.IGNORE.value);\n if (!isIgnoredVar) {\n boundVar.accept(this);\n }\n }\n\n if (isRestDetailBindingAvailable(errorVariable)) {\n BTypeSymbol typeSymbol = createTypeSymbol(SymTag.TYPE);\n BMapType restType = new BMapType(TypeTags.MAP, recordType.restFieldType, typeSymbol);\n typeSymbol.type = restType;\n errorVariable.restDetail.type = restType;\n errorVariable.restDetail.accept(this);\n }\n return true;\n }\n\n private boolean isReasonSpecified(BLangErrorVariable errorVariable) {\n return !isIgnoredOrEmpty(errorVariable.reason);\n }\n\n private boolean isIgnoredOrEmpty(BLangSimpleVariable varNode) {\n return varNode.name.value.equals(Names.IGNORE.value) || varNode.name.value.equals(\"\");\n }\n\n private boolean isRestDetailBindingAvailable(BLangErrorVariable errorVariable) {\n return errorVariable.restDetail != null &&\n !errorVariable.restDetail.name.value.equals(Names.IGNORE.value);\n }\n\n private BTypeSymbol createTypeSymbol(int type) {\n return new BTypeSymbol(type, Flags.PUBLIC, Names.EMPTY, env.enclPkg.packageID,\n null, env.scope.owner);\n }\n\n /**\n * This method will resolve field types based on a list of possible types.\n * When a record variable has multiple possible assignable types, each field will be a union of the relevant\n * possible types field type.\n *\n * @param recordVar record variable whose fields types are to be resolved\n * @param possibleTypes list of possible types\n * @param recordSymbol symbol of the record type to be used in creating fields\n 
* @return the list of fields\n */\n private List populateAndGetPossibleFieldsForRecVar(BLangRecordVariable recordVar, List possibleTypes,\n BRecordTypeSymbol recordSymbol) {\n List fields = new ArrayList<>();\n for (BLangRecordVariableKeyValue bLangRecordVariableKeyValue : recordVar.variableList) {\n String fieldName = bLangRecordVariableKeyValue.key.value;\n LinkedHashSet memberTypes = new LinkedHashSet<>();\n for (BType possibleType : possibleTypes) {\n if (possibleType.tag == TypeTags.RECORD) {\n BRecordType possibleRecordType = (BRecordType) possibleType;\n Optional optionalField = possibleRecordType.fields.stream()\n .filter(field -> field.getName().getValue().equals(fieldName))\n .findFirst();\n if (optionalField.isPresent()) {\n BField bField = optionalField.get();\n if (Symbols.isOptional(bField.symbol)) {\n memberTypes.add(symTable.nilType);\n }\n memberTypes.add(bField.type);\n } else {\n memberTypes.add(possibleRecordType.restFieldType);\n memberTypes.add(symTable.nilType);\n }\n continue;\n }\n if (possibleType.tag == TypeTags.MAP) {\n BMapType possibleMapType = (BMapType) possibleType;\n memberTypes.add(possibleMapType.constraint);\n continue;\n }\n memberTypes.add(possibleType); \n }\n\n BType fieldType = memberTypes.size() > 1 ?\n BUnionType.create(null, memberTypes) : memberTypes.iterator().next();\n fields.add(new BField(names.fromString(fieldName), recordVar.pos,\n new BVarSymbol(0, names.fromString(fieldName), env.enclPkg.symbol.pkgID,\n fieldType, recordSymbol)));\n }\n return fields;\n }" }, { "comment": "```suggestion if (projectKind != null && !\"SINGLE_FILE_PROJECT\".equals(projectKind.getAsString())) { ```", "method_body": "private void compareResponse(String projectName, String response) {\n Path expectedPath = this.resourceRoot.resolve(\"metadata\").resolve(projectName);\n JsonObject expectedJsonObject =\n FileUtils.fileContentAsObject(expectedPath.toAbsolutePath().toString()).getAsJsonObject();\n JsonObject responseJsonObject = 
JSON_PARSER.parse(response).getAsJsonObject().getAsJsonObject(\"result\");\n JsonPrimitive packageName = expectedJsonObject.getAsJsonPrimitive(PACKAGE_NAME);\n if (packageName != null) {\n Assert.assertEquals(responseJsonObject.getAsJsonPrimitive(PACKAGE_NAME), packageName,\n \"Package MetadataTest \" + PACKAGE_NAME + \" fails with \" + projectName + \" test case.\");\n } else {\n Assert.assertNull(responseJsonObject.getAsJsonPrimitive(PACKAGE_NAME),\n \"Package MetadataTest \" + PACKAGE_NAME + \" fails with \" + projectName + \" test case.\");\n }\n\n JsonPrimitive projectKind = expectedJsonObject.getAsJsonPrimitive(KIND);\n Assert.assertEquals(responseJsonObject.getAsJsonPrimitive(KIND), projectKind,\n \"Package MetadataTest \" + KIND + \" fails with \" + projectName + \" test case.\");\n Assert.assertNotNull(responseJsonObject.getAsJsonPrimitive(PATH),\n \"Package MetadataTest \" + PATH + \" fails with \" + projectName + \" test case.\");\n\n Assert.assertNotNull(responseJsonObject.getAsJsonPrimitive(ORG_NAME),\n \"Package MetadataTest \" + ORG_NAME + \" fails with \" + projectName + \" test case.\");\n\n if (projectKind != null && !projectKind.getAsString().equals(\"SINGLE_FILE_PROJECT\")) {\n Assert.assertEquals(responseJsonObject.getAsJsonPrimitive(ORG_NAME),\n expectedJsonObject.getAsJsonPrimitive(ORG_NAME),\n \"Package MetadataTest \" + ORG_NAME + \" fails with \" + projectName + \" test case.\");\n }\n }", "target_code": "if (projectKind != null && !projectKind.getAsString().equals(\"SINGLE_FILE_PROJECT\")) {", "method_body_after": "private void compareResponse(String projectName, String response) {\n Path expectedPath = this.resourceRoot.resolve(\"metadata\").resolve(projectName);\n JsonObject expectedJsonObject =\n FileUtils.fileContentAsObject(expectedPath.toAbsolutePath().toString()).getAsJsonObject();\n JsonObject responseJsonObject = JSON_PARSER.parse(response).getAsJsonObject().getAsJsonObject(\"result\");\n JsonPrimitive packageName = 
expectedJsonObject.getAsJsonPrimitive(PACKAGE_NAME);\n if (packageName != null) {\n Assert.assertEquals(responseJsonObject.getAsJsonPrimitive(PACKAGE_NAME), packageName,\n \"Package MetadataTest \" + PACKAGE_NAME + \" fails with \" + projectName + \" test case.\");\n } else {\n Assert.assertNull(responseJsonObject.getAsJsonPrimitive(PACKAGE_NAME),\n \"Package MetadataTest \" + PACKAGE_NAME + \" fails with \" + projectName + \" test case.\");\n }\n\n JsonPrimitive projectKind = expectedJsonObject.getAsJsonPrimitive(KIND);\n Assert.assertEquals(responseJsonObject.getAsJsonPrimitive(KIND), projectKind,\n \"Package MetadataTest \" + KIND + \" fails with \" + projectName + \" test case.\");\n Assert.assertNotNull(responseJsonObject.getAsJsonPrimitive(PATH),\n \"Package MetadataTest \" + PATH + \" fails with \" + projectName + \" test case.\");\n\n Assert.assertNotNull(responseJsonObject.getAsJsonPrimitive(ORG_NAME),\n \"Package MetadataTest \" + ORG_NAME + \" fails with \" + projectName + \" test case.\");\n\n if (projectKind != null && !\"SINGLE_FILE_PROJECT\".equals(projectKind.getAsString())) {\n Assert.assertEquals(responseJsonObject.getAsJsonPrimitive(ORG_NAME),\n expectedJsonObject.getAsJsonPrimitive(ORG_NAME),\n \"Package MetadataTest \" + ORG_NAME + \" fails with \" + projectName + \" test case.\");\n }\n }", "context_before": "class MetadataTest {\n\n private static final String ORG_NAME = \"orgName\";\n private static final String PACKAGE_NAME = \"packageName\";\n private static final String PATH = \"path\";\n private static final String KIND = \"kind\";\n private static final JsonParser JSON_PARSER = new JsonParser();\n\n private Path resourceRoot;\n private Endpoint serviceEndpoint;\n\n @BeforeClass\n public void init() {\n this.resourceRoot = FileUtils.RES_DIR.resolve(\"packages\");\n this.serviceEndpoint = TestUtil.initializeLanguageSever();\n }\n\n @Test(description = \"Test package metadata API\", dataProvider = \"metadata-data-provider\")\n public void 
packageMetadataTestCase(String projectName, String packageName) throws IOException {\n Path sourcePath = this.resourceRoot.resolve(\"configs\").resolve(projectName).resolve(\"main.bal\");\n TestUtil.openDocument(serviceEndpoint, sourcePath);\n String response = TestUtil.getPackageMetadataResponse(serviceEndpoint, sourcePath.toAbsolutePath().toString());\n compareResponse(packageName, response);\n }\n\n /**\n * Compares actual response and expected response.\n *\n * @param projectName Project name for test reference\n * @param response JSON rpc response\n */\n \n\n @AfterClass\n public void cleanupLanguageServer() {\n TestUtil.shutdownLanguageServer(this.serviceEndpoint);\n }\n\n @DataProvider(name = \"metadata-data-provider\")\n public Object[][] getDataProvider() {\n return new Object[][]{\n {\"project-functions\", \"project-functions_expected.json\"},\n {\"project-services\", \"project-services_expected.json\"},\n {\"single-file\", \"single-file_expected.json\"}\n };\n }\n}", "context_after": "class MetadataTest {\n\n private static final String ORG_NAME = \"orgName\";\n private static final String PACKAGE_NAME = \"packageName\";\n private static final String PATH = \"path\";\n private static final String KIND = \"kind\";\n private static final JsonParser JSON_PARSER = new JsonParser();\n\n private Path resourceRoot;\n private Endpoint serviceEndpoint;\n\n @BeforeClass\n public void init() {\n this.resourceRoot = FileUtils.RES_DIR.resolve(\"packages\");\n this.serviceEndpoint = TestUtil.initializeLanguageSever();\n }\n\n @Test(description = \"Test package metadata API\", dataProvider = \"metadata-data-provider\")\n public void packageMetadataTestCase(String projectName, String packageName) throws IOException {\n Path sourcePath = this.resourceRoot.resolve(\"configs\").resolve(projectName).resolve(\"main.bal\");\n TestUtil.openDocument(serviceEndpoint, sourcePath);\n String response = TestUtil.getPackageMetadataResponse(serviceEndpoint, 
sourcePath.toAbsolutePath().toString());\n compareResponse(packageName, response);\n }\n\n /**\n * Compares actual response and expected response.\n *\n * @param projectName Project name for test reference\n * @param response JSON rpc response\n */\n \n\n @AfterClass\n public void cleanupLanguageServer() {\n TestUtil.shutdownLanguageServer(this.serviceEndpoint);\n }\n\n @DataProvider(name = \"metadata-data-provider\")\n public Object[][] getDataProvider() {\n return new Object[][]{\n {\"project-functions\", \"project-functions_expected.json\"},\n {\"project-services\", \"project-services_expected.json\"},\n {\"single-file\", \"single-file_expected.json\"}\n };\n }\n}" }, { "comment": "This is now supported automatically with https://github.com/smallrye/smallrye-config/pull/939", "method_body": "public static void startServerAfterFailedStart() {\n if (closeTask != null) {\n \n \n final Handler prevHotReplacementHandler = hotReplacementHandler;\n shutDownDevMode();\n \n \n \n hotReplacementHandler = prevHotReplacementHandler;\n }\n Supplier supplier = VertxCoreRecorder.getVertx();\n Vertx vertx;\n if (supplier == null) {\n \n \n \n \n vertx = null;\n } else {\n vertx = supplier.get();\n }\n\n try {\n HttpBuildTimeConfig buildConfig = new HttpBuildTimeConfig();\n ConfigInstantiator.handleObject(buildConfig);\n ManagementInterfaceBuildTimeConfig managementBuildTimeConfig = new ManagementInterfaceBuildTimeConfig();\n ConfigInstantiator.handleObject(managementBuildTimeConfig);\n HttpConfiguration config = new HttpConfiguration();\n ConfigInstantiator.handleObject(config);\n ManagementInterfaceConfiguration managementConfig = new ManagementInterfaceConfiguration();\n ConfigInstantiator.handleObject(managementConfig);\n if (config.host == null) {\n \n config.host = \"localhost\";\n }\n Router router = Router.router(vertx);\n if (hotReplacementHandler != null) {\n router.route().order(Integer.MIN_VALUE).blockingHandler(hotReplacementHandler);\n }\n\n Handler root = 
router;\n LiveReloadConfig liveReloadConfig = new LiveReloadConfig();\n ConfigInstantiator.handleObject(liveReloadConfig);\n if (liveReloadConfig.password.isPresent()\n && hotReplacementContext.getDevModeType() == DevModeType.REMOTE_SERVER_SIDE) {\n root = remoteSyncHandler = new RemoteSyncHandler(liveReloadConfig.password.get(), root, hotReplacementContext);\n }\n rootHandler = root;\n\n \n doServerStart(vertx, buildConfig, managementBuildTimeConfig, null, config, managementConfig, LaunchMode.DEVELOPMENT,\n new Supplier() {\n @Override\n public Integer get() {\n return ProcessorInfo.availableProcessors(); \n }\n }, null, false);\n } catch (Exception e) {\n throw new RuntimeException(e);\n }\n }", "target_code": "", "method_body_after": "public static void startServerAfterFailedStart() {\n if (closeTask != null) {\n \n \n final Handler prevHotReplacementHandler = hotReplacementHandler;\n shutDownDevMode();\n \n \n \n hotReplacementHandler = prevHotReplacementHandler;\n }\n Supplier supplier = VertxCoreRecorder.getVertx();\n Vertx vertx;\n if (supplier == null) {\n VertxConfiguration vertxConfiguration = ConfigUtils.emptyConfigBuilder()\n .addDiscoveredSources()\n .withMapping(VertxConfiguration.class)\n .build().getConfigMapping(VertxConfiguration.class);\n vertx = VertxCoreRecorder.recoverFailedStart(vertxConfiguration).get();\n } else {\n vertx = supplier.get();\n }\n\n try {\n HttpBuildTimeConfig buildConfig = new HttpBuildTimeConfig();\n ConfigInstantiator.handleObject(buildConfig);\n ManagementInterfaceBuildTimeConfig managementBuildTimeConfig = new ManagementInterfaceBuildTimeConfig();\n ConfigInstantiator.handleObject(managementBuildTimeConfig);\n HttpConfiguration config = new HttpConfiguration();\n ConfigInstantiator.handleObject(config);\n ManagementInterfaceConfiguration managementConfig = new ManagementInterfaceConfiguration();\n ConfigInstantiator.handleObject(managementConfig);\n if (config.host == null) {\n \n config.host = \"localhost\";\n }\n 
Router router = Router.router(vertx);\n if (hotReplacementHandler != null) {\n router.route().order(Integer.MIN_VALUE).blockingHandler(hotReplacementHandler);\n }\n\n Handler root = router;\n LiveReloadConfig liveReloadConfig = new LiveReloadConfig();\n ConfigInstantiator.handleObject(liveReloadConfig);\n if (liveReloadConfig.password.isPresent()\n && hotReplacementContext.getDevModeType() == DevModeType.REMOTE_SERVER_SIDE) {\n root = remoteSyncHandler = new RemoteSyncHandler(liveReloadConfig.password.get(), root, hotReplacementContext);\n }\n rootHandler = root;\n\n \n doServerStart(vertx, buildConfig, managementBuildTimeConfig, null, config, managementConfig, LaunchMode.DEVELOPMENT,\n new Supplier() {\n @Override\n public Integer get() {\n return ProcessorInfo.availableProcessors(); \n }\n }, null, false);\n } catch (Exception e) {\n throw new RuntimeException(e);\n }\n }", "context_before": "class VertxHttpRecorder {\n\n /**\n * The key that the request start time is stored under\n */\n public static final String REQUEST_START_TIME = \"io.quarkus.request-start-time\";\n\n public static final String MAX_REQUEST_SIZE_KEY = \"io.quarkus.max-request-size\";\n\n private static final String DISABLE_WEBSOCKETS_PROP_NAME = \"vertx.disableWebsockets\";\n\n /**\n * Order mark for route with priority over the default route (add an offset from this mark)\n **/\n public static final int BEFORE_DEFAULT_ROUTE_ORDER_MARK = 1_000;\n\n /**\n * Default route order (i.e. 
Static Resources, Servlet)\n **/\n public static final int DEFAULT_ROUTE_ORDER = 10_000;\n\n /**\n * Order mark for route without priority over the default route (add an offset from this mark)\n **/\n public static final int AFTER_DEFAULT_ROUTE_ORDER_MARK = 20_000;\n\n private static final Logger LOGGER = Logger.getLogger(VertxHttpRecorder.class.getName());\n\n private static volatile Handler hotReplacementHandler;\n private static volatile HotReplacementContext hotReplacementContext;\n private static volatile RemoteSyncHandler remoteSyncHandler;\n\n private static volatile Runnable closeTask;\n\n static volatile Handler rootHandler;\n\n private static volatile Handler nonApplicationRedirectHandler;\n\n private static volatile int actualHttpPort = -1;\n private static volatile int actualHttpsPort = -1;\n\n private static volatile int actualManagementPort = -1;\n\n public static final String GET = \"GET\";\n private static final Handler ACTUAL_ROOT = new Handler() {\n\n /** JVM system property that disables URI validation, don't use this in production. 
*/\n private static final String DISABLE_URI_VALIDATION_PROP_NAME = \"vertx.disableURIValidation\";\n /**\n * Disables HTTP headers validation, so we can save some processing and save some allocations.\n */\n private final boolean DISABLE_URI_VALIDATION = Boolean.getBoolean(DISABLE_URI_VALIDATION_PROP_NAME);\n\n @Override\n public void handle(HttpServerRequest httpServerRequest) {\n if (!uriValid(httpServerRequest)) {\n httpServerRequest.response().setStatusCode(400).end();\n return;\n }\n\n \n \n \n \n \n httpServerRequest.pause();\n Handler rh = VertxHttpRecorder.rootHandler;\n if (rh != null) {\n rh.handle(httpServerRequest);\n } else {\n \n httpServerRequest.resume();\n httpServerRequest.response().setStatusCode(503).end();\n }\n }\n\n private boolean uriValid(HttpServerRequest httpServerRequest) {\n if (DISABLE_URI_VALIDATION) {\n return true;\n }\n try {\n \n new URI(httpServerRequest.uri());\n return true;\n } catch (URISyntaxException e) {\n return false;\n }\n }\n };\n private static HttpServerOptions httpMainSslServerOptions;\n private static HttpServerOptions httpMainServerOptions;\n private static HttpServerOptions httpMainDomainSocketOptions;\n private static HttpServerOptions httpManagementServerOptions;\n final HttpBuildTimeConfig httpBuildTimeConfig;\n final ManagementInterfaceBuildTimeConfig managementBuildTimeConfig;\n final RuntimeValue httpConfiguration;\n\n final RuntimeValue managementConfiguration;\n private static volatile Handler managementRouter;\n\n public VertxHttpRecorder(HttpBuildTimeConfig httpBuildTimeConfig,\n ManagementInterfaceBuildTimeConfig managementBuildTimeConfig,\n RuntimeValue httpConfiguration,\n RuntimeValue managementConfiguration) {\n this.httpBuildTimeConfig = httpBuildTimeConfig;\n this.httpConfiguration = httpConfiguration;\n this.managementBuildTimeConfig = managementBuildTimeConfig;\n this.managementConfiguration = managementConfiguration;\n }\n\n public static void setHotReplacement(Handler handler, 
HotReplacementContext hrc) {\n hotReplacementHandler = handler;\n hotReplacementContext = hrc;\n }\n\n public static void shutDownDevMode() {\n if (closeTask != null) {\n closeTask.run();\n closeTask = null;\n }\n rootHandler = null;\n hotReplacementHandler = null;\n\n }\n\n \n\n public RuntimeValue initializeRouter(final Supplier vertxRuntimeValue) {\n Vertx vertx = vertxRuntimeValue.get();\n Router router = Router.router(vertx);\n return new RuntimeValue<>(router);\n }\n\n public RuntimeValue createMutinyRouter(final RuntimeValue router) {\n return new RuntimeValue<>(new io.vertx.mutiny.ext.web.Router(router.getValue()));\n }\n\n public void startServer(Supplier vertx, ShutdownContext shutdown,\n LaunchMode launchMode,\n boolean startVirtual, boolean startSocket, Supplier ioThreads, List websocketSubProtocols,\n boolean auxiliaryApplication, boolean disableWebSockets)\n throws IOException {\n\n \n if (disableWebSockets && !System.getProperties().containsKey(DISABLE_WEBSOCKETS_PROP_NAME)) {\n System.setProperty(DISABLE_WEBSOCKETS_PROP_NAME, \"true\");\n }\n\n if (startVirtual) {\n initializeVirtual(vertx.get());\n shutdown.addShutdownTask(() -> {\n try {\n virtualBootstrapChannel.channel().close().sync();\n } catch (InterruptedException e) {\n LOGGER.warn(\"Unable to close virtualBootstrapChannel\");\n } finally {\n virtualBootstrapChannel = null;\n virtualBootstrap = null;\n }\n });\n }\n HttpConfiguration httpConfiguration = this.httpConfiguration.getValue();\n ManagementInterfaceConfiguration managementConfig = this.managementConfiguration == null ? 
null\n : this.managementConfiguration.getValue();\n if (startSocket && (httpConfiguration.hostEnabled || httpConfiguration.domainSocketEnabled\n || managementConfig.hostEnabled || managementConfig.domainSocketEnabled)) {\n \n if (closeTask == null) {\n doServerStart(vertx.get(), httpBuildTimeConfig, managementBuildTimeConfig, managementRouter,\n httpConfiguration, managementConfig, launchMode, ioThreads, websocketSubProtocols,\n auxiliaryApplication);\n if (launchMode != LaunchMode.DEVELOPMENT) {\n shutdown.addShutdownTask(closeTask);\n } else {\n shutdown.addShutdownTask(new Runnable() {\n @Override\n public void run() {\n VertxHttpHotReplacementSetup.handleDevModeRestart();\n }\n });\n }\n }\n }\n }\n\n public void mountFrameworkRouter(RuntimeValue mainRouter, RuntimeValue frameworkRouter,\n String frameworkPath) {\n mainRouter.getValue().mountSubRouter(frameworkPath, frameworkRouter.getValue());\n }\n\n public void finalizeRouter(BeanContainer container, Consumer defaultRouteHandler,\n List filterList, List managementInterfaceFilterList, Supplier vertx,\n LiveReloadConfig liveReloadConfig, Optional> mainRouterRuntimeValue,\n RuntimeValue httpRouterRuntimeValue, RuntimeValue mutinyRouter,\n RuntimeValue frameworkRouter, RuntimeValue managementRouter,\n String rootPath, String nonRootPath,\n LaunchMode launchMode, boolean requireBodyHandler,\n Handler bodyHandler,\n GracefulShutdownFilter gracefulShutdownFilter, ShutdownConfig shutdownConfig,\n Executor executor) {\n HttpConfiguration httpConfiguration = this.httpConfiguration.getValue();\n \n Router httpRouteRouter = httpRouterRuntimeValue.getValue();\n\n \n Event event = Arc.container().beanManager().getEvent();\n\n \n Filters filters = new Filters();\n event.select(Filters.class).fire(filters);\n\n filterList.addAll(filters.getFilters());\n\n \n event.select(Router.class, Default.Literal.INSTANCE).fire(httpRouteRouter);\n \n event.select(io.vertx.mutiny.ext.web.Router.class).fire(mutinyRouter.getValue());\n\n 
for (Filter filter : filterList) {\n if (filter.getHandler() != null) {\n if (filter.isFailureHandler()) {\n \n httpRouteRouter.route().order(-1 * filter.getPriority()).failureHandler(filter.getHandler());\n } else {\n \n httpRouteRouter.route().order(-1 * filter.getPriority()).handler(filter.getHandler());\n }\n }\n }\n\n if (defaultRouteHandler != null) {\n defaultRouteHandler.accept(httpRouteRouter.route().order(DEFAULT_ROUTE_ORDER));\n }\n\n applyCompression(httpBuildTimeConfig.enableCompression, httpRouteRouter);\n httpRouteRouter.route().last().failureHandler(\n new QuarkusErrorHandler(launchMode.isDevOrTest(), httpConfiguration.unhandledErrorContentTypeDefault));\n\n if (requireBodyHandler) {\n \n \n httpRouteRouter.route().order(Integer.MIN_VALUE + 1).handler(new Handler() {\n @Override\n public void handle(RoutingContext routingContext) {\n routingContext.request().resume();\n bodyHandler.handle(routingContext);\n }\n });\n }\n\n HttpServerCommonHandlers.enforceMaxBodySize(httpConfiguration.limits, httpRouteRouter);\n \n var filtersInConfig = httpConfiguration.filter;\n HttpServerCommonHandlers.applyFilters(filtersInConfig, httpRouteRouter);\n \n HttpServerCommonHandlers.applyHeaders(httpConfiguration.header, httpRouteRouter);\n\n Handler root;\n if (rootPath.equals(\"/\")) {\n if (hotReplacementHandler != null) {\n \n ClassLoader currentCl = Thread.currentThread().getContextClassLoader();\n httpRouteRouter.route().order(Integer.MIN_VALUE).handler(new Handler() {\n @Override\n public void handle(RoutingContext event) {\n Thread.currentThread().setContextClassLoader(currentCl);\n hotReplacementHandler.handle(event);\n }\n });\n }\n root = httpRouteRouter;\n } else {\n Router mainRouter = mainRouterRuntimeValue.isPresent() ? 
mainRouterRuntimeValue.get().getValue()\n : Router.router(vertx.get());\n mainRouter.mountSubRouter(rootPath, httpRouteRouter);\n\n if (hotReplacementHandler != null) {\n ClassLoader currentCl = Thread.currentThread().getContextClassLoader();\n mainRouter.route().order(Integer.MIN_VALUE).handler(new Handler() {\n @Override\n public void handle(RoutingContext event) {\n Thread.currentThread().setContextClassLoader(currentCl);\n hotReplacementHandler.handle(event);\n }\n });\n }\n root = mainRouter;\n }\n\n warnIfProxyAddressForwardingAllowedWithMultipleHeaders(httpConfiguration.proxy);\n root = HttpServerCommonHandlers.applyProxy(httpConfiguration.proxy, root, vertx);\n\n boolean quarkusWrapperNeeded = false;\n\n if (shutdownConfig.isShutdownTimeoutSet()) {\n gracefulShutdownFilter.next(root);\n root = gracefulShutdownFilter;\n quarkusWrapperNeeded = true;\n }\n\n AccessLogConfig accessLog = httpConfiguration.accessLog;\n if (accessLog.enabled) {\n AccessLogReceiver receiver;\n if (accessLog.logToFile) {\n File outputDir = accessLog.logDirectory.isPresent() ? 
new File(accessLog.logDirectory.get()) : new File(\"\");\n receiver = new DefaultAccessLogReceiver(executor, outputDir, accessLog.baseFileName, accessLog.logSuffix,\n accessLog.rotate);\n } else {\n receiver = new JBossLoggingAccessLogReceiver(accessLog.category);\n }\n AccessLogHandler handler = new AccessLogHandler(receiver, accessLog.pattern, getClass().getClassLoader(),\n accessLog.excludePattern);\n if (rootPath.equals(\"/\") || nonRootPath.equals(\"/\")) {\n mainRouterRuntimeValue.orElse(httpRouterRuntimeValue).getValue().route().order(Integer.MIN_VALUE)\n .handler(handler);\n } else if (nonRootPath.startsWith(rootPath)) {\n httpRouteRouter.route().order(Integer.MIN_VALUE).handler(handler);\n } else if (rootPath.startsWith(nonRootPath)) {\n frameworkRouter.getValue().route().order(Integer.MIN_VALUE).handler(handler);\n } else {\n httpRouteRouter.route().order(Integer.MIN_VALUE).handler(handler);\n frameworkRouter.getValue().route().order(Integer.MIN_VALUE).handler(handler);\n }\n\n quarkusWrapperNeeded = true;\n }\n\n BiConsumer cookieFunction = null;\n if (!httpConfiguration.sameSiteCookie.isEmpty()) {\n cookieFunction = processSameSiteConfig(httpConfiguration.sameSiteCookie);\n quarkusWrapperNeeded = true;\n }\n BiConsumer cookieConsumer = cookieFunction;\n\n if (quarkusWrapperNeeded) {\n Handler old = root;\n root = new Handler() {\n @Override\n public void handle(HttpServerRequest event) {\n old.handle(new QuarkusRequestWrapper(event, cookieConsumer));\n }\n };\n }\n\n Handler delegate = root;\n root = HttpServerCommonHandlers.enforceDuplicatedContext(delegate);\n if (httpConfiguration.recordRequestStartTime) {\n httpRouteRouter.route().order(Integer.MIN_VALUE).handler(new Handler() {\n @Override\n public void handle(RoutingContext event) {\n event.put(REQUEST_START_TIME, System.nanoTime());\n event.next();\n }\n });\n }\n if (launchMode == LaunchMode.DEVELOPMENT && liveReloadConfig.password.isPresent()\n && hotReplacementContext.getDevModeType() == 
DevModeType.REMOTE_SERVER_SIDE) {\n root = remoteSyncHandler = new RemoteSyncHandler(liveReloadConfig.password.get(), root, hotReplacementContext);\n }\n rootHandler = root;\n\n if (managementRouter != null && managementRouter.getValue() != null) {\n \n var mr = managementRouter.getValue();\n\n mr.route().last().failureHandler(\n new QuarkusErrorHandler(launchMode.isDevOrTest(), httpConfiguration.unhandledErrorContentTypeDefault));\n\n mr.route().order(Integer.MIN_VALUE).handler(createBodyHandlerForManagementInterface());\n \n mr.route().order(Integer.MIN_VALUE).handler(CorsHandler.create().addOrigin(\"*\"));\n\n HttpServerCommonHandlers.applyFilters(managementConfiguration.getValue().filter, mr);\n for (Filter filter : managementInterfaceFilterList) {\n mr.route().order(filter.getPriority()).handler(filter.getHandler());\n }\n\n HttpServerCommonHandlers.applyHeaders(managementConfiguration.getValue().header, mr);\n HttpServerCommonHandlers.enforceMaxBodySize(managementConfiguration.getValue().limits, mr);\n applyCompression(managementBuildTimeConfig.enableCompression, mr);\n\n Handler handler = HttpServerCommonHandlers.enforceDuplicatedContext(mr);\n handler = HttpServerCommonHandlers.applyProxy(managementConfiguration.getValue().proxy, handler, vertx);\n\n event.select(ManagementInterface.class).fire(new ManagementInterfaceImpl(managementRouter.getValue()));\n\n VertxHttpRecorder.managementRouter = handler;\n }\n }\n\n private void applyCompression(boolean enableCompression, Router httpRouteRouter) {\n if (enableCompression) {\n httpRouteRouter.route().order(0).handler(new Handler() {\n @Override\n public void handle(RoutingContext ctx) {\n \n \n ctx.response().putHeader(HttpHeaders.CONTENT_ENCODING, HttpHeaders.IDENTITY);\n ctx.next();\n }\n });\n }\n }\n\n private void warnIfProxyAddressForwardingAllowedWithMultipleHeaders(ProxyConfig proxyConfig) {\n boolean proxyAddressForwardingActivated = proxyConfig.proxyAddressForwarding;\n boolean forwardedActivated = 
proxyConfig.allowForwarded;\n boolean xForwardedActivated = proxyConfig.allowXForwarded.orElse(!forwardedActivated);\n\n if (proxyAddressForwardingActivated && forwardedActivated && xForwardedActivated) {\n LOGGER.warn(\n \"The X-Forwarded-* and Forwarded headers will be considered when determining the proxy address. \" +\n \"This configuration can cause a security issue as clients can forge requests and send a \" +\n \"forwarded header that is not overwritten by the proxy. \" +\n \"Please consider use one of these headers just to forward the proxy address in requests.\");\n }\n }\n\n private static CompletableFuture initializeManagementInterfaceWithDomainSocket(Vertx vertx,\n ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler managementRouter,\n ManagementInterfaceConfiguration managementConfig,\n List websocketSubProtocols) {\n CompletableFuture managementInterfaceDomainSocketFuture = new CompletableFuture<>();\n if (!managementBuildTimeConfig.enabled || managementRouter == null || managementConfig == null) {\n managementInterfaceDomainSocketFuture.complete(null);\n return managementInterfaceDomainSocketFuture;\n }\n\n HttpServerOptions domainSocketOptionsForManagement = createDomainSocketOptionsForManagementInterface(\n managementBuildTimeConfig, managementConfig,\n websocketSubProtocols);\n if (domainSocketOptionsForManagement != null) {\n vertx.createHttpServer(domainSocketOptionsForManagement)\n .requestHandler(managementRouter)\n .listen(ar -> {\n if (ar.failed()) {\n managementInterfaceDomainSocketFuture.completeExceptionally(\n new IllegalStateException(\n \"Unable to start the management interface on the \"\n + domainSocketOptionsForManagement.getHost() + \" domain socket\",\n ar.cause()));\n } else {\n managementInterfaceDomainSocketFuture.complete(ar.result());\n }\n });\n } else {\n managementInterfaceDomainSocketFuture.complete(null);\n }\n return managementInterfaceDomainSocketFuture;\n }\n\n private static CompletableFuture 
initializeManagementInterface(Vertx vertx,\n ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler managementRouter,\n ManagementInterfaceConfiguration managementConfig,\n LaunchMode launchMode,\n List websocketSubProtocols) throws IOException {\n httpManagementServerOptions = null;\n CompletableFuture managementInterfaceFuture = new CompletableFuture<>();\n if (!managementBuildTimeConfig.enabled || managementRouter == null || managementConfig == null) {\n managementInterfaceFuture.complete(null);\n return managementInterfaceFuture;\n }\n\n HttpServerOptions httpServerOptionsForManagement = createHttpServerOptionsForManagementInterface(\n managementBuildTimeConfig, managementConfig, launchMode,\n websocketSubProtocols);\n httpManagementServerOptions = HttpServerOptionsUtils.createSslOptionsForManagementInterface(\n managementBuildTimeConfig, managementConfig, launchMode,\n websocketSubProtocols);\n if (httpManagementServerOptions != null && httpManagementServerOptions.getKeyCertOptions() == null) {\n httpManagementServerOptions = httpServerOptionsForManagement;\n }\n\n if (httpManagementServerOptions != null) {\n vertx.createHttpServer(httpManagementServerOptions)\n .requestHandler(managementRouter)\n .listen(ar -> {\n if (ar.failed()) {\n managementInterfaceFuture.completeExceptionally(\n new IllegalStateException(\"Unable to start the management interface\", ar.cause()));\n } else {\n actualManagementPort = ar.result().actualPort();\n managementInterfaceFuture.complete(ar.result());\n }\n });\n } else {\n managementInterfaceFuture.complete(null);\n }\n return managementInterfaceFuture;\n }\n\n private static CompletableFuture initializeMainHttpServer(Vertx vertx, HttpBuildTimeConfig httpBuildTimeConfig,\n HttpConfiguration httpConfiguration,\n LaunchMode launchMode,\n Supplier eventLoops, List websocketSubProtocols) throws IOException {\n\n if (!httpConfiguration.hostEnabled && !httpConfiguration.domainSocketEnabled) {\n return 
CompletableFuture.completedFuture(null);\n }\n\n \n httpMainServerOptions = createHttpServerOptions(httpBuildTimeConfig, httpConfiguration, launchMode,\n websocketSubProtocols);\n httpMainDomainSocketOptions = createDomainSocketOptions(httpBuildTimeConfig, httpConfiguration,\n websocketSubProtocols);\n HttpServerOptions tmpSslConfig = HttpServerOptionsUtils.createSslOptions(httpBuildTimeConfig, httpConfiguration,\n launchMode,\n websocketSubProtocols);\n\n \n if (Arc.container() != null) {\n List> instances = Arc.container()\n .listAll(HttpServerOptionsCustomizer.class);\n for (InstanceHandle instance : instances) {\n HttpServerOptionsCustomizer customizer = instance.get();\n if (httpMainServerOptions != null) {\n customizer.customizeHttpServer(httpMainServerOptions);\n }\n if (tmpSslConfig != null) {\n customizer.customizeHttpsServer(tmpSslConfig);\n }\n if (httpMainDomainSocketOptions != null) {\n customizer.customizeDomainSocketServer(httpMainDomainSocketOptions);\n }\n }\n }\n\n \n if (tmpSslConfig != null && tmpSslConfig.getKeyCertOptions() == null) {\n tmpSslConfig = null;\n }\n httpMainSslServerOptions = tmpSslConfig;\n\n if (httpConfiguration.insecureRequests != HttpConfiguration.InsecureRequests.ENABLED\n && httpMainSslServerOptions == null) {\n throw new IllegalStateException(\"Cannot set quarkus.http.redirect-insecure-requests without enabling SSL.\");\n }\n\n int eventLoopCount = eventLoops.get();\n final int ioThreads;\n if (httpConfiguration.ioThreads.isPresent()) {\n ioThreads = Math.min(httpConfiguration.ioThreads.getAsInt(), eventLoopCount);\n } else if (launchMode.isDevOrTest()) {\n ioThreads = Math.min(2, eventLoopCount); \n } else {\n ioThreads = eventLoopCount;\n }\n CompletableFuture futureResult = new CompletableFuture<>();\n\n AtomicInteger connectionCount = new AtomicInteger();\n vertx.deployVerticle(new Supplier() {\n @Override\n public Verticle get() {\n return new WebDeploymentVerticle(httpMainServerOptions, httpMainSslServerOptions, 
httpMainDomainSocketOptions,\n launchMode,\n httpConfiguration.insecureRequests, httpConfiguration, connectionCount);\n }\n }, new DeploymentOptions().setInstances(ioThreads), new Handler>() {\n @Override\n public void handle(AsyncResult event) {\n if (event.failed()) {\n Throwable effectiveCause = event.cause();\n if (effectiveCause instanceof BindException) {\n List portsUsed = Collections.emptyList();\n\n if ((httpMainSslServerOptions == null) && (httpMainServerOptions != null)) {\n portsUsed = List.of(httpMainServerOptions.getPort());\n } else if ((httpConfiguration.insecureRequests == InsecureRequests.DISABLED)\n && (httpMainSslServerOptions != null)) {\n portsUsed = List.of(httpMainSslServerOptions.getPort());\n } else if ((httpMainSslServerOptions != null)\n && (httpConfiguration.insecureRequests == InsecureRequests.ENABLED)\n && (httpMainServerOptions != null)) {\n portsUsed = List.of(httpMainServerOptions.getPort(), httpMainSslServerOptions.getPort());\n }\n\n effectiveCause = new QuarkusBindException((BindException) effectiveCause, portsUsed);\n }\n futureResult.completeExceptionally(effectiveCause);\n } else {\n futureResult.complete(event.result());\n }\n }\n });\n\n return futureResult;\n }\n\n private static void doServerStart(Vertx vertx, HttpBuildTimeConfig httpBuildTimeConfig,\n ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler managementRouter,\n HttpConfiguration httpConfiguration, ManagementInterfaceConfiguration managementConfig,\n LaunchMode launchMode,\n Supplier eventLoops, List websocketSubProtocols, boolean auxiliaryApplication) throws IOException {\n\n var mainServerFuture = initializeMainHttpServer(vertx, httpBuildTimeConfig, httpConfiguration, launchMode, eventLoops,\n websocketSubProtocols);\n var managementInterfaceFuture = initializeManagementInterface(vertx, managementBuildTimeConfig, managementRouter,\n managementConfig, launchMode, websocketSubProtocols);\n var managementInterfaceDomainSocketFuture = 
initializeManagementInterfaceWithDomainSocket(vertx,\n managementBuildTimeConfig, managementRouter, managementConfig, websocketSubProtocols);\n\n try {\n String deploymentIdIfAny = mainServerFuture.get();\n\n HttpServer tmpManagementServer = null;\n HttpServer tmpManagementServerUsingDomainSocket = null;\n if (managementRouter != null) {\n tmpManagementServer = managementInterfaceFuture.get();\n tmpManagementServerUsingDomainSocket = managementInterfaceDomainSocketFuture.get();\n }\n HttpServer managementServer = tmpManagementServer;\n HttpServer managementServerDomainSocket = tmpManagementServerUsingDomainSocket;\n if (deploymentIdIfAny != null) {\n VertxCoreRecorder.setWebDeploymentId(deploymentIdIfAny);\n }\n closeTask = new Runnable() {\n @Override\n public synchronized void run() {\n \n if (closeTask == this) {\n boolean isVertxClose = ((VertxInternal) vertx).closeFuture().future().isComplete();\n int count = 0;\n if (deploymentIdIfAny != null && vertx.deploymentIDs().contains(deploymentIdIfAny)) {\n count++;\n }\n if (managementServer != null && !isVertxClose) {\n count++;\n }\n if (managementServerDomainSocket != null && !isVertxClose) {\n count++;\n }\n\n CountDownLatch latch = new CountDownLatch(count);\n var handler = new Handler>() {\n @Override\n public void handle(AsyncResult event) {\n latch.countDown();\n }\n };\n\n \n if (deploymentIdIfAny != null) {\n try {\n vertx.undeploy(deploymentIdIfAny, handler);\n } catch (Exception e) {\n LOGGER.warn(\"Failed to undeploy deployment \", e);\n }\n }\n\n \n try {\n if (managementServer != null && !isVertxClose) {\n managementServer.close(handler);\n }\n if (managementServerDomainSocket != null && !isVertxClose) {\n managementServerDomainSocket.close(handler);\n }\n } catch (Exception e) {\n LOGGER.warn(\"Unable to shutdown the management interface quietly\", e);\n }\n\n try {\n latch.await();\n } catch (InterruptedException e) {\n throw new RuntimeException(e);\n }\n }\n closeTask = null;\n if 
(remoteSyncHandler != null) {\n remoteSyncHandler.close();\n remoteSyncHandler = null;\n }\n }\n };\n } catch (InterruptedException | ExecutionException e) {\n throw new RuntimeException(\"Unable to start HTTP server\", e);\n }\n\n setHttpServerTiming(httpConfiguration.insecureRequests, httpMainServerOptions, httpMainSslServerOptions,\n httpMainDomainSocketOptions,\n auxiliaryApplication, httpManagementServerOptions);\n }\n\n private static void setHttpServerTiming(InsecureRequests insecureRequests, HttpServerOptions httpServerOptions,\n HttpServerOptions sslConfig,\n HttpServerOptions domainSocketOptions, boolean auxiliaryApplication, HttpServerOptions managementConfig) {\n StringBuilder serverListeningMessage = new StringBuilder(\"Listening on: \");\n int socketCount = 0;\n\n if (httpServerOptions != null && !InsecureRequests.DISABLED.equals(insecureRequests)) {\n serverListeningMessage.append(String.format(\n \"http:\n socketCount++;\n }\n\n if (sslConfig != null) {\n if (socketCount > 0) {\n serverListeningMessage.append(\" and \");\n }\n serverListeningMessage.append(String.format(\"https:\n socketCount++;\n }\n\n if (domainSocketOptions != null) {\n if (socketCount > 0) {\n serverListeningMessage.append(\" and \");\n }\n serverListeningMessage.append(String.format(\"unix:%s\", domainSocketOptions.getHost()));\n }\n if (managementConfig != null) {\n serverListeningMessage.append(\n String.format(\". 
Management interface listening on http%s:\n managementConfig.getHost(), managementConfig.getPort()));\n }\n\n Timing.setHttpServer(serverListeningMessage.toString(), auxiliaryApplication);\n }\n\n private static HttpServerOptions createHttpServerOptions(\n HttpBuildTimeConfig buildTimeConfig, HttpConfiguration httpConfiguration,\n LaunchMode launchMode, List websocketSubProtocols) {\n if (!httpConfiguration.hostEnabled) {\n return null;\n }\n \n HttpServerOptions options = new HttpServerOptions();\n int port = httpConfiguration.determinePort(launchMode);\n options.setPort(port == 0 ? -1 : port);\n\n HttpServerOptionsUtils.applyCommonOptions(options, buildTimeConfig, httpConfiguration, websocketSubProtocols);\n\n return options;\n }\n\n private static HttpServerOptions createHttpServerOptionsForManagementInterface(\n ManagementInterfaceBuildTimeConfig buildTimeConfig, ManagementInterfaceConfiguration httpConfiguration,\n LaunchMode launchMode, List websocketSubProtocols) {\n if (!httpConfiguration.hostEnabled) {\n return null;\n }\n HttpServerOptions options = new HttpServerOptions();\n int port = httpConfiguration.determinePort(launchMode);\n options.setPort(port == 0 ? 
-1 : port);\n\n HttpServerOptionsUtils.applyCommonOptionsForManagementInterface(options, buildTimeConfig, httpConfiguration,\n websocketSubProtocols);\n\n return options;\n }\n\n private static HttpServerOptions createDomainSocketOptions(\n HttpBuildTimeConfig buildTimeConfig, HttpConfiguration httpConfiguration,\n List websocketSubProtocols) {\n if (!httpConfiguration.domainSocketEnabled) {\n return null;\n }\n HttpServerOptions options = new HttpServerOptions();\n\n HttpServerOptionsUtils.applyCommonOptions(options, buildTimeConfig, httpConfiguration, websocketSubProtocols);\n \n options.setHost(httpConfiguration.domainSocket);\n\n \n \n File file = new File(httpConfiguration.domainSocket);\n if (!file.getParentFile().canWrite()) {\n LOGGER.warnf(\n \"Unable to write in the domain socket directory (`%s`). Binding to the socket is likely going to fail.\",\n httpConfiguration.domainSocket);\n }\n\n return options;\n }\n\n private static HttpServerOptions createDomainSocketOptionsForManagementInterface(\n ManagementInterfaceBuildTimeConfig buildTimeConfig, ManagementInterfaceConfiguration httpConfiguration,\n List websocketSubProtocols) {\n if (!httpConfiguration.domainSocketEnabled) {\n return null;\n }\n HttpServerOptions options = new HttpServerOptions();\n\n HttpServerOptionsUtils.applyCommonOptionsForManagementInterface(options, buildTimeConfig, httpConfiguration,\n websocketSubProtocols);\n \n options.setHost(httpConfiguration.domainSocket);\n\n \n \n File file = new File(httpConfiguration.domainSocket);\n if (!file.getParentFile().canWrite()) {\n LOGGER.warnf(\n \"Unable to write in the domain socket directory (`%s`). 
Binding to the socket is likely going to fail.\",\n httpConfiguration.domainSocket);\n }\n\n return options;\n }\n\n public void addRoute(RuntimeValue router, Function route, Handler handler,\n HandlerType type) {\n\n Route vr = route.apply(router.getValue());\n if (type == HandlerType.BLOCKING) {\n vr.blockingHandler(handler, false);\n } else if (type == HandlerType.FAILURE) {\n vr.failureHandler(handler);\n } else {\n vr.handler(handler);\n }\n }\n\n public void setNonApplicationRedirectHandler(String nonApplicationPath, String rootPath) {\n nonApplicationRedirectHandler = new Handler() {\n @Override\n public void handle(RoutingContext context) {\n String absoluteURI = context.request().path();\n String target = absoluteURI.substring(rootPath.length());\n String redirectTo = nonApplicationPath + target;\n\n String query = context.request().query();\n if (query != null && !query.isEmpty()) {\n redirectTo += '?' + query;\n }\n\n context.response()\n .setStatusCode(HttpResponseStatus.MOVED_PERMANENTLY.code())\n .putHeader(HttpHeaderNames.LOCATION, redirectTo)\n .end();\n }\n };\n }\n\n public Handler getNonApplicationRedirectHandler() {\n return nonApplicationRedirectHandler;\n }\n\n public GracefulShutdownFilter createGracefulShutdownHandler() {\n return new GracefulShutdownFilter();\n }\n\n private static class WebDeploymentVerticle extends AbstractVerticle implements Resource {\n\n private HttpServer httpServer;\n private HttpServer httpsServer;\n private HttpServer domainSocketServer;\n private final HttpServerOptions httpOptions;\n private final HttpServerOptions httpsOptions;\n private final HttpServerOptions domainSocketOptions;\n private final LaunchMode launchMode;\n private volatile boolean clearHttpProperty = false;\n private volatile boolean clearHttpsProperty = false;\n private volatile Map portPropertiesToRestore;\n private final HttpConfiguration.InsecureRequests insecureRequests;\n private final HttpConfiguration quarkusConfig;\n private final 
AtomicInteger connectionCount;\n\n public WebDeploymentVerticle(HttpServerOptions httpOptions, HttpServerOptions httpsOptions,\n HttpServerOptions domainSocketOptions, LaunchMode launchMode,\n InsecureRequests insecureRequests, HttpConfiguration quarkusConfig, AtomicInteger connectionCount) {\n this.httpOptions = httpOptions;\n this.httpsOptions = httpsOptions;\n this.launchMode = launchMode;\n this.domainSocketOptions = domainSocketOptions;\n this.insecureRequests = insecureRequests;\n this.quarkusConfig = quarkusConfig;\n this.connectionCount = connectionCount;\n org.crac.Core.getGlobalContext().register(this);\n }\n\n @Override\n public void start(Promise startFuture) {\n final AtomicInteger remainingCount = new AtomicInteger(0);\n boolean httpServerEnabled = httpOptions != null && insecureRequests != HttpConfiguration.InsecureRequests.DISABLED;\n if (httpServerEnabled) {\n remainingCount.incrementAndGet();\n }\n if (httpsOptions != null) {\n remainingCount.incrementAndGet();\n }\n if (domainSocketOptions != null) {\n remainingCount.incrementAndGet();\n }\n\n if (remainingCount.get() == 0) {\n startFuture\n .fail(new IllegalArgumentException(\"Must configure at least one of http, https or unix domain socket\"));\n }\n\n if (httpServerEnabled) {\n httpServer = vertx.createHttpServer(httpOptions);\n if (insecureRequests == HttpConfiguration.InsecureRequests.ENABLED) {\n httpServer.requestHandler(ACTUAL_ROOT);\n } else {\n httpServer.requestHandler(new Handler() {\n @Override\n public void handle(HttpServerRequest req) {\n try {\n String host = req.getHeader(HttpHeaderNames.HOST);\n if (host == null) {\n \n req.response().setStatusCode(HttpResponseStatus.NOT_FOUND.code()).end();\n } else {\n int includedPort = host.indexOf(\":\");\n if (includedPort != -1) {\n host = host.substring(0, includedPort);\n }\n req.response()\n .setStatusCode(301)\n .putHeader(\"Location\",\n \"https:\n .end();\n }\n } catch (Exception e) {\n 
req.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()).end();\n }\n }\n });\n }\n setupTcpHttpServer(httpServer, httpOptions, false, startFuture, remainingCount, connectionCount);\n }\n\n if (domainSocketOptions != null) {\n domainSocketServer = vertx.createHttpServer(domainSocketOptions);\n domainSocketServer.requestHandler(ACTUAL_ROOT);\n setupUnixDomainSocketHttpServer(domainSocketServer, domainSocketOptions, startFuture, remainingCount);\n }\n\n if (httpsOptions != null) {\n httpsServer = vertx.createHttpServer(httpsOptions);\n httpsServer.requestHandler(ACTUAL_ROOT);\n setupTcpHttpServer(httpsServer, httpsOptions, true, startFuture, remainingCount, connectionCount);\n }\n }\n\n private void setupUnixDomainSocketHttpServer(HttpServer httpServer, HttpServerOptions options,\n Promise startFuture,\n AtomicInteger remainingCount) {\n httpServer.listen(SocketAddress.domainSocketAddress(options.getHost()), event -> {\n if (event.succeeded()) {\n if (remainingCount.decrementAndGet() == 0) {\n startFuture.complete(null);\n }\n } else {\n if (event.cause() != null && event.cause().getMessage() != null\n && event.cause().getMessage().contains(\"Permission denied\")) {\n startFuture.fail(new IllegalStateException(\n String.format(\n \"Unable to bind to Unix domain socket (%s) as the application does not have the permission to write in the directory.\",\n domainSocketOptions.getHost())));\n } else if (event.cause() instanceof IllegalArgumentException) {\n startFuture.fail(new IllegalArgumentException(\n String.format(\n \"Unable to bind to Unix domain socket. Consider adding the 'io.netty:%s' dependency. See the Quarkus Vert.x reference guide for more details.\",\n Utils.isLinux() ? 
\"netty-transport-native-epoll\" : \"netty-transport-native-kqueue\")));\n } else {\n startFuture.fail(event.cause());\n }\n }\n });\n }\n\n private void setupTcpHttpServer(HttpServer httpServer, HttpServerOptions options, boolean https,\n Promise startFuture, AtomicInteger remainingCount, AtomicInteger currentConnectionCount) {\n if (quarkusConfig.limits.maxConnections.isPresent() && quarkusConfig.limits.maxConnections.getAsInt() > 0) {\n final int maxConnections = quarkusConfig.limits.maxConnections.getAsInt();\n httpServer.connectionHandler(new Handler() {\n\n @Override\n public void handle(HttpConnection event) {\n int current;\n do {\n current = currentConnectionCount.get();\n if (current == maxConnections) {\n \n LOGGER.debug(\"Rejecting connection as there are too many active connections\");\n event.close();\n return;\n }\n } while (!currentConnectionCount.compareAndSet(current, current + 1));\n event.closeHandler(new Handler() {\n @Override\n public void handle(Void event) {\n LOGGER.debug(\"Connection closed\");\n connectionCount.decrementAndGet();\n }\n });\n }\n });\n }\n httpServer.listen(options.getPort(), options.getHost(), new Handler<>() {\n @Override\n public void handle(AsyncResult event) {\n if (event.cause() != null) {\n startFuture.fail(event.cause());\n } else {\n \n int actualPort = event.result().actualPort();\n\n if (https) {\n actualHttpsPort = actualPort;\n } else {\n actualHttpPort = actualPort;\n }\n if (remainingCount.decrementAndGet() == 0) {\n \n if (actualPort != options.getPort()) {\n \n String schema;\n if (https) {\n clearHttpsProperty = true;\n schema = \"https\";\n } else {\n clearHttpProperty = true;\n actualHttpPort = actualPort;\n schema = \"http\";\n }\n portPropertiesToRestore = new HashMap<>();\n String portPropertyValue = String.valueOf(actualPort);\n \n \n String portPropertyName = \"quarkus.\" + schema + \".port\";\n String prevPortPropertyValue = System.setProperty(portPropertyName, portPropertyValue);\n if 
(!Objects.equals(prevPortPropertyValue, portPropertyValue)) {\n portPropertiesToRestore.put(portPropertyName, prevPortPropertyValue);\n }\n if (launchMode == LaunchMode.TEST) {\n \n String testPropName = \"quarkus.\" + schema + \".test-port\";\n String prevTestPropPrevValue = System.setProperty(testPropName, portPropertyValue);\n if (!Objects.equals(prevTestPropPrevValue, portPropertyValue)) {\n portPropertiesToRestore.put(testPropName, prevTestPropPrevValue);\n }\n }\n if (launchMode.isDevOrTest()) {\n \n portPropertyName = propertyWithProfilePrefix(portPropertyName);\n prevPortPropertyValue = System.setProperty(portPropertyName, portPropertyValue);\n if (!Objects.equals(prevPortPropertyValue, portPropertyValue)) {\n portPropertiesToRestore.put(portPropertyName, prevPortPropertyValue);\n }\n }\n }\n startFuture.complete(null);\n }\n\n }\n }\n });\n }\n\n @Override\n public void stop(Promise stopFuture) {\n\n final AtomicInteger remainingCount = new AtomicInteger(0);\n if (httpServer != null) {\n remainingCount.incrementAndGet();\n }\n if (httpsServer != null) {\n remainingCount.incrementAndGet();\n }\n if (domainSocketServer != null) {\n remainingCount.incrementAndGet();\n }\n\n Handler> handleClose = event -> {\n if (remainingCount.decrementAndGet() == 0) {\n\n if (clearHttpProperty) {\n String portPropertyName = launchMode == LaunchMode.TEST ? \"quarkus.http.test-port\"\n : \"quarkus.http.port\";\n System.clearProperty(portPropertyName);\n if (launchMode.isDevOrTest()) {\n System.clearProperty(propertyWithProfilePrefix(portPropertyName));\n }\n\n }\n if (clearHttpsProperty) {\n String portPropertyName = launchMode == LaunchMode.TEST ? 
\"quarkus.http.test-ssl-port\"\n : \"quarkus.http.ssl-port\";\n System.clearProperty(portPropertyName);\n if (launchMode.isDevOrTest()) {\n System.clearProperty(propertyWithProfilePrefix(portPropertyName));\n }\n }\n if (portPropertiesToRestore != null) {\n for (Map.Entry entry : portPropertiesToRestore.entrySet()) {\n if (entry.getValue() == null) {\n System.clearProperty(entry.getKey());\n } else {\n System.setProperty(entry.getKey(), entry.getValue());\n }\n }\n }\n\n stopFuture.complete();\n }\n };\n\n if (httpServer != null) {\n httpServer.close(handleClose);\n }\n if (httpsServer != null) {\n httpsServer.close(handleClose);\n }\n if (domainSocketServer != null) {\n domainSocketServer.close(handleClose);\n }\n }\n\n private String propertyWithProfilePrefix(String portPropertyName) {\n return \"%\" + launchMode.getDefaultProfile() + \".\" + portPropertyName;\n }\n\n @Override\n public void beforeCheckpoint(org.crac.Context context) throws Exception {\n Promise p = Promise.promise();\n stop(p);\n CountDownLatch latch = new CountDownLatch(1);\n p.future().onComplete(event -> latch.countDown());\n latch.await();\n }\n\n @Override\n public void afterRestore(org.crac.Context context) throws Exception {\n Promise p = Promise.promise();\n start(p);\n CountDownLatch latch = new CountDownLatch(1);\n p.future().onComplete(event -> latch.countDown());\n latch.await();\n }\n\n }\n\n protected static ServerBootstrap virtualBootstrap;\n protected static ChannelFuture virtualBootstrapChannel;\n public static VirtualAddress VIRTUAL_HTTP = new VirtualAddress(\"netty-virtual-http\");\n\n private static void initializeVirtual(Vertx vertxRuntime) {\n if (virtualBootstrap != null) {\n return;\n }\n\n VertxInternal vertx = (VertxInternal) vertxRuntime;\n virtualBootstrap = new ServerBootstrap();\n virtualBootstrap.group(vertx.getEventLoopGroup())\n .channel(VirtualServerChannel.class)\n .handler(new ChannelInitializer() {\n @Override\n public void initChannel(VirtualServerChannel 
ch) throws Exception {\n \n }\n })\n .childHandler(new ChannelInitializer() {\n @Override\n public void initChannel(VirtualChannel ch) throws Exception {\n EventLoopContext context = vertx.createEventLoopContext();\n VertxHandler handler = VertxHandler.create(chctx -> {\n\n Http1xServerConnection conn = new Http1xServerConnection(\n () -> {\n ContextInternal internal = (ContextInternal) VertxContext\n .getOrCreateDuplicatedContext(context);\n setContextSafe(internal, true);\n return internal;\n },\n null,\n new HttpServerOptions(),\n chctx,\n context,\n \"localhost\",\n null);\n conn.handler(ACTUAL_ROOT);\n return conn;\n });\n\n ch.pipeline().addLast(\"handler\", handler);\n }\n });\n\n \n try {\n virtualBootstrapChannel = virtualBootstrap.bind(VIRTUAL_HTTP).sync();\n } catch (InterruptedException e) {\n throw new RuntimeException(\"failed to bind virtual http\");\n }\n\n }\n\n public static Handler getRootHandler() {\n return ACTUAL_ROOT;\n }\n\n /**\n * used in the live reload handler to make sure the application has not been changed by another source (e.g. 
reactive\n * messaging)\n */\n public static Object getCurrentApplicationState() {\n return rootHandler;\n }\n\n private static Handler configureAndGetBody(Optional maxBodySize, BodyConfig bodyConfig) {\n BodyHandler bodyHandler = BodyHandler.create();\n if (maxBodySize.isPresent()) {\n bodyHandler.setBodyLimit(maxBodySize.get().asLongValue());\n }\n bodyHandler.setHandleFileUploads(bodyConfig.handleFileUploads);\n bodyHandler.setUploadsDirectory(bodyConfig.uploadsDirectory);\n bodyHandler.setDeleteUploadedFilesOnEnd(bodyConfig.deleteUploadedFilesOnEnd);\n bodyHandler.setMergeFormAttributes(bodyConfig.mergeFormAttributes);\n bodyHandler.setPreallocateBodyBuffer(bodyConfig.preallocateBodyBuffer);\n return new Handler() {\n @Override\n public void handle(RoutingContext event) {\n if (!Context.isOnEventLoopThread()) {\n ((ConnectionBase) event.request().connection()).channel().eventLoop().execute(new Runnable() {\n @Override\n public void run() {\n try {\n \n if (!event.request().isEnded()) {\n event.request().resume();\n if (CAN_HAVE_BODY.contains(event.request().method())) {\n bodyHandler.handle(event);\n } else {\n event.next();\n }\n } else {\n event.next();\n }\n } catch (Throwable t) {\n event.fail(t);\n }\n }\n });\n } else {\n if (!event.request().isEnded()) {\n event.request().resume();\n }\n if (CAN_HAVE_BODY.contains(event.request().method())) {\n bodyHandler.handle(event);\n } else {\n event.next();\n }\n }\n }\n };\n }\n\n public Handler createBodyHandler() {\n Optional maxBodySize = httpConfiguration.getValue().limits.maxBodySize;\n return configureAndGetBody(maxBodySize, httpConfiguration.getValue().body);\n }\n\n public Handler createBodyHandlerForManagementInterface() {\n Optional maxBodySize = managementConfiguration.getValue().limits.maxBodySize;\n return configureAndGetBody(maxBodySize, managementConfiguration.getValue().body);\n }\n\n private static final List CAN_HAVE_BODY = Arrays.asList(HttpMethod.POST, HttpMethod.PUT, HttpMethod.PATCH,\n 
HttpMethod.DELETE);\n\n private BiConsumer processSameSiteConfig(Map httpConfiguration) {\n\n List> functions = new ArrayList<>();\n BiFunction last = null;\n\n for (Map.Entry entry : new TreeMap<>(httpConfiguration).entrySet()) {\n Pattern p = Pattern.compile(entry.getKey(), entry.getValue().caseSensitive ? 0 : Pattern.CASE_INSENSITIVE);\n BiFunction biFunction = new BiFunction() {\n @Override\n public Boolean apply(Cookie cookie, HttpServerRequest request) {\n if (p.matcher(cookie.getName()).matches()) {\n if (entry.getValue().value == CookieSameSite.NONE) {\n if (entry.getValue().enableClientChecker) {\n String userAgent = request.getHeader(HttpHeaders.USER_AGENT);\n if (userAgent != null\n && SameSiteNoneIncompatibleClientChecker.isSameSiteNoneIncompatible(userAgent)) {\n return false;\n }\n }\n if (entry.getValue().addSecureForNone) {\n cookie.setSecure(true);\n }\n }\n cookie.setSameSite(entry.getValue().value);\n return true;\n }\n return false;\n }\n };\n if (entry.getKey().equals(\".*\")) {\n \n last = biFunction;\n } else {\n functions.add(biFunction);\n }\n }\n if (last != null) {\n functions.add(last);\n }\n\n return new BiConsumer() {\n @Override\n public void accept(Cookie cookie, HttpServerRequest request) {\n for (BiFunction i : functions) {\n if (i.apply(cookie, request)) {\n return;\n }\n }\n }\n };\n }\n}", "context_after": "class VertxHttpRecorder {\n\n /**\n * The key that the request start time is stored under\n */\n public static final String REQUEST_START_TIME = \"io.quarkus.request-start-time\";\n\n public static final String MAX_REQUEST_SIZE_KEY = \"io.quarkus.max-request-size\";\n\n private static final String DISABLE_WEBSOCKETS_PROP_NAME = \"vertx.disableWebsockets\";\n\n /**\n * Order mark for route with priority over the default route (add an offset from this mark)\n **/\n public static final int BEFORE_DEFAULT_ROUTE_ORDER_MARK = 1_000;\n\n /**\n * Default route order (i.e. 
Static Resources, Servlet)\n **/\n public static final int DEFAULT_ROUTE_ORDER = 10_000;\n\n /**\n * Order mark for route without priority over the default route (add an offset from this mark)\n **/\n public static final int AFTER_DEFAULT_ROUTE_ORDER_MARK = 20_000;\n\n private static final Logger LOGGER = Logger.getLogger(VertxHttpRecorder.class.getName());\n\n private static volatile Handler hotReplacementHandler;\n private static volatile HotReplacementContext hotReplacementContext;\n private static volatile RemoteSyncHandler remoteSyncHandler;\n\n private static volatile Runnable closeTask;\n\n static volatile Handler rootHandler;\n\n private static volatile Handler nonApplicationRedirectHandler;\n\n private static volatile int actualHttpPort = -1;\n private static volatile int actualHttpsPort = -1;\n\n private static volatile int actualManagementPort = -1;\n\n public static final String GET = \"GET\";\n private static final Handler ACTUAL_ROOT = new Handler() {\n\n /** JVM system property that disables URI validation, don't use this in production. 
*/\n private static final String DISABLE_URI_VALIDATION_PROP_NAME = \"vertx.disableURIValidation\";\n /**\n * Disables HTTP headers validation, so we can save some processing and save some allocations.\n */\n private final boolean DISABLE_URI_VALIDATION = Boolean.getBoolean(DISABLE_URI_VALIDATION_PROP_NAME);\n\n @Override\n public void handle(HttpServerRequest httpServerRequest) {\n if (!uriValid(httpServerRequest)) {\n httpServerRequest.response().setStatusCode(400).end();\n return;\n }\n\n \n \n \n \n \n httpServerRequest.pause();\n Handler rh = VertxHttpRecorder.rootHandler;\n if (rh != null) {\n rh.handle(httpServerRequest);\n } else {\n \n httpServerRequest.resume();\n httpServerRequest.response().setStatusCode(503).end();\n }\n }\n\n private boolean uriValid(HttpServerRequest httpServerRequest) {\n if (DISABLE_URI_VALIDATION) {\n return true;\n }\n try {\n \n new URI(httpServerRequest.uri());\n return true;\n } catch (URISyntaxException e) {\n return false;\n }\n }\n };\n private static HttpServerOptions httpMainSslServerOptions;\n private static HttpServerOptions httpMainServerOptions;\n private static HttpServerOptions httpMainDomainSocketOptions;\n private static HttpServerOptions httpManagementServerOptions;\n final HttpBuildTimeConfig httpBuildTimeConfig;\n final ManagementInterfaceBuildTimeConfig managementBuildTimeConfig;\n final RuntimeValue httpConfiguration;\n\n final RuntimeValue managementConfiguration;\n private static volatile Handler managementRouter;\n\n public VertxHttpRecorder(HttpBuildTimeConfig httpBuildTimeConfig,\n ManagementInterfaceBuildTimeConfig managementBuildTimeConfig,\n RuntimeValue httpConfiguration,\n RuntimeValue managementConfiguration) {\n this.httpBuildTimeConfig = httpBuildTimeConfig;\n this.httpConfiguration = httpConfiguration;\n this.managementBuildTimeConfig = managementBuildTimeConfig;\n this.managementConfiguration = managementConfiguration;\n }\n\n public static void setHotReplacement(Handler handler, 
HotReplacementContext hrc) {\n hotReplacementHandler = handler;\n hotReplacementContext = hrc;\n }\n\n public static void shutDownDevMode() {\n if (closeTask != null) {\n closeTask.run();\n closeTask = null;\n }\n rootHandler = null;\n hotReplacementHandler = null;\n\n }\n\n \n\n public RuntimeValue initializeRouter(final Supplier vertxRuntimeValue) {\n Vertx vertx = vertxRuntimeValue.get();\n Router router = Router.router(vertx);\n return new RuntimeValue<>(router);\n }\n\n public RuntimeValue createMutinyRouter(final RuntimeValue router) {\n return new RuntimeValue<>(new io.vertx.mutiny.ext.web.Router(router.getValue()));\n }\n\n public void startServer(Supplier vertx, ShutdownContext shutdown,\n LaunchMode launchMode,\n boolean startVirtual, boolean startSocket, Supplier ioThreads, List websocketSubProtocols,\n boolean auxiliaryApplication, boolean disableWebSockets)\n throws IOException {\n\n \n if (disableWebSockets && !System.getProperties().containsKey(DISABLE_WEBSOCKETS_PROP_NAME)) {\n System.setProperty(DISABLE_WEBSOCKETS_PROP_NAME, \"true\");\n }\n\n if (startVirtual) {\n initializeVirtual(vertx.get());\n shutdown.addShutdownTask(() -> {\n try {\n virtualBootstrapChannel.channel().close().sync();\n } catch (InterruptedException e) {\n LOGGER.warn(\"Unable to close virtualBootstrapChannel\");\n } finally {\n virtualBootstrapChannel = null;\n virtualBootstrap = null;\n }\n });\n }\n HttpConfiguration httpConfiguration = this.httpConfiguration.getValue();\n ManagementInterfaceConfiguration managementConfig = this.managementConfiguration == null ? 
null\n : this.managementConfiguration.getValue();\n if (startSocket && (httpConfiguration.hostEnabled || httpConfiguration.domainSocketEnabled\n || managementConfig.hostEnabled || managementConfig.domainSocketEnabled)) {\n \n if (closeTask == null) {\n doServerStart(vertx.get(), httpBuildTimeConfig, managementBuildTimeConfig, managementRouter,\n httpConfiguration, managementConfig, launchMode, ioThreads, websocketSubProtocols,\n auxiliaryApplication);\n if (launchMode != LaunchMode.DEVELOPMENT) {\n shutdown.addShutdownTask(closeTask);\n } else {\n shutdown.addShutdownTask(new Runnable() {\n @Override\n public void run() {\n VertxHttpHotReplacementSetup.handleDevModeRestart();\n }\n });\n }\n }\n }\n }\n\n public void mountFrameworkRouter(RuntimeValue mainRouter, RuntimeValue frameworkRouter,\n String frameworkPath) {\n mainRouter.getValue().mountSubRouter(frameworkPath, frameworkRouter.getValue());\n }\n\n public void finalizeRouter(BeanContainer container, Consumer defaultRouteHandler,\n List filterList, List managementInterfaceFilterList, Supplier vertx,\n LiveReloadConfig liveReloadConfig, Optional> mainRouterRuntimeValue,\n RuntimeValue httpRouterRuntimeValue, RuntimeValue mutinyRouter,\n RuntimeValue frameworkRouter, RuntimeValue managementRouter,\n String rootPath, String nonRootPath,\n LaunchMode launchMode, boolean requireBodyHandler,\n Handler bodyHandler,\n GracefulShutdownFilter gracefulShutdownFilter, ShutdownConfig shutdownConfig,\n Executor executor) {\n HttpConfiguration httpConfiguration = this.httpConfiguration.getValue();\n \n Router httpRouteRouter = httpRouterRuntimeValue.getValue();\n\n \n Event event = Arc.container().beanManager().getEvent();\n\n \n Filters filters = new Filters();\n event.select(Filters.class).fire(filters);\n\n filterList.addAll(filters.getFilters());\n\n \n event.select(Router.class, Default.Literal.INSTANCE).fire(httpRouteRouter);\n \n event.select(io.vertx.mutiny.ext.web.Router.class).fire(mutinyRouter.getValue());\n\n 
for (Filter filter : filterList) {\n if (filter.getHandler() != null) {\n if (filter.isFailureHandler()) {\n \n httpRouteRouter.route().order(-1 * filter.getPriority()).failureHandler(filter.getHandler());\n } else {\n \n httpRouteRouter.route().order(-1 * filter.getPriority()).handler(filter.getHandler());\n }\n }\n }\n\n if (defaultRouteHandler != null) {\n defaultRouteHandler.accept(httpRouteRouter.route().order(DEFAULT_ROUTE_ORDER));\n }\n\n applyCompression(httpBuildTimeConfig.enableCompression, httpRouteRouter);\n httpRouteRouter.route().last().failureHandler(\n new QuarkusErrorHandler(launchMode.isDevOrTest(), httpConfiguration.unhandledErrorContentTypeDefault));\n\n if (requireBodyHandler) {\n \n \n httpRouteRouter.route().order(Integer.MIN_VALUE + 1).handler(new Handler() {\n @Override\n public void handle(RoutingContext routingContext) {\n routingContext.request().resume();\n bodyHandler.handle(routingContext);\n }\n });\n }\n\n HttpServerCommonHandlers.enforceMaxBodySize(httpConfiguration.limits, httpRouteRouter);\n \n var filtersInConfig = httpConfiguration.filter;\n HttpServerCommonHandlers.applyFilters(filtersInConfig, httpRouteRouter);\n \n HttpServerCommonHandlers.applyHeaders(httpConfiguration.header, httpRouteRouter);\n\n Handler root;\n if (rootPath.equals(\"/\")) {\n if (hotReplacementHandler != null) {\n \n ClassLoader currentCl = Thread.currentThread().getContextClassLoader();\n httpRouteRouter.route().order(Integer.MIN_VALUE).handler(new Handler() {\n @Override\n public void handle(RoutingContext event) {\n Thread.currentThread().setContextClassLoader(currentCl);\n hotReplacementHandler.handle(event);\n }\n });\n }\n root = httpRouteRouter;\n } else {\n Router mainRouter = mainRouterRuntimeValue.isPresent() ? 
mainRouterRuntimeValue.get().getValue()\n : Router.router(vertx.get());\n mainRouter.mountSubRouter(rootPath, httpRouteRouter);\n\n if (hotReplacementHandler != null) {\n ClassLoader currentCl = Thread.currentThread().getContextClassLoader();\n mainRouter.route().order(Integer.MIN_VALUE).handler(new Handler() {\n @Override\n public void handle(RoutingContext event) {\n Thread.currentThread().setContextClassLoader(currentCl);\n hotReplacementHandler.handle(event);\n }\n });\n }\n root = mainRouter;\n }\n\n warnIfProxyAddressForwardingAllowedWithMultipleHeaders(httpConfiguration.proxy);\n root = HttpServerCommonHandlers.applyProxy(httpConfiguration.proxy, root, vertx);\n\n boolean quarkusWrapperNeeded = false;\n\n if (shutdownConfig.isShutdownTimeoutSet()) {\n gracefulShutdownFilter.next(root);\n root = gracefulShutdownFilter;\n quarkusWrapperNeeded = true;\n }\n\n AccessLogConfig accessLog = httpConfiguration.accessLog;\n if (accessLog.enabled) {\n AccessLogReceiver receiver;\n if (accessLog.logToFile) {\n File outputDir = accessLog.logDirectory.isPresent() ? 
new File(accessLog.logDirectory.get()) : new File(\"\");\n receiver = new DefaultAccessLogReceiver(executor, outputDir, accessLog.baseFileName, accessLog.logSuffix,\n accessLog.rotate);\n } else {\n receiver = new JBossLoggingAccessLogReceiver(accessLog.category);\n }\n AccessLogHandler handler = new AccessLogHandler(receiver, accessLog.pattern, getClass().getClassLoader(),\n accessLog.excludePattern);\n if (rootPath.equals(\"/\") || nonRootPath.equals(\"/\")) {\n mainRouterRuntimeValue.orElse(httpRouterRuntimeValue).getValue().route().order(Integer.MIN_VALUE)\n .handler(handler);\n } else if (nonRootPath.startsWith(rootPath)) {\n httpRouteRouter.route().order(Integer.MIN_VALUE).handler(handler);\n } else if (rootPath.startsWith(nonRootPath)) {\n frameworkRouter.getValue().route().order(Integer.MIN_VALUE).handler(handler);\n } else {\n httpRouteRouter.route().order(Integer.MIN_VALUE).handler(handler);\n frameworkRouter.getValue().route().order(Integer.MIN_VALUE).handler(handler);\n }\n\n quarkusWrapperNeeded = true;\n }\n\n BiConsumer cookieFunction = null;\n if (!httpConfiguration.sameSiteCookie.isEmpty()) {\n cookieFunction = processSameSiteConfig(httpConfiguration.sameSiteCookie);\n quarkusWrapperNeeded = true;\n }\n BiConsumer cookieConsumer = cookieFunction;\n\n if (quarkusWrapperNeeded) {\n Handler old = root;\n root = new Handler() {\n @Override\n public void handle(HttpServerRequest event) {\n old.handle(new QuarkusRequestWrapper(event, cookieConsumer));\n }\n };\n }\n\n Handler delegate = root;\n root = HttpServerCommonHandlers.enforceDuplicatedContext(delegate);\n if (httpConfiguration.recordRequestStartTime) {\n httpRouteRouter.route().order(Integer.MIN_VALUE).handler(new Handler() {\n @Override\n public void handle(RoutingContext event) {\n event.put(REQUEST_START_TIME, System.nanoTime());\n event.next();\n }\n });\n }\n if (launchMode == LaunchMode.DEVELOPMENT && liveReloadConfig.password.isPresent()\n && hotReplacementContext.getDevModeType() == 
DevModeType.REMOTE_SERVER_SIDE) {\n root = remoteSyncHandler = new RemoteSyncHandler(liveReloadConfig.password.get(), root, hotReplacementContext);\n }\n rootHandler = root;\n\n if (managementRouter != null && managementRouter.getValue() != null) {\n \n var mr = managementRouter.getValue();\n\n mr.route().last().failureHandler(\n new QuarkusErrorHandler(launchMode.isDevOrTest(), httpConfiguration.unhandledErrorContentTypeDefault));\n\n mr.route().order(Integer.MIN_VALUE).handler(createBodyHandlerForManagementInterface());\n \n mr.route().order(Integer.MIN_VALUE).handler(CorsHandler.create().addOrigin(\"*\"));\n\n HttpServerCommonHandlers.applyFilters(managementConfiguration.getValue().filter, mr);\n for (Filter filter : managementInterfaceFilterList) {\n mr.route().order(filter.getPriority()).handler(filter.getHandler());\n }\n\n HttpServerCommonHandlers.applyHeaders(managementConfiguration.getValue().header, mr);\n HttpServerCommonHandlers.enforceMaxBodySize(managementConfiguration.getValue().limits, mr);\n applyCompression(managementBuildTimeConfig.enableCompression, mr);\n\n Handler handler = HttpServerCommonHandlers.enforceDuplicatedContext(mr);\n handler = HttpServerCommonHandlers.applyProxy(managementConfiguration.getValue().proxy, handler, vertx);\n\n event.select(ManagementInterface.class).fire(new ManagementInterfaceImpl(managementRouter.getValue()));\n\n VertxHttpRecorder.managementRouter = handler;\n }\n }\n\n private void applyCompression(boolean enableCompression, Router httpRouteRouter) {\n if (enableCompression) {\n httpRouteRouter.route().order(0).handler(new Handler() {\n @Override\n public void handle(RoutingContext ctx) {\n \n \n ctx.response().putHeader(HttpHeaders.CONTENT_ENCODING, HttpHeaders.IDENTITY);\n ctx.next();\n }\n });\n }\n }\n\n private void warnIfProxyAddressForwardingAllowedWithMultipleHeaders(ProxyConfig proxyConfig) {\n boolean proxyAddressForwardingActivated = proxyConfig.proxyAddressForwarding;\n boolean forwardedActivated = 
proxyConfig.allowForwarded;\n boolean xForwardedActivated = proxyConfig.allowXForwarded.orElse(!forwardedActivated);\n\n if (proxyAddressForwardingActivated && forwardedActivated && xForwardedActivated) {\n LOGGER.warn(\n \"The X-Forwarded-* and Forwarded headers will be considered when determining the proxy address. \" +\n \"This configuration can cause a security issue as clients can forge requests and send a \" +\n \"forwarded header that is not overwritten by the proxy. \" +\n \"Please consider use one of these headers just to forward the proxy address in requests.\");\n }\n }\n\n private static CompletableFuture initializeManagementInterfaceWithDomainSocket(Vertx vertx,\n ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler managementRouter,\n ManagementInterfaceConfiguration managementConfig,\n List websocketSubProtocols) {\n CompletableFuture managementInterfaceDomainSocketFuture = new CompletableFuture<>();\n if (!managementBuildTimeConfig.enabled || managementRouter == null || managementConfig == null) {\n managementInterfaceDomainSocketFuture.complete(null);\n return managementInterfaceDomainSocketFuture;\n }\n\n HttpServerOptions domainSocketOptionsForManagement = createDomainSocketOptionsForManagementInterface(\n managementBuildTimeConfig, managementConfig,\n websocketSubProtocols);\n if (domainSocketOptionsForManagement != null) {\n vertx.createHttpServer(domainSocketOptionsForManagement)\n .requestHandler(managementRouter)\n .listen(ar -> {\n if (ar.failed()) {\n managementInterfaceDomainSocketFuture.completeExceptionally(\n new IllegalStateException(\n \"Unable to start the management interface on the \"\n + domainSocketOptionsForManagement.getHost() + \" domain socket\",\n ar.cause()));\n } else {\n managementInterfaceDomainSocketFuture.complete(ar.result());\n }\n });\n } else {\n managementInterfaceDomainSocketFuture.complete(null);\n }\n return managementInterfaceDomainSocketFuture;\n }\n\n private static CompletableFuture 
initializeManagementInterface(Vertx vertx,\n ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler managementRouter,\n ManagementInterfaceConfiguration managementConfig,\n LaunchMode launchMode,\n List websocketSubProtocols) throws IOException {\n httpManagementServerOptions = null;\n CompletableFuture managementInterfaceFuture = new CompletableFuture<>();\n if (!managementBuildTimeConfig.enabled || managementRouter == null || managementConfig == null) {\n managementInterfaceFuture.complete(null);\n return managementInterfaceFuture;\n }\n\n HttpServerOptions httpServerOptionsForManagement = createHttpServerOptionsForManagementInterface(\n managementBuildTimeConfig, managementConfig, launchMode,\n websocketSubProtocols);\n httpManagementServerOptions = HttpServerOptionsUtils.createSslOptionsForManagementInterface(\n managementBuildTimeConfig, managementConfig, launchMode,\n websocketSubProtocols);\n if (httpManagementServerOptions != null && httpManagementServerOptions.getKeyCertOptions() == null) {\n httpManagementServerOptions = httpServerOptionsForManagement;\n }\n\n if (httpManagementServerOptions != null) {\n vertx.createHttpServer(httpManagementServerOptions)\n .requestHandler(managementRouter)\n .listen(ar -> {\n if (ar.failed()) {\n managementInterfaceFuture.completeExceptionally(\n new IllegalStateException(\"Unable to start the management interface\", ar.cause()));\n } else {\n actualManagementPort = ar.result().actualPort();\n managementInterfaceFuture.complete(ar.result());\n }\n });\n } else {\n managementInterfaceFuture.complete(null);\n }\n return managementInterfaceFuture;\n }\n\n private static CompletableFuture initializeMainHttpServer(Vertx vertx, HttpBuildTimeConfig httpBuildTimeConfig,\n HttpConfiguration httpConfiguration,\n LaunchMode launchMode,\n Supplier eventLoops, List websocketSubProtocols) throws IOException {\n\n if (!httpConfiguration.hostEnabled && !httpConfiguration.domainSocketEnabled) {\n return 
CompletableFuture.completedFuture(null);\n }\n\n \n httpMainServerOptions = createHttpServerOptions(httpBuildTimeConfig, httpConfiguration, launchMode,\n websocketSubProtocols);\n httpMainDomainSocketOptions = createDomainSocketOptions(httpBuildTimeConfig, httpConfiguration,\n websocketSubProtocols);\n HttpServerOptions tmpSslConfig = HttpServerOptionsUtils.createSslOptions(httpBuildTimeConfig, httpConfiguration,\n launchMode,\n websocketSubProtocols);\n\n \n if (Arc.container() != null) {\n List> instances = Arc.container()\n .listAll(HttpServerOptionsCustomizer.class);\n for (InstanceHandle instance : instances) {\n HttpServerOptionsCustomizer customizer = instance.get();\n if (httpMainServerOptions != null) {\n customizer.customizeHttpServer(httpMainServerOptions);\n }\n if (tmpSslConfig != null) {\n customizer.customizeHttpsServer(tmpSslConfig);\n }\n if (httpMainDomainSocketOptions != null) {\n customizer.customizeDomainSocketServer(httpMainDomainSocketOptions);\n }\n }\n }\n\n \n if (tmpSslConfig != null && tmpSslConfig.getKeyCertOptions() == null) {\n tmpSslConfig = null;\n }\n httpMainSslServerOptions = tmpSslConfig;\n\n if (httpConfiguration.insecureRequests != HttpConfiguration.InsecureRequests.ENABLED\n && httpMainSslServerOptions == null) {\n throw new IllegalStateException(\"Cannot set quarkus.http.redirect-insecure-requests without enabling SSL.\");\n }\n\n int eventLoopCount = eventLoops.get();\n final int ioThreads;\n if (httpConfiguration.ioThreads.isPresent()) {\n ioThreads = Math.min(httpConfiguration.ioThreads.getAsInt(), eventLoopCount);\n } else if (launchMode.isDevOrTest()) {\n ioThreads = Math.min(2, eventLoopCount); \n } else {\n ioThreads = eventLoopCount;\n }\n CompletableFuture futureResult = new CompletableFuture<>();\n\n AtomicInteger connectionCount = new AtomicInteger();\n vertx.deployVerticle(new Supplier() {\n @Override\n public Verticle get() {\n return new WebDeploymentVerticle(httpMainServerOptions, httpMainSslServerOptions, 
httpMainDomainSocketOptions,\n launchMode,\n httpConfiguration.insecureRequests, httpConfiguration, connectionCount);\n }\n }, new DeploymentOptions().setInstances(ioThreads), new Handler>() {\n @Override\n public void handle(AsyncResult event) {\n if (event.failed()) {\n Throwable effectiveCause = event.cause();\n if (effectiveCause instanceof BindException) {\n List portsUsed = Collections.emptyList();\n\n if ((httpMainSslServerOptions == null) && (httpMainServerOptions != null)) {\n portsUsed = List.of(httpMainServerOptions.getPort());\n } else if ((httpConfiguration.insecureRequests == InsecureRequests.DISABLED)\n && (httpMainSslServerOptions != null)) {\n portsUsed = List.of(httpMainSslServerOptions.getPort());\n } else if ((httpMainSslServerOptions != null)\n && (httpConfiguration.insecureRequests == InsecureRequests.ENABLED)\n && (httpMainServerOptions != null)) {\n portsUsed = List.of(httpMainServerOptions.getPort(), httpMainSslServerOptions.getPort());\n }\n\n effectiveCause = new QuarkusBindException((BindException) effectiveCause, portsUsed);\n }\n futureResult.completeExceptionally(effectiveCause);\n } else {\n futureResult.complete(event.result());\n }\n }\n });\n\n return futureResult;\n }\n\n private static void doServerStart(Vertx vertx, HttpBuildTimeConfig httpBuildTimeConfig,\n ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, Handler managementRouter,\n HttpConfiguration httpConfiguration, ManagementInterfaceConfiguration managementConfig,\n LaunchMode launchMode,\n Supplier eventLoops, List websocketSubProtocols, boolean auxiliaryApplication) throws IOException {\n\n var mainServerFuture = initializeMainHttpServer(vertx, httpBuildTimeConfig, httpConfiguration, launchMode, eventLoops,\n websocketSubProtocols);\n var managementInterfaceFuture = initializeManagementInterface(vertx, managementBuildTimeConfig, managementRouter,\n managementConfig, launchMode, websocketSubProtocols);\n var managementInterfaceDomainSocketFuture = 
initializeManagementInterfaceWithDomainSocket(vertx,\n managementBuildTimeConfig, managementRouter, managementConfig, websocketSubProtocols);\n\n try {\n String deploymentIdIfAny = mainServerFuture.get();\n\n HttpServer tmpManagementServer = null;\n HttpServer tmpManagementServerUsingDomainSocket = null;\n if (managementRouter != null) {\n tmpManagementServer = managementInterfaceFuture.get();\n tmpManagementServerUsingDomainSocket = managementInterfaceDomainSocketFuture.get();\n }\n HttpServer managementServer = tmpManagementServer;\n HttpServer managementServerDomainSocket = tmpManagementServerUsingDomainSocket;\n if (deploymentIdIfAny != null) {\n VertxCoreRecorder.setWebDeploymentId(deploymentIdIfAny);\n }\n closeTask = new Runnable() {\n @Override\n public synchronized void run() {\n \n if (closeTask == this) {\n boolean isVertxClose = ((VertxInternal) vertx).closeFuture().future().isComplete();\n int count = 0;\n if (deploymentIdIfAny != null && vertx.deploymentIDs().contains(deploymentIdIfAny)) {\n count++;\n }\n if (managementServer != null && !isVertxClose) {\n count++;\n }\n if (managementServerDomainSocket != null && !isVertxClose) {\n count++;\n }\n\n CountDownLatch latch = new CountDownLatch(count);\n var handler = new Handler>() {\n @Override\n public void handle(AsyncResult event) {\n latch.countDown();\n }\n };\n\n \n if (deploymentIdIfAny != null) {\n try {\n vertx.undeploy(deploymentIdIfAny, handler);\n } catch (Exception e) {\n if (e instanceof RejectedExecutionException) {\n \n LOGGER.debug(\"Failed to undeploy deployment because a task was rejected (due to shutdown)\",\n e);\n } else {\n LOGGER.warn(\"Failed to undeploy deployment\", e);\n }\n }\n }\n\n \n try {\n if (managementServer != null && !isVertxClose) {\n managementServer.close(handler);\n }\n if (managementServerDomainSocket != null && !isVertxClose) {\n managementServerDomainSocket.close(handler);\n }\n } catch (Exception e) {\n LOGGER.warn(\"Unable to shutdown the management 
interface quietly\", e);\n }\n\n try {\n latch.await();\n } catch (InterruptedException e) {\n throw new RuntimeException(e);\n }\n }\n closeTask = null;\n if (remoteSyncHandler != null) {\n remoteSyncHandler.close();\n remoteSyncHandler = null;\n }\n }\n };\n } catch (InterruptedException | ExecutionException e) {\n throw new RuntimeException(\"Unable to start HTTP server\", e);\n }\n\n setHttpServerTiming(httpConfiguration.insecureRequests, httpMainServerOptions, httpMainSslServerOptions,\n httpMainDomainSocketOptions,\n auxiliaryApplication, httpManagementServerOptions);\n }\n\n private static void setHttpServerTiming(InsecureRequests insecureRequests, HttpServerOptions httpServerOptions,\n HttpServerOptions sslConfig,\n HttpServerOptions domainSocketOptions, boolean auxiliaryApplication, HttpServerOptions managementConfig) {\n StringBuilder serverListeningMessage = new StringBuilder(\"Listening on: \");\n int socketCount = 0;\n\n if (httpServerOptions != null && !InsecureRequests.DISABLED.equals(insecureRequests)) {\n serverListeningMessage.append(String.format(\n \"http:\n socketCount++;\n }\n\n if (sslConfig != null) {\n if (socketCount > 0) {\n serverListeningMessage.append(\" and \");\n }\n serverListeningMessage.append(String.format(\"https:\n socketCount++;\n }\n\n if (domainSocketOptions != null) {\n if (socketCount > 0) {\n serverListeningMessage.append(\" and \");\n }\n serverListeningMessage.append(String.format(\"unix:%s\", domainSocketOptions.getHost()));\n }\n if (managementConfig != null) {\n serverListeningMessage.append(\n String.format(\". 
Management interface listening on http%s:\n managementConfig.getHost(), managementConfig.getPort()));\n }\n\n Timing.setHttpServer(serverListeningMessage.toString(), auxiliaryApplication);\n }\n\n private static HttpServerOptions createHttpServerOptions(\n HttpBuildTimeConfig buildTimeConfig, HttpConfiguration httpConfiguration,\n LaunchMode launchMode, List websocketSubProtocols) {\n if (!httpConfiguration.hostEnabled) {\n return null;\n }\n \n HttpServerOptions options = new HttpServerOptions();\n int port = httpConfiguration.determinePort(launchMode);\n options.setPort(port == 0 ? -1 : port);\n\n HttpServerOptionsUtils.applyCommonOptions(options, buildTimeConfig, httpConfiguration, websocketSubProtocols);\n\n return options;\n }\n\n private static HttpServerOptions createHttpServerOptionsForManagementInterface(\n ManagementInterfaceBuildTimeConfig buildTimeConfig, ManagementInterfaceConfiguration httpConfiguration,\n LaunchMode launchMode, List websocketSubProtocols) {\n if (!httpConfiguration.hostEnabled) {\n return null;\n }\n HttpServerOptions options = new HttpServerOptions();\n int port = httpConfiguration.determinePort(launchMode);\n options.setPort(port == 0 ? 
-1 : port);\n\n HttpServerOptionsUtils.applyCommonOptionsForManagementInterface(options, buildTimeConfig, httpConfiguration,\n websocketSubProtocols);\n\n return options;\n }\n\n private static HttpServerOptions createDomainSocketOptions(\n HttpBuildTimeConfig buildTimeConfig, HttpConfiguration httpConfiguration,\n List websocketSubProtocols) {\n if (!httpConfiguration.domainSocketEnabled) {\n return null;\n }\n HttpServerOptions options = new HttpServerOptions();\n\n HttpServerOptionsUtils.applyCommonOptions(options, buildTimeConfig, httpConfiguration, websocketSubProtocols);\n \n options.setHost(httpConfiguration.domainSocket);\n\n \n \n File file = new File(httpConfiguration.domainSocket);\n if (!file.getParentFile().canWrite()) {\n LOGGER.warnf(\n \"Unable to write in the domain socket directory (`%s`). Binding to the socket is likely going to fail.\",\n httpConfiguration.domainSocket);\n }\n\n return options;\n }\n\n private static HttpServerOptions createDomainSocketOptionsForManagementInterface(\n ManagementInterfaceBuildTimeConfig buildTimeConfig, ManagementInterfaceConfiguration httpConfiguration,\n List websocketSubProtocols) {\n if (!httpConfiguration.domainSocketEnabled) {\n return null;\n }\n HttpServerOptions options = new HttpServerOptions();\n\n HttpServerOptionsUtils.applyCommonOptionsForManagementInterface(options, buildTimeConfig, httpConfiguration,\n websocketSubProtocols);\n \n options.setHost(httpConfiguration.domainSocket);\n\n \n \n File file = new File(httpConfiguration.domainSocket);\n if (!file.getParentFile().canWrite()) {\n LOGGER.warnf(\n \"Unable to write in the domain socket directory (`%s`). 
Binding to the socket is likely going to fail.\",\n httpConfiguration.domainSocket);\n }\n\n return options;\n }\n\n public void addRoute(RuntimeValue router, Function route, Handler handler,\n HandlerType type) {\n\n Route vr = route.apply(router.getValue());\n if (type == HandlerType.BLOCKING) {\n vr.blockingHandler(handler, false);\n } else if (type == HandlerType.FAILURE) {\n vr.failureHandler(handler);\n } else {\n vr.handler(handler);\n }\n }\n\n public void setNonApplicationRedirectHandler(String nonApplicationPath, String rootPath) {\n nonApplicationRedirectHandler = new Handler() {\n @Override\n public void handle(RoutingContext context) {\n String absoluteURI = context.request().path();\n String target = absoluteURI.substring(rootPath.length());\n String redirectTo = nonApplicationPath + target;\n\n String query = context.request().query();\n if (query != null && !query.isEmpty()) {\n redirectTo += '?' + query;\n }\n\n context.response()\n .setStatusCode(HttpResponseStatus.MOVED_PERMANENTLY.code())\n .putHeader(HttpHeaderNames.LOCATION, redirectTo)\n .end();\n }\n };\n }\n\n public Handler getNonApplicationRedirectHandler() {\n return nonApplicationRedirectHandler;\n }\n\n public GracefulShutdownFilter createGracefulShutdownHandler() {\n return new GracefulShutdownFilter();\n }\n\n private static class WebDeploymentVerticle extends AbstractVerticle implements Resource {\n\n private HttpServer httpServer;\n private HttpServer httpsServer;\n private HttpServer domainSocketServer;\n private final HttpServerOptions httpOptions;\n private final HttpServerOptions httpsOptions;\n private final HttpServerOptions domainSocketOptions;\n private final LaunchMode launchMode;\n private volatile boolean clearHttpProperty = false;\n private volatile boolean clearHttpsProperty = false;\n private volatile PortSystemProperties portSystemProperties;\n private final HttpConfiguration.InsecureRequests insecureRequests;\n private final HttpConfiguration quarkusConfig;\n 
private final AtomicInteger connectionCount;\n\n public WebDeploymentVerticle(HttpServerOptions httpOptions, HttpServerOptions httpsOptions,\n HttpServerOptions domainSocketOptions, LaunchMode launchMode,\n InsecureRequests insecureRequests, HttpConfiguration quarkusConfig, AtomicInteger connectionCount) {\n this.httpOptions = httpOptions;\n this.httpsOptions = httpsOptions;\n this.launchMode = launchMode;\n this.domainSocketOptions = domainSocketOptions;\n this.insecureRequests = insecureRequests;\n this.quarkusConfig = quarkusConfig;\n this.connectionCount = connectionCount;\n org.crac.Core.getGlobalContext().register(this);\n }\n\n @Override\n public void start(Promise startFuture) {\n final AtomicInteger remainingCount = new AtomicInteger(0);\n boolean httpServerEnabled = httpOptions != null && insecureRequests != HttpConfiguration.InsecureRequests.DISABLED;\n if (httpServerEnabled) {\n remainingCount.incrementAndGet();\n }\n if (httpsOptions != null) {\n remainingCount.incrementAndGet();\n }\n if (domainSocketOptions != null) {\n remainingCount.incrementAndGet();\n }\n\n if (remainingCount.get() == 0) {\n startFuture\n .fail(new IllegalArgumentException(\"Must configure at least one of http, https or unix domain socket\"));\n }\n\n if (httpServerEnabled) {\n httpServer = vertx.createHttpServer(httpOptions);\n if (insecureRequests == HttpConfiguration.InsecureRequests.ENABLED) {\n httpServer.requestHandler(ACTUAL_ROOT);\n } else {\n httpServer.requestHandler(new Handler() {\n @Override\n public void handle(HttpServerRequest req) {\n try {\n String host = req.getHeader(HttpHeaderNames.HOST);\n if (host == null) {\n \n req.response().setStatusCode(HttpResponseStatus.NOT_FOUND.code()).end();\n } else {\n int includedPort = host.indexOf(\":\");\n if (includedPort != -1) {\n host = host.substring(0, includedPort);\n }\n req.response()\n .setStatusCode(301)\n .putHeader(\"Location\",\n \"https:\n .end();\n }\n } catch (Exception e) {\n 
req.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()).end();\n }\n }\n });\n }\n setupTcpHttpServer(httpServer, httpOptions, false, startFuture, remainingCount, connectionCount);\n }\n\n if (domainSocketOptions != null) {\n domainSocketServer = vertx.createHttpServer(domainSocketOptions);\n domainSocketServer.requestHandler(ACTUAL_ROOT);\n setupUnixDomainSocketHttpServer(domainSocketServer, domainSocketOptions, startFuture, remainingCount);\n }\n\n if (httpsOptions != null) {\n httpsServer = vertx.createHttpServer(httpsOptions);\n httpsServer.requestHandler(ACTUAL_ROOT);\n setupTcpHttpServer(httpsServer, httpsOptions, true, startFuture, remainingCount, connectionCount);\n }\n }\n\n private void setupUnixDomainSocketHttpServer(HttpServer httpServer, HttpServerOptions options,\n Promise startFuture,\n AtomicInteger remainingCount) {\n httpServer.listen(SocketAddress.domainSocketAddress(options.getHost()), event -> {\n if (event.succeeded()) {\n if (remainingCount.decrementAndGet() == 0) {\n startFuture.complete(null);\n }\n } else {\n if (event.cause() != null && event.cause().getMessage() != null\n && event.cause().getMessage().contains(\"Permission denied\")) {\n startFuture.fail(new IllegalStateException(\n String.format(\n \"Unable to bind to Unix domain socket (%s) as the application does not have the permission to write in the directory.\",\n domainSocketOptions.getHost())));\n } else if (event.cause() instanceof IllegalArgumentException) {\n startFuture.fail(new IllegalArgumentException(\n String.format(\n \"Unable to bind to Unix domain socket. Consider adding the 'io.netty:%s' dependency. See the Quarkus Vert.x reference guide for more details.\",\n Utils.isLinux() ? 
\"netty-transport-native-epoll\" : \"netty-transport-native-kqueue\")));\n } else {\n startFuture.fail(event.cause());\n }\n }\n });\n }\n\n private void setupTcpHttpServer(HttpServer httpServer, HttpServerOptions options, boolean https,\n Promise startFuture, AtomicInteger remainingCount, AtomicInteger currentConnectionCount) {\n if (quarkusConfig.limits.maxConnections.isPresent() && quarkusConfig.limits.maxConnections.getAsInt() > 0) {\n final int maxConnections = quarkusConfig.limits.maxConnections.getAsInt();\n httpServer.connectionHandler(new Handler() {\n\n @Override\n public void handle(HttpConnection event) {\n int current;\n do {\n current = currentConnectionCount.get();\n if (current == maxConnections) {\n \n LOGGER.debug(\"Rejecting connection as there are too many active connections\");\n event.close();\n return;\n }\n } while (!currentConnectionCount.compareAndSet(current, current + 1));\n event.closeHandler(new Handler() {\n @Override\n public void handle(Void event) {\n LOGGER.debug(\"Connection closed\");\n connectionCount.decrementAndGet();\n }\n });\n }\n });\n }\n httpServer.listen(options.getPort(), options.getHost(), new Handler<>() {\n @Override\n public void handle(AsyncResult event) {\n if (event.cause() != null) {\n startFuture.fail(event.cause());\n } else {\n \n int actualPort = event.result().actualPort();\n\n if (https) {\n actualHttpsPort = actualPort;\n } else {\n actualHttpPort = actualPort;\n }\n if (actualPort != options.getPort()) {\n \n String schema;\n if (https) {\n clearHttpsProperty = true;\n schema = \"https\";\n } else {\n clearHttpProperty = true;\n actualHttpPort = actualPort;\n schema = \"http\";\n }\n portSystemProperties = new PortSystemProperties();\n portSystemProperties.set(schema, actualPort, launchMode);\n }\n\n if (remainingCount.decrementAndGet() == 0) {\n \n startFuture.complete(null);\n }\n\n }\n }\n });\n }\n\n @Override\n public void stop(Promise stopFuture) {\n\n final AtomicInteger remainingCount = new 
AtomicInteger(0);\n if (httpServer != null) {\n remainingCount.incrementAndGet();\n }\n if (httpsServer != null) {\n remainingCount.incrementAndGet();\n }\n if (domainSocketServer != null) {\n remainingCount.incrementAndGet();\n }\n\n Handler> handleClose = event -> {\n if (remainingCount.decrementAndGet() == 0) {\n\n if (clearHttpProperty) {\n String portPropertyName = launchMode == LaunchMode.TEST ? \"quarkus.http.test-port\"\n : \"quarkus.http.port\";\n System.clearProperty(portPropertyName);\n if (launchMode.isDevOrTest()) {\n System.clearProperty(propertyWithProfilePrefix(portPropertyName));\n }\n\n }\n if (clearHttpsProperty) {\n String portPropertyName = launchMode == LaunchMode.TEST ? \"quarkus.http.test-ssl-port\"\n : \"quarkus.http.ssl-port\";\n System.clearProperty(portPropertyName);\n if (launchMode.isDevOrTest()) {\n System.clearProperty(propertyWithProfilePrefix(portPropertyName));\n }\n }\n if (portSystemProperties != null) {\n portSystemProperties.restore();\n }\n\n stopFuture.complete();\n }\n };\n\n if (httpServer != null) {\n httpServer.close(handleClose);\n }\n if (httpsServer != null) {\n httpsServer.close(handleClose);\n }\n if (domainSocketServer != null) {\n domainSocketServer.close(handleClose);\n }\n }\n\n private String propertyWithProfilePrefix(String portPropertyName) {\n return \"%\" + launchMode.getDefaultProfile() + \".\" + portPropertyName;\n }\n\n @Override\n public void beforeCheckpoint(org.crac.Context context) throws Exception {\n Promise p = Promise.promise();\n stop(p);\n CountDownLatch latch = new CountDownLatch(1);\n p.future().onComplete(event -> latch.countDown());\n latch.await();\n }\n\n @Override\n public void afterRestore(org.crac.Context context) throws Exception {\n Promise p = Promise.promise();\n start(p);\n CountDownLatch latch = new CountDownLatch(1);\n p.future().onComplete(event -> latch.countDown());\n latch.await();\n }\n\n }\n\n protected static ServerBootstrap virtualBootstrap;\n protected static 
ChannelFuture virtualBootstrapChannel;\n public static VirtualAddress VIRTUAL_HTTP = new VirtualAddress(\"netty-virtual-http\");\n\n private static void initializeVirtual(Vertx vertxRuntime) {\n if (virtualBootstrap != null) {\n return;\n }\n\n VertxInternal vertx = (VertxInternal) vertxRuntime;\n virtualBootstrap = new ServerBootstrap();\n virtualBootstrap.group(vertx.getEventLoopGroup())\n .channel(VirtualServerChannel.class)\n .handler(new ChannelInitializer() {\n @Override\n public void initChannel(VirtualServerChannel ch) throws Exception {\n \n }\n })\n .childHandler(new ChannelInitializer() {\n @Override\n public void initChannel(VirtualChannel ch) throws Exception {\n EventLoopContext context = vertx.createEventLoopContext();\n VertxHandler handler = VertxHandler.create(chctx -> {\n\n Http1xServerConnection conn = new Http1xServerConnection(\n () -> {\n ContextInternal internal = (ContextInternal) VertxContext\n .getOrCreateDuplicatedContext(context);\n setContextSafe(internal, true);\n return internal;\n },\n null,\n new HttpServerOptions(),\n chctx,\n context,\n \"localhost\",\n null);\n conn.handler(ACTUAL_ROOT);\n return conn;\n });\n\n ch.pipeline().addLast(\"handler\", handler);\n }\n });\n\n \n try {\n virtualBootstrapChannel = virtualBootstrap.bind(VIRTUAL_HTTP).sync();\n } catch (InterruptedException e) {\n throw new RuntimeException(\"failed to bind virtual http\");\n }\n\n }\n\n public static Handler getRootHandler() {\n return ACTUAL_ROOT;\n }\n\n /**\n * used in the live reload handler to make sure the application has not been changed by another source (e.g. 
reactive\n * messaging)\n */\n public static Object getCurrentApplicationState() {\n return rootHandler;\n }\n\n private static Handler configureAndGetBody(Optional maxBodySize, BodyConfig bodyConfig) {\n BodyHandler bodyHandler = BodyHandler.create();\n if (maxBodySize.isPresent()) {\n bodyHandler.setBodyLimit(maxBodySize.get().asLongValue());\n }\n bodyHandler.setHandleFileUploads(bodyConfig.handleFileUploads);\n bodyHandler.setUploadsDirectory(bodyConfig.uploadsDirectory);\n bodyHandler.setDeleteUploadedFilesOnEnd(bodyConfig.deleteUploadedFilesOnEnd);\n bodyHandler.setMergeFormAttributes(bodyConfig.mergeFormAttributes);\n bodyHandler.setPreallocateBodyBuffer(bodyConfig.preallocateBodyBuffer);\n return new Handler() {\n @Override\n public void handle(RoutingContext event) {\n if (!Context.isOnEventLoopThread()) {\n ((ConnectionBase) event.request().connection()).channel().eventLoop().execute(new Runnable() {\n @Override\n public void run() {\n try {\n \n if (!event.request().isEnded()) {\n event.request().resume();\n if (CAN_HAVE_BODY.contains(event.request().method())) {\n bodyHandler.handle(event);\n } else {\n event.next();\n }\n } else {\n event.next();\n }\n } catch (Throwable t) {\n event.fail(t);\n }\n }\n });\n } else {\n if (!event.request().isEnded()) {\n event.request().resume();\n }\n if (CAN_HAVE_BODY.contains(event.request().method())) {\n bodyHandler.handle(event);\n } else {\n event.next();\n }\n }\n }\n };\n }\n\n public Handler createBodyHandler() {\n Optional maxBodySize = httpConfiguration.getValue().limits.maxBodySize;\n return configureAndGetBody(maxBodySize, httpConfiguration.getValue().body);\n }\n\n public Handler createBodyHandlerForManagementInterface() {\n Optional maxBodySize = managementConfiguration.getValue().limits.maxBodySize;\n return configureAndGetBody(maxBodySize, managementConfiguration.getValue().body);\n }\n\n private static final List CAN_HAVE_BODY = Arrays.asList(HttpMethod.POST, HttpMethod.PUT, HttpMethod.PATCH,\n 
HttpMethod.DELETE);\n\n private BiConsumer processSameSiteConfig(Map httpConfiguration) {\n\n List> functions = new ArrayList<>();\n BiFunction last = null;\n\n for (Map.Entry entry : new TreeMap<>(httpConfiguration).entrySet()) {\n Pattern p = Pattern.compile(entry.getKey(), entry.getValue().caseSensitive ? 0 : Pattern.CASE_INSENSITIVE);\n BiFunction biFunction = new BiFunction() {\n @Override\n public Boolean apply(Cookie cookie, HttpServerRequest request) {\n if (p.matcher(cookie.getName()).matches()) {\n if (entry.getValue().value == CookieSameSite.NONE) {\n if (entry.getValue().enableClientChecker) {\n String userAgent = request.getHeader(HttpHeaders.USER_AGENT);\n if (userAgent != null\n && SameSiteNoneIncompatibleClientChecker.isSameSiteNoneIncompatible(userAgent)) {\n return false;\n }\n }\n if (entry.getValue().addSecureForNone) {\n cookie.setSecure(true);\n }\n }\n cookie.setSameSite(entry.getValue().value);\n return true;\n }\n return false;\n }\n };\n if (entry.getKey().equals(\".*\")) {\n \n last = biFunction;\n } else {\n functions.add(biFunction);\n }\n }\n if (last != null) {\n functions.add(last);\n }\n\n return new BiConsumer() {\n @Override\n public void accept(Cookie cookie, HttpServerRequest request) {\n for (BiFunction i : functions) {\n if (i.apply(cookie, request)) {\n return;\n }\n }\n }\n };\n }\n}" }, { "comment": "Done. There was an issue with try-with-resources closing the output stream before writing to the file. 
Due to that I separately closed both the output stream and the file writer inside the finally block.", "method_body": "private static void writeFile(String filePath, String content) throws IOException {\n OutputStreamWriter fileWriter = null;\n try {\n File newFile = new File(filePath);\n FileOutputStream fileStream = new FileOutputStream(newFile);\n fileWriter = new OutputStreamWriter(fileStream, StandardCharsets.UTF_8);\n fileWriter.write(content);\n } finally {\n if (fileWriter != null) {\n fileWriter.close();\n }\n }\n }", "target_code": "FileOutputStream fileStream = new FileOutputStream(newFile);", "method_body_after": "private static void writeFile(String filePath, String content) throws IOException {\n OutputStreamWriter fileWriter = null;\n FileOutputStream fileStream = null;\n try {\n File newFile = new File(filePath);\n fileStream = new FileOutputStream(newFile);\n fileWriter = new OutputStreamWriter(fileStream, StandardCharsets.UTF_8);\n fileWriter.write(content);\n } finally {\n if (fileWriter != null) {\n fileWriter.close();\n }\n if (fileStream != null) {\n fileStream.close();\n }\n }\n }", "context_before": "class FormatUtil {\n static final String CMD_NAME = \"format\";\n private static final PrintStream outStream = System.err;\n private static EmptyPrintStream emptyPrintStream;\n\n /**\n * Execute formatter.\n *\n * @param argList argument list from the console\n * @param helpFlag flag to get the help page\n * @param dryRun run the whole formatting\n * @param sourceRootPath execution path\n */\n static void execute(List argList, boolean helpFlag, boolean dryRun, Path sourceRootPath) {\n if (helpFlag) {\n String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(CMD_NAME);\n outStream.println(commandUsageInfo);\n return;\n }\n\n if (argList != null && argList.size() > 1) {\n throw LauncherUtils.createLauncherException(Messages.getArgumentError());\n }\n\n String moduleName;\n String ballerinaFilePath;\n\n try {\n \n \n if (argList != null && 
!argList.isEmpty()) {\n if (FormatUtil.isBalFile(argList.get(0))) {\n ballerinaFilePath = argList.get(0);\n Path filePath = Paths.get(ballerinaFilePath);\n\n \n if (!filePath.toFile().exists() || filePath.toFile().isDirectory()) {\n throw LauncherUtils.createLauncherException(Messages.getNoBallerinaFile(ballerinaFilePath));\n }\n\n String source = new String(Files.readAllBytes(filePath), StandardCharsets.UTF_8);\n \n String formattedSourceCode = Formatter.format(source);\n\n if (areChangesAvailable(source, formattedSourceCode)) {\n if (!dryRun) {\n \n FormatUtil.writeFile(filePath.toAbsolutePath().toString(), formattedSourceCode);\n outStream.println(Messages.getModifiedFiles() + System.lineSeparator() + ballerinaFilePath);\n outStream.println(System.lineSeparator() + Messages.getSuccessMessage());\n } else {\n outStream.println(Messages.getFilesToModify() + System.lineSeparator() + ballerinaFilePath);\n }\n } else {\n outStream.println(Messages.getNoChanges());\n }\n } else if (Paths.get(argList.get(0)).toFile().isFile()) {\n \n \n throw LauncherUtils.createLauncherException(Messages.getNotABallerinaFile());\n } else {\n moduleName = argList.get(0);\n\n \n if (!FormatUtil.isModuleExist(moduleName, sourceRootPath)) {\n \n \n \n if (moduleName.contains(\".\")) {\n throw LauncherUtils.createLauncherException(Messages\n .getNoBallerinaModuleOrFile(moduleName));\n } else {\n throw LauncherUtils.createLauncherException(Messages.getNoModuleFound(moduleName));\n }\n }\n\n \n if (!FormatUtil.isBallerinaProject(sourceRootPath)) {\n throw LauncherUtils.createLauncherException(Messages.getNotBallerinaProject());\n }\n BLangPackage bLangPackage = FormatUtil\n .compileModule(sourceRootPath, getModuleName(moduleName));\n\n \n List formattedFiles = iterateAndFormat(bLangPackage, sourceRootPath, dryRun);\n generateChangeReport(formattedFiles, dryRun);\n }\n } else {\n List packages = FormatUtil.compileProject(sourceRootPath);\n List formattedFiles = new ArrayList<>();\n \n for 
(BLangPackage bLangPackage : packages) {\n formattedFiles.addAll(iterateAndFormat(bLangPackage, sourceRootPath, dryRun));\n }\n\n generateChangeReport(formattedFiles, dryRun);\n }\n } catch (IOException | NullPointerException e) {\n throw LauncherUtils.createLauncherException(Messages.getException() + e);\n }\n }\n\n private static void generateChangeReport(List formattedFiles, boolean dryRun) {\n if (!formattedFiles.isEmpty()) {\n StringBuilder fileList = new StringBuilder();\n if (dryRun) {\n fileList.append(Messages.getFilesToModify()).append(System.lineSeparator());\n } else {\n fileList.append(Messages.getModifiedFiles()).append(System.lineSeparator());\n }\n for (String file : formattedFiles) {\n fileList.append(file).append(System.lineSeparator());\n }\n outStream.println(fileList.toString());\n if (!dryRun) {\n outStream.println(Messages.getSuccessMessage());\n }\n } else {\n outStream.println(Messages.getNoChanges());\n }\n }\n\n private static String getModuleName(String moduleName) {\n String pattern = Pattern.quote(File.separator);\n String[] splitedTokens = moduleName.split(pattern);\n return splitedTokens[splitedTokens.length - 1];\n }\n\n /**\n * Compile whole ballerina project.\n *\n * @param sourceRoot source root\n * @return {@link List} list of BLangPackages\n */\n private static List compileProject(Path sourceRoot) throws UnsupportedEncodingException {\n emptyPrintStream = new EmptyPrintStream();\n CompilerContext context = getCompilerContext(sourceRoot);\n Compiler compiler = Compiler.getInstance(context);\n \n compiler.setOutStream(emptyPrintStream);\n return compiler.compilePackages(false);\n }\n\n /**\n * Compile only a ballerina module.\n *\n * @param sourceRoot source root\n * @param moduleName name of the module to be compiled\n * @return {@link BLangPackage} ballerina package\n */\n private static BLangPackage compileModule(Path sourceRoot, String moduleName) throws UnsupportedEncodingException {\n emptyPrintStream = new 
EmptyPrintStream();\n CompilerContext context = getCompilerContext(sourceRoot);\n Compiler compiler = Compiler.getInstance(context);\n \n compiler.setOutStream(emptyPrintStream);\n return compiler.compile(moduleName);\n }\n\n private static void formatAndWrite(BLangCompilationUnit compilationUnit, Path sourceRootPath,\n List formattedFiles, boolean dryRun) throws IOException {\n String fileName = Paths.get(sourceRootPath.toString()).resolve(\"src\")\n .resolve(compilationUnit.getPosition().getSource().getPackageName())\n .resolve(compilationUnit.getPosition().getSource().getCompilationUnitName()).toString();\n\n String originalSource = new String(Files.readAllBytes(Paths.get(fileName)), StandardCharsets.UTF_8);\n \n String formattedSource = Formatter.format(originalSource);\n\n if (areChangesAvailable(originalSource, formattedSource)) {\n if (!dryRun) {\n \n FormatUtil.writeFile(fileName, formattedSource);\n }\n formattedFiles.add(fileName);\n }\n }\n\n private static List iterateAndFormat(BLangPackage bLangPackage, Path sourceRootPath, boolean dryRun)\n throws IOException {\n List formattedFiles = new ArrayList<>();\n\n \n for (BLangCompilationUnit compilationUnit : bLangPackage.getCompilationUnits()) {\n formatAndWrite(compilationUnit, sourceRootPath, formattedFiles, dryRun);\n }\n\n \n for (BLangTestablePackage testablePackage : bLangPackage.getTestablePkgs()) {\n for (BLangCompilationUnit compilationUnit : testablePackage.getCompilationUnits()) {\n formatAndWrite(compilationUnit, sourceRootPath, formattedFiles, dryRun);\n }\n }\n\n return formattedFiles;\n }\n\n /**\n * Get prepared compiler context.\n *\n * @param sourceRootPath ballerina compilable source root path\n * @return {@link CompilerContext} compiler context\n */\n private static CompilerContext getCompilerContext(Path sourceRootPath) {\n CompilerPhase compilerPhase = CompilerPhase.DEFINE;\n CompilerContext context = new CompilerContext();\n CompilerOptions options = 
CompilerOptions.getInstance(context);\n options.put(PROJECT_DIR, sourceRootPath.toString());\n options.put(OFFLINE, Boolean.toString(false));\n options.put(COMPILER_PHASE, compilerPhase.toString());\n options.put(SKIP_TESTS, Boolean.toString(false));\n options.put(TEST_ENABLED, \"true\");\n options.put(LOCK_ENABLED, Boolean.toString(false));\n options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(true));\n options.put(PRESERVE_WHITESPACE, Boolean.toString(true));\n\n return context;\n }\n\n /**\n * Check whether the given module name exists.\n *\n * @param module module name\n * @param projectRoot path of the ballerina project root\n * @return {@link Boolean} true or false\n */\n private static boolean isModuleExist(String module, Path projectRoot) {\n Path modulePath;\n if (module.startsWith(\"src/\")) {\n modulePath = projectRoot.resolve(module);\n } else {\n modulePath = projectRoot.resolve(\"src\").resolve(module);\n }\n\n return modulePath.toFile().isDirectory();\n }\n\n /**\n * Check whether the given path is a source root of a ballerina project.\n *\n * @param path - path where the command is executed from\n * @return {@link boolean} true or false\n */\n private static boolean isBallerinaProject(Path path) {\n Path cachePath = path.resolve(\"Ballerina.toml\");\n return cachePath.toFile().exists();\n }\n\n /**\n * Write content to a file.\n *\n * @param filePath - path of the file to add the content\n * @param content - content to be added to the file\n * @throws IOException - throws and IO exception\n */\n \n\n private static boolean areChangesAvailable(String originalSource, String formattedSource) {\n return !originalSource.equals(formattedSource);\n }\n\n /**\n * Check whether the given file is a ballerina file.\n *\n * @param fileName file name to be check whether a ballerina file\n * @return {@link Boolean} true or false\n */\n private static boolean isBalFile(String fileName) {\n return fileName.endsWith(\".bal\");\n }\n\n\n /**\n * Empty print 
stream extending the print stream.\n */\n static class EmptyPrintStream extends PrintStream {\n EmptyPrintStream() throws UnsupportedEncodingException {\n super(new OutputStream() {\n @Override\n public void write(int b) {\n }\n }, true, \"UTF-8\");\n }\n }\n}", "context_after": "class FormatUtil {\n static final String CMD_NAME = \"format\";\n private static final PrintStream outStream = System.err;\n private static EmptyPrintStream emptyPrintStream;\n\n /**\n * Execute formatter.\n *\n * @param argList argument list from the console\n * @param helpFlag flag to get the help page\n * @param dryRun run the whole formatting\n * @param sourceRootPath execution path\n */\n static void execute(List argList, boolean helpFlag, boolean dryRun, Path sourceRootPath) {\n if (helpFlag) {\n String commandUsageInfo = BLauncherCmd.getCommandUsageInfo(CMD_NAME);\n outStream.println(commandUsageInfo);\n return;\n }\n\n if (argList != null && argList.size() > 1) {\n throw LauncherUtils.createLauncherException(Messages.getArgumentError());\n }\n\n String moduleName;\n String ballerinaFilePath;\n\n try {\n \n \n if (argList != null && !argList.isEmpty()) {\n if (FormatUtil.isBalFile(argList.get(0))) {\n ballerinaFilePath = argList.get(0);\n Path filePath = Paths.get(ballerinaFilePath);\n\n \n if (!filePath.toFile().exists() || filePath.toFile().isDirectory()) {\n throw LauncherUtils.createLauncherException(Messages.getNoBallerinaFile(ballerinaFilePath));\n }\n\n String source = new String(Files.readAllBytes(filePath), StandardCharsets.UTF_8);\n \n String formattedSourceCode = Formatter.format(source);\n\n if (areChangesAvailable(source, formattedSourceCode)) {\n if (!dryRun) {\n \n FormatUtil.writeFile(filePath.toAbsolutePath().toString(), formattedSourceCode);\n outStream.println(Messages.getModifiedFiles() + System.lineSeparator() + ballerinaFilePath);\n outStream.println(System.lineSeparator() + Messages.getSuccessMessage());\n } else {\n 
outStream.println(Messages.getFilesToModify() + System.lineSeparator() + ballerinaFilePath);\n }\n } else {\n outStream.println(Messages.getNoChanges());\n }\n } else if (Paths.get(argList.get(0)).toFile().isFile()) {\n \n \n throw LauncherUtils.createLauncherException(Messages.getNotABallerinaFile());\n } else {\n moduleName = argList.get(0);\n\n \n if (!FormatUtil.isModuleExist(moduleName, sourceRootPath)) {\n \n \n \n if (moduleName.contains(\".\")) {\n throw LauncherUtils.createLauncherException(Messages\n .getNoBallerinaModuleOrFile(moduleName));\n } else {\n throw LauncherUtils.createLauncherException(Messages.getNoModuleFound(moduleName));\n }\n }\n\n \n if (!FormatUtil.isBallerinaProject(sourceRootPath)) {\n throw LauncherUtils.createLauncherException(Messages.getNotBallerinaProject());\n }\n BLangPackage bLangPackage = FormatUtil\n .compileModule(sourceRootPath, getModuleName(moduleName));\n\n \n List formattedFiles = iterateAndFormat(bLangPackage, sourceRootPath, dryRun);\n generateChangeReport(formattedFiles, dryRun);\n }\n } else {\n List packages = FormatUtil.compileProject(sourceRootPath);\n List formattedFiles = new ArrayList<>();\n \n for (BLangPackage bLangPackage : packages) {\n formattedFiles.addAll(iterateAndFormat(bLangPackage, sourceRootPath, dryRun));\n }\n\n generateChangeReport(formattedFiles, dryRun);\n }\n } catch (IOException | NullPointerException e) {\n throw LauncherUtils.createLauncherException(Messages.getException() + e);\n }\n }\n\n private static void generateChangeReport(List formattedFiles, boolean dryRun) {\n if (!formattedFiles.isEmpty()) {\n StringBuilder fileList = new StringBuilder();\n if (dryRun) {\n fileList.append(Messages.getFilesToModify()).append(System.lineSeparator());\n } else {\n fileList.append(Messages.getModifiedFiles()).append(System.lineSeparator());\n }\n for (String file : formattedFiles) {\n fileList.append(file).append(System.lineSeparator());\n }\n outStream.println(fileList.toString());\n if (!dryRun) 
{\n outStream.println(Messages.getSuccessMessage());\n }\n } else {\n outStream.println(Messages.getNoChanges());\n }\n }\n\n private static String getModuleName(String moduleName) {\n String pattern = Pattern.quote(File.separator);\n String[] splitedTokens = moduleName.split(pattern);\n return splitedTokens[splitedTokens.length - 1];\n }\n\n /**\n * Compile whole ballerina project.\n *\n * @param sourceRoot source root\n * @return {@link List} list of BLangPackages\n */\n private static List compileProject(Path sourceRoot) throws UnsupportedEncodingException {\n emptyPrintStream = new EmptyPrintStream();\n CompilerContext context = getCompilerContext(sourceRoot);\n Compiler compiler = Compiler.getInstance(context);\n \n compiler.setOutStream(emptyPrintStream);\n return compiler.compilePackages(false);\n }\n\n /**\n * Compile only a ballerina module.\n *\n * @param sourceRoot source root\n * @param moduleName name of the module to be compiled\n * @return {@link BLangPackage} ballerina package\n */\n private static BLangPackage compileModule(Path sourceRoot, String moduleName) throws UnsupportedEncodingException {\n emptyPrintStream = new EmptyPrintStream();\n CompilerContext context = getCompilerContext(sourceRoot);\n Compiler compiler = Compiler.getInstance(context);\n \n compiler.setOutStream(emptyPrintStream);\n return compiler.compile(moduleName);\n }\n\n private static void formatAndWrite(BLangCompilationUnit compilationUnit, Path sourceRootPath,\n List formattedFiles, boolean dryRun) throws IOException {\n String fileName = Paths.get(sourceRootPath.toString()).resolve(\"src\")\n .resolve(compilationUnit.getPosition().getSource().getPackageName())\n .resolve(compilationUnit.getPosition().getSource().getCompilationUnitName()).toString();\n\n String originalSource = new String(Files.readAllBytes(Paths.get(fileName)), StandardCharsets.UTF_8);\n \n String formattedSource = Formatter.format(originalSource);\n\n if (areChangesAvailable(originalSource, 
formattedSource)) {\n if (!dryRun) {\n \n FormatUtil.writeFile(fileName, formattedSource);\n }\n formattedFiles.add(fileName);\n }\n }\n\n private static List iterateAndFormat(BLangPackage bLangPackage, Path sourceRootPath, boolean dryRun)\n throws IOException {\n List formattedFiles = new ArrayList<>();\n\n \n for (BLangCompilationUnit compilationUnit : bLangPackage.getCompilationUnits()) {\n formatAndWrite(compilationUnit, sourceRootPath, formattedFiles, dryRun);\n }\n\n \n for (BLangTestablePackage testablePackage : bLangPackage.getTestablePkgs()) {\n for (BLangCompilationUnit compilationUnit : testablePackage.getCompilationUnits()) {\n formatAndWrite(compilationUnit, sourceRootPath, formattedFiles, dryRun);\n }\n }\n\n return formattedFiles;\n }\n\n /**\n * Get prepared compiler context.\n *\n * @param sourceRootPath ballerina compilable source root path\n * @return {@link CompilerContext} compiler context\n */\n private static CompilerContext getCompilerContext(Path sourceRootPath) {\n CompilerPhase compilerPhase = CompilerPhase.DEFINE;\n CompilerContext context = new CompilerContext();\n CompilerOptions options = CompilerOptions.getInstance(context);\n options.put(PROJECT_DIR, sourceRootPath.toString());\n options.put(OFFLINE, Boolean.toString(false));\n options.put(COMPILER_PHASE, compilerPhase.toString());\n options.put(SKIP_TESTS, Boolean.toString(false));\n options.put(TEST_ENABLED, \"true\");\n options.put(LOCK_ENABLED, Boolean.toString(false));\n options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(true));\n options.put(PRESERVE_WHITESPACE, Boolean.toString(true));\n\n return context;\n }\n\n /**\n * Check whether the given module name exists.\n *\n * @param module module name\n * @param projectRoot path of the ballerina project root\n * @return {@link Boolean} true or false\n */\n private static boolean isModuleExist(String module, Path projectRoot) {\n Path modulePath;\n if (module.startsWith(\"src/\")) {\n modulePath = 
projectRoot.resolve(module);\n } else {\n modulePath = projectRoot.resolve(\"src\").resolve(module);\n }\n\n return modulePath.toFile().isDirectory();\n }\n\n /**\n * Check whether the given path is a source root of a ballerina project.\n *\n * @param path - path where the command is executed from\n * @return {@link boolean} true or false\n */\n private static boolean isBallerinaProject(Path path) {\n Path cachePath = path.resolve(\"Ballerina.toml\");\n return cachePath.toFile().exists();\n }\n\n /**\n * Write content to a file.\n *\n * @param filePath - path of the file to add the content\n * @param content - content to be added to the file\n * @throws IOException - throws and IO exception\n */\n \n\n private static boolean areChangesAvailable(String originalSource, String formattedSource) {\n return !originalSource.equals(formattedSource);\n }\n\n /**\n * Check whether the given file is a ballerina file.\n *\n * @param fileName file name to be check whether a ballerina file\n * @return {@link Boolean} true or false\n */\n private static boolean isBalFile(String fileName) {\n return fileName.endsWith(\".bal\");\n }\n\n\n /**\n * Empty print stream extending the print stream.\n */\n static class EmptyPrintStream extends PrintStream {\n EmptyPrintStream() throws UnsupportedEncodingException {\n super(new OutputStream() {\n @Override\n public void write(int b) {\n }\n }, true, \"UTF-8\");\n }\n }\n}" }, { "comment": "~~unlikely; and sub-second interval will just overload whatever external system you have.~~", "method_body": "public MetricRegistryImpl(MetricRegistryConfiguration config, Collection reporterConfigurations) {\n\t\tthis.maximumFramesize = config.getQueryServiceMessageSizeLimit();\n\t\tthis.scopeFormats = config.getScopeFormats();\n\t\tthis.globalDelimiter = config.getDelimiter();\n\t\tthis.terminationFuture = new CompletableFuture<>();\n\t\tthis.isShutdown = false;\n\n\t\t\n\t\tthis.reporters = new ArrayList<>(4);\n\n\t\tthis.executor = 
Executors.newSingleThreadScheduledExecutor(new ExecutorThreadFactory(\"Flink-MetricRegistry\"));\n\n\t\tthis.queryService = null;\n\t\tthis.metricQueryServiceRpcService = null;\n\n\t\tif (reporterConfigurations.isEmpty()) {\n\t\t\t\n\t\t\t\n\t\t\tLOG.info(\"No metrics reporter configured, no metrics will be exposed/reported.\");\n\t\t} else {\n\t\t\tfor (ReporterSetup reporterSetup : reporterConfigurations) {\n\t\t\t\tfinal String namedReporter = reporterSetup.getName();\n\n\t\t\t\ttry {\n\t\t\t\t\tOptional configuredPeriod = reporterSetup.getIntervalSettings();\n\t\t\t\t\tTimeUnit timeunit = TimeUnit.SECONDS;\n\t\t\t\t\tlong period = MetricOptions.REPORTER_INTERVAL.defaultValue().getSeconds();\n\n\t\t\t\t\tif (configuredPeriod.isPresent()) {\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tString[] interval = configuredPeriod.get().split(\" \");\n\t\t\t\t\t\t\tperiod = Long.parseLong(interval[0]);\n\t\t\t\t\t\t\ttimeunit = TimeUnit.valueOf(interval[1]);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcatch (Exception e) {\n\t\t\t\t\t\t\tLOG.error(\"Cannot parse report interval from config: \" + configuredPeriod +\n\t\t\t\t\t\t\t\t\t\" - please use values like '10 SECONDS' or '500 MILLISECONDS'. 
\" +\n\t\t\t\t\t\t\t\t\t\"Using default reporting interval.\");\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tfinal MetricReporter reporterInstance = reporterSetup.getReporter();\n\t\t\t\t\tfinal String className = reporterInstance.getClass().getName();\n\n\t\t\t\t\tif (reporterInstance instanceof Scheduled) {\n\t\t\t\t\t\tLOG.info(\"Periodically reporting metrics in intervals of {} {} for reporter {} of type {}.\", period, timeunit.name(), namedReporter, className);\n\n\t\t\t\t\t\texecutor.scheduleWithFixedDelay(\n\t\t\t\t\t\t\t\tnew MetricRegistryImpl.ReporterTask((Scheduled) reporterInstance), period, period, timeunit);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tLOG.info(\"Reporting metrics for reporter {} of type {}.\", namedReporter, className);\n\t\t\t\t\t}\n\n\t\t\t\t\tString delimiterForReporter = reporterSetup.getDelimiter().orElse(String.valueOf(globalDelimiter));\n\t\t\t\t\tif (delimiterForReporter.length() != 1) {\n\t\t\t\t\t\tLOG.warn(\"Failed to parse delimiter '{}' for reporter '{}', using global delimiter '{}'.\", delimiterForReporter, namedReporter, globalDelimiter);\n\t\t\t\t\t\tdelimiterForReporter = String.valueOf(globalDelimiter);\n\t\t\t\t\t}\n\n\t\t\t\t\treporters.add(new ReporterAndSettings(\n\t\t\t\t\t\treporterInstance,\n\t\t\t\t\t\tnew ReporterScopedSettings(\n\t\t\t\t\t\t\treporters.size(),\n\t\t\t\t\t\t\tdelimiterForReporter.charAt(0),\n\t\t\t\t\t\t\treporterSetup.getExcludedVariables())));\n\t\t\t\t}\n\t\t\t\tcatch (Throwable t) {\n\t\t\t\t\tLOG.error(\"Could not instantiate metrics reporter {}. 
Metrics might not be exposed/reported.\", namedReporter, t);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}", "target_code": "long period = MetricOptions.REPORTER_INTERVAL.defaultValue().getSeconds();", "method_body_after": "public MetricRegistryImpl(MetricRegistryConfiguration config, Collection reporterConfigurations) {\n\t\tthis.maximumFramesize = config.getQueryServiceMessageSizeLimit();\n\t\tthis.scopeFormats = config.getScopeFormats();\n\t\tthis.globalDelimiter = config.getDelimiter();\n\t\tthis.terminationFuture = new CompletableFuture<>();\n\t\tthis.isShutdown = false;\n\n\t\t\n\t\tthis.reporters = new ArrayList<>(4);\n\n\t\tthis.executor = Executors.newSingleThreadScheduledExecutor(new ExecutorThreadFactory(\"Flink-MetricRegistry\"));\n\n\t\tthis.queryService = null;\n\t\tthis.metricQueryServiceRpcService = null;\n\n\t\tif (reporterConfigurations.isEmpty()) {\n\t\t\t\n\t\t\t\n\t\t\tLOG.info(\"No metrics reporter configured, no metrics will be exposed/reported.\");\n\t\t} else {\n\t\t\tfor (ReporterSetup reporterSetup : reporterConfigurations) {\n\t\t\t\tfinal String namedReporter = reporterSetup.getName();\n\n\t\t\t\ttry {\n\t\t\t\t\tOptional configuredPeriod = reporterSetup.getIntervalSettings();\n\t\t\t\t\tTimeUnit timeunit = TimeUnit.SECONDS;\n\t\t\t\t\tlong period = MetricOptions.REPORTER_INTERVAL.defaultValue().getSeconds();\n\n\t\t\t\t\tif (configuredPeriod.isPresent()) {\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tString[] interval = configuredPeriod.get().split(\" \");\n\t\t\t\t\t\t\tperiod = Long.parseLong(interval[0]);\n\t\t\t\t\t\t\ttimeunit = TimeUnit.valueOf(interval[1]);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcatch (Exception e) {\n\t\t\t\t\t\t\tLOG.error(\"Cannot parse report interval from config: \" + configuredPeriod +\n\t\t\t\t\t\t\t\t\t\" - please use values like '10 SECONDS' or '500 MILLISECONDS'. 
\" +\n\t\t\t\t\t\t\t\t\t\"Using default reporting interval.\");\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tfinal MetricReporter reporterInstance = reporterSetup.getReporter();\n\t\t\t\t\tfinal String className = reporterInstance.getClass().getName();\n\n\t\t\t\t\tif (reporterInstance instanceof Scheduled) {\n\t\t\t\t\t\tLOG.info(\"Periodically reporting metrics in intervals of {} {} for reporter {} of type {}.\", period, timeunit.name(), namedReporter, className);\n\n\t\t\t\t\t\texecutor.scheduleWithFixedDelay(\n\t\t\t\t\t\t\t\tnew MetricRegistryImpl.ReporterTask((Scheduled) reporterInstance), period, period, timeunit);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tLOG.info(\"Reporting metrics for reporter {} of type {}.\", namedReporter, className);\n\t\t\t\t\t}\n\n\t\t\t\t\tString delimiterForReporter = reporterSetup.getDelimiter().orElse(String.valueOf(globalDelimiter));\n\t\t\t\t\tif (delimiterForReporter.length() != 1) {\n\t\t\t\t\t\tLOG.warn(\"Failed to parse delimiter '{}' for reporter '{}', using global delimiter '{}'.\", delimiterForReporter, namedReporter, globalDelimiter);\n\t\t\t\t\t\tdelimiterForReporter = String.valueOf(globalDelimiter);\n\t\t\t\t\t}\n\n\t\t\t\t\treporters.add(new ReporterAndSettings(\n\t\t\t\t\t\treporterInstance,\n\t\t\t\t\t\tnew ReporterScopedSettings(\n\t\t\t\t\t\t\treporters.size(),\n\t\t\t\t\t\t\tdelimiterForReporter.charAt(0),\n\t\t\t\t\t\t\treporterSetup.getExcludedVariables())));\n\t\t\t\t}\n\t\t\t\tcatch (Throwable t) {\n\t\t\t\t\tLOG.error(\"Could not instantiate metrics reporter {}. 
Metrics might not be exposed/reported.\", namedReporter, t);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}", "context_before": "class MetricRegistryImpl implements MetricRegistry {\n\tprivate static final Logger LOG = LoggerFactory.getLogger(MetricRegistryImpl.class);\n\n\tprivate final Object lock = new Object();\n\n\tprivate final List reporters;\n\tprivate final ScheduledExecutorService executor;\n\n\tprivate final ScopeFormats scopeFormats;\n\tprivate final char globalDelimiter;\n\n\tprivate final CompletableFuture terminationFuture;\n\n\tprivate final long maximumFramesize;\n\n\t@Nullable\n\tprivate MetricQueryService queryService;\n\n\t@Nullable\n\tprivate RpcService metricQueryServiceRpcService;\n\n\tprivate ViewUpdater viewUpdater;\n\n\tprivate boolean isShutdown;\n\n\tpublic MetricRegistryImpl(MetricRegistryConfiguration config) {\n\t\tthis(config, Collections.emptyList());\n\t}\n\n\t/**\n\t * Creates a new MetricRegistry and starts the configured reporter.\n\t */\n\t\n\n\t/**\n\t * Initializes the MetricQueryService.\n\t *\n\t * @param rpcService RpcService to create the MetricQueryService on\n\t * @param resourceID resource ID used to disambiguate the actor name\n */\n\tpublic void startQueryService(RpcService rpcService, ResourceID resourceID) {\n\t\tsynchronized (lock) {\n\t\t\tPreconditions.checkState(!isShutdown(), \"The metric registry has already been shut down.\");\n\n\t\t\ttry {\n\t\t\t\tmetricQueryServiceRpcService = rpcService;\n\t\t\t\tqueryService = MetricQueryService.createMetricQueryService(rpcService, resourceID, maximumFramesize);\n\t\t\t\tqueryService.start();\n\t\t\t} catch (Exception e) {\n\t\t\t\tLOG.warn(\"Could not start MetricDumpActor. 
No metrics will be submitted to the WebInterface.\", e);\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Returns the rpc service that the {@link MetricQueryService} runs in.\n\t *\n\t * @return rpc service of hte MetricQueryService\n\t */\n\t@Nullable\n\tpublic RpcService getMetricQueryServiceRpcService() {\n\t\treturn metricQueryServiceRpcService;\n\t}\n\n\t/**\n\t * Returns the address under which the {@link MetricQueryService} is reachable.\n\t *\n\t * @return address of the metric query service\n\t */\n\t@Override\n\t@Nullable\n\tpublic String getMetricQueryServiceGatewayRpcAddress() {\n\t\tif (queryService != null) {\n\t\t\treturn queryService.getSelfGateway(MetricQueryServiceGateway.class).getAddress();\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\t@VisibleForTesting\n\t@Nullable\n\tMetricQueryServiceGateway getMetricQueryServiceGateway() {\n\t\tif (queryService != null) {\n\t\t\treturn queryService.getSelfGateway(MetricQueryServiceGateway.class);\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\t@Override\n\tpublic char getDelimiter() {\n\t\treturn this.globalDelimiter;\n\t}\n\n\t@VisibleForTesting\n\tchar getDelimiter(int reporterIndex) {\n\t\ttry {\n\t\t\treturn reporters.get(reporterIndex).getSettings().getDelimiter();\n\t\t} catch (IndexOutOfBoundsException e) {\n\t\t\tLOG.warn(\"Delimiter for reporter index {} not found, returning global delimiter.\", reporterIndex);\n\t\t\treturn this.globalDelimiter;\n\t\t}\n\t}\n\n\t@Override\n\tpublic int getNumberReporters() {\n\t\treturn reporters.size();\n\t}\n\n\t@VisibleForTesting\n\tpublic List getReporters() {\n\t\treturn reporters.stream().map(ReporterAndSettings::getReporter).collect(Collectors.toList());\n\t}\n\n\t/**\n\t * Returns whether this registry has been shutdown.\n\t *\n\t * @return true, if this registry was shutdown, otherwise false\n\t */\n\tpublic boolean isShutdown() {\n\t\tsynchronized (lock) {\n\t\t\treturn isShutdown;\n\t\t}\n\t}\n\n\t/**\n\t * Shuts down this registry and the associated {@link 
MetricReporter}.\n\t *\n\t *

NOTE: This operation is asynchronous and returns a future which is completed\n\t * once the shutdown operation has been completed.\n\t *\n\t * @return Future which is completed once the {@link MetricRegistryImpl}\n\t * is shut down.\n\t */\n\tpublic CompletableFuture shutdown() {\n\t\tsynchronized (lock) {\n\t\t\tif (isShutdown) {\n\t\t\t\treturn terminationFuture;\n\t\t\t} else {\n\t\t\t\tisShutdown = true;\n\t\t\t\tfinal Collection> terminationFutures = new ArrayList<>(3);\n\t\t\t\tfinal Time gracePeriod = Time.seconds(1L);\n\n\t\t\t\tif (metricQueryServiceRpcService != null) {\n\t\t\t\t\tfinal CompletableFuture metricQueryServiceRpcServiceTerminationFuture = metricQueryServiceRpcService.stopService();\n\t\t\t\t\tterminationFutures.add(metricQueryServiceRpcServiceTerminationFuture);\n\t\t\t\t}\n\n\t\t\t\tThrowable throwable = null;\n\t\t\t\tfor (ReporterAndSettings reporterAndSettings : reporters) {\n\t\t\t\t\ttry {\n\t\t\t\t\t\treporterAndSettings.getReporter().close();\n\t\t\t\t\t} catch (Throwable t) {\n\t\t\t\t\t\tthrowable = ExceptionUtils.firstOrSuppressed(t, throwable);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treporters.clear();\n\n\t\t\t\tif (throwable != null) {\n\t\t\t\t\tterminationFutures.add(\n\t\t\t\t\t\tFutureUtils.completedExceptionally(\n\t\t\t\t\t\t\tnew FlinkException(\"Could not shut down the metric reporters properly.\", throwable)));\n\t\t\t\t}\n\n\t\t\t\tfinal CompletableFuture executorShutdownFuture = ExecutorUtils.nonBlockingShutdown(\n\t\t\t\t\tgracePeriod.toMilliseconds(),\n\t\t\t\t\tTimeUnit.MILLISECONDS,\n\t\t\t\t\texecutor);\n\n\t\t\t\tterminationFutures.add(executorShutdownFuture);\n\n\t\t\t\tFutureUtils\n\t\t\t\t\t.completeAll(terminationFutures)\n\t\t\t\t\t.whenComplete(\n\t\t\t\t\t\t(Void ignored, Throwable error) -> {\n\t\t\t\t\t\t\tif (error != null) {\n\t\t\t\t\t\t\t\tterminationFuture.completeExceptionally(error);\n\t\t\t\t\t\t\t} else 
{\n\t\t\t\t\t\t\t\tterminationFuture.complete(null);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\n\t\t\t\treturn terminationFuture;\n\t\t\t}\n\t\t}\n\t}\n\n\t@Override\n\tpublic ScopeFormats getScopeFormats() {\n\t\treturn scopeFormats;\n\t}\n\n\t\n\t\n\t\n\n\t@Override\n\tpublic void register(Metric metric, String metricName, AbstractMetricGroup group) {\n\t\tsynchronized (lock) {\n\t\t\tif (isShutdown()) {\n\t\t\t\tLOG.warn(\"Cannot register metric, because the MetricRegistry has already been shut down.\");\n\t\t\t} else {\n\t\t\t\tif (reporters != null) {\n\t\t\t\t\tfor (int i = 0; i < reporters.size(); i++) {\n\t\t\t\t\t\tReporterAndSettings reporterAndSettings = reporters.get(i);\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tif (reporterAndSettings != null) {\n\t\t\t\t\t\t\t\tFrontMetricGroup front = new FrontMetricGroup>(reporterAndSettings.getSettings(), group);\n\t\t\t\t\t\t\t\treporterAndSettings.getReporter().notifyOfAddedMetric(metric, metricName, front);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} catch (Exception e) {\n\t\t\t\t\t\t\tLOG.warn(\"Error while registering metric: {}.\", metricName, e);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\ttry {\n\t\t\t\t\tif (queryService != null) {\n\t\t\t\t\t\tqueryService.addMetric(metricName, metric, group);\n\t\t\t\t\t}\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tLOG.warn(\"Error while registering metric: {}.\", metricName, e);\n\t\t\t\t}\n\t\t\t\ttry {\n\t\t\t\t\tif (metric instanceof View) {\n\t\t\t\t\t\tif (viewUpdater == null) {\n\t\t\t\t\t\t\tviewUpdater = new ViewUpdater(executor);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tviewUpdater.notifyOfAddedView((View) metric);\n\t\t\t\t\t}\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tLOG.warn(\"Error while registering metric: {}.\", metricName, e);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t@Override\n\tpublic void unregister(Metric metric, String metricName, AbstractMetricGroup group) {\n\t\tsynchronized (lock) {\n\t\t\tif (isShutdown()) {\n\t\t\t\tLOG.warn(\"Cannot unregister metric, because the MetricRegistry 
has already been shut down.\");\n\t\t\t} else {\n\t\t\t\tif (reporters != null) {\n\t\t\t\t\tfor (int i = 0; i < reporters.size(); i++) {\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\tReporterAndSettings reporterAndSettings = reporters.get(i);\n\t\t\t\t\t\t\tif (reporterAndSettings != null) {\n\t\t\t\t\t\t\t\tFrontMetricGroup front = new FrontMetricGroup>(reporterAndSettings.getSettings(), group);\n\t\t\t\t\t\t\t\treporterAndSettings.getReporter().notifyOfRemovedMetric(metric, metricName, front);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} catch (Exception e) {\n\t\t\t\t\t\t\tLOG.warn(\"Error while unregistering metric: {}.\", metricName, e);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\ttry {\n\t\t\t\t\tif (queryService != null) {\n\t\t\t\t\t\tqueryService.removeMetric(metric);\n\t\t\t\t\t}\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tLOG.warn(\"Error while unregistering metric: {}.\", metricName, e);\n\t\t\t\t}\n\t\t\t\ttry {\n\t\t\t\t\tif (metric instanceof View) {\n\t\t\t\t\t\tif (viewUpdater != null) {\n\t\t\t\t\t\t\tviewUpdater.notifyOfRemovedView((View) metric);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tLOG.warn(\"Error while unregistering metric: {}\", metricName, e);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t\n\n\t@VisibleForTesting\n\t@Nullable\n\tMetricQueryService getQueryService() {\n\t\treturn queryService;\n\t}\n\n\t\n\n\t/**\n\t * This task is explicitly a static class, so that it does not hold any references to the enclosing\n\t * MetricsRegistry instance.\n\t *\n\t *

This is a subtle difference, but very important: With this static class, the enclosing class instance\n\t * may become garbage-collectible, whereas with an anonymous inner class, the timer thread\n\t * (which is a GC root) will hold a reference via the timer task and its enclosing instance pointer.\n\t * Making the MetricsRegistry garbage collectible makes the java.util.Timer garbage collectible,\n\t * which acts as a fail-safe to stop the timer thread and prevents resource leaks.\n\t */\n\tprivate static final class ReporterTask extends TimerTask {\n\n\t\tprivate final Scheduled reporter;\n\n\t\tprivate ReporterTask(Scheduled reporter) {\n\t\t\tthis.reporter = reporter;\n\t\t}\n\n\t\t@Override\n\t\tpublic void run() {\n\t\t\ttry {\n\t\t\t\treporter.report();\n\t\t\t} catch (Throwable t) {\n\t\t\t\tLOG.warn(\"Error while reporting metrics\", t);\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate static class ReporterAndSettings {\n\n\t\tprivate final MetricReporter reporter;\n\t\tprivate final ReporterScopedSettings settings;\n\n\t\tprivate ReporterAndSettings(MetricReporter reporter, ReporterScopedSettings settings) {\n\t\t\tthis.reporter = Preconditions.checkNotNull(reporter);\n\t\t\tthis.settings = Preconditions.checkNotNull(settings);\n\t\t}\n\n\t\tpublic MetricReporter getReporter() {\n\t\t\treturn reporter;\n\t\t}\n\n\t\tpublic ReporterScopedSettings getSettings() {\n\t\t\treturn settings;\n\t\t}\n\t}\n}", "context_after": "class MetricRegistryImpl implements MetricRegistry {\n\tprivate static final Logger LOG = LoggerFactory.getLogger(MetricRegistryImpl.class);\n\n\tprivate final Object lock = new Object();\n\n\tprivate final List reporters;\n\tprivate final ScheduledExecutorService executor;\n\n\tprivate final ScopeFormats scopeFormats;\n\tprivate final char globalDelimiter;\n\n\tprivate final CompletableFuture terminationFuture;\n\n\tprivate final long maximumFramesize;\n\n\t@Nullable\n\tprivate MetricQueryService queryService;\n\n\t@Nullable\n\tprivate RpcService 
metricQueryServiceRpcService;\n\n\tprivate ViewUpdater viewUpdater;\n\n\tprivate boolean isShutdown;\n\n\tpublic MetricRegistryImpl(MetricRegistryConfiguration config) {\n\t\tthis(config, Collections.emptyList());\n\t}\n\n\t/**\n\t * Creates a new MetricRegistry and starts the configured reporter.\n\t */\n\t\n\n\t/**\n\t * Initializes the MetricQueryService.\n\t *\n\t * @param rpcService RpcService to create the MetricQueryService on\n\t * @param resourceID resource ID used to disambiguate the actor name\n */\n\tpublic void startQueryService(RpcService rpcService, ResourceID resourceID) {\n\t\tsynchronized (lock) {\n\t\t\tPreconditions.checkState(!isShutdown(), \"The metric registry has already been shut down.\");\n\n\t\t\ttry {\n\t\t\t\tmetricQueryServiceRpcService = rpcService;\n\t\t\t\tqueryService = MetricQueryService.createMetricQueryService(rpcService, resourceID, maximumFramesize);\n\t\t\t\tqueryService.start();\n\t\t\t} catch (Exception e) {\n\t\t\t\tLOG.warn(\"Could not start MetricDumpActor. 
No metrics will be submitted to the WebInterface.\", e);\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Returns the rpc service that the {@link MetricQueryService} runs in.\n\t *\n\t * @return rpc service of hte MetricQueryService\n\t */\n\t@Nullable\n\tpublic RpcService getMetricQueryServiceRpcService() {\n\t\treturn metricQueryServiceRpcService;\n\t}\n\n\t/**\n\t * Returns the address under which the {@link MetricQueryService} is reachable.\n\t *\n\t * @return address of the metric query service\n\t */\n\t@Override\n\t@Nullable\n\tpublic String getMetricQueryServiceGatewayRpcAddress() {\n\t\tif (queryService != null) {\n\t\t\treturn queryService.getSelfGateway(MetricQueryServiceGateway.class).getAddress();\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\t@VisibleForTesting\n\t@Nullable\n\tMetricQueryServiceGateway getMetricQueryServiceGateway() {\n\t\tif (queryService != null) {\n\t\t\treturn queryService.getSelfGateway(MetricQueryServiceGateway.class);\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\t@Override\n\tpublic char getDelimiter() {\n\t\treturn this.globalDelimiter;\n\t}\n\n\t@VisibleForTesting\n\tchar getDelimiter(int reporterIndex) {\n\t\ttry {\n\t\t\treturn reporters.get(reporterIndex).getSettings().getDelimiter();\n\t\t} catch (IndexOutOfBoundsException e) {\n\t\t\tLOG.warn(\"Delimiter for reporter index {} not found, returning global delimiter.\", reporterIndex);\n\t\t\treturn this.globalDelimiter;\n\t\t}\n\t}\n\n\t@Override\n\tpublic int getNumberReporters() {\n\t\treturn reporters.size();\n\t}\n\n\t@VisibleForTesting\n\tpublic List getReporters() {\n\t\treturn reporters.stream().map(ReporterAndSettings::getReporter).collect(Collectors.toList());\n\t}\n\n\t/**\n\t * Returns whether this registry has been shutdown.\n\t *\n\t * @return true, if this registry was shutdown, otherwise false\n\t */\n\tpublic boolean isShutdown() {\n\t\tsynchronized (lock) {\n\t\t\treturn isShutdown;\n\t\t}\n\t}\n\n\t/**\n\t * Shuts down this registry and the associated {@link 
MetricReporter}.\n\t *\n\t *

NOTE: This operation is asynchronous and returns a future which is completed\n\t * once the shutdown operation has been completed.\n\t *\n\t * @return Future which is completed once the {@link MetricRegistryImpl}\n\t * is shut down.\n\t */\n\tpublic CompletableFuture shutdown() {\n\t\tsynchronized (lock) {\n\t\t\tif (isShutdown) {\n\t\t\t\treturn terminationFuture;\n\t\t\t} else {\n\t\t\t\tisShutdown = true;\n\t\t\t\tfinal Collection> terminationFutures = new ArrayList<>(3);\n\t\t\t\tfinal Time gracePeriod = Time.seconds(1L);\n\n\t\t\t\tif (metricQueryServiceRpcService != null) {\n\t\t\t\t\tfinal CompletableFuture metricQueryServiceRpcServiceTerminationFuture = metricQueryServiceRpcService.stopService();\n\t\t\t\t\tterminationFutures.add(metricQueryServiceRpcServiceTerminationFuture);\n\t\t\t\t}\n\n\t\t\t\tThrowable throwable = null;\n\t\t\t\tfor (ReporterAndSettings reporterAndSettings : reporters) {\n\t\t\t\t\ttry {\n\t\t\t\t\t\treporterAndSettings.getReporter().close();\n\t\t\t\t\t} catch (Throwable t) {\n\t\t\t\t\t\tthrowable = ExceptionUtils.firstOrSuppressed(t, throwable);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treporters.clear();\n\n\t\t\t\tif (throwable != null) {\n\t\t\t\t\tterminationFutures.add(\n\t\t\t\t\t\tFutureUtils.completedExceptionally(\n\t\t\t\t\t\t\tnew FlinkException(\"Could not shut down the metric reporters properly.\", throwable)));\n\t\t\t\t}\n\n\t\t\t\tfinal CompletableFuture executorShutdownFuture = ExecutorUtils.nonBlockingShutdown(\n\t\t\t\t\tgracePeriod.toMilliseconds(),\n\t\t\t\t\tTimeUnit.MILLISECONDS,\n\t\t\t\t\texecutor);\n\n\t\t\t\tterminationFutures.add(executorShutdownFuture);\n\n\t\t\t\tFutureUtils\n\t\t\t\t\t.completeAll(terminationFutures)\n\t\t\t\t\t.whenComplete(\n\t\t\t\t\t\t(Void ignored, Throwable error) -> {\n\t\t\t\t\t\t\tif (error != null) {\n\t\t\t\t\t\t\t\tterminationFuture.completeExceptionally(error);\n\t\t\t\t\t\t\t} else 
{\n\t\t\t\t\t\t\t\tterminationFuture.complete(null);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\n\t\t\t\treturn terminationFuture;\n\t\t\t}\n\t\t}\n\t}\n\n\t@Override\n\tpublic ScopeFormats getScopeFormats() {\n\t\treturn scopeFormats;\n\t}\n\n\t\n\t\n\t\n\n\t@Override\n\tpublic void register(Metric metric, String metricName, AbstractMetricGroup group) {\n\t\tsynchronized (lock) {\n\t\t\tif (isShutdown()) {\n\t\t\t\tLOG.warn(\"Cannot register metric, because the MetricRegistry has already been shut down.\");\n\t\t\t} else {\n\t\t\t\tif (reporters != null) {\n\t\t\t\t\tfor (int i = 0; i < reporters.size(); i++) {\n\t\t\t\t\t\tReporterAndSettings reporterAndSettings = reporters.get(i);\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tif (reporterAndSettings != null) {\n\t\t\t\t\t\t\t\tFrontMetricGroup front = new FrontMetricGroup>(reporterAndSettings.getSettings(), group);\n\t\t\t\t\t\t\t\treporterAndSettings.getReporter().notifyOfAddedMetric(metric, metricName, front);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} catch (Exception e) {\n\t\t\t\t\t\t\tLOG.warn(\"Error while registering metric: {}.\", metricName, e);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\ttry {\n\t\t\t\t\tif (queryService != null) {\n\t\t\t\t\t\tqueryService.addMetric(metricName, metric, group);\n\t\t\t\t\t}\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tLOG.warn(\"Error while registering metric: {}.\", metricName, e);\n\t\t\t\t}\n\t\t\t\ttry {\n\t\t\t\t\tif (metric instanceof View) {\n\t\t\t\t\t\tif (viewUpdater == null) {\n\t\t\t\t\t\t\tviewUpdater = new ViewUpdater(executor);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tviewUpdater.notifyOfAddedView((View) metric);\n\t\t\t\t\t}\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tLOG.warn(\"Error while registering metric: {}.\", metricName, e);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t@Override\n\tpublic void unregister(Metric metric, String metricName, AbstractMetricGroup group) {\n\t\tsynchronized (lock) {\n\t\t\tif (isShutdown()) {\n\t\t\t\tLOG.warn(\"Cannot unregister metric, because the MetricRegistry 
has already been shut down.\");\n\t\t\t} else {\n\t\t\t\tif (reporters != null) {\n\t\t\t\t\tfor (int i = 0; i < reporters.size(); i++) {\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\tReporterAndSettings reporterAndSettings = reporters.get(i);\n\t\t\t\t\t\t\tif (reporterAndSettings != null) {\n\t\t\t\t\t\t\t\tFrontMetricGroup front = new FrontMetricGroup>(reporterAndSettings.getSettings(), group);\n\t\t\t\t\t\t\t\treporterAndSettings.getReporter().notifyOfRemovedMetric(metric, metricName, front);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} catch (Exception e) {\n\t\t\t\t\t\t\tLOG.warn(\"Error while unregistering metric: {}.\", metricName, e);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\ttry {\n\t\t\t\t\tif (queryService != null) {\n\t\t\t\t\t\tqueryService.removeMetric(metric);\n\t\t\t\t\t}\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tLOG.warn(\"Error while unregistering metric: {}.\", metricName, e);\n\t\t\t\t}\n\t\t\t\ttry {\n\t\t\t\t\tif (metric instanceof View) {\n\t\t\t\t\t\tif (viewUpdater != null) {\n\t\t\t\t\t\t\tviewUpdater.notifyOfRemovedView((View) metric);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tLOG.warn(\"Error while unregistering metric: {}\", metricName, e);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t\n\n\t@VisibleForTesting\n\t@Nullable\n\tMetricQueryService getQueryService() {\n\t\treturn queryService;\n\t}\n\n\t\n\n\t/**\n\t * This task is explicitly a static class, so that it does not hold any references to the enclosing\n\t * MetricsRegistry instance.\n\t *\n\t *

This is a subtle difference, but very important: With this static class, the enclosing class instance\n\t * may become garbage-collectible, whereas with an anonymous inner class, the timer thread\n\t * (which is a GC root) will hold a reference via the timer task and its enclosing instance pointer.\n\t * Making the MetricsRegistry garbage collectible makes the java.util.Timer garbage collectible,\n\t * which acts as a fail-safe to stop the timer thread and prevents resource leaks.\n\t */\n\tprivate static final class ReporterTask extends TimerTask {\n\n\t\tprivate final Scheduled reporter;\n\n\t\tprivate ReporterTask(Scheduled reporter) {\n\t\t\tthis.reporter = reporter;\n\t\t}\n\n\t\t@Override\n\t\tpublic void run() {\n\t\t\ttry {\n\t\t\t\treporter.report();\n\t\t\t} catch (Throwable t) {\n\t\t\t\tLOG.warn(\"Error while reporting metrics\", t);\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate static class ReporterAndSettings {\n\n\t\tprivate final MetricReporter reporter;\n\t\tprivate final ReporterScopedSettings settings;\n\n\t\tprivate ReporterAndSettings(MetricReporter reporter, ReporterScopedSettings settings) {\n\t\t\tthis.reporter = Preconditions.checkNotNull(reporter);\n\t\t\tthis.settings = Preconditions.checkNotNull(settings);\n\t\t}\n\n\t\tpublic MetricReporter getReporter() {\n\t\t\treturn reporter;\n\t\t}\n\n\t\tpublic ReporterScopedSettings getSettings() {\n\t\t\treturn settings;\n\t\t}\n\t}\n}" }, { "comment": "It's better to overwrite old record, but not `ON DUPLICATE KEY UPDATE NOTHING`", "method_body": "public String buildInsertSQL(final Record record) {\n String insertSql = super.buildInsertSQL(record);\n if (!record.getTableMetaData().getUniqueKeyNamesList().isEmpty()) {\n return insertSql + \" ON DUPLICATE KEY UPDATE NOTHING\";\n }\n return insertSql;\n }", "target_code": "return insertSql + \" ON DUPLICATE KEY UPDATE NOTHING\";", "method_body_after": "public String buildInsertSQL(final Record record) {\n String insertSql = super.buildInsertSQL(record);\n 
List uniqueKeyNamesList = record.getTableMetaData().getUniqueKeyNamesList();\n if (uniqueKeyNamesList.isEmpty()) {\n return insertSql;\n }\n StringBuilder updateValue = new StringBuilder();\n for (String each : record.getAfterMap().keySet()) {\n if (uniqueKeyNamesList.contains(each)) {\n continue;\n }\n updateValue.append(quote(each)).append(\"=EXCLUDED.\").append(quote(each)).append(\",\");\n }\n updateValue.setLength(updateValue.length() - 1);\n String uniqueKeyNames = uniqueKeyNamesList.stream().map(this::quote).collect(Collectors.joining(\",\"));\n return insertSql + String.format(\" ON CONFLICT (%s) DO UPDATE SET %s\", uniqueKeyNames, updateValue);\n }", "context_before": "class PostgreSQLSQLBuilder extends AbstractSQLBuilder {\n \n private static final List RESERVED_KEYWORDS = Arrays.asList(\"ALL\", \"ANALYSE\", \"ANALYZE\", \"AND\", \"ANY\", \"ARRAY\", \"AS\", \"ASC\", \"ASYMMETRIC\", \"AUTHORIZATION\", \"BETWEEN\", \"BIGINT\", \"BINARY\",\n \"BIT\", \"BOOLEAN\", \"BOTH\", \"CASE\", \"CAST\", \"CHAR\", \"CHARACTER\", \"CHECK\", \"COALESCE\", \"COLLATE\", \"COLLATION\", \"COLUMN\", \"CONCURRENTLY\", \"CONSTRAINT\", \"CREATE\", \"CROSS\", \"CURRENT_CATALOG\",\n \"CURRENT_DATE\", \"CURRENT_ROLE\", \"CURRENT_SCHEMA\", \"CURRENT_TIME\", \"CURRENT_TIMESTAMP\", \"CURRENT_USER\", \"DEC\", \"DECIMAL\", \"DEFAULT\", \"DEFERRABLE\", \"DESC\", \"DISTINCT\", \"DO\", \"ELSE\", \"END\",\n \"EXCEPT\", \"EXISTS\", \"EXTRACT\", \"FALSE\", \"FETCH\", \"FLOAT\", \"FOR\", \"FOREIGN\", \"FREEZE\", \"FROM\", \"FULL\", \"GRANT\", \"GREATEST\", \"GROUP\", \"GROUPING\", \"HAVING\", \"ILIKE\", \"IN\", \"INITIALLY\",\n \"INNER\", \"INOUT\", \"INT\", \"INTEGER\", \"INTERSECT\", \"INTERVAL\", \"INTO\", \"IS\", \"ISNULL\", \"JOIN\", \"LATERAL\", \"LEADING\", \"LEAST\", \"LEFT\", \"LIKE\", \"LIMIT\", \"LOCALTIME\", \"LOCALTIMESTAMP\",\n \"NATIONAL\", \"NATURAL\", \"NCHAR\", \"NONE\", \"NORMALIZE\", \"NOT\", \"NOTNULL\", \"NULL\", \"NULLIF\", \"NUMERIC\", \"OFFSET\", \"ON\", \"ONLY\", 
\"OR\", \"ORDER\", \"OUT\", \"OUTER\", \"OVERLAPS\", \"OVERLAY\", \"PLACING\",\n \"POSITION\", \"PRECISION\", \"PRIMARY\", \"REAL\", \"REFERENCES\", \"RETURNING\", \"RIGHT\", \"ROW\", \"SELECT\", \"SESSION_USER\", \"SETOF\", \"SIMILAR\", \"SMALLINT\", \"SOME\", \"SUBSTRING\", \"SYMMETRIC\", \"TABLE\",\n \"TABLESAMPLE\", \"THEN\", \"TIME\", \"TIMESTAMP\", \"TO\", \"TRAILING\", \"TREAT\", \"TRIM\", \"TRUE\", \"UNION\", \"UNIQUE\", \"USER\", \"USING\", \"VALUES\", \"VARCHAR\", \"VARIADIC\", \"VERBOSE\", \"WHEN\", \"WHERE\",\n \"WINDOW\", \"WITH\", \"XMLATTRIBUTES\", \"XMLCONCAT\", \"XMLELEMENT\", \"XMLEXISTS\", \"XMLFOREST\", \"XMLNAMESPACES\", \"XMLPARSE\", \"XMLPI\", \"XMLROOT\", \"XMLSERIALIZE\", \"XMLTABLE\");\n \n @Override\n protected boolean isKeyword(final String item) {\n return RESERVED_KEYWORDS.contains(item.toUpperCase());\n }\n \n @Override\n protected String getLeftIdentifierQuoteString() {\n return \"`\";\n }\n \n @Override\n protected String getRightIdentifierQuoteString() {\n return \"`\";\n }\n \n @Override\n \n}", "context_after": "class PostgreSQLSQLBuilder extends AbstractSQLBuilder {\n \n private static final List RESERVED_KEYWORDS = Arrays.asList(\"ALL\", \"ANALYSE\", \"ANALYZE\", \"AND\", \"ANY\", \"ARRAY\", \"AS\", \"ASC\", \"ASYMMETRIC\", \"AUTHORIZATION\", \"BETWEEN\", \"BIGINT\", \"BINARY\",\n \"BIT\", \"BOOLEAN\", \"BOTH\", \"CASE\", \"CAST\", \"CHAR\", \"CHARACTER\", \"CHECK\", \"COALESCE\", \"COLLATE\", \"COLLATION\", \"COLUMN\", \"CONCURRENTLY\", \"CONSTRAINT\", \"CREATE\", \"CROSS\", \"CURRENT_CATALOG\",\n \"CURRENT_DATE\", \"CURRENT_ROLE\", \"CURRENT_SCHEMA\", \"CURRENT_TIME\", \"CURRENT_TIMESTAMP\", \"CURRENT_USER\", \"DEC\", \"DECIMAL\", \"DEFAULT\", \"DEFERRABLE\", \"DESC\", \"DISTINCT\", \"DO\", \"ELSE\", \"END\",\n \"EXCEPT\", \"EXISTS\", \"EXTRACT\", \"FALSE\", \"FETCH\", \"FLOAT\", \"FOR\", \"FOREIGN\", \"FREEZE\", \"FROM\", \"FULL\", \"GRANT\", \"GREATEST\", \"GROUP\", \"GROUPING\", \"HAVING\", \"ILIKE\", \"IN\", 
\"INITIALLY\",\n \"INNER\", \"INOUT\", \"INT\", \"INTEGER\", \"INTERSECT\", \"INTERVAL\", \"INTO\", \"IS\", \"ISNULL\", \"JOIN\", \"LATERAL\", \"LEADING\", \"LEAST\", \"LEFT\", \"LIKE\", \"LIMIT\", \"LOCALTIME\", \"LOCALTIMESTAMP\",\n \"NATIONAL\", \"NATURAL\", \"NCHAR\", \"NONE\", \"NORMALIZE\", \"NOT\", \"NOTNULL\", \"NULL\", \"NULLIF\", \"NUMERIC\", \"OFFSET\", \"ON\", \"ONLY\", \"OR\", \"ORDER\", \"OUT\", \"OUTER\", \"OVERLAPS\", \"OVERLAY\", \"PLACING\",\n \"POSITION\", \"PRECISION\", \"PRIMARY\", \"REAL\", \"REFERENCES\", \"RETURNING\", \"RIGHT\", \"ROW\", \"SELECT\", \"SESSION_USER\", \"SETOF\", \"SIMILAR\", \"SMALLINT\", \"SOME\", \"SUBSTRING\", \"SYMMETRIC\", \"TABLE\",\n \"TABLESAMPLE\", \"THEN\", \"TIME\", \"TIMESTAMP\", \"TO\", \"TRAILING\", \"TREAT\", \"TRIM\", \"TRUE\", \"UNION\", \"UNIQUE\", \"USER\", \"USING\", \"VALUES\", \"VARCHAR\", \"VARIADIC\", \"VERBOSE\", \"WHEN\", \"WHERE\",\n \"WINDOW\", \"WITH\", \"XMLATTRIBUTES\", \"XMLCONCAT\", \"XMLELEMENT\", \"XMLEXISTS\", \"XMLFOREST\", \"XMLNAMESPACES\", \"XMLPARSE\", \"XMLPI\", \"XMLROOT\", \"XMLSERIALIZE\", \"XMLTABLE\");\n \n @Override\n protected boolean isKeyword(final String item) {\n return RESERVED_KEYWORDS.contains(item.toUpperCase());\n }\n \n @Override\n protected String getLeftIdentifierQuoteString() {\n return \"\\\"\";\n }\n \n @Override\n protected String getRightIdentifierQuoteString() {\n return \"\\\"\";\n }\n \n @Override\n \n}" }, { "comment": "We should use another method, error(String, Throwable) here.", "method_body": "public AbstractAuthenticationToken convert(Jwt jwt) {\n OAuth2AccessToken accessToken = new OAuth2AccessToken(\n OAuth2AccessToken.TokenType.BEARER, jwt.getTokenValue(), jwt.getIssuedAt(), jwt.getExpiresAt());\n AbstractAuthenticationToken token = this.jwtAuthenticationConverter.convert(jwt);\n Collection authorities = token.getAuthorities();\n JWTClaimsSet.Builder builder = new Builder();\n for (Entry entry : jwt.getClaims().entrySet()) {\n 
builder.claim(entry.getKey(), entry.getValue());\n }\n JWTClaimsSet jwtClaimsSet = builder.build();\n JWSObject jwsObject = null;\n try {\n jwsObject = JWSObject.parse(accessToken.getTokenValue());\n } catch (ParseException e) {\n LOGGER.error(\n e.getMessage() + \". When create an instance of JWSObject, an exception is resolved on the token.\");\n }\n UserPrincipal userPrincipal = new UserPrincipal(accessToken.getTokenValue(), jwsObject, jwtClaimsSet);\n return new PreAuthenticatedAuthenticationToken(userPrincipal, null, authorities);\n }", "target_code": "LOGGER.error(", "method_body_after": "public AbstractAuthenticationToken convert(Jwt jwt) {\n OAuth2AccessToken accessToken = new OAuth2AccessToken(\n OAuth2AccessToken.TokenType.BEARER, jwt.getTokenValue(), jwt.getIssuedAt(), jwt.getExpiresAt());\n Collection authorities = extractAuthorities(jwt);\n AzureOAuth2AuthenticatedPrincipal principal = new AzureOAuth2AuthenticatedPrincipal(\n jwt.getHeaders(), jwt.getClaims(), authorities, jwt.getTokenValue());\n return new BearerTokenAuthentication(principal, accessToken, authorities);\n }", "context_before": "class AzureJwtBearerTokenAuthenticationConverter implements Converter {\n\n private static final Logger LOGGER = LoggerFactory.getLogger(AzureJwtBearerTokenAuthenticationConverter.class);\n private final JwtAuthenticationConverter jwtAuthenticationConverter = new JwtAuthenticationConverter();\n\n public AzureJwtBearerTokenAuthenticationConverter() {\n }\n\n @Override\n \n}", "context_after": "class AzureJwtBearerTokenAuthenticationConverter implements Converter {\n\n private static final String DEFAULT_AUTHORITY_PREFIX = \"SCOPE_\";\n\n private Converter> jwtGrantedConverter\n = new JwtGrantedAuthoritiesConverter();\n\n public AzureJwtBearerTokenAuthenticationConverter() {\n }\n\n public AzureJwtBearerTokenAuthenticationConverter(String authoritiesClaimName) {\n this(authoritiesClaimName, DEFAULT_AUTHORITY_PREFIX);\n }\n\n public 
AzureJwtBearerTokenAuthenticationConverter(String authoritiesClaimName, String authorityPrefix) {\n Assert.notNull(authoritiesClaimName, \"authoritiesClaimName cannot be null\");\n Assert.notNull(authorityPrefix, \"authorityPrefix cannot be null\");\n JwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new JwtGrantedAuthoritiesConverter();\n jwtGrantedAuthoritiesConverter.setAuthoritiesClaimName(authoritiesClaimName);\n jwtGrantedAuthoritiesConverter.setAuthorityPrefix(authorityPrefix);\n this.jwtGrantedConverter = jwtGrantedAuthoritiesConverter;\n }\n\n protected Collection extractAuthorities(Jwt jwt) {\n return this.jwtGrantedConverter.convert(jwt);\n }\n\n @Override\n \n\n public void setJwtGrantedAuthoritiesConverter(\n Converter> jwtGrantedAuthoritiesConverter) {\n this.jwtGrantedConverter = jwtGrantedAuthoritiesConverter;\n }\n}" }, { "comment": "would be nice to have a test checking for containing `null` with `true` as expected result", "method_body": "Stream getTestSetSpecs() {\n return Stream.of(\n TestSetSpec.forFunction(BuiltInFunctionDefinitions.ARRAY_CONTAINS)\n .onFieldsWithData(\n new Integer[] {1, 2, 3},\n null,\n new String[] {\"Hello\", \"World\"},\n new Row[] {\n Row.of(true, LocalDate.of(2022, 4, 20)),\n Row.of(true, LocalDate.of(1990, 10, 14)),\n null\n },\n new Integer[] {1, null, 3},\n new Integer[] {1, 2, 3})\n .andDataTypes(\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.STRING()).notNull(),\n DataTypes.ARRAY(\n DataTypes.ROW(DataTypes.BOOLEAN(), DataTypes.DATE())),\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.INT().notNull()).notNull())\n \n .testResult(\n $(\"f0\").arrayContains(2),\n \"ARRAY_CONTAINS(f0, 2)\",\n true,\n DataTypes.BOOLEAN().nullable())\n .testResult(\n $(\"f0\").arrayContains(42),\n \"ARRAY_CONTAINS(f0, 42)\",\n false,\n DataTypes.BOOLEAN().nullable())\n \n .testResult(\n $(\"f1\").arrayContains(12),\n \"ARRAY_CONTAINS(f1, 12)\",\n null,\n 
DataTypes.BOOLEAN().nullable())\n .testResult(\n $(\"f1\").arrayContains(null),\n \"ARRAY_CONTAINS(f1, NULL)\",\n null,\n DataTypes.BOOLEAN().nullable())\n \n .testResult(\n $(\"f2\").arrayContains(\"Hello\"),\n \"ARRAY_CONTAINS(f2, 'Hello')\",\n true,\n DataTypes.BOOLEAN().notNull())\n \n .testResult(\n $(\"f3\").arrayContains(row(true, LocalDate.of(1990, 10, 14))),\n \"ARRAY_CONTAINS(f3, (TRUE, DATE '1990-10-14'))\",\n true,\n DataTypes.BOOLEAN())\n .testResult(\n $(\"f3\").arrayContains(row(false, LocalDate.of(1990, 10, 14))),\n \"ARRAY_CONTAINS(f3, (FALSE, DATE '1990-10-14'))\",\n false,\n DataTypes.BOOLEAN())\n .testResult(\n $(\"f3\").arrayContains(null),\n \"ARRAY_CONTAINS(f3, null)\",\n true,\n DataTypes.BOOLEAN())\n \n .testResult(\n $(\"f4\").arrayContains(null),\n \"ARRAY_CONTAINS(f4, NULL)\",\n true,\n DataTypes.BOOLEAN().nullable())\n .testResult(\n $(\"f5\").arrayContains(lit(null, DataTypes.INT())),\n \"ARRAY_CONTAINS(f5, CAST(NULL AS INT))\",\n false,\n DataTypes.BOOLEAN().notNull())\n .testResult(\n $(\"f5\").arrayContains(lit(4, DataTypes.INT().notNull())),\n \"ARRAY_CONTAINS(f5, 4)\",\n false,\n DataTypes.BOOLEAN().notNull())\n \n .testSqlValidationError(\n \"ARRAY_CONTAINS(f0, TRUE)\",\n \"Invalid input arguments. Expected signatures are:\\n\"\n + \"ARRAY_CONTAINS(haystack , needle )\")\n .testTableApiValidationError(\n $(\"f0\").arrayContains(true),\n \"Invalid input arguments. 
Expected signatures are:\\n\"\n + \"ARRAY_CONTAINS(haystack , needle )\"),\n TestSetSpec.forFunction(BuiltInFunctionDefinitions.ARRAY_DISTINCT)\n .onFieldsWithData(\n new Integer[] {1, 2, 3},\n new Integer[] {null, 1, 2, 3, 4, 5, 4, 3, 2, 1, null},\n null,\n new String[] {\"Hello\", \"Hello\", \"Hello\"},\n new Row[] {\n Row.of(true, LocalDate.of(2022, 4, 20)),\n Row.of(true, LocalDate.of(1990, 10, 14)),\n Row.of(true, LocalDate.of(1990, 10, 14)),\n Row.of(true, LocalDate.of(1990, 10, 14)),\n null\n })\n .andDataTypes(\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.STRING()).notNull(),\n DataTypes.ARRAY(\n DataTypes.ROW(DataTypes.BOOLEAN(), DataTypes.DATE())))\n .testResult(\n $(\"f0\").arrayDistinct(),\n \"ARRAY_DISTINCT(f0)\",\n new Integer[] {1, 2, 3},\n DataTypes.ARRAY(DataTypes.INT()).nullable())\n .testResult(\n $(\"f1\").arrayDistinct(),\n \"ARRAY_DISTINCT(f1)\",\n new Integer[] {null, 1, 2, 3, 4, 5},\n DataTypes.ARRAY(DataTypes.INT()).nullable())\n .testResult(\n $(\"f2\").arrayDistinct(),\n \"ARRAY_DISTINCT(f2)\",\n null,\n DataTypes.ARRAY(DataTypes.INT()).nullable())\n .testResult(\n $(\"f3\").arrayDistinct(),\n \"ARRAY_DISTINCT(f3)\",\n new String[] {\"Hello\"},\n DataTypes.ARRAY(DataTypes.STRING()).notNull())\n .testResult(\n $(\"f4\").arrayDistinct(),\n \"ARRAY_DISTINCT(f4)\",\n new Row[] {\n Row.of(true, LocalDate.of(2022, 4, 20)),\n Row.of(true, LocalDate.of(1990, 10, 14)),\n null\n },\n DataTypes.ARRAY(\n DataTypes.ROW(DataTypes.BOOLEAN(), DataTypes.DATE()))));\n }", "target_code": "DataTypes.BOOLEAN().notNull())", "method_body_after": "Stream getTestSetSpecs() {\n return Stream.of(\n TestSetSpec.forFunction(BuiltInFunctionDefinitions.ARRAY_CONTAINS)\n .onFieldsWithData(\n new Integer[] {1, 2, 3},\n null,\n new String[] {\"Hello\", \"World\"},\n new Row[] {\n Row.of(true, LocalDate.of(2022, 4, 20)),\n Row.of(true, LocalDate.of(1990, 10, 14)),\n null\n },\n new 
Integer[] {1, null, 3},\n new Integer[] {1, 2, 3})\n .andDataTypes(\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.STRING()).notNull(),\n DataTypes.ARRAY(\n DataTypes.ROW(DataTypes.BOOLEAN(), DataTypes.DATE())),\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.INT().notNull()).notNull())\n \n .testResult(\n $(\"f0\").arrayContains(2),\n \"ARRAY_CONTAINS(f0, 2)\",\n true,\n DataTypes.BOOLEAN().nullable())\n .testResult(\n $(\"f0\").arrayContains(42),\n \"ARRAY_CONTAINS(f0, 42)\",\n false,\n DataTypes.BOOLEAN().nullable())\n \n .testResult(\n $(\"f1\").arrayContains(12),\n \"ARRAY_CONTAINS(f1, 12)\",\n null,\n DataTypes.BOOLEAN().nullable())\n .testResult(\n $(\"f1\").arrayContains(null),\n \"ARRAY_CONTAINS(f1, NULL)\",\n null,\n DataTypes.BOOLEAN().nullable())\n \n .testResult(\n $(\"f2\").arrayContains(\"Hello\"),\n \"ARRAY_CONTAINS(f2, 'Hello')\",\n true,\n DataTypes.BOOLEAN().notNull())\n \n .testResult(\n $(\"f3\").arrayContains(row(true, LocalDate.of(1990, 10, 14))),\n \"ARRAY_CONTAINS(f3, (TRUE, DATE '1990-10-14'))\",\n true,\n DataTypes.BOOLEAN())\n .testResult(\n $(\"f3\").arrayContains(row(false, LocalDate.of(1990, 10, 14))),\n \"ARRAY_CONTAINS(f3, (FALSE, DATE '1990-10-14'))\",\n false,\n DataTypes.BOOLEAN())\n .testResult(\n $(\"f3\").arrayContains(null),\n \"ARRAY_CONTAINS(f3, null)\",\n true,\n DataTypes.BOOLEAN())\n \n .testResult(\n $(\"f4\").arrayContains(null),\n \"ARRAY_CONTAINS(f4, NULL)\",\n true,\n DataTypes.BOOLEAN().nullable())\n .testResult(\n $(\"f5\").arrayContains(lit(null, DataTypes.INT())),\n \"ARRAY_CONTAINS(f5, CAST(NULL AS INT))\",\n false,\n DataTypes.BOOLEAN().notNull())\n .testResult(\n $(\"f5\").arrayContains(lit(4, DataTypes.INT().notNull())),\n \"ARRAY_CONTAINS(f5, 4)\",\n false,\n DataTypes.BOOLEAN().notNull())\n .testResult(\n $(\"f5\").arrayContains(lit(3, DataTypes.INT().notNull())),\n \"ARRAY_CONTAINS(f5, 3)\",\n true,\n DataTypes.BOOLEAN().notNull())\n \n 
.testSqlValidationError(\n \"ARRAY_CONTAINS(f0, TRUE)\",\n \"Invalid input arguments. Expected signatures are:\\n\"\n + \"ARRAY_CONTAINS(haystack , needle )\")\n .testTableApiValidationError(\n $(\"f0\").arrayContains(true),\n \"Invalid input arguments. Expected signatures are:\\n\"\n + \"ARRAY_CONTAINS(haystack , needle )\"),\n TestSetSpec.forFunction(BuiltInFunctionDefinitions.ARRAY_DISTINCT)\n .onFieldsWithData(\n new Integer[] {1, 2, 3},\n new Integer[] {null, 1, 2, 3, 4, 5, 4, 3, 2, 1, null},\n null,\n new String[] {\"Hello\", \"Hello\", \"Hello\"},\n new Row[] {\n Row.of(true, LocalDate.of(2022, 4, 20)),\n Row.of(true, LocalDate.of(1990, 10, 14)),\n Row.of(true, LocalDate.of(1990, 10, 14)),\n Row.of(true, LocalDate.of(1990, 10, 14)),\n null\n })\n .andDataTypes(\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.INT()),\n DataTypes.ARRAY(DataTypes.STRING()).notNull(),\n DataTypes.ARRAY(\n DataTypes.ROW(DataTypes.BOOLEAN(), DataTypes.DATE())))\n .testResult(\n $(\"f0\").arrayDistinct(),\n \"ARRAY_DISTINCT(f0)\",\n new Integer[] {1, 2, 3},\n DataTypes.ARRAY(DataTypes.INT()).nullable())\n .testResult(\n $(\"f1\").arrayDistinct(),\n \"ARRAY_DISTINCT(f1)\",\n new Integer[] {null, 1, 2, 3, 4, 5},\n DataTypes.ARRAY(DataTypes.INT()).nullable())\n .testResult(\n $(\"f2\").arrayDistinct(),\n \"ARRAY_DISTINCT(f2)\",\n null,\n DataTypes.ARRAY(DataTypes.INT()).nullable())\n .testResult(\n $(\"f3\").arrayDistinct(),\n \"ARRAY_DISTINCT(f3)\",\n new String[] {\"Hello\"},\n DataTypes.ARRAY(DataTypes.STRING()).notNull())\n .testResult(\n $(\"f4\").arrayDistinct(),\n \"ARRAY_DISTINCT(f4)\",\n new Row[] {\n Row.of(true, LocalDate.of(2022, 4, 20)),\n Row.of(true, LocalDate.of(1990, 10, 14)),\n null\n },\n DataTypes.ARRAY(\n DataTypes.ROW(DataTypes.BOOLEAN(), DataTypes.DATE()))));\n }", "context_before": "class CollectionFunctionsITCase extends BuiltInFunctionTestBase {\n\n @Override\n \n}", "context_after": "class 
CollectionFunctionsITCase extends BuiltInFunctionTestBase {\n\n @Override\n \n}" }, { "comment": "nit: It is still Thread.sleep(1), look into its implementation. TBH, I don't know what's the point of the method in jvm.", "method_body": "private void buildGraph(StreamExecutionEnvironment env) {\n \n \n env.fromSource(\n new NumberSequenceSource(0, Long.MAX_VALUE),\n WatermarkStrategy.noWatermarks(),\n \"num-source\")\n \n .keyBy(value -> value)\n \n .map(\n value -> {\n Thread.sleep(0, 100);\n return value;\n })\n .addSink(new DiscardingSink<>());\n }", "target_code": "Thread.sleep(0, 100);", "method_body_after": "private void buildGraph(StreamExecutionEnvironment env) {\n \n \n env.fromSource(\n new NumberSequenceSource(0, Long.MAX_VALUE),\n WatermarkStrategy.noWatermarks(),\n \"num-source\")\n \n .keyBy(value -> value)\n \n .map(\n value -> {\n Thread.sleep(1);\n return value;\n })\n .addSink(new DiscardingSink<>());\n }", "context_before": "class UnalignedCheckpointFailureHandlingITCase {\n\n private static final int PARALLELISM = 2;\n\n @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Rule public final SharedObjects sharedObjects = SharedObjects.create();\n\n @Rule\n public final MiniClusterWithClientResource miniClusterResource =\n new MiniClusterWithClientResource(\n new MiniClusterResourceConfiguration.Builder()\n .setNumberTaskManagers(PARALLELISM)\n .setNumberSlotsPerTaskManager(1)\n .build());\n\n @Test\n public void testCheckpointSuccessAfterFailure() throws Exception {\n final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n TestCheckpointStorage storage =\n new TestCheckpointStorage(\n new JobManagerCheckpointStorage(), sharedObjects, temporaryFolder);\n\n configure(env, storage);\n buildGraph(env);\n\n JobClient jobClient = env.executeAsync();\n JobID jobID = jobClient.getJobID();\n MiniCluster miniCluster = miniClusterResource.getMiniCluster();\n\n waitForJobStatus(jobClient, 
singletonList(RUNNING), fromNow(Duration.ofSeconds(30)));\n waitForAllTaskRunning(miniCluster, jobID, false);\n\n triggerFailingCheckpoint(jobID, TestException.class, miniCluster);\n\n miniCluster.triggerCheckpoint(jobID).get();\n }\n\n private void configure(StreamExecutionEnvironment env, TestCheckpointStorage storage) {\n \n env.enableCheckpointing(Long.MAX_VALUE, CheckpointingMode.EXACTLY_ONCE);\n\n env.getCheckpointConfig().setCheckpointStorage(storage);\n\n \n \n env.setStateBackend(new MockStateBackend(true));\n\n env.getCheckpointConfig().enableUnalignedCheckpoints();\n\n env.getCheckpointConfig().setAlignedCheckpointTimeout(Duration.ZERO); \n\n \n env.getCheckpointConfig().setTolerableCheckpointFailureNumber(Integer.MAX_VALUE);\n\n \n env.setParallelism(PARALLELISM);\n\n \n env.disableOperatorChaining();\n }\n\n \n\n private void triggerFailingCheckpoint(\n JobID jobID, Class expectedException, MiniCluster miniCluster)\n throws InterruptedException, ExecutionException {\n while (true) {\n Optional cpFailure =\n miniCluster\n .triggerCheckpoint(jobID)\n .thenApply(ign -> Optional.empty())\n .handle((ign, err) -> Optional.ofNullable(err))\n .get();\n if (!cpFailure.isPresent()) {\n Thread.sleep(50); \n } else if (isCausedBy(cpFailure.get(), expectedException)) {\n return;\n } else {\n rethrow(cpFailure.get());\n }\n }\n }\n\n private boolean isCausedBy(Throwable t, Class expectedException) {\n return findThrowable(t, SerializedThrowable.class)\n .flatMap(\n st -> {\n Throwable deser = st.deserializeError(getClass().getClassLoader());\n return findThrowable(deser, expectedException);\n })\n .isPresent();\n }\n\n private static class TestCheckpointStorage implements CheckpointStorage {\n private final CheckpointStorage delegate;\n private final SharedReference failOnCloseRef;\n private final SharedReference tempFolderRef;\n\n private TestCheckpointStorage(\n CheckpointStorage delegate,\n SharedObjects sharedObjects,\n TemporaryFolder tempFolder) {\n 
this.delegate = delegate;\n this.failOnCloseRef = sharedObjects.add(new AtomicBoolean(true));\n this.tempFolderRef = sharedObjects.add(tempFolder);\n }\n\n @Override\n public CheckpointStorageAccess createCheckpointStorage(JobID jobId) throws IOException {\n return new TestCheckpointStorageAccess(\n delegate.createCheckpointStorage(jobId),\n failOnCloseRef.get(),\n tempFolderRef.get().newFolder());\n }\n\n @Override\n public CompletedCheckpointStorageLocation resolveCheckpoint(String externalPointer)\n throws IOException {\n return delegate.resolveCheckpoint(externalPointer);\n }\n }\n\n private static class TestCheckpointStorageAccess implements CheckpointStorageAccess {\n private final CheckpointStorageAccess delegate;\n private final AtomicBoolean failOnClose;\n private final File path;\n\n public TestCheckpointStorageAccess(\n CheckpointStorageAccess delegate, AtomicBoolean failOnClose, File file) {\n this.delegate = delegate;\n this.failOnClose = failOnClose;\n this.path = file;\n }\n\n @Override\n public CheckpointStreamFactory resolveCheckpointStorageLocation(\n long checkpointId, CheckpointStorageLocationReference reference) {\n return ign -> new FailingOnceFsCheckpointOutputStream(path, 100, 0, failOnClose);\n }\n\n @Override\n public CheckpointStreamFactory.CheckpointStateOutputStream createTaskOwnedStateStream()\n throws IOException {\n return delegate.createTaskOwnedStateStream();\n }\n\n @Override\n public boolean supportsHighlyAvailableStorage() {\n return delegate.supportsHighlyAvailableStorage();\n }\n\n @Override\n public boolean hasDefaultSavepointLocation() {\n return delegate.hasDefaultSavepointLocation();\n }\n\n @Override\n public CompletedCheckpointStorageLocation resolveCheckpoint(String externalPointer)\n throws IOException {\n return delegate.resolveCheckpoint(externalPointer);\n }\n\n @Override\n public void initializeBaseLocationsForCheckpoint() throws IOException {\n delegate.initializeBaseLocationsForCheckpoint();\n }\n\n @Override\n 
public CheckpointStorageLocation initializeLocationForCheckpoint(long checkpointId)\n throws IOException {\n return delegate.initializeLocationForCheckpoint(checkpointId);\n }\n\n @Override\n public CheckpointStorageLocation initializeLocationForSavepoint(\n long checkpointId, @Nullable String externalLocationPointer) throws IOException {\n return delegate.initializeLocationForSavepoint(checkpointId, externalLocationPointer);\n }\n }\n\n private static class FailingOnceFsCheckpointOutputStream extends FsCheckpointStateOutputStream {\n private final AtomicBoolean failOnClose;\n private volatile boolean failedCloseAndGetHandle = false;\n\n public FailingOnceFsCheckpointOutputStream(\n File path, int bufferSize, int localStateThreshold, AtomicBoolean failOnClose)\n throws IOException {\n super(\n fromLocalFile(path.getAbsoluteFile()),\n FileSystem.get(path.toURI()),\n bufferSize,\n localStateThreshold);\n this.failOnClose = failOnClose;\n }\n\n \n @Override\n public StreamStateHandle closeAndGetHandle() throws IOException {\n if (failOnClose.get()) {\n failedCloseAndGetHandle = true;\n throw new TestException(\"failure from closeAndGetHandle\");\n } else {\n return super.closeAndGetHandle();\n }\n }\n\n \n @Override\n public void close() {\n if (failedCloseAndGetHandle && failOnClose.compareAndSet(true, false)) {\n throw new TestException(\"failure from close\");\n } else {\n super.close();\n }\n }\n }\n\n private static class TestException extends RuntimeException {\n public TestException(String message) {\n super(message);\n }\n }\n}", "context_after": "class UnalignedCheckpointFailureHandlingITCase {\n\n private static final int PARALLELISM = 2;\n\n @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Rule public final SharedObjects sharedObjects = SharedObjects.create();\n\n @Rule\n public final MiniClusterWithClientResource miniClusterResource =\n new MiniClusterWithClientResource(\n new MiniClusterResourceConfiguration.Builder()\n 
.setNumberTaskManagers(PARALLELISM)\n .setNumberSlotsPerTaskManager(1)\n .build());\n\n @Test\n public void testCheckpointSuccessAfterFailure() throws Exception {\n final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n TestCheckpointStorage storage =\n new TestCheckpointStorage(\n new JobManagerCheckpointStorage(), sharedObjects, temporaryFolder);\n\n configure(env, storage);\n buildGraph(env);\n\n JobClient jobClient = env.executeAsync();\n JobID jobID = jobClient.getJobID();\n MiniCluster miniCluster = miniClusterResource.getMiniCluster();\n\n waitForJobStatus(jobClient, singletonList(RUNNING), fromNow(Duration.ofSeconds(30)));\n waitForAllTaskRunning(miniCluster, jobID, false);\n\n triggerFailingCheckpoint(jobID, TestException.class, miniCluster);\n\n miniCluster.triggerCheckpoint(jobID).get();\n }\n\n private void configure(StreamExecutionEnvironment env, TestCheckpointStorage storage) {\n \n env.enableCheckpointing(Long.MAX_VALUE, CheckpointingMode.EXACTLY_ONCE);\n\n env.getCheckpointConfig().setCheckpointStorage(storage);\n\n \n \n env.setStateBackend(new MockStateBackend(true));\n\n env.getCheckpointConfig().enableUnalignedCheckpoints();\n\n env.getCheckpointConfig().setAlignedCheckpointTimeout(Duration.ZERO); \n\n \n env.getCheckpointConfig().setTolerableCheckpointFailureNumber(Integer.MAX_VALUE);\n\n \n env.setParallelism(PARALLELISM);\n\n \n env.disableOperatorChaining();\n }\n\n \n\n private void triggerFailingCheckpoint(\n JobID jobID, Class expectedException, MiniCluster miniCluster)\n throws InterruptedException, ExecutionException {\n while (true) {\n Optional cpFailure =\n miniCluster\n .triggerCheckpoint(jobID)\n .thenApply(ign -> Optional.empty())\n .handle((ign, err) -> Optional.ofNullable(err))\n .get();\n if (!cpFailure.isPresent()) {\n Thread.sleep(50); \n } else if (isCausedBy(cpFailure.get(), expectedException)) {\n return;\n } else {\n rethrow(cpFailure.get());\n }\n }\n }\n\n private boolean 
isCausedBy(Throwable t, Class expectedException) {\n return findThrowable(t, SerializedThrowable.class)\n .flatMap(\n st -> {\n Throwable deser = st.deserializeError(getClass().getClassLoader());\n return findThrowable(deser, expectedException);\n })\n .isPresent();\n }\n\n private static class TestCheckpointStorage implements CheckpointStorage {\n private final CheckpointStorage delegate;\n private final SharedReference failOnCloseRef;\n private final SharedReference tempFolderRef;\n\n private TestCheckpointStorage(\n CheckpointStorage delegate,\n SharedObjects sharedObjects,\n TemporaryFolder tempFolder) {\n this.delegate = delegate;\n this.failOnCloseRef = sharedObjects.add(new AtomicBoolean(true));\n this.tempFolderRef = sharedObjects.add(tempFolder);\n }\n\n @Override\n public CheckpointStorageAccess createCheckpointStorage(JobID jobId) throws IOException {\n return new TestCheckpointStorageAccess(\n delegate.createCheckpointStorage(jobId),\n failOnCloseRef.get(),\n tempFolderRef.get().newFolder());\n }\n\n @Override\n public CompletedCheckpointStorageLocation resolveCheckpoint(String externalPointer)\n throws IOException {\n return delegate.resolveCheckpoint(externalPointer);\n }\n }\n\n private static class TestCheckpointStorageAccess implements CheckpointStorageAccess {\n private final CheckpointStorageAccess delegate;\n private final AtomicBoolean failOnClose;\n private final File path;\n\n public TestCheckpointStorageAccess(\n CheckpointStorageAccess delegate, AtomicBoolean failOnClose, File file) {\n this.delegate = delegate;\n this.failOnClose = failOnClose;\n this.path = file;\n }\n\n @Override\n public CheckpointStreamFactory resolveCheckpointStorageLocation(\n long checkpointId, CheckpointStorageLocationReference reference) {\n return ign -> new FailingOnceFsCheckpointOutputStream(path, 100, 0, failOnClose);\n }\n\n @Override\n public CheckpointStreamFactory.CheckpointStateOutputStream createTaskOwnedStateStream()\n throws IOException {\n return 
delegate.createTaskOwnedStateStream();\n }\n\n @Override\n public boolean supportsHighlyAvailableStorage() {\n return delegate.supportsHighlyAvailableStorage();\n }\n\n @Override\n public boolean hasDefaultSavepointLocation() {\n return delegate.hasDefaultSavepointLocation();\n }\n\n @Override\n public CompletedCheckpointStorageLocation resolveCheckpoint(String externalPointer)\n throws IOException {\n return delegate.resolveCheckpoint(externalPointer);\n }\n\n @Override\n public void initializeBaseLocationsForCheckpoint() throws IOException {\n delegate.initializeBaseLocationsForCheckpoint();\n }\n\n @Override\n public CheckpointStorageLocation initializeLocationForCheckpoint(long checkpointId)\n throws IOException {\n return delegate.initializeLocationForCheckpoint(checkpointId);\n }\n\n @Override\n public CheckpointStorageLocation initializeLocationForSavepoint(\n long checkpointId, @Nullable String externalLocationPointer) throws IOException {\n return delegate.initializeLocationForSavepoint(checkpointId, externalLocationPointer);\n }\n }\n\n private static class FailingOnceFsCheckpointOutputStream extends FsCheckpointStateOutputStream {\n private final AtomicBoolean failOnClose;\n private volatile boolean failedCloseAndGetHandle = false;\n\n public FailingOnceFsCheckpointOutputStream(\n File path, int bufferSize, int localStateThreshold, AtomicBoolean failOnClose)\n throws IOException {\n super(\n fromLocalFile(path.getAbsoluteFile()),\n FileSystem.get(path.toURI()),\n bufferSize,\n localStateThreshold);\n this.failOnClose = failOnClose;\n }\n\n \n @Override\n public StreamStateHandle closeAndGetHandle() throws IOException {\n if (failOnClose.get()) {\n failedCloseAndGetHandle = true;\n throw new TestException(\"failure from closeAndGetHandle\");\n } else {\n return super.closeAndGetHandle();\n }\n }\n\n \n @Override\n public void close() {\n if (failedCloseAndGetHandle && failOnClose.compareAndSet(true, false)) {\n \n \n rethrow(new TestException(\"failure 
from close\"));\n } else {\n super.close();\n }\n }\n }\n\n private static class TestException extends IOException {\n public TestException(String message) {\n super(message);\n }\n }\n}" }, { "comment": "Yes, that sounds like a good split and would exactly help with that problem. The best way to handle this PR-wise would be: - create a JIRA for splitting the interface into something like and JM/TM side. - open PR for that change. - have this PR rebased, based on the interface split-PR. We should keep the cost/benefit ratio in mind. If you think rebasing would take you a lot of time (I think it should not) then just do it in this PR. Otherwise, I suggest the above strategy.", "method_body": "public CheckpointStorageLocation initializeLocationForCheckpoint(long checkpointId) throws IOException {\n\t\tcheckArgument(checkpointId >= 0);\n\n\t\t\n\t\tfinal Path checkpointDir = createCheckpointDirectory(checkpointsDirectory, checkpointId);\n\n\t\tif (!areDirectoriesCreated) {\n\t\t\tfileSystem.mkdirs(checkpointsDirectory);\n\t\t\tfileSystem.mkdirs(sharedStateDirectory);\n\t\t\tfileSystem.mkdirs(taskOwnedStateDirectory);\n\t\t\tareDirectoriesCreated = true;\n\t\t}\n\n\t\t\n\t\tfileSystem.mkdirs(checkpointDir);\n\n\t\treturn new FsCheckpointStorageLocation(\n\t\t\t\tfileSystem,\n\t\t\t\tcheckpointDir,\n\t\t\t\tsharedStateDirectory,\n\t\t\t\ttaskOwnedStateDirectory,\n\t\t\t\tCheckpointStorageLocationReference.getDefault(),\n\t\t\t\tfileSizeThreshold);\n\t}", "target_code": "fileSystem.mkdirs(sharedStateDirectory);", "method_body_after": "public CheckpointStorageLocation initializeLocationForCheckpoint(long checkpointId) throws IOException {\n\t\tcheckArgument(checkpointId >= 0, \"Illegal negative checkpoint id: %d.\", checkpointId);\n\t\tcheckArgument(baseLocationsInitialized, \"The base checkpoint location has not been initialized.\");\n\n\t\t\n\t\tfinal Path checkpointDir = createCheckpointDirectory(checkpointsDirectory, 
checkpointId);\n\n\t\t\n\t\tfileSystem.mkdirs(checkpointDir);\n\n\t\treturn new FsCheckpointStorageLocation(\n\t\t\t\tfileSystem,\n\t\t\t\tcheckpointDir,\n\t\t\t\tsharedStateDirectory,\n\t\t\t\ttaskOwnedStateDirectory,\n\t\t\t\tCheckpointStorageLocationReference.getDefault(),\n\t\t\t\tfileSizeThreshold,\n\t\t\t\twriteBufferSize);\n\t}", "context_before": "class FsCheckpointStorage extends AbstractFsCheckpointStorage {\n\n\tprivate final FileSystem fileSystem;\n\n\tprivate final Path checkpointsDirectory;\n\n\tprivate final Path sharedStateDirectory;\n\n\tprivate final Path taskOwnedStateDirectory;\n\n\tprivate final int fileSizeThreshold;\n\n\t/** Whether we already initialized checkpoint directories. */\n\tprivate transient boolean areDirectoriesCreated;\n\n\tpublic FsCheckpointStorage(\n\t\t\tPath checkpointBaseDirectory,\n\t\t\t@Nullable Path defaultSavepointDirectory,\n\t\t\tJobID jobId,\n\t\t\tint fileSizeThreshold) throws IOException {\n\n\t\tthis(checkpointBaseDirectory.getFileSystem(),\n\t\t\t\tcheckpointBaseDirectory,\n\t\t\t\tdefaultSavepointDirectory,\n\t\t\t\tjobId,\n\t\t\t\tfileSizeThreshold);\n\t}\n\n\tpublic FsCheckpointStorage(\n\t\t\tFileSystem fs,\n\t\t\tPath checkpointBaseDirectory,\n\t\t\t@Nullable Path defaultSavepointDirectory,\n\t\t\tJobID jobId,\n\t\t\tint fileSizeThreshold) throws IOException {\n\n\t\tsuper(jobId, defaultSavepointDirectory);\n\n\t\tcheckArgument(fileSizeThreshold >= 0);\n\n\t\tthis.fileSystem = checkNotNull(fs);\n\t\tthis.checkpointsDirectory = getCheckpointDirectoryForJob(checkpointBaseDirectory, jobId);\n\t\tthis.sharedStateDirectory = new Path(checkpointsDirectory, CHECKPOINT_SHARED_STATE_DIR);\n\t\tthis.taskOwnedStateDirectory = new Path(checkpointsDirectory, CHECKPOINT_TASK_OWNED_STATE_DIR);\n\t\tthis.fileSizeThreshold = fileSizeThreshold;\n\t}\n\n\t\n\n\tpublic Path getCheckpointsDirectory() {\n\t\treturn checkpointsDirectory;\n\t}\n\n\t\n\t\n\t\n\n\t@Override\n\tpublic boolean supportsHighlyAvailableStorage() 
{\n\t\treturn true;\n\t}\n\n\t@Override\n\t\n\n\t@Override\n\tpublic CheckpointStreamFactory resolveCheckpointStorageLocation(\n\t\t\tlong checkpointId,\n\t\t\tCheckpointStorageLocationReference reference) throws IOException {\n\n\t\tif (reference.isDefaultReference()) {\n\t\t\t\n\t\t\tfinal Path checkpointDir = createCheckpointDirectory(checkpointsDirectory, checkpointId);\n\n\t\t\treturn new FsCheckpointStorageLocation(\n\t\t\t\t\tfileSystem,\n\t\t\t\t\tcheckpointDir,\n\t\t\t\t\tsharedStateDirectory,\n\t\t\t\t\ttaskOwnedStateDirectory,\n\t\t\t\t\treference,\n\t\t\t\t\tfileSizeThreshold);\n\t\t}\n\t\telse {\n\t\t\t\n\t\t\tfinal Path path = decodePathFromReference(reference);\n\n\t\t\treturn new FsCheckpointStorageLocation(\n\t\t\t\t\tpath.getFileSystem(),\n\t\t\t\t\tpath,\n\t\t\t\t\tpath,\n\t\t\t\t\tpath,\n\t\t\t\t\treference,\n\t\t\t\t\tfileSizeThreshold);\n\t\t}\n\t}\n\n\t@Override\n\tpublic CheckpointStateOutputStream createTaskOwnedStateStream() throws IOException {\n\t\treturn new FsCheckpointStateOutputStream(\n\t\t\t\ttaskOwnedStateDirectory,\n\t\t\t\tfileSystem,\n\t\t\t\tFsCheckpointStreamFactory.DEFAULT_WRITE_BUFFER_SIZE,\n\t\t\t\tfileSizeThreshold);\n\t}\n\n\t@Override\n\tprotected CheckpointStorageLocation createSavepointLocation(FileSystem fs, Path location) throws IOException {\n\t\tfinal CheckpointStorageLocationReference reference = encodePathAsReference(location);\n\t\treturn new FsCheckpointStorageLocation(fs, location, location, location, reference, fileSizeThreshold);\n\t}\n}", "context_after": "class FsCheckpointStorage extends AbstractFsCheckpointStorage {\n\n\tprivate final FileSystem fileSystem;\n\n\tprivate final Path checkpointsDirectory;\n\n\tprivate final Path sharedStateDirectory;\n\n\tprivate final Path taskOwnedStateDirectory;\n\n\tprivate final int fileSizeThreshold;\n\n\tprivate final int writeBufferSize;\n\n\tprivate boolean baseLocationsInitialized = false;\n\n\tpublic FsCheckpointStorage(\n\t\t\tPath 
checkpointBaseDirectory,\n\t\t\t@Nullable Path defaultSavepointDirectory,\n\t\t\tJobID jobId,\n\t\t\tint fileSizeThreshold,\n\t\t\tint writeBufferSize) throws IOException {\n\n\t\tthis(checkpointBaseDirectory.getFileSystem(),\n\t\t\t\tcheckpointBaseDirectory,\n\t\t\t\tdefaultSavepointDirectory,\n\t\t\t\tjobId,\n\t\t\t\tfileSizeThreshold,\n\t\t\t\twriteBufferSize);\n\t}\n\n\tpublic FsCheckpointStorage(\n\t\t\tFileSystem fs,\n\t\t\tPath checkpointBaseDirectory,\n\t\t\t@Nullable Path defaultSavepointDirectory,\n\t\t\tJobID jobId,\n\t\t\tint fileSizeThreshold,\n\t\t\tint writeBufferSize) throws IOException {\n\n\t\tsuper(jobId, defaultSavepointDirectory);\n\n\t\tcheckArgument(fileSizeThreshold >= 0);\n\t\tcheckArgument(writeBufferSize >= 0);\n\n\t\tthis.fileSystem = checkNotNull(fs);\n\t\tthis.checkpointsDirectory = getCheckpointDirectoryForJob(checkpointBaseDirectory, jobId);\n\t\tthis.sharedStateDirectory = new Path(checkpointsDirectory, CHECKPOINT_SHARED_STATE_DIR);\n\t\tthis.taskOwnedStateDirectory = new Path(checkpointsDirectory, CHECKPOINT_TASK_OWNED_STATE_DIR);\n\t\tthis.fileSizeThreshold = fileSizeThreshold;\n\t\tthis.writeBufferSize = writeBufferSize;\n\t}\n\n\t\n\n\t@VisibleForTesting\n\tPath getCheckpointsDirectory() {\n\t\treturn checkpointsDirectory;\n\t}\n\n\t\n\t\n\t\n\n\t@Override\n\tpublic boolean supportsHighlyAvailableStorage() {\n\t\treturn true;\n\t}\n\n\t@Override\n\tpublic void initializeBaseLocations() throws IOException {\n\t\tfileSystem.mkdirs(sharedStateDirectory);\n\t\tfileSystem.mkdirs(taskOwnedStateDirectory);\n\t\tbaseLocationsInitialized = true;\n\t}\n\n\t@Override\n\t\n\n\t@Override\n\tpublic CheckpointStreamFactory resolveCheckpointStorageLocation(\n\t\t\tlong checkpointId,\n\t\t\tCheckpointStorageLocationReference reference) throws IOException {\n\n\t\tif (reference.isDefaultReference()) {\n\t\t\t\n\t\t\tfinal Path checkpointDir = createCheckpointDirectory(checkpointsDirectory, checkpointId);\n\n\t\t\treturn new 
FsCheckpointStorageLocation(\n\t\t\t\t\tfileSystem,\n\t\t\t\t\tcheckpointDir,\n\t\t\t\t\tsharedStateDirectory,\n\t\t\t\t\ttaskOwnedStateDirectory,\n\t\t\t\t\treference,\n\t\t\t\t\tfileSizeThreshold,\n\t\t\t\t\twriteBufferSize);\n\t\t}\n\t\telse {\n\t\t\t\n\t\t\tfinal Path path = decodePathFromReference(reference);\n\n\t\t\treturn new FsCheckpointStorageLocation(\n\t\t\t\t\tpath.getFileSystem(),\n\t\t\t\t\tpath,\n\t\t\t\t\tpath,\n\t\t\t\t\tpath,\n\t\t\t\t\treference,\n\t\t\t\t\tfileSizeThreshold,\n\t\t\t\t\twriteBufferSize);\n\t\t}\n\t}\n\n\t@Override\n\tpublic CheckpointStateOutputStream createTaskOwnedStateStream() {\n\t\treturn new FsCheckpointStateOutputStream(\n\t\t\t\ttaskOwnedStateDirectory,\n\t\t\t\tfileSystem,\n\t\t\t\twriteBufferSize,\n\t\t\t\tfileSizeThreshold);\n\t}\n\n\t@Override\n\tprotected CheckpointStorageLocation createSavepointLocation(FileSystem fs, Path location) {\n\t\tfinal CheckpointStorageLocationReference reference = encodePathAsReference(location);\n\t\treturn new FsCheckpointStorageLocation(fs, location, location, location, reference, fileSizeThreshold, writeBufferSize);\n\t}\n}" }, { "comment": "As I understood this only adds native libraries of the building module likewise we need to add libraries from imports and transitive dependencies as well.", "method_body": "private static void assembleExecutable(BLangPackage bLangPackage, Path project) {\n try {\n final Path target = project.resolve(ProjectDirConstants.TARGET_DIR_NAME);\n final Path bin = target.resolve(ProjectDirConstants.BIN_DIR_NAME);\n final Path jarCache = target.resolve(ProjectDirConstants.CACHES_DIR_NAME)\n .resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME);\n String moduleJarName = bLangPackage.packageID.name.value\n + ProjectDirConstants.BLANG_COMPILED_JAR_EXT;\n String execJarName = bLangPackage.packageID.name.value + ProjectDirConstants.EXEC_SUFFIX\n + ProjectDirConstants.BLANG_COMPILED_JAR_EXT;\n \n Path uberJar = bin.resolve(execJarName);\n Path moduleJar = jarCache\n 
.resolve(bLangPackage.packageID.orgName.value)\n .resolve(bLangPackage.packageID.name.value)\n .resolve(bLangPackage.packageID.version.value)\n .resolve(moduleJarName);\n\n \n if (bLangPackage.symbol.entryPointExists) {\n \n Files.createDirectories(bin);\n\n Files.copy(moduleJar, uberJar, StandardCopyOption.REPLACE_EXISTING);\n \n\n \n for (BPackageSymbol importz : bLangPackage.symbol.imports) {\n Path importJar = findImportJarPath(importz, project);\n\n if (importJar != null && Files.exists(importJar)) {\n copyFromJarToJar(importJar, uberJar);\n }\n }\n\n \n String baloName = getFileName(bLangPackage.packageID.name.value);\n Path fullPathToBalo = project.resolve(ProjectDirConstants.TARGET_DIR_NAME).\n resolve(ProjectDirConstants.TARGET_BALO_DIRECTORY).resolve(baloName);\n\n String destination = extractJar(fullPathToBalo.toString());\n\n if (Files.exists(Paths.get(destination).resolve(ProjectDirConstants.BALO_PLATFORM_LIB_DIR_NAME))) {\n try (Stream walk = Files.walk(Paths.get(destination)\n .resolve(ProjectDirConstants.BALO_PLATFORM_LIB_DIR_NAME))) {\n\n List result = walk.filter(Files::isRegularFile)\n .map(x -> x.toString()).collect(Collectors.toList());\n\n result.forEach(lib -> {\n try {\n copyFromJarToJar(Paths.get(lib), uberJar);\n } catch (Exception e) {\n throw new BLangCompilerException(\"Unable to create the executable :\" +\n e.getMessage());\n }\n });\n } catch (IOException e) {\n throw new BLangCompilerException(\"Unable to create the executable :\" + e.getMessage());\n }\n }\n\n }\n \n \n \n outStream.println(project.relativize(uberJar).toString());\n \n } catch (IOException e) {\n throw new BLangCompilerException(\"Unable to create the executable :\" + e.getMessage());\n }\n }", "target_code": "String baloName = getFileName(bLangPackage.packageID.name.value);", "method_body_after": "private static void assembleExecutable(BLangPackage bLangPackage, Path project) {\n try {\n final Path target = project.resolve(ProjectDirConstants.TARGET_DIR_NAME);\n 
final Path bin = target.resolve(ProjectDirConstants.BIN_DIR_NAME);\n final Path jarCache = target.resolve(ProjectDirConstants.CACHES_DIR_NAME)\n .resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME);\n String moduleJarName = bLangPackage.packageID.name.value\n + ProjectDirConstants.BLANG_COMPILED_JAR_EXT;\n String execJarName = bLangPackage.packageID.name.value + ProjectDirConstants.EXEC_SUFFIX\n + ProjectDirConstants.BLANG_COMPILED_JAR_EXT;\n \n Path uberJar = bin.resolve(execJarName);\n Path moduleJar = jarCache\n .resolve(bLangPackage.packageID.orgName.value)\n .resolve(bLangPackage.packageID.name.value)\n .resolve(bLangPackage.packageID.version.value)\n .resolve(moduleJarName);\n\n \n if (bLangPackage.symbol.entryPointExists) {\n \n Files.createDirectories(bin);\n\n Files.copy(moduleJar, uberJar, StandardCopyOption.REPLACE_EXISTING);\n \n\n \n for (BPackageSymbol importz : bLangPackage.symbol.imports) {\n Path importJar = findImportJarPath(importz, project);\n\n if (importJar != null && Files.exists(importJar)) {\n copyFromJarToJar(importJar, uberJar);\n }\n }\n\n \n String baloName = getFileName(bLangPackage.packageID.name.value);\n Path fullPathToBalo = project.resolve(ProjectDirConstants.TARGET_DIR_NAME).\n resolve(ProjectDirConstants.TARGET_BALO_DIRECTORY).resolve(baloName);\n\n String destination = extractJar(fullPathToBalo.toString());\n\n if (Files.exists(Paths.get(destination).resolve(ProjectDirConstants.BALO_PLATFORM_LIB_DIR_NAME))) {\n try (Stream walk = Files.walk(Paths.get(destination)\n .resolve(ProjectDirConstants.BALO_PLATFORM_LIB_DIR_NAME))) {\n\n List result = walk.filter(Files::isRegularFile)\n .map(x -> x.toString()).collect(Collectors.toList());\n\n result.forEach(lib -> {\n try {\n copyFromJarToJar(Paths.get(lib), uberJar);\n } catch (Exception e) {\n throw new BLangCompilerException(\"Unable to create the executable :\" +\n e.getMessage());\n }\n });\n } catch (IOException e) {\n throw new BLangCompilerException(\"Unable to create the 
executable :\" + e.getMessage());\n }\n }\n\n }\n \n \n \n outStream.println(project.relativize(uberJar).toString());\n \n } catch (IOException e) {\n throw new BLangCompilerException(\"Unable to create the executable :\" + e.getMessage());\n }\n }", "context_before": "class BuilderUtils {\n private static final String BALLERINA_HOME = \"BALLERINA_HOME\";\n private static PrintStream outStream = System.out;\n\n private static ModuleFileWriter moduleFileWriter;\n private static BIRFileWriter birFileWriter;\n private static LockFileWriter lockFileWriter;\n private static Manifest manifest;\n\n public static void compileWithTestsAndWrite(Path sourceRootPath,\n String packagePath,\n String targetPath,\n boolean buildCompiledPkg,\n boolean offline,\n boolean lockEnabled,\n boolean skipTests,\n boolean enableExperimentalFeatures,\n boolean siddhiRuntimeEnabled,\n boolean jvmTarget) {\n CompilerContext context = getCompilerContext(sourceRootPath, jvmTarget, buildCompiledPkg, offline,\n lockEnabled, skipTests, enableExperimentalFeatures, siddhiRuntimeEnabled);\n\n Compiler compiler = Compiler.getInstance(context);\n BLangPackage bLangPackage = compiler.build(packagePath);\n\n if (skipTests) {\n outStream.println();\n compiler.write(bLangPackage, targetPath);\n } else {\n runTests(compiler, sourceRootPath, Collections.singletonList(bLangPackage));\n compiler.write(bLangPackage, targetPath);\n }\n }\n\n public static void compileWithTestsAndWrite(Path sourceRootPath,\n String packageName,\n String targetPath,\n boolean buildCompiledPkg,\n boolean offline,\n boolean lockEnabled,\n boolean skiptests,\n boolean enableExperimentalFeatures,\n boolean siddhiRuntimeEnabled,\n boolean jvmTarget,\n boolean dumpBIR,\n boolean genExecutables) {\n CompilerContext context = getCompilerContext(sourceRootPath, jvmTarget, buildCompiledPkg, offline,\n lockEnabled, skiptests, enableExperimentalFeatures, siddhiRuntimeEnabled);\n\n Compiler compiler = Compiler.getInstance(context);\n 
BLangPackage bLangPackage = compiler.build(packageName);\n boolean isSingleFile = packageName.endsWith(ProjectDirConstants.BLANG_SOURCE_EXT);\n\n try {\n \n Path targetDirectory = Files.createTempDirectory(\"ballerina-compile\").toAbsolutePath();\n String balHome = Objects.requireNonNull(System.getProperty(\"ballerina.home\"),\n \"ballerina.home is not set\");\n\n String targetDir = Files.isDirectory(Paths.get(targetPath)) ? targetPath : \".\";\n\n BootstrapRunner.createClassLoaders(bLangPackage, Paths.get(balHome).resolve(\"bir-cache\"),\n targetDirectory, Optional.of(Paths.get(targetDir)), false);\n\n \n \n if (bLangPackage.symbol.entryPointExists && !isSingleFile) {\n outStream.println();\n outStream.println(\"Generating Executables\");\n assembleExecutable(bLangPackage, sourceRootPath);\n } else {\n if (!isSingleFile) {\n throw new BLangCompilerException(\"package `\" + packageName + \"` do not have an entry point\");\n }\n }\n \n ServiceLoader processorServiceLoader = ServiceLoader.load(CompilerPlugin.class);\n processorServiceLoader.forEach(plugin -> {\n String execJarName;\n Path execFilePath;\n if (!isSingleFile) {\n execJarName = bLangPackage.packageID.name.value + ProjectDirConstants.EXEC_SUFFIX +\n ProjectDirConstants.BLANG_COMPILED_JAR_EXT;\n execFilePath = sourceRootPath\n .resolve(ProjectDirConstants.TARGET_DIR_NAME)\n .resolve(ProjectDirConstants.BIN_DIR_NAME)\n .resolve(execJarName);\n } else {\n execFilePath = sourceRootPath.resolve(packageName.replaceAll(\".bal\", \"\") +\n ProjectDirConstants.BLANG_COMPILED_JAR_EXT);\n }\n\n plugin.codeGenerated(bLangPackage.packageID, execFilePath);\n });\n \n } catch (IOException e) {\n throw new BLangCompilerException(\"error invoking jballerina backend\", e);\n }\n }\n\n\n public static void compileWithTestsAndWrite(Path sourceRootPath, boolean offline, boolean lockEnabled,\n boolean skiptests, boolean enableExperimentalFeatures,\n boolean siddhiRuntimeEnabled, boolean jvmTarget, boolean dumpBir,\n boolean 
genExecutables) {\n CompilerPhase compilerPhase = jvmTarget ? CompilerPhase.BIR_GEN : CompilerPhase.CODE_GEN;\n CompilerContext context = new CompilerContext();\n CompilerOptions options = CompilerOptions.getInstance(context);\n options.put(PROJECT_DIR, sourceRootPath.toString());\n options.put(OFFLINE, Boolean.toString(offline));\n options.put(COMPILER_PHASE, compilerPhase.toString());\n options.put(LOCK_ENABLED, Boolean.toString(lockEnabled));\n options.put(SKIP_TESTS, Boolean.toString(skiptests));\n options.put(TEST_ENABLED, \"true\");\n options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExperimentalFeatures));\n options.put(SIDDHI_RUNTIME_ENABLED, Boolean.toString(siddhiRuntimeEnabled));\n Compiler compiler = Compiler.getInstance(context);\n List packages = compiler.build();\n\n prepareTargetDirectory(sourceRootPath);\n \n \n \n if (jvmTarget) {\n outStream.println();\n outStream.println(\"Generating artifacts\");\n \n generateModuleArtafacts(packages, context);\n\n try {\n generateJars(packages, sourceRootPath);\n } catch (IOException e) {\n throw new BLangCompilerException(\"error invoking jballerina backend\", e);\n }\n\n \n List entryPackages = packages.stream().filter(p -> p.symbol.entryPointExists)\n .collect(Collectors.toList());\n if (genExecutables && !entryPackages.isEmpty()) {\n outStream.println();\n outStream.println(\"Generating executables\");\n entryPackages.forEach(p -> assembleExecutable(p, sourceRootPath));\n }\n\n \n ServiceLoader processorServiceLoader = ServiceLoader.load(CompilerPlugin.class);\n for (BLangPackage p: packages) {\n processorServiceLoader.forEach(plugin -> {\n String execJarName = p.packageID.name.value + ProjectDirConstants.EXEC_SUFFIX\n + ProjectDirConstants.BLANG_COMPILED_JAR_EXT;\n Path execFilePath = sourceRootPath\n .resolve(ProjectDirConstants.TARGET_DIR_NAME)\n .resolve(ProjectDirConstants.BIN_DIR_NAME)\n .resolve(execJarName);\n plugin.codeGenerated(p.packageID, execFilePath);\n });\n }\n return;\n 
}\n\n\n if (skiptests) {\n if (packages.size() == 0) {\n throw new BLangCompilerException(\"no ballerina source files found to compile\");\n }\n outStream.println();\n compiler.write(packages);\n } else {\n if (packages.size() == 0) {\n throw new BLangCompilerException(\"no ballerina source files found to compile\");\n }\n runTests(compiler, sourceRootPath, packages);\n compiler.write(packages);\n }\n }\n\n private static void generateModuleArtafacts(List packages, CompilerContext context) {\n \n \n \n moduleFileWriter = ModuleFileWriter.getInstance(context);\n birFileWriter = BIRFileWriter.getInstance(context);\n \n lockFileWriter = LockFileWriter.getInstance(context);\n lockFileWriter.writeLockFile(ManifestProcessor.getInstance(context).getManifest());\n packages.forEach(moduleFileWriter::write);\n packages.forEach(birFileWriter::write);\n packages.forEach(bLangPackage -> lockFileWriter.addEntryPkg(bLangPackage.symbol));\n manifest = ManifestProcessor.getInstance(context).getManifest();\n }\n\n\n /**\n * Generate jars for given package.\n *\n * @param packages package\n * @param sourceRoot source root\n * @throws IOException for IO errors\n */\n public static void generateJars(List packages, Path sourceRoot) throws IOException {\n \n \n \n Path projectBIRcache = sourceRoot.resolve(ProjectDirConstants.TARGET_DIR_NAME)\n .resolve(ProjectDirConstants.CACHES_DIR_NAME)\n .resolve(ProjectDirConstants.BIR_CACHE_DIR_NAME);\n \n Path homeBIRCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.BIR_CACHE_DIR_NAME);\n \n Path systemBIRCache = Paths.get(System.getProperty(BALLERINA_INSTALL_DIR_PROP)).resolve(\"bir-cache\");\n\n \n \n Path projectJARcache = sourceRoot.resolve(ProjectDirConstants.TARGET_DIR_NAME)\n .resolve(ProjectDirConstants.CACHES_DIR_NAME)\n .resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME);\n \n \n\n for (BLangPackage bpackage : packages) {\n Path moduleFragment = Paths.get(bpackage.packageID.orgName.value,\n 
bpackage.packageID.name.value, bpackage.packageID.version.value);\n\n \n \n \n writeImportJar(bpackage.symbol.imports, sourceRoot,\n projectBIRcache.toString(), homeBIRCache.toString(), systemBIRCache.toString());\n \n Files.createDirectories(projectJARcache.resolve(moduleFragment));\n Path entryBir = projectBIRcache.resolve(moduleFragment)\n .resolve(bpackage.packageID.name.value + ProjectDirConstants.BLANG_COMPILED_PKG_BIR_EXT);\n Path jarOutput = projectJARcache.resolve(moduleFragment)\n .resolve(bpackage.packageID.name.value + ProjectDirConstants.BLANG_COMPILED_JAR_EXT);\n BootstrapRunner.generateJarBinary(entryBir.toString(), jarOutput.toString(), false,\n projectBIRcache.toString(), homeBIRCache.toString(), systemBIRCache.toString());\n }\n }\n\n private static void writeImportJar(List imports, Path sourceRoot, String... reps) {\n for (BPackageSymbol bimport : imports) {\n PackageID id = bimport.pkgID;\n Path projectJarCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n Path homeJarCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n Path projectBirCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.BIR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n Path homeBirCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.BIR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n\n try {\n if (id.orgName.value.equals(\"ballerina\") || id.orgName.value.equals(\"ballerinax\")) {\n continue;\n }\n Path jarCache;\n Path birCache;\n \n \n \n if (ProjectDirs.isModuleExist(sourceRoot, id.name.value)) {\n jarCache = projectJarCache;\n birCache = projectBirCache;\n } else {\n jarCache = homeJarCache;\n 
birCache = homeBirCache;\n }\n Path jarFile = jarCache.resolve(id.name.value + ProjectDirConstants.BLANG_COMPILED_JAR_EXT);\n Path birFile = birCache.resolve(id.name.value + BLANG_COMPILED_PKG_BIR_EXT);\n if (!Files.exists(jarFile)) {\n Files.createDirectories(jarCache);\n BootstrapRunner.generateJarBinary(birFile.toString(), jarFile.toString(), false,\n reps);\n }\n writeImportJar(bimport.imports, sourceRoot);\n } catch (IOException e) {\n String msg = \"error writing the compiled module(jar) of '\" +\n id.name.value + \"' to '\" + homeJarCache + \"': \" + e.getMessage();\n throw new BLangCompilerException(msg, e);\n }\n }\n }\n\n /**\n * Run tests in the build.\n *\n * @param compiler compiler instance\n * @param sourceRootPath source root path\n * @param packageList list of compiled packages\n */\n private static void runTests(Compiler compiler, Path sourceRootPath, List packageList) {\n Map programFileMap = new HashMap<>();\n \n \n \n packageList.stream().filter(bLangPackage -> !bLangPackage.packageID.getName().equals(Names.DEFAULT_PACKAGE))\n .forEach(bLangPackage -> {\n CompiledBinaryFile.ProgramFile programFile;\n if (bLangPackage.containsTestablePkg()) {\n programFile = compiler.getExecutableProgram(bLangPackage.getTestablePkg());\n } else {\n \n \n \n \n \n programFile = compiler.getExecutableProgram(bLangPackage);\n }\n\n programFileMap.put(bLangPackage, programFile);\n });\n\n if (programFileMap.size() > 0) {\n TesterinaUtils.executeTests(sourceRootPath, programFileMap);\n }\n }\n\n private static CompilerContext getCompilerContext(Path sourceRootPath,\n boolean jvmTarget,\n boolean buildCompiledPkg,\n boolean offline,\n boolean lockEnabled,\n boolean skipTests,\n boolean enableExperimentalFeatures,\n boolean siddhiRuntimeEnabled) {\n CompilerPhase compilerPhase = jvmTarget ? 
CompilerPhase.BIR_GEN : CompilerPhase.CODE_GEN;\n CompilerContext context = new CompilerContext();\n CompilerOptions options = CompilerOptions.getInstance(context);\n options.put(PROJECT_DIR, sourceRootPath.toString());\n options.put(COMPILER_PHASE, compilerPhase.toString());\n options.put(BUILD_COMPILED_MODULE, Boolean.toString(buildCompiledPkg));\n options.put(OFFLINE, Boolean.toString(offline));\n options.put(LOCK_ENABLED, Boolean.toString(lockEnabled));\n options.put(SKIP_TESTS, Boolean.toString(skipTests));\n options.put(TEST_ENABLED, Boolean.toString(true));\n options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExperimentalFeatures));\n options.put(SIDDHI_RUNTIME_ENABLED, Boolean.toString(siddhiRuntimeEnabled));\n return context;\n }\n\n /**\n * Prepare target directory before the compile.\n *\n * @param sourceRoot source root of the ballerina file or project\n * @return target path\n */\n private static Path prepareTargetDirectory(Path sourceRoot) {\n \n Path target;\n if (ProjectDirs.isProject(sourceRoot)) {\n \n target = sourceRoot.resolve(ProjectDirConstants.TARGET_DIR_NAME);\n \n if (!Files.exists(target)) {\n try {\n Files.createDirectory(target);\n } catch (IOException e) {\n throw new BLangCompilerException(\"unable to create target directory\");\n }\n }\n createCacheDirectory(target);\n } else {\n \n try {\n target = Files.createTempDirectory(\"b7a-compiler\");\n } catch (IOException e) {\n throw new BLangCompilerException(\"unable to create target directory\");\n }\n createCacheDirectory(target);\n }\n return target;\n }\n\n /**\n * Prepare cache directory before the compile.\n *\n * @param target source root of the ballerina file or project\n * @return target path\n */\n private static void createCacheDirectory(Path target) {\n Path cacheDir = target.resolve(ProjectDirConstants.CACHES_DIR_NAME);\n if (!Files.exists(cacheDir)) {\n try {\n Files.createDirectory(cacheDir);\n } catch (IOException e) {\n throw new 
BLangCompilerException(\"unable to create target cache directory\");\n }\n }\n Path birCacheDir = cacheDir.resolve(ProjectDirConstants.BIR_CACHE_DIR_NAME);\n if (!Files.exists(birCacheDir)) {\n try {\n Files.createDirectory(birCacheDir);\n } catch (IOException e) {\n throw new BLangCompilerException(\"unable to create target bir cache directory\");\n }\n }\n }\n\n \n \n private static String getFileName(String moduleName) {\n \n String versionNo = manifest.getProject().getVersion();\n \n String platform = manifest.getTargetPlatform();\n \n \n return moduleName + \"-\"\n + ProgramFileConstants.IMPLEMENTATION_VERSION + \"-\"\n + platform + \"-\"\n + versionNo\n + ProjectDirConstants.BLANG_COMPILED_PKG_BINARY_EXT;\n }\n\n private static String extractJar(String jarFileName) {\n JarFile jar = null;\n try {\n jar = new JarFile(jarFileName);\n java.util.Enumeration enumEntries = jar.entries();\n File destFile = File.createTempFile(\"temp-\" + jarFileName, Long.toString(System.nanoTime()));\n if (!(destFile.delete())) {\n throw new BLangCompilerException(\"Could not delete temp file: \" + destFile.getAbsolutePath());\n }\n if (!(destFile.mkdir())) {\n throw new BLangCompilerException(\"Could not create temp directory: \"\n + destFile.getAbsolutePath());\n }\n while (enumEntries.hasMoreElements()) {\n JarEntry file = (JarEntry) enumEntries.nextElement();\n if (file.getName().contains(ProjectDirConstants.BALO_PLATFORM_LIB_DIR_NAME)) {\n File f = new File(destFile.getPath() + File.separator + file.getName());\n if (file.isDirectory()) { \n f.mkdir();\n continue;\n }\n InputStream is = jar.getInputStream(file); \n FileOutputStream fos = new java.io.FileOutputStream(f);\n while (is.available() > 0) { \n fos.write(is.read());\n }\n fos.close();\n is.close();\n }\n }\n jar.close();\n return destFile.getPath();\n } catch (IOException e) {\n throw new BLangCompilerException(\"Unable to create the executable :\" + e.getMessage());\n }\n }\n\n private static Path 
findImportJarPath(BPackageSymbol importz, Path project) {\n \n PackageID id = importz.pkgID;\n Path projectJarCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n Path homeJarCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n \n if (id.orgName.value.equals(\"ballerina\") || id.orgName.value.equals(\"ballerinax\")) {\n return null;\n }\n \n if (ProjectDirs.isModuleExist(project, id.name.value)) {\n \n return projectJarCache.resolve(id.name.value + ProjectDirConstants.BLANG_COMPILED_JAR_EXT);\n } else {\n \n return homeJarCache.resolve(id.name.value + ProjectDirConstants.BLANG_COMPILED_JAR_EXT);\n }\n \n }\n\n\n private static void copyFromJarToJar(Path fromJar, Path toJar) throws IOException {\n URI uberJarUri = URI.create(\"jar:\" + toJar.toUri().toString());\n \n try (FileSystem toFs = FileSystems.newFileSystem(uberJarUri, Collections.emptyMap())) {\n Path to = toFs.getRootDirectories().iterator().next();\n URI moduleJarUri = URI.create(\"jar:\" + fromJar.toUri().toString());\n \n try (FileSystem fromFs = FileSystems.newFileSystem(moduleJarUri, Collections.emptyMap())) {\n Path from = fromFs.getRootDirectories().iterator().next();\n \n Files.walkFileTree(from, new Copy(from, to));\n }\n }\n }\n\n\n static class Copy extends SimpleFileVisitor {\n private Path fromPath;\n private Path toPath;\n private StandardCopyOption copyOption;\n\n\n public Copy(Path fromPath, Path toPath, StandardCopyOption copyOption) {\n this.fromPath = fromPath;\n this.toPath = toPath;\n this.copyOption = copyOption;\n }\n\n public Copy(Path fromPath, Path toPath) {\n this(fromPath, toPath, StandardCopyOption.REPLACE_EXISTING);\n }\n\n @Override\n public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)\n throws IOException 
{\n\n Path targetPath = toPath.resolve(fromPath.relativize(dir).toString());\n if (!Files.exists(targetPath)) {\n Files.createDirectory(targetPath);\n }\n return FileVisitResult.CONTINUE;\n }\n\n @Override\n public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)\n throws IOException {\n Path toFile = toPath.resolve(fromPath.relativize(file).toString());\n if (!Files.exists(toFile)) {\n Files.copy(file, toFile, copyOption);\n }\n return FileVisitResult.CONTINUE;\n }\n }\n}", "context_after": "class BuilderUtils {\n private static final String BALLERINA_HOME = \"BALLERINA_HOME\";\n private static PrintStream outStream = System.out;\n\n private static ModuleFileWriter moduleFileWriter;\n private static BIRFileWriter birFileWriter;\n private static LockFileWriter lockFileWriter;\n private static Manifest manifest;\n\n public static void compileWithTestsAndWrite(Path sourceRootPath,\n String packagePath,\n String targetPath,\n boolean buildCompiledPkg,\n boolean offline,\n boolean lockEnabled,\n boolean skipTests,\n boolean enableExperimentalFeatures,\n boolean siddhiRuntimeEnabled,\n boolean jvmTarget) {\n CompilerContext context = getCompilerContext(sourceRootPath, jvmTarget, buildCompiledPkg, offline,\n lockEnabled, skipTests, enableExperimentalFeatures, siddhiRuntimeEnabled);\n\n Compiler compiler = Compiler.getInstance(context);\n BLangPackage bLangPackage = compiler.build(packagePath);\n\n if (skipTests) {\n outStream.println();\n compiler.write(bLangPackage, targetPath);\n } else {\n runTests(compiler, sourceRootPath, Collections.singletonList(bLangPackage));\n compiler.write(bLangPackage, targetPath);\n }\n }\n\n public static void compileWithTestsAndWrite(Path sourceRootPath,\n String packageName,\n String targetPath,\n boolean buildCompiledPkg,\n boolean offline,\n boolean lockEnabled,\n boolean skiptests,\n boolean enableExperimentalFeatures,\n boolean siddhiRuntimeEnabled,\n boolean jvmTarget,\n boolean dumpBIR,\n boolean genExecutables) {\n 
CompilerContext context = getCompilerContext(sourceRootPath, jvmTarget, buildCompiledPkg, offline,\n lockEnabled, skiptests, enableExperimentalFeatures, siddhiRuntimeEnabled);\n\n Compiler compiler = Compiler.getInstance(context);\n BLangPackage bLangPackage = compiler.build(packageName);\n boolean isSingleFile = packageName.endsWith(ProjectDirConstants.BLANG_SOURCE_EXT);\n\n try {\n \n Path targetDirectory = Files.createTempDirectory(\"ballerina-compile\").toAbsolutePath();\n String balHome = Objects.requireNonNull(System.getProperty(\"ballerina.home\"),\n \"ballerina.home is not set\");\n\n String targetDir = Files.isDirectory(Paths.get(targetPath)) ? targetPath : \".\";\n\n BootstrapRunner.createClassLoaders(bLangPackage, Paths.get(balHome).resolve(\"bir-cache\"),\n targetDirectory, Optional.of(Paths.get(targetDir)), false);\n\n \n \n if (bLangPackage.symbol.entryPointExists && !isSingleFile) {\n outStream.println();\n outStream.println(\"Generating Executables\");\n assembleExecutable(bLangPackage, sourceRootPath);\n } else {\n if (!isSingleFile) {\n throw new BLangCompilerException(\"package `\" + packageName + \"` do not have an entry point\");\n }\n }\n \n ServiceLoader processorServiceLoader = ServiceLoader.load(CompilerPlugin.class);\n processorServiceLoader.forEach(plugin -> {\n String execJarName;\n Path execFilePath;\n if (!isSingleFile) {\n execJarName = bLangPackage.packageID.name.value + ProjectDirConstants.EXEC_SUFFIX +\n ProjectDirConstants.BLANG_COMPILED_JAR_EXT;\n execFilePath = sourceRootPath\n .resolve(ProjectDirConstants.TARGET_DIR_NAME)\n .resolve(ProjectDirConstants.BIN_DIR_NAME)\n .resolve(execJarName);\n } else {\n execFilePath = sourceRootPath.resolve(packageName.replaceAll(\".bal\", \"\") +\n ProjectDirConstants.BLANG_COMPILED_JAR_EXT);\n }\n\n plugin.codeGenerated(bLangPackage.packageID, execFilePath);\n });\n \n } catch (IOException e) {\n throw new BLangCompilerException(\"error invoking jballerina backend\", e);\n }\n }\n\n\n public 
static void compileWithTestsAndWrite(Path sourceRootPath, boolean offline, boolean lockEnabled,\n boolean skiptests, boolean enableExperimentalFeatures,\n boolean siddhiRuntimeEnabled, boolean jvmTarget, boolean dumpBir,\n boolean genExecutables) {\n CompilerPhase compilerPhase = jvmTarget ? CompilerPhase.BIR_GEN : CompilerPhase.CODE_GEN;\n CompilerContext context = new CompilerContext();\n CompilerOptions options = CompilerOptions.getInstance(context);\n options.put(PROJECT_DIR, sourceRootPath.toString());\n options.put(OFFLINE, Boolean.toString(offline));\n options.put(COMPILER_PHASE, compilerPhase.toString());\n options.put(LOCK_ENABLED, Boolean.toString(lockEnabled));\n options.put(SKIP_TESTS, Boolean.toString(skiptests));\n options.put(TEST_ENABLED, \"true\");\n options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExperimentalFeatures));\n options.put(SIDDHI_RUNTIME_ENABLED, Boolean.toString(siddhiRuntimeEnabled));\n Compiler compiler = Compiler.getInstance(context);\n List packages = compiler.build();\n\n prepareTargetDirectory(sourceRootPath);\n \n \n \n if (jvmTarget) {\n outStream.println();\n outStream.println(\"Generating artifacts\");\n \n generateModuleArtafacts(packages, context);\n\n try {\n generateJars(packages, sourceRootPath);\n } catch (IOException e) {\n throw new BLangCompilerException(\"error invoking jballerina backend\", e);\n }\n\n \n List entryPackages = packages.stream().filter(p -> p.symbol.entryPointExists)\n .collect(Collectors.toList());\n if (genExecutables && !entryPackages.isEmpty()) {\n outStream.println();\n outStream.println(\"Generating executables\");\n entryPackages.forEach(p -> assembleExecutable(p, sourceRootPath));\n }\n\n \n ServiceLoader processorServiceLoader = ServiceLoader.load(CompilerPlugin.class);\n for (BLangPackage p: packages) {\n processorServiceLoader.forEach(plugin -> {\n String execJarName = p.packageID.name.value + ProjectDirConstants.EXEC_SUFFIX\n + 
ProjectDirConstants.BLANG_COMPILED_JAR_EXT;\n Path execFilePath = sourceRootPath\n .resolve(ProjectDirConstants.TARGET_DIR_NAME)\n .resolve(ProjectDirConstants.BIN_DIR_NAME)\n .resolve(execJarName);\n plugin.codeGenerated(p.packageID, execFilePath);\n });\n }\n return;\n }\n\n\n if (skiptests) {\n if (packages.size() == 0) {\n throw new BLangCompilerException(\"no ballerina source files found to compile\");\n }\n outStream.println();\n compiler.write(packages);\n } else {\n if (packages.size() == 0) {\n throw new BLangCompilerException(\"no ballerina source files found to compile\");\n }\n runTests(compiler, sourceRootPath, packages);\n compiler.write(packages);\n }\n }\n\n private static void generateModuleArtafacts(List packages, CompilerContext context) {\n \n \n \n moduleFileWriter = ModuleFileWriter.getInstance(context);\n birFileWriter = BIRFileWriter.getInstance(context);\n \n lockFileWriter = LockFileWriter.getInstance(context);\n lockFileWriter.writeLockFile(ManifestProcessor.getInstance(context).getManifest());\n packages.forEach(moduleFileWriter::write);\n packages.forEach(birFileWriter::write);\n packages.forEach(bLangPackage -> lockFileWriter.addEntryPkg(bLangPackage.symbol));\n manifest = ManifestProcessor.getInstance(context).getManifest();\n }\n\n\n /**\n * Generate jars for given package.\n *\n * @param packages package\n * @param sourceRoot source root\n * @throws IOException for IO errors\n */\n public static void generateJars(List packages, Path sourceRoot) throws IOException {\n \n \n \n Path projectBIRcache = sourceRoot.resolve(ProjectDirConstants.TARGET_DIR_NAME)\n .resolve(ProjectDirConstants.CACHES_DIR_NAME)\n .resolve(ProjectDirConstants.BIR_CACHE_DIR_NAME);\n \n Path homeBIRCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.BIR_CACHE_DIR_NAME);\n \n Path systemBIRCache = Paths.get(System.getProperty(BALLERINA_INSTALL_DIR_PROP)).resolve(\"bir-cache\");\n\n \n \n Path projectJARcache = 
sourceRoot.resolve(ProjectDirConstants.TARGET_DIR_NAME)\n .resolve(ProjectDirConstants.CACHES_DIR_NAME)\n .resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME);\n \n \n\n for (BLangPackage bpackage : packages) {\n Path moduleFragment = Paths.get(bpackage.packageID.orgName.value,\n bpackage.packageID.name.value, bpackage.packageID.version.value);\n\n \n \n \n writeImportJar(bpackage.symbol.imports, sourceRoot,\n projectBIRcache.toString(), homeBIRCache.toString(), systemBIRCache.toString());\n \n Files.createDirectories(projectJARcache.resolve(moduleFragment));\n Path entryBir = projectBIRcache.resolve(moduleFragment)\n .resolve(bpackage.packageID.name.value + ProjectDirConstants.BLANG_COMPILED_PKG_BIR_EXT);\n Path jarOutput = projectJARcache.resolve(moduleFragment)\n .resolve(bpackage.packageID.name.value + ProjectDirConstants.BLANG_COMPILED_JAR_EXT);\n BootstrapRunner.generateJarBinary(entryBir.toString(), jarOutput.toString(), false,\n projectBIRcache.toString(), homeBIRCache.toString(), systemBIRCache.toString());\n }\n }\n\n private static void writeImportJar(List imports, Path sourceRoot, String... 
reps) {\n for (BPackageSymbol bimport : imports) {\n PackageID id = bimport.pkgID;\n Path projectJarCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n Path homeJarCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n Path projectBirCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.BIR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n Path homeBirCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.BIR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n\n try {\n if (id.orgName.value.equals(\"ballerina\") || id.orgName.value.equals(\"ballerinax\")) {\n continue;\n }\n Path jarCache;\n Path birCache;\n \n \n \n if (ProjectDirs.isModuleExist(sourceRoot, id.name.value)) {\n jarCache = projectJarCache;\n birCache = projectBirCache;\n } else {\n jarCache = homeJarCache;\n birCache = homeBirCache;\n }\n Path jarFile = jarCache.resolve(id.name.value + ProjectDirConstants.BLANG_COMPILED_JAR_EXT);\n Path birFile = birCache.resolve(id.name.value + BLANG_COMPILED_PKG_BIR_EXT);\n if (!Files.exists(jarFile)) {\n Files.createDirectories(jarCache);\n BootstrapRunner.generateJarBinary(birFile.toString(), jarFile.toString(), false,\n reps);\n }\n writeImportJar(bimport.imports, sourceRoot);\n } catch (IOException e) {\n String msg = \"error writing the compiled module(jar) of '\" +\n id.name.value + \"' to '\" + homeJarCache + \"': \" + e.getMessage();\n throw new BLangCompilerException(msg, e);\n }\n }\n }\n\n /**\n * Run tests in the build.\n *\n * @param compiler compiler instance\n * @param sourceRootPath source root path\n * @param packageList list of compiled packages\n */\n private static void 
runTests(Compiler compiler, Path sourceRootPath, List packageList) {\n Map programFileMap = new HashMap<>();\n \n \n \n packageList.stream().filter(bLangPackage -> !bLangPackage.packageID.getName().equals(Names.DEFAULT_PACKAGE))\n .forEach(bLangPackage -> {\n CompiledBinaryFile.ProgramFile programFile;\n if (bLangPackage.containsTestablePkg()) {\n programFile = compiler.getExecutableProgram(bLangPackage.getTestablePkg());\n } else {\n \n \n \n \n \n programFile = compiler.getExecutableProgram(bLangPackage);\n }\n\n programFileMap.put(bLangPackage, programFile);\n });\n\n if (programFileMap.size() > 0) {\n TesterinaUtils.executeTests(sourceRootPath, programFileMap);\n }\n }\n\n private static CompilerContext getCompilerContext(Path sourceRootPath,\n boolean jvmTarget,\n boolean buildCompiledPkg,\n boolean offline,\n boolean lockEnabled,\n boolean skipTests,\n boolean enableExperimentalFeatures,\n boolean siddhiRuntimeEnabled) {\n CompilerPhase compilerPhase = jvmTarget ? CompilerPhase.BIR_GEN : CompilerPhase.CODE_GEN;\n CompilerContext context = new CompilerContext();\n CompilerOptions options = CompilerOptions.getInstance(context);\n options.put(PROJECT_DIR, sourceRootPath.toString());\n options.put(COMPILER_PHASE, compilerPhase.toString());\n options.put(BUILD_COMPILED_MODULE, Boolean.toString(buildCompiledPkg));\n options.put(OFFLINE, Boolean.toString(offline));\n options.put(LOCK_ENABLED, Boolean.toString(lockEnabled));\n options.put(SKIP_TESTS, Boolean.toString(skipTests));\n options.put(TEST_ENABLED, Boolean.toString(true));\n options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExperimentalFeatures));\n options.put(SIDDHI_RUNTIME_ENABLED, Boolean.toString(siddhiRuntimeEnabled));\n return context;\n }\n\n /**\n * Prepare target directory before the compile.\n *\n * @param sourceRoot source root of the ballerina file or project\n * @return target path\n */\n private static Path prepareTargetDirectory(Path sourceRoot) {\n \n Path target;\n if 
(ProjectDirs.isProject(sourceRoot)) {\n \n target = sourceRoot.resolve(ProjectDirConstants.TARGET_DIR_NAME);\n \n if (!Files.exists(target)) {\n try {\n Files.createDirectory(target);\n } catch (IOException e) {\n throw new BLangCompilerException(\"unable to create target directory\");\n }\n }\n createCacheDirectory(target);\n } else {\n \n try {\n target = Files.createTempDirectory(\"b7a-compiler\");\n } catch (IOException e) {\n throw new BLangCompilerException(\"unable to create target directory\");\n }\n createCacheDirectory(target);\n }\n return target;\n }\n\n /**\n * Prepare cache directory before the compile.\n *\n * @param target source root of the ballerina file or project\n * @return target path\n */\n private static void createCacheDirectory(Path target) {\n Path cacheDir = target.resolve(ProjectDirConstants.CACHES_DIR_NAME);\n if (!Files.exists(cacheDir)) {\n try {\n Files.createDirectory(cacheDir);\n } catch (IOException e) {\n throw new BLangCompilerException(\"unable to create target cache directory\");\n }\n }\n Path birCacheDir = cacheDir.resolve(ProjectDirConstants.BIR_CACHE_DIR_NAME);\n if (!Files.exists(birCacheDir)) {\n try {\n Files.createDirectory(birCacheDir);\n } catch (IOException e) {\n throw new BLangCompilerException(\"unable to create target bir cache directory\");\n }\n }\n }\n\n \n \n private static String getFileName(String moduleName) {\n \n String versionNo = manifest.getProject().getVersion();\n \n String platform = manifest.getTargetPlatform();\n \n \n return moduleName + \"-\"\n + ProgramFileConstants.IMPLEMENTATION_VERSION + \"-\"\n + platform + \"-\"\n + versionNo\n + ProjectDirConstants.BLANG_COMPILED_PKG_BINARY_EXT;\n }\n\n private static String extractJar(String jarFileName) {\n JarFile jar = null;\n try {\n jar = new JarFile(jarFileName);\n java.util.Enumeration enumEntries = jar.entries();\n File destFile = File.createTempFile(\"temp-\" + jarFileName, Long.toString(System.nanoTime()));\n if (!(destFile.delete())) {\n 
throw new BLangCompilerException(\"Could not delete temp file: \" + destFile.getAbsolutePath());\n }\n if (!(destFile.mkdir())) {\n throw new BLangCompilerException(\"Could not create temp directory: \"\n + destFile.getAbsolutePath());\n }\n while (enumEntries.hasMoreElements()) {\n JarEntry file = (JarEntry) enumEntries.nextElement();\n if (file.getName().contains(ProjectDirConstants.BALO_PLATFORM_LIB_DIR_NAME)) {\n File f = new File(destFile.getPath() + File.separator + file.getName());\n if (file.isDirectory()) { \n f.mkdir();\n continue;\n }\n InputStream is = jar.getInputStream(file); \n FileOutputStream fos = new java.io.FileOutputStream(f);\n while (is.available() > 0) { \n fos.write(is.read());\n }\n fos.close();\n is.close();\n }\n }\n jar.close();\n return destFile.getPath();\n } catch (IOException e) {\n throw new BLangCompilerException(\"Unable to create the executable :\" + e.getMessage());\n }\n }\n\n private static Path findImportJarPath(BPackageSymbol importz, Path project) {\n \n PackageID id = importz.pkgID;\n Path projectJarCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n Path homeJarCache = RepoUtils.createAndGetHomeReposPath().resolve(ProjectDirConstants.JAR_CACHE_DIR_NAME)\n .resolve(id.orgName.value).resolve(id.name.value).resolve(id.version.value);\n \n if (id.orgName.value.equals(\"ballerina\") || id.orgName.value.equals(\"ballerinax\")) {\n return null;\n }\n \n if (ProjectDirs.isModuleExist(project, id.name.value)) {\n \n return projectJarCache.resolve(id.name.value + ProjectDirConstants.BLANG_COMPILED_JAR_EXT);\n } else {\n \n return homeJarCache.resolve(id.name.value + ProjectDirConstants.BLANG_COMPILED_JAR_EXT);\n }\n \n }\n\n\n private static void copyFromJarToJar(Path fromJar, Path toJar) throws IOException {\n URI uberJarUri = URI.create(\"jar:\" + toJar.toUri().toString());\n \n try (FileSystem toFs = 
FileSystems.newFileSystem(uberJarUri, Collections.emptyMap())) {\n Path to = toFs.getRootDirectories().iterator().next();\n URI moduleJarUri = URI.create(\"jar:\" + fromJar.toUri().toString());\n \n try (FileSystem fromFs = FileSystems.newFileSystem(moduleJarUri, Collections.emptyMap())) {\n Path from = fromFs.getRootDirectories().iterator().next();\n \n Files.walkFileTree(from, new Copy(from, to));\n }\n }\n }\n\n\n static class Copy extends SimpleFileVisitor {\n private Path fromPath;\n private Path toPath;\n private StandardCopyOption copyOption;\n\n\n public Copy(Path fromPath, Path toPath, StandardCopyOption copyOption) {\n this.fromPath = fromPath;\n this.toPath = toPath;\n this.copyOption = copyOption;\n }\n\n public Copy(Path fromPath, Path toPath) {\n this(fromPath, toPath, StandardCopyOption.REPLACE_EXISTING);\n }\n\n @Override\n public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)\n throws IOException {\n\n Path targetPath = toPath.resolve(fromPath.relativize(dir).toString());\n if (!Files.exists(targetPath)) {\n Files.createDirectory(targetPath);\n }\n return FileVisitResult.CONTINUE;\n }\n\n @Override\n public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)\n throws IOException {\n Path toFile = toPath.resolve(fromPath.relativize(file).toString());\n if (!Files.exists(toFile)) {\n Files.copy(file, toFile, copyOption);\n }\n return FileVisitResult.CONTINUE;\n }\n }\n}" }, { "comment": "Ok, so it does fail, but it looks like this method does not throw on exit code != 0, so should be ok...", "method_body": "private void pushMetricsToContainer(NodeAgentContext context, List metrics) {\n StringBuilder params = new StringBuilder();\n try {\n for (DimensionMetrics dimensionMetrics : metrics) {\n params.append(dimensionMetrics.toSecretAgentReport());\n }\n String wrappedMetrics = \"s:\" + params.toString();\n\n \n runPushMetricsCommand(context, wrappedMetrics, true);\n runPushMetricsCommand(context, wrappedMetrics, 
false);\n } catch (DockerExecTimeoutException | JsonProcessingException e) {\n context.log(logger, LogLevel.WARNING, \"Failed to push metrics to container\", e);\n }\n }", "target_code": "runPushMetricsCommand(context, wrappedMetrics, true);", "method_body_after": "private void pushMetricsToContainer(NodeAgentContext context, List metrics) {\n StringBuilder params = new StringBuilder();\n try {\n for (DimensionMetrics dimensionMetrics : metrics) {\n params.append(dimensionMetrics.toSecretAgentReport());\n }\n } catch (JsonProcessingException e) {\n \n context.log(logger, LogLevel.WARNING, \"Failed to wrap metrics in secret agent report\", e);\n return;\n }\n String wrappedMetrics = \"s:\" + params.toString();\n\n \n runPushMetricsCommand(context, wrappedMetrics, true);\n runPushMetricsCommand(context, wrappedMetrics, false);\n }", "context_before": "class NodeAgentImpl implements NodeAgent {\n \n private static final long BYTES_IN_GB = 1_000_000_000L;\n\n private static final Logger logger = Logger.getLogger(NodeAgentImpl.class.getName());\n\n private final AtomicBoolean terminated = new AtomicBoolean(false);\n private boolean hasResumedNode = false;\n private boolean hasStartedServices = true;\n\n private final NodeAgentContextSupplier contextSupplier;\n private final NodeRepository nodeRepository;\n private final Orchestrator orchestrator;\n private final DockerOperations dockerOperations;\n private final StorageMaintainer storageMaintainer;\n private final Optional credentialsMaintainer;\n private final Optional aclMaintainer;\n private final Optional healthChecker;\n\n private final DoubleFlag containerCpuCap;\n\n private int numberOfUnhandledException = 0;\n private DockerImage imageBeingDownloaded = null;\n\n private long currentRebootGeneration = 0;\n private Optional currentRestartGeneration = Optional.empty();\n\n private final Thread loopThread;\n\n\n /**\n * ABSENT means container is definitely absent - A container that was absent will not suddenly 
appear without\n * NodeAgent explicitly starting it.\n * STARTING state is set just before we attempt to start a container, if successful we move to the next state.\n * Otherwise we can't be certain. A container that was running a minute ago may no longer be running without\n * NodeAgent doing anything (container could have crashed). Therefore we always have to ask docker daemon\n * to get updated state of the container.\n */\n enum ContainerState {\n ABSENT,\n STARTING,\n UNKNOWN\n }\n\n private ContainerState containerState = UNKNOWN;\n\n private NodeSpec lastNode = null;\n private CpuUsageReporter lastCpuMetric = new CpuUsageReporter();\n\n \n public NodeAgentImpl(\n final NodeAgentContextSupplier contextSupplier,\n final NodeRepository nodeRepository,\n final Orchestrator orchestrator,\n final DockerOperations dockerOperations,\n final StorageMaintainer storageMaintainer,\n final FlagSource flagSource,\n final Optional credentialsMaintainer,\n final Optional aclMaintainer,\n final Optional healthChecker) {\n this.contextSupplier = contextSupplier;\n this.nodeRepository = nodeRepository;\n this.orchestrator = orchestrator;\n this.dockerOperations = dockerOperations;\n this.storageMaintainer = storageMaintainer;\n this.credentialsMaintainer = credentialsMaintainer;\n this.aclMaintainer = aclMaintainer;\n this.healthChecker = healthChecker;\n\n this.containerCpuCap = Flags.CONTAINER_CPU_CAP.bindTo(flagSource)\n .with(FetchVector.Dimension.HOSTNAME, contextSupplier.currentContext().node().getHostname());\n\n this.loopThread = new Thread(() -> {\n while (!terminated.get()) {\n try {\n NodeAgentContext context = contextSupplier.nextContext();\n converge(context);\n } catch (InterruptedException ignored) { }\n }\n });\n this.loopThread.setName(\"tick-\" + contextSupplier.currentContext().hostname());\n }\n\n @Override\n public void start() {\n loopThread.start();\n }\n\n @Override\n public void stopForRemoval() {\n if (!terminated.compareAndSet(false, true)) {\n throw 
new RuntimeException(\"Can not re-stop a node agent.\");\n }\n contextSupplier.interrupt();\n\n do {\n try {\n loopThread.join();\n } catch (InterruptedException ignored) { }\n } while (loopThread.isAlive());\n\n contextSupplier.currentContext().log(logger, \"Stopped\");\n }\n\n void startServicesIfNeeded(NodeAgentContext context) {\n if (!hasStartedServices) {\n context.log(logger, \"Starting services\");\n dockerOperations.startServices(context);\n hasStartedServices = true;\n }\n }\n\n void resumeNodeIfNeeded(NodeAgentContext context) {\n if (!hasResumedNode) {\n context.log(logger, LogLevel.DEBUG, \"Starting optional node program resume command\");\n dockerOperations.resumeNode(context);\n hasResumedNode = true;\n }\n }\n\n private void updateNodeRepoWithCurrentAttributes(NodeAgentContext context) {\n final NodeAttributes currentNodeAttributes = new NodeAttributes();\n final NodeAttributes newNodeAttributes = new NodeAttributes();\n\n if (context.node().getWantedRestartGeneration().isPresent() &&\n !Objects.equals(context.node().getCurrentRestartGeneration(), currentRestartGeneration)) {\n currentNodeAttributes.withRestartGeneration(context.node().getCurrentRestartGeneration());\n newNodeAttributes.withRestartGeneration(currentRestartGeneration);\n }\n\n if (!Objects.equals(context.node().getCurrentRebootGeneration(), currentRebootGeneration)) {\n currentNodeAttributes.withRebootGeneration(context.node().getCurrentRebootGeneration());\n newNodeAttributes.withRebootGeneration(currentRebootGeneration);\n }\n\n Optional actualDockerImage = context.node().getWantedDockerImage().filter(n -> containerState == UNKNOWN);\n if (!Objects.equals(context.node().getCurrentDockerImage(), actualDockerImage)) {\n DockerImage currentImage = context.node().getCurrentDockerImage().orElse(DockerImage.EMPTY);\n DockerImage newImage = actualDockerImage.orElse(DockerImage.EMPTY);\n\n currentNodeAttributes.withDockerImage(currentImage);\n 
currentNodeAttributes.withVespaVersion(currentImage.tagAsVersion());\n newNodeAttributes.withDockerImage(newImage);\n newNodeAttributes.withVespaVersion(newImage.tagAsVersion());\n }\n\n publishStateToNodeRepoIfChanged(context, currentNodeAttributes, newNodeAttributes);\n }\n\n private void publishStateToNodeRepoIfChanged(NodeAgentContext context, NodeAttributes currentAttributes, NodeAttributes newAttributes) {\n if (!currentAttributes.equals(newAttributes)) {\n context.log(logger, \"Publishing new set of attributes to node repo: %s -> %s\",\n currentAttributes, newAttributes);\n nodeRepository.updateNodeAttributes(context.hostname().value(), newAttributes);\n }\n }\n\n private void startContainer(NodeAgentContext context) {\n ContainerData containerData = createContainerData(context);\n dockerOperations.createContainer(context, containerData, getContainerResources(context.node()));\n dockerOperations.startContainer(context);\n lastCpuMetric = new CpuUsageReporter();\n\n hasStartedServices = true; \n hasResumedNode = false;\n context.log(logger, \"Container successfully started, new containerState is \" + containerState);\n }\n\n private Optional removeContainerIfNeededUpdateContainerState(\n NodeAgentContext context, Optional existingContainer) {\n if (existingContainer.isPresent()) {\n Optional reason = shouldRemoveContainer(context.node(), existingContainer.get());\n if (reason.isPresent()) {\n removeContainer(context, existingContainer.get(), reason.get(), false);\n return Optional.empty();\n }\n\n shouldRestartServices(context.node()).ifPresent(restartReason -> {\n context.log(logger, \"Will restart services: \" + restartReason);\n restartServices(context, existingContainer.get());\n currentRestartGeneration = context.node().getWantedRestartGeneration();\n });\n }\n\n return existingContainer;\n }\n\n private Optional shouldRestartServices(NodeSpec node) {\n if (!node.getWantedRestartGeneration().isPresent()) return Optional.empty();\n\n \n if 
(currentRestartGeneration.get() < node.getWantedRestartGeneration().get()) {\n return Optional.of(\"Restart requested - wanted restart generation has been bumped: \"\n + currentRestartGeneration.get() + \" -> \" + node.getWantedRestartGeneration().get());\n }\n return Optional.empty();\n }\n\n private void restartServices(NodeAgentContext context, Container existingContainer) {\n if (existingContainer.state.isRunning() && context.node().getState() == NodeState.active) {\n context.log(logger, \"Restarting services\");\n \n orchestratorSuspendNode(context);\n dockerOperations.restartVespa(context);\n }\n }\n\n private void stopServices() {\n NodeAgentContext context = contextSupplier.currentContext();\n context.log(logger, \"Stopping services\");\n if (containerState == ABSENT) return;\n try {\n hasStartedServices = hasResumedNode = false;\n dockerOperations.stopServices(context);\n } catch (ContainerNotFoundException e) {\n containerState = ABSENT;\n }\n }\n\n @Override\n public void stopForHostSuspension() {\n NodeAgentContext context = contextSupplier.currentContext();\n getContainer(context).ifPresent(container -> removeContainer(context, container, \"suspending host\", true));\n }\n\n public void suspend() {\n NodeAgentContext context = contextSupplier.currentContext();\n context.log(logger, \"Suspending services on node\");\n if (containerState == ABSENT) return;\n try {\n hasResumedNode = false;\n dockerOperations.suspendNode(context);\n } catch (ContainerNotFoundException e) {\n containerState = ABSENT;\n } catch (RuntimeException e) {\n \n \n context.log(logger, LogLevel.WARNING, \"Failed trying to suspend container\", e);\n }\n }\n\n private Optional shouldRemoveContainer(NodeSpec node, Container existingContainer) {\n final NodeState nodeState = node.getState();\n if (nodeState == NodeState.dirty || nodeState == NodeState.provisioned) {\n return Optional.of(\"Node in state \" + nodeState + \", container should no longer be running\");\n }\n if 
(node.getWantedDockerImage().isPresent() && !node.getWantedDockerImage().get().equals(existingContainer.image)) {\n return Optional.of(\"The node is supposed to run a new Docker image: \"\n + existingContainer.image.asString() + \" -> \" + node.getWantedDockerImage().get().asString());\n }\n if (!existingContainer.state.isRunning()) {\n return Optional.of(\"Container no longer running\");\n }\n\n if (currentRebootGeneration < node.getWantedRebootGeneration()) {\n return Optional.of(String.format(\"Container reboot wanted. Current: %d, Wanted: %d\",\n currentRebootGeneration, node.getWantedRebootGeneration()));\n }\n\n \n \n \n ContainerResources wantedContainerResources = getContainerResources(node);\n if (!wantedContainerResources.equalsMemory(existingContainer.resources)) {\n return Optional.of(\"Container should be running with different memory allocation, wanted: \" +\n wantedContainerResources.toStringMemory() + \", actual: \" + existingContainer.resources.toStringMemory());\n }\n\n if (containerState == STARTING) return Optional.of(\"Container failed to start\");\n return Optional.empty();\n }\n\n private void removeContainer(NodeAgentContext context, Container existingContainer, String reason, boolean alreadySuspended) {\n context.log(logger, \"Will remove container: \" + reason);\n\n if (existingContainer.state.isRunning()) {\n if (!alreadySuspended) {\n orchestratorSuspendNode(context);\n }\n\n try {\n if (context.node().getState() != NodeState.dirty) {\n suspend();\n }\n stopServices();\n } catch (Exception e) {\n context.log(logger, LogLevel.WARNING, \"Failed stopping services, ignoring\", e);\n }\n }\n\n storageMaintainer.handleCoreDumpsForContainer(context, Optional.of(existingContainer));\n dockerOperations.removeContainer(context, existingContainer);\n currentRebootGeneration = context.node().getWantedRebootGeneration();\n containerState = ABSENT;\n context.log(logger, \"Container successfully removed, new containerState is \" + containerState);\n 
}\n\n\n private void updateContainerIfNeeded(NodeAgentContext context, Container existingContainer) {\n ContainerResources wantedContainerResources = getContainerResources(context.node());\n if (wantedContainerResources.equalsCpu(existingContainer.resources)) return;\n context.log(logger, \"Container should be running with different CPU allocation, wanted: %s, current: %s\",\n wantedContainerResources.toStringCpu(), existingContainer.resources.toStringCpu());\n\n orchestratorSuspendNode(context);\n\n dockerOperations.updateContainer(context, wantedContainerResources);\n }\n\n private ContainerResources getContainerResources(NodeSpec node) {\n double cpuCap = node.getOwner()\n .map(NodeOwner::asApplicationId)\n .map(appId -> containerCpuCap.with(FetchVector.Dimension.APPLICATION_ID, appId.serializedForm()))\n .orElse(containerCpuCap)\n .value() * node.getMinCpuCores();\n\n return ContainerResources.from(cpuCap, node.getMinCpuCores(), node.getMinMainMemoryAvailableGb());\n }\n\n\n private void scheduleDownLoadIfNeeded(NodeSpec node, Optional container) {\n if (node.getWantedDockerImage().equals(container.map(c -> c.image))) return;\n\n if (dockerOperations.pullImageAsyncIfNeeded(node.getWantedDockerImage().get())) {\n imageBeingDownloaded = node.getWantedDockerImage().get();\n } else if (imageBeingDownloaded != null) { \n imageBeingDownloaded = null;\n }\n }\n\n public void converge(NodeAgentContext context) {\n try {\n doConverge(context);\n } catch (OrchestratorException | ConvergenceException e) {\n context.log(logger, e.getMessage());\n } catch (ContainerNotFoundException e) {\n containerState = ABSENT;\n context.log(logger, LogLevel.WARNING, \"Container unexpectedly gone, resetting containerState to \" + containerState);\n } catch (DockerException e) {\n numberOfUnhandledException++;\n context.log(logger, LogLevel.ERROR, \"Caught a DockerException\", e);\n } catch (Throwable e) {\n numberOfUnhandledException++;\n context.log(logger, LogLevel.ERROR, \"Unhandled 
exception, ignoring\", e);\n }\n }\n\n \n void doConverge(NodeAgentContext context) {\n NodeSpec node = context.node();\n Optional container = getContainer(context);\n if (!node.equals(lastNode)) {\n logChangesToNodeSpec(context, lastNode, node);\n\n \n if (currentRebootGeneration < node.getCurrentRebootGeneration())\n currentRebootGeneration = node.getCurrentRebootGeneration();\n\n \n \n if (currentRestartGeneration.isPresent() != node.getCurrentRestartGeneration().isPresent() ||\n currentRestartGeneration.map(current -> current < node.getCurrentRestartGeneration().get()).orElse(false))\n currentRestartGeneration = node.getCurrentRestartGeneration();\n\n \n \n if (container.map(c -> c.state.isRunning()).orElse(false)) {\n storageMaintainer.writeMetricsConfig(context);\n }\n\n lastNode = node;\n }\n\n switch (node.getState()) {\n case ready:\n case reserved:\n case parked:\n case failed:\n removeContainerIfNeededUpdateContainerState(context, container);\n updateNodeRepoWithCurrentAttributes(context);\n break;\n case active:\n storageMaintainer.handleCoreDumpsForContainer(context, container);\n\n storageMaintainer.getDiskUsageFor(context)\n .map(diskUsage -> (double) diskUsage / BYTES_IN_GB / node.getMinDiskAvailableGb())\n .filter(diskUtil -> diskUtil >= 0.8)\n .ifPresent(diskUtil -> storageMaintainer.removeOldFilesFromNode(context));\n\n scheduleDownLoadIfNeeded(node, container);\n if (isDownloadingImage()) {\n context.log(logger, \"Waiting for image to download \" + imageBeingDownloaded.asString());\n return;\n }\n container = removeContainerIfNeededUpdateContainerState(context, container);\n credentialsMaintainer.ifPresent(maintainer -> maintainer.converge(context));\n if (! 
container.isPresent()) {\n containerState = STARTING;\n startContainer(context);\n containerState = UNKNOWN;\n } else {\n updateContainerIfNeeded(context, container.get());\n }\n\n aclMaintainer.ifPresent(maintainer -> maintainer.converge(context));\n startServicesIfNeeded(context);\n resumeNodeIfNeeded(context);\n healthChecker.ifPresent(checker -> checker.verifyHealth(context));\n\n \n \n \n \n \n \n \n \n \n \n updateNodeRepoWithCurrentAttributes(context);\n context.log(logger, \"Call resume against Orchestrator\");\n orchestrator.resume(context.hostname().value());\n break;\n case inactive:\n removeContainerIfNeededUpdateContainerState(context, container);\n updateNodeRepoWithCurrentAttributes(context);\n break;\n case provisioned:\n nodeRepository.setNodeState(context.hostname().value(), NodeState.dirty);\n break;\n case dirty:\n removeContainerIfNeededUpdateContainerState(context, container);\n context.log(logger, \"State is \" + node.getState() + \", will delete application storage and mark node as ready\");\n credentialsMaintainer.ifPresent(maintainer -> maintainer.clearCredentials(context));\n storageMaintainer.archiveNodeStorage(context);\n updateNodeRepoWithCurrentAttributes(context);\n nodeRepository.setNodeState(context.hostname().value(), NodeState.ready);\n break;\n default:\n throw new RuntimeException(\"UNKNOWN STATE \" + node.getState().name());\n }\n }\n\n private static void logChangesToNodeSpec(NodeAgentContext context, NodeSpec lastNode, NodeSpec node) {\n StringBuilder builder = new StringBuilder();\n appendIfDifferent(builder, \"state\", lastNode, node, NodeSpec::getState);\n if (builder.length() > 0) {\n context.log(logger, LogLevel.INFO, \"Changes to node: \" + builder.toString());\n }\n }\n\n private static String fieldDescription(T value) {\n return value == null ? 
\"[absent]\" : value.toString();\n }\n\n private static void appendIfDifferent(StringBuilder builder, String name, NodeSpec oldNode, NodeSpec newNode, Function getter) {\n T oldValue = oldNode == null ? null : getter.apply(oldNode);\n T newValue = getter.apply(newNode);\n if (!Objects.equals(oldValue, newValue)) {\n if (builder.length() > 0) {\n builder.append(\", \");\n }\n builder.append(name).append(\" \").append(fieldDescription(oldValue)).append(\" -> \").append(fieldDescription(newValue));\n }\n }\n\n @SuppressWarnings(\"unchecked\")\n public void updateContainerNodeMetrics() {\n if (containerState != UNKNOWN) return;\n final NodeAgentContext context = contextSupplier.currentContext();\n final NodeSpec node = context.node();\n\n Optional containerStats = dockerOperations.getContainerStats(context);\n if (!containerStats.isPresent()) return;\n\n Dimensions.Builder dimensionsBuilder = new Dimensions.Builder()\n .add(\"host\", context.hostname().value())\n .add(\"role\", SecretAgentCheckConfig.nodeTypeToRole(context.nodeType()))\n .add(\"state\", node.getState().toString());\n node.getParentHostname().ifPresent(parent -> dimensionsBuilder.add(\"parentHostname\", parent));\n node.getAllowedToBeDown().ifPresent(allowed ->\n dimensionsBuilder.add(\"orchestratorState\", allowed ? 
\"ALLOWED_TO_BE_DOWN\" : \"NO_REMARKS\"));\n Dimensions dimensions = dimensionsBuilder.build();\n\n ContainerStats stats = containerStats.get();\n final String APP = MetricReceiverWrapper.APPLICATION_NODE;\n final int totalNumCpuCores = stats.getCpuStats().getOnlineCpus();\n final long cpuContainerKernelTime = stats.getCpuStats().getUsageInKernelMode();\n final long cpuContainerTotalTime = stats.getCpuStats().getTotalUsage();\n final long cpuSystemTotalTime = stats.getCpuStats().getSystemCpuUsage();\n final long memoryTotalBytes = stats.getMemoryStats().getLimit();\n final long memoryTotalBytesUsage = stats.getMemoryStats().getUsage();\n final long memoryTotalBytesCache = stats.getMemoryStats().getCache();\n final long diskTotalBytes = (long) (node.getMinDiskAvailableGb() * BYTES_IN_GB);\n final Optional diskTotalBytesUsed = storageMaintainer.getDiskUsageFor(context);\n\n lastCpuMetric.updateCpuDeltas(cpuSystemTotalTime, cpuContainerTotalTime, cpuContainerKernelTime);\n\n \n final double allocatedCpuRatio = node.getMinCpuCores() / totalNumCpuCores;\n double cpuUsageRatioOfAllocated = lastCpuMetric.getCpuUsageRatio() / allocatedCpuRatio;\n double cpuKernelUsageRatioOfAllocated = lastCpuMetric.getCpuKernelUsageRatio() / allocatedCpuRatio;\n\n long memoryTotalBytesUsed = memoryTotalBytesUsage - memoryTotalBytesCache;\n double memoryUsageRatio = (double) memoryTotalBytesUsed / memoryTotalBytes;\n double memoryTotalUsageRatio = (double) memoryTotalBytesUsage / memoryTotalBytes;\n Optional diskUsageRatio = diskTotalBytesUsed.map(used -> (double) used / diskTotalBytes);\n\n List metrics = new ArrayList<>();\n DimensionMetrics.Builder systemMetricsBuilder = new DimensionMetrics.Builder(APP, dimensions)\n .withMetric(\"mem.limit\", memoryTotalBytes)\n .withMetric(\"mem.used\", memoryTotalBytesUsed)\n .withMetric(\"mem.util\", 100 * memoryUsageRatio)\n .withMetric(\"mem_total.used\", memoryTotalBytesUsage)\n .withMetric(\"mem_total.util\", 100 * memoryTotalUsageRatio)\n 
.withMetric(\"cpu.util\", 100 * cpuUsageRatioOfAllocated)\n .withMetric(\"cpu.sys.util\", 100 * cpuKernelUsageRatioOfAllocated)\n .withMetric(\"disk.limit\", diskTotalBytes);\n\n diskTotalBytesUsed.ifPresent(diskUsed -> systemMetricsBuilder.withMetric(\"disk.used\", diskUsed));\n diskUsageRatio.ifPresent(diskRatio -> systemMetricsBuilder.withMetric(\"disk.util\", 100 * diskRatio));\n metrics.add(systemMetricsBuilder.build());\n\n stats.getNetworks().forEach((interfaceName, interfaceStats) -> {\n Dimensions netDims = dimensionsBuilder.add(\"interface\", interfaceName).build();\n DimensionMetrics networkMetrics = new DimensionMetrics.Builder(APP, netDims)\n .withMetric(\"net.in.bytes\", interfaceStats.getRxBytes())\n .withMetric(\"net.in.errors\", interfaceStats.getRxErrors())\n .withMetric(\"net.in.dropped\", interfaceStats.getRxDropped())\n .withMetric(\"net.out.bytes\", interfaceStats.getTxBytes())\n .withMetric(\"net.out.errors\", interfaceStats.getTxErrors())\n .withMetric(\"net.out.dropped\", interfaceStats.getTxDropped())\n .build();\n metrics.add(networkMetrics);\n });\n\n pushMetricsToContainer(context, metrics);\n }\n\n \n\n \n private void runPushMetricsCommand(NodeAgentContext context, String wrappedMetrics, boolean newMetricsProxy) {\n int port = newMetricsProxy ? 19094 : 19091;\n \n long timeoutSeconds = newMetricsProxy ? 2L : 5L;\n String[] command = {\"vespa-rpc-invoke\", \"-t\", \"2\", \"tcp/localhost:\" + port, \"setExtraMetrics\", wrappedMetrics};\n dockerOperations.executeCommandInContainerAsRoot(context, timeoutSeconds, command);\n }\n\n private Optional getContainer(NodeAgentContext context) {\n if (containerState == ABSENT) return Optional.empty();\n Optional container = dockerOperations.getContainer(context);\n if (! 
container.isPresent()) containerState = ABSENT;\n return container;\n }\n\n @Override\n public boolean isDownloadingImage() {\n return imageBeingDownloaded != null;\n }\n\n @Override\n public int getAndResetNumberOfUnhandledExceptions() {\n int temp = numberOfUnhandledException;\n numberOfUnhandledException = 0;\n return temp;\n }\n\n class CpuUsageReporter {\n private long containerKernelUsage = 0;\n private long totalContainerUsage = 0;\n private long totalSystemUsage = 0;\n\n private long deltaContainerKernelUsage;\n private long deltaContainerUsage;\n private long deltaSystemUsage;\n\n private void updateCpuDeltas(long totalSystemUsage, long totalContainerUsage, long containerKernelUsage) {\n deltaSystemUsage = this.totalSystemUsage == 0 ? 0 : (totalSystemUsage - this.totalSystemUsage);\n deltaContainerUsage = totalContainerUsage - this.totalContainerUsage;\n deltaContainerKernelUsage = containerKernelUsage - this.containerKernelUsage;\n\n this.totalSystemUsage = totalSystemUsage;\n this.totalContainerUsage = totalContainerUsage;\n this.containerKernelUsage = containerKernelUsage;\n }\n\n /**\n * Returns the CPU usage ratio for the docker container that this NodeAgent is managing\n * in the time between the last two times updateCpuDeltas() was called. This is calculated\n * by dividing the CPU time used by the container with the CPU time used by the entire system.\n */\n double getCpuUsageRatio() {\n return deltaSystemUsage == 0 ? Double.NaN : (double) deltaContainerUsage / deltaSystemUsage;\n }\n\n double getCpuKernelUsageRatio() {\n return deltaSystemUsage == 0 ? 
Double.NaN : (double) deltaContainerKernelUsage / deltaSystemUsage;\n }\n }\n\n \n \n \n \n \n \n \n \n \n \n \n \n private void orchestratorSuspendNode(NodeAgentContext context) {\n if (context.node().getState() != NodeState.active) return;\n\n context.log(logger, \"Ask Orchestrator for permission to suspend node\");\n try {\n orchestrator.suspend(context.hostname().value());\n } catch (OrchestratorException e) {\n \n \n try {\n aclMaintainer.ifPresent(maintainer -> maintainer.converge(context));\n } catch (RuntimeException suppressed) {\n logger.log(LogLevel.WARNING, \"Suppressing ACL update failure: \" + suppressed);\n e.addSuppressed(suppressed);\n }\n\n throw e;\n }\n }\n\n protected ContainerData createContainerData(NodeAgentContext context) {\n return (pathInContainer, data) -> {\n throw new UnsupportedOperationException(\"addFile not implemented\");\n };\n }\n}", "context_after": "class NodeAgentImpl implements NodeAgent {\n \n private static final long BYTES_IN_GB = 1_000_000_000L;\n\n private static final Logger logger = Logger.getLogger(NodeAgentImpl.class.getName());\n\n private final AtomicBoolean terminated = new AtomicBoolean(false);\n private boolean hasResumedNode = false;\n private boolean hasStartedServices = true;\n\n private final NodeAgentContextSupplier contextSupplier;\n private final NodeRepository nodeRepository;\n private final Orchestrator orchestrator;\n private final DockerOperations dockerOperations;\n private final StorageMaintainer storageMaintainer;\n private final Optional credentialsMaintainer;\n private final Optional aclMaintainer;\n private final Optional healthChecker;\n\n private final DoubleFlag containerCpuCap;\n\n private int numberOfUnhandledException = 0;\n private DockerImage imageBeingDownloaded = null;\n\n private long currentRebootGeneration = 0;\n private Optional currentRestartGeneration = Optional.empty();\n\n private final Thread loopThread;\n\n\n /**\n * ABSENT means container is definitely absent - A container 
that was absent will not suddenly appear without\n * NodeAgent explicitly starting it.\n * STARTING state is set just before we attempt to start a container, if successful we move to the next state.\n * Otherwise we can't be certain. A container that was running a minute ago may no longer be running without\n * NodeAgent doing anything (container could have crashed). Therefore we always have to ask docker daemon\n * to get updated state of the container.\n */\n enum ContainerState {\n ABSENT,\n STARTING,\n UNKNOWN\n }\n\n private ContainerState containerState = UNKNOWN;\n\n private NodeSpec lastNode = null;\n private CpuUsageReporter lastCpuMetric = new CpuUsageReporter();\n\n \n public NodeAgentImpl(\n final NodeAgentContextSupplier contextSupplier,\n final NodeRepository nodeRepository,\n final Orchestrator orchestrator,\n final DockerOperations dockerOperations,\n final StorageMaintainer storageMaintainer,\n final FlagSource flagSource,\n final Optional credentialsMaintainer,\n final Optional aclMaintainer,\n final Optional healthChecker) {\n this.contextSupplier = contextSupplier;\n this.nodeRepository = nodeRepository;\n this.orchestrator = orchestrator;\n this.dockerOperations = dockerOperations;\n this.storageMaintainer = storageMaintainer;\n this.credentialsMaintainer = credentialsMaintainer;\n this.aclMaintainer = aclMaintainer;\n this.healthChecker = healthChecker;\n\n this.containerCpuCap = Flags.CONTAINER_CPU_CAP.bindTo(flagSource)\n .with(FetchVector.Dimension.HOSTNAME, contextSupplier.currentContext().node().getHostname());\n\n this.loopThread = new Thread(() -> {\n while (!terminated.get()) {\n try {\n NodeAgentContext context = contextSupplier.nextContext();\n converge(context);\n } catch (InterruptedException ignored) { }\n }\n });\n this.loopThread.setName(\"tick-\" + contextSupplier.currentContext().hostname());\n }\n\n @Override\n public void start() {\n loopThread.start();\n }\n\n @Override\n public void stopForRemoval() {\n if 
(!terminated.compareAndSet(false, true)) {\n throw new RuntimeException(\"Can not re-stop a node agent.\");\n }\n contextSupplier.interrupt();\n\n do {\n try {\n loopThread.join();\n } catch (InterruptedException ignored) { }\n } while (loopThread.isAlive());\n\n contextSupplier.currentContext().log(logger, \"Stopped\");\n }\n\n void startServicesIfNeeded(NodeAgentContext context) {\n if (!hasStartedServices) {\n context.log(logger, \"Starting services\");\n dockerOperations.startServices(context);\n hasStartedServices = true;\n }\n }\n\n void resumeNodeIfNeeded(NodeAgentContext context) {\n if (!hasResumedNode) {\n context.log(logger, LogLevel.DEBUG, \"Starting optional node program resume command\");\n dockerOperations.resumeNode(context);\n hasResumedNode = true;\n }\n }\n\n private void updateNodeRepoWithCurrentAttributes(NodeAgentContext context) {\n final NodeAttributes currentNodeAttributes = new NodeAttributes();\n final NodeAttributes newNodeAttributes = new NodeAttributes();\n\n if (context.node().getWantedRestartGeneration().isPresent() &&\n !Objects.equals(context.node().getCurrentRestartGeneration(), currentRestartGeneration)) {\n currentNodeAttributes.withRestartGeneration(context.node().getCurrentRestartGeneration());\n newNodeAttributes.withRestartGeneration(currentRestartGeneration);\n }\n\n if (!Objects.equals(context.node().getCurrentRebootGeneration(), currentRebootGeneration)) {\n currentNodeAttributes.withRebootGeneration(context.node().getCurrentRebootGeneration());\n newNodeAttributes.withRebootGeneration(currentRebootGeneration);\n }\n\n Optional actualDockerImage = context.node().getWantedDockerImage().filter(n -> containerState == UNKNOWN);\n if (!Objects.equals(context.node().getCurrentDockerImage(), actualDockerImage)) {\n DockerImage currentImage = context.node().getCurrentDockerImage().orElse(DockerImage.EMPTY);\n DockerImage newImage = actualDockerImage.orElse(DockerImage.EMPTY);\n\n 
currentNodeAttributes.withDockerImage(currentImage);\n currentNodeAttributes.withVespaVersion(currentImage.tagAsVersion());\n newNodeAttributes.withDockerImage(newImage);\n newNodeAttributes.withVespaVersion(newImage.tagAsVersion());\n }\n\n publishStateToNodeRepoIfChanged(context, currentNodeAttributes, newNodeAttributes);\n }\n\n private void publishStateToNodeRepoIfChanged(NodeAgentContext context, NodeAttributes currentAttributes, NodeAttributes newAttributes) {\n if (!currentAttributes.equals(newAttributes)) {\n context.log(logger, \"Publishing new set of attributes to node repo: %s -> %s\",\n currentAttributes, newAttributes);\n nodeRepository.updateNodeAttributes(context.hostname().value(), newAttributes);\n }\n }\n\n private void startContainer(NodeAgentContext context) {\n ContainerData containerData = createContainerData(context);\n dockerOperations.createContainer(context, containerData, getContainerResources(context.node()));\n dockerOperations.startContainer(context);\n lastCpuMetric = new CpuUsageReporter();\n\n hasStartedServices = true; \n hasResumedNode = false;\n context.log(logger, \"Container successfully started, new containerState is \" + containerState);\n }\n\n private Optional removeContainerIfNeededUpdateContainerState(\n NodeAgentContext context, Optional existingContainer) {\n if (existingContainer.isPresent()) {\n Optional reason = shouldRemoveContainer(context.node(), existingContainer.get());\n if (reason.isPresent()) {\n removeContainer(context, existingContainer.get(), reason.get(), false);\n return Optional.empty();\n }\n\n shouldRestartServices(context.node()).ifPresent(restartReason -> {\n context.log(logger, \"Will restart services: \" + restartReason);\n restartServices(context, existingContainer.get());\n currentRestartGeneration = context.node().getWantedRestartGeneration();\n });\n }\n\n return existingContainer;\n }\n\n private Optional shouldRestartServices(NodeSpec node) {\n if 
(!node.getWantedRestartGeneration().isPresent()) return Optional.empty();\n\n \n if (currentRestartGeneration.get() < node.getWantedRestartGeneration().get()) {\n return Optional.of(\"Restart requested - wanted restart generation has been bumped: \"\n + currentRestartGeneration.get() + \" -> \" + node.getWantedRestartGeneration().get());\n }\n return Optional.empty();\n }\n\n private void restartServices(NodeAgentContext context, Container existingContainer) {\n if (existingContainer.state.isRunning() && context.node().getState() == NodeState.active) {\n context.log(logger, \"Restarting services\");\n \n orchestratorSuspendNode(context);\n dockerOperations.restartVespa(context);\n }\n }\n\n private void stopServices() {\n NodeAgentContext context = contextSupplier.currentContext();\n context.log(logger, \"Stopping services\");\n if (containerState == ABSENT) return;\n try {\n hasStartedServices = hasResumedNode = false;\n dockerOperations.stopServices(context);\n } catch (ContainerNotFoundException e) {\n containerState = ABSENT;\n }\n }\n\n @Override\n public void stopForHostSuspension() {\n NodeAgentContext context = contextSupplier.currentContext();\n getContainer(context).ifPresent(container -> removeContainer(context, container, \"suspending host\", true));\n }\n\n public void suspend() {\n NodeAgentContext context = contextSupplier.currentContext();\n context.log(logger, \"Suspending services on node\");\n if (containerState == ABSENT) return;\n try {\n hasResumedNode = false;\n dockerOperations.suspendNode(context);\n } catch (ContainerNotFoundException e) {\n containerState = ABSENT;\n } catch (RuntimeException e) {\n \n \n context.log(logger, LogLevel.WARNING, \"Failed trying to suspend container\", e);\n }\n }\n\n private Optional shouldRemoveContainer(NodeSpec node, Container existingContainer) {\n final NodeState nodeState = node.getState();\n if (nodeState == NodeState.dirty || nodeState == NodeState.provisioned) {\n return Optional.of(\"Node in state 
\" + nodeState + \", container should no longer be running\");\n }\n if (node.getWantedDockerImage().isPresent() && !node.getWantedDockerImage().get().equals(existingContainer.image)) {\n return Optional.of(\"The node is supposed to run a new Docker image: \"\n + existingContainer.image.asString() + \" -> \" + node.getWantedDockerImage().get().asString());\n }\n if (!existingContainer.state.isRunning()) {\n return Optional.of(\"Container no longer running\");\n }\n\n if (currentRebootGeneration < node.getWantedRebootGeneration()) {\n return Optional.of(String.format(\"Container reboot wanted. Current: %d, Wanted: %d\",\n currentRebootGeneration, node.getWantedRebootGeneration()));\n }\n\n \n \n \n ContainerResources wantedContainerResources = getContainerResources(node);\n if (!wantedContainerResources.equalsMemory(existingContainer.resources)) {\n return Optional.of(\"Container should be running with different memory allocation, wanted: \" +\n wantedContainerResources.toStringMemory() + \", actual: \" + existingContainer.resources.toStringMemory());\n }\n\n if (containerState == STARTING) return Optional.of(\"Container failed to start\");\n return Optional.empty();\n }\n\n private void removeContainer(NodeAgentContext context, Container existingContainer, String reason, boolean alreadySuspended) {\n context.log(logger, \"Will remove container: \" + reason);\n\n if (existingContainer.state.isRunning()) {\n if (!alreadySuspended) {\n orchestratorSuspendNode(context);\n }\n\n try {\n if (context.node().getState() != NodeState.dirty) {\n suspend();\n }\n stopServices();\n } catch (Exception e) {\n context.log(logger, LogLevel.WARNING, \"Failed stopping services, ignoring\", e);\n }\n }\n\n storageMaintainer.handleCoreDumpsForContainer(context, Optional.of(existingContainer));\n dockerOperations.removeContainer(context, existingContainer);\n currentRebootGeneration = context.node().getWantedRebootGeneration();\n containerState = ABSENT;\n context.log(logger, 
\"Container successfully removed, new containerState is \" + containerState);\n }\n\n\n private void updateContainerIfNeeded(NodeAgentContext context, Container existingContainer) {\n ContainerResources wantedContainerResources = getContainerResources(context.node());\n if (wantedContainerResources.equalsCpu(existingContainer.resources)) return;\n context.log(logger, \"Container should be running with different CPU allocation, wanted: %s, current: %s\",\n wantedContainerResources.toStringCpu(), existingContainer.resources.toStringCpu());\n\n orchestratorSuspendNode(context);\n\n dockerOperations.updateContainer(context, wantedContainerResources);\n }\n\n private ContainerResources getContainerResources(NodeSpec node) {\n double cpuCap = node.getOwner()\n .map(NodeOwner::asApplicationId)\n .map(appId -> containerCpuCap.with(FetchVector.Dimension.APPLICATION_ID, appId.serializedForm()))\n .orElse(containerCpuCap)\n .value() * node.getMinCpuCores();\n\n return ContainerResources.from(cpuCap, node.getMinCpuCores(), node.getMinMainMemoryAvailableGb());\n }\n\n\n private void scheduleDownLoadIfNeeded(NodeSpec node, Optional container) {\n if (node.getWantedDockerImage().equals(container.map(c -> c.image))) return;\n\n if (dockerOperations.pullImageAsyncIfNeeded(node.getWantedDockerImage().get())) {\n imageBeingDownloaded = node.getWantedDockerImage().get();\n } else if (imageBeingDownloaded != null) { \n imageBeingDownloaded = null;\n }\n }\n\n public void converge(NodeAgentContext context) {\n try {\n doConverge(context);\n } catch (OrchestratorException | ConvergenceException e) {\n context.log(logger, e.getMessage());\n } catch (ContainerNotFoundException e) {\n containerState = ABSENT;\n context.log(logger, LogLevel.WARNING, \"Container unexpectedly gone, resetting containerState to \" + containerState);\n } catch (DockerException e) {\n numberOfUnhandledException++;\n context.log(logger, LogLevel.ERROR, \"Caught a DockerException\", e);\n } catch (Throwable e) {\n 
numberOfUnhandledException++;\n context.log(logger, LogLevel.ERROR, \"Unhandled exception, ignoring\", e);\n }\n }\n\n \n void doConverge(NodeAgentContext context) {\n NodeSpec node = context.node();\n Optional container = getContainer(context);\n if (!node.equals(lastNode)) {\n logChangesToNodeSpec(context, lastNode, node);\n\n \n if (currentRebootGeneration < node.getCurrentRebootGeneration())\n currentRebootGeneration = node.getCurrentRebootGeneration();\n\n \n \n if (currentRestartGeneration.isPresent() != node.getCurrentRestartGeneration().isPresent() ||\n currentRestartGeneration.map(current -> current < node.getCurrentRestartGeneration().get()).orElse(false))\n currentRestartGeneration = node.getCurrentRestartGeneration();\n\n \n \n if (container.map(c -> c.state.isRunning()).orElse(false)) {\n storageMaintainer.writeMetricsConfig(context);\n }\n\n lastNode = node;\n }\n\n switch (node.getState()) {\n case ready:\n case reserved:\n case parked:\n case failed:\n removeContainerIfNeededUpdateContainerState(context, container);\n updateNodeRepoWithCurrentAttributes(context);\n break;\n case active:\n storageMaintainer.handleCoreDumpsForContainer(context, container);\n\n storageMaintainer.getDiskUsageFor(context)\n .map(diskUsage -> (double) diskUsage / BYTES_IN_GB / node.getMinDiskAvailableGb())\n .filter(diskUtil -> diskUtil >= 0.8)\n .ifPresent(diskUtil -> storageMaintainer.removeOldFilesFromNode(context));\n\n scheduleDownLoadIfNeeded(node, container);\n if (isDownloadingImage()) {\n context.log(logger, \"Waiting for image to download \" + imageBeingDownloaded.asString());\n return;\n }\n container = removeContainerIfNeededUpdateContainerState(context, container);\n credentialsMaintainer.ifPresent(maintainer -> maintainer.converge(context));\n if (! 
container.isPresent()) {\n containerState = STARTING;\n startContainer(context);\n containerState = UNKNOWN;\n } else {\n updateContainerIfNeeded(context, container.get());\n }\n\n aclMaintainer.ifPresent(maintainer -> maintainer.converge(context));\n startServicesIfNeeded(context);\n resumeNodeIfNeeded(context);\n healthChecker.ifPresent(checker -> checker.verifyHealth(context));\n\n \n \n \n \n \n \n \n \n \n \n updateNodeRepoWithCurrentAttributes(context);\n context.log(logger, \"Call resume against Orchestrator\");\n orchestrator.resume(context.hostname().value());\n break;\n case inactive:\n removeContainerIfNeededUpdateContainerState(context, container);\n updateNodeRepoWithCurrentAttributes(context);\n break;\n case provisioned:\n nodeRepository.setNodeState(context.hostname().value(), NodeState.dirty);\n break;\n case dirty:\n removeContainerIfNeededUpdateContainerState(context, container);\n context.log(logger, \"State is \" + node.getState() + \", will delete application storage and mark node as ready\");\n credentialsMaintainer.ifPresent(maintainer -> maintainer.clearCredentials(context));\n storageMaintainer.archiveNodeStorage(context);\n updateNodeRepoWithCurrentAttributes(context);\n nodeRepository.setNodeState(context.hostname().value(), NodeState.ready);\n break;\n default:\n throw new RuntimeException(\"UNKNOWN STATE \" + node.getState().name());\n }\n }\n\n private static void logChangesToNodeSpec(NodeAgentContext context, NodeSpec lastNode, NodeSpec node) {\n StringBuilder builder = new StringBuilder();\n appendIfDifferent(builder, \"state\", lastNode, node, NodeSpec::getState);\n if (builder.length() > 0) {\n context.log(logger, LogLevel.INFO, \"Changes to node: \" + builder.toString());\n }\n }\n\n private static String fieldDescription(T value) {\n return value == null ? 
\"[absent]\" : value.toString();\n }\n\n private static void appendIfDifferent(StringBuilder builder, String name, NodeSpec oldNode, NodeSpec newNode, Function getter) {\n T oldValue = oldNode == null ? null : getter.apply(oldNode);\n T newValue = getter.apply(newNode);\n if (!Objects.equals(oldValue, newValue)) {\n if (builder.length() > 0) {\n builder.append(\", \");\n }\n builder.append(name).append(\" \").append(fieldDescription(oldValue)).append(\" -> \").append(fieldDescription(newValue));\n }\n }\n\n @SuppressWarnings(\"unchecked\")\n public void updateContainerNodeMetrics() {\n if (containerState != UNKNOWN) return;\n final NodeAgentContext context = contextSupplier.currentContext();\n final NodeSpec node = context.node();\n\n Optional containerStats = dockerOperations.getContainerStats(context);\n if (!containerStats.isPresent()) return;\n\n Dimensions.Builder dimensionsBuilder = new Dimensions.Builder()\n .add(\"host\", context.hostname().value())\n .add(\"role\", SecretAgentCheckConfig.nodeTypeToRole(context.nodeType()))\n .add(\"state\", node.getState().toString());\n node.getParentHostname().ifPresent(parent -> dimensionsBuilder.add(\"parentHostname\", parent));\n node.getAllowedToBeDown().ifPresent(allowed ->\n dimensionsBuilder.add(\"orchestratorState\", allowed ? 
\"ALLOWED_TO_BE_DOWN\" : \"NO_REMARKS\"));\n Dimensions dimensions = dimensionsBuilder.build();\n\n ContainerStats stats = containerStats.get();\n final String APP = MetricReceiverWrapper.APPLICATION_NODE;\n final int totalNumCpuCores = stats.getCpuStats().getOnlineCpus();\n final long cpuContainerKernelTime = stats.getCpuStats().getUsageInKernelMode();\n final long cpuContainerTotalTime = stats.getCpuStats().getTotalUsage();\n final long cpuSystemTotalTime = stats.getCpuStats().getSystemCpuUsage();\n final long memoryTotalBytes = stats.getMemoryStats().getLimit();\n final long memoryTotalBytesUsage = stats.getMemoryStats().getUsage();\n final long memoryTotalBytesCache = stats.getMemoryStats().getCache();\n final long diskTotalBytes = (long) (node.getMinDiskAvailableGb() * BYTES_IN_GB);\n final Optional diskTotalBytesUsed = storageMaintainer.getDiskUsageFor(context);\n\n lastCpuMetric.updateCpuDeltas(cpuSystemTotalTime, cpuContainerTotalTime, cpuContainerKernelTime);\n\n \n final double allocatedCpuRatio = node.getMinCpuCores() / totalNumCpuCores;\n double cpuUsageRatioOfAllocated = lastCpuMetric.getCpuUsageRatio() / allocatedCpuRatio;\n double cpuKernelUsageRatioOfAllocated = lastCpuMetric.getCpuKernelUsageRatio() / allocatedCpuRatio;\n\n long memoryTotalBytesUsed = memoryTotalBytesUsage - memoryTotalBytesCache;\n double memoryUsageRatio = (double) memoryTotalBytesUsed / memoryTotalBytes;\n double memoryTotalUsageRatio = (double) memoryTotalBytesUsage / memoryTotalBytes;\n Optional diskUsageRatio = diskTotalBytesUsed.map(used -> (double) used / diskTotalBytes);\n\n List metrics = new ArrayList<>();\n DimensionMetrics.Builder systemMetricsBuilder = new DimensionMetrics.Builder(APP, dimensions)\n .withMetric(\"mem.limit\", memoryTotalBytes)\n .withMetric(\"mem.used\", memoryTotalBytesUsed)\n .withMetric(\"mem.util\", 100 * memoryUsageRatio)\n .withMetric(\"mem_total.used\", memoryTotalBytesUsage)\n .withMetric(\"mem_total.util\", 100 * memoryTotalUsageRatio)\n 
.withMetric(\"cpu.util\", 100 * cpuUsageRatioOfAllocated)\n .withMetric(\"cpu.sys.util\", 100 * cpuKernelUsageRatioOfAllocated)\n .withMetric(\"disk.limit\", diskTotalBytes);\n\n diskTotalBytesUsed.ifPresent(diskUsed -> systemMetricsBuilder.withMetric(\"disk.used\", diskUsed));\n diskUsageRatio.ifPresent(diskRatio -> systemMetricsBuilder.withMetric(\"disk.util\", 100 * diskRatio));\n metrics.add(systemMetricsBuilder.build());\n\n stats.getNetworks().forEach((interfaceName, interfaceStats) -> {\n Dimensions netDims = dimensionsBuilder.add(\"interface\", interfaceName).build();\n DimensionMetrics networkMetrics = new DimensionMetrics.Builder(APP, netDims)\n .withMetric(\"net.in.bytes\", interfaceStats.getRxBytes())\n .withMetric(\"net.in.errors\", interfaceStats.getRxErrors())\n .withMetric(\"net.in.dropped\", interfaceStats.getRxDropped())\n .withMetric(\"net.out.bytes\", interfaceStats.getTxBytes())\n .withMetric(\"net.out.errors\", interfaceStats.getTxErrors())\n .withMetric(\"net.out.dropped\", interfaceStats.getTxDropped())\n .build();\n metrics.add(networkMetrics);\n });\n\n pushMetricsToContainer(context, metrics);\n }\n\n \n\n \n private void runPushMetricsCommand(NodeAgentContext context, String wrappedMetrics, boolean newMetricsProxy) {\n int port = newMetricsProxy ? 19094 : 19091;\n String[] command = {\"vespa-rpc-invoke\", \"-t\", \"2\", \"tcp/localhost:\" + port, \"setExtraMetrics\", wrappedMetrics};\n try {\n dockerOperations.executeCommandInContainerAsRoot(context, 5L, command);\n } catch (DockerExecTimeoutException e) {\n Level level = newMetricsProxy ? LogLevel.DEBUG : LogLevel.WARNING;\n context.log(logger, level, \"Failed to push metrics to container\", e);\n }\n\n }\n\n private Optional getContainer(NodeAgentContext context) {\n if (containerState == ABSENT) return Optional.empty();\n Optional container = dockerOperations.getContainer(context);\n if (! 
container.isPresent()) containerState = ABSENT;\n return container;\n }\n\n @Override\n public boolean isDownloadingImage() {\n return imageBeingDownloaded != null;\n }\n\n @Override\n public int getAndResetNumberOfUnhandledExceptions() {\n int temp = numberOfUnhandledException;\n numberOfUnhandledException = 0;\n return temp;\n }\n\n class CpuUsageReporter {\n private long containerKernelUsage = 0;\n private long totalContainerUsage = 0;\n private long totalSystemUsage = 0;\n\n private long deltaContainerKernelUsage;\n private long deltaContainerUsage;\n private long deltaSystemUsage;\n\n private void updateCpuDeltas(long totalSystemUsage, long totalContainerUsage, long containerKernelUsage) {\n deltaSystemUsage = this.totalSystemUsage == 0 ? 0 : (totalSystemUsage - this.totalSystemUsage);\n deltaContainerUsage = totalContainerUsage - this.totalContainerUsage;\n deltaContainerKernelUsage = containerKernelUsage - this.containerKernelUsage;\n\n this.totalSystemUsage = totalSystemUsage;\n this.totalContainerUsage = totalContainerUsage;\n this.containerKernelUsage = containerKernelUsage;\n }\n\n /**\n * Returns the CPU usage ratio for the docker container that this NodeAgent is managing\n * in the time between the last two times updateCpuDeltas() was called. This is calculated\n * by dividing the CPU time used by the container with the CPU time used by the entire system.\n */\n double getCpuUsageRatio() {\n return deltaSystemUsage == 0 ? Double.NaN : (double) deltaContainerUsage / deltaSystemUsage;\n }\n\n double getCpuKernelUsageRatio() {\n return deltaSystemUsage == 0 ? 
Double.NaN : (double) deltaContainerKernelUsage / deltaSystemUsage;\n }\n }\n\n \n \n \n \n \n \n \n \n \n \n \n \n private void orchestratorSuspendNode(NodeAgentContext context) {\n if (context.node().getState() != NodeState.active) return;\n\n context.log(logger, \"Ask Orchestrator for permission to suspend node\");\n try {\n orchestrator.suspend(context.hostname().value());\n } catch (OrchestratorException e) {\n \n \n try {\n aclMaintainer.ifPresent(maintainer -> maintainer.converge(context));\n } catch (RuntimeException suppressed) {\n logger.log(LogLevel.WARNING, \"Suppressing ACL update failure: \" + suppressed);\n e.addSuppressed(suppressed);\n }\n\n throw e;\n }\n }\n\n protected ContainerData createContainerData(NodeAgentContext context) {\n return (pathInContainer, data) -> {\n throw new UnsupportedOperationException(\"addFile not implemented\");\n };\n }\n}" }, { "comment": "The default bucket name is a concatenation of a fixed prefix, region and a project number. So for a given project, if it is running in the same region, the default bucket name would be the same. For our integration test, it would always be the same as \"gcp-options-it-us-central1-844138762903\". 
Actually, this may cause race condition if we run this integration test twice at the same time though.", "method_body": "public void testCreateDefaultBucket() throws IOException {\n TestPipelineOptions options =\n TestPipeline.testingPipelineOptions().as(TestPipelineOptions.class);\n\n CloudResourceManager crmClient =\n GcpOptions.GcpTempLocationFactory.newCloudResourceManagerClient(\n options.as(CloudResourceManagerOptions.class))\n .build();\n\n GcsOptions gcsOptions = options.as(GcsOptions.class);\n GcsUtil gcsUtil = gcsOptions.getGcsUtil();\n\n String tempLocation =\n GcpOptions.GcpTempLocationFactory.tryCreateDefaultBucketWithPrefix(\n options, crmClient, \"gcp-options-it-\");\n\n GcsPath gcsPath = GcsPath.fromUri(tempLocation);\n System.out.println(gcsPath);\n\n Bucket bucket = gcsUtil.getBucket(gcsPath);\n assertNotNull(bucket);\n \n assertEquals(bucket.getSoftDeletePolicy().getRetentionDurationSeconds(), Long.valueOf(0L));\n\n gcsUtil.removeBucket(bucket);\n }", "target_code": "options, crmClient, \"gcp-options-it-\");", "method_body_after": "public void testCreateDefaultBucket() throws IOException {\n TestPipelineOptions options =\n TestPipeline.testingPipelineOptions().as(TestPipelineOptions.class);\n\n CloudResourceManager crmClient =\n GcpOptions.GcpTempLocationFactory.newCloudResourceManagerClient(\n options.as(CloudResourceManagerOptions.class))\n .build();\n\n GcsOptions gcsOptions = options.as(GcsOptions.class);\n GcsUtil gcsUtil = gcsOptions.getGcsUtil();\n\n Random rand = new Random();\n \n \n \n String bucketNamePrefix = \"gcp-options-it-\" + rand.nextInt(1000);\n\n String bucketName =\n String.join(\n \"-\",\n GcpOptions.GcpTempLocationFactory.getDefaultBucketNameStubs(\n options, crmClient, bucketNamePrefix));\n\n \n try {\n Bucket oldBucket = gcsUtil.getBucket(GcsPath.fromUri(\"gs:\n gcsUtil.removeBucket(oldBucket);\n } catch (FileNotFoundException e) {\n \n }\n\n String tempLocation =\n 
GcpOptions.GcpTempLocationFactory.tryCreateDefaultBucketWithPrefix(\n options, crmClient, bucketNamePrefix);\n\n GcsPath gcsPath = GcsPath.fromUri(tempLocation);\n Bucket bucket = gcsUtil.getBucket(gcsPath);\n assertNotNull(bucket);\n \n assertEquals(bucket.getSoftDeletePolicy().getRetentionDurationSeconds(), Long.valueOf(0L));\n\n gcsUtil.removeBucket(bucket);\n assertThrows(FileNotFoundException.class, () -> gcsUtil.getBucket(gcsPath));\n }", "context_before": "class GcpOptionsIT {\n /** Tests the creation of a default bucket in a project. */\n @Test\n \n}", "context_after": "class GcpOptionsIT {\n /** Tests the creation of a default bucket in a project. */\n @Test\n \n}" }, { "comment": "Busy sleep is generally not preferable. Consider use wait/notify which sounds exactly what we need here? That is block this thread until future1 reaches while(true)", "method_body": "public void testCloseVisibleToAwaitCompletionCallerAndProducer() throws Exception {\n BeamFnDataInboundObserver observer =\n BeamFnDataInboundObserver.forConsumers(\n Arrays.asList(DataEndpoint.create(TRANSFORM_ID, CODER, (value) -> {})),\n Collections.emptyList());\n\n Future future =\n executor.submit(\n () -> {\n observer.accept(dataWith(\"ABC\"));\n assertThrows(\n BeamFnDataInboundObserver.CloseException.class,\n () -> {\n while (true) {\n \n \n \n observer.accept(dataWith(\"ABC\"));\n }\n });\n return null;\n });\n Future future2 =\n executor.submit(\n () -> {\n Thread.sleep(500); \n observer.close();\n return null;\n });\n\n assertThrows(BeamFnDataInboundObserver.CloseException.class, () -> observer.awaitCompletion());\n future.get();\n future2.get();\n }", "target_code": "Thread.sleep(500);", "method_body_after": "public void testCloseVisibleToAwaitCompletionCallerAndProducer() throws Exception {\n BeamFnDataInboundObserver observer =\n BeamFnDataInboundObserver.forConsumers(\n Arrays.asList(DataEndpoint.create(TRANSFORM_ID, CODER, (value) -> {})),\n Collections.emptyList());\n\n 
AtomicBoolean isReady = new AtomicBoolean(false);\n Future future =\n executor.submit(\n () -> {\n observer.accept(dataWith(\"ABC\"));\n synchronized (isReady) {\n isReady.set(true);\n isReady.notify();\n }\n assertThrows(\n BeamFnDataInboundObserver.CloseException.class,\n () -> {\n while (true) {\n \n \n \n observer.accept(dataWith(\"ABC\"));\n }\n });\n return null;\n });\n Future future2 =\n executor.submit(\n () -> {\n synchronized (isReady) {\n while (!isReady.get()) {\n isReady.wait();\n }\n }\n observer.close();\n return null;\n });\n\n assertThrows(BeamFnDataInboundObserver.CloseException.class, () -> observer.awaitCompletion());\n future.get();\n future2.get();\n }", "context_before": "class BeamFnDataInboundObserverTest {\n private static final Coder> CODER =\n WindowedValue.getFullCoder(StringUtf8Coder.of(), GlobalWindow.Coder.INSTANCE);\n private static final String TRANSFORM_ID = \"transformId\";\n private static final String TIMER_FAMILY_ID = \"timerFamilyId\";\n\n @Rule\n public final TestExecutorService executor = TestExecutors.from(Executors::newCachedThreadPool);\n\n @Test\n public void testConsumptionOfValuesHappensOnAwaitCompletionCallersThread() throws Exception {\n Thread thread = Thread.currentThread();\n Collection> values = new ArrayList<>();\n Collection> timers = new ArrayList<>();\n BeamFnDataInboundObserver observer =\n BeamFnDataInboundObserver.forConsumers(\n Arrays.asList(\n DataEndpoint.create(\n TRANSFORM_ID,\n CODER,\n (value) -> {\n assertSame(thread, Thread.currentThread());\n values.add(value);\n })),\n Arrays.asList(\n TimerEndpoint.create(\n TRANSFORM_ID,\n TIMER_FAMILY_ID,\n CODER,\n (value) -> {\n assertSame(thread, Thread.currentThread());\n timers.add(value);\n })));\n\n Future future =\n executor.submit(\n () -> {\n \n observer.accept(dataWith(\"ABC\", \"DEF\", \"GHI\"));\n observer.accept(lastData());\n observer.accept(timerWith(\"UVW\"));\n observer.accept(timerWith(\"XYZ\"));\n observer.accept(lastTimer());\n return 
null;\n });\n\n observer.awaitCompletion();\n assertThat(\n values,\n contains(\n WindowedValue.valueInGlobalWindow(\"ABC\"),\n WindowedValue.valueInGlobalWindow(\"DEF\"),\n WindowedValue.valueInGlobalWindow(\"GHI\")));\n assertThat(\n timers,\n contains(\n WindowedValue.valueInGlobalWindow(\"UVW\"), WindowedValue.valueInGlobalWindow(\"XYZ\")));\n future.get();\n }\n\n @Test\n public void testAwaitCompletionFailureVisibleToAwaitCompletionCallerAndProducer()\n throws Exception {\n BeamFnDataInboundObserver observer =\n BeamFnDataInboundObserver.forConsumers(\n Arrays.asList(\n DataEndpoint.create(\n TRANSFORM_ID,\n CODER,\n (value) -> {\n throw new Exception(\"test consumer failed\");\n })),\n Collections.emptyList());\n\n Future future =\n executor.submit(\n () -> {\n observer.accept(dataWith(\"ABC\"));\n assertThrows(\n \"test consumer failed\",\n Exception.class,\n () -> {\n while (true) {\n \n \n \n observer.accept(dataWith(\"ABC\"));\n }\n });\n return null;\n });\n\n assertThrows(\"test consumer failed\", Exception.class, () -> observer.awaitCompletion());\n future.get();\n }\n\n @Test\n \n\n @Test\n public void testBadProducerDataFailureVisibleToAwaitCompletionCallerAndProducer()\n throws Exception {\n BeamFnDataInboundObserver observer =\n BeamFnDataInboundObserver.forConsumers(\n Arrays.asList(DataEndpoint.create(TRANSFORM_ID, CODER, (value) -> {})),\n Collections.emptyList());\n Future future =\n executor.submit(\n () -> {\n observer.accept(timerWith(\"DEF\"));\n assertThrows(\n \"Unable to find inbound timer receiver for instruction\",\n IllegalStateException.class,\n () -> {\n \n \n \n while (true) {\n observer.accept(dataWith(\"ABC\"));\n }\n });\n return null;\n });\n\n assertThrows(\n \"Unable to find inbound timer receiver for instruction\",\n IllegalStateException.class,\n () -> observer.awaitCompletion());\n future.get();\n }\n\n private BeamFnApi.Elements dataWith(String... 
values) throws Exception {\n ByteStringOutputStream output = new ByteStringOutputStream();\n for (String value : values) {\n CODER.encode(WindowedValue.valueInGlobalWindow(value), output);\n }\n return BeamFnApi.Elements.newBuilder()\n .addData(\n BeamFnApi.Elements.Data.newBuilder()\n .setTransformId(TRANSFORM_ID)\n .setData(output.toByteString()))\n .build();\n }\n\n private BeamFnApi.Elements lastData() throws Exception {\n return BeamFnApi.Elements.newBuilder()\n .addData(BeamFnApi.Elements.Data.newBuilder().setTransformId(TRANSFORM_ID).setIsLast(true))\n .build();\n }\n\n private BeamFnApi.Elements timerWith(String... values) throws Exception {\n ByteStringOutputStream output = new ByteStringOutputStream();\n for (String value : values) {\n CODER.encode(WindowedValue.valueInGlobalWindow(value), output);\n }\n return BeamFnApi.Elements.newBuilder()\n .addTimers(\n BeamFnApi.Elements.Timers.newBuilder()\n .setTransformId(TRANSFORM_ID)\n .setTimerFamilyId(TIMER_FAMILY_ID)\n .setTimers(output.toByteString()))\n .build();\n }\n\n private BeamFnApi.Elements lastTimer() throws Exception {\n return BeamFnApi.Elements.newBuilder()\n .addTimers(\n BeamFnApi.Elements.Timers.newBuilder()\n .setTransformId(TRANSFORM_ID)\n .setTimerFamilyId(TIMER_FAMILY_ID)\n .setIsLast(true))\n .build();\n }\n}", "context_after": "class BeamFnDataInboundObserverTest {\n private static final Coder> CODER =\n WindowedValue.getFullCoder(StringUtf8Coder.of(), GlobalWindow.Coder.INSTANCE);\n private static final String TRANSFORM_ID = \"transformId\";\n private static final String TIMER_FAMILY_ID = \"timerFamilyId\";\n\n @Rule\n public final TestExecutorService executor = TestExecutors.from(Executors::newCachedThreadPool);\n\n @Test\n public void testConsumptionOfValuesHappensOnAwaitCompletionCallersThread() throws Exception {\n Thread thread = Thread.currentThread();\n Collection> values = new ArrayList<>();\n Collection> timers = new ArrayList<>();\n BeamFnDataInboundObserver observer =\n 
BeamFnDataInboundObserver.forConsumers(\n Arrays.asList(\n DataEndpoint.create(\n TRANSFORM_ID,\n CODER,\n (value) -> {\n assertSame(thread, Thread.currentThread());\n values.add(value);\n })),\n Arrays.asList(\n TimerEndpoint.create(\n TRANSFORM_ID,\n TIMER_FAMILY_ID,\n CODER,\n (value) -> {\n assertSame(thread, Thread.currentThread());\n timers.add(value);\n })));\n\n Future future =\n executor.submit(\n () -> {\n \n observer.accept(dataWith(\"ABC\", \"DEF\", \"GHI\"));\n observer.accept(lastData());\n observer.accept(timerWith(\"UVW\"));\n observer.accept(timerWith(\"XYZ\"));\n observer.accept(lastTimer());\n return null;\n });\n\n observer.awaitCompletion();\n assertThat(\n values,\n contains(\n WindowedValue.valueInGlobalWindow(\"ABC\"),\n WindowedValue.valueInGlobalWindow(\"DEF\"),\n WindowedValue.valueInGlobalWindow(\"GHI\")));\n assertThat(\n timers,\n contains(\n WindowedValue.valueInGlobalWindow(\"UVW\"), WindowedValue.valueInGlobalWindow(\"XYZ\")));\n future.get();\n }\n\n @Test\n public void testAwaitCompletionFailureVisibleToAwaitCompletionCallerAndProducer()\n throws Exception {\n BeamFnDataInboundObserver observer =\n BeamFnDataInboundObserver.forConsumers(\n Arrays.asList(\n DataEndpoint.create(\n TRANSFORM_ID,\n CODER,\n (value) -> {\n throw new Exception(\"test consumer failed\");\n })),\n Collections.emptyList());\n\n Future future =\n executor.submit(\n () -> {\n observer.accept(dataWith(\"ABC\"));\n assertThrows(\n \"test consumer failed\",\n Exception.class,\n () -> {\n while (true) {\n \n \n \n observer.accept(dataWith(\"ABC\"));\n }\n });\n return null;\n });\n\n assertThrows(\"test consumer failed\", Exception.class, () -> observer.awaitCompletion());\n future.get();\n }\n\n @Test\n \n\n @Test\n public void testBadProducerDataFailureVisibleToAwaitCompletionCallerAndProducer()\n throws Exception {\n BeamFnDataInboundObserver observer =\n BeamFnDataInboundObserver.forConsumers(\n Arrays.asList(DataEndpoint.create(TRANSFORM_ID, CODER, (value) 
-> {})),\n Collections.emptyList());\n Future future =\n executor.submit(\n () -> {\n observer.accept(timerWith(\"DEF\"));\n assertThrows(\n \"Unable to find inbound timer receiver for instruction\",\n IllegalStateException.class,\n () -> {\n \n \n \n while (true) {\n observer.accept(dataWith(\"ABC\"));\n }\n });\n return null;\n });\n\n assertThrows(\n \"Unable to find inbound timer receiver for instruction\",\n IllegalStateException.class,\n () -> observer.awaitCompletion());\n future.get();\n }\n\n private BeamFnApi.Elements dataWith(String... values) throws Exception {\n ByteStringOutputStream output = new ByteStringOutputStream();\n for (String value : values) {\n CODER.encode(WindowedValue.valueInGlobalWindow(value), output);\n }\n return BeamFnApi.Elements.newBuilder()\n .addData(\n BeamFnApi.Elements.Data.newBuilder()\n .setTransformId(TRANSFORM_ID)\n .setData(output.toByteString()))\n .build();\n }\n\n private BeamFnApi.Elements lastData() throws Exception {\n return BeamFnApi.Elements.newBuilder()\n .addData(BeamFnApi.Elements.Data.newBuilder().setTransformId(TRANSFORM_ID).setIsLast(true))\n .build();\n }\n\n private BeamFnApi.Elements timerWith(String... 
values) throws Exception {\n ByteStringOutputStream output = new ByteStringOutputStream();\n for (String value : values) {\n CODER.encode(WindowedValue.valueInGlobalWindow(value), output);\n }\n return BeamFnApi.Elements.newBuilder()\n .addTimers(\n BeamFnApi.Elements.Timers.newBuilder()\n .setTransformId(TRANSFORM_ID)\n .setTimerFamilyId(TIMER_FAMILY_ID)\n .setTimers(output.toByteString()))\n .build();\n }\n\n private BeamFnApi.Elements lastTimer() throws Exception {\n return BeamFnApi.Elements.newBuilder()\n .addTimers(\n BeamFnApi.Elements.Timers.newBuilder()\n .setTransformId(TRANSFORM_ID)\n .setTimerFamilyId(TIMER_FAMILY_ID)\n .setIsLast(true))\n .build();\n }\n}" }, { "comment": "@geoand see this comment in HV where we try to size properly all the collections that are in the metadata: https://github.com/hibernate/hibernate-validator/blob/master/engine/src/main/java/org/hibernate/validator/internal/util/CollectionHelper.java#L130 It's not an issue with `ArrayList` as it sizes the array exactly with the size you pass. 
In any case, it's a detail.", "method_body": "public HashSet apply(final int value) {\n return new HashSet<>(value);\n }", "target_code": "return new HashSet<>(value);", "method_body_after": "public HashSet apply(final int value) {\n return new HashSet<>(getInitialCapacityFromExpectedSize(value));\n }", "context_before": "class HashSetFactory implements IntFunction> {\n private static final HashSetFactory INSTANCE = new HashSetFactory<>();\n\n private HashSetFactory() {\n }\n\n \n\n @SuppressWarnings(\"unchecked\")\n public static HashSetFactory getInstance() {\n return (HashSetFactory) INSTANCE;\n }\n}", "context_after": "class HashSetFactory implements IntFunction> {\n private static final HashSetFactory INSTANCE = new HashSetFactory<>();\n\n private HashSetFactory() {\n }\n\n \n\n /**\n * As the default loadFactor is of 0.75, we need to calculate the initial capacity from the expected size to avoid\n * resizing the collection when we populate the collection with all the initial elements. 
We use a calculation\n * similar to what is done in {@link java.util.HashMap\n *\n * @param expectedSize the expected size of the collection\n * @return the initial capacity of the collection\n */\n private int getInitialCapacityFromExpectedSize(int expectedSize) {\n if (expectedSize < 3) {\n return expectedSize + 1;\n }\n return (int) ((float) expectedSize / 0.75f + 1.0f);\n }\n\n @SuppressWarnings(\"unchecked\")\n public static HashSetFactory getInstance() {\n return (HashSetFactory) INSTANCE;\n }\n}" }, { "comment": "Ah thanks now it make sense \ud83d\ude4f", "method_body": "public PojoSerializer duplicate() {\n TypeSerializer[] duplicateFieldSerializers = duplicateSerializers(fieldSerializers);\n TypeSerializer[] duplicateRegisteredSerializers =\n duplicateSerializers(registeredSerializers);\n\n \n \n return new PojoSerializer<>(\n clazz,\n fields,\n duplicateFieldSerializers,\n new LinkedHashMap<>(registeredClasses),\n duplicateRegisteredSerializers,\n subclassSerializerCache.entrySet().stream()\n .collect(\n Collectors.toMap(Map.Entry::getKey, e -> e.getValue().duplicate())),\n executionConfig);\n }", "target_code": "", "method_body_after": "public PojoSerializer duplicate() {\n TypeSerializer[] duplicateFieldSerializers = duplicateSerializers(fieldSerializers);\n TypeSerializer[] duplicateRegisteredSerializers =\n duplicateSerializers(registeredSerializers);\n\n return new PojoSerializer<>(\n clazz,\n fields,\n duplicateFieldSerializers,\n new LinkedHashMap<>(registeredClasses),\n duplicateRegisteredSerializers,\n subclassSerializerCache.entrySet().stream()\n .collect(\n Collectors.toMap(Map.Entry::getKey, e -> e.getValue().duplicate())),\n executionConfig);\n }", "context_before": "class and are actually sub-classes.\n LinkedHashSet> registeredSubclasses =\n getRegisteredSubclassesFromExecutionConfig(clazz, executionConfig);\n\n this.registeredClasses = createRegisteredSubclassTags(registeredSubclasses);\n this.registeredSerializers =\n 
createRegisteredSubclassSerializers(registeredSubclasses, executionConfig);\n\n this.subclassSerializerCache = new HashMap<>();\n }\n\n /**\n * Constructor to create a restore serializer or a reconfigured serializer from a {@link\n * PojoSerializerSnapshot}", "context_after": "class and are actually sub-classes.\n LinkedHashSet> registeredSubclasses =\n getRegisteredSubclassesFromExecutionConfig(clazz, executionConfig);\n\n this.registeredClasses = createRegisteredSubclassTags(registeredSubclasses);\n this.registeredSerializers =\n createRegisteredSubclassSerializers(registeredSubclasses, executionConfig);\n\n this.subclassSerializerCache = new HashMap<>();\n }\n\n /**\n * Constructor to create a restore serializer or a reconfigured serializer from a {@link\n * PojoSerializerSnapshot}" }, { "comment": "Since this is runtime validation, this is not a problem.", "method_body": "private static int getStatusCode(BMap errorDetails) {\n if (!errorDetails.hasKey(STATUS_CODE)) {\n return DEFAULT_ERROR_RETURN_STATUS_CODE;\n }\n\n BValue specifiedStatusCode = errorDetails.get(STATUS_CODE);\n if (specifiedStatusCode.getType().getTag() != TypeTags.INT) {\n return DEFAULT_ERROR_RETURN_STATUS_CODE;\n }\n\n long specifiedIntCode = ((BInteger) specifiedStatusCode).intValue();\n if (specifiedIntCode >= DEFAULT_ERROR_RETURN_STATUS_CODE) {\n if (specifiedIntCode > Integer.MAX_VALUE) {\n return Integer.MAX_VALUE;\n }\n return (int) specifiedIntCode;\n }\n return DEFAULT_ERROR_RETURN_STATUS_CODE;\n }", "target_code": "if (specifiedStatusCode.getType().getTag() != TypeTags.INT) {", "method_body_after": "private static int getStatusCode(BMap errorDetails) {\n if (!errorDetails.hasKey(STATUS_CODE)) {\n return DEFAULT_ERROR_RETURN_STATUS_CODE;\n }\n\n BValue specifiedStatusCode = errorDetails.get(STATUS_CODE);\n if (specifiedStatusCode.getType().getTag() != TypeTags.INT) {\n return DEFAULT_ERROR_RETURN_STATUS_CODE;\n }\n\n long specifiedIntCode = ((BInteger) 
specifiedStatusCode).intValue();\n if (specifiedIntCode >= DEFAULT_ERROR_RETURN_STATUS_CODE) {\n if (specifiedIntCode > Integer.MAX_VALUE) {\n return Integer.MAX_VALUE;\n }\n return (int) specifiedIntCode;\n }\n return DEFAULT_ERROR_RETURN_STATUS_CODE;\n }", "context_before": "class LauncherUtils {\n\n private static final String STATUS_CODE = \"statusCode\";\n private static final int DEFAULT_ERROR_RETURN_STATUS_CODE = 1;\n\n private static PrintStream errStream = System.err;\n\n public static void runProgram(Path sourceRootPath, Path sourcePath, Map runtimeParams,\n String configFilePath, String[] args, boolean offline, boolean observeFlag) {\n runProgram(sourceRootPath, sourcePath, runtimeParams, configFilePath, args, offline, observeFlag, false, true);\n }\n\n static void runProgram(Path sourceRootPath, Path sourcePath, Map runtimeParams,\n String configFilePath, String[] args, boolean offline, boolean observeFlag,\n boolean siddhiRuntimeFlag, boolean experimentalFlag) {\n ProgramFile programFile;\n String srcPathStr = sourcePath.toString();\n Path fullPath = sourceRootPath.resolve(sourcePath);\n \n System.setProperty(ProjectDirConstants.BALLERINA_SOURCE_ROOT, fullPath.getParent().toString());\n loadConfigurations(fullPath.getParent(), runtimeParams, configFilePath, observeFlag);\n\n if (srcPathStr.endsWith(BLANG_EXEC_FILE_SUFFIX)) {\n programFile = BLangProgramLoader.read(sourcePath);\n } else if (Files.isRegularFile(fullPath) && srcPathStr.endsWith(BLANG_SRC_FILE_SUFFIX) &&\n !RepoUtils.hasProjectRepo(sourceRootPath)) {\n programFile = compile(fullPath.getParent(), fullPath.getFileName(), offline, siddhiRuntimeFlag,\n experimentalFlag);\n } else if (Files.isDirectory(sourceRootPath)) {\n if (Files.isDirectory(fullPath) && !RepoUtils.hasProjectRepo(sourceRootPath)) {\n throw createLauncherException(\"you are trying to run a module that is not inside \" +\n \"a project. 
Run `ballerina init` from \" + sourceRootPath + \" to initialize it as a \" +\n \"project and then run the module.\");\n }\n if (Files.exists(fullPath)) {\n if (Files.isRegularFile(fullPath) && !srcPathStr.endsWith(BLANG_SRC_FILE_SUFFIX)) {\n throw createLauncherException(\"only modules, \" + BLANG_SRC_FILE_SUFFIX + \" and \" +\n BLANG_EXEC_FILE_SUFFIX + \" files can be used with the \" +\n \"'ballerina run' command.\");\n }\n } else {\n throw createLauncherException(\"ballerina source does not exist '\" + srcPathStr + \"'\");\n }\n \n \n \n if (Files.isRegularFile(fullPath) && srcPathStr.endsWith(BLANG_SRC_FILE_SUFFIX) &&\n sourcePath.getParent() != null) {\n throw createLauncherException(\"you are trying to run a ballerina file inside a module within a \" +\n \"project. Try running 'ballerina run '\");\n }\n programFile = compile(sourceRootPath, sourcePath, offline, siddhiRuntimeFlag, experimentalFlag);\n } else {\n throw createLauncherException(\"only modules, \" + BLANG_SRC_FILE_SUFFIX + \" and \" + BLANG_EXEC_FILE_SUFFIX\n + \" files can be used with the 'ballerina run' command.\");\n }\n\n \n \n if (!programFile.isMainEPAvailable() && !programFile.isServiceEPAvailable()) {\n throw createLauncherException(\"'\" + programFile.getProgramFilePath()\n + \"' does not contain a main function or a service\");\n }\n\n boolean runServicesOnly = !programFile.isMainEPAvailable();\n\n \n ServiceLoader listeners = ServiceLoader.load(LaunchListener.class);\n listeners.forEach(listener -> listener.beforeRunProgram(runServicesOnly));\n\n if (runServicesOnly) {\n if (args.length > 0) {\n throw LauncherUtils.createUsageExceptionWithHelp(\"arguments not allowed for services\");\n }\n runServices(programFile);\n } else {\n runMain(programFile, args);\n }\n BLangProgramRunner.resumeStates(programFile);\n listeners.forEach(listener -> listener.afterRunProgram(runServicesOnly));\n }\n\n @SuppressWarnings(\"unchecked\")\n public static void runMain(ProgramFile programFile, String[] 
args) {\n BValue[] result;\n int statusCode = 0;\n try {\n result = BLangProgramRunner.runMainFunc(programFile, args);\n if (result[0] != null && result[0].getType().getTag() == TypeTags.ERROR) {\n \n BError returnedError = (BError) result[0];\n errStream.print(prepareErrorReturnedErrorMessage(returnedError));\n\n if (returnedError.details != null) {\n BMap details = (BMap) returnedError.details;\n statusCode = getStatusCode(details);\n }\n } else if (programFile.isServiceEPAvailable()) {\n return;\n }\n } catch (BLangUsageException | BallerinaException e) {\n throw createUsageException(makeFirstLetterLowerCase(e.getLocalizedMessage()));\n }\n\n try {\n ThreadPoolFactory.getInstance().getWorkerExecutor().shutdown();\n ThreadPoolFactory.getInstance().getWorkerExecutor().awaitTermination(10000, TimeUnit.MILLISECONDS);\n } catch (InterruptedException ex) {\n \n }\n Runtime.getRuntime().exit(statusCode);\n }\n\n public static void runServices(ProgramFile programFile) {\n PrintStream outStream = System.out;\n\n ServerConnectorRegistry serverConnectorRegistry = new ServerConnectorRegistry();\n programFile.setServerConnectorRegistry(serverConnectorRegistry);\n serverConnectorRegistry.initServerConnectors();\n\n outStream.println(\"Initiating service(s) in '\" + programFile.getProgramFilePath() + \"'\");\n BLangProgramRunner.runService(programFile);\n\n serverConnectorRegistry.deploymentComplete();\n }\n\n public static Path getSourceRootPath(String sourceRoot) {\n \n Path sourceRootPath;\n if (sourceRoot == null || sourceRoot.isEmpty()) {\n sourceRootPath = Paths.get(System.getProperty(\"user.dir\"));\n } else {\n try {\n sourceRootPath = Paths.get(sourceRoot).toRealPath(LinkOption.NOFOLLOW_LINKS);\n } catch (IOException e) {\n throw new RuntimeException(\"error reading from directory: \" + sourceRoot + \" reason: \" +\n e.getMessage(), e);\n }\n\n if (!Files.isDirectory(sourceRootPath, LinkOption.NOFOLLOW_LINKS)) {\n throw new RuntimeException(\"source root must be a 
directory\");\n }\n }\n return sourceRootPath;\n }\n\n private static BLauncherException createUsageException(String errorMsg) {\n BLauncherException launcherException = new BLauncherException();\n launcherException.addMessage(\"ballerina: \" + errorMsg);\n return launcherException;\n }\n\n public static BLauncherException createUsageExceptionWithHelp(String errorMsg) {\n BLauncherException launcherException = new BLauncherException();\n launcherException.addMessage(\"ballerina: \" + errorMsg);\n launcherException.addMessage(\"Run 'ballerina help' for usage.\");\n return launcherException;\n }\n\n public static BLauncherException createLauncherException(String errorMsg) {\n BLauncherException launcherException = new BLauncherException();\n launcherException.addMessage(\"error: \" + errorMsg);\n return launcherException;\n }\n\n static void printLauncherException(BLauncherException e, PrintStream outStream) {\n List errorMessages = e.getMessages();\n errorMessages.forEach(outStream::println);\n }\n\n static String makeFirstLetterLowerCase(String s) {\n if (s == null) {\n return null;\n }\n char c[] = s.toCharArray();\n c[0] = Character.toLowerCase(c[0]);\n return new String(c);\n }\n\n /**\n * Write the process ID of this process to the file.\n *\n * @param ballerinaHome ballerina.home sys property value.\n */\n static void writePID(String ballerinaHome) {\n\n String[] cmd = {\"bash\", \"-c\", \"echo $PPID\"};\n Process p;\n String pid = \"\";\n try {\n p = Runtime.getRuntime().exec(cmd);\n } catch (IOException e) {\n \n \n return;\n }\n\n try (BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream(),\n StandardCharsets.UTF_8))) {\n StringBuilder builder = new StringBuilder();\n String line;\n while ((line = reader.readLine()) != null) {\n builder.append(line);\n }\n pid = builder.toString();\n } catch (Throwable e) {\n throw createLauncherException(\"failed to write ballerina.pid file: \"\n + makeFirstLetterLowerCase(e.getMessage()));\n 
}\n\n if (pid.length() != 0) {\n try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(\n new FileOutputStream(Paths.get(ballerinaHome, \"ballerina.pid\").toString()),\n StandardCharsets.UTF_8))) {\n writer.write(pid);\n } catch (IOException e) {\n throw createLauncherException(\"failed to write ballerina.pid file: \"\n + makeFirstLetterLowerCase(e.getMessage()));\n }\n }\n }\n\n /**\n * Compile and get the executable program file.\n * \n * @param sourceRootPath Path to the source root\n * @param sourcePath Path to the source from the source root\n * @param offline Should the build call remote repos\n * @param enableExpFeatures Flag indicating to enable the experimental feature\n * @return Executable program\n */\n public static ProgramFile compile(Path sourceRootPath, Path sourcePath, boolean offline,\n boolean enableExpFeatures) {\n CompilerContext context = new CompilerContext();\n CompilerOptions options = CompilerOptions.getInstance(context);\n options.put(PROJECT_DIR, sourceRootPath.toString());\n options.put(COMPILER_PHASE, CompilerPhase.CODE_GEN.toString());\n options.put(PRESERVE_WHITESPACE, \"false\");\n options.put(OFFLINE, Boolean.toString(offline));\n options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExpFeatures));\n\n \n Compiler compiler = Compiler.getInstance(context);\n BLangPackage entryPkgNode = compiler.compile(sourcePath.toString());\n CompiledBinaryFile.ProgramFile programFile = compiler.getExecutableProgram(entryPkgNode);\n if (programFile == null) {\n throw new BLangCompilerException(\"compilation contains errors\");\n }\n\n ProgramFile progFile = getExecutableProgram(programFile);\n progFile.setProgramFilePath(sourcePath);\n return progFile;\n }\n\n /**\n * Compile and get the executable program file.\n *\n * @param sourceRootPath Path to the source root\n * @param sourcePath Path to the source from the source root\n * @param offline Should the build call remote repos\n * @param siddhiRuntimeFlag Flag to 
enable siddhi runtime based stream processing\n * @param enableExpFeatures Flag indicating to enable the experimental feature\n * @return Executable program\n */\n public static ProgramFile compile(Path sourceRootPath, Path sourcePath, boolean offline,\n boolean siddhiRuntimeFlag, boolean enableExpFeatures) {\n CompilerContext context = new CompilerContext();\n CompilerOptions options = CompilerOptions.getInstance(context);\n options.put(PROJECT_DIR, sourceRootPath.toString());\n options.put(COMPILER_PHASE, CompilerPhase.CODE_GEN.toString());\n options.put(PRESERVE_WHITESPACE, \"false\");\n options.put(OFFLINE, Boolean.toString(offline));\n options.put(SIDDHI_RUNTIME_ENABLED, Boolean.toString(siddhiRuntimeFlag));\n options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExpFeatures));\n\n \n Compiler compiler = Compiler.getInstance(context);\n BLangPackage entryPkgNode = compiler.compile(sourcePath.toString());\n CompiledBinaryFile.ProgramFile programFile = compiler.getExecutableProgram(entryPkgNode);\n if (programFile == null) {\n throw new BLangCompilerException(\"compilation contains errors\");\n }\n\n ProgramFile progFile = getExecutableProgram(programFile);\n progFile.setProgramFilePath(sourcePath);\n return progFile;\n }\n\n /**\n * Get the executable program ({@link ProgramFile}) given the compiled program \n * ({@link org.wso2.ballerinalang.programfile.CompiledBinaryFile.ProgramFile}).\n * \n * @param programFile Compiled program\n * @return Executable program\n */\n public static ProgramFile getExecutableProgram(CompiledBinaryFile.ProgramFile programFile) {\n ByteArrayInputStream byteIS = null;\n ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();\n try {\n ProgramFileWriter.writeProgram(programFile, byteOutStream);\n\n ProgramFileReader reader = new ProgramFileReader();\n byteIS = new ByteArrayInputStream(byteOutStream.toByteArray());\n return reader.readProgram(byteIS);\n } catch (Throwable e) {\n throw 
createLauncherException(\"failed to compile file: \" + makeFirstLetterLowerCase(e.getMessage()));\n } finally {\n if (byteIS != null) {\n try {\n byteIS.close();\n } catch (IOException ignore) {\n }\n }\n\n try {\n byteOutStream.close();\n } catch (IOException ignore) {\n }\n }\n }\n\n /**\n * Initializes the {@link ConfigRegistry} and loads {@link LogManager} configs.\n *\n * @param sourceRootPath source directory\n * @param runtimeParams run time parameters\n * @param configFilePath config file path\n * @param observeFlag to indicate whether observability is enabled\n */\n public static void loadConfigurations(Path sourceRootPath, Map runtimeParams,\n String configFilePath, boolean observeFlag) {\n Path ballerinaConfPath = sourceRootPath.resolve(\"ballerina.conf\");\n try {\n ConfigRegistry.getInstance().initRegistry(runtimeParams, configFilePath, ballerinaConfPath);\n ((BLogManager) LogManager.getLogManager()).loadUserProvidedLogConfiguration();\n\n if (observeFlag) {\n ConfigRegistry.getInstance()\n .addConfiguration(ObservabilityConstants.CONFIG_METRICS_ENABLED, Boolean.TRUE);\n ConfigRegistry.getInstance()\n .addConfiguration(ObservabilityConstants.CONFIG_TRACING_ENABLED, Boolean.TRUE);\n }\n\n } catch (IOException e) {\n throw new BLangRuntimeException(\n \"failed to read the specified configuration file: \" + ballerinaConfPath.toString(), e);\n } catch (RuntimeException e) {\n throw new BLangRuntimeException(e.getMessage(), e);\n }\n }\n\n \n\n private static String prepareErrorReturnedErrorMessage(BError error) {\n return \"error: \" + BLangVMErrors.getErrorMessage(error);\n }\n}", "context_after": "class LauncherUtils {\n\n private static final String STATUS_CODE = \"statusCode\";\n private static final int DEFAULT_ERROR_RETURN_STATUS_CODE = 1;\n\n private static PrintStream errStream = System.err;\n\n public static void runProgram(Path sourceRootPath, Path sourcePath, Map runtimeParams,\n String configFilePath, String[] args, boolean offline, boolean 
observeFlag) {\n runProgram(sourceRootPath, sourcePath, runtimeParams, configFilePath, args, offline, observeFlag, false, true);\n }\n\n static void runProgram(Path sourceRootPath, Path sourcePath, Map runtimeParams,\n String configFilePath, String[] args, boolean offline, boolean observeFlag,\n boolean siddhiRuntimeFlag, boolean experimentalFlag) {\n ProgramFile programFile;\n String srcPathStr = sourcePath.toString();\n Path fullPath = sourceRootPath.resolve(sourcePath);\n \n System.setProperty(ProjectDirConstants.BALLERINA_SOURCE_ROOT, fullPath.getParent().toString());\n loadConfigurations(fullPath.getParent(), runtimeParams, configFilePath, observeFlag);\n\n if (srcPathStr.endsWith(BLANG_EXEC_FILE_SUFFIX)) {\n programFile = BLangProgramLoader.read(sourcePath);\n } else if (Files.isRegularFile(fullPath) && srcPathStr.endsWith(BLANG_SRC_FILE_SUFFIX) &&\n !RepoUtils.hasProjectRepo(sourceRootPath)) {\n programFile = compile(fullPath.getParent(), fullPath.getFileName(), offline, siddhiRuntimeFlag,\n experimentalFlag);\n } else if (Files.isDirectory(sourceRootPath)) {\n if (Files.isDirectory(fullPath) && !RepoUtils.hasProjectRepo(sourceRootPath)) {\n throw createLauncherException(\"you are trying to run a module that is not inside \" +\n \"a project. 
Run `ballerina init` from \" + sourceRootPath + \" to initialize it as a \" +\n \"project and then run the module.\");\n }\n if (Files.exists(fullPath)) {\n if (Files.isRegularFile(fullPath) && !srcPathStr.endsWith(BLANG_SRC_FILE_SUFFIX)) {\n throw createLauncherException(\"only modules, \" + BLANG_SRC_FILE_SUFFIX + \" and \" +\n BLANG_EXEC_FILE_SUFFIX + \" files can be used with the \" +\n \"'ballerina run' command.\");\n }\n } else {\n throw createLauncherException(\"ballerina source does not exist '\" + srcPathStr + \"'\");\n }\n \n \n \n if (Files.isRegularFile(fullPath) && srcPathStr.endsWith(BLANG_SRC_FILE_SUFFIX) &&\n sourcePath.getParent() != null) {\n throw createLauncherException(\"you are trying to run a ballerina file inside a module within a \" +\n \"project. Try running 'ballerina run '\");\n }\n programFile = compile(sourceRootPath, sourcePath, offline, siddhiRuntimeFlag, experimentalFlag);\n } else {\n throw createLauncherException(\"only modules, \" + BLANG_SRC_FILE_SUFFIX + \" and \" + BLANG_EXEC_FILE_SUFFIX\n + \" files can be used with the 'ballerina run' command.\");\n }\n\n \n \n if (!programFile.isMainEPAvailable() && !programFile.isServiceEPAvailable()) {\n throw createLauncherException(\"'\" + programFile.getProgramFilePath()\n + \"' does not contain a main function or a service\");\n }\n\n boolean runServicesOnly = !programFile.isMainEPAvailable();\n\n \n ServiceLoader listeners = ServiceLoader.load(LaunchListener.class);\n listeners.forEach(listener -> listener.beforeRunProgram(runServicesOnly));\n\n if (runServicesOnly) {\n if (args.length > 0) {\n throw LauncherUtils.createUsageExceptionWithHelp(\"arguments not allowed for services\");\n }\n runServices(programFile);\n } else {\n runMain(programFile, args);\n }\n BLangProgramRunner.resumeStates(programFile);\n listeners.forEach(listener -> listener.afterRunProgram(runServicesOnly));\n }\n\n @SuppressWarnings(\"unchecked\")\n public static void runMain(ProgramFile programFile, String[] 
args) {\n BValue[] result;\n int statusCode = 0;\n try {\n result = BLangProgramRunner.runMainFunc(programFile, args);\n if (result[0] != null && result[0].getType().getTag() == TypeTags.ERROR) {\n \n BError returnedError = (BError) result[0];\n errStream.print(prepareErrorReturnedErrorMessage(returnedError));\n\n if (returnedError.details != null) {\n BMap details = (BMap) returnedError.details;\n statusCode = getStatusCode(details);\n }\n } else if (programFile.isServiceEPAvailable()) {\n return;\n }\n } catch (BLangUsageException | BallerinaException e) {\n throw createUsageException(makeFirstLetterLowerCase(e.getLocalizedMessage()));\n }\n\n try {\n ThreadPoolFactory.getInstance().getWorkerExecutor().shutdown();\n ThreadPoolFactory.getInstance().getWorkerExecutor().awaitTermination(10000, TimeUnit.MILLISECONDS);\n } catch (InterruptedException ex) {\n \n }\n Runtime.getRuntime().exit(statusCode);\n }\n\n public static void runServices(ProgramFile programFile) {\n PrintStream outStream = System.out;\n\n ServerConnectorRegistry serverConnectorRegistry = new ServerConnectorRegistry();\n programFile.setServerConnectorRegistry(serverConnectorRegistry);\n serverConnectorRegistry.initServerConnectors();\n\n outStream.println(\"Initiating service(s) in '\" + programFile.getProgramFilePath() + \"'\");\n BLangProgramRunner.runService(programFile);\n\n serverConnectorRegistry.deploymentComplete();\n }\n\n public static Path getSourceRootPath(String sourceRoot) {\n \n Path sourceRootPath;\n if (sourceRoot == null || sourceRoot.isEmpty()) {\n sourceRootPath = Paths.get(System.getProperty(\"user.dir\"));\n } else {\n try {\n sourceRootPath = Paths.get(sourceRoot).toRealPath(LinkOption.NOFOLLOW_LINKS);\n } catch (IOException e) {\n throw new RuntimeException(\"error reading from directory: \" + sourceRoot + \" reason: \" +\n e.getMessage(), e);\n }\n\n if (!Files.isDirectory(sourceRootPath, LinkOption.NOFOLLOW_LINKS)) {\n throw new RuntimeException(\"source root must be a 
directory\");\n }\n }\n return sourceRootPath;\n }\n\n private static BLauncherException createUsageException(String errorMsg) {\n BLauncherException launcherException = new BLauncherException();\n launcherException.addMessage(\"ballerina: \" + errorMsg);\n return launcherException;\n }\n\n public static BLauncherException createUsageExceptionWithHelp(String errorMsg) {\n BLauncherException launcherException = new BLauncherException();\n launcherException.addMessage(\"ballerina: \" + errorMsg);\n launcherException.addMessage(\"Run 'ballerina help' for usage.\");\n return launcherException;\n }\n\n public static BLauncherException createLauncherException(String errorMsg) {\n BLauncherException launcherException = new BLauncherException();\n launcherException.addMessage(\"error: \" + errorMsg);\n return launcherException;\n }\n\n static void printLauncherException(BLauncherException e, PrintStream outStream) {\n List errorMessages = e.getMessages();\n errorMessages.forEach(outStream::println);\n }\n\n static String makeFirstLetterLowerCase(String s) {\n if (s == null) {\n return null;\n }\n char c[] = s.toCharArray();\n c[0] = Character.toLowerCase(c[0]);\n return new String(c);\n }\n\n /**\n * Write the process ID of this process to the file.\n *\n * @param ballerinaHome ballerina.home sys property value.\n */\n static void writePID(String ballerinaHome) {\n\n String[] cmd = {\"bash\", \"-c\", \"echo $PPID\"};\n Process p;\n String pid = \"\";\n try {\n p = Runtime.getRuntime().exec(cmd);\n } catch (IOException e) {\n \n \n return;\n }\n\n try (BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream(),\n StandardCharsets.UTF_8))) {\n StringBuilder builder = new StringBuilder();\n String line;\n while ((line = reader.readLine()) != null) {\n builder.append(line);\n }\n pid = builder.toString();\n } catch (Throwable e) {\n throw createLauncherException(\"failed to write ballerina.pid file: \"\n + makeFirstLetterLowerCase(e.getMessage()));\n 
}\n\n if (pid.length() != 0) {\n try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(\n new FileOutputStream(Paths.get(ballerinaHome, \"ballerina.pid\").toString()),\n StandardCharsets.UTF_8))) {\n writer.write(pid);\n } catch (IOException e) {\n throw createLauncherException(\"failed to write ballerina.pid file: \"\n + makeFirstLetterLowerCase(e.getMessage()));\n }\n }\n }\n\n /**\n * Compile and get the executable program file.\n * \n * @param sourceRootPath Path to the source root\n * @param sourcePath Path to the source from the source root\n * @param offline Should the build call remote repos\n * @param enableExpFeatures Flag indicating to enable the experimental feature\n * @return Executable program\n */\n public static ProgramFile compile(Path sourceRootPath, Path sourcePath, boolean offline,\n boolean enableExpFeatures) {\n CompilerContext context = new CompilerContext();\n CompilerOptions options = CompilerOptions.getInstance(context);\n options.put(PROJECT_DIR, sourceRootPath.toString());\n options.put(COMPILER_PHASE, CompilerPhase.CODE_GEN.toString());\n options.put(PRESERVE_WHITESPACE, \"false\");\n options.put(OFFLINE, Boolean.toString(offline));\n options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExpFeatures));\n\n \n Compiler compiler = Compiler.getInstance(context);\n BLangPackage entryPkgNode = compiler.compile(sourcePath.toString());\n CompiledBinaryFile.ProgramFile programFile = compiler.getExecutableProgram(entryPkgNode);\n if (programFile == null) {\n throw new BLangCompilerException(\"compilation contains errors\");\n }\n\n ProgramFile progFile = getExecutableProgram(programFile);\n progFile.setProgramFilePath(sourcePath);\n return progFile;\n }\n\n /**\n * Compile and get the executable program file.\n *\n * @param sourceRootPath Path to the source root\n * @param sourcePath Path to the source from the source root\n * @param offline Should the build call remote repos\n * @param siddhiRuntimeFlag Flag to 
enable siddhi runtime based stream processing\n * @param enableExpFeatures Flag indicating to enable the experimental feature\n * @return Executable program\n */\n public static ProgramFile compile(Path sourceRootPath, Path sourcePath, boolean offline,\n boolean siddhiRuntimeFlag, boolean enableExpFeatures) {\n CompilerContext context = new CompilerContext();\n CompilerOptions options = CompilerOptions.getInstance(context);\n options.put(PROJECT_DIR, sourceRootPath.toString());\n options.put(COMPILER_PHASE, CompilerPhase.CODE_GEN.toString());\n options.put(PRESERVE_WHITESPACE, \"false\");\n options.put(OFFLINE, Boolean.toString(offline));\n options.put(SIDDHI_RUNTIME_ENABLED, Boolean.toString(siddhiRuntimeFlag));\n options.put(EXPERIMENTAL_FEATURES_ENABLED, Boolean.toString(enableExpFeatures));\n\n \n Compiler compiler = Compiler.getInstance(context);\n BLangPackage entryPkgNode = compiler.compile(sourcePath.toString());\n CompiledBinaryFile.ProgramFile programFile = compiler.getExecutableProgram(entryPkgNode);\n if (programFile == null) {\n throw new BLangCompilerException(\"compilation contains errors\");\n }\n\n ProgramFile progFile = getExecutableProgram(programFile);\n progFile.setProgramFilePath(sourcePath);\n return progFile;\n }\n\n /**\n * Get the executable program ({@link ProgramFile}) given the compiled program \n * ({@link org.wso2.ballerinalang.programfile.CompiledBinaryFile.ProgramFile}).\n * \n * @param programFile Compiled program\n * @return Executable program\n */\n public static ProgramFile getExecutableProgram(CompiledBinaryFile.ProgramFile programFile) {\n ByteArrayInputStream byteIS = null;\n ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();\n try {\n ProgramFileWriter.writeProgram(programFile, byteOutStream);\n\n ProgramFileReader reader = new ProgramFileReader();\n byteIS = new ByteArrayInputStream(byteOutStream.toByteArray());\n return reader.readProgram(byteIS);\n } catch (Throwable e) {\n throw 
createLauncherException(\"failed to compile file: \" + makeFirstLetterLowerCase(e.getMessage()));\n } finally {\n if (byteIS != null) {\n try {\n byteIS.close();\n } catch (IOException ignore) {\n }\n }\n\n try {\n byteOutStream.close();\n } catch (IOException ignore) {\n }\n }\n }\n\n /**\n * Initializes the {@link ConfigRegistry} and loads {@link LogManager} configs.\n *\n * @param sourceRootPath source directory\n * @param runtimeParams run time parameters\n * @param configFilePath config file path\n * @param observeFlag to indicate whether observability is enabled\n */\n public static void loadConfigurations(Path sourceRootPath, Map runtimeParams,\n String configFilePath, boolean observeFlag) {\n Path ballerinaConfPath = sourceRootPath.resolve(\"ballerina.conf\");\n try {\n ConfigRegistry.getInstance().initRegistry(runtimeParams, configFilePath, ballerinaConfPath);\n ((BLogManager) LogManager.getLogManager()).loadUserProvidedLogConfiguration();\n\n if (observeFlag) {\n ConfigRegistry.getInstance()\n .addConfiguration(ObservabilityConstants.CONFIG_METRICS_ENABLED, Boolean.TRUE);\n ConfigRegistry.getInstance()\n .addConfiguration(ObservabilityConstants.CONFIG_TRACING_ENABLED, Boolean.TRUE);\n }\n\n } catch (IOException e) {\n throw new BLangRuntimeException(\n \"failed to read the specified configuration file: \" + ballerinaConfPath.toString(), e);\n } catch (RuntimeException e) {\n throw new BLangRuntimeException(e.getMessage(), e);\n }\n }\n\n \n\n private static String prepareErrorReturnedErrorMessage(BError error) {\n return \"error: \" + BLangVMErrors.getErrorMessage(error);\n }\n}" }, { "comment": "please help change to ResourceManagerUtils.sleep", "method_body": "public void canCRUDRedisCache() throws Exception {\n \n Creatable resourceGroups =\n resourceManager.resourceGroups().define(rgNameSecond).withRegion(Region.US_CENTRAL);\n\n Creatable redisCacheDefinition1 =\n redisManager\n .redisCaches()\n .define(rrName)\n .withRegion(Region.ASIA_EAST)\n 
.withNewResourceGroup(rgName)\n .withBasicSku();\n Creatable redisCacheDefinition2 =\n redisManager\n .redisCaches()\n .define(rrNameSecond)\n .withRegion(Region.US_CENTRAL)\n .withNewResourceGroup(resourceGroups)\n .withPremiumSku()\n .withShardCount(2)\n .withPatchSchedule(DayOfWeek.SUNDAY, 10, Duration.ofMinutes(302));\n Creatable redisCacheDefinition3 =\n redisManager\n .redisCaches()\n .define(rrNameThird)\n .withRegion(Region.US_CENTRAL)\n .withNewResourceGroup(resourceGroups)\n .withPremiumSku(2)\n .withRedisConfiguration(\"maxclients\", \"2\")\n .withNonSslPort()\n .withFirewallRule(\"rule1\", \"192.168.0.1\", \"192.168.0.4\")\n .withFirewallRule(\"rule2\", \"192.168.0.10\", \"192.168.0.40\");\n \n \n\n CreatedResources batchRedisCaches =\n redisManager.redisCaches().create(redisCacheDefinition1, redisCacheDefinition2, redisCacheDefinition3);\n\n\n\n\n\n\n\n\n\n RedisCache redisCache = batchRedisCaches.get(redisCacheDefinition1.key());\n RedisCache redisCachePremium = batchRedisCaches.get(redisCacheDefinition3.key());\n Assertions.assertEquals(rgName, redisCache.resourceGroupName());\n Assertions.assertEquals(SkuName.BASIC, redisCache.sku().name());\n\n \n RedisCachePremium premiumCache = redisCachePremium.asPremium();\n Assertions.assertEquals(SkuFamily.P, premiumCache.sku().family());\n Assertions.assertEquals(2, premiumCache.firewallRules().size());\n Assertions.assertTrue(premiumCache.firewallRules().containsKey(\"rule1\"));\n Assertions.assertTrue(premiumCache.firewallRules().containsKey(\"rule2\"));\n\n \n premiumCache\n .update()\n .withRedisConfiguration(\"maxclients\", \"3\")\n .withoutFirewallRule(\"rule1\")\n .withFirewallRule(\"rule3\", \"192.168.0.10\", \"192.168.0.104\")\n .withoutMinimumTlsVersion()\n .apply();\n Thread.sleep(10000);\n premiumCache.refresh();\n\n Assertions.assertEquals(2, premiumCache.firewallRules().size());\n Assertions.assertTrue(premiumCache.firewallRules().containsKey(\"rule2\"));\n 
Assertions.assertTrue(premiumCache.firewallRules().containsKey(\"rule3\"));\n Assertions.assertFalse(premiumCache.firewallRules().containsKey(\"rule1\"));\n\n premiumCache.update().withoutRedisConfiguration(\"maxclients\").apply();\n\n premiumCache.update().withoutRedisConfiguration().apply();\n\n Assertions.assertEquals(0, premiumCache.patchSchedules().size());\n premiumCache.update().withPatchSchedule(DayOfWeek.MONDAY, 1).withPatchSchedule(DayOfWeek.TUESDAY, 5).apply();\n\n Assertions.assertEquals(2, premiumCache.patchSchedules().size());\n \n premiumCache.forceReboot(RebootType.ALL_NODES);\n\n \n List patchSchedule = premiumCache.listPatchSchedules();\n Assertions.assertEquals(2, patchSchedule.size());\n\n premiumCache.deletePatchSchedule();\n\n patchSchedule = redisManager.redisCaches().getById(premiumCache.id()).asPremium().listPatchSchedules();\n Assertions.assertNull(patchSchedule);\n\n \n List redisCaches =\n redisManager.redisCaches().listByResourceGroup(rgName).stream().collect(Collectors.toList());\n boolean found = false;\n for (RedisCache existingRedisCache : redisCaches) {\n if (existingRedisCache.name().equals(rrName)) {\n found = true;\n }\n }\n Assertions.assertTrue(found);\n Assertions.assertEquals(1, redisCaches.size());\n\n \n redisCaches = redisManager.redisCaches().list().stream().collect(Collectors.toList());\n found = false;\n for (RedisCache existingRedisCache : redisCaches) {\n if (existingRedisCache.name().equals(rrName)) {\n found = true;\n }\n }\n Assertions.assertTrue(found);\n Assertions.assertTrue(redisCaches.size() >= 3);\n\n \n RedisCache redisCacheGet = redisManager.redisCaches().getByResourceGroup(rgName, rrName);\n Assertions.assertNotNull(redisCacheGet);\n Assertions.assertEquals(redisCache.id(), redisCacheGet.id());\n Assertions.assertEquals(redisCache.provisioningState(), redisCacheGet.provisioningState());\n\n \n RedisAccessKeys redisKeys = redisCache.keys();\n Assertions.assertNotNull(redisKeys);\n 
Assertions.assertNotNull(redisKeys.primaryKey());\n Assertions.assertNotNull(redisKeys.secondaryKey());\n\n \n RedisAccessKeys oldKeys = redisCache.refreshKeys();\n RedisAccessKeys updatedPrimaryKey = redisCache.regenerateKey(RedisKeyType.PRIMARY);\n RedisAccessKeys updatedSecondaryKey = redisCache.regenerateKey(RedisKeyType.SECONDARY);\n Assertions.assertNotNull(oldKeys);\n Assertions.assertNotNull(updatedPrimaryKey);\n Assertions.assertNotNull(updatedSecondaryKey);\n if (!isPlaybackMode()) {\n Assertions.assertNotEquals(oldKeys.primaryKey(), updatedPrimaryKey.primaryKey());\n Assertions.assertEquals(oldKeys.secondaryKey(), updatedPrimaryKey.secondaryKey());\n Assertions.assertNotEquals(oldKeys.secondaryKey(), updatedSecondaryKey.secondaryKey());\n Assertions.assertNotEquals(updatedPrimaryKey.secondaryKey(), updatedSecondaryKey.secondaryKey());\n Assertions.assertEquals(updatedPrimaryKey.primaryKey(), updatedSecondaryKey.primaryKey());\n }\n\n \n redisCache = redisCache.update().withStandardSku().apply();\n Assertions.assertEquals(SkuName.STANDARD, redisCache.sku().name());\n Assertions.assertEquals(SkuFamily.C, redisCache.sku().family());\n\n try {\n redisCache.update().withBasicSku(1).apply();\n Assertions.fail();\n } catch (ManagementException e) {\n \n }\n\n \n redisCache.refresh();\n\n \n redisManager.redisCaches().deleteById(redisCache.id());\n\n \n \n \n \n \n \n \n \n /*premiumCache.exportData(storageAccount.name(),\"snapshot1\");\n\n premiumCache.importData(Arrays.asList(\"snapshot1\"));*/\n }", "target_code": "Thread.sleep(10000);", "method_body_after": "public void canCRUDRedisCache() throws Exception {\n \n Creatable resourceGroups =\n resourceManager.resourceGroups().define(rgNameSecond).withRegion(Region.US_CENTRAL);\n\n Creatable redisCacheDefinition1 =\n redisManager\n .redisCaches()\n .define(rrName)\n .withRegion(Region.ASIA_EAST)\n .withNewResourceGroup(rgName)\n .withBasicSku();\n Creatable redisCacheDefinition2 =\n redisManager\n 
.redisCaches()\n .define(rrNameSecond)\n .withRegion(Region.US_CENTRAL)\n .withNewResourceGroup(resourceGroups)\n .withPremiumSku()\n .withShardCount(2)\n .withPatchSchedule(DayOfWeek.SUNDAY, 10, Duration.ofMinutes(302));\n Creatable redisCacheDefinition3 =\n redisManager\n .redisCaches()\n .define(rrNameThird)\n .withRegion(Region.US_CENTRAL)\n .withNewResourceGroup(resourceGroups)\n .withPremiumSku(2)\n .withRedisConfiguration(\"maxclients\", \"2\")\n .withNonSslPort()\n .withFirewallRule(\"rule1\", \"192.168.0.1\", \"192.168.0.4\")\n .withFirewallRule(\"rule2\", \"192.168.0.10\", \"192.168.0.40\");\n \n \n\n CreatedResources batchRedisCaches =\n redisManager.redisCaches().create(redisCacheDefinition1, redisCacheDefinition2, redisCacheDefinition3);\n\n\n\n\n\n\n\n\n\n RedisCache redisCache = batchRedisCaches.get(redisCacheDefinition1.key());\n RedisCache redisCachePremium = batchRedisCaches.get(redisCacheDefinition3.key());\n Assertions.assertEquals(rgName, redisCache.resourceGroupName());\n Assertions.assertEquals(SkuName.BASIC, redisCache.sku().name());\n\n \n RedisCachePremium premiumCache = redisCachePremium.asPremium();\n Assertions.assertEquals(SkuFamily.P, premiumCache.sku().family());\n Assertions.assertEquals(2, premiumCache.firewallRules().size());\n Assertions.assertTrue(premiumCache.firewallRules().containsKey(\"rule1\"));\n Assertions.assertTrue(premiumCache.firewallRules().containsKey(\"rule2\"));\n\n \n premiumCache\n .update()\n .withRedisConfiguration(\"maxclients\", \"3\")\n .withoutFirewallRule(\"rule1\")\n .withFirewallRule(\"rule3\", \"192.168.0.10\", \"192.168.0.104\")\n .withoutMinimumTlsVersion()\n .apply();\n\n ResourceManagerUtils.sleep(Duration.ofSeconds(10));\n\n premiumCache.refresh();\n\n Assertions.assertEquals(2, premiumCache.firewallRules().size());\n Assertions.assertTrue(premiumCache.firewallRules().containsKey(\"rule2\"));\n Assertions.assertTrue(premiumCache.firewallRules().containsKey(\"rule3\"));\n 
Assertions.assertFalse(premiumCache.firewallRules().containsKey(\"rule1\"));\n\n premiumCache.update().withoutRedisConfiguration(\"maxclients\").apply();\n\n premiumCache.update().withoutRedisConfiguration().apply();\n\n Assertions.assertEquals(0, premiumCache.patchSchedules().size());\n premiumCache.update().withPatchSchedule(DayOfWeek.MONDAY, 1).withPatchSchedule(DayOfWeek.TUESDAY, 5).apply();\n\n Assertions.assertEquals(2, premiumCache.patchSchedules().size());\n \n premiumCache.forceReboot(RebootType.ALL_NODES);\n\n \n List patchSchedule = premiumCache.listPatchSchedules();\n Assertions.assertEquals(2, patchSchedule.size());\n\n premiumCache.deletePatchSchedule();\n\n patchSchedule = redisManager.redisCaches().getById(premiumCache.id()).asPremium().listPatchSchedules();\n Assertions.assertNull(patchSchedule);\n\n \n List redisCaches =\n redisManager.redisCaches().listByResourceGroup(rgName).stream().collect(Collectors.toList());\n boolean found = false;\n for (RedisCache existingRedisCache : redisCaches) {\n if (existingRedisCache.name().equals(rrName)) {\n found = true;\n }\n }\n Assertions.assertTrue(found);\n Assertions.assertEquals(1, redisCaches.size());\n\n \n redisCaches = redisManager.redisCaches().list().stream().collect(Collectors.toList());\n found = false;\n for (RedisCache existingRedisCache : redisCaches) {\n if (existingRedisCache.name().equals(rrName)) {\n found = true;\n }\n }\n Assertions.assertTrue(found);\n Assertions.assertTrue(redisCaches.size() >= 3);\n\n \n RedisCache redisCacheGet = redisManager.redisCaches().getByResourceGroup(rgName, rrName);\n Assertions.assertNotNull(redisCacheGet);\n Assertions.assertEquals(redisCache.id(), redisCacheGet.id());\n Assertions.assertEquals(redisCache.provisioningState(), redisCacheGet.provisioningState());\n\n \n RedisAccessKeys redisKeys = redisCache.keys();\n Assertions.assertNotNull(redisKeys);\n Assertions.assertNotNull(redisKeys.primaryKey());\n 
Assertions.assertNotNull(redisKeys.secondaryKey());\n\n \n RedisAccessKeys oldKeys = redisCache.refreshKeys();\n RedisAccessKeys updatedPrimaryKey = redisCache.regenerateKey(RedisKeyType.PRIMARY);\n RedisAccessKeys updatedSecondaryKey = redisCache.regenerateKey(RedisKeyType.SECONDARY);\n Assertions.assertNotNull(oldKeys);\n Assertions.assertNotNull(updatedPrimaryKey);\n Assertions.assertNotNull(updatedSecondaryKey);\n if (!isPlaybackMode()) {\n Assertions.assertNotEquals(oldKeys.primaryKey(), updatedPrimaryKey.primaryKey());\n Assertions.assertEquals(oldKeys.secondaryKey(), updatedPrimaryKey.secondaryKey());\n Assertions.assertNotEquals(oldKeys.secondaryKey(), updatedSecondaryKey.secondaryKey());\n Assertions.assertNotEquals(updatedPrimaryKey.secondaryKey(), updatedSecondaryKey.secondaryKey());\n Assertions.assertEquals(updatedPrimaryKey.primaryKey(), updatedSecondaryKey.primaryKey());\n }\n\n \n redisCache = redisCache.update().withStandardSku().apply();\n Assertions.assertEquals(SkuName.STANDARD, redisCache.sku().name());\n Assertions.assertEquals(SkuFamily.C, redisCache.sku().family());\n\n try {\n redisCache.update().withBasicSku(1).apply();\n Assertions.fail();\n } catch (ManagementException e) {\n \n }\n\n \n redisCache.refresh();\n\n \n redisManager.redisCaches().deleteById(redisCache.id());\n\n \n \n \n \n \n \n \n \n /*premiumCache.exportData(storageAccount.name(),\"snapshot1\");\n\n premiumCache.importData(Arrays.asList(\"snapshot1\"));*/\n }", "context_before": "class RedisCacheOperationsTests extends RedisManagementTest {\n\n @Test\n @SuppressWarnings(\"unchecked\")\n \n\n @Test\n public void canRedisVersionUpdate(){\n RedisCache.MajorVersion redisVersion = RedisCache.MajorVersion.V4;\n\n RedisCache redisCache =\n redisManager\n .redisCaches()\n .define(rrName)\n .withRegion(Region.ASIA_EAST)\n .withNewResourceGroup(rgName)\n .withBasicSku()\n .withRedisVersion(redisVersion)\n .create()\n ;\n\n 
Assertions.assertTrue(redisCache.redisVersion().startsWith(redisVersion.getValue()));\n\n redisVersion = RedisCache.MajorVersion.V6;\n redisCache = redisCache.update()\n .withRedisVersion(redisVersion)\n .apply(); \n\n ResourceManagerUtils.sleep(Duration.ofSeconds(300)); \n\n redisCache = redisCache.refresh();\n Assertions.assertTrue(redisCache.redisVersion().startsWith(redisVersion.getValue()));\n\n }\n\n @Test\n public void canCRUDLinkedServers() throws Exception {\n\n RedisCache rgg =\n redisManager\n .redisCaches()\n .define(rrNameThird)\n .withRegion(Region.US_CENTRAL)\n .withNewResourceGroup(rgNameSecond)\n .withPremiumSku(2)\n .withPatchSchedule(DayOfWeek.SATURDAY, 5, Duration.ofHours(5))\n .withRedisConfiguration(\"maxclients\", \"2\")\n .withNonSslPort()\n .withFirewallRule(\"rule1\", \"192.168.0.1\", \"192.168.0.4\")\n .withFirewallRule(\"rule2\", \"192.168.0.10\", \"192.168.0.40\")\n .create();\n\n RedisCache rggLinked =\n redisManager\n .redisCaches()\n .define(rrNameSecond)\n .withRegion(Region.US_EAST)\n .withExistingResourceGroup(rgNameSecond)\n .withPremiumSku(2)\n .create();\n\n Assertions.assertNotNull(rgg);\n Assertions.assertNotNull(rggLinked);\n\n RedisCachePremium premiumRgg = rgg.asPremium();\n\n String llName = premiumRgg.addLinkedServer(rggLinked.id(), rggLinked.regionName(), ReplicationRole.PRIMARY);\n\n Assertions.assertEquals(ResourceUtils.nameFromResourceId(rggLinked.id()), llName);\n\n Map linkedServers = premiumRgg.listLinkedServers();\n Assertions.assertEquals(1, linkedServers.size());\n Assertions.assertTrue(linkedServers.keySet().contains(llName));\n Assertions.assertEquals(ReplicationRole.PRIMARY, linkedServers.get(llName));\n\n ReplicationRole repRole = premiumRgg.getLinkedServerRole(llName);\n Assertions.assertEquals(ReplicationRole.PRIMARY, repRole);\n\n premiumRgg.removeLinkedServer(llName);\n\n rgg.update().withoutPatchSchedule().apply();\n\n rggLinked.update().withFirewallRule(\"rulesmhule\", \"192.168.1.10\", 
\"192.168.1.20\").apply();\n\n linkedServers = premiumRgg.listLinkedServers();\n Assertions.assertEquals(0, linkedServers.size());\n }\n}", "context_after": "class RedisCacheOperationsTests extends RedisManagementTest {\n\n @Test\n @SuppressWarnings(\"unchecked\")\n \n\n @Test\n public void canRedisVersionUpdate() {\n RedisCache.RedisVersion redisVersion = RedisCache.RedisVersion.V4;\n\n RedisCache redisCache =\n redisManager\n .redisCaches()\n .define(rrName)\n .withRegion(Region.ASIA_EAST)\n .withNewResourceGroup(rgName)\n .withBasicSku()\n .withRedisVersion(redisVersion)\n .create();\n\n Assertions.assertTrue(redisCache.redisVersion().startsWith(redisVersion.getValue()));\n\n redisVersion = RedisCache.RedisVersion.V6;\n redisCache = redisCache.update()\n .withRedisVersion(redisVersion)\n .apply(); \n\n ResourceManagerUtils.sleep(Duration.ofSeconds(300)); \n\n redisCache = redisCache.refresh();\n Assertions.assertTrue(redisCache.redisVersion().startsWith(redisVersion.getValue()));\n\n }\n\n @Test\n public void canCRUDLinkedServers() throws Exception {\n\n RedisCache rgg =\n redisManager\n .redisCaches()\n .define(rrNameThird)\n .withRegion(Region.US_CENTRAL)\n .withNewResourceGroup(rgNameSecond)\n .withPremiumSku(2)\n .withPatchSchedule(DayOfWeek.SATURDAY, 5, Duration.ofHours(5))\n .withRedisConfiguration(\"maxclients\", \"2\")\n .withNonSslPort()\n .withFirewallRule(\"rule1\", \"192.168.0.1\", \"192.168.0.4\")\n .withFirewallRule(\"rule2\", \"192.168.0.10\", \"192.168.0.40\")\n .create();\n\n RedisCache rggLinked =\n redisManager\n .redisCaches()\n .define(rrNameSecond)\n .withRegion(Region.US_EAST)\n .withExistingResourceGroup(rgNameSecond)\n .withPremiumSku(2)\n .create();\n\n Assertions.assertNotNull(rgg);\n Assertions.assertNotNull(rggLinked);\n\n RedisCachePremium premiumRgg = rgg.asPremium();\n\n String llName = premiumRgg.addLinkedServer(rggLinked.id(), rggLinked.regionName(), ReplicationRole.PRIMARY);\n\n 
Assertions.assertEquals(ResourceUtils.nameFromResourceId(rggLinked.id()), llName);\n\n Map linkedServers = premiumRgg.listLinkedServers();\n Assertions.assertEquals(1, linkedServers.size());\n Assertions.assertTrue(linkedServers.keySet().contains(llName));\n Assertions.assertEquals(ReplicationRole.PRIMARY, linkedServers.get(llName));\n\n ReplicationRole repRole = premiumRgg.getLinkedServerRole(llName);\n Assertions.assertEquals(ReplicationRole.PRIMARY, repRole);\n\n premiumRgg.removeLinkedServer(llName);\n\n rgg.update().withoutPatchSchedule().apply();\n\n rggLinked.update().withFirewallRule(\"rulesmhule\", \"192.168.1.10\", \"192.168.1.20\").apply();\n\n linkedServers = premiumRgg.listLinkedServers();\n Assertions.assertEquals(0, linkedServers.size());\n }\n}" }, { "comment": "`recognizeEntitiesActionResults.get(taskIndex)` `recognizePiiEntitiesActionResults.get(taskIndex)` or `extractKeyPhrasesActionResults.get(taskIndex)` get the object from the sorted output. So there is no need to reorder the errors.", "method_body": "private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {\n TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();\n final List piiTasksItems =\n tasksStateTasks.getEntityRecognitionPiiTasks();\n final List entityRecognitionTasksItems =\n tasksStateTasks.getEntityRecognitionTasks();\n final List keyPhraseExtractionTasks =\n tasksStateTasks.getKeyPhraseExtractionTasks();\n\n List recognizeEntitiesActionResults = new ArrayList<>();\n List recognizePiiEntitiesActionResults = new ArrayList<>();\n List extractKeyPhrasesActionResults = new ArrayList<>();\n if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {\n for (int i = 0; i < entityRecognitionTasksItems.size(); i++) {\n final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i);\n final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();\n 
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,\n toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));\n TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,\n taskItem.getLastUpdateDateTime());\n recognizeEntitiesActionResults.add(actionResult);\n }\n }\n if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {\n for (int i = 0; i < piiTasksItems.size(); i++) {\n final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i);\n final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();\n RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,\n toRecognizePiiEntitiesResultCollection(taskItem.getResults()));\n TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,\n taskItem.getLastUpdateDateTime());\n recognizePiiEntitiesActionResults.add(actionResult);\n }\n }\n if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {\n for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) {\n final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i);\n final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();\n ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,\n toExtractKeyPhrasesResultCollection(taskItem.getResults()));\n TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,\n taskItem.getLastUpdateDateTime());\n extractKeyPhrasesActionResults.add(actionResult);\n }\n }\n\n final List errors = analyzeJobState.getErrors();\n if (!CoreUtils.isNullOrEmpty(errors)) {\n for (TextAnalyticsError error : errors) {\n final String[] targetPair = parseActionErrorTarget(error.getTarget());\n final String taskName = targetPair[0];\n final Integer taskIndex = Integer.valueOf(targetPair[1]);\n final TextAnalyticsActionResult actionResult;\n if (\"entityRecognitionTasks\".equals(taskName)) {\n actionResult = recognizeEntitiesActionResults.get(taskIndex);\n 
} else if (\"entityRecognitionPiiTasks\".equals(taskName)) {\n actionResult = recognizePiiEntitiesActionResults.get(taskIndex);\n } else if (\"keyPhraseExtractionTasks\".equals(taskName)) {\n actionResult = extractKeyPhrasesActionResults.get(taskIndex);\n } else {\n throw logger.logExceptionAsError(new RuntimeException(\n \"Invalid task name in target reference, \" + taskName));\n }\n\n TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);\n TextAnalyticsActionResultPropertiesHelper.setError(actionResult,\n new com.azure.ai.textanalytics.models.TextAnalyticsError(\n TextAnalyticsErrorCode.fromString(\n error.getCode() == null ? null : error.getCode().toString()),\n error.getMessage(), null));\n }\n }\n\n final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();\n\n final RequestStatistics requestStatistics = analyzeJobState.getStatistics();\n TextDocumentBatchStatistics batchStatistics = null;\n if (requestStatistics != null) {\n batchStatistics = new TextDocumentBatchStatistics(\n requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),\n requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()\n );\n }\n\n AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);\n AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,\n IterableStream.of(recognizeEntitiesActionResults));\n AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,\n IterableStream.of(recognizePiiEntitiesActionResults));\n AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,\n IterableStream.of(extractKeyPhrasesActionResults));\n return analyzeBatchActionsResult;\n }", "target_code": "TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);", "method_body_after": "private 
AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {\n TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();\n final List piiTasksItems =\n tasksStateTasks.getEntityRecognitionPiiTasks();\n final List entityRecognitionTasksItems =\n tasksStateTasks.getEntityRecognitionTasks();\n final List keyPhraseExtractionTasks =\n tasksStateTasks.getKeyPhraseExtractionTasks();\n\n List recognizeEntitiesActionResults = new ArrayList<>();\n List recognizePiiEntitiesActionResults = new ArrayList<>();\n List extractKeyPhrasesActionResults = new ArrayList<>();\n if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {\n for (int i = 0; i < entityRecognitionTasksItems.size(); i++) {\n final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i);\n final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();\n RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,\n toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));\n TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,\n taskItem.getLastUpdateDateTime());\n recognizeEntitiesActionResults.add(actionResult);\n }\n }\n if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {\n for (int i = 0; i < piiTasksItems.size(); i++) {\n final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i);\n final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();\n RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,\n toRecognizePiiEntitiesResultCollection(taskItem.getResults()));\n TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,\n taskItem.getLastUpdateDateTime());\n recognizePiiEntitiesActionResults.add(actionResult);\n }\n }\n if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {\n for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) {\n final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = 
keyPhraseExtractionTasks.get(i);\n final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();\n ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,\n toExtractKeyPhrasesResultCollection(taskItem.getResults()));\n TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,\n taskItem.getLastUpdateDateTime());\n extractKeyPhrasesActionResults.add(actionResult);\n }\n }\n\n final List errors = analyzeJobState.getErrors();\n if (!CoreUtils.isNullOrEmpty(errors)) {\n for (TextAnalyticsError error : errors) {\n final String[] targetPair = parseActionErrorTarget(error.getTarget());\n final String taskName = targetPair[0];\n final Integer taskIndex = Integer.valueOf(targetPair[1]);\n final TextAnalyticsActionResult actionResult;\n if (\"entityRecognitionTasks\".equals(taskName)) {\n actionResult = recognizeEntitiesActionResults.get(taskIndex);\n } else if (\"entityRecognitionPiiTasks\".equals(taskName)) {\n actionResult = recognizePiiEntitiesActionResults.get(taskIndex);\n } else if (\"keyPhraseExtractionTasks\".equals(taskName)) {\n actionResult = extractKeyPhrasesActionResults.get(taskIndex);\n } else {\n throw logger.logExceptionAsError(new RuntimeException(\n \"Invalid task name in target reference, \" + taskName));\n }\n\n TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);\n TextAnalyticsActionResultPropertiesHelper.setError(actionResult,\n new com.azure.ai.textanalytics.models.TextAnalyticsError(\n TextAnalyticsErrorCode.fromString(\n error.getCode() == null ? 
null : error.getCode().toString()),\n error.getMessage(), null));\n }\n }\n\n final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();\n\n final RequestStatistics requestStatistics = analyzeJobState.getStatistics();\n TextDocumentBatchStatistics batchStatistics = null;\n if (requestStatistics != null) {\n batchStatistics = new TextDocumentBatchStatistics(\n requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),\n requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()\n );\n }\n\n AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);\n AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,\n IterableStream.of(recognizeEntitiesActionResults));\n AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,\n IterableStream.of(recognizePiiEntitiesActionResults));\n AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,\n IterableStream.of(extractKeyPhrasesActionResults));\n return analyzeBatchActionsResult;\n }", "context_before": "class AnalyzeBatchActionsAsyncClient {\n private static final String REGEX_ACTION_ERROR_TARGET = \"\n\n private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);\n private final TextAnalyticsClientImpl service;\n\n AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {\n this.service = service;\n }\n\n PollerFlux> beginAnalyzeBatchActions(\n Iterable documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,\n Context context) {\n try {\n inputDocumentsValidation(documents);\n options = getNotNullAnalyzeBatchActionsOptions(options);\n final Context finalContext = getNotNullContext(context);\n final AnalyzeBatchInput analyzeBatchInput =\n new AnalyzeBatchInput()\n .setAnalysisInput(new 
MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))\n .setTasks(getJobManifestTasks(actions));\n analyzeBatchInput.setDisplayName(actions.getDisplayName());\n final boolean finalIncludeStatistics = options.isIncludeStatistics();\n return new PollerFlux<>(\n \n \n DEFAULT_POLL_INTERVAL,\n activationOperation(\n service.analyzeWithResponseAsync(analyzeBatchInput,\n finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))\n .map(analyzeResponse -> {\n final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =\n new AnalyzeBatchActionsOperationDetail();\n AnalyzeBatchActionsOperationDetailPropertiesHelper\n .setOperationId(textAnalyticsOperationResult,\n parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));\n return textAnalyticsOperationResult;\n })),\n pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,\n finalIncludeStatistics, null, null, finalContext)),\n (activationResponse, pollingContext) ->\n Mono.error(new RuntimeException(\"Cancellation is not supported.\")),\n fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(\n operationId, null, null, finalIncludeStatistics, finalContext)))\n );\n } catch (RuntimeException ex) {\n return PollerFlux.error(ex);\n }\n }\n\n PollerFlux>\n beginAnalyzeBatchActionsIterable(Iterable documents, TextAnalyticsActions actions,\n AnalyzeBatchActionsOptions options, Context context) {\n try {\n inputDocumentsValidation(documents);\n options = getNotNullAnalyzeBatchActionsOptions(options);\n final Context finalContext = getNotNullContext(context);\n final AnalyzeBatchInput analyzeBatchInput =\n new AnalyzeBatchInput()\n .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))\n .setTasks(getJobManifestTasks(actions));\n analyzeBatchInput.setDisplayName(actions.getDisplayName());\n final boolean finalIncludeStatistics = options.isIncludeStatistics();\n return 
new PollerFlux<>(\n \n \n DEFAULT_POLL_INTERVAL,\n activationOperation(\n service.analyzeWithResponseAsync(analyzeBatchInput,\n finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))\n .map(analyzeResponse -> {\n final AnalyzeBatchActionsOperationDetail operationDetail =\n new AnalyzeBatchActionsOperationDetail();\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,\n parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));\n return operationDetail;\n })),\n pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,\n finalIncludeStatistics, null, null, finalContext)),\n (activationResponse, pollingContext) ->\n Mono.error(new RuntimeException(\"Cancellation is not supported.\")),\n fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(\n operationId, null, null, finalIncludeStatistics, finalContext))))\n );\n } catch (RuntimeException ex) {\n return PollerFlux.error(ex);\n }\n }\n\n private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {\n return new JobManifestTasks()\n .setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null\n : StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(\n action -> {\n if (action == null) {\n return null;\n }\n final EntitiesTask entitiesTask = new EntitiesTask();\n entitiesTask.setParameters(\n \n \n \n new EntitiesTaskParameters()\n .setModelVersion(getNotNullModelVersion(action.getModelVersion())));\n return entitiesTask;\n }).collect(Collectors.toList()))\n .setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? 
null\n : StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(\n action -> {\n if (action == null) {\n return null;\n }\n final PiiTask piiTask = new PiiTask();\n piiTask.setParameters(\n new PiiTaskParameters()\n \n \n \n .setModelVersion(getNotNullModelVersion(action.getModelVersion()))\n .setDomain(PiiTaskParametersDomain.fromString(\n action.getDomainFilter() == null ? null\n : action.getDomainFilter().toString())));\n return piiTask;\n }).collect(Collectors.toList()))\n .setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null\n : StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(\n action -> {\n if (action == null) {\n return null;\n }\n final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();\n keyPhrasesTask.setParameters(\n \n \n \n new KeyPhrasesTaskParameters()\n .setModelVersion(getNotNullModelVersion(action.getModelVersion())));\n return keyPhrasesTask;\n }).collect(Collectors.toList()));\n }\n\n private Function, Mono>\n activationOperation(Mono operationResult) {\n return pollingContext -> {\n try {\n return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);\n } catch (RuntimeException ex) {\n return monoError(logger, ex);\n }\n };\n }\n\n private Function, Mono>>\n pollingOperation(Function>> pollingFunction) {\n return pollingContext -> {\n try {\n final PollResponse operationResultPollResponse =\n pollingContext.getLatestResponse();\n \n \n\n final String operationId = operationResultPollResponse.getValue().getOperationId();\n return pollingFunction.apply(operationId)\n .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))\n .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);\n } catch (RuntimeException ex) {\n return monoError(logger, ex);\n }\n };\n }\n\n private Function, Mono>>\n fetchingOperation(Function>> fetchingFunction) {\n return pollingContext -> {\n try {\n \n \n\n 
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();\n return fetchingFunction.apply(operationId);\n } catch (RuntimeException ex) {\n return monoError(logger, ex);\n }\n };\n }\n\n private Function, Mono>>\n fetchingOperationIterable(Function>> fetchingFunction) {\n return pollingContext -> {\n try {\n \n \n\n final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();\n return fetchingFunction.apply(operationId);\n } catch (RuntimeException ex) {\n return monoError(logger, ex);\n }\n };\n }\n\n PagedFlux getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,\n boolean showStats, Context context) {\n return new PagedFlux<>(\n () -> getPage(null, operationId, top, skip, showStats, context),\n continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));\n }\n\n Mono> getPage(String continuationToken, String operationId, Integer top,\n Integer skip, boolean showStats, Context context) {\n if (continuationToken != null) {\n final Map continuationTokenMap = parseNextLink(continuationToken);\n final Integer topValue = continuationTokenMap.getOrDefault(\"$top\", null);\n final Integer skipValue = continuationTokenMap.getOrDefault(\"$skip\", null);\n return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)\n .map(this::toAnalyzeTasksPagedResponse)\n .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);\n } else {\n return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)\n .map(this::toAnalyzeTasksPagedResponse)\n .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);\n }\n }\n\n private PagedResponse toAnalyzeTasksPagedResponse(Response response) {\n final AnalyzeJobState analyzeJobState = response.getValue();\n return new PagedResponseBase(\n response.getRequest(),\n response.getStatusCode(),\n response.getHeaders(),\n Arrays.asList(toAnalyzeTasks(analyzeJobState)),\n 
analyzeJobState.getNextLink(),\n null);\n }\n\n \n\n private Mono> processAnalyzedModelResponse(\n Response analyzeJobStateResponse,\n PollResponse operationResultPollResponse) {\n\n LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;\n if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {\n switch (analyzeJobStateResponse.getValue().getStatus()) {\n case NOT_STARTED:\n case RUNNING:\n status = LongRunningOperationStatus.IN_PROGRESS;\n break;\n case SUCCEEDED:\n status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;\n break;\n case CANCELLED:\n status = LongRunningOperationStatus.USER_CANCELLED;\n break;\n default:\n status = LongRunningOperationStatus.fromString(\n analyzeJobStateResponse.getValue().getStatus().toString(), true);\n break;\n }\n }\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),\n analyzeJobStateResponse.getValue().getDisplayName());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),\n analyzeJobStateResponse.getValue().getCreatedDateTime());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),\n analyzeJobStateResponse.getValue().getExpirationDateTime());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),\n analyzeJobStateResponse.getValue().getLastUpdateDateTime());\n final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),\n tasksResult.getFailed());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),\n tasksResult.getInProgress());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(\n 
operationResultPollResponse.getValue(), tasksResult.getCompleted());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),\n tasksResult.getTotal());\n return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));\n }\n\n private Context getNotNullContext(Context context) {\n return context == null ? Context.NONE : context;\n }\n\n private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {\n return options == null ? new AnalyzeBatchActionsOptions() : options;\n }\n\n private String getNotNullModelVersion(String modelVersion) {\n return modelVersion == null ? \"latest\" : modelVersion;\n }\n\n private String[] parseActionErrorTarget(String targetReference) {\n if (CoreUtils.isNullOrEmpty(targetReference)) {\n throw logger.logExceptionAsError(new RuntimeException(\n \"Expected an error with a target field referencing an action but did not get one\"));\n }\n \n final Pattern pattern = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE);\n final Matcher matcher = pattern.matcher(targetReference);\n String[] taskNameIdPair = new String[2];\n while (matcher.find()) {\n taskNameIdPair[0] = matcher.group(1);\n taskNameIdPair[1] = matcher.group(2);\n }\n return taskNameIdPair;\n }\n}", "context_after": "class AnalyzeBatchActionsAsyncClient {\n private static final String REGEX_ACTION_ERROR_TARGET =\n \"\n\n private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);\n private final TextAnalyticsClientImpl service;\n\n AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {\n this.service = service;\n }\n\n PollerFlux> beginAnalyzeBatchActions(\n Iterable documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,\n Context context) {\n try {\n inputDocumentsValidation(documents);\n options = getNotNullAnalyzeBatchActionsOptions(options);\n final Context finalContext = 
getNotNullContext(context);\n final AnalyzeBatchInput analyzeBatchInput =\n new AnalyzeBatchInput()\n .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))\n .setTasks(getJobManifestTasks(actions));\n analyzeBatchInput.setDisplayName(actions.getDisplayName());\n final boolean finalIncludeStatistics = options.isIncludeStatistics();\n return new PollerFlux<>(\n \n \n DEFAULT_POLL_INTERVAL,\n activationOperation(\n service.analyzeWithResponseAsync(analyzeBatchInput,\n finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))\n .map(analyzeResponse -> {\n final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =\n new AnalyzeBatchActionsOperationDetail();\n AnalyzeBatchActionsOperationDetailPropertiesHelper\n .setOperationId(textAnalyticsOperationResult,\n parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));\n return textAnalyticsOperationResult;\n })),\n pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,\n finalIncludeStatistics, null, null, finalContext)),\n (activationResponse, pollingContext) ->\n Mono.error(new RuntimeException(\"Cancellation is not supported.\")),\n fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(\n operationId, null, null, finalIncludeStatistics, finalContext)))\n );\n } catch (RuntimeException ex) {\n return PollerFlux.error(ex);\n }\n }\n\n PollerFlux>\n beginAnalyzeBatchActionsIterable(Iterable documents, TextAnalyticsActions actions,\n AnalyzeBatchActionsOptions options, Context context) {\n try {\n inputDocumentsValidation(documents);\n options = getNotNullAnalyzeBatchActionsOptions(options);\n final Context finalContext = getNotNullContext(context);\n final AnalyzeBatchInput analyzeBatchInput =\n new AnalyzeBatchInput()\n .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))\n .setTasks(getJobManifestTasks(actions));\n 
analyzeBatchInput.setDisplayName(actions.getDisplayName());\n final boolean finalIncludeStatistics = options.isIncludeStatistics();\n return new PollerFlux<>(\n \n \n DEFAULT_POLL_INTERVAL,\n activationOperation(\n service.analyzeWithResponseAsync(analyzeBatchInput,\n finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))\n .map(analyzeResponse -> {\n final AnalyzeBatchActionsOperationDetail operationDetail =\n new AnalyzeBatchActionsOperationDetail();\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,\n parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));\n return operationDetail;\n })),\n pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,\n finalIncludeStatistics, null, null, finalContext)),\n (activationResponse, pollingContext) ->\n Mono.error(new RuntimeException(\"Cancellation is not supported.\")),\n fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(\n operationId, null, null, finalIncludeStatistics, finalContext))))\n );\n } catch (RuntimeException ex) {\n return PollerFlux.error(ex);\n }\n }\n\n private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {\n return new JobManifestTasks()\n .setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null\n : StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(\n action -> {\n if (action == null) {\n return null;\n }\n final EntitiesTask entitiesTask = new EntitiesTask();\n entitiesTask.setParameters(\n \n \n \n new EntitiesTaskParameters()\n .setModelVersion(getNotNullModelVersion(action.getModelVersion())));\n return entitiesTask;\n }).collect(Collectors.toList()))\n .setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? 
null\n : StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(\n action -> {\n if (action == null) {\n return null;\n }\n final PiiTask piiTask = new PiiTask();\n piiTask.setParameters(\n new PiiTaskParameters()\n \n \n \n .setModelVersion(getNotNullModelVersion(action.getModelVersion()))\n .setDomain(PiiTaskParametersDomain.fromString(\n action.getDomainFilter() == null ? null\n : action.getDomainFilter().toString())));\n return piiTask;\n }).collect(Collectors.toList()))\n .setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null\n : StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(\n action -> {\n if (action == null) {\n return null;\n }\n final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();\n keyPhrasesTask.setParameters(\n \n \n \n new KeyPhrasesTaskParameters()\n .setModelVersion(getNotNullModelVersion(action.getModelVersion())));\n return keyPhrasesTask;\n }).collect(Collectors.toList()));\n }\n\n private Function, Mono>\n activationOperation(Mono operationResult) {\n return pollingContext -> {\n try {\n return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);\n } catch (RuntimeException ex) {\n return monoError(logger, ex);\n }\n };\n }\n\n private Function, Mono>>\n pollingOperation(Function>> pollingFunction) {\n return pollingContext -> {\n try {\n final PollResponse operationResultPollResponse =\n pollingContext.getLatestResponse();\n \n \n\n final String operationId = operationResultPollResponse.getValue().getOperationId();\n return pollingFunction.apply(operationId)\n .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))\n .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);\n } catch (RuntimeException ex) {\n return monoError(logger, ex);\n }\n };\n }\n\n private Function, Mono>>\n fetchingOperation(Function>> fetchingFunction) {\n return pollingContext -> {\n try {\n \n \n\n 
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();\n return fetchingFunction.apply(operationId);\n } catch (RuntimeException ex) {\n return monoError(logger, ex);\n }\n };\n }\n\n private Function, Mono>>\n fetchingOperationIterable(Function>> fetchingFunction) {\n return pollingContext -> {\n try {\n \n \n\n final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();\n return fetchingFunction.apply(operationId);\n } catch (RuntimeException ex) {\n return monoError(logger, ex);\n }\n };\n }\n\n PagedFlux getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,\n boolean showStats, Context context) {\n return new PagedFlux<>(\n () -> getPage(null, operationId, top, skip, showStats, context),\n continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));\n }\n\n Mono> getPage(String continuationToken, String operationId, Integer top,\n Integer skip, boolean showStats, Context context) {\n if (continuationToken != null) {\n final Map continuationTokenMap = parseNextLink(continuationToken);\n final Integer topValue = continuationTokenMap.getOrDefault(\"$top\", null);\n final Integer skipValue = continuationTokenMap.getOrDefault(\"$skip\", null);\n return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)\n .map(this::toAnalyzeTasksPagedResponse)\n .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);\n } else {\n return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)\n .map(this::toAnalyzeTasksPagedResponse)\n .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);\n }\n }\n\n private PagedResponse toAnalyzeTasksPagedResponse(Response response) {\n final AnalyzeJobState analyzeJobState = response.getValue();\n return new PagedResponseBase(\n response.getRequest(),\n response.getStatusCode(),\n response.getHeaders(),\n Arrays.asList(toAnalyzeTasks(analyzeJobState)),\n 
analyzeJobState.getNextLink(),\n null);\n }\n\n \n\n private Mono> processAnalyzedModelResponse(\n Response analyzeJobStateResponse,\n PollResponse operationResultPollResponse) {\n\n LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;\n if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {\n switch (analyzeJobStateResponse.getValue().getStatus()) {\n case NOT_STARTED:\n case RUNNING:\n status = LongRunningOperationStatus.IN_PROGRESS;\n break;\n case SUCCEEDED:\n status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;\n break;\n case CANCELLED:\n status = LongRunningOperationStatus.USER_CANCELLED;\n break;\n default:\n status = LongRunningOperationStatus.fromString(\n analyzeJobStateResponse.getValue().getStatus().toString(), true);\n break;\n }\n }\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),\n analyzeJobStateResponse.getValue().getDisplayName());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),\n analyzeJobStateResponse.getValue().getCreatedDateTime());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),\n analyzeJobStateResponse.getValue().getExpirationDateTime());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),\n analyzeJobStateResponse.getValue().getLastUpdateDateTime());\n final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),\n tasksResult.getFailed());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),\n tasksResult.getInProgress());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(\n 
operationResultPollResponse.getValue(), tasksResult.getCompleted());\n AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),\n tasksResult.getTotal());\n return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));\n }\n\n private Context getNotNullContext(Context context) {\n return context == null ? Context.NONE : context;\n }\n\n private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {\n return options == null ? new AnalyzeBatchActionsOptions() : options;\n }\n\n private String getNotNullModelVersion(String modelVersion) {\n return modelVersion == null ? \"latest\" : modelVersion;\n }\n\n private String[] parseActionErrorTarget(String targetReference) {\n if (CoreUtils.isNullOrEmpty(targetReference)) {\n throw logger.logExceptionAsError(new RuntimeException(\n \"Expected an error with a target field referencing an action but did not get one\"));\n }\n \n final Pattern pattern = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE);\n final Matcher matcher = pattern.matcher(targetReference);\n String[] taskNameIdPair = new String[2];\n while (matcher.find()) {\n taskNameIdPair[0] = matcher.group(1);\n taskNameIdPair[1] = matcher.group(2);\n }\n return taskNameIdPair;\n }\n}" }, { "comment": "I presume this is a synchronous write to ZooKeeper? 
_If_ it's observed to be a performance issue, it could be useful to consider making progress updates async since this is called in the context of a visitor client worker thread callback.", "method_body": "private void progress(DocumentType type) {\n \n reindexing = database.readReindexing();\n status = reindexing.status().getOrDefault(type,\n Status.ready(clock.instant())\n .running()\n .successful(clock.instant()));\n if (ready.get(type).isAfter(status.startedAt()))\n status = Status.ready(clock.instant()); \n\n database.writeReindexing(reindexing = reindexing.with(type, status));\n\n switch (status.state()) {\n default:\n log.log(WARNING, \"Unknown reindexing state '\" + status.state() + \"'\");\n case FAILED:\n log.log(FINE, () -> \"Not continuing reindexing of \" + type + \" due to previous failure\");\n case SUCCESSFUL: \n return;\n case RUNNING:\n log.log(WARNING, \"Unexpected state 'RUNNING' of reindexing of \" + type);\n case READY: \n log.log(FINE, () -> \"Running reindexing of \" + type);\n }\n\n \n status = status.running();\n AtomicReference progressLastStored = new AtomicReference<>(clock.instant());\n VisitorControlHandler control = new VisitorControlHandler() {\n @Override\n public void onProgress(ProgressToken token) {\n super.onProgress(token);\n status = status.progressed(token);\n if (progressLastStored.get().isBefore(clock.instant().minusSeconds(10))) {\n progressLastStored.set(clock.instant());\n database.writeReindexing(reindexing = reindexing.with(type, status));\n }\n }\n @Override\n public void onDone(CompletionCode code, String message) {\n super.onDone(code, message);\n phaser.arriveAndAwaitAdvance();\n }\n };\n visit(type, status.progress().orElse(null), control);\n\n \n switch (control.getResult().getCode()) {\n default:\n log.log(WARNING, \"Unexpected visitor result '\" + control.getResult().getCode() + \"'\");\n case FAILURE: \n log.log(WARNING, \"Visiting failed: \" + control.getResult().getMessage());\n status = 
status.failed(clock.instant(), control.getResult().getMessage());\n break;\n case ABORTED:\n log.log(FINE, () -> \"Halting reindexing of \" + type + \" due to shutdown \u2014\u00a0will continue later\");\n status = status.halted();\n break;\n case SUCCESS:\n log.log(INFO, \"Completed reindexing of \" + type + \" after \" + Duration.between(status.startedAt(), clock.instant()));\n status = status.successful(clock.instant());\n }\n database.writeReindexing(reindexing.with(type, status));\n }", "target_code": "database.writeReindexing(reindexing = reindexing.with(type, status));", "method_body_after": "private void progress(DocumentType type) {\n \n reindexing = database.readReindexing();\n status = reindexing.status().getOrDefault(type,\n Status.ready(clock.instant())\n .running()\n .successful(clock.instant()));\n if (ready.get(type).isAfter(status.startedAt()))\n status = Status.ready(clock.instant()); \n\n database.writeReindexing(reindexing = reindexing.with(type, status));\n\n switch (status.state()) {\n default:\n log.log(WARNING, \"Unknown reindexing state '\" + status.state() + \"'\");\n case FAILED:\n log.log(FINE, () -> \"Not continuing reindexing of \" + type + \" due to previous failure\");\n case SUCCESSFUL: \n return;\n case RUNNING:\n log.log(WARNING, \"Unexpected state 'RUNNING' of reindexing of \" + type);\n case READY: \n log.log(FINE, () -> \"Running reindexing of \" + type);\n }\n\n \n status = status.running();\n AtomicReference progressLastStored = new AtomicReference<>(clock.instant());\n VisitorControlHandler control = new VisitorControlHandler() {\n @Override\n public void onProgress(ProgressToken token) {\n super.onProgress(token);\n status = status.progressed(token);\n if (progressLastStored.get().isBefore(clock.instant().minusSeconds(10))) {\n progressLastStored.set(clock.instant());\n database.writeReindexing(reindexing = reindexing.with(type, status));\n }\n }\n @Override\n public void onDone(CompletionCode code, String message) {\n 
super.onDone(code, message);\n phaser.arriveAndAwaitAdvance(); \n }\n };\n visit(type, status.progress().orElse(null), control);\n\n \n switch (control.getResult().getCode()) {\n default:\n log.log(WARNING, \"Unexpected visitor result '\" + control.getResult().getCode() + \"'\");\n case FAILURE: \n log.log(WARNING, \"Visiting failed: \" + control.getResult().getMessage());\n status = status.failed(clock.instant(), control.getResult().getMessage());\n break;\n case ABORTED:\n log.log(FINE, () -> \"Halting reindexing of \" + type + \" due to shutdown \u2014\u00a0will continue later\");\n status = status.halted();\n break;\n case SUCCESS:\n log.log(INFO, \"Completed reindexing of \" + type + \" after \" + Duration.between(status.startedAt(), clock.instant()));\n status = status.successful(clock.instant());\n }\n database.writeReindexing(reindexing.with(type, status));\n }", "context_before": "class Reindexer {\n\n private static final Logger log = Logger.getLogger(Reindexer.class.getName());\n\n private final Cluster cluster;\n private final Map ready;\n private final ReindexingCurator database;\n private final DocumentAccess access;\n private final Clock clock;\n private final Phaser phaser = new Phaser(2); \n\n private Reindexing reindexing;\n private Status status;\n\n public Reindexer(Cluster cluster, Map ready, ReindexingCurator database,\n DocumentAccess access, Clock clock) {\n for (DocumentType type : ready.keySet())\n cluster.bucketSpaceOf(type); \n\n this.cluster = cluster;\n this.ready = new TreeMap<>(ready); \n this.database = database;\n this.access = access;\n this.clock = clock;\n }\n\n /** Tells this to stop reindexing at its leisure. */\n public void shutdown() {\n phaser.forceTermination();\n }\n\n /** Starts and tracks reprocessing of ready document types until done, or interrupted. 
*/\n public void reindex() throws ReindexingLockException {\n if (phaser.isTerminated())\n throw new IllegalStateException(\"Already shut down\");\n\n try (Lock lock = database.lockReindexing()) {\n for (DocumentType type : ready.keySet()) { \n if (ready.get(type).isAfter(clock.instant()))\n log.log(INFO, \"Received config for reindexing which is ready in the future \u2014 will process later \" +\n \"(\" + ready.get(type) + \" is after \" + clock.instant() + \")\");\n else\n progress(type);\n\n if (phaser.isTerminated())\n break;\n }\n }\n }\n\n @SuppressWarnings(\"fallthrough\") \n \n\n private void visit(DocumentType type, ProgressToken progress, VisitorControlHandler control) {\n VisitorParameters parameters = createParameters(type, progress);\n parameters.setControlHandler(control);\n VisitorSession session;\n try {\n session = access.createVisitorSession(parameters);\n }\n catch (ParseException e) {\n throw new IllegalStateException(e);\n }\n\n \n phaser.arriveAndAwaitAdvance();\n session.destroy();\n }\n\n VisitorParameters createParameters(DocumentType type, ProgressToken progress) {\n VisitorParameters parameters = new VisitorParameters(type.getName());\n parameters.setRemoteDataHandler(cluster.name());\n parameters.setResumeToken(progress);\n parameters.setFieldSet(type.getName() + \":[document]\");\n parameters.setPriority(DocumentProtocol.Priority.LOW_1);\n parameters.setRoute(cluster.route());\n parameters.setBucketSpace(cluster.bucketSpaceOf(type));\n \n return parameters;\n }\n\n\n static class Cluster {\n\n private final String name;\n private final String configId;\n private final Map documentBuckets;\n\n Cluster(String name, String configId, Map documentBuckets) {\n this.name = requireNonNull(name);\n this.configId = requireNonNull(configId);\n this.documentBuckets = Map.copyOf(documentBuckets);\n }\n\n String name() {\n return name;\n }\n\n String route() {\n return \"[Storage:cluster=\" + name + \";clusterconfigid=\" + configId + \"]\";\n }\n\n 
String bucketSpaceOf(DocumentType documentType) {\n return requireNonNull(documentBuckets.get(documentType), \"Unknown bucket space for \" + documentType);\n }\n\n @Override\n public boolean equals(Object o) {\n if (this == o) return true;\n if (o == null || getClass() != o.getClass()) return false;\n Cluster cluster = (Cluster) o;\n return name.equals(cluster.name) &&\n configId.equals(cluster.configId) &&\n documentBuckets.equals(cluster.documentBuckets);\n }\n\n @Override\n public int hashCode() {\n return Objects.hash(name, configId, documentBuckets);\n }\n\n @Override\n public String toString() {\n return \"Cluster{\" +\n \"name='\" + name + '\\'' +\n \", configId='\" + configId + '\\'' +\n \", documentBuckets=\" + documentBuckets +\n '}';\n }\n\n }\n\n}", "context_after": "class Reindexer {\n\n private static final Logger log = Logger.getLogger(Reindexer.class.getName());\n\n private final Cluster cluster;\n private final Map ready;\n private final ReindexingCurator database;\n private final DocumentAccess access;\n private final Clock clock;\n private final Phaser phaser = new Phaser(2); \n\n private Reindexing reindexing;\n private Status status;\n\n public Reindexer(Cluster cluster, Map ready, ReindexingCurator database,\n DocumentAccess access, Clock clock) {\n for (DocumentType type : ready.keySet())\n cluster.bucketSpaceOf(type); \n\n this.cluster = cluster;\n this.ready = new TreeMap<>(ready); \n this.database = database;\n this.access = access;\n this.clock = clock;\n }\n\n /** Lets the reindexere abort any ongoing visit session, wait for it to complete normally, then exit. */\n public void shutdown() {\n phaser.forceTermination(); \n }\n\n /** Starts and tracks reprocessing of ready document types until done, or interrupted. 
*/\n public void reindex() throws ReindexingLockException {\n if (phaser.isTerminated())\n throw new IllegalStateException(\"Already shut down\");\n\n try (Lock lock = database.lockReindexing()) {\n for (DocumentType type : ready.keySet()) { \n if (ready.get(type).isAfter(clock.instant()))\n log.log(INFO, \"Received config for reindexing which is ready in the future \u2014 will process later \" +\n \"(\" + ready.get(type) + \" is after \" + clock.instant() + \")\");\n else\n progress(type);\n\n if (phaser.isTerminated())\n break;\n }\n }\n }\n\n @SuppressWarnings(\"fallthrough\") \n \n\n private void visit(DocumentType type, ProgressToken progress, VisitorControlHandler control) {\n VisitorParameters parameters = createParameters(type, progress);\n parameters.setControlHandler(control);\n VisitorSession session;\n try {\n session = access.createVisitorSession(parameters);\n }\n catch (ParseException e) {\n throw new IllegalStateException(e);\n }\n\n \n phaser.arriveAndAwaitAdvance(); \n session.destroy();\n }\n\n VisitorParameters createParameters(DocumentType type, ProgressToken progress) {\n VisitorParameters parameters = new VisitorParameters(type.getName());\n parameters.setRemoteDataHandler(cluster.name());\n parameters.setResumeToken(progress);\n parameters.setFieldSet(type.getName() + \":[document]\");\n parameters.setPriority(DocumentProtocol.Priority.LOW_1);\n parameters.setRoute(cluster.route());\n parameters.setBucketSpace(cluster.bucketSpaceOf(type));\n \n return parameters;\n }\n\n\n static class Cluster {\n\n private final String name;\n private final String configId;\n private final Map documentBuckets;\n\n Cluster(String name, String configId, Map documentBuckets) {\n this.name = requireNonNull(name);\n this.configId = requireNonNull(configId);\n this.documentBuckets = Map.copyOf(documentBuckets);\n }\n\n String name() {\n return name;\n }\n\n String route() {\n return \"[Storage:cluster=\" + name + \";clusterconfigid=\" + configId + \"]\";\n }\n\n 
String bucketSpaceOf(DocumentType documentType) {\n return requireNonNull(documentBuckets.get(documentType), \"Unknown bucket space for \" + documentType);\n }\n\n @Override\n public boolean equals(Object o) {\n if (this == o) return true;\n if (o == null || getClass() != o.getClass()) return false;\n Cluster cluster = (Cluster) o;\n return name.equals(cluster.name) &&\n configId.equals(cluster.configId) &&\n documentBuckets.equals(cluster.documentBuckets);\n }\n\n @Override\n public int hashCode() {\n return Objects.hash(name, configId, documentBuckets);\n }\n\n @Override\n public String toString() {\n return \"Cluster{\" +\n \"name='\" + name + '\\'' +\n \", configId='\" + configId + '\\'' +\n \", documentBuckets=\" + documentBuckets +\n '}';\n }\n\n }\n\n}" }, { "comment": "Or would it be better to let the method throw an IOException?", "method_body": "private static List getArtifacts(List stagingFiles) {\n Set pathsToStage = Sets.newHashSet(stagingFiles);\n ImmutableList.Builder artifactsBuilder = ImmutableList.builder();\n for (String path : pathsToStage) {\n File file = new File(path);\n \n if (file.exists()) {\n ArtifactInformation.Builder artifactBuilder = ArtifactInformation.newBuilder();\n artifactBuilder.setTypeUrn(BeamUrns.getUrn(StandardArtifacts.Types.FILE));\n artifactBuilder.setRoleUrn(BeamUrns.getUrn(StandardArtifacts.Roles.STAGING_TO));\n artifactBuilder.setRolePayload(\n RunnerApi.ArtifactStagingToRolePayload.newBuilder()\n .setStagedName(createStagingFileName(file))\n .build()\n .toByteString());\n if (file.isDirectory()) {\n File zippedFile;\n HashCode hashCode;\n try {\n zippedFile = zipDirectory(file);\n hashCode = Files.asByteSource(zippedFile).hash(Hashing.sha256());\n } catch (IOException e) {\n throw new RuntimeException(e);\n }\n artifactsBuilder.add(\n artifactBuilder\n .setTypePayload(\n RunnerApi.ArtifactFilePayload.newBuilder()\n .setPath(zippedFile.getPath())\n .setSha256(hashCode.toString())\n .build()\n .toByteString())\n 
.build());\n } else {\n HashCode hashCode;\n try {\n hashCode = Files.asByteSource(file).hash(Hashing.sha256());\n } catch (IOException e) {\n throw new RuntimeException(e);\n }\n artifactsBuilder.add(\n artifactBuilder\n .setTypePayload(\n RunnerApi.ArtifactFilePayload.newBuilder()\n .setPath(file.getPath())\n .setSha256(hashCode.toString())\n .build()\n .toByteString())\n .build());\n }\n }\n }\n return artifactsBuilder.build();\n }", "target_code": "throw new RuntimeException(e);", "method_body_after": "private static List getArtifacts(List stagingFiles) {\n Set pathsToStage = Sets.newHashSet(stagingFiles);\n ImmutableList.Builder artifactsBuilder = ImmutableList.builder();\n for (String path : pathsToStage) {\n File file = new File(path);\n \n if (file.exists()) {\n ArtifactInformation.Builder artifactBuilder = ArtifactInformation.newBuilder();\n artifactBuilder.setTypeUrn(BeamUrns.getUrn(StandardArtifacts.Types.FILE));\n artifactBuilder.setRoleUrn(BeamUrns.getUrn(StandardArtifacts.Roles.STAGING_TO));\n artifactBuilder.setRolePayload(\n RunnerApi.ArtifactStagingToRolePayload.newBuilder()\n .setStagedName(createStagingFileName(file))\n .build()\n .toByteString());\n if (file.isDirectory()) {\n File zippedFile;\n HashCode hashCode;\n try {\n zippedFile = zipDirectory(file);\n hashCode = Files.asByteSource(zippedFile).hash(Hashing.sha256());\n } catch (IOException e) {\n throw new RuntimeException(e);\n }\n artifactsBuilder.add(\n artifactBuilder\n .setTypePayload(\n RunnerApi.ArtifactFilePayload.newBuilder()\n .setPath(zippedFile.getPath())\n .setSha256(hashCode.toString())\n .build()\n .toByteString())\n .build());\n } else {\n HashCode hashCode;\n try {\n hashCode = Files.asByteSource(file).hash(Hashing.sha256());\n } catch (IOException e) {\n throw new RuntimeException(e);\n }\n artifactsBuilder.add(\n artifactBuilder\n .setTypePayload(\n RunnerApi.ArtifactFilePayload.newBuilder()\n .setPath(file.getPath())\n .setSha256(hashCode.toString())\n .build()\n 
.toByteString())\n .build());\n }\n }\n }\n return artifactsBuilder.build();\n }", "context_before": "class Environments {\n private static final Logger LOG = LoggerFactory.getLogger(Environments.class);\n\n private static final ObjectMapper MAPPER =\n new ObjectMapper()\n .registerModules(ObjectMapper.findModules(ReflectHelpers.findClassLoader()));\n public static final String ENVIRONMENT_DOCKER = \"DOCKER\";\n public static final String ENVIRONMENT_PROCESS = \"PROCESS\";\n public static final String ENVIRONMENT_EXTERNAL = \"EXTERNAL\";\n public static final String ENVIRONMENT_EMBEDDED = \"EMBEDDED\"; \n public static final String ENVIRONMENT_LOOPBACK = \"LOOPBACK\"; \n\n /* For development, use the container build by the current user to ensure that the SDK harness and\n * the SDK agree on how they should interact. This should be changed to a version-specific\n * container during a release.\n *\n * See https:\n * container.\n */\n private static final String JAVA_SDK_HARNESS_CONTAINER_URL =\n ReleaseInfo.getReleaseInfo().getDefaultDockerRepoRoot()\n + \"/\"\n + ReleaseInfo.getReleaseInfo().getDefaultDockerRepoPrefix()\n + \"java_sdk:\"\n + ReleaseInfo.getReleaseInfo().getSdkVersion();\n public static final Environment JAVA_SDK_HARNESS_ENVIRONMENT =\n createDockerEnvironment(JAVA_SDK_HARNESS_CONTAINER_URL);\n\n private Environments() {}\n\n public static Environment createOrGetDefaultEnvironment(PortablePipelineOptions options) {\n String type = options.getDefaultEnvironmentType();\n String config = options.getDefaultEnvironmentConfig();\n\n Environment defaultEnvironment;\n if (Strings.isNullOrEmpty(type)) {\n defaultEnvironment = JAVA_SDK_HARNESS_ENVIRONMENT;\n } else {\n switch (type) {\n case ENVIRONMENT_EMBEDDED:\n defaultEnvironment = createEmbeddedEnvironment(config);\n break;\n case ENVIRONMENT_EXTERNAL:\n case ENVIRONMENT_LOOPBACK:\n defaultEnvironment = createExternalEnvironment(config);\n break;\n case ENVIRONMENT_PROCESS:\n defaultEnvironment = 
createProcessEnvironment(config);\n break;\n case ENVIRONMENT_DOCKER:\n default:\n defaultEnvironment = createDockerEnvironment(config);\n }\n }\n return defaultEnvironment\n .toBuilder()\n .addAllDependencies(getDeferredArtifacts(options))\n .addAllCapabilities(getJavaCapabilities())\n .build();\n }\n\n public static Environment createDockerEnvironment(String dockerImageUrl) {\n if (Strings.isNullOrEmpty(dockerImageUrl)) {\n return JAVA_SDK_HARNESS_ENVIRONMENT;\n }\n return Environment.newBuilder()\n .setUrn(BeamUrns.getUrn(StandardEnvironments.Environments.DOCKER))\n .setPayload(\n DockerPayload.newBuilder().setContainerImage(dockerImageUrl).build().toByteString())\n .build();\n }\n\n private static Environment createExternalEnvironment(String config) {\n return Environment.newBuilder()\n .setUrn(BeamUrns.getUrn(StandardEnvironments.Environments.EXTERNAL))\n .setPayload(\n ExternalPayload.newBuilder()\n .setEndpoint(ApiServiceDescriptor.newBuilder().setUrl(config).build())\n .build()\n .toByteString())\n .build();\n }\n\n private static Environment createProcessEnvironment(String config) {\n try {\n ProcessPayloadReferenceJSON payloadReferenceJSON =\n MAPPER.readValue(config, ProcessPayloadReferenceJSON.class);\n return createProcessEnvironment(\n payloadReferenceJSON.getOs(),\n payloadReferenceJSON.getArch(),\n payloadReferenceJSON.getCommand(),\n payloadReferenceJSON.getEnv());\n } catch (IOException e) {\n throw new RuntimeException(\n String.format(\"Unable to parse process environment config: %s\", config), e);\n }\n }\n\n private static Environment createEmbeddedEnvironment(String config) {\n return Environment.newBuilder()\n .setUrn(ENVIRONMENT_EMBEDDED)\n .setPayload(ByteString.copyFromUtf8(MoreObjects.firstNonNull(config, \"\")))\n .build();\n }\n\n public static Environment createProcessEnvironment(\n String os, String arch, String command, Map env) {\n ProcessPayload.Builder builder = ProcessPayload.newBuilder();\n if (!Strings.isNullOrEmpty(os)) {\n 
builder.setOs(os);\n }\n if (!Strings.isNullOrEmpty(arch)) {\n builder.setArch(arch);\n }\n if (!Strings.isNullOrEmpty(command)) {\n builder.setCommand(command);\n }\n if (env != null) {\n builder.putAllEnv(env);\n }\n return Environment.newBuilder()\n .setUrn(BeamUrns.getUrn(StandardEnvironments.Environments.PROCESS))\n .setPayload(builder.build().toByteString())\n .build();\n }\n\n public static Optional getEnvironment(String ptransformId, Components components) {\n PTransform ptransform = components.getTransformsOrThrow(ptransformId);\n String envId = ptransform.getEnvironmentId();\n if (Strings.isNullOrEmpty(envId)) {\n \n \n \n return Optional.empty();\n } else {\n return Optional.of(components.getEnvironmentsOrThrow(envId));\n }\n }\n\n public static Optional getEnvironment(\n PTransform ptransform, RehydratedComponents components) {\n String envId = ptransform.getEnvironmentId();\n if (Strings.isNullOrEmpty(envId)) {\n return Optional.empty();\n } else {\n \n \n \n return Optional.of(components.getEnvironment(envId));\n }\n }\n\n \n\n public static List getDeferredArtifacts(PipelineOptions options) {\n List stagingFiles = options.as(PortablePipelineOptions.class).getFilesToStage();\n if (stagingFiles == null || stagingFiles.isEmpty()) {\n return ImmutableList.of();\n }\n\n String key = UUID.randomUUID().toString();\n DefaultArtifactResolver.INSTANCE.register(\n (info) -> {\n if (BeamUrns.getUrn(StandardArtifacts.Types.DEFERRED).equals(info.getTypeUrn())) {\n RunnerApi.DeferredArtifactPayload deferredArtifactPayload;\n try {\n deferredArtifactPayload =\n RunnerApi.DeferredArtifactPayload.parseFrom(info.getTypePayload());\n } catch (InvalidProtocolBufferException e) {\n throw new RuntimeException(\"Error parsing deferred artifact payload.\", e);\n }\n if (key.equals(deferredArtifactPayload.getKey())) {\n return Optional.of(getArtifacts(stagingFiles));\n } else {\n return Optional.empty();\n }\n } else {\n return Optional.empty();\n }\n });\n\n return 
ImmutableList.of(\n ArtifactInformation.newBuilder()\n .setTypeUrn(BeamUrns.getUrn(StandardArtifacts.Types.DEFERRED))\n .setTypePayload(\n RunnerApi.DeferredArtifactPayload.newBuilder().setKey(key).build().toByteString())\n .build());\n }\n\n public static Set getJavaCapabilities() {\n ImmutableSet.Builder capabilities = ImmutableSet.builder();\n capabilities.addAll(ModelCoders.urns());\n capabilities.add(BeamUrns.getUrn(StandardProtocols.Enum.MULTI_CORE_BUNDLE_PROCESSING));\n capabilities.add(BeamUrns.getUrn(StandardProtocols.Enum.PROGRESS_REPORTING));\n return capabilities.build();\n }\n\n private static String createStagingFileName(File file) {\n \n \n \n \n \n \n \n return UUID.randomUUID().toString();\n }\n\n private static File zipDirectory(File directory) throws IOException {\n File zipFile = File.createTempFile(directory.getName(), \".zip\");\n try (FileOutputStream fos = new FileOutputStream(zipFile)) {\n ZipFiles.zipDirectory(directory, fos);\n }\n return zipFile;\n }\n\n private static class ProcessPayloadReferenceJSON {\n @Nullable private String os;\n @Nullable private String arch;\n @Nullable private String command;\n @Nullable private Map env;\n\n @Nullable\n public String getOs() {\n return os;\n }\n\n @Nullable\n public String getArch() {\n return arch;\n }\n\n @Nullable\n public String getCommand() {\n return command;\n }\n\n @Nullable\n public Map getEnv() {\n return env;\n }\n }\n}", "context_after": "class Environments {\n private static final Logger LOG = LoggerFactory.getLogger(Environments.class);\n\n private static final ObjectMapper MAPPER =\n new ObjectMapper()\n .registerModules(ObjectMapper.findModules(ReflectHelpers.findClassLoader()));\n public static final String ENVIRONMENT_DOCKER = \"DOCKER\";\n public static final String ENVIRONMENT_PROCESS = \"PROCESS\";\n public static final String ENVIRONMENT_EXTERNAL = \"EXTERNAL\";\n public static final String ENVIRONMENT_EMBEDDED = \"EMBEDDED\"; \n public static final String 
ENVIRONMENT_LOOPBACK = \"LOOPBACK\"; \n\n /* For development, use the container build by the current user to ensure that the SDK harness and\n * the SDK agree on how they should interact. This should be changed to a version-specific\n * container during a release.\n *\n * See https:\n * container.\n */\n private static final String JAVA_SDK_HARNESS_CONTAINER_URL =\n ReleaseInfo.getReleaseInfo().getDefaultDockerRepoRoot()\n + \"/\"\n + ReleaseInfo.getReleaseInfo().getDefaultDockerRepoPrefix()\n + \"java_sdk:\"\n + ReleaseInfo.getReleaseInfo().getSdkVersion();\n public static final Environment JAVA_SDK_HARNESS_ENVIRONMENT =\n createDockerEnvironment(JAVA_SDK_HARNESS_CONTAINER_URL);\n\n private Environments() {}\n\n public static Environment createOrGetDefaultEnvironment(PortablePipelineOptions options) {\n String type = options.getDefaultEnvironmentType();\n String config = options.getDefaultEnvironmentConfig();\n\n Environment defaultEnvironment;\n if (Strings.isNullOrEmpty(type)) {\n defaultEnvironment = JAVA_SDK_HARNESS_ENVIRONMENT;\n } else {\n switch (type) {\n case ENVIRONMENT_EMBEDDED:\n defaultEnvironment = createEmbeddedEnvironment(config);\n break;\n case ENVIRONMENT_EXTERNAL:\n case ENVIRONMENT_LOOPBACK:\n defaultEnvironment = createExternalEnvironment(config);\n break;\n case ENVIRONMENT_PROCESS:\n defaultEnvironment = createProcessEnvironment(config);\n break;\n case ENVIRONMENT_DOCKER:\n default:\n defaultEnvironment = createDockerEnvironment(config);\n }\n }\n return defaultEnvironment\n .toBuilder()\n .addAllDependencies(getDeferredArtifacts(options))\n .addAllCapabilities(getJavaCapabilities())\n .build();\n }\n\n public static Environment createDockerEnvironment(String dockerImageUrl) {\n if (Strings.isNullOrEmpty(dockerImageUrl)) {\n return JAVA_SDK_HARNESS_ENVIRONMENT;\n }\n return Environment.newBuilder()\n .setUrn(BeamUrns.getUrn(StandardEnvironments.Environments.DOCKER))\n .setPayload(\n 
DockerPayload.newBuilder().setContainerImage(dockerImageUrl).build().toByteString())\n .build();\n }\n\n private static Environment createExternalEnvironment(String config) {\n return Environment.newBuilder()\n .setUrn(BeamUrns.getUrn(StandardEnvironments.Environments.EXTERNAL))\n .setPayload(\n ExternalPayload.newBuilder()\n .setEndpoint(ApiServiceDescriptor.newBuilder().setUrl(config).build())\n .build()\n .toByteString())\n .build();\n }\n\n private static Environment createProcessEnvironment(String config) {\n try {\n ProcessPayloadReferenceJSON payloadReferenceJSON =\n MAPPER.readValue(config, ProcessPayloadReferenceJSON.class);\n return createProcessEnvironment(\n payloadReferenceJSON.getOs(),\n payloadReferenceJSON.getArch(),\n payloadReferenceJSON.getCommand(),\n payloadReferenceJSON.getEnv());\n } catch (IOException e) {\n throw new RuntimeException(\n String.format(\"Unable to parse process environment config: %s\", config), e);\n }\n }\n\n private static Environment createEmbeddedEnvironment(String config) {\n return Environment.newBuilder()\n .setUrn(ENVIRONMENT_EMBEDDED)\n .setPayload(ByteString.copyFromUtf8(MoreObjects.firstNonNull(config, \"\")))\n .build();\n }\n\n public static Environment createProcessEnvironment(\n String os, String arch, String command, Map env) {\n ProcessPayload.Builder builder = ProcessPayload.newBuilder();\n if (!Strings.isNullOrEmpty(os)) {\n builder.setOs(os);\n }\n if (!Strings.isNullOrEmpty(arch)) {\n builder.setArch(arch);\n }\n if (!Strings.isNullOrEmpty(command)) {\n builder.setCommand(command);\n }\n if (env != null) {\n builder.putAllEnv(env);\n }\n return Environment.newBuilder()\n .setUrn(BeamUrns.getUrn(StandardEnvironments.Environments.PROCESS))\n .setPayload(builder.build().toByteString())\n .build();\n }\n\n public static Optional getEnvironment(String ptransformId, Components components) {\n PTransform ptransform = components.getTransformsOrThrow(ptransformId);\n String envId = 
ptransform.getEnvironmentId();\n if (Strings.isNullOrEmpty(envId)) {\n \n \n \n return Optional.empty();\n } else {\n return Optional.of(components.getEnvironmentsOrThrow(envId));\n }\n }\n\n public static Optional getEnvironment(\n PTransform ptransform, RehydratedComponents components) {\n String envId = ptransform.getEnvironmentId();\n if (Strings.isNullOrEmpty(envId)) {\n return Optional.empty();\n } else {\n \n \n \n return Optional.of(components.getEnvironment(envId));\n }\n }\n\n \n\n public static List getDeferredArtifacts(PipelineOptions options) {\n List stagingFiles = options.as(PortablePipelineOptions.class).getFilesToStage();\n if (stagingFiles == null || stagingFiles.isEmpty()) {\n return ImmutableList.of();\n }\n\n String key = UUID.randomUUID().toString();\n DefaultArtifactResolver.INSTANCE.register(\n (info) -> {\n if (BeamUrns.getUrn(StandardArtifacts.Types.DEFERRED).equals(info.getTypeUrn())) {\n RunnerApi.DeferredArtifactPayload deferredArtifactPayload;\n try {\n deferredArtifactPayload =\n RunnerApi.DeferredArtifactPayload.parseFrom(info.getTypePayload());\n } catch (InvalidProtocolBufferException e) {\n throw new RuntimeException(\"Error parsing deferred artifact payload.\", e);\n }\n if (key.equals(deferredArtifactPayload.getKey())) {\n return Optional.of(getArtifacts(stagingFiles));\n } else {\n return Optional.empty();\n }\n } else {\n return Optional.empty();\n }\n });\n\n return ImmutableList.of(\n ArtifactInformation.newBuilder()\n .setTypeUrn(BeamUrns.getUrn(StandardArtifacts.Types.DEFERRED))\n .setTypePayload(\n RunnerApi.DeferredArtifactPayload.newBuilder().setKey(key).build().toByteString())\n .build());\n }\n\n public static Set getJavaCapabilities() {\n ImmutableSet.Builder capabilities = ImmutableSet.builder();\n capabilities.addAll(ModelCoders.urns());\n capabilities.add(BeamUrns.getUrn(StandardProtocols.Enum.MULTI_CORE_BUNDLE_PROCESSING));\n capabilities.add(BeamUrns.getUrn(StandardProtocols.Enum.PROGRESS_REPORTING));\n return 
capabilities.build();\n }\n\n private static String createStagingFileName(File file) {\n \n \n \n \n \n \n \n return UUID.randomUUID().toString();\n }\n\n private static File zipDirectory(File directory) throws IOException {\n File zipFile = File.createTempFile(directory.getName(), \".zip\");\n try (FileOutputStream fos = new FileOutputStream(zipFile)) {\n ZipFiles.zipDirectory(directory, fos);\n }\n return zipFile;\n }\n\n private static class ProcessPayloadReferenceJSON {\n @Nullable private String os;\n @Nullable private String arch;\n @Nullable private String command;\n @Nullable private Map env;\n\n @Nullable\n public String getOs() {\n return os;\n }\n\n @Nullable\n public String getArch() {\n return arch;\n }\n\n @Nullable\n public String getCommand() {\n return command;\n }\n\n @Nullable\n public Map getEnv() {\n return env;\n }\n }\n}" }, { "comment": "No, currently only direct rewriting of generated columns is supported.", "method_body": "public Scope visitSelect(SelectRelation selectRelation, Scope scope) {\n AnalyzeState analyzeState = new AnalyzeState();\n \n Set aliasSet = new HashSet<>();\n Relation resolvedRelation = resolveTableRef(selectRelation.getRelation(), scope, aliasSet);\n if (resolvedRelation instanceof TableFunctionRelation) {\n throw unsupportedException(\"Table function must be used with lateral join\");\n }\n selectRelation.setRelation(resolvedRelation);\n Scope sourceScope = process(resolvedRelation, scope);\n sourceScope.setParent(scope);\n\n Map generatedExprToColumnRef = new HashMap<>();\n new AstVisitor() {\n @Override\n public Void visitTable(TableRelation tableRelation, Void context) {\n generatedExprToColumnRef.putAll(tableRelation.getGeneratedExprToColumnRef());\n return null;\n }\n\n @Override\n public Void visitJoin(JoinRelation joinRelation, Void context) {\n visit(joinRelation.getLeft());\n visit(joinRelation.getRight());\n return null;\n }\n }.visit(resolvedRelation);\n 
analyzeState.setGeneratedExprToColumnRef(generatedExprToColumnRef);\n\n SelectAnalyzer selectAnalyzer = new SelectAnalyzer(session);\n selectAnalyzer.analyze(\n analyzeState,\n selectRelation.getSelectList(),\n selectRelation.getRelation(),\n sourceScope,\n selectRelation.getGroupByClause(),\n selectRelation.getHavingClause(),\n selectRelation.getWhereClause(),\n selectRelation.getOrderBy(),\n selectRelation.getLimit());\n\n selectRelation.fillResolvedAST(analyzeState);\n return analyzeState.getOutputScope();\n }", "target_code": "return null;", "method_body_after": "public Scope visitSelect(SelectRelation selectRelation, Scope scope) {\n AnalyzeState analyzeState = new AnalyzeState();\n \n Set aliasSet = new HashSet<>();\n Relation resolvedRelation = resolveTableRef(selectRelation.getRelation(), scope, aliasSet);\n if (resolvedRelation instanceof TableFunctionRelation) {\n throw unsupportedException(\"Table function must be used with lateral join\");\n }\n selectRelation.setRelation(resolvedRelation);\n Scope sourceScope = process(resolvedRelation, scope);\n sourceScope.setParent(scope);\n\n Map generatedExprToColumnRef = new HashMap<>();\n new AstVisitor() {\n @Override\n public Void visitTable(TableRelation tableRelation, Void context) {\n generatedExprToColumnRef.putAll(tableRelation.getGeneratedExprToColumnRef());\n return null;\n }\n\n @Override\n public Void visitJoin(JoinRelation joinRelation, Void context) {\n visit(joinRelation.getLeft());\n visit(joinRelation.getRight());\n return null;\n }\n }.visit(resolvedRelation);\n analyzeState.setGeneratedExprToColumnRef(generatedExprToColumnRef);\n\n SelectAnalyzer selectAnalyzer = new SelectAnalyzer(session);\n selectAnalyzer.analyze(\n analyzeState,\n selectRelation.getSelectList(),\n selectRelation.getRelation(),\n sourceScope,\n selectRelation.getGroupByClause(),\n selectRelation.getHavingClause(),\n selectRelation.getWhereClause(),\n selectRelation.getOrderBy(),\n selectRelation.getLimit());\n\n 
selectRelation.fillResolvedAST(analyzeState);\n return analyzeState.getOutputScope();\n }", "context_before": "class Visitor extends AstVisitor {\n public Visitor() {\n }\n\n public Scope process(ParseNode node, Scope scope) {\n return node.accept(this, scope);\n }\n\n @Override\n public Scope visitQueryStatement(QueryStatement node, Scope parent) {\n Scope scope = visitQueryRelation(node.getQueryRelation(), parent);\n if (node.hasOutFileClause()) {\n node.getOutFileClause().analyze(scope);\n }\n return scope;\n }\n\n @Override\n public Scope visitQueryRelation(QueryRelation node, Scope parent) {\n Scope scope = analyzeCTE(node, parent);\n return process(node, scope);\n }\n\n private Scope analyzeCTE(QueryRelation stmt, Scope scope) {\n Scope cteScope = new Scope(RelationId.anonymous(), new RelationFields());\n cteScope.setParent(scope);\n\n if (!stmt.hasWithClause()) {\n return cteScope;\n }\n\n for (CTERelation withQuery : stmt.getCteRelations()) {\n QueryRelation query = withQuery.getCteQueryStatement().getQueryRelation();\n process(withQuery.getCteQueryStatement(), cteScope);\n String cteName = withQuery.getName();\n if (cteScope.containsCTE(cteName)) {\n ErrorReport.reportSemanticException(ErrorCode.ERR_NONUNIQ_TABLE, cteName);\n }\n\n if (withQuery.getColumnOutputNames() == null) {\n withQuery.setColumnOutputNames(new ArrayList<>(query.getColumnOutputNames()));\n } else {\n if (withQuery.getColumnOutputNames().size() != query.getColumnOutputNames().size()) {\n ErrorReport.reportSemanticException(ErrorCode.ERR_VIEW_WRONG_LIST);\n }\n }\n\n /*\n * use cte column name as output scope of subquery relation fields\n */\n ImmutableList.Builder outputFields = ImmutableList.builder();\n for (int fieldIdx = 0; fieldIdx < query.getRelationFields().getAllFields().size(); ++fieldIdx) {\n Field originField = query.getRelationFields().getFieldByIndex(fieldIdx);\n\n String database = originField.getRelationAlias() == null ? 
session.getDatabase() :\n originField.getRelationAlias().getDb();\n TableName tableName = new TableName(database, cteName);\n outputFields.add(\n new Field(withQuery.getColumnOutputNames().get(fieldIdx), originField.getType(), tableName,\n originField.getOriginExpression()));\n }\n\n /*\n * Because the analysis of CTE is sensitive to order\n * the later CTE can call the previous resolved CTE,\n * and the previous CTE can rewrite the existing table name.\n * So here will save an increasing AnalyzeState to add cte scope\n */\n withQuery.setScope(new Scope(RelationId.of(withQuery), new RelationFields(outputFields.build())));\n cteScope.addCteQueries(cteName, withQuery);\n }\n\n return cteScope;\n }\n\n @Override\n \n\n private Relation resolveTableRef(Relation relation, Scope scope, Set aliasSet) {\n if (relation instanceof JoinRelation) {\n JoinRelation join = (JoinRelation) relation;\n join.setLeft(resolveTableRef(join.getLeft(), scope, aliasSet));\n Relation rightRelation = resolveTableRef(join.getRight(), scope, aliasSet);\n join.setRight(rightRelation);\n if (rightRelation instanceof TableFunctionRelation) {\n join.setLateral(true);\n }\n return join;\n } else if (relation instanceof FileTableFunctionRelation) {\n FileTableFunctionRelation tableFunctionRelation = (FileTableFunctionRelation) relation;\n Table table = resolveTableFunctionTable(tableFunctionRelation.getProperties());\n tableFunctionRelation.setTable(table);\n return relation;\n } else if (relation instanceof TableRelation) {\n TableRelation tableRelation = (TableRelation) relation;\n TableName tableName = tableRelation.getName();\n if (tableName != null && Strings.isNullOrEmpty(tableName.getDb())) {\n Optional withQuery = scope.getCteQueries(tableName.getTbl());\n if (withQuery.isPresent()) {\n CTERelation cteRelation = withQuery.get();\n RelationFields withRelationFields = withQuery.get().getRelationFields();\n ImmutableList.Builder outputFields = ImmutableList.builder();\n\n for (int fieldIdx = 0; 
fieldIdx < withRelationFields.getAllFields().size(); ++fieldIdx) {\n Field originField = withRelationFields.getAllFields().get(fieldIdx);\n outputFields.add(new Field(\n originField.getName(), originField.getType(), tableRelation.getResolveTableName(),\n originField.getOriginExpression()));\n }\n\n \n \n \n \n \n \n CTERelation newCteRelation = new CTERelation(cteRelation.getCteMouldId(), tableName.getTbl(),\n cteRelation.getColumnOutputNames(),\n cteRelation.getCteQueryStatement());\n newCteRelation.setAlias(tableRelation.getAlias());\n newCteRelation.setResolvedInFromClause(true);\n newCteRelation.setScope(\n new Scope(RelationId.of(newCteRelation), new RelationFields(outputFields.build())));\n return newCteRelation;\n }\n }\n\n TableName resolveTableName = relation.getResolveTableName();\n MetaUtils.normalizationTableName(session, resolveTableName);\n if (aliasSet.contains(resolveTableName)) {\n ErrorReport.reportSemanticException(ErrorCode.ERR_NONUNIQ_TABLE,\n relation.getResolveTableName().getTbl());\n } else {\n aliasSet.add(new TableName(resolveTableName.getCatalog(),\n resolveTableName.getDb(),\n resolveTableName.getTbl()));\n }\n\n Table table = resolveTable(tableRelation);\n Relation r;\n if (table instanceof View) {\n View view = (View) table;\n QueryStatement queryStatement = view.getQueryStatement();\n ViewRelation viewRelation = new ViewRelation(tableName, view, queryStatement);\n viewRelation.setAlias(tableRelation.getAlias());\n\n r = viewRelation;\n } else if (table instanceof HiveView) {\n HiveView hiveView = (HiveView) table;\n QueryStatement queryStatement = hiveView.getQueryStatement();\n View view = new View(hiveView.getId(), hiveView.getName(), hiveView.getFullSchema());\n view.setInlineViewDefWithSqlMode(hiveView.getInlineViewDef(), 0);\n ViewRelation viewRelation = new ViewRelation(tableName, view, queryStatement);\n viewRelation.setAlias(tableRelation.getAlias());\n\n r = viewRelation;\n } else {\n if (tableRelation.getTemporalClause() != 
null) {\n if (table.getType() != Table.TableType.MYSQL) {\n throw unsupportedException(\n \"Unsupported table type for temporal clauses: \" + table.getType() +\n \"; only external MYSQL tables support temporal clauses\");\n }\n }\n\n if (table.isSupported()) {\n tableRelation.setTable(table);\n r = tableRelation;\n } else {\n throw unsupportedException(\"Unsupported scan table type: \" + table.getType());\n }\n }\n\n if (r.isPolicyRewritten()) {\n return r;\n }\n assert tableName != null;\n QueryStatement policyRewriteQuery = SecurityPolicyRewriteRule.buildView(session, r, tableName);\n if (policyRewriteQuery == null) {\n return r;\n } else {\n r.setPolicyRewritten(true);\n SubqueryRelation subqueryRelation = new SubqueryRelation(policyRewriteQuery);\n subqueryRelation.setAlias(tableName);\n return subqueryRelation;\n }\n } else {\n if (relation.getResolveTableName() != null) {\n if (aliasSet.contains(relation.getResolveTableName())) {\n ErrorReport.reportSemanticException(ErrorCode.ERR_NONUNIQ_TABLE,\n relation.getResolveTableName().getTbl());\n } else {\n aliasSet.add(relation.getResolveTableName());\n }\n }\n return relation;\n }\n }\n\n @Override\n public Scope visitTable(TableRelation node, Scope outerScope) {\n TableName tableName = node.getResolveTableName();\n Table table = node.getTable();\n\n ImmutableList.Builder fields = ImmutableList.builder();\n ImmutableMap.Builder columns = ImmutableMap.builder();\n\n if (node.isSyncMVQuery()) {\n OlapTable olapTable = (OlapTable) table;\n List mvSchema = olapTable.getSchemaByIndexId(olapTable.getBaseIndexId());\n for (Column column : mvSchema) {\n Field field = new Field(column.getName(), column.getType(), tableName,\n new SlotRef(tableName, column.getName(), column.getName()), true, column.isAllowNull());\n columns.put(field, column);\n fields.add(field);\n }\n } else {\n List fullSchema = node.isBinlogQuery()\n ? 
appendBinlogMetaColumns(table.getFullSchema()) : table.getFullSchema();\n Set baseSchema = new HashSet<>(node.isBinlogQuery()\n ? appendBinlogMetaColumns(table.getBaseSchema()) : table.getBaseSchema());\n for (Column column : fullSchema) {\n \n boolean visible = baseSchema.contains(column);\n SlotRef slot = new SlotRef(tableName, column.getName(), column.getName());\n Field field = new Field(column.getName(), column.getType(), tableName, slot, visible,\n column.isAllowNull());\n columns.put(field, column);\n fields.add(field);\n }\n }\n\n node.setColumns(columns.build());\n String dbName = node.getName().getDb();\n if (session.getDumpInfo() != null) {\n session.getDumpInfo().addTable(dbName, table);\n\n if (table.isHiveTable()) {\n HiveTable hiveTable = (HiveTable) table;\n session.getDumpInfo().addHMSTable(hiveTable.getResourceName(), hiveTable.getDbName(),\n hiveTable.getTableName());\n HiveMetaStoreTableDumpInfo hiveMetaStoreTableDumpInfo = session.getDumpInfo().getHMSTable(\n hiveTable.getResourceName(), hiveTable.getDbName(), hiveTable.getTableName());\n hiveMetaStoreTableDumpInfo.setPartColumnNames(hiveTable.getPartitionColumnNames());\n hiveMetaStoreTableDumpInfo.setDataColumnNames(hiveTable.getDataColumnNames());\n Resource resource = GlobalStateMgr.getCurrentState().getResourceMgr().\n getResource(hiveTable.getResourceName());\n if (resource != null) {\n session.getDumpInfo().addResource(resource);\n }\n }\n }\n\n Scope scope = new Scope(RelationId.of(node), new RelationFields(fields.build()));\n node.setScope(scope);\n\n Map generatedExprToColumnRef = new HashMap<>();\n for (Column column : table.getBaseSchema()) {\n if (column.generatedColumnExpr() != null) {\n Expr materializedExpression = column.generatedColumnExpr();\n ExpressionAnalyzer.analyzeExpression(materializedExpression, new AnalyzeState(), scope, session);\n SlotRef slotRef = new SlotRef(null, column.getName());\n ExpressionAnalyzer.analyzeExpression(slotRef, new AnalyzeState(), scope, 
session);\n generatedExprToColumnRef.put(materializedExpression, slotRef);\n }\n }\n node.setGeneratedExprToColumnRef(generatedExprToColumnRef);\n\n return scope;\n }\n\n private List appendBinlogMetaColumns(List schema) {\n List columns = new ArrayList<>(schema);\n columns.add(new Column(BINLOG_OP_COLUMN_NAME, Type.TINYINT));\n columns.add(new Column(BINLOG_VERSION_COLUMN_NAME, Type.BIGINT));\n columns.add(new Column(BINLOG_SEQ_ID_COLUMN_NAME, Type.BIGINT));\n columns.add(new Column(BINLOG_TIMESTAMP_COLUMN_NAME, Type.BIGINT));\n return columns;\n }\n\n @Override\n public Scope visitFileTableFunction(FileTableFunctionRelation node, Scope outerScope) {\n TableName tableName = node.getResolveTableName();\n Table table = node.getTable();\n\n ImmutableList.Builder fields = ImmutableList.builder();\n ImmutableMap.Builder columns = ImmutableMap.builder();\n\n List fullSchema = table.getFullSchema();\n for (Column column : fullSchema) {\n Field field = new Field(column.getName(), column.getType(), tableName,\n new SlotRef(tableName, column.getName(), column.getName()), true);\n columns.put(field, column);\n fields.add(field);\n }\n\n node.setColumns(columns.build());\n Scope scope = new Scope(RelationId.of(node), new RelationFields(fields.build()));\n node.setScope(scope);\n return scope;\n }\n\n @Override\n public Scope visitCTE(CTERelation cteRelation, Scope context) {\n QueryRelation query = cteRelation.getCteQueryStatement().getQueryRelation();\n\n ImmutableList.Builder outputFields = ImmutableList.builder();\n for (int fieldIdx = 0; fieldIdx < query.getRelationFields().getAllFields().size(); ++fieldIdx) {\n Field originField = query.getRelationFields().getFieldByIndex(fieldIdx);\n outputFields.add(new Field(cteRelation.getColumnOutputNames() == null ?\n originField.getName() : cteRelation.getColumnOutputNames().get(fieldIdx),\n originField.getType(),\n cteRelation.getResolveTableName(),\n originField.getOriginExpression()));\n }\n Scope scope = new 
Scope(RelationId.of(cteRelation), new RelationFields(outputFields.build()));\n cteRelation.setScope(scope);\n return scope;\n }\n\n @Override\n public Scope visitJoin(JoinRelation join, Scope parentScope) {\n Scope leftScope = process(join.getLeft(), parentScope);\n Scope rightScope;\n if (join.getRight() instanceof TableFunctionRelation || join.isLateral()) {\n if (!(join.getRight() instanceof TableFunctionRelation)) {\n throw new SemanticException(\"Only support lateral join with UDTF\");\n }\n\n if (!join.getJoinOp().isInnerJoin() && !join.getJoinOp().isCrossJoin()) {\n throw new SemanticException(\"Not support lateral join except inner or cross\");\n }\n rightScope = process(join.getRight(), leftScope);\n } else {\n rightScope = process(join.getRight(), parentScope);\n }\n\n Expr joinEqual = join.getOnPredicate();\n if (join.getUsingColNames() != null) {\n Expr resolvedUsing = analyzeJoinUsing(join.getUsingColNames(), leftScope, rightScope);\n if (joinEqual == null) {\n joinEqual = resolvedUsing;\n } else {\n joinEqual = new CompoundPredicate(CompoundPredicate.Operator.AND, joinEqual, resolvedUsing);\n }\n join.setOnPredicate(joinEqual);\n }\n\n if (!join.getJoinHint().isEmpty()) {\n analyzeJoinHints(join);\n }\n\n if (joinEqual != null) {\n /*\n * sourceRelation.getRelationFields() is used to represent the column information of output.\n * To ensure the OnPredicate in semi/anti is correct, the relation needs to be re-assembled here\n * with left child and right child relationFields\n */\n analyzeExpression(joinEqual, new AnalyzeState(), new Scope(RelationId.of(join),\n leftScope.getRelationFields().joinWith(rightScope.getRelationFields())));\n\n AnalyzerUtils.verifyNoAggregateFunctions(joinEqual, \"JOIN\");\n AnalyzerUtils.verifyNoWindowFunctions(joinEqual, \"JOIN\");\n AnalyzerUtils.verifyNoGroupingFunctions(joinEqual, \"JOIN\");\n\n if (!joinEqual.getType().matchesType(Type.BOOLEAN) && !joinEqual.getType().matchesType(Type.NULL)) {\n throw new 
SemanticException(\"WHERE clause must evaluate to a boolean: actual type %s\",\n joinEqual.getType());\n }\n \n \n \n \n \n checkJoinEqual(joinEqual);\n } else {\n if (join.getJoinOp().isOuterJoin() || join.getJoinOp().isSemiAntiJoin()) {\n throw new SemanticException(join.getJoinOp() + \" requires an ON or USING clause.\");\n }\n }\n\n /*\n * New Scope needs to be constructed for select in semi/anti join\n */\n Scope scope;\n if (join.getJoinOp().isLeftSemiAntiJoin()) {\n scope = new Scope(RelationId.of(join), leftScope.getRelationFields());\n } else if (join.getJoinOp().isRightSemiAntiJoin()) {\n scope = new Scope(RelationId.of(join), rightScope.getRelationFields());\n } else if (join.getJoinOp().isLeftOuterJoin()) {\n List rightFields = getFieldsWithNullable(rightScope);\n scope = new Scope(RelationId.of(join),\n leftScope.getRelationFields().joinWith(new RelationFields(rightFields)));\n } else if (join.getJoinOp().isRightOuterJoin()) {\n List leftFields = getFieldsWithNullable(leftScope);\n scope = new Scope(RelationId.of(join),\n new RelationFields(leftFields).joinWith(rightScope.getRelationFields()));\n } else if (join.getJoinOp().isFullOuterJoin()) {\n List rightFields = getFieldsWithNullable(rightScope);\n List leftFields = getFieldsWithNullable(leftScope);\n scope = new Scope(RelationId.of(join),\n new RelationFields(leftFields).joinWith(new RelationFields(rightFields)));\n } else {\n scope = new Scope(RelationId.of(join),\n leftScope.getRelationFields().joinWith(rightScope.getRelationFields()));\n }\n join.setScope(scope);\n return scope;\n }\n\n private List getFieldsWithNullable(Scope scope) {\n List newFields = new ArrayList<>();\n for (Field field : scope.getRelationFields().getAllFields()) {\n Field newField = new Field(field);\n newField.setNullable(true);\n newFields.add(newField);\n }\n return newFields;\n }\n\n private Expr analyzeJoinUsing(List usingColNames, Scope left, Scope right) {\n Expr joinEqual = null;\n for (String colName : 
usingColNames) {\n TableName leftTableName =\n left.resolveField(new SlotRef(null, colName)).getField().getRelationAlias();\n TableName rightTableName =\n right.resolveField(new SlotRef(null, colName)).getField().getRelationAlias();\n\n \n BinaryPredicate resolvedUsing = new BinaryPredicate(BinaryType.EQ,\n new SlotRef(leftTableName, colName), new SlotRef(rightTableName, colName));\n\n if (joinEqual == null) {\n joinEqual = resolvedUsing;\n } else {\n joinEqual = new CompoundPredicate(CompoundPredicate.Operator.AND, joinEqual, resolvedUsing);\n }\n }\n return joinEqual;\n }\n\n private void analyzeJoinHints(JoinRelation join) {\n if (JoinOperator.HINT_BROADCAST.equals(join.getJoinHint())) {\n if (join.getJoinOp() == JoinOperator.RIGHT_OUTER_JOIN\n || join.getJoinOp() == JoinOperator.FULL_OUTER_JOIN\n || join.getJoinOp() == JoinOperator.RIGHT_SEMI_JOIN\n || join.getJoinOp() == JoinOperator.RIGHT_ANTI_JOIN) {\n throw new SemanticException(join.getJoinOp().toString() + \" does not support BROADCAST.\");\n }\n } else if (JoinOperator.HINT_SHUFFLE.equals(join.getJoinHint())) {\n if (join.getJoinOp() == JoinOperator.CROSS_JOIN ||\n (join.getJoinOp() == JoinOperator.INNER_JOIN && join.getOnPredicate() == null)) {\n throw new SemanticException(\"CROSS JOIN does not support SHUFFLE.\");\n }\n } else if (JoinOperator.HINT_BUCKET.equals(join.getJoinHint()) ||\n JoinOperator.HINT_COLOCATE.equals(join.getJoinHint())) {\n if (join.getJoinOp() == JoinOperator.CROSS_JOIN) {\n throw new SemanticException(\"CROSS JOIN does not support \" + join.getJoinHint() + \".\");\n }\n } else if (!JoinOperator.HINT_UNREORDER.equals(join.getJoinHint())) {\n throw new SemanticException(\"JOIN hint not recognized: \" + join.getJoinHint());\n }\n }\n\n @Override\n public Scope visitSubquery(SubqueryRelation subquery, Scope context) {\n if (subquery.getResolveTableName() != null && subquery.getResolveTableName().getTbl() == null) {\n 
ErrorReport.reportSemanticException(ErrorCode.ERR_DERIVED_MUST_HAVE_ALIAS);\n }\n\n Scope queryOutputScope = process(subquery.getQueryStatement(), context);\n\n ImmutableList.Builder outputFields = ImmutableList.builder();\n\n if (subquery.getExplicitColumnNames() != null) {\n if (queryOutputScope.getRelationFields().getAllVisibleFields().size()\n != subquery.getExplicitColumnNames().size()) {\n throw new SemanticException(\"In definition of view, derived table or common table expression, \" +\n \"SELECT list and column names list have different column counts\");\n }\n }\n\n int explicitColumnNameIdx = 0;\n for (Field field : queryOutputScope.getRelationFields().getAllFields()) {\n String fieldResolveName;\n if (subquery.getExplicitColumnNames() != null && field.isVisible()) {\n fieldResolveName = subquery.getExplicitColumnNames().get(explicitColumnNameIdx);\n explicitColumnNameIdx++;\n } else {\n fieldResolveName = field.getName();\n }\n\n outputFields.add(new Field(fieldResolveName, field.getType(), subquery.getResolveTableName(),\n field.getOriginExpression()));\n\n }\n Scope scope = new Scope(RelationId.of(subquery), new RelationFields(outputFields.build()));\n\n analyzeOrderByClause(subquery, scope);\n subquery.setScope(scope);\n return scope;\n }\n\n private void analyzeOrderByClause(QueryRelation query, Scope scope) {\n if (!query.hasOrderByClause()) {\n return;\n }\n List outputExpressions = query.getOutputExpression();\n for (OrderByElement orderByElement : query.getOrderBy()) {\n Expr expression = orderByElement.getExpr();\n AnalyzerUtils.verifyNoGroupingFunctions(expression, \"ORDER BY\");\n\n if (expression instanceof IntLiteral) {\n long ordinal = ((IntLiteral) expression).getLongValue();\n if (ordinal < 1 || ordinal > outputExpressions.size()) {\n throw new SemanticException(\"ORDER BY position %s is not in select list\", ordinal);\n }\n expression = new FieldReference((int) ordinal - 1, null);\n }\n\n analyzeExpression(expression, new AnalyzeState(), 
scope);\n\n if (!expression.getType().canOrderBy()) {\n throw new SemanticException(Type.NOT_SUPPORT_ORDER_ERROR_MSG);\n }\n\n orderByElement.setExpr(expression);\n }\n }\n\n @Override\n public Scope visitView(ViewRelation node, Scope scope) {\n Scope queryOutputScope;\n try {\n queryOutputScope = process(node.getQueryStatement(), scope);\n } catch (SemanticException e) {\n throw new SemanticException(\"View \" + node.getName() + \" references invalid table(s) or column(s) or \" +\n \"function(s) or definer/invoker of view lack rights to use them\");\n }\n View view = node.getView();\n List fields = Lists.newArrayList();\n for (int i = 0; i < view.getBaseSchema().size(); ++i) {\n Column column = view.getBaseSchema().get(i);\n Field originField = queryOutputScope.getRelationFields().getFieldByIndex(i);\n \n \n \n \n \n \n \n \n Field field = new Field(column.getName(), originField.getType(), node.getResolveTableName(),\n originField.getOriginExpression());\n fields.add(field);\n }\n\n if (session.getDumpInfo() != null) {\n String dbName = node.getName().getDb();\n session.getDumpInfo().addView(dbName, view);\n }\n\n Scope viewScope = new Scope(RelationId.of(node), new RelationFields(fields));\n node.setScope(viewScope);\n return viewScope;\n }\n\n @Override\n public Scope visitUnion(UnionRelation node, Scope context) {\n return analyzeSetOperation(node, context);\n }\n\n @Override\n public Scope visitExcept(ExceptRelation node, Scope context) {\n if (node.getQualifier().equals(SetQualifier.ALL)) {\n throw new SemanticException(\"EXCEPT does not support ALL qualifier\");\n }\n return analyzeSetOperation(node, context);\n }\n\n @Override\n public Scope visitIntersect(IntersectRelation node, Scope context) {\n if (node.getQualifier().equals(SetQualifier.ALL)) {\n throw new SemanticException(\"INTERSECT does not support ALL qualifier\");\n }\n return analyzeSetOperation(node, context);\n }\n\n private Scope analyzeSetOperation(SetOperationRelation node, Scope context) 
{\n List setOpRelations = node.getRelations();\n\n Scope leftChildScope = process(setOpRelations.get(0), context);\n Type[] outputTypes = leftChildScope.getRelationFields().getAllFields()\n .stream().map(Field::getType).toArray(Type[]::new);\n List nullables = leftChildScope.getRelationFields().getAllFields()\n .stream().map(field -> field.isNullable()).collect(Collectors.toList());\n int outputSize = leftChildScope.getRelationFields().size();\n\n for (int i = 1; i < setOpRelations.size(); ++i) {\n Scope relation = process(setOpRelations.get(i), context);\n if (relation.getRelationFields().size() != outputSize) {\n throw new SemanticException(\"Operands have unequal number of columns\");\n }\n for (int fieldIdx = 0; fieldIdx < relation.getRelationFields().size(); ++fieldIdx) {\n Field field = relation.getRelationFields().getAllFields().get(fieldIdx);\n Type fieldType = field.getType();\n if (fieldType.isOnlyMetricType() &&\n !((node instanceof UnionRelation) &&\n (node.getQualifier().equals(SetQualifier.ALL)))) {\n throw new SemanticException(\"%s not support set operation\", fieldType);\n }\n\n Type commonType = TypeManager.getCommonSuperType(outputTypes[fieldIdx],\n relation.getRelationFields().getFieldByIndex(fieldIdx).getType());\n if (!commonType.isValid()) {\n throw new SemanticException(String.format(\"Incompatible return types '%s' and '%s'\",\n outputTypes[fieldIdx],\n relation.getRelationFields().getFieldByIndex(fieldIdx).getType()));\n }\n outputTypes[fieldIdx] = commonType;\n nullables.set(fieldIdx, nullables.get(fieldIdx) | field.isNullable());\n }\n }\n\n ArrayList fields = new ArrayList<>();\n for (int fieldIdx = 0; fieldIdx < outputSize; ++fieldIdx) {\n Field oldField = leftChildScope.getRelationFields().getFieldByIndex(fieldIdx);\n fields.add(new Field(oldField.getName(), outputTypes[fieldIdx], oldField.getRelationAlias(),\n oldField.getOriginExpression(), true, nullables.get(fieldIdx)));\n }\n\n Scope setOpOutputScope = new 
Scope(RelationId.of(node), new RelationFields(fields));\n\n analyzeOrderByClause(node, setOpOutputScope);\n node.setScope(setOpOutputScope);\n return setOpOutputScope;\n }\n\n @Override\n public Scope visitValues(ValuesRelation node, Scope scope) {\n AnalyzeState analyzeState = new AnalyzeState();\n\n List firstRow = node.getRow(0);\n firstRow.forEach(e -> analyzeExpression(e, analyzeState, scope));\n List> rows = node.getRows();\n Type[] outputTypes = firstRow.stream().map(Expr::getType).toArray(Type[]::new);\n for (List row : rows) {\n if (row.size() != firstRow.size()) {\n throw new SemanticException(\"Values have unequal number of columns\");\n }\n for (int fieldIdx = 0; fieldIdx < row.size(); ++fieldIdx) {\n analyzeExpression(row.get(fieldIdx), analyzeState, scope);\n Type commonType =\n TypeManager.getCommonSuperType(outputTypes[fieldIdx], row.get(fieldIdx).getType());\n if (!commonType.isValid()) {\n throw new SemanticException(String.format(\"Incompatible return types '%s' and '%s'\",\n outputTypes[fieldIdx], row.get(fieldIdx).getType()));\n }\n outputTypes[fieldIdx] = commonType;\n }\n }\n List fields = new ArrayList<>();\n for (int fieldIdx = 0; fieldIdx < outputTypes.length; ++fieldIdx) {\n fields.add(new Field(node.getColumnOutputNames().get(fieldIdx), outputTypes[fieldIdx],\n node.getResolveTableName(),\n rows.get(0).get(fieldIdx)));\n }\n\n Scope valuesScope = new Scope(RelationId.of(node), new RelationFields(fields));\n node.setScope(valuesScope);\n return valuesScope;\n }\n\n @Override\n public Scope visitTableFunction(TableFunctionRelation node, Scope scope) {\n AnalyzeState analyzeState = new AnalyzeState();\n List args = node.getFunctionParams().exprs();\n Type[] argTypes = new Type[args.size()];\n for (int i = 0; i < args.size(); ++i) {\n analyzeExpression(args.get(i), analyzeState, scope);\n argTypes[i] = args.get(i).getType();\n\n AnalyzerUtils.verifyNoAggregateFunctions(args.get(i), \"Table Function\");\n 
AnalyzerUtils.verifyNoWindowFunctions(args.get(i), \"Table Function\");\n AnalyzerUtils.verifyNoGroupingFunctions(args.get(i), \"Table Function\");\n }\n\n Function fn = Expr.getBuiltinFunction(node.getFunctionName().getFunction(), argTypes,\n Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n\n if (fn == null) {\n fn = AnalyzerUtils.getUdfFunction(session, node.getFunctionName(), argTypes);\n }\n\n if (fn == null) {\n throw new SemanticException(\"Unknown table function '%s(%s)'\", node.getFunctionName().getFunction(),\n Arrays.stream(argTypes).map(Object::toString).collect(Collectors.joining(\",\")));\n }\n\n if (!(fn instanceof TableFunction)) {\n throw new SemanticException(\"'%s(%s)' is not table function\", node.getFunctionName().getFunction(),\n Arrays.stream(argTypes).map(Object::toString).collect(Collectors.joining(\",\")));\n }\n\n TableFunction tableFunction = (TableFunction) fn;\n node.setTableFunction(tableFunction);\n node.setChildExpressions(node.getFunctionParams().exprs());\n\n if (node.getColumnOutputNames() == null) {\n if (tableFunction.getFunctionName().getFunction().equals(\"unnest\")) {\n \n \n List columnNames = new ArrayList<>();\n for (int i = 0; i < tableFunction.getTableFnReturnTypes().size(); ++i) {\n columnNames.add(\"unnest\");\n }\n node.setColumnOutputNames(columnNames);\n } else {\n node.setColumnOutputNames(new ArrayList<>(tableFunction.getDefaultColumnNames()));\n }\n } else {\n if (node.getColumnOutputNames().size() != tableFunction.getTableFnReturnTypes().size()) {\n throw new SemanticException(\"table %s has %s columns available but %s columns specified\",\n node.getAlias().getTbl(),\n tableFunction.getTableFnReturnTypes().size(),\n node.getColumnOutputNames().size());\n }\n }\n\n ImmutableList.Builder fields = ImmutableList.builder();\n for (int i = 0; i < tableFunction.getTableFnReturnTypes().size(); ++i) {\n String colName = node.getColumnOutputNames().get(i);\n\n Field field = new Field(colName,\n 
tableFunction.getTableFnReturnTypes().get(i),\n node.getResolveTableName(),\n new SlotRef(node.getResolveTableName(), colName, colName));\n fields.add(field);\n }\n\n Scope outputScope = new Scope(RelationId.of(node), new RelationFields(fields.build()));\n node.setScope(outputScope);\n return outputScope;\n }\n\n @Override\n public Scope visitNormalizedTableFunction(NormalizedTableFunctionRelation node, Scope scope) {\n Scope ignored = visitJoin(node, scope);\n \n node.setScope(node.getRight().getScope());\n return node.getScope();\n }\n\n }", "context_after": "class Visitor extends AstVisitor {\n public Visitor() {\n }\n\n public Scope process(ParseNode node, Scope scope) {\n return node.accept(this, scope);\n }\n\n @Override\n public Scope visitQueryStatement(QueryStatement node, Scope parent) {\n Scope scope = visitQueryRelation(node.getQueryRelation(), parent);\n if (node.hasOutFileClause()) {\n node.getOutFileClause().analyze(scope);\n }\n return scope;\n }\n\n @Override\n public Scope visitQueryRelation(QueryRelation node, Scope parent) {\n Scope scope = analyzeCTE(node, parent);\n return process(node, scope);\n }\n\n private Scope analyzeCTE(QueryRelation stmt, Scope scope) {\n Scope cteScope = new Scope(RelationId.anonymous(), new RelationFields());\n cteScope.setParent(scope);\n\n if (!stmt.hasWithClause()) {\n return cteScope;\n }\n\n for (CTERelation withQuery : stmt.getCteRelations()) {\n QueryRelation query = withQuery.getCteQueryStatement().getQueryRelation();\n process(withQuery.getCteQueryStatement(), cteScope);\n String cteName = withQuery.getName();\n if (cteScope.containsCTE(cteName)) {\n ErrorReport.reportSemanticException(ErrorCode.ERR_NONUNIQ_TABLE, cteName);\n }\n\n if (withQuery.getColumnOutputNames() == null) {\n withQuery.setColumnOutputNames(new ArrayList<>(query.getColumnOutputNames()));\n } else {\n if (withQuery.getColumnOutputNames().size() != query.getColumnOutputNames().size()) {\n 
ErrorReport.reportSemanticException(ErrorCode.ERR_VIEW_WRONG_LIST);\n }\n }\n\n /*\n * use cte column name as output scope of subquery relation fields\n */\n ImmutableList.Builder outputFields = ImmutableList.builder();\n for (int fieldIdx = 0; fieldIdx < query.getRelationFields().getAllFields().size(); ++fieldIdx) {\n Field originField = query.getRelationFields().getFieldByIndex(fieldIdx);\n\n String database = originField.getRelationAlias() == null ? session.getDatabase() :\n originField.getRelationAlias().getDb();\n TableName tableName = new TableName(database, cteName);\n outputFields.add(\n new Field(withQuery.getColumnOutputNames().get(fieldIdx), originField.getType(), tableName,\n originField.getOriginExpression()));\n }\n\n /*\n * Because the analysis of CTE is sensitive to order\n * the later CTE can call the previous resolved CTE,\n * and the previous CTE can rewrite the existing table name.\n * So here will save an increasing AnalyzeState to add cte scope\n */\n withQuery.setScope(new Scope(RelationId.of(withQuery), new RelationFields(outputFields.build())));\n cteScope.addCteQueries(cteName, withQuery);\n }\n\n return cteScope;\n }\n\n @Override\n \n\n private Relation resolveTableRef(Relation relation, Scope scope, Set aliasSet) {\n if (relation instanceof JoinRelation) {\n JoinRelation join = (JoinRelation) relation;\n join.setLeft(resolveTableRef(join.getLeft(), scope, aliasSet));\n Relation rightRelation = resolveTableRef(join.getRight(), scope, aliasSet);\n join.setRight(rightRelation);\n if (rightRelation instanceof TableFunctionRelation) {\n join.setLateral(true);\n }\n return join;\n } else if (relation instanceof FileTableFunctionRelation) {\n FileTableFunctionRelation tableFunctionRelation = (FileTableFunctionRelation) relation;\n Table table = resolveTableFunctionTable(tableFunctionRelation.getProperties());\n tableFunctionRelation.setTable(table);\n return relation;\n } else if (relation instanceof TableRelation) {\n TableRelation 
tableRelation = (TableRelation) relation;\n TableName tableName = tableRelation.getName();\n if (tableName != null && Strings.isNullOrEmpty(tableName.getDb())) {\n Optional withQuery = scope.getCteQueries(tableName.getTbl());\n if (withQuery.isPresent()) {\n CTERelation cteRelation = withQuery.get();\n RelationFields withRelationFields = withQuery.get().getRelationFields();\n ImmutableList.Builder outputFields = ImmutableList.builder();\n\n for (int fieldIdx = 0; fieldIdx < withRelationFields.getAllFields().size(); ++fieldIdx) {\n Field originField = withRelationFields.getAllFields().get(fieldIdx);\n outputFields.add(new Field(\n originField.getName(), originField.getType(), tableRelation.getResolveTableName(),\n originField.getOriginExpression()));\n }\n\n \n \n \n \n \n \n CTERelation newCteRelation = new CTERelation(cteRelation.getCteMouldId(), tableName.getTbl(),\n cteRelation.getColumnOutputNames(),\n cteRelation.getCteQueryStatement());\n newCteRelation.setAlias(tableRelation.getAlias());\n newCteRelation.setResolvedInFromClause(true);\n newCteRelation.setScope(\n new Scope(RelationId.of(newCteRelation), new RelationFields(outputFields.build())));\n return newCteRelation;\n }\n }\n\n TableName resolveTableName = relation.getResolveTableName();\n MetaUtils.normalizationTableName(session, resolveTableName);\n if (aliasSet.contains(resolveTableName)) {\n ErrorReport.reportSemanticException(ErrorCode.ERR_NONUNIQ_TABLE,\n relation.getResolveTableName().getTbl());\n } else {\n aliasSet.add(new TableName(resolveTableName.getCatalog(),\n resolveTableName.getDb(),\n resolveTableName.getTbl()));\n }\n\n Table table = resolveTable(tableRelation);\n Relation r;\n if (table instanceof View) {\n View view = (View) table;\n QueryStatement queryStatement = view.getQueryStatement();\n ViewRelation viewRelation = new ViewRelation(tableName, view, queryStatement);\n viewRelation.setAlias(tableRelation.getAlias());\n\n r = viewRelation;\n } else if (table instanceof HiveView) 
{\n HiveView hiveView = (HiveView) table;\n QueryStatement queryStatement = hiveView.getQueryStatement();\n View view = new View(hiveView.getId(), hiveView.getName(), hiveView.getFullSchema());\n view.setInlineViewDefWithSqlMode(hiveView.getInlineViewDef(), 0);\n ViewRelation viewRelation = new ViewRelation(tableName, view, queryStatement);\n viewRelation.setAlias(tableRelation.getAlias());\n\n r = viewRelation;\n } else {\n if (tableRelation.getTemporalClause() != null) {\n if (table.getType() != Table.TableType.MYSQL) {\n throw unsupportedException(\n \"Unsupported table type for temporal clauses: \" + table.getType() +\n \"; only external MYSQL tables support temporal clauses\");\n }\n }\n\n if (table.isSupported()) {\n tableRelation.setTable(table);\n r = tableRelation;\n } else {\n throw unsupportedException(\"Unsupported scan table type: \" + table.getType());\n }\n }\n\n if (r.isPolicyRewritten()) {\n return r;\n }\n assert tableName != null;\n QueryStatement policyRewriteQuery = SecurityPolicyRewriteRule.buildView(session, r, tableName);\n if (policyRewriteQuery == null) {\n return r;\n } else {\n r.setPolicyRewritten(true);\n SubqueryRelation subqueryRelation = new SubqueryRelation(policyRewriteQuery);\n subqueryRelation.setAlias(tableName);\n return subqueryRelation;\n }\n } else {\n if (relation.getResolveTableName() != null) {\n if (aliasSet.contains(relation.getResolveTableName())) {\n ErrorReport.reportSemanticException(ErrorCode.ERR_NONUNIQ_TABLE,\n relation.getResolveTableName().getTbl());\n } else {\n aliasSet.add(relation.getResolveTableName());\n }\n }\n return relation;\n }\n }\n\n @Override\n public Scope visitTable(TableRelation node, Scope outerScope) {\n TableName tableName = node.getResolveTableName();\n Table table = node.getTable();\n\n ImmutableList.Builder fields = ImmutableList.builder();\n ImmutableMap.Builder columns = ImmutableMap.builder();\n\n if (node.isSyncMVQuery()) {\n OlapTable olapTable = (OlapTable) table;\n List mvSchema = 
olapTable.getSchemaByIndexId(olapTable.getBaseIndexId());\n for (Column column : mvSchema) {\n Field field = new Field(column.getName(), column.getType(), tableName,\n new SlotRef(tableName, column.getName(), column.getName()), true, column.isAllowNull());\n columns.put(field, column);\n fields.add(field);\n }\n } else {\n List fullSchema = node.isBinlogQuery()\n ? appendBinlogMetaColumns(table.getFullSchema()) : table.getFullSchema();\n Set baseSchema = new HashSet<>(node.isBinlogQuery()\n ? appendBinlogMetaColumns(table.getBaseSchema()) : table.getBaseSchema());\n for (Column column : fullSchema) {\n \n boolean visible = baseSchema.contains(column);\n SlotRef slot = new SlotRef(tableName, column.getName(), column.getName());\n Field field = new Field(column.getName(), column.getType(), tableName, slot, visible,\n column.isAllowNull());\n columns.put(field, column);\n fields.add(field);\n }\n }\n\n node.setColumns(columns.build());\n String dbName = node.getName().getDb();\n if (session.getDumpInfo() != null) {\n session.getDumpInfo().addTable(dbName, table);\n\n if (table.isHiveTable()) {\n HiveTable hiveTable = (HiveTable) table;\n session.getDumpInfo().addHMSTable(hiveTable.getResourceName(), hiveTable.getDbName(),\n hiveTable.getTableName());\n HiveMetaStoreTableDumpInfo hiveMetaStoreTableDumpInfo = session.getDumpInfo().getHMSTable(\n hiveTable.getResourceName(), hiveTable.getDbName(), hiveTable.getTableName());\n hiveMetaStoreTableDumpInfo.setPartColumnNames(hiveTable.getPartitionColumnNames());\n hiveMetaStoreTableDumpInfo.setDataColumnNames(hiveTable.getDataColumnNames());\n Resource resource = GlobalStateMgr.getCurrentState().getResourceMgr().\n getResource(hiveTable.getResourceName());\n if (resource != null) {\n session.getDumpInfo().addResource(resource);\n }\n }\n }\n\n Scope scope = new Scope(RelationId.of(node), new RelationFields(fields.build()));\n node.setScope(scope);\n\n Map generatedExprToColumnRef = new HashMap<>();\n for (Column column : 
table.getBaseSchema()) {\n if (column.generatedColumnExpr() != null) {\n Expr materializedExpression = column.generatedColumnExpr();\n ExpressionAnalyzer.analyzeExpression(materializedExpression, new AnalyzeState(), scope, session);\n SlotRef slotRef = new SlotRef(null, column.getName());\n ExpressionAnalyzer.analyzeExpression(slotRef, new AnalyzeState(), scope, session);\n generatedExprToColumnRef.put(materializedExpression, slotRef);\n }\n }\n node.setGeneratedExprToColumnRef(generatedExprToColumnRef);\n\n return scope;\n }\n\n private List appendBinlogMetaColumns(List schema) {\n List columns = new ArrayList<>(schema);\n columns.add(new Column(BINLOG_OP_COLUMN_NAME, Type.TINYINT));\n columns.add(new Column(BINLOG_VERSION_COLUMN_NAME, Type.BIGINT));\n columns.add(new Column(BINLOG_SEQ_ID_COLUMN_NAME, Type.BIGINT));\n columns.add(new Column(BINLOG_TIMESTAMP_COLUMN_NAME, Type.BIGINT));\n return columns;\n }\n\n @Override\n public Scope visitFileTableFunction(FileTableFunctionRelation node, Scope outerScope) {\n TableName tableName = node.getResolveTableName();\n Table table = node.getTable();\n\n ImmutableList.Builder fields = ImmutableList.builder();\n ImmutableMap.Builder columns = ImmutableMap.builder();\n\n List fullSchema = table.getFullSchema();\n for (Column column : fullSchema) {\n Field field = new Field(column.getName(), column.getType(), tableName,\n new SlotRef(tableName, column.getName(), column.getName()), true);\n columns.put(field, column);\n fields.add(field);\n }\n\n node.setColumns(columns.build());\n Scope scope = new Scope(RelationId.of(node), new RelationFields(fields.build()));\n node.setScope(scope);\n return scope;\n }\n\n @Override\n public Scope visitCTE(CTERelation cteRelation, Scope context) {\n QueryRelation query = cteRelation.getCteQueryStatement().getQueryRelation();\n\n ImmutableList.Builder outputFields = ImmutableList.builder();\n for (int fieldIdx = 0; fieldIdx < query.getRelationFields().getAllFields().size(); ++fieldIdx) {\n 
Field originField = query.getRelationFields().getFieldByIndex(fieldIdx);\n outputFields.add(new Field(cteRelation.getColumnOutputNames() == null ?\n originField.getName() : cteRelation.getColumnOutputNames().get(fieldIdx),\n originField.getType(),\n cteRelation.getResolveTableName(),\n originField.getOriginExpression()));\n }\n Scope scope = new Scope(RelationId.of(cteRelation), new RelationFields(outputFields.build()));\n cteRelation.setScope(scope);\n return scope;\n }\n\n @Override\n public Scope visitJoin(JoinRelation join, Scope parentScope) {\n Scope leftScope = process(join.getLeft(), parentScope);\n Scope rightScope;\n if (join.getRight() instanceof TableFunctionRelation || join.isLateral()) {\n if (!(join.getRight() instanceof TableFunctionRelation)) {\n throw new SemanticException(\"Only support lateral join with UDTF\");\n }\n\n if (!join.getJoinOp().isInnerJoin() && !join.getJoinOp().isCrossJoin()) {\n throw new SemanticException(\"Not support lateral join except inner or cross\");\n }\n rightScope = process(join.getRight(), leftScope);\n } else {\n rightScope = process(join.getRight(), parentScope);\n }\n\n Expr joinEqual = join.getOnPredicate();\n if (join.getUsingColNames() != null) {\n Expr resolvedUsing = analyzeJoinUsing(join.getUsingColNames(), leftScope, rightScope);\n if (joinEqual == null) {\n joinEqual = resolvedUsing;\n } else {\n joinEqual = new CompoundPredicate(CompoundPredicate.Operator.AND, joinEqual, resolvedUsing);\n }\n join.setOnPredicate(joinEqual);\n }\n\n if (!join.getJoinHint().isEmpty()) {\n analyzeJoinHints(join);\n }\n\n if (joinEqual != null) {\n /*\n * sourceRelation.getRelationFields() is used to represent the column information of output.\n * To ensure the OnPredicate in semi/anti is correct, the relation needs to be re-assembled here\n * with left child and right child relationFields\n */\n analyzeExpression(joinEqual, new AnalyzeState(), new Scope(RelationId.of(join),\n 
leftScope.getRelationFields().joinWith(rightScope.getRelationFields())));\n\n AnalyzerUtils.verifyNoAggregateFunctions(joinEqual, \"JOIN\");\n AnalyzerUtils.verifyNoWindowFunctions(joinEqual, \"JOIN\");\n AnalyzerUtils.verifyNoGroupingFunctions(joinEqual, \"JOIN\");\n\n if (!joinEqual.getType().matchesType(Type.BOOLEAN) && !joinEqual.getType().matchesType(Type.NULL)) {\n throw new SemanticException(\"WHERE clause must evaluate to a boolean: actual type %s\",\n joinEqual.getType());\n }\n \n \n \n \n \n checkJoinEqual(joinEqual);\n } else {\n if (join.getJoinOp().isOuterJoin() || join.getJoinOp().isSemiAntiJoin()) {\n throw new SemanticException(join.getJoinOp() + \" requires an ON or USING clause.\");\n }\n }\n\n /*\n * New Scope needs to be constructed for select in semi/anti join\n */\n Scope scope;\n if (join.getJoinOp().isLeftSemiAntiJoin()) {\n scope = new Scope(RelationId.of(join), leftScope.getRelationFields());\n } else if (join.getJoinOp().isRightSemiAntiJoin()) {\n scope = new Scope(RelationId.of(join), rightScope.getRelationFields());\n } else if (join.getJoinOp().isLeftOuterJoin()) {\n List rightFields = getFieldsWithNullable(rightScope);\n scope = new Scope(RelationId.of(join),\n leftScope.getRelationFields().joinWith(new RelationFields(rightFields)));\n } else if (join.getJoinOp().isRightOuterJoin()) {\n List leftFields = getFieldsWithNullable(leftScope);\n scope = new Scope(RelationId.of(join),\n new RelationFields(leftFields).joinWith(rightScope.getRelationFields()));\n } else if (join.getJoinOp().isFullOuterJoin()) {\n List rightFields = getFieldsWithNullable(rightScope);\n List leftFields = getFieldsWithNullable(leftScope);\n scope = new Scope(RelationId.of(join),\n new RelationFields(leftFields).joinWith(new RelationFields(rightFields)));\n } else {\n scope = new Scope(RelationId.of(join),\n leftScope.getRelationFields().joinWith(rightScope.getRelationFields()));\n }\n join.setScope(scope);\n return scope;\n }\n\n private List 
getFieldsWithNullable(Scope scope) {\n List newFields = new ArrayList<>();\n for (Field field : scope.getRelationFields().getAllFields()) {\n Field newField = new Field(field);\n newField.setNullable(true);\n newFields.add(newField);\n }\n return newFields;\n }\n\n private Expr analyzeJoinUsing(List usingColNames, Scope left, Scope right) {\n Expr joinEqual = null;\n for (String colName : usingColNames) {\n TableName leftTableName =\n left.resolveField(new SlotRef(null, colName)).getField().getRelationAlias();\n TableName rightTableName =\n right.resolveField(new SlotRef(null, colName)).getField().getRelationAlias();\n\n \n BinaryPredicate resolvedUsing = new BinaryPredicate(BinaryType.EQ,\n new SlotRef(leftTableName, colName), new SlotRef(rightTableName, colName));\n\n if (joinEqual == null) {\n joinEqual = resolvedUsing;\n } else {\n joinEqual = new CompoundPredicate(CompoundPredicate.Operator.AND, joinEqual, resolvedUsing);\n }\n }\n return joinEqual;\n }\n\n private void analyzeJoinHints(JoinRelation join) {\n if (JoinOperator.HINT_BROADCAST.equals(join.getJoinHint())) {\n if (join.getJoinOp() == JoinOperator.RIGHT_OUTER_JOIN\n || join.getJoinOp() == JoinOperator.FULL_OUTER_JOIN\n || join.getJoinOp() == JoinOperator.RIGHT_SEMI_JOIN\n || join.getJoinOp() == JoinOperator.RIGHT_ANTI_JOIN) {\n throw new SemanticException(join.getJoinOp().toString() + \" does not support BROADCAST.\");\n }\n } else if (JoinOperator.HINT_SHUFFLE.equals(join.getJoinHint())) {\n if (join.getJoinOp() == JoinOperator.CROSS_JOIN ||\n (join.getJoinOp() == JoinOperator.INNER_JOIN && join.getOnPredicate() == null)) {\n throw new SemanticException(\"CROSS JOIN does not support SHUFFLE.\");\n }\n } else if (JoinOperator.HINT_BUCKET.equals(join.getJoinHint()) ||\n JoinOperator.HINT_COLOCATE.equals(join.getJoinHint())) {\n if (join.getJoinOp() == JoinOperator.CROSS_JOIN) {\n throw new SemanticException(\"CROSS JOIN does not support \" + join.getJoinHint() + \".\");\n }\n } else if 
(!JoinOperator.HINT_UNREORDER.equals(join.getJoinHint())) {\n throw new SemanticException(\"JOIN hint not recognized: \" + join.getJoinHint());\n }\n }\n\n @Override\n public Scope visitSubquery(SubqueryRelation subquery, Scope context) {\n if (subquery.getResolveTableName() != null && subquery.getResolveTableName().getTbl() == null) {\n ErrorReport.reportSemanticException(ErrorCode.ERR_DERIVED_MUST_HAVE_ALIAS);\n }\n\n Scope queryOutputScope = process(subquery.getQueryStatement(), context);\n\n ImmutableList.Builder outputFields = ImmutableList.builder();\n\n if (subquery.getExplicitColumnNames() != null) {\n if (queryOutputScope.getRelationFields().getAllVisibleFields().size()\n != subquery.getExplicitColumnNames().size()) {\n throw new SemanticException(\"In definition of view, derived table or common table expression, \" +\n \"SELECT list and column names list have different column counts\");\n }\n }\n\n int explicitColumnNameIdx = 0;\n for (Field field : queryOutputScope.getRelationFields().getAllFields()) {\n String fieldResolveName;\n if (subquery.getExplicitColumnNames() != null && field.isVisible()) {\n fieldResolveName = subquery.getExplicitColumnNames().get(explicitColumnNameIdx);\n explicitColumnNameIdx++;\n } else {\n fieldResolveName = field.getName();\n }\n\n outputFields.add(new Field(fieldResolveName, field.getType(), subquery.getResolveTableName(),\n field.getOriginExpression()));\n\n }\n Scope scope = new Scope(RelationId.of(subquery), new RelationFields(outputFields.build()));\n\n analyzeOrderByClause(subquery, scope);\n subquery.setScope(scope);\n return scope;\n }\n\n private void analyzeOrderByClause(QueryRelation query, Scope scope) {\n if (!query.hasOrderByClause()) {\n return;\n }\n List outputExpressions = query.getOutputExpression();\n for (OrderByElement orderByElement : query.getOrderBy()) {\n Expr expression = orderByElement.getExpr();\n AnalyzerUtils.verifyNoGroupingFunctions(expression, \"ORDER BY\");\n\n if (expression instanceof 
IntLiteral) {\n long ordinal = ((IntLiteral) expression).getLongValue();\n if (ordinal < 1 || ordinal > outputExpressions.size()) {\n throw new SemanticException(\"ORDER BY position %s is not in select list\", ordinal);\n }\n expression = new FieldReference((int) ordinal - 1, null);\n }\n\n analyzeExpression(expression, new AnalyzeState(), scope);\n\n if (!expression.getType().canOrderBy()) {\n throw new SemanticException(Type.NOT_SUPPORT_ORDER_ERROR_MSG);\n }\n\n orderByElement.setExpr(expression);\n }\n }\n\n @Override\n public Scope visitView(ViewRelation node, Scope scope) {\n Scope queryOutputScope;\n try {\n queryOutputScope = process(node.getQueryStatement(), scope);\n } catch (SemanticException e) {\n throw new SemanticException(\"View \" + node.getName() + \" references invalid table(s) or column(s) or \" +\n \"function(s) or definer/invoker of view lack rights to use them\");\n }\n View view = node.getView();\n List fields = Lists.newArrayList();\n for (int i = 0; i < view.getBaseSchema().size(); ++i) {\n Column column = view.getBaseSchema().get(i);\n Field originField = queryOutputScope.getRelationFields().getFieldByIndex(i);\n \n \n \n \n \n \n \n \n Field field = new Field(column.getName(), originField.getType(), node.getResolveTableName(),\n originField.getOriginExpression());\n fields.add(field);\n }\n\n if (session.getDumpInfo() != null) {\n String dbName = node.getName().getDb();\n session.getDumpInfo().addView(dbName, view);\n }\n\n Scope viewScope = new Scope(RelationId.of(node), new RelationFields(fields));\n node.setScope(viewScope);\n return viewScope;\n }\n\n @Override\n public Scope visitUnion(UnionRelation node, Scope context) {\n return analyzeSetOperation(node, context);\n }\n\n @Override\n public Scope visitExcept(ExceptRelation node, Scope context) {\n if (node.getQualifier().equals(SetQualifier.ALL)) {\n throw new SemanticException(\"EXCEPT does not support ALL qualifier\");\n }\n return analyzeSetOperation(node, context);\n }\n\n 
@Override\n public Scope visitIntersect(IntersectRelation node, Scope context) {\n if (node.getQualifier().equals(SetQualifier.ALL)) {\n throw new SemanticException(\"INTERSECT does not support ALL qualifier\");\n }\n return analyzeSetOperation(node, context);\n }\n\n private Scope analyzeSetOperation(SetOperationRelation node, Scope context) {\n List setOpRelations = node.getRelations();\n\n Scope leftChildScope = process(setOpRelations.get(0), context);\n Type[] outputTypes = leftChildScope.getRelationFields().getAllFields()\n .stream().map(Field::getType).toArray(Type[]::new);\n List nullables = leftChildScope.getRelationFields().getAllFields()\n .stream().map(field -> field.isNullable()).collect(Collectors.toList());\n int outputSize = leftChildScope.getRelationFields().size();\n\n for (int i = 1; i < setOpRelations.size(); ++i) {\n Scope relation = process(setOpRelations.get(i), context);\n if (relation.getRelationFields().size() != outputSize) {\n throw new SemanticException(\"Operands have unequal number of columns\");\n }\n for (int fieldIdx = 0; fieldIdx < relation.getRelationFields().size(); ++fieldIdx) {\n Field field = relation.getRelationFields().getAllFields().get(fieldIdx);\n Type fieldType = field.getType();\n if (fieldType.isOnlyMetricType() &&\n !((node instanceof UnionRelation) &&\n (node.getQualifier().equals(SetQualifier.ALL)))) {\n throw new SemanticException(\"%s not support set operation\", fieldType);\n }\n\n Type commonType = TypeManager.getCommonSuperType(outputTypes[fieldIdx],\n relation.getRelationFields().getFieldByIndex(fieldIdx).getType());\n if (!commonType.isValid()) {\n throw new SemanticException(String.format(\"Incompatible return types '%s' and '%s'\",\n outputTypes[fieldIdx],\n relation.getRelationFields().getFieldByIndex(fieldIdx).getType()));\n }\n outputTypes[fieldIdx] = commonType;\n nullables.set(fieldIdx, nullables.get(fieldIdx) | field.isNullable());\n }\n }\n\n ArrayList fields = new ArrayList<>();\n for (int fieldIdx 
= 0; fieldIdx < outputSize; ++fieldIdx) {\n Field oldField = leftChildScope.getRelationFields().getFieldByIndex(fieldIdx);\n fields.add(new Field(oldField.getName(), outputTypes[fieldIdx], oldField.getRelationAlias(),\n oldField.getOriginExpression(), true, nullables.get(fieldIdx)));\n }\n\n Scope setOpOutputScope = new Scope(RelationId.of(node), new RelationFields(fields));\n\n analyzeOrderByClause(node, setOpOutputScope);\n node.setScope(setOpOutputScope);\n return setOpOutputScope;\n }\n\n @Override\n public Scope visitValues(ValuesRelation node, Scope scope) {\n AnalyzeState analyzeState = new AnalyzeState();\n\n List firstRow = node.getRow(0);\n firstRow.forEach(e -> analyzeExpression(e, analyzeState, scope));\n List> rows = node.getRows();\n Type[] outputTypes = firstRow.stream().map(Expr::getType).toArray(Type[]::new);\n for (List row : rows) {\n if (row.size() != firstRow.size()) {\n throw new SemanticException(\"Values have unequal number of columns\");\n }\n for (int fieldIdx = 0; fieldIdx < row.size(); ++fieldIdx) {\n analyzeExpression(row.get(fieldIdx), analyzeState, scope);\n Type commonType =\n TypeManager.getCommonSuperType(outputTypes[fieldIdx], row.get(fieldIdx).getType());\n if (!commonType.isValid()) {\n throw new SemanticException(String.format(\"Incompatible return types '%s' and '%s'\",\n outputTypes[fieldIdx], row.get(fieldIdx).getType()));\n }\n outputTypes[fieldIdx] = commonType;\n }\n }\n List fields = new ArrayList<>();\n for (int fieldIdx = 0; fieldIdx < outputTypes.length; ++fieldIdx) {\n fields.add(new Field(node.getColumnOutputNames().get(fieldIdx), outputTypes[fieldIdx],\n node.getResolveTableName(),\n rows.get(0).get(fieldIdx)));\n }\n\n Scope valuesScope = new Scope(RelationId.of(node), new RelationFields(fields));\n node.setScope(valuesScope);\n return valuesScope;\n }\n\n @Override\n public Scope visitTableFunction(TableFunctionRelation node, Scope scope) {\n AnalyzeState analyzeState = new AnalyzeState();\n List args = 
node.getFunctionParams().exprs();\n Type[] argTypes = new Type[args.size()];\n for (int i = 0; i < args.size(); ++i) {\n analyzeExpression(args.get(i), analyzeState, scope);\n argTypes[i] = args.get(i).getType();\n\n AnalyzerUtils.verifyNoAggregateFunctions(args.get(i), \"Table Function\");\n AnalyzerUtils.verifyNoWindowFunctions(args.get(i), \"Table Function\");\n AnalyzerUtils.verifyNoGroupingFunctions(args.get(i), \"Table Function\");\n }\n\n Function fn = Expr.getBuiltinFunction(node.getFunctionName().getFunction(), argTypes,\n Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);\n\n if (fn == null) {\n fn = AnalyzerUtils.getUdfFunction(session, node.getFunctionName(), argTypes);\n }\n\n if (fn == null) {\n throw new SemanticException(\"Unknown table function '%s(%s)'\", node.getFunctionName().getFunction(),\n Arrays.stream(argTypes).map(Object::toString).collect(Collectors.joining(\",\")));\n }\n\n if (!(fn instanceof TableFunction)) {\n throw new SemanticException(\"'%s(%s)' is not table function\", node.getFunctionName().getFunction(),\n Arrays.stream(argTypes).map(Object::toString).collect(Collectors.joining(\",\")));\n }\n\n TableFunction tableFunction = (TableFunction) fn;\n node.setTableFunction(tableFunction);\n node.setChildExpressions(node.getFunctionParams().exprs());\n\n if (node.getColumnOutputNames() == null) {\n if (tableFunction.getFunctionName().getFunction().equals(\"unnest\")) {\n \n \n List columnNames = new ArrayList<>();\n for (int i = 0; i < tableFunction.getTableFnReturnTypes().size(); ++i) {\n columnNames.add(\"unnest\");\n }\n node.setColumnOutputNames(columnNames);\n } else {\n node.setColumnOutputNames(new ArrayList<>(tableFunction.getDefaultColumnNames()));\n }\n } else {\n if (node.getColumnOutputNames().size() != tableFunction.getTableFnReturnTypes().size()) {\n throw new SemanticException(\"table %s has %s columns available but %s columns specified\",\n node.getAlias().getTbl(),\n tableFunction.getTableFnReturnTypes().size(),\n 
node.getColumnOutputNames().size());\n }\n }\n\n ImmutableList.Builder fields = ImmutableList.builder();\n for (int i = 0; i < tableFunction.getTableFnReturnTypes().size(); ++i) {\n String colName = node.getColumnOutputNames().get(i);\n\n Field field = new Field(colName,\n tableFunction.getTableFnReturnTypes().get(i),\n node.getResolveTableName(),\n new SlotRef(node.getResolveTableName(), colName, colName));\n fields.add(field);\n }\n\n Scope outputScope = new Scope(RelationId.of(node), new RelationFields(fields.build()));\n node.setScope(outputScope);\n return outputScope;\n }\n\n @Override\n public Scope visitNormalizedTableFunction(NormalizedTableFunctionRelation node, Scope scope) {\n Scope ignored = visitJoin(node, scope);\n \n node.setScope(node.getRight().getScope());\n return node.getScope();\n }\n\n }" }, { "comment": "We just need the fix Stuart introduced which looks for parent first artifact in \u02cbapplication.properties` file. But most of the rest can be simplified", "method_body": "private QuarkusDevModeLauncher newLauncher() throws Exception {\n final Project project = getProject();\n\n String java = null;\n\n if (GradleVersion.current().compareTo(GradleVersion.version(\"6.7\")) >= 0) {\n JavaToolchainService toolChainService = project.getExtensions().getByType(JavaToolchainService.class);\n JavaToolchainSpec toolchainSpec = project.getExtensions().getByType(JavaPluginExtension.class).getToolchain();\n Provider javaLauncher = toolChainService.launcherFor(toolchainSpec);\n if (javaLauncher.isPresent()) {\n java = javaLauncher.get().getExecutablePath().getAsFile().getAbsolutePath();\n }\n }\n GradleDevModeLauncher.Builder builder = GradleDevModeLauncher.builder(getLogger(), java)\n .preventnoverify(isPreventnoverify())\n .projectDir(project.getProjectDir())\n .buildDir(getBuildDir())\n .outputDir(getBuildDir())\n .debug(System.getProperty(\"debug\"))\n .debugHost(System.getProperty(\"debugHost\"))\n .debugPort(System.getProperty(\"debugPort\"))\n 
.suspend(System.getProperty(\"suspend\"));\n if (System.getProperty(IO_QUARKUS_DEVMODE_ARGS) == null) {\n builder.jvmArgs(\"-Dquarkus.test.basic-console=true\")\n .jvmArgs(\"-Dio.quarkus.force-color-support=true\");\n }\n\n if (getJvmArgs() != null) {\n builder.jvmArgs(getJvmArgs());\n }\n\n for (Map.Entry e : project.getProperties().entrySet()) {\n if (e.getValue() instanceof String) {\n builder.buildSystemProperty(e.getKey(), e.getValue().toString());\n }\n }\n\n \n builder.applicationName(project.getName());\n if (project.getVersion() != null) {\n builder.applicationVersion(project.getVersion().toString());\n }\n\n builder.sourceEncoding(getSourceEncoding());\n\n final ApplicationModel appModel = extension().getApplicationModel(LaunchMode.DEVELOPMENT);\n final Set projectDependencies = new HashSet<>();\n\n addSelfWithLocalDeps(project, builder, new HashSet<>(), projectDependencies, true);\n\n \n Set resourceDirs = new HashSet<>();\n for (SourceDir resourceDir : appModel.getApplicationModule().getMainSources().getResourceDirs()) {\n resourceDirs.add(resourceDir.getOutputDir());\n }\n\n Set configuredParentFirst = QuarkusBootstrap.createClassLoadingConfig(PathsCollection.from(resourceDirs),\n QuarkusBootstrap.Mode.DEV, Collections.emptyList()).parentFirstArtifacts;\n\n Set parentFirstArtifactKeys = new HashSet<>(configuredParentFirst);\n for (io.quarkus.maven.dependency.ResolvedDependency resolvedDependency : appModel.getDependencies()) {\n for (Path path : resolvedDependency.getResolvedPaths()) {\n if (Files.exists(path) && path.getFileName().endsWith(ArtifactCoords.TYPE_JAR)) {\n extractParentFirstArtifactKey(parentFirstArtifactKeys, path.toFile());\n }\n }\n }\n\n for (io.quarkus.maven.dependency.ResolvedDependency artifact : appModel.getDependencies()) {\n \n if (artifact.getGroupId().equals(\"io.quarkus\")\n && artifact.getArtifactId().equals(\"quarkus-ide-launcher\")) {\n continue;\n }\n GACT key = new GACT(artifact.getGroupId(), artifact.getArtifactId());\n 
if (!projectDependencies\n .contains(key)) {\n artifact.getResolvedPaths().forEach(p -> {\n File file = p.toFile();\n if (file.exists()) {\n if (artifact.getGroupId().equals(\"io.quarkus\")\n && artifact.getArtifactId().equals(\"quarkus-class-change-agent\")) {\n builder.jvmArgs(\"-javaagent:\" + file.getAbsolutePath());\n } else {\n if (parentFirstArtifactKeys.contains(key) && filesIncludedInClasspath.add(file)) {\n getProject().getLogger().debug(\"Adding dependency {}\", file);\n builder.classpathEntry(file);\n }\n }\n }\n });\n }\n }\n\n JavaPluginConvention javaPluginConvention = project.getConvention().findPlugin(JavaPluginConvention.class);\n if (javaPluginConvention != null) {\n builder.sourceJavaVersion(javaPluginConvention.getSourceCompatibility().toString());\n builder.targetJavaVersion(javaPluginConvention.getTargetCompatibility().toString());\n }\n\n if (getCompilerArgs().isEmpty() && shouldPropagateJavaCompilerArgs) {\n getJavaCompileTask()\n .map(compileTask -> compileTask.getOptions().getCompilerArgs())\n .ifPresent(builder::compilerOptions);\n } else {\n builder.compilerOptions(getCompilerArgs());\n }\n\n modifyDevModeContext(builder);\n\n final Path serializedModel = ToolingUtils.serializeAppModel(appModel, this, false);\n serializedModel.toFile().deleteOnExit();\n builder.jvmArgs(\"-D\" + BootstrapConstants.SERIALIZED_APP_MODEL + \"=\" + serializedModel.toAbsolutePath());\n\n final ApplicationModel testAppModel = extension().getApplicationModel(LaunchMode.TEST);\n final Path serializedTestModel = ToolingUtils.serializeAppModel(testAppModel, this, true);\n serializedTestModel.toFile().deleteOnExit();\n builder.jvmArgs(\"-D\" + BootstrapConstants.SERIALIZED_TEST_APP_MODEL + \"=\" + serializedTestModel.toAbsolutePath());\n\n extension().outputDirectory().mkdirs();\n\n if (!args.isEmpty()) {\n builder.applicationArgs(String.join(\" \", args));\n }\n\n return builder.build();\n }", "target_code": "if (parentFirstArtifactKeys.contains(key) && 
filesIncludedInClasspath.add(file)) {", "method_body_after": "private QuarkusDevModeLauncher newLauncher() throws Exception {\n final Project project = getProject();\n\n String java = null;\n\n if (GradleVersion.current().compareTo(GradleVersion.version(\"6.7\")) >= 0) {\n JavaToolchainService toolChainService = project.getExtensions().getByType(JavaToolchainService.class);\n JavaToolchainSpec toolchainSpec = project.getExtensions().getByType(JavaPluginExtension.class).getToolchain();\n Provider javaLauncher = toolChainService.launcherFor(toolchainSpec);\n if (javaLauncher.isPresent()) {\n java = javaLauncher.get().getExecutablePath().getAsFile().getAbsolutePath();\n }\n }\n GradleDevModeLauncher.Builder builder = GradleDevModeLauncher.builder(getLogger(), java)\n .preventnoverify(isPreventnoverify())\n .projectDir(project.getProjectDir())\n .buildDir(getBuildDir())\n .outputDir(getBuildDir())\n .debug(System.getProperty(\"debug\"))\n .debugHost(System.getProperty(\"debugHost\"))\n .debugPort(System.getProperty(\"debugPort\"))\n .suspend(System.getProperty(\"suspend\"));\n if (System.getProperty(IO_QUARKUS_DEVMODE_ARGS) == null) {\n builder.jvmArgs(\"-Dquarkus.test.basic-console=true\")\n .jvmArgs(\"-Dio.quarkus.force-color-support=true\");\n }\n\n if (getJvmArgs() != null) {\n builder.jvmArgs(getJvmArgs());\n }\n\n for (Map.Entry e : project.getProperties().entrySet()) {\n if (e.getValue() instanceof String) {\n builder.buildSystemProperty(e.getKey(), e.getValue().toString());\n }\n }\n\n \n builder.applicationName(project.getName());\n if (project.getVersion() != null) {\n builder.applicationVersion(project.getVersion().toString());\n }\n\n builder.sourceEncoding(getSourceEncoding());\n\n final ApplicationModel appModel = extension().getApplicationModel(LaunchMode.DEVELOPMENT);\n final Set projectDependencies = new HashSet<>();\n for (ResolvedDependency localDep : DependenciesFilter.getReloadableModules(appModel)) {\n addLocalProject(localDep, builder, 
projectDependencies, appModel.getAppArtifact().getWorkspaceModule().getId()\n .equals(localDep.getWorkspaceModule().getId()));\n }\n\n addQuarkusDevModeDeps(builder);\n\n \n Set resourceDirs = new HashSet<>();\n for (SourceDir resourceDir : appModel.getApplicationModule().getMainSources().getResourceDirs()) {\n resourceDirs.add(resourceDir.getOutputDir());\n }\n\n Set configuredParentFirst = QuarkusBootstrap.createClassLoadingConfig(PathsCollection.from(resourceDirs),\n QuarkusBootstrap.Mode.DEV, Collections.emptyList()).parentFirstArtifacts;\n\n Set parentFirstArtifactKeys = new HashSet<>(configuredParentFirst);\n parentFirstArtifactKeys.addAll(appModel.getParentFirst());\n\n for (io.quarkus.maven.dependency.ResolvedDependency artifact : appModel.getDependencies()) {\n if (!projectDependencies.contains(artifact.getKey())) {\n artifact.getResolvedPaths().forEach(p -> {\n File file = p.toFile();\n if (file.exists() && parentFirstArtifactKeys.contains(artifact.getKey())\n && filesIncludedInClasspath.add(file)) {\n getProject().getLogger().debug(\"Adding dependency {}\", file);\n builder.classpathEntry(file);\n }\n });\n }\n }\n\n JavaPluginConvention javaPluginConvention = project.getConvention().findPlugin(JavaPluginConvention.class);\n if (javaPluginConvention != null) {\n builder.sourceJavaVersion(javaPluginConvention.getSourceCompatibility().toString());\n builder.targetJavaVersion(javaPluginConvention.getTargetCompatibility().toString());\n }\n\n if (getCompilerArgs().isEmpty() && shouldPropagateJavaCompilerArgs) {\n getJavaCompileTask()\n .map(compileTask -> compileTask.getOptions().getCompilerArgs())\n .ifPresent(builder::compilerOptions);\n } else {\n builder.compilerOptions(getCompilerArgs());\n }\n\n modifyDevModeContext(builder);\n\n final Path serializedModel = ToolingUtils.serializeAppModel(appModel, this, false);\n serializedModel.toFile().deleteOnExit();\n builder.jvmArgs(\"-D\" + BootstrapConstants.SERIALIZED_APP_MODEL + \"=\" + 
serializedModel.toAbsolutePath());\n\n final ApplicationModel testAppModel = extension().getApplicationModel(LaunchMode.TEST);\n final Path serializedTestModel = ToolingUtils.serializeAppModel(testAppModel, this, true);\n serializedTestModel.toFile().deleteOnExit();\n builder.jvmArgs(\"-D\" + BootstrapConstants.SERIALIZED_TEST_APP_MODEL + \"=\" + serializedTestModel.toAbsolutePath());\n\n extension().outputDirectory().mkdirs();\n\n if (!args.isEmpty()) {\n builder.applicationArgs(String.join(\" \", args));\n }\n\n return builder.build();\n }", "context_before": "class QuarkusDev extends QuarkusTask {\n\n public static final String IO_QUARKUS_DEVMODE_ARGS = \"io.quarkus.devmode-args\";\n\n private static final String EXT_PROPERTIES_PATH = \"META-INF/quarkus-extension.properties\";\n private Set filesIncludedInClasspath = new HashSet<>();\n\n protected Configuration quarkusDevConfiguration;\n\n private File buildDir;\n\n private String sourceDir;\n\n private String workingDir;\n\n private List jvmArgs;\n\n private boolean preventnoverify = false;\n\n private List args = new LinkedList();\n\n private List compilerArgs = new LinkedList<>();\n\n private boolean shouldPropagateJavaCompilerArgs = true;\n\n @Inject\n public QuarkusDev() {\n super(\"Development mode: enables hot deployment with background compilation\");\n }\n\n public QuarkusDev(String name) {\n super(name);\n }\n\n @CompileClasspath\n public Configuration getQuarkusDevConfiguration() {\n return this.quarkusDevConfiguration;\n }\n\n public void setQuarkusDevConfiguration(Configuration quarkusDevConfiguration) {\n this.quarkusDevConfiguration = quarkusDevConfiguration;\n }\n\n @InputDirectory\n @Optional\n public File getBuildDir() {\n if (buildDir == null) {\n buildDir = getProject().getBuildDir();\n }\n return buildDir;\n }\n\n public void setBuildDir(File buildDir) {\n this.buildDir = buildDir;\n }\n\n @Optional\n @InputDirectory\n public File getSourceDir() {\n if (sourceDir == null) {\n return 
extension().sourceDir();\n } else {\n return new File(sourceDir);\n }\n }\n\n @Option(description = \"Set source directory\", option = \"source-dir\")\n public void setSourceDir(String sourceDir) {\n this.sourceDir = sourceDir;\n }\n\n @Input\n \n public String getWorkingDir() {\n if (workingDir == null) {\n return extension().workingDir().toString();\n } else {\n return workingDir;\n }\n }\n\n @Option(description = \"Set working directory\", option = \"working-dir\")\n public void setWorkingDir(String workingDir) {\n this.workingDir = workingDir;\n }\n\n @Optional\n @Input\n public List getJvmArgs() {\n return jvmArgs;\n }\n\n @Option(description = \"Set JVM arguments\", option = \"jvm-args\")\n public void setJvmArgs(List jvmArgs) {\n this.jvmArgs = jvmArgs;\n }\n\n @Optional\n @Input\n public List getArgs() {\n return args;\n }\n\n public void setArgs(List args) {\n this.args = args;\n }\n\n @Option(description = \"Set application arguments\", option = \"quarkus-args\")\n public void setArgsString(String argsString) {\n this.setArgs(Arrays.asList(Commandline.translateCommandline(argsString)));\n }\n\n @Input\n public boolean isPreventnoverify() {\n return preventnoverify;\n }\n\n @Option(description = \"value is intended to be set to true when some generated bytecode is\" +\n \" erroneous causing the JVM to crash when the verify:none option is set \" +\n \"(which is on by default)\", option = \"prevent-noverify\")\n public void setPreventnoverify(boolean preventnoverify) {\n this.preventnoverify = preventnoverify;\n }\n\n @Optional\n @Input\n public List getCompilerArgs() {\n return compilerArgs;\n }\n\n @Option(description = \"Additional parameters to pass to javac when recompiling changed source files\", option = \"compiler-args\")\n public void setCompilerArgs(List compilerArgs) {\n this.compilerArgs = compilerArgs;\n }\n\n @TaskAction\n public void startDev() {\n if (!getSourceDir().isDirectory()) {\n throw new GradleException(\"The `src/main/java` directory 
is required, please create it.\");\n }\n\n if (!extension().outputDirectory().isDirectory()) {\n throw new GradleException(\"The project has no output yet, \" +\n \"this should not happen as build should have been executed first. \" +\n \"Does the project have any source files?\");\n }\n\n try {\n QuarkusDevModeLauncher runner = newLauncher();\n String outputFile = System.getProperty(IO_QUARKUS_DEVMODE_ARGS);\n if (outputFile == null) {\n getProject().exec(action -> {\n action.commandLine(runner.args()).workingDir(getWorkingDir());\n action.setStandardInput(System.in)\n .setErrorOutput(System.out)\n .setStandardOutput(System.out);\n });\n } else {\n try (BufferedWriter is = Files.newBufferedWriter(Paths.get(outputFile))) {\n for (String i : runner.args()) {\n is.write(i);\n is.newLine();\n }\n }\n }\n\n } catch (Exception e) {\n throw new GradleException(\"Failed to run\", e);\n }\n }\n\n \n\n protected void modifyDevModeContext(GradleDevModeLauncher.Builder builder) {\n\n }\n\n private void addSelfWithLocalDeps(Project project, GradleDevModeLauncher.Builder builder, Set visited,\n Set addedDeps, boolean root) {\n if (!visited.add(project.getPath())) {\n return;\n }\n final Configuration compileCp = project.getConfigurations().findByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME);\n if (compileCp != null) {\n compileCp.getIncoming().getDependencies().forEach(d -> {\n if (d instanceof ProjectDependency) {\n addSelfWithLocalDeps(((ProjectDependency) d).getDependencyProject(), builder, visited, addedDeps, false);\n }\n });\n }\n\n addLocalProject(project, builder, addedDeps, root);\n }\n\n private void extractParentFirstArtifactKey(Set parentFirstArtifactKey, File archive) throws Exception {\n try (ZipFile jar = new ZipFile(archive)) {\n ZipEntry entry = jar.getEntry(EXT_PROPERTIES_PATH);\n if (entry != null) {\n Properties p = new Properties();\n try (InputStream inputStream = jar.getInputStream(entry)) {\n p.load(inputStream);\n String parentFirst = 
p.getProperty(ApplicationModel.PARENT_FIRST_ARTIFACTS);\n if (parentFirst != null) {\n String[] artifacts = parentFirst.split(\",\");\n for (String artifact : artifacts) {\n parentFirstArtifactKey.add(new GACT(artifact.split(\":\")));\n }\n }\n }\n }\n }\n }\n\n private void addLocalProject(Project project, GradleDevModeLauncher.Builder builder, Set addeDeps, boolean root) {\n final GACT key = new GACT(project.getGroup().toString(), project.getName(), \"\", \"jar\");\n if (addeDeps.contains(key)) {\n return;\n }\n final JavaPluginConvention javaConvention = project.getConvention().findPlugin(JavaPluginConvention.class);\n if (javaConvention == null) {\n return;\n }\n\n SourceSetContainer sourceSets = javaConvention.getSourceSets();\n SourceSet mainSourceSet = sourceSets.findByName(SourceSet.MAIN_SOURCE_SET_NAME);\n if (mainSourceSet == null) {\n return;\n }\n Set sourcePaths = new LinkedHashSet<>();\n Set sourceParentPaths = new LinkedHashSet<>();\n\n for (File sourceDir : mainSourceSet.getAllJava().getSrcDirs()) {\n if (sourceDir.exists()) {\n sourcePaths.add(sourceDir.toPath().toAbsolutePath());\n sourceParentPaths.add(sourceDir.toPath().getParent().toAbsolutePath());\n }\n }\n final Set resourcesSrcDirs = new LinkedHashSet<>();\n for (File resourcesSrcDir : mainSourceSet.getResources().getSourceDirectories().getFiles()) {\n resourcesSrcDirs.add(resourcesSrcDir.toPath().toAbsolutePath());\n }\n \n final File resourcesOutputDir = mainSourceSet.getOutput().getResourcesDir();\n\n if (sourcePaths.isEmpty() && !resourcesOutputDir.exists()) {\n return;\n }\n\n String classesDir = QuarkusGradleUtils.getClassesDir(mainSourceSet, project.getBuildDir(), false);\n if (classesDir == null) {\n return;\n } else {\n File classesDirFile = new File(classesDir);\n if (!classesDirFile.exists()) {\n return;\n }\n }\n\n final String resourcesOutputPath;\n if (resourcesOutputDir.exists()) {\n resourcesOutputPath = resourcesOutputDir.getAbsolutePath();\n if 
(!Files.exists(Paths.get(classesDir))) {\n \n classesDir = resourcesOutputPath;\n }\n } else {\n \n resourcesOutputPath = classesDir;\n }\n\n DevModeContext.ModuleInfo.Builder moduleBuilder = new DevModeContext.ModuleInfo.Builder()\n .setArtifactKey(key)\n .setName(project.getName())\n .setProjectDirectory(project.getProjectDir().getAbsolutePath())\n .setSourcePaths(PathList.from(sourcePaths))\n .setClassesPath(classesDir)\n .setResourcePaths(PathList.from(resourcesSrcDirs))\n .setResourcesOutputPath(resourcesOutputPath)\n .setSourceParents(PathList.from(sourceParentPaths))\n .setPreBuildOutputDir(project.getBuildDir().toPath().resolve(\"generated-sources\").toAbsolutePath().toString())\n .setTargetDir(project.getBuildDir().toString());\n\n SourceSet testSourceSet = sourceSets.findByName(SourceSet.TEST_SOURCE_SET_NAME);\n if (testSourceSet != null) {\n\n Set testSourcePaths = new LinkedHashSet<>();\n Set testSourceParentPaths = new LinkedHashSet<>();\n\n for (File sourceDir : testSourceSet.getAllJava().getSrcDirs()) {\n if (sourceDir.exists()) {\n testSourcePaths.add(sourceDir.toPath().toAbsolutePath());\n testSourceParentPaths.add(sourceDir.toPath().getParent().toAbsolutePath());\n }\n }\n final Set testResourcesSrcDirs = new LinkedHashSet<>();\n for (File testResourcesSrcDir : testSourceSet.getResources().getSourceDirectories().getFiles()) {\n testResourcesSrcDirs.add(testResourcesSrcDir.toPath().toAbsolutePath());\n }\n \n final File testResourcesOutputDir = testSourceSet.getOutput().getResourcesDir();\n\n if (!testSourcePaths.isEmpty() || (testResourcesOutputDir != null && testResourcesOutputDir.exists())) {\n String testClassesDir = QuarkusGradleUtils.getClassesDir(testSourceSet, project.getBuildDir(), true);\n if (testClassesDir != null) {\n File testClassesDirFile = new File(testClassesDir);\n if (testClassesDirFile.exists()) {\n final String testResourcesOutputPath;\n if (testResourcesOutputDir.exists()) {\n testResourcesOutputPath = 
testResourcesOutputDir.getAbsolutePath();\n if (!Files.exists(Paths.get(testClassesDir))) {\n \n testClassesDir = testResourcesOutputPath;\n }\n } else {\n \n testResourcesOutputPath = testClassesDir;\n }\n moduleBuilder.setTestSourcePaths(PathList.from(testSourcePaths))\n .setTestClassesPath(testClassesDir)\n .setTestResourcePaths(PathList.from(testResourcesSrcDirs))\n .setTestResourcesOutputPath(testResourcesOutputPath);\n }\n }\n }\n }\n DevModeContext.ModuleInfo wsModuleInfo = moduleBuilder.build();\n\n if (root) {\n builder.mainModule(wsModuleInfo);\n } else {\n builder.dependency(wsModuleInfo);\n }\n addeDeps.add(key);\n }\n\n private String getSourceEncoding() {\n return getJavaCompileTask()\n .map(javaCompile -> javaCompile.getOptions().getEncoding())\n .orElse(null);\n }\n\n private java.util.Optional getJavaCompileTask() {\n return java.util.Optional\n .ofNullable((JavaCompile) getProject().getTasks().getByName(JavaPlugin.COMPILE_JAVA_TASK_NAME));\n }\n\n public void shouldPropagateJavaCompilerArgs(boolean shouldPropagateJavaCompilerArgs) {\n this.shouldPropagateJavaCompilerArgs = shouldPropagateJavaCompilerArgs;\n }\n}", "context_after": "class QuarkusDev extends QuarkusTask {\n\n public static final String IO_QUARKUS_DEVMODE_ARGS = \"io.quarkus.devmode-args\";\n\n private static final String EXT_PROPERTIES_PATH = \"META-INF/quarkus-extension.properties\";\n private Set filesIncludedInClasspath = new HashSet<>();\n\n protected Configuration quarkusDevConfiguration;\n\n private File buildDir;\n\n private String sourceDir;\n\n private String workingDir;\n\n private List jvmArgs;\n\n private boolean preventnoverify = false;\n\n private List args = new LinkedList();\n\n private List compilerArgs = new LinkedList<>();\n\n private boolean shouldPropagateJavaCompilerArgs = true;\n\n @Inject\n public QuarkusDev() {\n super(\"Development mode: enables hot deployment with background compilation\");\n }\n\n public QuarkusDev(String name) {\n super(name);\n }\n\n 
@CompileClasspath\n public Configuration getQuarkusDevConfiguration() {\n return this.quarkusDevConfiguration;\n }\n\n public void setQuarkusDevConfiguration(Configuration quarkusDevConfiguration) {\n this.quarkusDevConfiguration = quarkusDevConfiguration;\n }\n\n @InputDirectory\n @Optional\n public File getBuildDir() {\n if (buildDir == null) {\n buildDir = getProject().getBuildDir();\n }\n return buildDir;\n }\n\n public void setBuildDir(File buildDir) {\n this.buildDir = buildDir;\n }\n\n @Optional\n @InputDirectory\n public File getSourceDir() {\n if (sourceDir == null) {\n return extension().sourceDir();\n } else {\n return new File(sourceDir);\n }\n }\n\n @Option(description = \"Set source directory\", option = \"source-dir\")\n public void setSourceDir(String sourceDir) {\n this.sourceDir = sourceDir;\n }\n\n @Input\n \n public String getWorkingDir() {\n if (workingDir == null) {\n return extension().workingDir().toString();\n } else {\n return workingDir;\n }\n }\n\n @Option(description = \"Set working directory\", option = \"working-dir\")\n public void setWorkingDir(String workingDir) {\n this.workingDir = workingDir;\n }\n\n @Optional\n @Input\n public List getJvmArgs() {\n return jvmArgs;\n }\n\n @Option(description = \"Set JVM arguments\", option = \"jvm-args\")\n public void setJvmArgs(List jvmArgs) {\n this.jvmArgs = jvmArgs;\n }\n\n @Optional\n @Input\n public List getArgs() {\n return args;\n }\n\n public void setArgs(List args) {\n this.args = args;\n }\n\n @Option(description = \"Set application arguments\", option = \"quarkus-args\")\n public void setArgsString(String argsString) {\n this.setArgs(Arrays.asList(Commandline.translateCommandline(argsString)));\n }\n\n @Input\n public boolean isPreventnoverify() {\n return preventnoverify;\n }\n\n @Option(description = \"value is intended to be set to true when some generated bytecode is\" +\n \" erroneous causing the JVM to crash when the verify:none option is set \" +\n \"(which is on by 
default)\", option = \"prevent-noverify\")\n public void setPreventnoverify(boolean preventnoverify) {\n this.preventnoverify = preventnoverify;\n }\n\n @Optional\n @Input\n public List getCompilerArgs() {\n return compilerArgs;\n }\n\n @Option(description = \"Additional parameters to pass to javac when recompiling changed source files\", option = \"compiler-args\")\n public void setCompilerArgs(List compilerArgs) {\n this.compilerArgs = compilerArgs;\n }\n\n @TaskAction\n public void startDev() {\n if (!getSourceDir().isDirectory()) {\n throw new GradleException(\"The `src/main/java` directory is required, please create it.\");\n }\n\n if (!extension().outputDirectory().isDirectory()) {\n throw new GradleException(\"The project has no output yet, \" +\n \"this should not happen as build should have been executed first. \" +\n \"Does the project have any source files?\");\n }\n\n try {\n QuarkusDevModeLauncher runner = newLauncher();\n String outputFile = System.getProperty(IO_QUARKUS_DEVMODE_ARGS);\n if (outputFile == null) {\n getProject().exec(action -> {\n action.commandLine(runner.args()).workingDir(getWorkingDir());\n action.setStandardInput(System.in)\n .setErrorOutput(System.out)\n .setStandardOutput(System.out);\n });\n } else {\n try (BufferedWriter is = Files.newBufferedWriter(Paths.get(outputFile))) {\n for (String i : runner.args()) {\n is.write(i);\n is.newLine();\n }\n }\n }\n\n } catch (Exception e) {\n throw new GradleException(\"Failed to run\", e);\n }\n }\n\n \n\n protected void modifyDevModeContext(GradleDevModeLauncher.Builder builder) {\n\n }\n\n private void addQuarkusDevModeDeps(GradleDevModeLauncher.Builder builder) {\n final String pomPropsPath = \"META-INF/maven/io.quarkus/quarkus-core-deployment/pom.properties\";\n final InputStream devModePomPropsIs = DevModeMain.class.getClassLoader().getResourceAsStream(pomPropsPath);\n if (devModePomPropsIs == null) {\n throw new GradleException(\"Failed to locate \" + pomPropsPath + \" on the 
classpath\");\n }\n final Properties devModeProps = new Properties();\n try (InputStream is = devModePomPropsIs) {\n devModeProps.load(is);\n } catch (IOException e) {\n throw new GradleException(\"Failed to load \" + pomPropsPath + \" from the classpath\", e);\n }\n final String devModeGroupId = devModeProps.getProperty(\"groupId\");\n if (devModeGroupId == null) {\n throw new GradleException(\"Classpath resource \" + pomPropsPath + \" is missing groupId\");\n }\n final String devModeArtifactId = devModeProps.getProperty(\"artifactId\");\n if (devModeArtifactId == null) {\n throw new GradleException(\"Classpath resource \" + pomPropsPath + \" is missing artifactId\");\n }\n final String devModeVersion = devModeProps.getProperty(\"version\");\n if (devModeVersion == null) {\n throw new GradleException(\"Classpath resource \" + pomPropsPath + \" is missing version\");\n }\n\n Dependency devModeDependency = getProject().getDependencies()\n .create(String.format(\"%s:%s:%s\", devModeGroupId, devModeArtifactId, devModeVersion));\n\n final Configuration devModeDependencyConfiguration = getProject().getConfigurations()\n .detachedConfiguration(devModeDependency);\n\n for (ResolvedArtifact appDep : devModeDependencyConfiguration.getResolvedConfiguration().getResolvedArtifacts()) {\n ModuleVersionIdentifier artifactId = appDep.getModuleVersion().getId();\n \n if (!(artifactId.getGroup().equals(\"io.quarkus\")\n && artifactId.getName().equals(\"quarkus-ide-launcher\"))) {\n if (artifactId.getGroup().equals(\"io.quarkus\")\n && artifactId.getName().equals(\"quarkus-class-change-agent\")) {\n builder.jvmArgs(\"-javaagent:\" + appDep.getFile().getAbsolutePath());\n } else {\n builder.classpathEntry(appDep.getFile());\n }\n }\n }\n }\n\n private void addLocalProject(ResolvedDependency project, GradleDevModeLauncher.Builder builder, Set addeDeps,\n boolean root) {\n addeDeps.add(project.getKey());\n\n final ArtifactSources sources = project.getSources();\n if (sources == null) 
{\n return;\n }\n\n Set sourcePaths = new LinkedHashSet<>();\n Set sourceParentPaths = new LinkedHashSet<>();\n\n Path classesDir = null;\n for (SourceDir src : sources.getSourceDirs()) {\n if (Files.exists(src.getDir())) {\n sourcePaths.add(src.getDir());\n sourceParentPaths.add(src.getDir().getParent());\n if (classesDir == null) {\n classesDir = src.getOutputDir();\n }\n }\n }\n\n final Set resourcesSrcDirs = new LinkedHashSet<>();\n \n Path resourcesOutputDir = null;\n for (SourceDir resource : sources.getResourceDirs()) {\n resourcesSrcDirs.add(resource.getDir());\n if (resourcesOutputDir == null) {\n resourcesOutputDir = resource.getOutputDir();\n }\n }\n\n if (sourcePaths.isEmpty() && (resourcesOutputDir == null || !Files.exists(resourcesOutputDir)) || classesDir == null) {\n return;\n }\n\n final String resourcesOutputPath;\n if (resourcesOutputDir != null && Files.exists(resourcesOutputDir)) {\n resourcesOutputPath = resourcesOutputDir.toString();\n if (!Files.exists(classesDir)) {\n \n classesDir = resourcesOutputDir;\n }\n } else {\n \n resourcesOutputPath = classesDir.toString();\n }\n\n DevModeContext.ModuleInfo.Builder moduleBuilder = new DevModeContext.ModuleInfo.Builder()\n .setArtifactKey(project.getKey())\n .setName(project.getArtifactId())\n .setProjectDirectory(project.getWorkspaceModule().getModuleDir().getAbsolutePath())\n .setSourcePaths(PathList.from(sourcePaths))\n .setClassesPath(classesDir.toString())\n .setResourcePaths(PathList.from(resourcesSrcDirs))\n .setResourcesOutputPath(resourcesOutputPath)\n .setSourceParents(PathList.from(sourceParentPaths))\n .setPreBuildOutputDir(project.getWorkspaceModule().getBuildDir().toPath().resolve(\"generated-sources\")\n .toAbsolutePath().toString())\n .setTargetDir(project.getWorkspaceModule().getBuildDir().toString());\n\n final ArtifactSources testSources = project.getWorkspaceModule().getTestSources();\n if (testSources != null) {\n Set testSourcePaths = new LinkedHashSet<>();\n Set 
testSourceParentPaths = new LinkedHashSet<>();\n Path testClassesDir = null;\n\n for (SourceDir src : testSources.getSourceDirs()) {\n if (Files.exists(src.getDir())) {\n testSourcePaths.add(src.getDir());\n testSourceParentPaths.add(src.getDir().getParent());\n if (testClassesDir == null && src.getOutputDir() != null) {\n testClassesDir = src.getOutputDir();\n }\n }\n }\n\n final Set testResourcesSrcDirs = new LinkedHashSet<>();\n \n Path testResourcesOutputDir = null;\n for (SourceDir resource : testSources.getResourceDirs()) {\n testResourcesSrcDirs.add(resource.getDir());\n if (testResourcesOutputDir == null) {\n testResourcesOutputDir = resource.getOutputDir();\n }\n }\n\n if (testClassesDir != null && (!testSourcePaths.isEmpty()\n || (testResourcesOutputDir != null && Files.exists(testResourcesOutputDir)))) {\n final String testResourcesOutputPath;\n if (Files.exists(testResourcesOutputDir)) {\n testResourcesOutputPath = testResourcesOutputDir.toString();\n if (!Files.exists(testClassesDir)) {\n \n testClassesDir = testResourcesOutputDir;\n }\n } else {\n \n testResourcesOutputPath = testClassesDir.toString();\n }\n moduleBuilder.setTestSourcePaths(PathList.from(testSourcePaths))\n .setTestClassesPath(testClassesDir.toString())\n .setTestResourcePaths(PathList.from(testResourcesSrcDirs))\n .setTestResourcesOutputPath(testResourcesOutputPath);\n }\n }\n\n final DevModeContext.ModuleInfo wsModuleInfo = moduleBuilder.build();\n if (root) {\n builder.mainModule(wsModuleInfo);\n } else {\n builder.dependency(wsModuleInfo);\n }\n }\n\n private String getSourceEncoding() {\n return getJavaCompileTask()\n .map(javaCompile -> javaCompile.getOptions().getEncoding())\n .orElse(null);\n }\n\n private java.util.Optional getJavaCompileTask() {\n return java.util.Optional\n .ofNullable((JavaCompile) getProject().getTasks().getByName(JavaPlugin.COMPILE_JAVA_TASK_NAME));\n }\n\n public void shouldPropagateJavaCompilerArgs(boolean shouldPropagateJavaCompilerArgs) {\n 
this.shouldPropagateJavaCompilerArgs = shouldPropagateJavaCompilerArgs;\n }\n}" }, { "comment": "Shouldn't we have a `finally` block here?", "method_body": "public void run() throws Exception {\n\n SuperstepKickoffLatch nextSuperstepLatch =\n SuperstepKickoffLatchBroker.instance().get(brokerKey());\n\n while (this.running && !terminationRequested()) {\n\n if (log.isInfoEnabled()) {\n log.info(formatLogString(\"starting iteration [\" + currentIteration() + \"]\"));\n }\n\n super.run();\n\n \n verifyEndOfSuperstepState();\n\n if (isWorksetUpdate && isWorksetIteration) {\n long numCollected = worksetUpdateOutputCollector.getElementsCollectedAndReset();\n worksetAggregator.aggregate(numCollected);\n }\n\n if (log.isInfoEnabled()) {\n log.info(formatLogString(\"finishing iteration [\" + currentIteration() + \"]\"));\n }\n\n \n sendEndOfSuperstep();\n\n if (isWorksetUpdate) {\n \n worksetBackChannel.notifyOfEndOfSuperstep();\n }\n\n boolean terminated =\n nextSuperstepLatch.awaitStartOfSuperstepOrTermination(currentIteration() + 1);\n\n if (terminated) {\n requestTermination();\n } else {\n incrementIterationCounter();\n }\n }\n terminationCompleted();\n }", "target_code": "terminationCompleted();", "method_body_after": "public void run() throws Exception {\n\n try {\n SuperstepKickoffLatch nextSuperstepLatch =\n SuperstepKickoffLatchBroker.instance().get(brokerKey());\n\n while (this.running && !terminationRequested()) {\n\n if (log.isInfoEnabled()) {\n log.info(formatLogString(\"starting iteration [\" + currentIteration() + \"]\"));\n }\n\n super.run();\n\n \n verifyEndOfSuperstepState();\n\n if (isWorksetUpdate && isWorksetIteration) {\n long numCollected = worksetUpdateOutputCollector.getElementsCollectedAndReset();\n worksetAggregator.aggregate(numCollected);\n }\n\n if (log.isInfoEnabled()) {\n log.info(formatLogString(\"finishing iteration [\" + currentIteration() + \"]\"));\n }\n\n \n sendEndOfSuperstep();\n\n if (isWorksetUpdate) {\n \n 
worksetBackChannel.notifyOfEndOfSuperstep();\n }\n\n boolean terminated =\n nextSuperstepLatch.awaitStartOfSuperstepOrTermination(\n currentIteration() + 1);\n\n if (terminated) {\n requestTermination();\n } else {\n incrementIterationCounter();\n }\n }\n } finally {\n terminationCompleted();\n }\n }", "context_before": "class IterationIntermediateTask\n extends AbstractIterativeTask {\n\n private static final Logger log = LoggerFactory.getLogger(IterationIntermediateTask.class);\n\n private WorksetUpdateOutputCollector worksetUpdateOutputCollector;\n\n \n\n /**\n * Create an Invokable task and set its environment.\n *\n * @param environment The environment assigned to this invokable.\n */\n public IterationIntermediateTask(Environment environment) {\n super(environment);\n }\n\n \n\n @Override\n protected void initialize() throws Exception {\n super.initialize();\n\n \n \n \n \n \n\n Collector delegate = getLastOutputCollector();\n if (isWorksetUpdate) {\n \n \n if (isSolutionSetUpdate) {\n throw new IllegalStateException(\n \"Plan bug: Intermediate task performs workset and solutions set update.\");\n }\n\n Collector outputCollector = createWorksetUpdateOutputCollector(delegate);\n\n \n if (isWorksetIteration) {\n worksetUpdateOutputCollector = (WorksetUpdateOutputCollector) outputCollector;\n }\n\n setLastOutputCollector(outputCollector);\n } else if (isSolutionSetUpdate) {\n setLastOutputCollector(createSolutionSetUpdateOutputCollector(delegate));\n }\n }\n\n @Override\n \n\n private void sendEndOfSuperstep() throws IOException, InterruptedException {\n for (RecordWriter eventualOutput : this.eventualOutputs) {\n eventualOutput.broadcastEvent(EndOfSuperstepEvent.INSTANCE);\n }\n }\n}", "context_after": "class IterationIntermediateTask\n extends AbstractIterativeTask {\n\n private static final Logger log = LoggerFactory.getLogger(IterationIntermediateTask.class);\n\n private WorksetUpdateOutputCollector worksetUpdateOutputCollector;\n\n \n\n /**\n * Create an 
Invokable task and set its environment.\n *\n * @param environment The environment assigned to this invokable.\n */\n public IterationIntermediateTask(Environment environment) {\n super(environment);\n }\n\n \n\n @Override\n protected void initialize() throws Exception {\n super.initialize();\n\n \n \n \n \n \n\n Collector delegate = getLastOutputCollector();\n if (isWorksetUpdate) {\n \n \n if (isSolutionSetUpdate) {\n throw new IllegalStateException(\n \"Plan bug: Intermediate task performs workset and solutions set update.\");\n }\n\n Collector outputCollector = createWorksetUpdateOutputCollector(delegate);\n\n \n if (isWorksetIteration) {\n worksetUpdateOutputCollector = (WorksetUpdateOutputCollector) outputCollector;\n }\n\n setLastOutputCollector(outputCollector);\n } else if (isSolutionSetUpdate) {\n setLastOutputCollector(createSolutionSetUpdateOutputCollector(delegate));\n }\n }\n\n @Override\n \n\n private void sendEndOfSuperstep() throws IOException, InterruptedException {\n for (RecordWriter eventualOutput : this.eventualOutputs) {\n eventualOutput.broadcastEvent(EndOfSuperstepEvent.INSTANCE);\n }\n }\n}" }, { "comment": "I see this is a wrapped byte array in the parent class", "method_body": "protected String deserializePartitionKeyFromStream(DataInputStream in) throws IOException {\n int partitionKeyLength = in.readInt();\n byte[] requestPartitionKeyData = new byte[(int) partitionKeyLength];\n in.read(requestPartitionKeyData);\n return new String(requestPartitionKeyData, StandardCharsets.UTF_8);\n }", "target_code": "in.read(requestPartitionKeyData);", "method_body_after": "protected String deserializePartitionKeyFromStream(DataInputStream in) throws IOException {\n int partitionKeyLength = in.readInt();\n byte[] requestPartitionKeyData = new byte[(int) partitionKeyLength];\n in.read(requestPartitionKeyData);\n return new String(requestPartitionKeyData, StandardCharsets.UTF_8);\n }", "context_before": "class KinesisDataStreamsStateSerializer\n extends 
AsyncSinkWriterStateSerializer {\n @Override\n protected void serializeRequestToStream(PutRecordsRequestEntry request, DataOutputStream out)\n throws IOException {\n out.write(request.data().asByteArrayUnsafe());\n serializePartitionKeyToStream(request.partitionKey(), out);\n validateExplicitHashKey(request);\n }\n\n protected void serializePartitionKeyToStream(String partitionKey, DataOutputStream out)\n throws IOException {\n out.writeInt(partitionKey.length());\n out.write(partitionKey.getBytes(StandardCharsets.UTF_8));\n }\n\n protected void validateExplicitHashKey(PutRecordsRequestEntry request) {\n if (request.explicitHashKey() != null) {\n throw new IllegalStateException(\n String.format(\n \"KinesisDataStreamsStateSerializer is incompatible with ElementConverter.\"\n + \"Serializer version %d does not support explicit hash key.\",\n getVersion()));\n }\n }\n\n @Override\n protected PutRecordsRequestEntry deserializeRequestFromStream(\n long requestSize, DataInputStream in) throws IOException {\n byte[] requestData = new byte[(int) requestSize];\n in.read(requestData);\n\n return PutRecordsRequestEntry.builder()\n .data(SdkBytes.fromByteArray(requestData))\n .partitionKey(deserializePartitionKeyFromStream(in))\n .build();\n }\n\n \n\n @Override\n public int getVersion() {\n return 1;\n }\n}", "context_after": "class KinesisDataStreamsStateSerializer\n extends AsyncSinkWriterStateSerializer {\n @Override\n protected void serializeRequestToStream(PutRecordsRequestEntry request, DataOutputStream out)\n throws IOException {\n out.write(request.data().asByteArrayUnsafe());\n serializePartitionKeyToStream(request.partitionKey(), out);\n validateExplicitHashKey(request);\n }\n\n protected void serializePartitionKeyToStream(String partitionKey, DataOutputStream out)\n throws IOException {\n out.writeInt(partitionKey.length());\n out.write(partitionKey.getBytes(StandardCharsets.UTF_8));\n }\n\n protected void validateExplicitHashKey(PutRecordsRequestEntry request) 
{\n if (request.explicitHashKey() != null) {\n throw new IllegalStateException(\n String.format(\n \"KinesisDataStreamsStateSerializer is incompatible with ElementConverter.\"\n + \"Serializer version %d does not support explicit hash key.\",\n getVersion()));\n }\n }\n\n @Override\n protected PutRecordsRequestEntry deserializeRequestFromStream(\n long requestSize, DataInputStream in) throws IOException {\n byte[] requestData = new byte[(int) requestSize];\n in.read(requestData);\n\n return PutRecordsRequestEntry.builder()\n .data(SdkBytes.fromByteArray(requestData))\n .partitionKey(deserializePartitionKeyFromStream(in))\n .build();\n }\n\n \n\n @Override\n public int getVersion() {\n return 1;\n }\n}" }, { "comment": "This is used so that test is not run on arm64, where we have no support for onnx at the moment", "method_body": "public void testModelsEvaluator() {\n ModelsEvaluator modelsEvaluator = ModelsEvaluatorTester.create(\"src/test/cfg/application/stateless_eval\");\n assertEquals(3, modelsEvaluator.models().size());\n\n \n FunctionEvaluator mul = modelsEvaluator.evaluatorOf(\"mul\");\n Tensor input1 = Tensor.from(\"tensor(d0[1]):[2]\");\n Tensor input2 = Tensor.from(\"tensor(d0[1]):[3]\");\n Tensor output = mul.bind(\"input1\", input1).bind(\"input2\", input2).evaluate();\n assertEquals(6.0, output.sum().asDouble(), 1e-9);\n\n \n FunctionEvaluator lgbm = modelsEvaluator.evaluatorOf(\"lightgbm_regression\");\n lgbm.bind(\"numerical_1\", 0.1).bind(\"numerical_2\", 0.2).bind(\"categorical_1\", \"a\").bind(\"categorical_2\", \"i\");\n output = lgbm.evaluate();\n assertEquals(2.0547, output.sum().asDouble(), 1e-4);\n\n \n FunctionEvaluator foo1 = modelsEvaluator.evaluatorOf(\"example\", \"foo1\");\n input1 = Tensor.from(\"tensor(name{},x[3]):{{name:n,x:0}:1,{name:n,x:1}:2,{name:n,x:2}:3 }\");\n input2 = Tensor.from(\"tensor(x[3]):[2,3,4]\");\n output = foo1.bind(\"input1\", input1).bind(\"input2\", input2).evaluate();\n assertEquals(90, output.asDouble(), 
1e-9);\n\n FunctionEvaluator foo2 = modelsEvaluator.evaluatorOf(\"example\", \"foo2\");\n input1 = Tensor.from(\"tensor(name{},x[3]):{{name:n,x:0}:1,{name:n,x:1}:2,{name:n,x:2}:3 }\");\n input2 = Tensor.from(\"tensor(x[3]):[2,3,4]\");\n output = foo2.bind(\"input1\", input1).bind(\"input2\", input2).evaluate();\n assertEquals(90, output.asDouble(), 1e-9);\n }", "target_code": "ModelsEvaluator modelsEvaluator = ModelsEvaluatorTester.create(\"src/test/cfg/application/stateless_eval\");", "method_body_after": "public void testModelsEvaluator() {\n ModelsEvaluator modelsEvaluator = ModelsEvaluatorTester.create(\"src/test/cfg/application/stateless_eval\");\n assertEquals(3, modelsEvaluator.models().size());\n\n \n FunctionEvaluator mul = modelsEvaluator.evaluatorOf(\"mul\");\n Tensor input1 = Tensor.from(\"tensor(d0[1]):[2]\");\n Tensor input2 = Tensor.from(\"tensor(d0[1]):[3]\");\n Tensor output = mul.bind(\"input1\", input1).bind(\"input2\", input2).evaluate();\n assertEquals(6.0, output.sum().asDouble(), 1e-9);\n\n \n FunctionEvaluator lgbm = modelsEvaluator.evaluatorOf(\"lightgbm_regression\");\n lgbm.bind(\"numerical_1\", 0.1).bind(\"numerical_2\", 0.2).bind(\"categorical_1\", \"a\").bind(\"categorical_2\", \"i\");\n output = lgbm.evaluate();\n assertEquals(2.0547, output.sum().asDouble(), 1e-4);\n\n \n FunctionEvaluator foo1 = modelsEvaluator.evaluatorOf(\"example\", \"foo1\");\n input1 = Tensor.from(\"tensor(name{},x[3]):{{name:n,x:0}:1,{name:n,x:1}:2,{name:n,x:2}:3 }\");\n input2 = Tensor.from(\"tensor(x[3]):[2,3,4]\");\n output = foo1.bind(\"input1\", input1).bind(\"input2\", input2).evaluate();\n assertEquals(90, output.asDouble(), 1e-9);\n\n FunctionEvaluator foo2 = modelsEvaluator.evaluatorOf(\"example\", \"foo2\");\n input1 = Tensor.from(\"tensor(name{},x[3]):{{name:n,x:0}:1,{name:n,x:1}:2,{name:n,x:2}:3 }\");\n input2 = Tensor.from(\"tensor(x[3]):[2,3,4]\");\n output = foo2.bind(\"input1\", input1).bind(\"input2\", input2).evaluate();\n assertEquals(90, 
output.asDouble(), 1e-9);\n }", "context_before": "class ModelsEvaluatorTest {\n\n @Test\n \n\n}", "context_after": "class ModelsEvaluatorTest {\n\n @Test\n \n\n}" }, { "comment": "Can we do an integer division here instead of `String.valueOf(statusCode).charAt(0)` ? I think it might be better.", "method_body": "private void addHttpStatusCode(int statusCode) {\n Optional observerContext = ObserveUtils.getObserverContextOfCurrentFrame(context.getStrand());\n observerContext.ifPresent(ctx -> ctx.addTag(ObservabilityConstants.TAG_KEY_HTTP_STATUS_CODE_GROUP,\n String.valueOf(statusCode).charAt(0) + STATUS_CODE_GROUP_SUFFIX));\n }", "target_code": "String.valueOf(statusCode).charAt(0) + STATUS_CODE_GROUP_SUFFIX));", "method_body_after": "private void addHttpStatusCode(int statusCode) {\n Optional observerContext = ObserveUtils.getObserverContextOfCurrentFrame(context.getStrand());\n observerContext.ifPresent(ctx -> ctx.addTag(ObservabilityConstants.TAG_KEY_HTTP_STATUS_CODE_GROUP,\n statusCode / 100 + STATUS_CODE_GROUP_SUFFIX));\n }", "context_before": "class ObservableClientConnectorListener extends ClientConnectorListener {\n private final DataContext context;\n\n public ObservableClientConnectorListener(ClientCall.ClientStreamListener streamListener, DataContext context) {\n super(streamListener);\n this.context = context;\n }\n\n @Override\n public void onMessage(HttpCarbonMessage httpCarbonMessage) {\n super.onMessage(httpCarbonMessage);\n Integer statusCode = (Integer) httpCarbonMessage.getProperty(RESPONSE_STATUS_CODE_FIELD);\n addHttpStatusCode(statusCode == null ? 
0 : statusCode);\n }\n\n @Override\n public void onError(Throwable throwable) {\n super.onError(throwable);\n if (throwable instanceof ClientConnectorException) {\n ClientConnectorException clientConnectorException = (ClientConnectorException) throwable;\n addHttpStatusCode(clientConnectorException.getHttpStatusCode());\n Optional observerContext =\n ObserveUtils.getObserverContextOfCurrentFrame(context.getStrand());\n observerContext.ifPresent(ctx -> {\n ctx.addProperty(ObservabilityConstants.PROPERTY_ERROR, Boolean.TRUE);\n ctx.addProperty(ObservabilityConstants.PROPERTY_ERROR_MESSAGE, throwable.getMessage());\n });\n\n }\n }\n\n \n}", "context_after": "class ObservableClientConnectorListener extends ClientConnectorListener {\n private final DataContext context;\n\n public ObservableClientConnectorListener(ClientCall.ClientStreamListener streamListener, DataContext context) {\n super(streamListener);\n this.context = context;\n }\n\n @Override\n public void onMessage(HttpCarbonMessage httpCarbonMessage) {\n super.onMessage(httpCarbonMessage);\n Integer statusCode = (Integer) httpCarbonMessage.getProperty(RESPONSE_STATUS_CODE_FIELD);\n addHttpStatusCode(statusCode == null ? 
0 : statusCode);\n }\n\n @Override\n public void onError(Throwable throwable) {\n super.onError(throwable);\n if (throwable instanceof ClientConnectorException) {\n ClientConnectorException clientConnectorException = (ClientConnectorException) throwable;\n addHttpStatusCode(clientConnectorException.getHttpStatusCode());\n Optional observerContext =\n ObserveUtils.getObserverContextOfCurrentFrame(context.getStrand());\n observerContext.ifPresent(ctx -> {\n ctx.addProperty(ObservabilityConstants.PROPERTY_ERROR, Boolean.TRUE);\n ctx.addProperty(ObservabilityConstants.PROPERTY_ERROR_MESSAGE, throwable.getMessage());\n });\n\n }\n }\n\n \n}" }, { "comment": "will it print the reason as well?", "method_body": "private void assertFunctions(BIRNode.BIRFunction func) {\n String expectedBir = null;\n try {\n expectedBir = readFile(func.getName().getValue());\n } catch (IOException e) {\n Assert.fail();\n }\n if (!\"\".equals(expectedBir)) {\n String funcBir = birEmitter.emitFunction(func, 0);\n Assert.assertEquals(funcBir, expectedBir);\n }\n }", "target_code": "Assert.fail();", "method_body_after": "private void assertFunctions(BIRNode.BIRFunction func) {\n String expectedBir = null;\n try {\n expectedBir = readFile(func.getName().getValue());\n } catch (IOException e) {\n Assert.fail(\"Failed when reading file\", e);\n }\n if (!\"\".equals(expectedBir)) {\n String funcBir = birEmitter.emitFunction(func, 0);\n Assert.assertEquals(funcBir, expectedBir);\n }\n }", "context_before": "class BirVariableOptimizationTest {\n private final BIREmitter birEmitter;\n private CompileResult result;\n\n public BirVariableOptimizationTest() {\n birEmitter = BIREmitter.getInstance(new CompilerContext());\n result = BCompileUtil.compileAndGetBIR(\"test-src/bir/biroptimizer.bal\");\n }\n\n @Test(description = \"Test the liveness analysis on functions\")\n public void testFunctions() {\n ((BLangPackage) result.getAST()).symbol.bir.functions.forEach(this::assertFunctions);\n }\n\n 
@Test(description = \"Test the liveness analysis on attached functions\")\n public void testAttachedFunctions() {\n ((BLangPackage) result.getAST()).symbol.bir.typeDefs.forEach(\n typeDefinition -> typeDefinition.attachedFuncs.forEach(this::assertFunctions));\n }\n\n \n\n private String readFile(String name) throws IOException {\n \n \n Path filePath = Paths.get(\"src/test/resources/test-src/bir/bir-dump/\" + name).toAbsolutePath();\n if (Files.exists(filePath)) {\n StringBuilder contentBuilder = new StringBuilder();\n\n Stream stream = Files.lines(filePath, StandardCharsets.UTF_8);\n stream.forEach(s -> contentBuilder.append(s).append(\"\\n\"));\n\n return contentBuilder.toString().trim();\n }\n return \"\";\n }\n}", "context_after": "class BirVariableOptimizationTest {\n private final BIREmitter birEmitter;\n private CompileResult result;\n\n public BirVariableOptimizationTest() {\n birEmitter = BIREmitter.getInstance(new CompilerContext());\n result = BCompileUtil.compileAndGetBIR(\"test-src/bir/biroptimizer.bal\");\n }\n\n @Test(description = \"Test the liveness analysis on functions\")\n public void testFunctions() {\n ((BLangPackage) result.getAST()).symbol.bir.functions.forEach(this::assertFunctions);\n }\n\n @Test(description = \"Test the liveness analysis on attached functions\")\n public void testAttachedFunctions() {\n ((BLangPackage) result.getAST()).symbol.bir.typeDefs.forEach(\n typeDefinition -> typeDefinition.attachedFuncs.forEach(this::assertFunctions));\n }\n\n \n\n private String readFile(String name) throws IOException {\n \n \n Path filePath = Paths.get(\"src/test/resources/test-src/bir/bir-dump/\" + name).toAbsolutePath();\n if (Files.exists(filePath)) {\n StringBuilder contentBuilder = new StringBuilder();\n\n Stream stream = Files.lines(filePath, StandardCharsets.UTF_8);\n stream.forEach(s -> contentBuilder.append(s).append(\"\\n\"));\n\n return contentBuilder.toString().trim();\n }\n return \"\";\n }\n}" }, { "comment": "can't we assume 
that the second param is always partition key? that way the caller doesn't have to explicitly wrap pk value in PartitionKey and implementation will take care of that. thoughts?", "method_body": "protected void performWorkload(BaseSubscriber baseSubscriber, long i) throws InterruptedException {\n int index = (int) (i % docsToRead.size());\n PojoizedJson doc = docsToRead.get(index);\n\n String partitionKeyValue = doc.getId();\n Mono> result = cosmosAsyncContainer.readItem(doc.getId(),\n new PartitionKey(partitionKeyValue),\n PojoizedJson.class);\n\n concurrencyControlSemaphore.acquire();\n\n if (configuration.getOperationType() == Configuration.Operation.ReadThroughput) {\n result.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);\n } else {\n LatencySubscriber latencySubscriber = new LatencySubscriber<>(baseSubscriber);\n latencySubscriber.context = latency.time();\n result.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);\n }\n }", "target_code": "new PartitionKey(partitionKeyValue),", "method_body_after": "protected void performWorkload(BaseSubscriber baseSubscriber, long i) throws InterruptedException {\n int index = (int) (i % docsToRead.size());\n PojoizedJson doc = docsToRead.get(index);\n\n String partitionKeyValue = doc.getId();\n Mono> result = cosmosAsyncContainer.readItem(doc.getId(),\n new PartitionKey(partitionKeyValue),\n PojoizedJson.class);\n\n concurrencyControlSemaphore.acquire();\n\n if (configuration.getOperationType() == Configuration.Operation.ReadThroughput) {\n result.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);\n } else {\n LatencySubscriber latencySubscriber = new LatencySubscriber<>(baseSubscriber);\n latencySubscriber.context = latency.time();\n result.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);\n }\n }", "context_before": "class LatencySubscriber extends BaseSubscriber {\n\n Timer.Context context;\n BaseSubscriber baseSubscriber;\n\n LatencySubscriber(BaseSubscriber 
baseSubscriber) {\n this.baseSubscriber = baseSubscriber;\n }\n\n @Override\n protected void hookOnSubscribe(Subscription subscription) {\n super.hookOnSubscribe(subscription);\n }\n\n @Override\n protected void hookOnNext(T value) {\n }\n\n @Override\n protected void hookOnComplete() {\n context.stop();\n baseSubscriber.onComplete();\n }\n\n @Override\n protected void hookOnError(Throwable throwable) {\n context.stop();\n baseSubscriber.onError(throwable);\n }\n }", "context_after": "class LatencySubscriber extends BaseSubscriber {\n\n Timer.Context context;\n BaseSubscriber baseSubscriber;\n\n LatencySubscriber(BaseSubscriber baseSubscriber) {\n this.baseSubscriber = baseSubscriber;\n }\n\n @Override\n protected void hookOnSubscribe(Subscription subscription) {\n super.hookOnSubscribe(subscription);\n }\n\n @Override\n protected void hookOnNext(T value) {\n }\n\n @Override\n protected void hookOnComplete() {\n context.stop();\n baseSubscriber.onComplete();\n }\n\n @Override\n protected void hookOnError(Throwable throwable) {\n context.stop();\n baseSubscriber.onError(throwable);\n }\n }" }, { "comment": "`Arrays.asList` is already a hack and not readable - the method is for converting an array to a list, but it is used here to define a new list \"literal\", so to speak. But programmers seem to know the pattern so it is not so bad. Removing `Arrays` from the beginning makes it less clear that this hack is being used. 
These should instead be `ImmutableList.of` (I'm assuming they are all used immutably).", "method_body": "public void setUp() throws TransientKinesisException {\n when(a.getShardId()).thenReturn(\"shard1\");\n when(b.getShardId()).thenReturn(\"shard1\");\n when(c.getShardId()).thenReturn(\"shard2\");\n when(d.getShardId()).thenReturn(\"shard2\");\n when(firstCheckpoint.getShardId()).thenReturn(\"shard1\");\n when(secondCheckpoint.getShardId()).thenReturn(\"shard2\");\n when(firstIterator.getShardId()).thenReturn(\"shard1\");\n when(firstIterator.getCheckpoint()).thenReturn(firstCheckpoint);\n when(secondIterator.getShardId()).thenReturn(\"shard2\");\n when(secondIterator.getCheckpoint()).thenReturn(secondCheckpoint);\n when(thirdIterator.getShardId()).thenReturn(\"shard3\");\n when(fourthIterator.getShardId()).thenReturn(\"shard4\");\n KinesisReaderCheckpoint checkpoint = new KinesisReaderCheckpoint(\n asList(firstCheckpoint, secondCheckpoint));\n shardReadersPool = Mockito.spy(new ShardReadersPool(kinesis, checkpoint));\n doReturn(firstIterator).when(shardReadersPool).createShardIterator(kinesis, firstCheckpoint);\n doReturn(secondIterator).when(shardReadersPool).createShardIterator(kinesis, secondCheckpoint);\n }", "target_code": "asList(firstCheckpoint, secondCheckpoint));", "method_body_after": "public void setUp() throws TransientKinesisException {\n when(a.getShardId()).thenReturn(\"shard1\");\n when(b.getShardId()).thenReturn(\"shard1\");\n when(c.getShardId()).thenReturn(\"shard2\");\n when(d.getShardId()).thenReturn(\"shard2\");\n when(firstCheckpoint.getShardId()).thenReturn(\"shard1\");\n when(secondCheckpoint.getShardId()).thenReturn(\"shard2\");\n when(firstIterator.getShardId()).thenReturn(\"shard1\");\n when(firstIterator.getCheckpoint()).thenReturn(firstCheckpoint);\n when(secondIterator.getShardId()).thenReturn(\"shard2\");\n when(secondIterator.getCheckpoint()).thenReturn(secondCheckpoint);\n 
when(thirdIterator.getShardId()).thenReturn(\"shard3\");\n when(fourthIterator.getShardId()).thenReturn(\"shard4\");\n KinesisReaderCheckpoint checkpoint = new KinesisReaderCheckpoint(\n ImmutableList.of(firstCheckpoint, secondCheckpoint));\n shardReadersPool = Mockito.spy(new ShardReadersPool(kinesis, checkpoint));\n doReturn(firstIterator).when(shardReadersPool).createShardIterator(kinesis, firstCheckpoint);\n doReturn(secondIterator).when(shardReadersPool).createShardIterator(kinesis, secondCheckpoint);\n }", "context_before": "class ShardReadersPoolTest {\n\n private static final int TIMEOUT_IN_MILLIS = (int) TimeUnit.SECONDS.toMillis(10);\n\n @Mock\n private ShardRecordsIterator firstIterator, secondIterator, thirdIterator, fourthIterator;\n @Mock\n private ShardCheckpoint firstCheckpoint, secondCheckpoint;\n @Mock\n private SimplifiedKinesisClient kinesis;\n @Mock\n private KinesisRecord a, b, c, d;\n\n private ShardReadersPool shardReadersPool;\n\n @Before\n \n\n @After\n public void clean() {\n shardReadersPool.stop();\n }\n\n @Test\n public void shouldReturnAllRecords()\n throws TransientKinesisException, KinesisShardClosedException {\n when(firstIterator.readNextBatch())\n .thenReturn(Collections.emptyList())\n .thenReturn(asList(a, b))\n .thenReturn(Collections.emptyList());\n when(secondIterator.readNextBatch())\n .thenReturn(singletonList(c))\n .thenReturn(singletonList(d))\n .thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n List fetchedRecords = new ArrayList<>();\n while (fetchedRecords.size() < 4) {\n CustomOptional nextRecord = shardReadersPool.nextRecord();\n if (nextRecord.isPresent()) {\n fetchedRecords.add(nextRecord.get());\n }\n }\n assertThat(fetchedRecords).containsExactlyInAnyOrder(a, b, c, d);\n }\n\n @Test\n public void shouldReturnAbsentOptionalWhenNoRecords()\n throws TransientKinesisException, KinesisShardClosedException {\n when(firstIterator.readNextBatch())\n .thenReturn(Collections.emptyList());\n 
when(secondIterator.readNextBatch())\n .thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n CustomOptional nextRecord = shardReadersPool.nextRecord();\n assertThat(nextRecord.isPresent()).isFalse();\n }\n\n @Test\n public void shouldCheckpointReadRecords()\n throws TransientKinesisException, KinesisShardClosedException {\n when(firstIterator.readNextBatch())\n .thenReturn(asList(a, b))\n .thenReturn(Collections.emptyList());\n when(secondIterator.readNextBatch())\n .thenReturn(singletonList(c))\n .thenReturn(singletonList(d))\n .thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n int recordsFound = 0;\n while (recordsFound < 4) {\n CustomOptional nextRecord = shardReadersPool.nextRecord();\n if (nextRecord.isPresent()) {\n recordsFound++;\n KinesisRecord kinesisRecord = nextRecord.get();\n if (kinesisRecord.getShardId().equals(\"shard1\")) {\n verify(firstIterator).ackRecord(kinesisRecord);\n } else {\n verify(secondIterator).ackRecord(kinesisRecord);\n }\n }\n }\n }\n\n @Test\n public void shouldInterruptKinesisReadingAndStopShortly()\n throws TransientKinesisException, KinesisShardClosedException {\n when(firstIterator.readNextBatch()).thenAnswer((Answer>) invocation -> {\n Thread.sleep(TimeUnit.MINUTES.toMillis(1));\n return Collections.emptyList();\n });\n shardReadersPool.start();\n\n Stopwatch stopwatch = Stopwatch.createStarted();\n shardReadersPool.stop();\n assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS)).isLessThan(TIMEOUT_IN_MILLIS);\n }\n\n @Test\n public void shouldInterruptPuttingRecordsToQueueAndStopShortly()\n throws TransientKinesisException, KinesisShardClosedException {\n when(firstIterator.readNextBatch()).thenReturn(asList(a, b, c));\n KinesisReaderCheckpoint checkpoint = new KinesisReaderCheckpoint(\n asList(firstCheckpoint, secondCheckpoint));\n ShardReadersPool shardReadersPool = new ShardReadersPool(kinesis, checkpoint, 2);\n shardReadersPool.start();\n\n Stopwatch stopwatch = 
Stopwatch.createStarted();\n shardReadersPool.stop();\n assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS)).isLessThan(TIMEOUT_IN_MILLIS);\n\n }\n\n @Test\n public void shouldDetectThatNotAllShardsAreUpToDate() throws TransientKinesisException {\n when(firstIterator.isUpToDate()).thenReturn(true);\n when(secondIterator.isUpToDate()).thenReturn(false);\n shardReadersPool.start();\n\n assertThat(shardReadersPool.allShardsUpToDate()).isFalse();\n }\n\n @Test\n public void shouldDetectThatAllShardsAreUpToDate() throws TransientKinesisException {\n when(firstIterator.isUpToDate()).thenReturn(true);\n when(secondIterator.isUpToDate()).thenReturn(true);\n shardReadersPool.start();\n\n assertThat(shardReadersPool.allShardsUpToDate()).isTrue();\n }\n\n @Test\n public void shouldStopReadingShardAfterReceivingShardClosedException() throws Exception {\n when(firstIterator.readNextBatch()).thenThrow(KinesisShardClosedException.class);\n when(firstIterator.findSuccessiveShardRecordIterators())\n .thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n\n verify(firstIterator, timeout(TIMEOUT_IN_MILLIS).times(1)).readNextBatch();\n verify(secondIterator, timeout(TIMEOUT_IN_MILLIS).atLeast(2)).readNextBatch();\n }\n\n @Test\n public void shouldStartReadingSuccessiveShardsAfterReceivingShardClosedException()\n throws Exception {\n when(firstIterator.readNextBatch()).thenThrow(KinesisShardClosedException.class);\n when(firstIterator.findSuccessiveShardRecordIterators())\n .thenReturn(asList(thirdIterator, fourthIterator));\n\n shardReadersPool.start();\n\n verify(thirdIterator, timeout(TIMEOUT_IN_MILLIS).atLeast(2)).readNextBatch();\n verify(fourthIterator, timeout(TIMEOUT_IN_MILLIS).atLeast(2)).readNextBatch();\n }\n\n @Test\n public void shouldStopReadersPoolWhenLastShardReaderStopped() throws Exception {\n when(firstIterator.readNextBatch()).thenThrow(KinesisShardClosedException.class);\n when(firstIterator.findSuccessiveShardRecordIterators())\n 
.thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n\n verify(firstIterator, timeout(TIMEOUT_IN_MILLIS).times(1)).readNextBatch();\n }\n\n @Test\n public void shouldStopReadersPoolAlsoWhenExceptionsOccurDuringStopping() throws Exception {\n when(firstIterator.readNextBatch()).thenThrow(KinesisShardClosedException.class);\n when(firstIterator.findSuccessiveShardRecordIterators())\n .thenThrow(TransientKinesisException.class)\n .thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n\n verify(firstIterator, timeout(TIMEOUT_IN_MILLIS).times(2)).readNextBatch();\n }\n\n @Test\n public void shouldReturnAbsentOptionalWhenStartedWithNoIterators() throws Exception {\n KinesisReaderCheckpoint checkpoint = new KinesisReaderCheckpoint(\n Collections.emptyList());\n shardReadersPool = Mockito.spy(new ShardReadersPool(kinesis, checkpoint));\n doReturn(firstIterator).when(shardReadersPool)\n .createShardIterator(eq(kinesis), any(ShardCheckpoint.class));\n\n shardReadersPool.start();\n\n assertThat(shardReadersPool.nextRecord()).isEqualTo(CustomOptional.absent());\n }\n\n @Test\n public void shouldForgetClosedShardIterator() throws Exception {\n when(firstIterator.readNextBatch()).thenThrow(KinesisShardClosedException.class);\n List emptyList = Collections.emptyList();\n when(firstIterator.findSuccessiveShardRecordIterators()).thenReturn(emptyList);\n\n shardReadersPool.start();\n verify(shardReadersPool).startReadingShards(asList(firstIterator, secondIterator));\n verify(shardReadersPool, timeout(TIMEOUT_IN_MILLIS)).startReadingShards(emptyList);\n\n KinesisReaderCheckpoint checkpointMark = shardReadersPool.getCheckpointMark();\n assertThat(checkpointMark.iterator())\n .extracting(\"shardId\", String.class)\n .containsOnly(\"shard2\")\n .doesNotContain(\"shard1\");\n }\n}", "context_after": "class ShardReadersPoolTest {\n\n private static final int TIMEOUT_IN_MILLIS = (int) TimeUnit.SECONDS.toMillis(10);\n\n @Mock\n private ShardRecordsIterator 
firstIterator, secondIterator, thirdIterator, fourthIterator;\n @Mock\n private ShardCheckpoint firstCheckpoint, secondCheckpoint;\n @Mock\n private SimplifiedKinesisClient kinesis;\n @Mock\n private KinesisRecord a, b, c, d;\n\n private ShardReadersPool shardReadersPool;\n\n @Before\n \n\n @After\n public void clean() {\n shardReadersPool.stop();\n }\n\n @Test\n public void shouldReturnAllRecords()\n throws TransientKinesisException, KinesisShardClosedException {\n when(firstIterator.readNextBatch())\n .thenReturn(Collections.emptyList())\n .thenReturn(ImmutableList.of(a, b))\n .thenReturn(Collections.emptyList());\n when(secondIterator.readNextBatch())\n .thenReturn(singletonList(c))\n .thenReturn(singletonList(d))\n .thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n List fetchedRecords = new ArrayList<>();\n while (fetchedRecords.size() < 4) {\n CustomOptional nextRecord = shardReadersPool.nextRecord();\n if (nextRecord.isPresent()) {\n fetchedRecords.add(nextRecord.get());\n }\n }\n assertThat(fetchedRecords).containsExactlyInAnyOrder(a, b, c, d);\n }\n\n @Test\n public void shouldReturnAbsentOptionalWhenNoRecords()\n throws TransientKinesisException, KinesisShardClosedException {\n when(firstIterator.readNextBatch())\n .thenReturn(Collections.emptyList());\n when(secondIterator.readNextBatch())\n .thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n CustomOptional nextRecord = shardReadersPool.nextRecord();\n assertThat(nextRecord.isPresent()).isFalse();\n }\n\n @Test\n public void shouldCheckpointReadRecords()\n throws TransientKinesisException, KinesisShardClosedException {\n when(firstIterator.readNextBatch())\n .thenReturn(ImmutableList.of(a, b))\n .thenReturn(Collections.emptyList());\n when(secondIterator.readNextBatch())\n .thenReturn(singletonList(c))\n .thenReturn(singletonList(d))\n .thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n int recordsFound = 0;\n while (recordsFound < 4) {\n 
CustomOptional nextRecord = shardReadersPool.nextRecord();\n if (nextRecord.isPresent()) {\n recordsFound++;\n KinesisRecord kinesisRecord = nextRecord.get();\n if (\"shard1\".equals(kinesisRecord.getShardId())) {\n verify(firstIterator).ackRecord(kinesisRecord);\n } else {\n verify(secondIterator).ackRecord(kinesisRecord);\n }\n }\n }\n }\n\n @Test\n public void shouldInterruptKinesisReadingAndStopShortly()\n throws TransientKinesisException, KinesisShardClosedException {\n when(firstIterator.readNextBatch()).thenAnswer((Answer>) invocation -> {\n Thread.sleep(TimeUnit.MINUTES.toMillis(1));\n return Collections.emptyList();\n });\n shardReadersPool.start();\n\n Stopwatch stopwatch = Stopwatch.createStarted();\n shardReadersPool.stop();\n assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS)).isLessThan(TIMEOUT_IN_MILLIS);\n }\n\n @Test\n public void shouldInterruptPuttingRecordsToQueueAndStopShortly()\n throws TransientKinesisException, KinesisShardClosedException {\n when(firstIterator.readNextBatch()).thenReturn(ImmutableList.of(a, b, c));\n KinesisReaderCheckpoint checkpoint = new KinesisReaderCheckpoint(\n ImmutableList.of(firstCheckpoint, secondCheckpoint));\n ShardReadersPool shardReadersPool = new ShardReadersPool(kinesis, checkpoint, 2);\n shardReadersPool.start();\n\n Stopwatch stopwatch = Stopwatch.createStarted();\n shardReadersPool.stop();\n assertThat(stopwatch.elapsed(TimeUnit.MILLISECONDS)).isLessThan(TIMEOUT_IN_MILLIS);\n\n }\n\n @Test\n public void shouldDetectThatNotAllShardsAreUpToDate() throws TransientKinesisException {\n when(firstIterator.isUpToDate()).thenReturn(true);\n when(secondIterator.isUpToDate()).thenReturn(false);\n shardReadersPool.start();\n\n assertThat(shardReadersPool.allShardsUpToDate()).isFalse();\n }\n\n @Test\n public void shouldDetectThatAllShardsAreUpToDate() throws TransientKinesisException {\n when(firstIterator.isUpToDate()).thenReturn(true);\n when(secondIterator.isUpToDate()).thenReturn(true);\n 
shardReadersPool.start();\n\n assertThat(shardReadersPool.allShardsUpToDate()).isTrue();\n }\n\n @Test\n public void shouldStopReadingShardAfterReceivingShardClosedException() throws Exception {\n when(firstIterator.readNextBatch()).thenThrow(KinesisShardClosedException.class);\n when(firstIterator.findSuccessiveShardRecordIterators())\n .thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n\n verify(firstIterator, timeout(TIMEOUT_IN_MILLIS).times(1)).readNextBatch();\n verify(secondIterator, timeout(TIMEOUT_IN_MILLIS).atLeast(2)).readNextBatch();\n }\n\n @Test\n public void shouldStartReadingSuccessiveShardsAfterReceivingShardClosedException()\n throws Exception {\n when(firstIterator.readNextBatch()).thenThrow(KinesisShardClosedException.class);\n when(firstIterator.findSuccessiveShardRecordIterators())\n .thenReturn(ImmutableList.of(thirdIterator, fourthIterator));\n\n shardReadersPool.start();\n\n verify(thirdIterator, timeout(TIMEOUT_IN_MILLIS).atLeast(2)).readNextBatch();\n verify(fourthIterator, timeout(TIMEOUT_IN_MILLIS).atLeast(2)).readNextBatch();\n }\n\n @Test\n public void shouldStopReadersPoolWhenLastShardReaderStopped() throws Exception {\n when(firstIterator.readNextBatch()).thenThrow(KinesisShardClosedException.class);\n when(firstIterator.findSuccessiveShardRecordIterators())\n .thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n\n verify(firstIterator, timeout(TIMEOUT_IN_MILLIS).times(1)).readNextBatch();\n }\n\n @Test\n public void shouldStopReadersPoolAlsoWhenExceptionsOccurDuringStopping() throws Exception {\n when(firstIterator.readNextBatch()).thenThrow(KinesisShardClosedException.class);\n when(firstIterator.findSuccessiveShardRecordIterators())\n .thenThrow(TransientKinesisException.class)\n .thenReturn(Collections.emptyList());\n\n shardReadersPool.start();\n\n verify(firstIterator, timeout(TIMEOUT_IN_MILLIS).times(2)).readNextBatch();\n }\n\n @Test\n public void 
shouldReturnAbsentOptionalWhenStartedWithNoIterators() throws Exception {\n KinesisReaderCheckpoint checkpoint = new KinesisReaderCheckpoint(\n Collections.emptyList());\n shardReadersPool = Mockito.spy(new ShardReadersPool(kinesis, checkpoint));\n doReturn(firstIterator).when(shardReadersPool)\n .createShardIterator(eq(kinesis), any(ShardCheckpoint.class));\n\n shardReadersPool.start();\n\n assertThat(shardReadersPool.nextRecord()).isEqualTo(CustomOptional.absent());\n }\n\n @Test\n public void shouldForgetClosedShardIterator() throws Exception {\n when(firstIterator.readNextBatch()).thenThrow(KinesisShardClosedException.class);\n List emptyList = Collections.emptyList();\n when(firstIterator.findSuccessiveShardRecordIterators()).thenReturn(emptyList);\n\n shardReadersPool.start();\n verify(shardReadersPool).startReadingShards(ImmutableList.of(firstIterator, secondIterator));\n verify(shardReadersPool, timeout(TIMEOUT_IN_MILLIS)).startReadingShards(emptyList);\n\n KinesisReaderCheckpoint checkpointMark = shardReadersPool.getCheckpointMark();\n assertThat(checkpointMark.iterator())\n .extracting(\"shardId\", String.class)\n .containsOnly(\"shard2\")\n .doesNotContain(\"shard1\");\n }\n}" }, { "comment": "Does this mean it use a expr parition?", "method_body": "public boolean hasPartitionTransformedEvolution() {\n return getNativeTable().spec().fields().stream().anyMatch(field -> field.transform().isVoid());\n }", "target_code": "return getNativeTable().spec().fields().stream().anyMatch(field -> field.transform().isVoid());", "method_body_after": "public boolean hasPartitionTransformedEvolution() {\n return getNativeTable().spec().fields().stream().anyMatch(field -> field.transform().isVoid());\n }", "context_before": "class IcebergTable extends Table {\n private static final Logger LOG = LogManager.getLogger(IcebergTable.class);\n\n private Optional snapshot = Optional.empty();\n private static final String JSON_KEY_ICEBERG_DB = \"database\";\n private static final 
String JSON_KEY_ICEBERG_TABLE = \"table\";\n private static final String JSON_KEY_RESOURCE_NAME = \"resource\";\n private static final String JSON_KEY_ICEBERG_PROPERTIES = \"icebergProperties\";\n private static final String PARQUET_FORMAT = \"parquet\";\n\n private String catalogName;\n @SerializedName(value = \"dn\")\n private String remoteDbName;\n @SerializedName(value = \"tn\")\n private String remoteTableName;\n @SerializedName(value = \"rn\")\n private String resourceName;\n @SerializedName(value = \"prop\")\n private Map icebergProperties = Maps.newHashMap();\n\n private org.apache.iceberg.Table nativeTable; \n private List partitionColumns;\n \n private long refreshSnapshotTime = -1L;\n\n private final AtomicLong partitionIdGen = new AtomicLong(0L);\n\n public IcebergTable() {\n super(TableType.ICEBERG);\n }\n\n public IcebergTable(long id, String srTableName, String catalogName, String resourceName, String remoteDbName,\n String remoteTableName, List schema, org.apache.iceberg.Table nativeTable,\n Map icebergProperties) {\n super(id, srTableName, TableType.ICEBERG, schema);\n this.catalogName = catalogName;\n this.resourceName = resourceName;\n this.remoteDbName = remoteDbName;\n this.remoteTableName = remoteTableName;\n this.nativeTable = nativeTable;\n this.icebergProperties = icebergProperties;\n }\n\n @Override\n public String getCatalogName() {\n return catalogName == null ? 
getResourceMappingCatalogName(resourceName, \"iceberg\") : catalogName;\n }\n\n public String getResourceName() {\n return resourceName;\n }\n\n public String getRemoteDbName() {\n return remoteDbName;\n }\n\n public String getRemoteTableName() {\n return remoteTableName;\n }\n\n public Optional getSnapshot() {\n if (snapshot.isPresent()) {\n return snapshot;\n } else {\n snapshot = Optional.ofNullable(getNativeTable().currentSnapshot());\n return snapshot;\n }\n }\n\n @Override\n public String getUUID() {\n if (CatalogMgr.isExternalCatalog(catalogName)) {\n return String.join(\".\", catalogName, remoteDbName, remoteTableName,\n ((BaseTable) getNativeTable()).operations().current().uuid());\n } else {\n return Long.toString(id);\n }\n }\n\n public List getPartitionColumns() {\n if (partitionColumns == null) {\n List identityPartitionFields = this.getNativeTable().spec().fields().stream().\n filter(partitionField -> partitionField.transform().isIdentity()).collect(Collectors.toList());\n partitionColumns = identityPartitionFields.stream().map(partitionField -> getColumn(partitionField.name()))\n .collect(Collectors.toList());\n }\n\n return partitionColumns;\n }\n public List getPartitionColumnsIncludeTransformed() {\n List allPartitionColumns = new ArrayList<>();\n for (PartitionField field : getNativeTable().spec().fields()) {\n if (!field.transform().isIdentity() && hasPartitionTransformedEvolution()) {\n continue;\n }\n String baseColumnName = nativeTable.schema().findColumnName(field.sourceId());\n Column partitionCol = getColumn(baseColumnName);\n allPartitionColumns.add(partitionCol);\n }\n return allPartitionColumns;\n }\n\n public long nextPartitionId() {\n return partitionIdGen.getAndIncrement();\n }\n\n public List partitionColumnIndexes() {\n List partitionCols = getPartitionColumns();\n return partitionCols.stream().map(col -> fullSchema.indexOf(col)).collect(Collectors.toList());\n }\n\n public List getSortKeyIndexes() {\n List indexes = new 
ArrayList<>();\n org.apache.iceberg.Table nativeTable = getNativeTable();\n List fields = nativeTable.schema().asStruct().fields();\n List sortFieldSourceIds = nativeTable.sortOrder().fields().stream()\n .map(SortField::sourceId)\n .collect(Collectors.toList());\n\n for (int i = 0; i < fields.size(); i++) {\n Types.NestedField field = fields.get(i);\n if (sortFieldSourceIds.contains(field.fieldId())) {\n indexes.add(i);\n }\n }\n\n return indexes;\n }\n\n \n \n\n public void resetSnapshot() {\n snapshot = Optional.empty();\n }\n\n public boolean isV2Format() {\n return ((BaseTable) getNativeTable()).operations().current().formatVersion() > 1;\n }\n\n public boolean isUnPartitioned() {\n return ((BaseTable) getNativeTable()).operations().current().spec().isUnpartitioned();\n }\n\n public List getPartitionColumnNames() {\n return getPartitionColumns().stream().filter(java.util.Objects::nonNull).map(Column::getName)\n .collect(Collectors.toList());\n }\n\n @Override\n public String getTableIdentifier() {\n return Joiner.on(\":\").join(name, ((BaseTable) getNativeTable()).operations().current().uuid());\n }\n\n public IcebergCatalogType getCatalogType() {\n return IcebergCatalogType.valueOf(icebergProperties.get(ICEBERG_CATALOG_TYPE));\n }\n\n public String getTableLocation() {\n return getNativeTable().location();\n }\n\n public org.apache.iceberg.Table getNativeTable() {\n \n if (nativeTable == null) {\n IcebergTable resourceMappingTable = (IcebergTable) GlobalStateMgr.getCurrentState().getMetadataMgr()\n .getTable(getCatalogName(), remoteDbName, remoteTableName);\n if (resourceMappingTable == null) {\n throw new StarRocksConnectorException(\"Can't find table %s.%s.%s\",\n getCatalogName(), remoteDbName, remoteTableName);\n }\n nativeTable = resourceMappingTable.getNativeTable();\n }\n return nativeTable;\n }\n\n public long getRefreshSnapshotTime() {\n return refreshSnapshotTime;\n }\n\n public void setRefreshSnapshotTime(long refreshSnapshotTime) {\n 
this.refreshSnapshotTime = refreshSnapshotTime;\n }\n\n @Override\n public TTableDescriptor toThrift(List partitions) {\n Preconditions.checkNotNull(partitions);\n\n TIcebergTable tIcebergTable = new TIcebergTable();\n tIcebergTable.setLocation(nativeTable.location());\n\n List tColumns = Lists.newArrayList();\n for (Column column : getBaseSchema()) {\n tColumns.add(column.toThrift());\n }\n tIcebergTable.setColumns(tColumns);\n\n tIcebergTable.setIceberg_schema(IcebergApiConverter.getTIcebergSchema(nativeTable.schema()));\n tIcebergTable.setPartition_column_names(getPartitionColumnNames());\n\n if (!partitions.isEmpty()) {\n TPartitionMap tPartitionMap = new TPartitionMap();\n for (int i = 0; i < partitions.size(); i++) {\n DescriptorTable.ReferencedPartitionInfo info = partitions.get(i);\n PartitionKey key = info.getKey();\n long partitionId = info.getId();\n THdfsPartition tPartition = new THdfsPartition();\n List keys = key.getKeys();\n tPartition.setPartition_key_exprs(keys.stream().map(Expr::treeToThrift).collect(Collectors.toList()));\n tPartitionMap.putToPartitions(partitionId, tPartition);\n }\n\n \n \n try {\n TSerializer serializer = new TSerializer(TBinaryProtocol::new);\n byte[] bytes = serializer.serialize(tPartitionMap);\n byte[] compressedBytes = Util.compress(bytes);\n TCompressedPartitionMap tCompressedPartitionMap = new TCompressedPartitionMap();\n tCompressedPartitionMap.setOriginal_len(bytes.length);\n tCompressedPartitionMap.setCompressed_len(compressedBytes.length);\n tCompressedPartitionMap.setCompressed_serialized_partitions(Base64.getEncoder().encodeToString(compressedBytes));\n tIcebergTable.setCompressed_partitions(tCompressedPartitionMap);\n } catch (TException | IOException ignore) {\n tIcebergTable.setPartitions(tPartitionMap.getPartitions());\n }\n }\n\n TTableDescriptor tTableDescriptor = new TTableDescriptor(id, TTableType.ICEBERG_TABLE,\n fullSchema.size(), 0, remoteTableName, remoteDbName);\n 
tTableDescriptor.setIcebergTable(tIcebergTable);\n return tTableDescriptor;\n }\n\n @Override\n public void write(DataOutput out) throws IOException {\n super.write(out);\n\n JsonObject jsonObject = new JsonObject();\n jsonObject.addProperty(JSON_KEY_ICEBERG_DB, remoteDbName);\n jsonObject.addProperty(JSON_KEY_ICEBERG_TABLE, remoteTableName);\n if (!Strings.isNullOrEmpty(resourceName)) {\n jsonObject.addProperty(JSON_KEY_RESOURCE_NAME, resourceName);\n }\n if (!icebergProperties.isEmpty()) {\n JsonObject jIcebergProperties = new JsonObject();\n for (Map.Entry entry : icebergProperties.entrySet()) {\n jIcebergProperties.addProperty(entry.getKey(), entry.getValue());\n }\n jsonObject.add(JSON_KEY_ICEBERG_PROPERTIES, jIcebergProperties);\n }\n Text.writeString(out, jsonObject.toString());\n }\n\n @Override\n public void readFields(DataInput in) throws IOException {\n super.readFields(in);\n\n String json = Text.readString(in);\n JsonObject jsonObject = JsonParser.parseString(json).getAsJsonObject();\n remoteDbName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_DB).getAsString();\n remoteTableName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_TABLE).getAsString();\n resourceName = jsonObject.getAsJsonPrimitive(JSON_KEY_RESOURCE_NAME).getAsString();\n if (jsonObject.has(JSON_KEY_ICEBERG_PROPERTIES)) {\n JsonObject jIcebergProperties = jsonObject.getAsJsonObject(JSON_KEY_ICEBERG_PROPERTIES);\n for (Map.Entry entry : jIcebergProperties.entrySet()) {\n icebergProperties.put(entry.getKey(), entry.getValue().getAsString());\n }\n }\n }\n\n @Override\n public boolean isSupported() {\n return true;\n }\n\n @Override\n public boolean supportInsert() {\n \n return getNativeTable().properties().getOrDefault(DEFAULT_FILE_FORMAT, DEFAULT_FILE_FORMAT_DEFAULT)\n .equalsIgnoreCase(PARQUET_FORMAT);\n }\n\n @Override\n public int hashCode() {\n return com.google.common.base.Objects.hashCode(getCatalogName(), remoteDbName, getTableIdentifier());\n }\n\n @Override\n public boolean 
equals(Object other) {\n if (!(other instanceof IcebergTable)) {\n return false;\n }\n\n IcebergTable otherTable = (IcebergTable) other;\n String catalogName = getCatalogName();\n String tableIdentifier = getTableIdentifier();\n return Objects.equal(catalogName, otherTable.getCatalogName()) &&\n Objects.equal(remoteDbName, otherTable.remoteDbName) &&\n Objects.equal(tableIdentifier, otherTable.getTableIdentifier());\n }\n\n public static Builder builder() {\n return new Builder();\n }\n\n public static class Builder {\n private long id;\n private String srTableName;\n private String catalogName;\n private String resourceName;\n private String remoteDbName;\n private String remoteTableName;\n private List fullSchema;\n private Map icebergProperties;\n private org.apache.iceberg.Table nativeTable;\n\n public Builder() {\n }\n\n public Builder setId(long id) {\n this.id = id;\n return this;\n }\n\n public Builder setSrTableName(String srTableName) {\n this.srTableName = srTableName;\n return this;\n }\n\n public Builder setCatalogName(String catalogName) {\n this.catalogName = catalogName;\n return this;\n }\n\n public Builder setResourceName(String resourceName) {\n this.resourceName = resourceName;\n return this;\n }\n\n public Builder setRemoteDbName(String remoteDbName) {\n this.remoteDbName = remoteDbName;\n return this;\n }\n\n public Builder setRemoteTableName(String remoteTableName) {\n this.remoteTableName = remoteTableName;\n return this;\n }\n\n public Builder setFullSchema(List fullSchema) {\n this.fullSchema = fullSchema;\n return this;\n }\n\n public Builder setIcebergProperties(Map icebergProperties) {\n this.icebergProperties = icebergProperties;\n return this;\n }\n\n public Builder setNativeTable(org.apache.iceberg.Table nativeTable) {\n this.nativeTable = nativeTable;\n return this;\n }\n\n public IcebergTable build() {\n return new IcebergTable(id, srTableName, catalogName, resourceName, remoteDbName, remoteTableName,\n fullSchema, nativeTable, 
icebergProperties);\n }\n }\n}", "context_after": "class IcebergTable extends Table {\n private static final Logger LOG = LogManager.getLogger(IcebergTable.class);\n\n private Optional snapshot = Optional.empty();\n private static final String JSON_KEY_ICEBERG_DB = \"database\";\n private static final String JSON_KEY_ICEBERG_TABLE = \"table\";\n private static final String JSON_KEY_RESOURCE_NAME = \"resource\";\n private static final String JSON_KEY_ICEBERG_PROPERTIES = \"icebergProperties\";\n private static final String PARQUET_FORMAT = \"parquet\";\n\n private String catalogName;\n @SerializedName(value = \"dn\")\n private String remoteDbName;\n @SerializedName(value = \"tn\")\n private String remoteTableName;\n @SerializedName(value = \"rn\")\n private String resourceName;\n @SerializedName(value = \"prop\")\n private Map icebergProperties = Maps.newHashMap();\n\n private org.apache.iceberg.Table nativeTable; \n private List partitionColumns;\n \n private long refreshSnapshotTime = -1L;\n\n private final AtomicLong partitionIdGen = new AtomicLong(0L);\n\n public IcebergTable() {\n super(TableType.ICEBERG);\n }\n\n public IcebergTable(long id, String srTableName, String catalogName, String resourceName, String remoteDbName,\n String remoteTableName, List schema, org.apache.iceberg.Table nativeTable,\n Map icebergProperties) {\n super(id, srTableName, TableType.ICEBERG, schema);\n this.catalogName = catalogName;\n this.resourceName = resourceName;\n this.remoteDbName = remoteDbName;\n this.remoteTableName = remoteTableName;\n this.nativeTable = nativeTable;\n this.icebergProperties = icebergProperties;\n }\n\n @Override\n public String getCatalogName() {\n return catalogName == null ? 
getResourceMappingCatalogName(resourceName, \"iceberg\") : catalogName;\n }\n\n public String getResourceName() {\n return resourceName;\n }\n\n public String getRemoteDbName() {\n return remoteDbName;\n }\n\n public String getRemoteTableName() {\n return remoteTableName;\n }\n\n public Optional getSnapshot() {\n if (snapshot.isPresent()) {\n return snapshot;\n } else {\n snapshot = Optional.ofNullable(getNativeTable().currentSnapshot());\n return snapshot;\n }\n }\n\n @Override\n public String getUUID() {\n if (CatalogMgr.isExternalCatalog(catalogName)) {\n return String.join(\".\", catalogName, remoteDbName, remoteTableName,\n ((BaseTable) getNativeTable()).operations().current().uuid());\n } else {\n return Long.toString(id);\n }\n }\n\n public List getPartitionColumns() {\n if (partitionColumns == null) {\n List identityPartitionFields = this.getNativeTable().spec().fields().stream().\n filter(partitionField -> partitionField.transform().isIdentity()).collect(Collectors.toList());\n partitionColumns = identityPartitionFields.stream().map(partitionField -> getColumn(partitionField.name()))\n .collect(Collectors.toList());\n }\n\n return partitionColumns;\n }\n public List getPartitionColumnsIncludeTransformed() {\n List allPartitionColumns = new ArrayList<>();\n for (PartitionField field : getNativeTable().spec().fields()) {\n if (!field.transform().isIdentity() && hasPartitionTransformedEvolution()) {\n continue;\n }\n String baseColumnName = nativeTable.schema().findColumnName(field.sourceId());\n Column partitionCol = getColumn(baseColumnName);\n allPartitionColumns.add(partitionCol);\n }\n return allPartitionColumns;\n }\n\n public long nextPartitionId() {\n return partitionIdGen.getAndIncrement();\n }\n\n public List partitionColumnIndexes() {\n List partitionCols = getPartitionColumns();\n return partitionCols.stream().map(col -> fullSchema.indexOf(col)).collect(Collectors.toList());\n }\n\n public List getSortKeyIndexes() {\n List indexes = new 
ArrayList<>();\n org.apache.iceberg.Table nativeTable = getNativeTable();\n List fields = nativeTable.schema().asStruct().fields();\n List sortFieldSourceIds = nativeTable.sortOrder().fields().stream()\n .map(SortField::sourceId)\n .collect(Collectors.toList());\n\n for (int i = 0; i < fields.size(); i++) {\n Types.NestedField field = fields.get(i);\n if (sortFieldSourceIds.contains(field.fieldId())) {\n indexes.add(i);\n }\n }\n\n return indexes;\n }\n\n \n \n\n public void resetSnapshot() {\n snapshot = Optional.empty();\n }\n\n public boolean isV2Format() {\n return ((BaseTable) getNativeTable()).operations().current().formatVersion() > 1;\n }\n\n public boolean isUnPartitioned() {\n return ((BaseTable) getNativeTable()).operations().current().spec().isUnpartitioned();\n }\n\n public List getPartitionColumnNames() {\n return getPartitionColumns().stream().filter(java.util.Objects::nonNull).map(Column::getName)\n .collect(Collectors.toList());\n }\n\n @Override\n public String getTableIdentifier() {\n return Joiner.on(\":\").join(name, ((BaseTable) getNativeTable()).operations().current().uuid());\n }\n\n public IcebergCatalogType getCatalogType() {\n return IcebergCatalogType.valueOf(icebergProperties.get(ICEBERG_CATALOG_TYPE));\n }\n\n public String getTableLocation() {\n return getNativeTable().location();\n }\n\n public org.apache.iceberg.Table getNativeTable() {\n \n if (nativeTable == null) {\n IcebergTable resourceMappingTable = (IcebergTable) GlobalStateMgr.getCurrentState().getMetadataMgr()\n .getTable(getCatalogName(), remoteDbName, remoteTableName);\n if (resourceMappingTable == null) {\n throw new StarRocksConnectorException(\"Can't find table %s.%s.%s\",\n getCatalogName(), remoteDbName, remoteTableName);\n }\n nativeTable = resourceMappingTable.getNativeTable();\n }\n return nativeTable;\n }\n\n public long getRefreshSnapshotTime() {\n return refreshSnapshotTime;\n }\n\n public void setRefreshSnapshotTime(long refreshSnapshotTime) {\n 
this.refreshSnapshotTime = refreshSnapshotTime;\n }\n\n @Override\n public TTableDescriptor toThrift(List partitions) {\n Preconditions.checkNotNull(partitions);\n\n TIcebergTable tIcebergTable = new TIcebergTable();\n tIcebergTable.setLocation(nativeTable.location());\n\n List tColumns = Lists.newArrayList();\n for (Column column : getBaseSchema()) {\n tColumns.add(column.toThrift());\n }\n tIcebergTable.setColumns(tColumns);\n\n tIcebergTable.setIceberg_schema(IcebergApiConverter.getTIcebergSchema(nativeTable.schema()));\n tIcebergTable.setPartition_column_names(getPartitionColumnNames());\n\n if (!partitions.isEmpty()) {\n TPartitionMap tPartitionMap = new TPartitionMap();\n for (int i = 0; i < partitions.size(); i++) {\n DescriptorTable.ReferencedPartitionInfo info = partitions.get(i);\n PartitionKey key = info.getKey();\n long partitionId = info.getId();\n THdfsPartition tPartition = new THdfsPartition();\n List keys = key.getKeys();\n tPartition.setPartition_key_exprs(keys.stream().map(Expr::treeToThrift).collect(Collectors.toList()));\n tPartitionMap.putToPartitions(partitionId, tPartition);\n }\n\n \n \n try {\n TSerializer serializer = new TSerializer(TBinaryProtocol::new);\n byte[] bytes = serializer.serialize(tPartitionMap);\n byte[] compressedBytes = Util.compress(bytes);\n TCompressedPartitionMap tCompressedPartitionMap = new TCompressedPartitionMap();\n tCompressedPartitionMap.setOriginal_len(bytes.length);\n tCompressedPartitionMap.setCompressed_len(compressedBytes.length);\n tCompressedPartitionMap.setCompressed_serialized_partitions(Base64.getEncoder().encodeToString(compressedBytes));\n tIcebergTable.setCompressed_partitions(tCompressedPartitionMap);\n } catch (TException | IOException ignore) {\n tIcebergTable.setPartitions(tPartitionMap.getPartitions());\n }\n }\n\n TTableDescriptor tTableDescriptor = new TTableDescriptor(id, TTableType.ICEBERG_TABLE,\n fullSchema.size(), 0, remoteTableName, remoteDbName);\n 
tTableDescriptor.setIcebergTable(tIcebergTable);\n return tTableDescriptor;\n }\n\n @Override\n public void write(DataOutput out) throws IOException {\n super.write(out);\n\n JsonObject jsonObject = new JsonObject();\n jsonObject.addProperty(JSON_KEY_ICEBERG_DB, remoteDbName);\n jsonObject.addProperty(JSON_KEY_ICEBERG_TABLE, remoteTableName);\n if (!Strings.isNullOrEmpty(resourceName)) {\n jsonObject.addProperty(JSON_KEY_RESOURCE_NAME, resourceName);\n }\n if (!icebergProperties.isEmpty()) {\n JsonObject jIcebergProperties = new JsonObject();\n for (Map.Entry entry : icebergProperties.entrySet()) {\n jIcebergProperties.addProperty(entry.getKey(), entry.getValue());\n }\n jsonObject.add(JSON_KEY_ICEBERG_PROPERTIES, jIcebergProperties);\n }\n Text.writeString(out, jsonObject.toString());\n }\n\n @Override\n public void readFields(DataInput in) throws IOException {\n super.readFields(in);\n\n String json = Text.readString(in);\n JsonObject jsonObject = JsonParser.parseString(json).getAsJsonObject();\n remoteDbName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_DB).getAsString();\n remoteTableName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_TABLE).getAsString();\n resourceName = jsonObject.getAsJsonPrimitive(JSON_KEY_RESOURCE_NAME).getAsString();\n if (jsonObject.has(JSON_KEY_ICEBERG_PROPERTIES)) {\n JsonObject jIcebergProperties = jsonObject.getAsJsonObject(JSON_KEY_ICEBERG_PROPERTIES);\n for (Map.Entry entry : jIcebergProperties.entrySet()) {\n icebergProperties.put(entry.getKey(), entry.getValue().getAsString());\n }\n }\n }\n\n @Override\n public boolean isSupported() {\n return true;\n }\n\n @Override\n public boolean supportInsert() {\n \n return getNativeTable().properties().getOrDefault(DEFAULT_FILE_FORMAT, DEFAULT_FILE_FORMAT_DEFAULT)\n .equalsIgnoreCase(PARQUET_FORMAT);\n }\n\n @Override\n public int hashCode() {\n return com.google.common.base.Objects.hashCode(getCatalogName(), remoteDbName, getTableIdentifier());\n }\n\n @Override\n public boolean 
equals(Object other) {\n if (!(other instanceof IcebergTable)) {\n return false;\n }\n\n IcebergTable otherTable = (IcebergTable) other;\n String catalogName = getCatalogName();\n String tableIdentifier = getTableIdentifier();\n return Objects.equal(catalogName, otherTable.getCatalogName()) &&\n Objects.equal(remoteDbName, otherTable.remoteDbName) &&\n Objects.equal(tableIdentifier, otherTable.getTableIdentifier());\n }\n\n public static Builder builder() {\n return new Builder();\n }\n\n public static class Builder {\n private long id;\n private String srTableName;\n private String catalogName;\n private String resourceName;\n private String remoteDbName;\n private String remoteTableName;\n private List fullSchema;\n private Map icebergProperties;\n private org.apache.iceberg.Table nativeTable;\n\n public Builder() {\n }\n\n public Builder setId(long id) {\n this.id = id;\n return this;\n }\n\n public Builder setSrTableName(String srTableName) {\n this.srTableName = srTableName;\n return this;\n }\n\n public Builder setCatalogName(String catalogName) {\n this.catalogName = catalogName;\n return this;\n }\n\n public Builder setResourceName(String resourceName) {\n this.resourceName = resourceName;\n return this;\n }\n\n public Builder setRemoteDbName(String remoteDbName) {\n this.remoteDbName = remoteDbName;\n return this;\n }\n\n public Builder setRemoteTableName(String remoteTableName) {\n this.remoteTableName = remoteTableName;\n return this;\n }\n\n public Builder setFullSchema(List fullSchema) {\n this.fullSchema = fullSchema;\n return this;\n }\n\n public Builder setIcebergProperties(Map icebergProperties) {\n this.icebergProperties = icebergProperties;\n return this;\n }\n\n public Builder setNativeTable(org.apache.iceberg.Table nativeTable) {\n this.nativeTable = nativeTable;\n return this;\n }\n\n public IcebergTable build() {\n return new IcebergTable(id, srTableName, catalogName, resourceName, remoteDbName, remoteTableName,\n fullSchema, nativeTable, 
icebergProperties);\n }\n }\n}" }, { "comment": "1. Could you name it `StartTime`? 2. Is it a datetime or a timestamp value?", "method_body": "public void exec(AuditEvent event) {\n try {\n StringBuilder sb = new StringBuilder();\n long queryTime = 0;\n \n \n Field[] fields = event.getClass().getFields();\n for (Field f : fields) {\n AuditField af = f.getAnnotation(AuditField.class);\n if (af == null) {\n continue;\n }\n\n \n \n if (af.value().equals(\"BigQueryLogCPUSecondThreshold\") ||\n af.value().equals(\"BigQueryLogScanBytesThreshold\") ||\n af.value().equals(\"BigQueryLogScanRowsThreshold\")) {\n continue;\n }\n\n if (af.value().equals(\"Time\")) {\n queryTime = (long) f.get(event);\n }\n\n \n Object value = f.get(event);\n if (af.ignore_zero() && value == null) {\n continue;\n }\n if (value instanceof Long) {\n long longValue = (Long) value;\n if (longValue == -1 || (longValue == 0 && af.ignore_zero())) {\n continue;\n }\n }\n if (value instanceof Integer) {\n int intValue = (Integer) value;\n if (intValue == -1 || (intValue == 0 && af.ignore_zero())) {\n continue;\n }\n }\n if (value instanceof Double) {\n double doubleValue = (Double) value;\n if (doubleValue == -1 || (doubleValue == 0 && af.ignore_zero())) {\n continue;\n }\n }\n sb.append(\"|\").append(af.value()).append(\"=\").append(value);\n }\n\n String auditLog = sb.toString();\n if (event.type == EventType.CONNECTION) {\n AuditLog.getConnectionAudit().log(auditLog);\n } else {\n AuditLog.getQueryAudit().log(auditLog);\n \n if (queryTime > Config.qe_slow_log_ms) {\n AuditLog.getSlowAudit().log(auditLog);\n }\n\n if (isBigQuery(event)) {\n sb.append(\"|bigQueryLogCPUSecondThreshold=\").append(event.bigQueryLogCPUSecondThreshold);\n sb.append(\"|bigQueryLogScanBytesThreshold=\").append(event.bigQueryLogScanBytesThreshold);\n sb.append(\"|bigQueryLogScanRowsThreshold=\").append(event.bigQueryLogScanRowsThreshold);\n String bigQueryLog = sb.toString();\n AuditLog.getBigQueryAudit().log(bigQueryLog);\n 
}\n }\n } catch (Exception e) {\n LOG.warn(\"failed to process audit event\", e);\n }\n }", "target_code": "", "method_body_after": "public void exec(AuditEvent event) {\n try {\n StringBuilder sb = new StringBuilder();\n long queryTime = 0;\n \n \n Field[] fields = event.getClass().getFields();\n for (Field f : fields) {\n AuditField af = f.getAnnotation(AuditField.class);\n if (af == null) {\n continue;\n }\n\n \n \n if (af.value().equals(\"BigQueryLogCPUSecondThreshold\") ||\n af.value().equals(\"BigQueryLogScanBytesThreshold\") ||\n af.value().equals(\"BigQueryLogScanRowsThreshold\")) {\n continue;\n }\n\n if (af.value().equals(\"Time\")) {\n queryTime = (long) f.get(event);\n }\n\n \n Object value = f.get(event);\n if (af.ignore_zero() && value == null) {\n continue;\n }\n if (value instanceof Long) {\n long longValue = (Long) value;\n if (longValue == -1 || (longValue == 0 && af.ignore_zero())) {\n continue;\n }\n }\n if (value instanceof Integer) {\n int intValue = (Integer) value;\n if (intValue == -1 || (intValue == 0 && af.ignore_zero())) {\n continue;\n }\n }\n if (value instanceof Double) {\n double doubleValue = (Double) value;\n if (doubleValue == -1 || (doubleValue == 0 && af.ignore_zero())) {\n continue;\n }\n }\n sb.append(\"|\").append(af.value()).append(\"=\").append(value);\n }\n\n String auditLog = sb.toString();\n if (event.type == EventType.CONNECTION) {\n AuditLog.getConnectionAudit().log(auditLog);\n } else {\n AuditLog.getQueryAudit().log(auditLog);\n \n if (queryTime > Config.qe_slow_log_ms) {\n AuditLog.getSlowAudit().log(auditLog);\n }\n\n if (isBigQuery(event)) {\n sb.append(\"|bigQueryLogCPUSecondThreshold=\").append(event.bigQueryLogCPUSecondThreshold);\n sb.append(\"|bigQueryLogScanBytesThreshold=\").append(event.bigQueryLogScanBytesThreshold);\n sb.append(\"|bigQueryLogScanRowsThreshold=\").append(event.bigQueryLogScanRowsThreshold);\n String bigQueryLog = sb.toString();\n AuditLog.getBigQueryAudit().log(bigQueryLog);\n }\n }\n } 
catch (Exception e) {\n LOG.warn(\"failed to process audit event\", e);\n }\n }", "context_before": "class AuditLogBuilder extends Plugin implements AuditPlugin {\n private static final Logger LOG = LogManager.getLogger(AuditLogBuilder.class);\n\n private final PluginInfo pluginInfo;\n\n public AuditLogBuilder() {\n pluginInfo = new PluginInfo(PluginMgr.BUILTIN_PLUGIN_PREFIX + \"AuditLogBuilder\", PluginType.AUDIT,\n \"builtin audit logger\", DigitalVersion.fromString(\"0.12.0\"),\n DigitalVersion.fromString(\"1.8.31\"), AuditLogBuilder.class.getName(), null, null);\n }\n\n public PluginInfo getPluginInfo() {\n return pluginInfo;\n }\n\n @Override\n public boolean eventFilter(EventType type) {\n return type == EventType.AFTER_QUERY || type == EventType.CONNECTION;\n }\n\n @Override\n \n\n private boolean isBigQuery(AuditEvent event) {\n if (event.bigQueryLogCPUSecondThreshold >= 0 &&\n event.cpuCostNs > event.bigQueryLogCPUSecondThreshold * 1000000000L) {\n return true;\n }\n if (event.bigQueryLogScanBytesThreshold >= 0 && event.scanBytes > event.bigQueryLogScanBytesThreshold) {\n return true;\n }\n return event.bigQueryLogScanRowsThreshold >= 0 && event.scanRows > event.bigQueryLogScanRowsThreshold;\n }\n}", "context_after": "class AuditLogBuilder extends Plugin implements AuditPlugin {\n private static final Logger LOG = LogManager.getLogger(AuditLogBuilder.class);\n\n private final PluginInfo pluginInfo;\n\n public AuditLogBuilder() {\n pluginInfo = new PluginInfo(PluginMgr.BUILTIN_PLUGIN_PREFIX + \"AuditLogBuilder\", PluginType.AUDIT,\n \"builtin audit logger\", DigitalVersion.fromString(\"0.12.0\"),\n DigitalVersion.fromString(\"1.8.31\"), AuditLogBuilder.class.getName(), null, null);\n }\n\n public PluginInfo getPluginInfo() {\n return pluginInfo;\n }\n\n @Override\n public boolean eventFilter(EventType type) {\n return type == EventType.AFTER_QUERY || type == EventType.CONNECTION;\n }\n\n @Override\n \n\n private boolean isBigQuery(AuditEvent event) {\n if 
(event.bigQueryLogCPUSecondThreshold >= 0 &&\n event.cpuCostNs > event.bigQueryLogCPUSecondThreshold * 1000000000L) {\n return true;\n }\n if (event.bigQueryLogScanBytesThreshold >= 0 && event.scanBytes > event.bigQueryLogScanBytesThreshold) {\n return true;\n }\n return event.bigQueryLogScanRowsThreshold >= 0 && event.scanRows > event.bigQueryLogScanRowsThreshold;\n }\n}" }, { "comment": "Should the emitted log level be configurable? I.e. to log it at debug, info or warning level? Also, since toString() on the context could become CPU intensive (if in future it uses JSON etc) one can check the log level enabled for the logger and if not enabled for info or warning then do not call toString or put the rest of the info on the stack since it is not going to be logged anyway.", "method_body": "protected void log(CosmosDiagnosticsContext ctx) {\n if (this.shouldLogDueToStatusCode(ctx.getStatusCode(), ctx.getSubStatusCode())) {\n logger.warn(\n \"Account: {} -> DB: {}, Col:{}, StatusCode: {}:{} Diagnostics: {}\",\n ctx.getAccountName(),\n ctx.getDatabaseName(),\n ctx.getCollectionName(),\n ctx.getStatusCode(),\n ctx.getSubStatusCode(),\n ctx.toString());\n } else {\n logger.info(\n \"Account: {} -> DB: {}, Col:{}, StatusCode: {}:{} Diagnostics: {}\",\n ctx.getAccountName(),\n ctx.getDatabaseName(),\n ctx.getCollectionName(),\n ctx.getStatusCode(),\n ctx.getSubStatusCode(),\n ctx.toString());\n }\n }", "target_code": "logger.info(", "method_body_after": "protected void log(CosmosDiagnosticsContext ctx) {\n if (ctx.isFailure()) {\n if (logger.isErrorEnabled()) {\n logger.error(\n \"Account: {} -> DB: {}, Col:{}, StatusCode: {}:{} Diagnostics: {}\",\n ctx.getAccountName(),\n ctx.getDatabaseName(),\n ctx.getContainerName(),\n ctx.getStatusCode(),\n ctx.getSubStatusCode(),\n ctx);\n }\n } else if (ctx.isThresholdViolated()) {\n if (logger.isInfoEnabled()) {\n logger.info(\n \"Account: {} -> DB: {}, Col:{}, StatusCode: {}:{} Diagnostics: {}\",\n ctx.getAccountName(),\n 
ctx.getDatabaseName(),\n ctx.getContainerName(),\n ctx.getStatusCode(),\n ctx.getSubStatusCode(),\n ctx);\n }\n } else if (logger.isTraceEnabled()) {\n logger.trace(\n \"Account: {} -> DB: {}, Col:{}, StatusCode: {}:{} Diagnostics: {}\",\n ctx.getAccountName(),\n ctx.getDatabaseName(),\n ctx.getContainerName(),\n ctx.getStatusCode(),\n ctx.getSubStatusCode(),\n ctx);\n } else if (logger.isDebugEnabled()) {\n logger.debug(\n \"Account: {} -> DB: {}, Col:{}, StatusCode: {}:{}, Latency: {}, Request charge: {}\",\n ctx.getAccountName(),\n ctx.getDatabaseName(),\n ctx.getContainerName(),\n ctx.getStatusCode(),\n ctx.getSubStatusCode(),\n ctx.getDuration(),\n ctx.getTotalRequestCharge());\n }\n }", "context_before": "class CosmosDiagnosticsLogger implements CosmosDiagnosticsHandler {\n private final static Logger logger = LoggerFactory.getLogger(CosmosDiagnosticsLogger.class);\n private final static ImplementationBridgeHelpers.CosmosDiagnosticsContextHelper.CosmosDiagnosticsContextAccessor ctxAccessor =\n ImplementationBridgeHelpers.CosmosDiagnosticsContextHelper.getCosmosDiagnosticsContextAccessor();\n\n private final CosmosDiagnosticsLoggerConfig config;\n private final Set pointOperationTypes = new HashSet() {{\n add(OperationType.Create);\n add(OperationType.Delete);\n add(OperationType.Patch);\n add(OperationType.Read);\n add(OperationType.Replace);\n add(OperationType.Upsert);\n }};\n\n /**\n * Creates an instance of the CosmosDiagnosticLogger class\n * @param config the configuration determining the conditions when to log an operation\n */\n public CosmosDiagnosticsLogger(CosmosDiagnosticsLoggerConfig config) {\n checkNotNull(config, \"Argument 'config' must not be null.\");\n this.config = config;\n }\n\n /**\n * Decides whether to log diagnostics for an operation and emits the logs when needed\n * @param traceContext the Azure trace context\n * @param diagnosticsContext the Cosmos DB diagnostic context with metadata for the operation\n */\n @Override\n public 
final void handleDiagnostics(Context traceContext, CosmosDiagnosticsContext diagnosticsContext) {\n checkNotNull(diagnosticsContext, \"Argument 'diagnosticsContext' must not be null.\");\n\n if (shouldLog(diagnosticsContext)) {\n this.log(diagnosticsContext);\n }\n }\n\n /**\n * Decides whether to log diagnostics for an operation\n * @param diagnosticsContext\n * @return a flag inidcating whether to log the operation or not\n */\n protected boolean shouldLog(CosmosDiagnosticsContext diagnosticsContext) {\n\n if (!diagnosticsContext.hasCompleted()) {\n return false;\n }\n\n if (shouldLogDueToStatusCode(diagnosticsContext.getStatusCode(), diagnosticsContext.getSubStatusCode())) {\n return true;\n }\n\n ResourceType resourceType = ctxAccessor.getResourceType(diagnosticsContext);\n OperationType operationType = ctxAccessor.getOperationType(diagnosticsContext);\n\n if (resourceType == ResourceType.Document) {\n if (pointOperationTypes.contains(operationType)) {\n if (diagnosticsContext.getDuration().compareTo(this.config.getPointOperationLatencyThreshold()) >= 1) {\n return true;\n }\n } else {\n if (diagnosticsContext.getDuration().compareTo(this.config.getFeedOperationLatencyThreshold()) >= 1) {\n return true;\n }\n }\n }\n\n if (diagnosticsContext.getTotalRequestCharge() > this.config.getRequestChargeThreshold()) {\n return true;\n }\n\n return false;\n }\n\n private boolean shouldLogDueToStatusCode(int statusCode, int subStatusCode) {\n return statusCode >= 500 || statusCode == 408 || statusCode == 410;\n }\n\n /**\n * Logs the operation. 
This method can be overridden for example to emit logs to a different target than log4j\n * @param ctx\n */\n \n}", "context_after": "class CosmosDiagnosticsLogger implements CosmosDiagnosticsHandler {\n private final static Logger logger = LoggerFactory.getLogger(CosmosDiagnosticsLogger.class);\n\n /**\n * Creates an instance of the CosmosDiagnosticLogger class\n */\n public CosmosDiagnosticsLogger() {\n }\n\n /**\n * Decides whether to log diagnostics for an operation and emits the logs when needed\n *\n * @param diagnosticsContext the Cosmos DB diagnostic context with metadata for the operation\n * @param traceContext the Azure trace context\n */\n @Override\n public final void handleDiagnostics(CosmosDiagnosticsContext diagnosticsContext, Context traceContext) {\n checkNotNull(diagnosticsContext, \"Argument 'diagnosticsContext' must not be null.\");\n\n if (shouldLog(diagnosticsContext)) {\n this.log(diagnosticsContext);\n }\n }\n\n /**\n * Decides whether to log diagnostics for an operation\n * @param diagnosticsContext the diagnostics context\n * @return a flag indicating whether to log the operation or not\n */\n protected boolean shouldLog(CosmosDiagnosticsContext diagnosticsContext) {\n\n if (!diagnosticsContext.isCompleted()) {\n return false;\n }\n\n return diagnosticsContext.isFailure() ||\n diagnosticsContext.isThresholdViolated() ||\n logger.isDebugEnabled();\n }\n\n /**\n * Logs the operation. This method can be overridden for example to emit logs to a different target than log4j\n * @param ctx the diagnostics context\n */\n \n}" }, { "comment": "For samples like bellow ```bal public function main() { [T...] 
a; } ``` This will create a member descriptor containing rest descriptor inside", "method_body": "private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {\n \n switch (peek(lookahead + 1).kind) {\n case IDENTIFIER_TOKEN:\n SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;\n switch (tokenAfterIdentifier) {\n case ON_KEYWORD: \n case OPEN_BRACE_TOKEN: \n return true;\n case EQUAL_TOKEN: \n case SEMICOLON_TOKEN: \n case QUESTION_MARK_TOKEN: \n return false;\n default:\n \n return false;\n }\n case ON_KEYWORD:\n \n \n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse listener declaration, given the qualifier.\n *

\n * \n * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;\n * \n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the listener declaration\n * @return Parsed node\n */\n private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {\n startContext(ParserRuleContext.LISTENER_DECL);\n STNode listenerKeyword = parseListenerKeyword();\n\n if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {\n STNode listenerDecl =\n parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);\n endContext();\n return listenerDecl;\n }\n\n STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\n STNode variableName = parseVariableName();\n STNode equalsToken = parseAssignOp();\n STNode initializer = parseExpression();\n STNode semicolonToken = parseSemicolon();\n endContext();\n return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,\n equalsToken, initializer, semicolonToken);\n }\n\n /**\n * Parse listener keyword.\n *\n * @return Parsed node\n */\n private STNode parseListenerKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.LISTENER_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.LISTENER_KEYWORD);\n return parseListenerKeyword();\n }\n }\n\n /**\n * Parse constant declaration, given the qualifier.\n *

\n * module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;\n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the listener declaration\n * @return Parsed node\n */\n private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {\n startContext(ParserRuleContext.CONSTANT_DECL);\n STNode constKeyword = parseConstantKeyword();\n \n return parseConstDecl(metadata, qualifier, constKeyword);\n }\n\n /**\n * Parse the components that follows after the const keyword of a constant declaration.\n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the constant decl\n * @param constKeyword Const keyword\n * @return Parsed node\n */\n private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case ANNOTATION_KEYWORD:\n endContext();\n return parseAnnotationDeclaration(metadata, qualifier, constKeyword);\n case IDENTIFIER_TOKEN:\n STNode constantDecl =\n parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);\n endContext();\n return constantDecl;\n default:\n if (isTypeStartingToken(nextToken.kind)) {\n break;\n }\n\n recover(peek(), ParserRuleContext.CONST_DECL_TYPE);\n return parseConstDecl(metadata, qualifier, constKeyword);\n }\n\n STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\n STNode variableName = parseVariableName();\n STNode equalsToken = parseAssignOp();\n STNode initializer = parseExpression();\n STNode semicolonToken = parseSemicolon();\n endContext();\n return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,\n equalsToken, initializer, semicolonToken);\n }\n\n private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\n boolean isListener) {\n STNode varNameOrTypeName = 
parseStatementStartIdentifier();\n return parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);\n }\n\n /**\n * Parse the component that follows the first identifier in a const decl. The identifier\n * can be either the type-name (a user defined type) or the var-name there the type-name\n * is not present.\n *\n * @param qualifier Qualifier that precedes the constant decl\n * @param keyword Keyword\n * @param typeOrVarName Identifier that follows the const-keywoord\n * @return Parsed node\n */\n private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,\n STNode typeOrVarName, boolean isListener) {\n if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\n STNode type = typeOrVarName;\n STNode variableName = parseVariableName();\n return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n }\n\n STNode type;\n STNode variableName;\n switch (peek().kind) {\n case IDENTIFIER_TOKEN:\n type = typeOrVarName;\n variableName = parseVariableName();\n break;\n case EQUAL_TOKEN:\n variableName = ((STSimpleNameReferenceNode) typeOrVarName).name; \n type = STNodeFactory.createEmptyNode();\n break;\n default:\n recover(peek(), ParserRuleContext.CONST_DECL_RHS);\n return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);\n }\n\n return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n }\n\n private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,\n STNode type, STNode variableName) {\n STNode equalsToken = parseAssignOp();\n STNode initializer = parseExpression();\n STNode semicolonToken = parseSemicolon();\n\n if (isListener) {\n return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,\n equalsToken, initializer, semicolonToken);\n }\n\n return 
STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,\n equalsToken, initializer, semicolonToken);\n }\n\n /**\n * Parse const keyword.\n *\n * @return Parsed node\n */\n private STNode parseConstantKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.CONST_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.CONST_KEYWORD);\n return parseConstantKeyword();\n }\n }\n\n /**\n * Parse typeof expression.\n *

\n * \n * typeof-expr := typeof expression\n * \n *\n * @param isRhsExpr\n * @return Typeof expression node\n */\n private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\n STNode typeofKeyword = parseTypeofKeyword();\n\n \n \n STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\n return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);\n }\n\n /**\n * Parse typeof-keyword.\n *\n * @return Typeof-keyword node\n */\n private STNode parseTypeofKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.TYPEOF_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.TYPEOF_KEYWORD);\n return parseTypeofKeyword();\n }\n }\n\n /**\n * Parse optional type descriptor given the type.\n *

\n * optional-type-descriptor := type-descriptor `?`\n *

\n *\n * @param typeDescriptorNode Preceding type descriptor\n * @return Parsed node\n */\n private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {\n startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);\n STNode questionMarkToken = parseQuestionMark();\n endContext();\n return createOptionalTypeDesc(typeDescriptorNode, questionMarkToken);\n }\n\n private STNode createOptionalTypeDesc(STNode typeDescNode, STNode questionMarkToken) {\n if (typeDescNode.kind == SyntaxKind.UNION_TYPE_DESC) {\n STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDescNode;\n STNode middleTypeDesc = createOptionalTypeDesc(unionTypeDesc.rightTypeDesc, questionMarkToken);\n typeDescNode = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);\n } else if (typeDescNode.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\n STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDescNode;\n STNode middleTypeDesc = createOptionalTypeDesc(intersectionTypeDesc.rightTypeDesc, questionMarkToken);\n typeDescNode = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,\n intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);\n } else {\n typeDescNode = validateForUsageOfVar(typeDescNode);\n typeDescNode = STNodeFactory.createOptionalTypeDescriptorNode(typeDescNode, questionMarkToken);\n }\n\n return typeDescNode;\n }\n\n /**\n * Parse unary expression.\n *

\n * \n * unary-expr := + expression | - expression | ~ expression | ! expression\n * \n *\n * @param isRhsExpr\n * @return Unary expression node\n */\n private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\n STNode unaryOperator = parseUnaryOperator();\n\n \n \n STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\n return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);\n }\n\n /**\n * Parse unary operator.\n * UnaryOperator := + | - | ~ | !\n *\n * @return Parsed node\n */\n private STNode parseUnaryOperator() {\n STToken token = peek();\n if (isUnaryOperator(token.kind)) {\n return consume();\n } else {\n recover(token, ParserRuleContext.UNARY_OPERATOR);\n return parseUnaryOperator();\n }\n }\n\n /**\n * Check whether the given token kind is a unary operator.\n *\n * @param kind STToken kind\n * @return true if the token kind refers to a unary operator. false otherwise\n */\n private boolean isUnaryOperator(SyntaxKind kind) {\n switch (kind) {\n case PLUS_TOKEN:\n case MINUS_TOKEN:\n case NEGATION_TOKEN:\n case EXCLAMATION_MARK_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse array type descriptor.\n *

\n * \n * array-type-descriptor := array-member-type-descriptor [ [ array-length ] ]\n * array-member-type-descriptor := type-descriptor\n * array-length :=\n * int-literal\n * | constant-reference-expr\n * | inferred-array-length\n * inferred-array-length := *\n * \n *

\n *\n * @param memberTypeDesc\n * @return Parsed Node\n */\n private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {\n startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);\n STNode openBracketToken = parseOpenBracket();\n STNode arrayLengthNode = parseArrayLength();\n STNode closeBracketToken = parseCloseBracket();\n endContext();\n return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken);\n }\n\n private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode,\n STNode closeBracketToken) {\n memberTypeDesc = validateForUsageOfVar(memberTypeDesc);\n if (arrayLengthNode != null) {\n switch (arrayLengthNode.kind) {\n case ASTERISK_LITERAL:\n case SIMPLE_NAME_REFERENCE:\n case QUALIFIED_NAME_REFERENCE:\n break;\n case NUMERIC_LITERAL:\n SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind;\n if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||\n numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {\n break;\n }\n \n default:\n openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken,\n arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\n arrayLengthNode = STNodeFactory.createEmptyNode();\n }\n }\n \n \n List arrayDimensions = new ArrayList();\n if (memberTypeDesc.kind == SyntaxKind.ARRAY_TYPE_DESC) {\n STArrayTypeDescriptorNode innerArrayType = (STArrayTypeDescriptorNode) memberTypeDesc;\n STNode innerArrayDimensions = innerArrayType.dimensions;\n int dimensionCount = innerArrayDimensions.bucketCount();\n \n for (int i = 0; i < dimensionCount; i++) {\n arrayDimensions.add(innerArrayDimensions.childInBucket(i));\n }\n memberTypeDesc = innerArrayType.memberTypeDesc;\n }\n \n STNode arrayDimension = STNodeFactory.createArrayDimensionNode(openBracketToken, arrayLengthNode, \n closeBracketToken);\n arrayDimensions.add(arrayDimension);\n STNode arrayDimensionNodeList = 
STNodeFactory.createNodeList(arrayDimensions);\n \n return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, arrayDimensionNodeList);\n }\n\n /**\n * Parse array length.\n *

\n * \n * array-length :=\n * int-literal\n * | constant-reference-expr\n * | inferred-array-length\n * constant-reference-expr := variable-reference-expr\n * \n *

\n *\n * @return Parsed array length\n */\n private STNode parseArrayLength() {\n STToken token = peek();\n switch (token.kind) {\n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case ASTERISK_TOKEN:\n return parseBasicLiteral();\n case CLOSE_BRACKET_TOKEN:\n return STNodeFactory.createEmptyNode();\n \n case IDENTIFIER_TOKEN:\n return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);\n default:\n recover(token, ParserRuleContext.ARRAY_LENGTH);\n return parseArrayLength();\n }\n }\n\n /**\n * Parse annotations.\n *

\n * Note: In the \n *

\n * annots := annotation*\n *\n * @return Parsed node\n */\n private STNode parseOptionalAnnotations() {\n startContext(ParserRuleContext.ANNOTATIONS);\n List annotList = new ArrayList<>();\n STToken nextToken = peek();\n while (nextToken.kind == SyntaxKind.AT_TOKEN) {\n annotList.add(parseAnnotation());\n nextToken = peek();\n }\n\n endContext();\n return STNodeFactory.createNodeList(annotList);\n }\n\n /**\n * Parse annotation list with at least one annotation.\n *\n * @return Annotation list\n */\n private STNode parseAnnotations() {\n startContext(ParserRuleContext.ANNOTATIONS);\n List annotList = new ArrayList<>();\n annotList.add(parseAnnotation());\n while (peek().kind == SyntaxKind.AT_TOKEN) {\n annotList.add(parseAnnotation());\n }\n\n endContext();\n return STNodeFactory.createNodeList(annotList);\n }\n\n /**\n * Parse annotation attachment.\n *

\n * annotation := @ annot-tag-reference annot-value\n *\n * @return Parsed node\n */\n private STNode parseAnnotation() {\n STNode atToken = parseAtToken();\n STNode annotReference;\n if (isPredeclaredIdentifier(peek().kind)) {\n annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);\n } else {\n annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n annotReference = STNodeFactory.createSimpleNameReferenceNode(annotReference);\n }\n\n STNode annotValue;\n if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\n annotValue = parseMappingConstructorExpr();\n } else {\n annotValue = STNodeFactory.createEmptyNode();\n }\n return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);\n }\n\n /**\n * Parse '@' token.\n *\n * @return Parsed node\n */\n private STNode parseAtToken() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.AT_TOKEN) {\n return consume();\n } else {\n recover(nextToken, ParserRuleContext.AT);\n return parseAtToken();\n }\n }\n\n /**\n * Parse metadata. Meta data consist of optional doc string and\n * an annotations list.\n *

\n * metadata := [DocumentationString] annots\n *\n * @return Parse node\n */\n private STNode parseMetaData() {\n STNode docString;\n STNode annotations;\n switch (peek().kind) {\n case DOCUMENTATION_STRING:\n docString = parseMarkdownDocumentation();\n annotations = parseOptionalAnnotations();\n break;\n case AT_TOKEN:\n docString = STNodeFactory.createEmptyNode();\n annotations = parseOptionalAnnotations();\n break;\n default:\n return STNodeFactory.createEmptyNode();\n }\n\n return createMetadata(docString, annotations);\n }\n\n /**\n * Create metadata node.\n *\n * @return A metadata node\n */\n private STNode createMetadata(STNode docString, STNode annotations) {\n if (annotations == null && docString == null) {\n return STNodeFactory.createEmptyNode();\n } else {\n return STNodeFactory.createMetadataNode(docString, annotations);\n }\n }\n\n /**\n * Parse type test expression.\n * \n * type-test-expr := expression (is | !is) type-descriptor\n * \n *\n * @param lhsExpr Preceding expression of the is expression\n * @return Is expression node\n */\n private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {\n STNode isOrNotIsKeyword = parseIsOrNotIsKeyword();\n STNode typeDescriptor = parseTypeDescriptorInExpression(isInConditionalExpr);\n return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isOrNotIsKeyword, typeDescriptor);\n }\n\n /**\n * Parse `is` keyword or `!is` keyword.\n *\n * @return is-keyword or not-is-keyword node\n */\n private STNode parseIsOrNotIsKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.IS_KEYWORD ||\n token.kind == SyntaxKind.NOT_IS_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.IS_KEYWORD);\n return parseIsOrNotIsKeyword();\n }\n }\n\n /**\n * Parse local type definition statement statement.\n * ocal-type-defn-stmt := [annots] type identifier type-descriptor ;\n *\n * @return local type definition statement statement\n */\n private STNode 
parseLocalTypeDefinitionStatement(STNode annots) {\n startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);\n STNode typeKeyword = parseTypeKeyword();\n STNode typeName = parseTypeName();\n STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\n STNode semicolon = parseSemicolon();\n endContext();\n return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,\n semicolon);\n }\n\n /**\n * Parse statement which is only consists of an action or expression.\n *\n * @param annots Annotations\n * @return Statement node\n */\n private STNode parseExpressionStatement(STNode annots) {\n startContext(ParserRuleContext.EXPRESSION_STATEMENT);\n STNode expression = parseActionOrExpressionInLhs(annots);\n return getExpressionAsStatement(expression);\n }\n\n /**\n * Parse statements that starts with an expression.\n *\n * @return Statement node\n */\n private STNode parseStatementStartWithExpr(STNode annots) {\n startContext(ParserRuleContext.AMBIGUOUS_STMT);\n STNode expr = parseActionOrExpressionInLhs(annots);\n return parseStatementStartWithExprRhs(expr);\n }\n\n /**\n * Parse the component followed by the expression, at the beginning of a statement.\n *\n * @param expression Action or expression in LHS\n * @return Statement node\n */\n private STNode parseStatementStartWithExprRhs(STNode expression) {\n SyntaxKind nextTokenKind = peek().kind;\n if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) {\n return getExpressionAsStatement(expression);\n }\n\n switch (nextTokenKind) {\n case EQUAL_TOKEN:\n switchContext(ParserRuleContext.ASSIGNMENT_STMT);\n return parseAssignmentStmtRhs(expression);\n case IDENTIFIER_TOKEN:\n default:\n \n if (isCompoundAssignment(nextTokenKind)) {\n return parseCompoundAssignmentStmtRhs(expression);\n }\n\n ParserRuleContext context;\n if (isPossibleExpressionStatement(expression)) {\n context = ParserRuleContext.EXPR_STMT_RHS;\n } else {\n 
context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;\n }\n\n recover(peek(), context);\n return parseStatementStartWithExprRhs(expression);\n }\n }\n\n private boolean isPossibleExpressionStatement(STNode expression) {\n switch (expression.kind) {\n case METHOD_CALL:\n case FUNCTION_CALL:\n case CHECK_EXPRESSION:\n case REMOTE_METHOD_CALL_ACTION:\n case CHECK_ACTION:\n case BRACED_ACTION:\n case START_ACTION:\n case TRAP_ACTION:\n case FLUSH_ACTION:\n case ASYNC_SEND_ACTION:\n case SYNC_SEND_ACTION:\n case RECEIVE_ACTION:\n case WAIT_ACTION:\n case QUERY_ACTION:\n case COMMIT_ACTION:\n return true;\n default:\n return false;\n }\n }\n\n private STNode getExpressionAsStatement(STNode expression) {\n switch (expression.kind) {\n case METHOD_CALL:\n case FUNCTION_CALL:\n return parseCallStatement(expression);\n case CHECK_EXPRESSION:\n return parseCheckStatement(expression);\n case REMOTE_METHOD_CALL_ACTION:\n case CHECK_ACTION:\n case BRACED_ACTION:\n case START_ACTION:\n case TRAP_ACTION:\n case FLUSH_ACTION:\n case ASYNC_SEND_ACTION:\n case SYNC_SEND_ACTION:\n case RECEIVE_ACTION:\n case WAIT_ACTION:\n case QUERY_ACTION:\n case COMMIT_ACTION:\n return parseActionStatement(expression);\n default:\n \n STNode semicolon = parseSemicolon();\n endContext();\n expression = getExpression(expression);\n STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,\n expression, semicolon);\n exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);\n return exprStmt;\n }\n }\n\n private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {\n STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);\n STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;\n if (lengthExprs.isEmpty()) {\n return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),\n indexedExpr.closeBracket);\n }\n\n \n STNode 
lengthExpr = lengthExprs.get(0);\n switch (lengthExpr.kind) {\n case SIMPLE_NAME_REFERENCE:\n STSimpleNameReferenceNode nameRef = (STSimpleNameReferenceNode) lengthExpr;\n if (nameRef.name.isMissing()) {\n return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),\n indexedExpr.closeBracket);\n }\n break;\n case ASTERISK_LITERAL:\n case QUALIFIED_NAME_REFERENCE:\n break;\n case NUMERIC_LITERAL:\n SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;\n if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||\n innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {\n break;\n }\n \n default:\n STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(\n indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\n indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);\n lengthExpr = STNodeFactory.createEmptyNode();\n }\n\n return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);\n }\n\n /**\n *

\n * Parse call statement, given the call expression.\n *

\n * \n * call-stmt := call-expr ;\n *
\n * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr\n *
\n *\n * @param expression Call expression associated with the call statement\n * @return Call statement node\n */\n private STNode parseCallStatement(STNode expression) {\n return parseCallStatementOrCheckStatement(expression);\n }\n\n /**\n *

\n * Parse checking statement.\n *

\n * \n * checking-stmt := checking-expr ;\n *
\n * checking-expr := checking-keyword expr ;\n *
\n *\n * @param expression Checking expression associated with the checking statement\n * @return Checking statement node\n */\n private STNode parseCheckStatement(STNode expression) {\n return parseCallStatementOrCheckStatement(expression);\n }\n\n private STNode parseCallStatementOrCheckStatement(STNode expression) {\n STNode semicolon = parseSemicolon();\n endContext();\n return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);\n }\n\n private STNode parseActionStatement(STNode action) {\n STNode semicolon = parseSemicolon();\n endContext();\n return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);\n }\n\n /**\n * Parse remote method call action, given the starting expression.\n *

\n * \n * remote-method-call-action := expression -> method-name ( arg-list )\n *
\n * async-send-action := expression -> peer-worker ;\n *
\n *\n * @param isRhsExpr Is this an RHS action\n * @param expression LHS expression\n * @return\n */\n private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {\n STNode rightArrow = parseRightArrow();\n return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n }\n\n private STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {\n STNode name;\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case FUNCTION_KEYWORD:\n STNode functionKeyword = consume();\n name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword);\n return parseAsyncSendAction(expression, rightArrow, name);\n case IDENTIFIER_TOKEN:\n name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());\n break;\n case CONTINUE_KEYWORD:\n case COMMIT_KEYWORD:\n name = getKeywordAsSimpleNameRef();\n break;\n default:\n STToken token = peek();\n recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS);\n return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n }\n\n return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);\n }\n\n private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case OPEN_PAREN_TOKEN:\n return parseRemoteMethodCallAction(expression, rightArrow, name);\n case SEMICOLON_TOKEN:\n return parseAsyncSendAction(expression, rightArrow, name);\n default:\n recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END);\n return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);\n }\n }\n\n private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {\n return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);\n }\n\n private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {\n STNode openParenToken = 
parseArgListOpenParenthesis();\n STNode arguments = parseArgsList();\n STNode closeParenToken = parseArgListCloseParenthesis();\n return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,\n closeParenToken);\n }\n\n /**\n * Parse right arrow (->) token.\n *\n * @return Parsed node\n */\n private STNode parseRightArrow() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {\n return consume();\n } else {\n recover(nextToken, ParserRuleContext.RIGHT_ARROW);\n return parseRightArrow();\n }\n }\n\n /**\n * Parse map type descriptor.\n * map-type-descriptor := `map` type-parameter\n *\n * @return Parsed node\n */\n private STNode parseMapTypeDescriptor(STNode mapKeyword) {\n STNode typeParameter = parseTypeParameter();\n return STNodeFactory.createMapTypeDescriptorNode(mapKeyword, typeParameter);\n }\n\n /**\n * Parse parameterized type descriptor.\n * parameterized-type-descriptor := `typedesc` [type-parameter]\n *
 | `future` [type-parameter]\n *
 | `xml` [type-parameter]\n *
 | `error` [type-parameter]\n *\n * @return Parsed node\n */\n private STNode parseParameterizedTypeDescriptor(STNode keywordToken) {\n STNode typeParamNode;\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.LT_TOKEN) {\n typeParamNode = parseTypeParameter();\n } else {\n typeParamNode = STNodeFactory.createEmptyNode();\n }\n\n SyntaxKind parameterizedTypeDescKind = getParameterizedTypeDescKind(keywordToken);\n return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeDescKind, keywordToken,\n typeParamNode);\n }\n\n private SyntaxKind getParameterizedTypeDescKind(STNode keywordToken) {\n switch (keywordToken.kind) {\n case TYPEDESC_KEYWORD:\n return SyntaxKind.TYPEDESC_TYPE_DESC;\n case FUTURE_KEYWORD:\n return SyntaxKind.FUTURE_TYPE_DESC;\n case XML_KEYWORD:\n return SyntaxKind.XML_TYPE_DESC;\n case ERROR_KEYWORD:\n default:\n return SyntaxKind.ERROR_TYPE_DESC;\n }\n }\n \n /**\n * Parse < token.\n *\n * @return Parsed node\n */\n private STNode parseGTToken() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.GT_TOKEN) {\n return consume();\n } else {\n recover(nextToken, ParserRuleContext.GT);\n return parseGTToken();\n }\n }\n\n /**\n * Parse > token.\n *\n * @return Parsed node\n */\n private STNode parseLTToken() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.LT_TOKEN) {\n return consume();\n } else {\n recover(nextToken, ParserRuleContext.LT);\n return parseLTToken();\n }\n }\n\n /**\n * Parse nil literal. 
Here nil literal is only referred to ( ).\n *\n * @return Parsed node\n */\n private STNode parseNilLiteral() {\n startContext(ParserRuleContext.NIL_LITERAL);\n STNode openParenthesisToken = parseOpenParenthesis();\n STNode closeParenthesisToken = parseCloseParenthesis();\n endContext();\n return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);\n }\n\n /**\n * Parse annotation declaration, given the qualifier.\n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the listener declaration\n * @param constKeyword Const keyword\n * @return Parsed node\n */\n private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {\n startContext(ParserRuleContext.ANNOTATION_DECL);\n STNode annotationKeyword = parseAnnotationKeyword();\n STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\n endContext();\n return annotDecl;\n }\n\n /**\n * Parse annotation keyword.\n *\n * @return Parsed node\n */\n private STNode parseAnnotationKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.ANNOTATION_KEYWORD);\n return parseAnnotationKeyword();\n }\n }\n\n /**\n * Parse the components that follows after the annotation keyword of a annotation declaration.\n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the constant decl\n * @param constKeyword Const keyword\n * @param annotationKeyword\n * @return Parsed node\n */\n private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,\n STNode annotationKeyword) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case IDENTIFIER_TOKEN:\n return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);\n default:\n if (isTypeStartingToken(nextToken.kind)) {\n break;\n }\n\n recover(peek(), 
ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);\n return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\n }\n\n STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);\n STNode annotTag = parseAnnotationTag();\n return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\n annotTag);\n }\n\n /**\n * Parse annotation tag.\n *

\n * annot-tag := identifier\n *\n * @return\n */\n private STNode parseAnnotationTag() {\n STToken token = peek();\n if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\n return consume();\n } else {\n recover(peek(), ParserRuleContext.ANNOTATION_TAG);\n return parseAnnotationTag();\n }\n }\n\n private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\n STNode annotationKeyword) {\n \n \n STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);\n if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\n STNode annotTag = parseAnnotationTag();\n return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,\n typeDescOrAnnotTag, annotTag);\n }\n\n \n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {\n STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,\n ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);\n STNode annotTag = parseAnnotationTag();\n return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\n annotTag);\n }\n\n STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;\n return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);\n }\n\n /**\n * Parse the component that follows the first identifier in an annotation decl. 
The identifier\n * can be either the type-name (a user defined type) or the annot-tag, where the type-name\n * is not present.\n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the annotation decl\n * @param constKeyword Const keyword\n * @param annotationKeyword Annotation keyword\n * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword\n * @return Parsed node\n */\n private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,\n STNode annotationKeyword, STNode typeDescOrAnnotTag) {\n STToken nextToken = peek();\n STNode typeDesc;\n STNode annotTag;\n switch (nextToken.kind) {\n case IDENTIFIER_TOKEN:\n typeDesc = typeDescOrAnnotTag;\n annotTag = parseAnnotationTag();\n break;\n case SEMICOLON_TOKEN:\n case ON_KEYWORD:\n typeDesc = STNodeFactory.createEmptyNode();\n annotTag = typeDescOrAnnotTag;\n break;\n default:\n recover(peek(), ParserRuleContext.ANNOT_DECL_RHS);\n return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);\n }\n\n return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\n annotTag);\n }\n\n private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,\n STNode annotationKeyword, STNode typeDesc, STNode annotTag) {\n STNode onKeyword;\n STNode attachPoints;\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case SEMICOLON_TOKEN:\n onKeyword = STNodeFactory.createEmptyNode();\n attachPoints = STNodeFactory.createEmptyNodeList();\n break;\n case ON_KEYWORD:\n onKeyword = parseOnKeyword();\n attachPoints = parseAnnotationAttachPoints();\n onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,\n DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\n break;\n default:\n recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS);\n return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, 
annotationKeyword, typeDesc,\n annotTag);\n }\n\n STNode semicolonToken = parseSemicolon();\n return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,\n typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);\n }\n\n /**\n * Parse annotation attach points.\n *

\n * \n * annot-attach-points := annot-attach-point (, annot-attach-point)*\n *

\n * annot-attach-point := dual-attach-point | source-only-attach-point\n *

\n * dual-attach-point := [source] dual-attach-point-ident\n *

\n * dual-attach-point-ident :=\n * type\n * | class\n * | [object|service remote] function\n * | parameter\n * | return\n * | service\n * | [object|record] field\n *

\n * source-only-attach-point := source source-only-attach-point-ident\n *

\n * source-only-attach-point-ident :=\n * annotation\n * | external\n * | var\n * | const\n * | listener\n * | worker\n *
\n *\n * @return Parsed node\n */\n private STNode parseAnnotationAttachPoints() {\n startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);\n List attachPoints = new ArrayList<>();\n\n STToken nextToken = peek();\n if (isEndAnnotAttachPointList(nextToken.kind)) {\n endContext();\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n STNode attachPoint = parseAnnotationAttachPoint();\n attachPoints.add(attachPoint);\n\n \n nextToken = peek();\n STNode leadingComma;\n while (!isEndAnnotAttachPointList(nextToken.kind)) {\n leadingComma = parseAttachPointEnd();\n if (leadingComma == null) {\n break;\n }\n attachPoints.add(leadingComma);\n\n \n attachPoint = parseAnnotationAttachPoint();\n if (attachPoint == null) {\n STToken missingAttachPointIdent = SyntaxErrors.createMissingToken(SyntaxKind.TYPE_KEYWORD);\n STNode identList = STNodeFactory.createNodeList(missingAttachPointIdent);\n attachPoint = STNodeFactory.createAnnotationAttachPointNode(STNodeFactory.createEmptyNode(), identList);\n attachPoint = SyntaxErrors.addDiagnostic(attachPoint,\n DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\n attachPoints.add(attachPoint);\n break;\n }\n\n attachPoints.add(attachPoint);\n nextToken = peek();\n }\n \n if (attachPoint.lastToken().isMissing() && this.tokenReader.peek().kind == SyntaxKind.IDENTIFIER_TOKEN &&\n !this.tokenReader.head().hasTrailingNewline()) {\n \n \n STToken nextNonVirtualToken = this.tokenReader.read();\n updateLastNodeInListWithInvalidNode(attachPoints, nextNonVirtualToken,\n DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextNonVirtualToken.text());\n }\n\n endContext();\n return STNodeFactory.createNodeList(attachPoints);\n }\n\n /**\n * Parse annotation attach point end.\n *\n * @return Parsed node\n */\n private STNode parseAttachPointEnd() {\n switch (peek().kind) {\n case SEMICOLON_TOKEN:\n \n return null;\n case COMMA_TOKEN:\n return consume();\n default:\n recover(peek(), ParserRuleContext.ATTACH_POINT_END);\n return 
parseAttachPointEnd();
        }
    }

    /**
     * Check whether the given token kind terminates the annotation attach-point list.
     *
     * @param tokenKind Token kind to check
     * @return true if the token ends the list (EOF or semicolon); false otherwise
     */
    private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case SEMICOLON_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse annotation attach point.
     *
     * @return Parsed node, or null on EOF
     */
    private STNode parseAnnotationAttachPoint() {
        switch (peek().kind) {
            case EOF_TOKEN:
                return null;

            // Source-only attach-point idents. These fall through to the SOURCE_KEYWORD
            // case: parseSourceKeyword() recovers the missing `source` keyword that the
            // grammar requires before them.
            case ANNOTATION_KEYWORD:
            case EXTERNAL_KEYWORD:
            case VAR_KEYWORD:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case WORKER_KEYWORD:

            case SOURCE_KEYWORD:
                STNode sourceKeyword = parseSourceKeyword();
                return parseAttachPointIdent(sourceKeyword);

            // Dual attach-point idents: the optional `source` keyword is absent here,
            // so an empty node stands in for it.
            case OBJECT_KEYWORD:
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case SERVICE_KEYWORD:
            case FIELD_KEYWORD:
            case RECORD_KEYWORD:
            case CLASS_KEYWORD:
                sourceKeyword = STNodeFactory.createEmptyNode();
                STNode firstIdent = consume();
                return parseDualAttachPointIdent(sourceKeyword, firstIdent);
            default:
                recover(peek(), ParserRuleContext.ATTACH_POINT);
                return parseAnnotationAttachPoint();
        }
    }

    /**
     * Parse source keyword.
     *
     * @return Parsed node
     */
    private STNode parseSourceKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.SOURCE_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.SOURCE_KEYWORD);
            return parseSourceKeyword();
        }
    }

    /**
     * Parse attach point ident given.
     *

\n * \n * source-only-attach-point-ident := annotation | external | var | const | listener | worker\n *

\n * dual-attach-point-ident := type | class | [object|service remote] function | parameter\n * | return | service | [object|record] field\n *
\n *\n * @param sourceKeyword Source keyword\n * @return Parsed node\n */\n private STNode parseAttachPointIdent(STNode sourceKeyword) {\n switch (peek().kind) {\n case ANNOTATION_KEYWORD:\n case EXTERNAL_KEYWORD:\n case VAR_KEYWORD:\n case CONST_KEYWORD:\n case LISTENER_KEYWORD:\n case WORKER_KEYWORD:\n STNode firstIdent = consume();\n STNode identList = STNodeFactory.createNodeList(firstIdent);\n return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n case OBJECT_KEYWORD:\n case RESOURCE_KEYWORD:\n case RECORD_KEYWORD:\n case TYPE_KEYWORD:\n case FUNCTION_KEYWORD:\n case PARAMETER_KEYWORD:\n case RETURN_KEYWORD:\n case SERVICE_KEYWORD:\n case FIELD_KEYWORD:\n case CLASS_KEYWORD:\n firstIdent = consume();\n return parseDualAttachPointIdent(sourceKeyword, firstIdent);\n default:\n recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT);\n return parseAttachPointIdent(sourceKeyword);\n }\n }\n\n /**\n * Parse dual-attach-point ident.\n *\n * @param sourceKeyword Source keyword\n * @param firstIdent first part of the dual attach-point\n * @return Parsed node\n */\n private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {\n STNode secondIdent;\n switch (firstIdent.kind) {\n case OBJECT_KEYWORD:\n secondIdent = parseIdentAfterObjectIdent();\n break;\n case RESOURCE_KEYWORD:\n secondIdent = parseFunctionIdent();\n break;\n case RECORD_KEYWORD:\n secondIdent = parseFieldIdent();\n break;\n case SERVICE_KEYWORD:\n return parseServiceAttachPoint(sourceKeyword, firstIdent);\n case TYPE_KEYWORD:\n case FUNCTION_KEYWORD:\n case PARAMETER_KEYWORD:\n case RETURN_KEYWORD:\n case FIELD_KEYWORD:\n case CLASS_KEYWORD:\n default: \n STNode identList = STNodeFactory.createNodeList(firstIdent);\n return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n }\n\n STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);\n return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, 
identList);\n }\n\n /**\n * Parse remote ident.\n *\n * @return Parsed node\n */\n private STNode parseRemoteIdent() {\n STToken token = peek();\n if (token.kind == SyntaxKind.REMOTE_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.REMOTE_IDENT);\n return parseRemoteIdent();\n }\n }\n\n /**\n * Parse service attach point.\n * service-attach-point := service | service remote function\n *\n * @return Parsed node\n */\n private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {\n STNode identList;\n STToken token = peek();\n switch (token.kind) {\n case REMOTE_KEYWORD:\n STNode secondIdent = parseRemoteIdent();\n STNode thirdIdent = parseFunctionIdent();\n identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);\n return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n case COMMA_TOKEN:\n case SEMICOLON_TOKEN:\n identList = STNodeFactory.createNodeList(firstIdent);\n return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n default:\n recover(token, ParserRuleContext.SERVICE_IDENT_RHS);\n return parseServiceAttachPoint(sourceKeyword, firstIdent);\n }\n }\n\n /**\n * Parse the idents that are supported after object-ident.\n *\n * @return Parsed node\n */\n private STNode parseIdentAfterObjectIdent() {\n STToken token = peek();\n switch (token.kind) {\n case FUNCTION_KEYWORD:\n case FIELD_KEYWORD:\n return consume();\n default:\n recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);\n return parseIdentAfterObjectIdent();\n }\n }\n\n /**\n * Parse function ident.\n *\n * @return Parsed node\n */\n private STNode parseFunctionIdent() {\n STToken token = peek();\n if (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FUNCTION_IDENT);\n return parseFunctionIdent();\n }\n }\n\n /**\n * Parse field ident.\n *\n * @return Parsed node\n */\n private STNode parseFieldIdent() {\n STToken token 
= peek();\n if (token.kind == SyntaxKind.FIELD_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FIELD_IDENT);\n return parseFieldIdent();\n }\n }\n\n /**\n * Parse XML namespace declaration.\n *

\n * xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;\n *
\n * xml-namespace-uri := simple-const-expr\n *
\n * xml-namespace-prefix := identifier\n *
\n *\n * @return\n */\n private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {\n startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);\n STNode xmlnsKeyword = parseXMLNSKeyword();\n\n STNode namespaceUri = parseSimpleConstExpr();\n while (!isValidXMLNameSpaceURI(namespaceUri)) {\n xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,\n DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);\n namespaceUri = parseSimpleConstExpr();\n }\n\n STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);\n endContext();\n return xmlnsDecl;\n }\n\n /**\n * Parse xmlns keyword.\n *\n * @return Parsed node\n */\n private STNode parseXMLNSKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.XMLNS_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.XMLNS_KEYWORD);\n return parseXMLNSKeyword();\n }\n }\n\n private boolean isValidXMLNameSpaceURI(STNode expr) {\n switch (expr.kind) {\n case STRING_LITERAL:\n case QUALIFIED_NAME_REFERENCE:\n case SIMPLE_NAME_REFERENCE:\n return true;\n case IDENTIFIER_TOKEN:\n default:\n return false;\n }\n }\n\n private STNode parseSimpleConstExpr() {\n startContext(ParserRuleContext.CONSTANT_EXPRESSION);\n STNode expr = parseSimpleConstExprInternal();\n endContext();\n return expr;\n }\n\n /**\n * Parse simple constants expr.\n *\n * @return Parsed node\n */\n private STNode parseSimpleConstExprInternal() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case STRING_LITERAL_TOKEN:\n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\n case HEX_FLOATING_POINT_LITERAL_TOKEN:\n case TRUE_KEYWORD:\n case FALSE_KEYWORD:\n case NULL_KEYWORD:\n return parseBasicLiteral();\n case PLUS_TOKEN:\n case MINUS_TOKEN:\n return parseSignedIntOrFloat();\n case OPEN_PAREN_TOKEN:\n return parseNilLiteral();\n default:\n if (isPredeclaredIdentifier(nextToken.kind)) {\n return 
parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                }

                recover(nextToken, ParserRuleContext.CONSTANT_EXPRESSION_START);
                return parseSimpleConstExprInternal();
        }
    }

    /**
     * Parse the portion after the namespace-uri of an XML declaration.
     *
     * @param xmlnsKeyword XMLNS keyword
     * @param namespaceUri Namespace URI
     * @param isModuleVar  Whether this is a module-level XML namespace declaration
     * @return Parsed node
     */
    private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {
        // The `as <prefix>` clause is optional; both parts default to empty nodes.
        STNode asKeyword = STNodeFactory.createEmptyNode();
        STNode namespacePrefix = STNodeFactory.createEmptyNode();

        switch (peek().kind) {
            case AS_KEYWORD:
                asKeyword = parseAsKeyword();
                namespacePrefix = parseNamespacePrefix();
                break;
            case SEMICOLON_TOKEN:
                break;
            default:
                recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL);
                return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
        }

        STNode semicolon = parseSemicolon();
        if (isModuleVar) {
            return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                    namespacePrefix, semicolon);
        }
        return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,
                semicolon);
    }

    /**
     * Parse XML namespace prefix.
     *
     * @return Parsed node
     */
    private STNode parseNamespacePrefix() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);
            return parseNamespacePrefix();
        }
    }

    /**
     * Parse named worker declaration.
     *

\n * named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }\n * \n *\n * @param annots Annotations attached to the worker decl\n * @param qualifiers Preceding transactional keyword in a list\n * @return Parsed node\n */\n private STNode parseNamedWorkerDeclaration(STNode annots, List qualifiers) {\n startContext(ParserRuleContext.NAMED_WORKER_DECL);\n STNode transactionalKeyword = getTransactionalKeyword(qualifiers);\n STNode workerKeyword = parseWorkerKeyword();\n STNode workerName = parseWorkerName();\n STNode returnTypeDesc = parseReturnTypeDescriptor();\n STNode workerBody = parseBlockNode();\n endContext();\n return STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword, workerName,\n returnTypeDesc, workerBody);\n }\n\n private STNode getTransactionalKeyword(List qualifierList) {\n \n List validatedList = new ArrayList<>();\n\n for (int i = 0; i < qualifierList.size(); i++) {\n STNode qualifier = qualifierList.get(i);\n int nextIndex = i + 1;\n\n if (isSyntaxKindInList(validatedList, qualifier.kind)) {\n updateLastNodeInListWithInvalidNode(validatedList, qualifier,\n DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());\n } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\n validatedList.add(qualifier);\n } else if (qualifierList.size() == nextIndex) {\n addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,\n ((STToken) qualifier).text());\n } else {\n updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,\n DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());\n }\n }\n\n STNode transactionalKeyword;\n if (validatedList.isEmpty()) {\n transactionalKeyword = STNodeFactory.createEmptyNode();\n } else {\n transactionalKeyword = validatedList.get(0);\n }\n return transactionalKeyword;\n }\n\n private STNode parseReturnTypeDescriptor() {\n \n STToken token = 
peek();\n if (token.kind != SyntaxKind.RETURNS_KEYWORD) {\n return STNodeFactory.createEmptyNode();\n }\n\n STNode returnsKeyword = consume();\n STNode annot = parseOptionalAnnotations();\n STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\n return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n }\n\n /**\n * Parse worker keyword.\n *\n * @return Parsed node\n */\n private STNode parseWorkerKeyword() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {\n return consume();\n } else {\n recover(peek(), ParserRuleContext.WORKER_KEYWORD);\n return parseWorkerKeyword();\n }\n }\n\n /**\n * Parse worker name.\n *

\n * worker-name := identifier\n *\n * @return Parsed node\n */\n private STNode parseWorkerName() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\n return consume();\n } else {\n recover(peek(), ParserRuleContext.WORKER_NAME);\n return parseWorkerName();\n }\n }\n\n /**\n * Parse lock statement.\n * lock-stmt := lock block-stmt [on-fail-clause]\n *\n * @return Lock statement\n */\n private STNode parseLockStatement() {\n startContext(ParserRuleContext.LOCK_STMT);\n STNode lockKeyword = parseLockKeyword();\n STNode blockStatement = parseBlockNode();\n endContext();\n STNode onFailClause = parseOptionalOnFailClause();\n return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause);\n }\n\n /**\n * Parse lock-keyword.\n *\n * @return lock-keyword node\n */\n private STNode parseLockKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.LOCK_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.LOCK_KEYWORD);\n return parseLockKeyword();\n }\n }\n\n /**\n * Parse union type descriptor.\n * union-type-descriptor := type-descriptor | type-descriptor\n *\n * @param leftTypeDesc Type desc in the LHS os the union type desc.\n * @param context Current context.\n * @return parsed union type desc node\n */\n private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\n boolean isTypedBindingPattern) {\n \n STNode pipeToken = consume();\n STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,\n TypePrecedence.UNION);\n return mergeTypesWithUnion(leftTypeDesc, pipeToken, rightTypeDesc);\n }\n\n /**\n * Creates a union type descriptor after validating lhs and rhs types.\n *

\n * Note: Since type precedence and associativity are not taken into account here,\n * this method should not be called directly when types are unknown.\n *
\n * Call {@link \n *\n * @param leftTypeDesc lhs type\n * @param pipeToken pipe token\n * @param rightTypeDesc rhs type\n * @return a UnionTypeDescriptorNode\n */\n private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {\n leftTypeDesc = validateForUsageOfVar(leftTypeDesc);\n rightTypeDesc = validateForUsageOfVar(rightTypeDesc);\n return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);\n }\n\n /**\n * Parse pipe token.\n *\n * @return parsed pipe token node\n */\n private STNode parsePipeToken() {\n STToken token = peek();\n if (token.kind == SyntaxKind.PIPE_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.PIPE);\n return parsePipeToken();\n }\n }\n\n private boolean isTypeStartingToken(SyntaxKind nodeKind) {\n return isTypeStartingToken(nodeKind, getNextNextToken());\n }\n \n private static boolean isTypeStartingToken(SyntaxKind nextTokenKind, STToken nextNextToken) {\n switch (nextTokenKind) {\n case IDENTIFIER_TOKEN:\n case SERVICE_KEYWORD:\n case RECORD_KEYWORD:\n case OBJECT_KEYWORD:\n case ABSTRACT_KEYWORD:\n case CLIENT_KEYWORD:\n case OPEN_PAREN_TOKEN: \n case MAP_KEYWORD: \n case STREAM_KEYWORD: \n case TABLE_KEYWORD: \n case FUNCTION_KEYWORD:\n case OPEN_BRACKET_TOKEN:\n case DISTINCT_KEYWORD:\n case ISOLATED_KEYWORD:\n case TRANSACTIONAL_KEYWORD:\n case TRANSACTION_KEYWORD:\n return true;\n default:\n if (isParameterizedTypeToken(nextTokenKind)) {\n return true;\n }\n \n if (isSingletonTypeDescStart(nextTokenKind, nextNextToken)) {\n return true;\n }\n return isSimpleType(nextTokenKind);\n }\n }\n\n /**\n * Check if the token kind is a type descriptor in terminal expression.\n *

\n * simple-type-in-expr :=\n * boolean | int | byte | float | decimal | string | handle | json | anydata | any | never\n *\n * @param nodeKind token kind to check\n * @return true for simple type token in expression. false otherwise.\n */\n private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {\n switch (nodeKind) {\n case VAR_KEYWORD:\n case READONLY_KEYWORD:\n return false;\n default:\n return isSimpleType(nodeKind);\n }\n }\n\n static boolean isSimpleType(SyntaxKind nodeKind) {\n switch (nodeKind) {\n case INT_KEYWORD:\n case FLOAT_KEYWORD:\n case DECIMAL_KEYWORD:\n case BOOLEAN_KEYWORD:\n case STRING_KEYWORD:\n case BYTE_KEYWORD:\n case JSON_KEYWORD:\n case HANDLE_KEYWORD:\n case ANY_KEYWORD:\n case ANYDATA_KEYWORD:\n case NEVER_KEYWORD:\n case VAR_KEYWORD:\n case READONLY_KEYWORD:\n return true;\n default:\n return false;\n }\n }\n\n static boolean isPredeclaredPrefix(SyntaxKind nodeKind) {\n switch (nodeKind) {\n case BOOLEAN_KEYWORD:\n case DECIMAL_KEYWORD:\n case ERROR_KEYWORD:\n case FLOAT_KEYWORD:\n case FUTURE_KEYWORD:\n case INT_KEYWORD:\n case MAP_KEYWORD:\n case OBJECT_KEYWORD:\n case STREAM_KEYWORD:\n case STRING_KEYWORD:\n case TABLE_KEYWORD:\n case TRANSACTION_KEYWORD:\n case TYPEDESC_KEYWORD:\n case XML_KEYWORD:\n return true;\n default:\n return false;\n }\n }\n\n private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {\n return isPredeclaredPrefix(nodeKind) && getNextNextToken().kind == SyntaxKind.COLON_TOKEN;\n }\n\n private static SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) {\n switch (typeKeyword) {\n case INT_KEYWORD:\n return SyntaxKind.INT_TYPE_DESC;\n case FLOAT_KEYWORD:\n return SyntaxKind.FLOAT_TYPE_DESC;\n case DECIMAL_KEYWORD:\n return SyntaxKind.DECIMAL_TYPE_DESC;\n case BOOLEAN_KEYWORD:\n return SyntaxKind.BOOLEAN_TYPE_DESC;\n case STRING_KEYWORD:\n return SyntaxKind.STRING_TYPE_DESC;\n case BYTE_KEYWORD:\n return SyntaxKind.BYTE_TYPE_DESC;\n case JSON_KEYWORD:\n return 
SyntaxKind.JSON_TYPE_DESC;\n case HANDLE_KEYWORD:\n return SyntaxKind.HANDLE_TYPE_DESC;\n case ANY_KEYWORD:\n return SyntaxKind.ANY_TYPE_DESC;\n case ANYDATA_KEYWORD:\n return SyntaxKind.ANYDATA_TYPE_DESC;\n case NEVER_KEYWORD:\n return SyntaxKind.NEVER_TYPE_DESC;\n case VAR_KEYWORD:\n return SyntaxKind.VAR_TYPE_DESC;\n case READONLY_KEYWORD:\n return SyntaxKind.READONLY_TYPE_DESC;\n default:\n assert false : typeKeyword + \" is not a built-in type\";\n return SyntaxKind.TYPE_REFERENCE;\n }\n }\n\n /**\n * Parse fork-keyword.\n *\n * @return Fork-keyword node\n */\n private STNode parseForkKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.FORK_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FORK_KEYWORD);\n return parseForkKeyword();\n }\n }\n\n /**\n * Parse fork statement.\n * fork-stmt := fork { named-worker-decl+ }\n *\n * @return Fork statement\n */\n private STNode parseForkStatement() {\n startContext(ParserRuleContext.FORK_STMT);\n STNode forkKeyword = parseForkKeyword();\n STNode openBrace = parseOpenBrace();\n\n \n ArrayList workers = new ArrayList<>();\n while (!isEndOfStatements()) {\n STNode stmt = parseStatement();\n if (stmt == null) {\n break;\n }\n\n if (validateStatement(stmt)) {\n continue;\n }\n\n switch (stmt.kind) {\n case NAMED_WORKER_DECLARATION:\n workers.add(stmt);\n break;\n default:\n \n \n \n if (workers.isEmpty()) {\n openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,\n DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n } else {\n updateLastNodeInListWithInvalidNode(workers, stmt,\n DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n }\n }\n }\n\n STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);\n STNode closeBrace = parseCloseBrace();\n endContext();\n\n STNode forkStmt =\n STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);\n if 
(isNodeListEmpty(namedWorkerDeclarations)) {\n return SyntaxErrors.addDiagnostic(forkStmt,\n DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);\n }\n\n return forkStmt;\n }\n\n /**\n * Parse trap expression.\n *

\n * \n * trap-expr := trap expression\n * \n *\n * @param allowActions Allow actions\n * @param isRhsExpr Whether this is a RHS expression or not\n * @return Trap expression node\n */\n private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\n STNode trapKeyword = parseTrapKeyword();\n STNode expr = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr);\n if (isAction(expr)) {\n return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);\n }\n\n return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);\n }\n\n /**\n * Parse trap-keyword.\n *\n * @return Trap-keyword node\n */\n private STNode parseTrapKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.TRAP_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.TRAP_KEYWORD);\n return parseTrapKeyword();\n }\n }\n\n /**\n * Parse list constructor expression.\n *

\n * \n * list-constructor-expr := [ [ list-members ] ]\n *
\n * list-members := list-member (, list-member)*\n *
\n * list-member := expression | spread-member\n *
\n * spread-member := ... expression\n *
\n *\n * @return Parsed node\n */\n private STNode parseListConstructorExpr() {\n startContext(ParserRuleContext.LIST_CONSTRUCTOR);\n STNode openBracket = parseOpenBracket();\n STNode listMembers = parseListMembers();\n STNode closeBracket = parseCloseBracket();\n endContext();\n return STNodeFactory.createListConstructorExpressionNode(openBracket, listMembers, closeBracket);\n }\n\n /**\n * Parse optional list member list.\n *\n * @return Parsed node\n */\n private STNode parseListMembers() {\n List listMembers = new ArrayList<>();\n if (isEndOfListConstructor(peek().kind)) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n STNode listMember = parseListMember();\n listMembers.add(listMember);\n return parseListMembers(listMembers);\n }\n\n private STNode parseListMembers(List listMembers) {\n \n STNode listConstructorMemberEnd;\n while (!isEndOfListConstructor(peek().kind)) {\n listConstructorMemberEnd = parseListConstructorMemberEnd();\n if (listConstructorMemberEnd == null) {\n break;\n }\n listMembers.add(listConstructorMemberEnd);\n\n STNode listMember = parseListMember();\n listMembers.add(listMember);\n }\n\n return STNodeFactory.createNodeList(listMembers);\n }\n\n /**\n * Parse list member.\n *

\n * \n * list-member := expression | spread-member\n * \n *\n * @return Parsed node\n */\n private STNode parseListMember() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {\n return parseSpreadMember();\n } else {\n return parseExpression();\n }\n }\n\n /**\n * Parse spread member.\n *

\n * \n * spread-member := ... expression\n * \n *\n * @return Parsed node\n */\n private STNode parseSpreadMember() {\n STNode ellipsis = parseEllipsis();\n STNode expr = parseExpression();\n return STNodeFactory.createSpreadMemberNode(ellipsis, expr);\n }\n\n private boolean isEndOfListConstructor(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case EOF_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n private STNode parseListConstructorMemberEnd() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case COMMA_TOKEN:\n return consume();\n case CLOSE_BRACKET_TOKEN:\n return null;\n default:\n recover(nextToken, ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);\n return parseListConstructorMemberEnd();\n }\n }\n\n /**\n * Parse foreach statement.\n * foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]\n *\n * @return foreach statement\n */\n private STNode parseForEachStatement() {\n startContext(ParserRuleContext.FOREACH_STMT);\n STNode forEachKeyword = parseForEachKeyword();\n STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);\n STNode inKeyword = parseInKeyword();\n STNode actionOrExpr = parseActionOrExpression();\n STNode blockStatement = parseBlockNode();\n endContext();\n STNode onFailClause = parseOptionalOnFailClause();\n return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,\n blockStatement, onFailClause);\n }\n\n /**\n * Parse foreach-keyword.\n *\n * @return ForEach-keyword node\n */\n private STNode parseForEachKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.FOREACH_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FOREACH_KEYWORD);\n return parseForEachKeyword();\n }\n }\n\n /**\n * Parse in-keyword.\n *\n * @return In-keyword node\n */\n private STNode parseInKeyword() {\n STToken token = peek();\n if (token.kind == 
SyntaxKind.IN_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.IN_KEYWORD);\n return parseInKeyword();\n }\n }\n\n /**\n * Parse type cast expression.\n *

\n * \n * type-cast-expr := < type-cast-param > expression\n *
\n * type-cast-param := [annots] type-descriptor | annots\n *
\n *\n * @return Parsed node\n */\n private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\n startContext(ParserRuleContext.TYPE_CAST);\n STNode ltToken = parseLTToken();\n return parseTypeCastExpr(ltToken, isRhsExpr, allowActions, isInConditionalExpr);\n }\n\n private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions,\n boolean isInConditionalExpr) {\n STNode typeCastParam = parseTypeCastParam();\n STNode gtToken = parseGTToken();\n endContext();\n\n \n \n STNode expression =\n parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\n return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);\n }\n\n private STNode parseTypeCastParam() {\n STNode annot;\n STNode type;\n STToken token = peek();\n\n switch (token.kind) {\n case AT_TOKEN:\n annot = parseOptionalAnnotations();\n token = peek();\n if (isTypeStartingToken(token.kind)) {\n type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n } else {\n type = STNodeFactory.createEmptyNode();\n }\n break;\n default:\n annot = STNodeFactory.createEmptyNode();\n type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n break;\n }\n\n return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);\n }\n\n /**\n * Parse table constructor expression.\n *

\n * \n * table-constructor-expr-rhs := [ [row-list] ]\n * \n *\n * @param tableKeyword tableKeyword that precedes this rhs\n * @param keySpecifier keySpecifier that precedes this rhs\n * @return Parsed node\n */\n private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {\n switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);\n STNode openBracket = parseOpenBracket();\n STNode rowList = parseRowList();\n STNode closeBracket = parseCloseBracket();\n return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,\n closeBracket);\n }\n\n /**\n * Parse table-keyword.\n *\n * @return Table-keyword node\n */\n private STNode parseTableKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.TABLE_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.TABLE_KEYWORD);\n return parseTableKeyword();\n }\n }\n\n /**\n * Parse table rows.\n *

\n * row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]\n *\n * @return Parsed node\n */\n private STNode parseRowList() {\n STToken nextToken = peek();\n \n if (isEndOfTableRowList(nextToken.kind)) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n List mappings = new ArrayList<>();\n STNode mapExpr = parseMappingConstructorExpr();\n mappings.add(mapExpr);\n\n \n nextToken = peek();\n STNode rowEnd;\n while (!isEndOfTableRowList(nextToken.kind)) {\n rowEnd = parseTableRowEnd();\n if (rowEnd == null) {\n break;\n }\n\n mappings.add(rowEnd);\n mapExpr = parseMappingConstructorExpr();\n mappings.add(mapExpr);\n nextToken = peek();\n }\n\n return STNodeFactory.createNodeList(mappings);\n }\n\n private boolean isEndOfTableRowList(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case EOF_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n return true;\n case COMMA_TOKEN:\n case OPEN_BRACE_TOKEN:\n return false;\n default:\n return isEndOfMappingConstructor(tokenKind);\n }\n }\n\n private STNode parseTableRowEnd() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_BRACKET_TOKEN:\n case EOF_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.TABLE_ROW_END);\n return parseTableRowEnd();\n }\n }\n\n /**\n * Parse key specifier.\n *

\n * key-specifier := key ( [ field-name (, field-name)* ] )\n *\n * @return Parsed node\n */\n private STNode parseKeySpecifier() {\n startContext(ParserRuleContext.KEY_SPECIFIER);\n STNode keyKeyword = parseKeyKeyword();\n STNode openParen = parseOpenParenthesis();\n STNode fieldNames = parseFieldNames();\n STNode closeParen = parseCloseParenthesis();\n endContext();\n return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);\n }\n\n /**\n * Parse key-keyword.\n *\n * @return Key-keyword node\n */\n private STNode parseKeyKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.KEY_KEYWORD) {\n return consume();\n }\n\n if (isKeyKeyword(token)) {\n \n return getKeyKeyword(consume());\n }\n\n recover(token, ParserRuleContext.KEY_KEYWORD);\n return parseKeyKeyword();\n }\n\n static boolean isKeyKeyword(STToken token) {\n return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());\n }\n\n private STNode getKeyKeyword(STToken token) {\n return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),\n token.diagnostics());\n }\n\n private STToken getUnderscoreKeyword(STToken token) {\n return STNodeFactory.createToken(SyntaxKind.UNDERSCORE_KEYWORD, token.leadingMinutiae(),\n token.trailingMinutiae(), token.diagnostics());\n }\n\n /**\n * Parse field names.\n *

\n * field-name-list := [ field-name (, field-name)* ]\n *\n * @return Parsed node\n */\n private STNode parseFieldNames() {\n STToken nextToken = peek();\n \n if (isEndOfFieldNamesList(nextToken.kind)) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n List fieldNames = new ArrayList<>();\n STNode fieldName = parseVariableName();\n fieldNames.add(fieldName);\n\n \n nextToken = peek();\n STNode leadingComma;\n while (!isEndOfFieldNamesList(nextToken.kind)) {\n leadingComma = parseComma();\n fieldNames.add(leadingComma);\n fieldName = parseVariableName();\n fieldNames.add(fieldName);\n nextToken = peek();\n }\n\n return STNodeFactory.createNodeList(fieldNames);\n }\n\n private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case COMMA_TOKEN:\n case IDENTIFIER_TOKEN:\n return false;\n default:\n return true;\n }\n }\n\n /**\n * Parse error-keyword.\n *\n * @return Parsed error-keyword node\n */\n private STNode parseErrorKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.ERROR_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.ERROR_KEYWORD);\n return parseErrorKeyword();\n }\n }\n\n /**\n * Parse stream type descriptor.\n *

\n * stream-type-descriptor := stream [stream-type-parameters]\n *
\n * stream-type-parameters := < type-descriptor [, type-descriptor]>\n *

\n *\n * @return Parsed stream type descriptor node\n */\n private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {\n STNode streamTypeParamsNode;\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.LT_TOKEN) {\n streamTypeParamsNode = parseStreamTypeParamsNode();\n } else {\n streamTypeParamsNode = STNodeFactory.createEmptyNode();\n }\n return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);\n }\n\n /**\n * Parse stream type params node.\n *

\n * stream-type-parameters := < type-descriptor [, type-descriptor]>\n *

\n *\n * @return Parsed stream type params node\n */\n private STNode parseStreamTypeParamsNode() {\n STNode ltToken = parseLTToken();\n startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\n STNode leftTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\n STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\n endContext();\n return streamTypedesc;\n }\n\n private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {\n STNode commaToken, rightTypeDescNode, gtToken;\n switch (peek().kind) {\n case COMMA_TOKEN:\n commaToken = parseComma();\n rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\n break;\n case GT_TOKEN:\n commaToken = STNodeFactory.createEmptyNode();\n rightTypeDescNode = STNodeFactory.createEmptyNode();\n break;\n default:\n recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS);\n return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\n }\n\n gtToken = parseGTToken();\n return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,\n gtToken);\n }\n\n /**\n * Parse stream-keyword.\n *\n * @return Parsed stream-keyword node\n */\n private STNode parseStreamKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.STREAM_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.STREAM_KEYWORD);\n return parseStreamKeyword();\n }\n }\n\n /**\n * Parse let expression.\n *

\n * \n * let-expr := let let-var-decl [, let-var-decl]* in expression\n * \n *\n * @return Parsed node\n */\n private STNode parseLetExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\n STNode letKeyword = parseLetKeyword();\n STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);\n STNode inKeyword = parseInKeyword();\n\n \n letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\n DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false, isInConditionalExpr);\n return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);\n }\n\n /**\n * Parse let-keyword.\n *\n * @return Let-keyword node\n */\n private STNode parseLetKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.LET_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.LET_KEYWORD);\n return parseLetKeyword();\n }\n }\n\n /**\n * Parse let variable declarations.\n *

\n * let-var-decl-list := let-var-decl [, let-var-decl]*\n *\n * @return Parsed node\n */\n private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {\n startContext(context);\n List varDecls = new ArrayList<>();\n STToken nextToken = peek();\n\n if (isEndOfLetVarDeclarations(nextToken.kind, getNextNextToken())) {\n endContext();\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n STNode varDec = parseLetVarDecl(isRhsExpr);\n varDecls.add(varDec);\n\n \n nextToken = peek();\n STNode leadingComma;\n while (!isEndOfLetVarDeclarations(nextToken.kind, getNextNextToken())) {\n leadingComma = parseComma();\n varDecls.add(leadingComma);\n varDec = parseLetVarDecl(isRhsExpr);\n varDecls.add(varDec);\n nextToken = peek();\n }\n\n endContext();\n return STNodeFactory.createNodeList(varDecls);\n }\n\n static boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind, STToken nextNextToken) {\n switch (tokenKind) {\n case COMMA_TOKEN:\n case AT_TOKEN:\n return false;\n case IN_KEYWORD:\n return true;\n default:\n return !isTypeStartingToken(tokenKind, nextNextToken);\n }\n }\n\n /**\n * Parse let variable declaration.\n *

\n * let-var-decl := [annots] typed-binding-pattern = expression\n *\n * @return Parsed node\n */\n private STNode parseLetVarDecl(boolean isRhsExpr) {\n STNode annot = parseOptionalAnnotations();\n STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);\n STNode assign = parseAssignOp();\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);\n return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);\n }\n\n /**\n * Parse raw backtick string template expression.\n *

\n * BacktickString := `expression`\n *\n * @return Template expression node\n */\n private STNode parseTemplateExpression() {\n STNode type = STNodeFactory.createEmptyNode();\n STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\n STNode content = parseTemplateContent();\n STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\n return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,\n content, endingBackTick);\n }\n\n private STNode parseTemplateContent() {\n List items = new ArrayList<>();\n STToken nextToken = peek();\n while (!isEndOfBacktickContent(nextToken.kind)) {\n STNode contentItem = parseTemplateItem();\n items.add(contentItem);\n nextToken = peek();\n }\n return STNodeFactory.createNodeList(items);\n }\n\n private boolean isEndOfBacktickContent(SyntaxKind kind) {\n switch (kind) {\n case EOF_TOKEN:\n case BACKTICK_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n private STNode parseTemplateItem() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\n return parseInterpolation();\n }\n\n \n return consume();\n }\n\n /**\n * Parse string template expression.\n *

\n * string-template-expr := string ` expression `\n *\n * @return String template expression node\n */\n private STNode parseStringTemplateExpression() {\n STNode type = parseStringKeyword();\n STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\n STNode content = parseTemplateContent();\n STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\n return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,\n content, endingBackTick);\n }\n\n /**\n * Parse string keyword.\n *\n * @return string keyword node\n */\n private STNode parseStringKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.STRING_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.STRING_KEYWORD);\n return parseStringKeyword();\n }\n }\n\n /**\n * Parse XML template expression.\n *

\n * xml-template-expr := xml BacktickString\n *\n * @return XML template expression\n */\n private STNode parseXMLTemplateExpression() {\n STNode xmlKeyword = parseXMLKeyword();\n STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\n\n STNode content;\n STNode endingBackTick;\n if (startingBackTick.isMissing()) {\n \n startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);\n endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);\n content = STNodeFactory.createEmptyNodeList();\n STNode templateExpr = STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION,\n xmlKeyword, startingBackTick, content, endingBackTick);\n templateExpr = SyntaxErrors.addDiagnostic(templateExpr, DiagnosticErrorCode.ERROR_MISSING_BACKTICK_STRING);\n return templateExpr;\n }\n\n content = parseTemplateContentAsXML();\n endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\n return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,\n startingBackTick, content, endingBackTick);\n }\n\n /**\n * Parse xml keyword.\n *\n * @return xml keyword node\n */\n private STNode parseXMLKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.XML_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.XML_KEYWORD);\n return parseXMLKeyword();\n }\n }\n\n /**\n * Parse the content of the template string as XML. This method first read the\n * input in the same way as the raw-backtick-template (BacktickString). 
Then\n * it parses the content as XML.\n *\n * @return XML node\n */\n private STNode parseTemplateContentAsXML() {\n \n \n \n ArrayDeque expressions = new ArrayDeque<>();\n StringBuilder xmlStringBuilder = new StringBuilder();\n STToken nextToken = peek();\n while (!isEndOfBacktickContent(nextToken.kind)) {\n STNode contentItem = parseTemplateItem();\n if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {\n xmlStringBuilder.append(((STToken) contentItem).text());\n } else {\n xmlStringBuilder.append(\"${}\");\n expressions.add(contentItem);\n }\n nextToken = peek();\n }\n\n CharReader charReader = CharReader.from(xmlStringBuilder.toString());\n AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));\n XMLParser xmlParser = new XMLParser(tokenReader, expressions);\n return xmlParser.parse();\n }\n\n /**\n * Parse interpolation of a back-tick string.\n *

\n * \n * interpolation := ${ expression }\n * \n *\n * @return Interpolation node\n */\n private STNode parseInterpolation() {\n startContext(ParserRuleContext.INTERPOLATION);\n STNode interpolStart = parseInterpolationStart();\n STNode expr = parseExpression();\n\n \n while (!isEndOfInterpolation()) {\n STToken nextToken = consume();\n expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,\n DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());\n }\n\n STNode closeBrace = parseCloseBrace();\n endContext();\n return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);\n }\n\n private boolean isEndOfInterpolation() {\n SyntaxKind nextTokenKind = peek().kind;\n switch (nextTokenKind) {\n case EOF_TOKEN:\n case BACKTICK_TOKEN:\n return true;\n default:\n \n ParserMode currentLexerMode = this.tokenReader.getCurrentMode();\n return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN && currentLexerMode != ParserMode.INTERPOLATION &&\n currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;\n }\n }\n\n /**\n * Parse interpolation start token.\n *

\n * interpolation-start := ${\n *\n * @return Interpolation start token\n */\n private STNode parseInterpolationStart() {\n STToken token = peek();\n if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);\n return parseInterpolationStart();\n }\n }\n\n /**\n * Parse back-tick token.\n *\n * @return Back-tick token\n */\n private STNode parseBacktickToken(ParserRuleContext ctx) {\n STToken token = peek();\n if (token.kind == SyntaxKind.BACKTICK_TOKEN) {\n return consume();\n } else {\n recover(token, ctx);\n return parseBacktickToken(ctx);\n }\n }\n\n /**\n * Parse table type descriptor.\n *

\n * table-type-descriptor := table row-type-parameter [key-constraint]\n * row-type-parameter := type-parameter\n * key-constraint := key-specifier | key-type-constraint\n * key-specifier := key ( [ field-name (, field-name)* ] )\n * key-type-constraint := key type-parameter\n *

\n *\n * @return Parsed table type desc node.\n */\n private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {\n STNode rowTypeParameterNode = parseRowTypeParameter();\n STNode keyConstraintNode;\n STToken nextToken = peek();\n if (isKeyKeyword(nextToken)) {\n STNode keyKeywordToken = getKeyKeyword(consume());\n keyConstraintNode = parseKeyConstraint(keyKeywordToken);\n } else {\n keyConstraintNode = STNodeFactory.createEmptyNode();\n }\n return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);\n }\n\n /**\n * Parse row type parameter node.\n *

\n * row-type-parameter := type-parameter\n *

\n *\n * @return Parsed node.\n */\n private STNode parseRowTypeParameter() {\n startContext(ParserRuleContext.ROW_TYPE_PARAM);\n STNode rowTypeParameterNode = parseTypeParameter();\n endContext();\n return rowTypeParameterNode;\n }\n\n /**\n * Parse type parameter node.\n *

\n * type-parameter := < type-descriptor >\n *

\n *\n * @return Parsed node\n */\n private STNode parseTypeParameter() {\n STNode ltToken = parseLTToken();\n STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n STNode gtToken = parseGTToken();\n return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);\n }\n\n /**\n * Parse key constraint.\n *

\n * key-constraint := key-specifier | key-type-constraint\n *

\n *\n * @return Parsed node.\n */\n private STNode parseKeyConstraint(STNode keyKeywordToken) {\n switch (peek().kind) {\n case OPEN_PAREN_TOKEN:\n return parseKeySpecifier(keyKeywordToken);\n case LT_TOKEN:\n return parseKeyTypeConstraint(keyKeywordToken);\n default:\n recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS);\n return parseKeyConstraint(keyKeywordToken);\n }\n }\n\n /**\n * Parse key specifier given parsed key keyword token.\n *

\n * key-specifier := key ( [ field-name (, field-name)* ] )\n *\n * @return Parsed node\n */\n private STNode parseKeySpecifier(STNode keyKeywordToken) {\n startContext(ParserRuleContext.KEY_SPECIFIER);\n STNode openParenToken = parseOpenParenthesis();\n STNode fieldNamesNode = parseFieldNames();\n STNode closeParenToken = parseCloseParenthesis();\n endContext();\n return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);\n }\n\n /**\n * Parse key type constraint.\n *

\n * key-type-constraint := key type-parameter\n *

\n *\n * @return Parsed node\n */\n private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {\n STNode typeParameterNode = parseTypeParameter();\n return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);\n }\n\n /**\n * Parse function type descriptor.\n *

\n * \n * function-type-descriptor := function-quals function function-signature \n *
 | [isolated] function\n *
\n * function-quals := (transactional | isolated)*\n *
\n *\n * @param qualifiers Preceding type descriptor qualifiers\n * @return Function type descriptor node\n */\n private STNode parseFunctionTypeDesc(List qualifiers) {\n startContext(ParserRuleContext.FUNC_TYPE_DESC);\n STNode functionKeyword = parseFunctionKeyword();\n\n boolean hasFuncSignature = false;\n STNode signature = STNodeFactory.createEmptyNode();\n if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN ||\n isSyntaxKindInList(qualifiers, SyntaxKind.TRANSACTIONAL_KEYWORD)) {\n signature = parseFuncSignature(true);\n hasFuncSignature = true;\n }\n\n \n STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, hasFuncSignature);\n STNode qualifierList = nodes[0];\n functionKeyword = nodes[1];\n\n endContext();\n return STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);\n }\n \n private STNode getLastNodeInList(List nodeList) {\n return nodeList.get(nodeList.size() - 1);\n }\n\n private STNode[] createFuncTypeQualNodeList(List qualifierList, STNode functionKeyword,\n boolean hasFuncSignature) {\n \n List validatedList = new ArrayList<>();\n\n for (int i = 0; i < qualifierList.size(); i++) {\n STNode qualifier = qualifierList.get(i);\n int nextIndex = i + 1;\n\n if (isSyntaxKindInList(validatedList, qualifier.kind)) {\n updateLastNodeInListWithInvalidNode(validatedList, qualifier,\n DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());\n } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {\n validatedList.add(qualifier);\n } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {\n validatedList.add(qualifier);\n } else if (qualifierList.size() == nextIndex) {\n functionKeyword = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(functionKeyword, qualifier,\n DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());\n } else {\n updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,\n 
DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());\n }\n }\n\n STNode nodeList = STNodeFactory.createNodeList(validatedList);\n return new STNode[]{ nodeList, functionKeyword };\n }\n\n private boolean isRegularFuncQual(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case ISOLATED_KEYWORD:\n case TRANSACTIONAL_KEYWORD:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse explicit anonymous function expression.\n *

\n * explicit-anonymous-function-expr :=\n * [annots] (isolated| transactional) function function-signature anon-func-body\n *\n * @param annots Annotations.\n * @param qualifiers Function qualifiers\n * @param isRhsExpr Is expression in rhs context\n * @return Anonymous function expression node\n */\n private STNode parseExplicitFunctionExpression(STNode annots, List qualifiers, boolean isRhsExpr) {\n startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\n STNode funcKeyword = parseFunctionKeyword();\n\n \n STNode[] nodes = createFuncTypeQualNodeList(qualifiers, funcKeyword, true);\n STNode qualifierList = nodes[0];\n funcKeyword = nodes[1];\n\n STNode funcSignature = parseFuncSignature(false);\n \n STNode funcBody = parseAnonFuncBody(isRhsExpr);\n return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword,\n funcSignature, funcBody);\n }\n\n /**\n * Parse anonymous function body.\n *

\n * anon-func-body := block-function-body | expr-function-body\n *\n * @param isRhsExpr Is expression in rhs context\n * @return Anon function body node\n */\n private STNode parseAnonFuncBody(boolean isRhsExpr) {\n switch (peek().kind) {\n case OPEN_BRACE_TOKEN:\n case EOF_TOKEN:\n STNode body = parseFunctionBodyBlock(true);\n endContext();\n return body;\n case RIGHT_DOUBLE_ARROW_TOKEN:\n \n \n endContext();\n return parseExpressionFuncBody(true, isRhsExpr);\n default:\n recover(peek(), ParserRuleContext.ANON_FUNC_BODY);\n return parseAnonFuncBody(isRhsExpr);\n }\n }\n\n /**\n * Parse expression function body.\n *

\n * expr-function-body := => expression\n *\n * @param isAnon Is anonymous function.\n * @param isRhsExpr Is expression in rhs context\n * @return Expression function body node\n */\n private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {\n STNode rightDoubleArrow = parseDoubleRightArrow();\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n\n STNode semiColon;\n if (isAnon) {\n semiColon = STNodeFactory.createEmptyNode();\n } else {\n semiColon = parseSemicolon();\n }\n return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);\n }\n\n /**\n * Parse '=>' token.\n *\n * @return Double right arrow token\n */\n private STNode parseDoubleRightArrow() {\n STToken token = peek();\n if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);\n return parseDoubleRightArrow();\n }\n }\n\n private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {\n switch (params.kind) {\n case SIMPLE_NAME_REFERENCE:\n case INFER_PARAM_LIST:\n break;\n case BRACED_EXPRESSION:\n params = getAnonFuncParam((STBracedExpressionNode) params);\n break;\n case NIL_LITERAL:\n STNilLiteralNode nilLiteralNode = (STNilLiteralNode) params;\n params = STNodeFactory.createImplicitAnonymousFunctionParameters(nilLiteralNode.openParenToken,\n STNodeFactory.createNodeList(new ArrayList<>()), nilLiteralNode.closeParenToken);\n break;\n default:\n STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,\n DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);\n params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);\n }\n\n STNode rightDoubleArrow = parseDoubleRightArrow();\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, 
false);\n return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);\n }\n\n /**\n * Create a new anon-func-param node from a braced expression.\n *\n * @param bracedExpression Braced expression\n * @return Anon-func param node\n */\n private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) {\n List paramList = new ArrayList<>();\n STNode innerExpression = bracedExpression.expression;\n STNode openParen = bracedExpression.openParen;\n if (innerExpression.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\n paramList.add(innerExpression);\n } else {\n openParen = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParen, innerExpression,\n DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);\n }\n return STNodeFactory.createImplicitAnonymousFunctionParameters(openParen,\n STNodeFactory.createNodeList(paramList), bracedExpression.closeParen);\n }\n\n /**\n * Parse implicit anon function expression.\n *\n * @param openParen Open parenthesis token\n * @param firstParam First parameter\n * @param isRhsExpr Is expression in rhs context\n * @return Implicit anon function expression node\n */\n private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {\n List paramList = new ArrayList<>();\n paramList.add(firstParam);\n\n \n STToken nextToken = peek();\n STNode paramEnd;\n STNode param;\n while (!isEndOfAnonFuncParametersList(nextToken.kind)) {\n paramEnd = parseImplicitAnonFuncParamEnd();\n if (paramEnd == null) {\n break;\n }\n\n paramList.add(paramEnd);\n param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);\n param = STNodeFactory.createSimpleNameReferenceNode(param);\n paramList.add(param);\n nextToken = peek();\n }\n\n STNode params = STNodeFactory.createNodeList(paramList);\n STNode closeParen = parseCloseParenthesis();\n endContext(); \n\n STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, 
params, closeParen);\n return parseImplicitAnonFunc(inferedParams, isRhsExpr);\n }\n\n private STNode parseImplicitAnonFuncParamEnd() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_PAREN_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);\n return parseImplicitAnonFuncParamEnd();\n }\n }\n\n private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case EOF_TOKEN:\n case CLOSE_BRACE_TOKEN:\n case CLOSE_PAREN_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n case SEMICOLON_TOKEN:\n case RETURNS_KEYWORD:\n case TYPE_KEYWORD:\n case LISTENER_KEYWORD:\n case IF_KEYWORD:\n case WHILE_KEYWORD:\n case DO_KEYWORD:\n case OPEN_BRACE_TOKEN:\n case RIGHT_DOUBLE_ARROW_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse tuple type descriptor.\n *

\n * tuple-type-descriptor := [ tuple-member-type-descriptors ]\n *

\n * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]\n * | [ tuple-rest-descriptor ]\n *

     * member-type-descriptor := [annots] type-descriptor
     * tuple-rest-descriptor := type-descriptor ...
     *
     * @return Parsed tuple-type-descriptor node
     */
    private STNode parseTupleTypeDesc() {
        STNode openBracket = parseOpenBracket();
        startContext(ParserRuleContext.TUPLE_MEMBERS);
        STNode memberTypeDesc = parseTupleMemberTypeDescList();
        STNode closeBracket = parseCloseBracket();
        endContext();

        // An empty member list is invalid; attach the diagnostic to the open bracket.
        openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,
                DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);

        return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);
    }

    /**
     * Parse tuple member type descriptors.
     *
     * @return Parsed node
     */
    private STNode parseTupleMemberTypeDescList() {
        List typeDescList = new ArrayList<>();
        STToken nextToken = peek();

        // Return an empty list when the tuple has no members at all.
        if (isEndOfTypeList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first member, then hand off to the comma-separated-list loop.
        STNode typeDesc = parseMemberDescriptor();
        return parseTupleTypeMembers(typeDesc, typeDescList);
    }

    /**
     * Parse the remaining comma-separated tuple members, starting from an
     * already-parsed first member.
     *
     * @param membertypeDesc     first (already parsed) member
     * @param memberTypeDescList accumulator for members and separators
     * @return node-list of members interleaved with comma tokens
     */
    private STNode parseTupleTypeMembers(STNode membertypeDesc, List memberTypeDescList) {
        STNode tupleMemberRhs;
        while (!isEndOfTypeList(peek().kind)) {
            // A rest descriptor must be the last member; anything after it is invalid.
            if (membertypeDesc.kind == SyntaxKind.REST_TYPE) {
                membertypeDesc = invalidateTypeDescAfterRestDesc(membertypeDesc);
                break;
            }

            tupleMemberRhs = parseTupleMemberRhs();
            if (tupleMemberRhs == null) {
                break;
            }
            memberTypeDescList.add(membertypeDesc);
            memberTypeDescList.add(tupleMemberRhs);
            membertypeDesc = parseMemberDescriptor();
        }

        memberTypeDescList.add(membertypeDesc);
        return STNodeFactory.createNodeList(memberTypeDescList);
    }

    /**
     * Parse one tuple member: optional annotations followed by a type descriptor.
     *
     * @return member or rest-descriptor node
     */
    private STNode parseMemberDescriptor() {
        STNode annot = parseOptionalAnnotations();
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        return createMemberOrRestNode(annot, typeDesc);
    }

    /**
     * Wrap a parsed type-desc as either a member node or, when followed by an
     * ellipsis, a rest-descriptor node. Annotations are not allowed on a rest
     * descriptor and are attached as invalid minutiae with a diagnostic.
     */
    private STNode createMemberOrRestNode(STNode annot, STNode typeDesc) {
        STNode tupleMemberRhs = parseTypeDescInTupleRhs();
        if (tupleMemberRhs != null) {
            if (!((STNodeList) annot).isEmpty()) {
                typeDesc = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(typeDesc, annot,
                        DiagnosticErrorCode.ERROR_ANNOTATIONS_NOT_ALLOWED_FOR_TUPLE_REST_DESCRIPTOR);
            }
            return STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);
        }
        return STNodeFactory.createMemberTypeDescriptorNode(annot, typeDesc);
    }

    /**
     * Consume and invalidate any members that appear after a rest descriptor,
     * attaching them (and their separators) to the rest descriptor as invalid
     * trailing minutiae.
     */
    private STNode invalidateTypeDescAfterRestDesc(STNode restDescriptor) {
        while (!isEndOfTypeList(peek().kind)) {
            STNode tupleMemberRhs = parseTupleMemberRhs();
            if (tupleMemberRhs == null) {
                break;
            }

            // The separator carries no diagnostic; the member itself carries the error.
            restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, tupleMemberRhs, null);
            restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, parseMemberDescriptor(),
                    DiagnosticErrorCode.ERROR_TYPE_DESC_AFTER_REST_DESCRIPTOR);
        }

        return restDescriptor;
    }

    /**
     * Parse what follows a tuple member: a comma (more members) or close bracket (end).
     *
     * @return comma token, or {@code null} at the end of the member list
     */
    private STNode parseTupleMemberRhs() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            default:
                recover(nextToken, ParserRuleContext.TUPLE_TYPE_MEMBER_RHS);
                return parseTupleMemberRhs();
        }
    }

    /**
     * Parse what follows a type-desc inside a tuple: an ellipsis marks a rest
     * descriptor; comma/close-bracket end the current member.
     *
     * @return ellipsis token, or {@code null} when the member is a plain member
     */
    private STNode parseTypeDescInTupleRhs() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case COMMA_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return null;
            case ELLIPSIS_TOKEN:
                return parseEllipsis();
            default:
                recover(nextToken, ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
                return parseTypeDescInTupleRhs();
        }
    }

    /**
     * Check whether the given token terminates a type list.
     */
    private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case EOF_TOKEN:
            case EQUAL_TOKEN:
            case SEMICOLON_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse table constructor or query expression.
     *

     *
     * table-constructor-or-query-expr := table-constructor-expr | query-expr
     *
     * table-constructor-expr := table [key-specifier] [ [row-list] ]
     *
     * query-expr := [query-construct-type] query-pipeline select-clause
     *               [query-construct-type] query-pipeline select-clause on-conflict-clause?
     *
     * query-construct-type := table key-specifier | stream
     *
     * @return Parsed node
     */
    private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {
        startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
        STNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr);
        endContext();
        return tableOrQueryExpr;
    }

    /**
     * Dispatch on the first token: `from` starts a plain query, `stream`/`table`
     * start a query-construct-type (or, for `table`, possibly a table constructor).
     */
    private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) {
        STNode queryConstructType;
        switch (peek().kind) {
            case FROM_KEYWORD:
                queryConstructType = STNodeFactory.createEmptyNode();
                return parseQueryExprRhs(queryConstructType, isRhsExpr);
            case STREAM_KEYWORD:
                queryConstructType = parseQueryConstructType(parseStreamKeyword(), null);
                return parseQueryExprRhs(queryConstructType, isRhsExpr);
            case TABLE_KEYWORD:
                STNode tableKeyword = parseTableKeyword();
                return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
            default:
                recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START);
                return parseTableConstructorOrQueryInternal(isRhsExpr);
        }

    }

    /**
     * After the `table` keyword: an open bracket means a table constructor without
     * a key specifier; a `key` keyword (or an identifier that spells "key") means a
     * key specifier followed by either constructor or query.
     */
    private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {
        STNode keySpecifier;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                keySpecifier = STNodeFactory.createEmptyNode();
                return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
            case KEY_KEYWORD:
                keySpecifier = parseKeySpecifier();
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
            case IDENTIFIER_TOKEN:
                // `key` may be lexed as a plain identifier; check its text.
                if (isKeyKeyword(nextToken)) {
                    keySpecifier = parseKeySpecifier();
                    return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
                }
                break;
            default:
                break;
        }

        recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS);
        return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
    }

    /**
     * After `table key-specifier`: `from` means a query expression; an open
     * bracket means a table constructor.
     */
    private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {
        switch (peek().kind) {
            case FROM_KEYWORD:
                return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);
            case OPEN_BRACKET_TOKEN:
                return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
            default:
                recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS);
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
        }
    }

    /**
     * Parse query construct type.
     *

\n * query-construct-type := table key-specifier | stream\n *\n * @return Parsed node\n */\n private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {\n return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);\n }\n\n /**\n * Parse query action or expression.\n *

     *
     * query-expr-rhs := query-pipeline select-clause
     *                   query-pipeline select-clause on-conflict-clause?
     *
     * query-pipeline := from-clause intermediate-clause*
     *
     * @param queryConstructType queryConstructType that precedes this rhs
     * @return Parsed node
     */
    private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {

        switchContext(ParserRuleContext.QUERY_EXPRESSION);
        STNode fromClause = parseFromClause(isRhsExpr);

        List clauses = new ArrayList<>();
        STNode intermediateClause;
        STNode selectClause = null;
        while (!isEndOfIntermediateClause(peek().kind)) {
            intermediateClause = parseIntermediateClause(isRhsExpr);
            if (intermediateClause == null) {
                break;
            }

            // Anything parsed after a select clause is invalid; attach it to the
            // select clause as invalid trailing minutiae.
            if (selectClause != null) {
                selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,
                        DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);
                continue;
            }

            if (intermediateClause.kind != SyntaxKind.SELECT_CLAUSE) {
                clauses.add(intermediateClause);
                continue;
            }

            selectClause = intermediateClause;

            // For a non-nested query, keep looping so that further (invalid) clauses
            // after select are captured above; otherwise stop at the select clause.
            if (isNestedQueryExpr() || !isValidIntermediateQueryStart(peek().kind)) {
                break;
            }
        }

        if (peek().kind == SyntaxKind.DO_KEYWORD) {
            STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
            STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
            return parseQueryAction(queryConstructType, queryPipeline, selectClause);
        }

        if (selectClause == null) {
            // Synthesize a missing select clause so the tree is well-formed.
            STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);
            STNode expr = STNodeFactory
                    .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
            selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);

            // Report the missing select on the last parsed clause, or on the
            // from-clause when there are no intermediate clauses.
            if (clauses.isEmpty()) {
                fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
            } else {
                int lastIndex = clauses.size() - 1;
                STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),
                        DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
                clauses.set(lastIndex, intClauseWithDiagnostic);
            }
        }

        STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
        STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
        STNode onConflictClause = parseOnConflictClause(isRhsExpr);


        return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,
                onConflictClause);
    }

    /**
     * Check whether currently parsing query expr is a nested query expression.
     *
     * @return true if currently parsing query-expr is a nested query-expr. false otherwise.
     */
    private boolean isNestedQueryExpr() {
        return Collections.frequency(this.errorHandler.getContextStack(), ParserRuleContext.QUERY_EXPRESSION) > 1;
    }

    /**
     * Check whether the token can begin an intermediate query clause.
     */
    private boolean isValidIntermediateQueryStart(SyntaxKind syntaxKind) {
        switch (syntaxKind) {
            case FROM_KEYWORD:
            case WHERE_KEYWORD:
            case LET_KEYWORD:
            case SELECT_KEYWORD:
            case JOIN_KEYWORD:
            case OUTER_KEYWORD:
            case ORDER_KEYWORD:
            case BY_KEYWORD:
            case ASCENDING_KEYWORD:
            case DESCENDING_KEYWORD:
            case LIMIT_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse an intermediate clause.
     *

     *
     * intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause
     *
     * @return Parsed node, or {@code null} when the pipeline has ended
     */
    private STNode parseIntermediateClause(boolean isRhsExpr) {
        switch (peek().kind) {
            case FROM_KEYWORD:
                return parseFromClause(isRhsExpr);
            case WHERE_KEYWORD:
                return parseWhereClause(isRhsExpr);
            case LET_KEYWORD:
                return parseLetClause(isRhsExpr);
            case SELECT_KEYWORD:
                return parseSelectClause(isRhsExpr);
            case JOIN_KEYWORD:
            case OUTER_KEYWORD:
                return parseJoinClause(isRhsExpr);
            case ORDER_KEYWORD:
            case BY_KEYWORD:
            case ASCENDING_KEYWORD:
            case DESCENDING_KEYWORD:
                return parseOrderByClause(isRhsExpr);
            case LIMIT_KEYWORD:
                return parseLimitClause(isRhsExpr);
            case DO_KEYWORD:
            case SEMICOLON_TOKEN:
            case ON_KEYWORD:
            case CONFLICT_KEYWORD:
                // These tokens terminate the clause list; handled by the caller.
                return null;
            default:
                recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS);
                return parseIntermediateClause(isRhsExpr);
        }
    }

    /**
     * Parse join-keyword.
     *
     * @return Join-keyword node
     */
    private STNode parseJoinKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.JOIN_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.JOIN_KEYWORD);
            return parseJoinKeyword();
        }
    }

    /**
     * Parse equals keyword.
     *
     * @return Parsed node
     */
    private STNode parseEqualsKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.EQUALS_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.EQUALS_KEYWORD);
            return parseEqualsKeyword();
        }
    }

    /**
     * Check whether the token terminates the intermediate-clause list. Besides the
     * listed tokens, anything that can start an expression rhs also terminates it.
     */
    private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case PUBLIC_KEYWORD:
            case FUNCTION_KEYWORD:
            case EOF_TOKEN:
            case RESOURCE_KEYWORD:
            case LISTENER_KEYWORD:
            case DOCUMENTATION_STRING:
            case PRIVATE_KEYWORD:
            case RETURNS_KEYWORD:
            case SERVICE_KEYWORD:
            case TYPE_KEYWORD:
            case CONST_KEYWORD:
            case FINAL_KEYWORD:
            case DO_KEYWORD:
            case ON_KEYWORD:
            case CONFLICT_KEYWORD:
                return true;
            default:
                return isValidExprRhsStart(tokenKind, SyntaxKind.NONE);
        }
    }

    /**
     * Parse from clause.
     *

\n * from-clause := from typed-binding-pattern in expression\n *\n * @return Parsed node\n */\n private STNode parseFromClause(boolean isRhsExpr) {\n STNode fromKeyword = parseFromKeyword();\n STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);\n STNode inKeyword = parseInKeyword();\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);\n }\n\n /**\n * Parse from-keyword.\n *\n * @return From-keyword node\n */\n private STNode parseFromKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.FROM_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FROM_KEYWORD);\n return parseFromKeyword();\n }\n }\n\n /**\n * Parse where clause.\n *

\n * where-clause := where expression\n *\n * @return Parsed node\n */\n private STNode parseWhereClause(boolean isRhsExpr) {\n STNode whereKeyword = parseWhereKeyword();\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n return STNodeFactory.createWhereClauseNode(whereKeyword, expression);\n }\n\n /**\n * Parse where-keyword.\n *\n * @return Where-keyword node\n */\n private STNode parseWhereKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.WHERE_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.WHERE_KEYWORD);\n return parseWhereKeyword();\n }\n }\n\n /**\n * Parse limit-keyword.\n *\n * @return limit-keyword node\n */\n private STNode parseLimitKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.LIMIT_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.LIMIT_KEYWORD);\n return parseLimitKeyword();\n }\n }\n\n /**\n * Parse let clause.\n *

\n * let-clause := let let-var-decl [, let-var-decl]* \n *\n * @return Parsed node\n */\n private STNode parseLetClause(boolean isRhsExpr) {\n STNode letKeyword = parseLetKeyword();\n STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);\n\n \n letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\n DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\n\n return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);\n }\n\n /**\n * Parse order-keyword.\n *\n * @return Order-keyword node\n */\n private STNode parseOrderKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.ORDER_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.ORDER_KEYWORD);\n return parseOrderKeyword();\n }\n }\n\n /**\n * Parse by-keyword.\n *\n * @return By-keyword node\n */\n private STNode parseByKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.BY_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.BY_KEYWORD);\n return parseByKeyword();\n }\n }\n\n /**\n * Parse order by clause.\n *

\n * order-by-clause := order by order-key-list\n * \n *\n * @return Parsed node\n */\n private STNode parseOrderByClause(boolean isRhsExpr) {\n STNode orderKeyword = parseOrderKeyword();\n STNode byKeyword = parseByKeyword();\n STNode orderKeys = parseOrderKeyList(isRhsExpr);\n byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);\n return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys);\n }\n\n /**\n * Parse order key.\n *

\n * order-key-list := order-key [, order-key]*\n *\n * @return Parsed node\n */\n private STNode parseOrderKeyList(boolean isRhsExpr) {\n startContext(ParserRuleContext.ORDER_KEY_LIST);\n List orderKeys = new ArrayList<>();\n STToken nextToken = peek();\n\n if (isEndOfOrderKeys(nextToken.kind)) {\n endContext();\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n STNode orderKey = parseOrderKey(isRhsExpr);\n orderKeys.add(orderKey);\n\n \n nextToken = peek();\n STNode orderKeyListMemberEnd;\n while (!isEndOfOrderKeys(nextToken.kind)) {\n orderKeyListMemberEnd = parseOrderKeyListMemberEnd();\n if (orderKeyListMemberEnd == null) {\n break;\n }\n orderKeys.add(orderKeyListMemberEnd);\n orderKey = parseOrderKey(isRhsExpr);\n orderKeys.add(orderKey);\n nextToken = peek();\n }\n\n endContext();\n return STNodeFactory.createNodeList(orderKeys);\n }\n\n private boolean isEndOfOrderKeys(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case COMMA_TOKEN:\n case ASCENDING_KEYWORD:\n case DESCENDING_KEYWORD:\n return false;\n case SEMICOLON_TOKEN:\n case EOF_TOKEN:\n return true;\n default:\n return isQueryClauseStartToken(tokenKind);\n }\n }\n\n private boolean isQueryClauseStartToken(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case SELECT_KEYWORD:\n case LET_KEYWORD:\n case WHERE_KEYWORD:\n case OUTER_KEYWORD:\n case JOIN_KEYWORD:\n case ORDER_KEYWORD:\n case DO_KEYWORD:\n case FROM_KEYWORD:\n case LIMIT_KEYWORD:\n return true;\n default:\n return false;\n }\n }\n\n private STNode parseOrderKeyListMemberEnd() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case COMMA_TOKEN:\n return parseComma();\n case EOF_TOKEN:\n return null;\n default:\n if (isQueryClauseStartToken(nextToken.kind)) {\n \n return null;\n }\n\n recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END);\n return parseOrderKeyListMemberEnd();\n }\n }\n\n /**\n * Parse order key.\n *

\n * order-key := expression (ascending | descending)?\n *\n * @return Parsed node\n */\n private STNode parseOrderKey(boolean isRhsExpr) {\n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n\n STNode orderDirection;\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case ASCENDING_KEYWORD:\n case DESCENDING_KEYWORD:\n orderDirection = consume();\n break;\n default:\n orderDirection = STNodeFactory.createEmptyNode();\n }\n\n return STNodeFactory.createOrderKeyNode(expression, orderDirection);\n }\n\n /**\n * Parse select clause.\n *

\n * select-clause := select expression\n *\n * @return Parsed node\n */\n private STNode parseSelectClause(boolean isRhsExpr) {\n startContext(ParserRuleContext.SELECT_CLAUSE);\n STNode selectKeyword = parseSelectKeyword();\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n endContext();\n return STNodeFactory.createSelectClauseNode(selectKeyword, expression);\n }\n\n /**\n * Parse select-keyword.\n *\n * @return Select-keyword node\n */\n private STNode parseSelectKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.SELECT_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.SELECT_KEYWORD);\n return parseSelectKeyword();\n }\n }\n\n /**\n * Parse on-conflict clause.\n *

\n * \n * onConflictClause := on conflict expression\n * \n *\n * @return On conflict clause node\n */\n private STNode parseOnConflictClause(boolean isRhsExpr) {\n STToken nextToken = peek();\n if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {\n return STNodeFactory.createEmptyNode();\n }\n\n startContext(ParserRuleContext.ON_CONFLICT_CLAUSE);\n STNode onKeyword = parseOnKeyword();\n STNode conflictKeyword = parseConflictKeyword();\n endContext();\n STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr);\n }\n\n /**\n * Parse conflict keyword.\n *\n * @return Conflict keyword node\n */\n private STNode parseConflictKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.CONFLICT_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.CONFLICT_KEYWORD);\n return parseConflictKeyword();\n }\n }\n\n /**\n * Parse limit clause.\n *

\n * limitClause := limit expression\n *\n * @return Limit expression node\n */\n private STNode parseLimitClause(boolean isRhsExpr) {\n STNode limitKeyword = parseLimitKeyword();\n \n \n STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n return STNodeFactory.createLimitClauseNode(limitKeyword, expr);\n }\n\n /**\n * Parse join clause.\n *

\n * \n * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause\n *
\n * join-var-decl := join (typeName | var) bindingPattern\n *
\n * outer-join-var-decl := outer join var binding-pattern\n *
\n *\n * @return Join clause\n */\n private STNode parseJoinClause(boolean isRhsExpr) {\n startContext(ParserRuleContext.JOIN_CLAUSE);\n STNode outerKeyword;\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {\n outerKeyword = consume();\n } else {\n outerKeyword = STNodeFactory.createEmptyNode();\n }\n\n STNode joinKeyword = parseJoinKeyword();\n STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);\n STNode inKeyword = parseInKeyword();\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n endContext();\n STNode onCondition = parseOnClause(isRhsExpr);\n return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression,\n onCondition);\n }\n\n /**\n * Parse on clause.\n *

\n * on clause := `on` expression `equals` expression\n *\n * @return On clause node\n */\n private STNode parseOnClause(boolean isRhsExpr) {\n STToken nextToken = peek();\n if (isQueryClauseStartToken(nextToken.kind)) {\n return createMissingOnClauseNode();\n }\n\n startContext(ParserRuleContext.ON_CLAUSE);\n STNode onKeyword = parseOnKeyword();\n STNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n STNode equalsKeyword = parseEqualsKeyword();\n endContext();\n STNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);\n }\n\n private STNode createMissingOnClauseNode() {\n STNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,\n DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);\n STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\n DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);\n STNode equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,\n DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);\n\n STNode lhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);\n STNode rhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);\n return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);\n }\n\n /**\n * Parse start action.\n *

     * start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)
     *
     * @return Start action node
     */
    private STNode parseStartAction(STNode annots) {
        STNode startKeyword = parseStartKeyword();
        STNode expr = parseActionOrExpression();

        // Only call-shaped expressions are valid after `start`; coerce or
        // invalidate anything else.
        switch (expr.kind) {
            case FUNCTION_CALL:
            case METHOD_CALL:
            case REMOTE_METHOD_CALL_ACTION:
                break;
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
            case FIELD_ACCESS:
            case ASYNC_SEND_ACTION:
                // These can be turned into a call by adding missing parens.
                expr = generateValidExprForStartAction(expr);
                break;
            default:
                // Attach the invalid expression to `start` as invalid minutiae and
                // substitute a fully-missing function call.
                startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
                        DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
                STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
                STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
                STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
                expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken,
                        STNodeFactory.createEmptyNodeList(), closeParenToken);
                break;
        }

        return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
    }

    /**
     * Convert a name-reference / field-access / async-send into the corresponding
     * call node by appending missing parentheses (each with a diagnostic).
     */
    private STNode generateValidExprForStartAction(STNode expr) {
        STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
                DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
        STNode arguments = STNodeFactory.createEmptyNodeList();
        STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
                DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);

        switch (expr.kind) {
            case FIELD_ACCESS:
                STFieldAccessExpressionNode fieldAccessExpr = (STFieldAccessExpressionNode) expr;
                return STNodeFactory.createMethodCallExpressionNode(fieldAccessExpr.expression,
                        fieldAccessExpr.dotToken, fieldAccessExpr.fieldName, openParenToken, arguments,
                        closeParenToken);
            case ASYNC_SEND_ACTION:
                STAsyncSendActionNode asyncSendAction = (STAsyncSendActionNode) expr;
                return STNodeFactory.createRemoteMethodCallActionNode(asyncSendAction.expression,
                        asyncSendAction.rightArrowToken, asyncSendAction.peerWorker, openParenToken, arguments,
                        closeParenToken);
            default: // simple or qualified name reference
                return STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);
        }
    }

    /**
     * Parse start keyword.
     *
     * @return Start keyword node
     */
    private STNode parseStartKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.START_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.START_KEYWORD);
            return parseStartKeyword();
        }
    }

    /**
     * Parse flush action.
     *

\n * flush-action := flush [peer-worker]\n *\n * @return flush action node\n */\n private STNode parseFlushAction() {\n STNode flushKeyword = parseFlushKeyword();\n STNode peerWorker = parseOptionalPeerWorkerName();\n return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);\n }\n\n /**\n * Parse flush keyword.\n *\n * @return flush keyword node\n */\n private STNode parseFlushKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.FLUSH_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FLUSH_KEYWORD);\n return parseFlushKeyword();\n }\n }\n\n /**\n * Parse peer worker.\n *

\n * peer-worker := worker-name | function\n *\n * @return peer worker name node\n */\n private STNode parseOptionalPeerWorkerName() {\n STToken token = peek();\n switch (token.kind) {\n case IDENTIFIER_TOKEN:\n case FUNCTION_KEYWORD:\n return STNodeFactory.createSimpleNameReferenceNode(consume());\n default:\n return STNodeFactory.createEmptyNode();\n }\n }\n\n /**\n * Parse intersection type descriptor.\n *

\n * intersection-type-descriptor := type-descriptor & type-descriptor\n *

\n *\n * @return Parsed node\n */\n private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\n boolean isTypedBindingPattern) {\n \n STNode bitwiseAndToken = consume();\n STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,\n TypePrecedence.INTERSECTION);\n return mergeTypesWithIntersection(leftTypeDesc, bitwiseAndToken, rightTypeDesc);\n }\n\n /**\n * Creates an intersection type descriptor after validating lhs and rhs types.\n *

     * Note: Since type precedence and associativity are not taken into account here,
     * this method should not be called directly when types are unknown.
     *
     * Prefer the precedence-aware path ({@code mergeTypesWithIntersection}) in that case.
     *
     * @param leftTypeDesc lhs type
     * @param bitwiseAndToken bitwise-and token
     * @param rightTypeDesc rhs type
     * @return an IntersectionTypeDescriptorNode
     */
    private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
        // NOTE(review): validateForUsageOfVar presumably rejects `var` as an
        // intersection operand — confirm against its definition.
        leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
        rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
        return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
    }

    /**
     * Parse singleton type descriptor.
     *

     * singleton-type-descriptor := simple-const-expr
     * simple-const-expr :=
     *   nil-literal
     *   | boolean-literal
     *   | [Sign] int-literal
     *   | [Sign] floating-point-literal
     *   | string-literal
     *   | constant-reference-expr
     *
     * @return Parsed singleton-type-descriptor node
     */
    private STNode parseSingletonTypeDesc() {
        STNode simpleContExpr = parseSimpleConstExpr();
        return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);
    }

    /**
     * Parse a sign (+/-) followed by an int or float literal, as a unary expression.
     */
    private STNode parseSignedIntOrFloat() {
        STNode operator = parseUnaryOperator();
        STNode literal;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                literal = parseBasicLiteral();
                break;
            default: // decimal integer literal
                literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
                literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);
        }
        return STNodeFactory.createUnaryExpressionNode(operator, literal);
    }

    /**
     * Check whether the token can start a singleton type descriptor. A sign only
     * qualifies when followed by a numeric literal.
     */
    private static boolean isSingletonTypeDescStart(SyntaxKind tokenKind, STToken nextNextToken) {
        switch (tokenKind) {
            case STRING_LITERAL_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
                return true;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                return isIntOrFloat(nextNextToken);
            default:
                return false;
        }
    }

    /**
     * Check whether the token is an int or float literal.
     */
    static boolean isIntOrFloat(STToken token) {
        switch (token.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Check whether the parser reached to a valid expression start.
     *
     * @param nextTokenKind Kind of the next immediate token.
     * @param nextTokenIndex Index to the next token.
     * @return true if this is a start of a valid expression. false otherwise
     */
    private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
        nextTokenIndex++;
        switch (nextTokenKind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
                if (nextNextTokenKind == SyntaxKind.PIPE_TOKEN || nextNextTokenKind == SyntaxKind.BITWISE_AND_TOKEN) {
                    // Could be a union/intersection of singleton types; look further ahead.
                    nextTokenIndex++;
                    return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
                }

                return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN ||
                        nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
                        nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
                        isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case IDENTIFIER_TOKEN:
                return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case OPEN_PAREN_TOKEN:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case TYPEOF_KEYWORD:
            case NEGATION_TOKEN:
            case EXCLAMATION_MARK_TOKEN:
            case TRAP_KEYWORD:
            case OPEN_BRACKET_TOKEN:
            case LT_TOKEN:
            case FROM_KEYWORD:
            case LET_KEYWORD:
            case BACKTICK_TOKEN:
            case NEW_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case FUNCTION_KEYWORD:
            case TRANSACTIONAL_KEYWORD:
            case ISOLATED_KEYWORD:
            case BASE16_KEYWORD:
            case BASE64_KEYWORD:
                return true;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
            case TABLE_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
            case STREAM_KEYWORD:
                STToken nextNextToken = peek(nextTokenIndex);
                return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
                        nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
                        nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
            case ERROR_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
            case XML_KEYWORD:
            case STRING_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;

            // Action-starting keywords are not expression starts.
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case WAIT_KEYWORD:
            default:
                return false;
        }
    }

    /**
     * Parse sync send action.
     *

\n * sync-send-action := expression ->> peer-worker\n *\n * @param expression LHS expression of the sync send action\n * @return Sync send action node\n */\n private STNode parseSyncSendAction(STNode expression) {\n STNode syncSendToken = parseSyncSendToken();\n STNode peerWorker = parsePeerWorkerName();\n return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);\n }\n\n /**\n * Parse peer worker.\n *

\n * peer-worker := worker-name | function\n *\n * @return peer worker name node\n */\n private STNode parsePeerWorkerName() {\n STToken token = peek();\n switch (token.kind) {\n case IDENTIFIER_TOKEN:\n case FUNCTION_KEYWORD:\n return STNodeFactory.createSimpleNameReferenceNode(consume());\n default:\n recover(token, ParserRuleContext.PEER_WORKER_NAME);\n return parsePeerWorkerName();\n }\n }\n\n /**\n * Parse sync send token.\n *

\n * sync-send-token := ->> \n *\n * @return sync send token\n */\n private STNode parseSyncSendToken() {\n STToken token = peek();\n if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.SYNC_SEND_TOKEN);\n return parseSyncSendToken();\n }\n }\n\n /**\n * Parse receive action.\n *

\n * receive-action := single-receive-action | multiple-receive-action\n *\n * @return Receive action\n */\n private STNode parseReceiveAction() {\n STNode leftArrow = parseLeftArrowToken();\n STNode receiveWorkers = parseReceiveWorkers();\n return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);\n }\n\n private STNode parseReceiveWorkers() {\n switch (peek().kind) {\n case FUNCTION_KEYWORD:\n case IDENTIFIER_TOKEN:\n return parsePeerWorkerName();\n case OPEN_BRACE_TOKEN:\n return parseMultipleReceiveWorkers();\n default:\n recover(peek(), ParserRuleContext.RECEIVE_WORKERS);\n return parseReceiveWorkers();\n }\n }\n\n /**\n * Parse multiple worker receivers.\n *

\n * { receive-field (, receive-field)* }\n *\n * @return Multiple worker receiver node\n */\n private STNode parseMultipleReceiveWorkers() {\n startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);\n STNode openBrace = parseOpenBrace();\n STNode receiveFields = parseReceiveFields();\n STNode closeBrace = parseCloseBrace();\n endContext();\n\n openBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,\n DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);\n return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);\n }\n\n private STNode parseReceiveFields() {\n List receiveFields = new ArrayList<>();\n STToken nextToken = peek();\n\n \n if (isEndOfReceiveFields(nextToken.kind)) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n STNode receiveField = parseReceiveField();\n receiveFields.add(receiveField);\n\n \n nextToken = peek();\n STNode recieveFieldEnd;\n while (!isEndOfReceiveFields(nextToken.kind)) {\n recieveFieldEnd = parseReceiveFieldEnd();\n if (recieveFieldEnd == null) {\n break;\n }\n\n receiveFields.add(recieveFieldEnd);\n receiveField = parseReceiveField();\n receiveFields.add(receiveField);\n nextToken = peek();\n }\n\n return STNodeFactory.createNodeList(receiveFields);\n }\n\n private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {\n switch (nextTokenKind) {\n case EOF_TOKEN:\n case CLOSE_BRACE_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n private STNode parseReceiveFieldEnd() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_BRACE_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);\n return parseReceiveFieldEnd();\n }\n }\n\n /**\n * Parse receive field.\n *

     * <p>
     * <code>receive-field := peer-worker | field-name : peer-worker</code>
     *
     * @return Receiver field node
     */
    private STNode parseReceiveField() {
        switch (peek().kind) {
            case FUNCTION_KEYWORD:
                STNode functionKeyword = consume();
                return STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
                return createQualifiedReceiveField(identifier);
            default:
                recover(peek(), ParserRuleContext.RECEIVE_FIELD);
                return parseReceiveField();
        }
    }

    // Turns `identifier` into `identifier : peer-worker` when a colon follows;
    // otherwise returns the identifier unchanged.
    private STNode createQualifiedReceiveField(STNode identifier) {
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            return identifier;
        }

        STNode colon = parseColon();
        STNode peerWorker = parsePeerWorkerName();
        return createQualifiedNameReferenceNode(identifier, colon, peerWorker);
    }

    /**
     * Parse left arrow ({@code <-}) token.
     *
     * @return left arrow token
     */
    private STNode parseLeftArrowToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);
            return parseLeftArrowToken();
        }
    }

    /**
     * Parse signed right shift token ({@code >>}).
     * This method should only be called by seeing a `DOUBLE_GT_TOKEN` or
     * by seeing a `GT_TOKEN` followed by a `GT_TOKEN`.
     *
     * @return Parsed node
     */
    private STNode parseSignedRightShiftToken() {
        STNode firstToken = consume();
        if (firstToken.kind == SyntaxKind.DOUBLE_GT_TOKEN) {
            return firstToken;
        }

        // Merge two adjacent `>` tokens into a single `>>` token,
        // keeping the outer minutiae of the pair.
        STToken endLGToken = consume();
        STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, firstToken.leadingMinutiae(),
                endLGToken.trailingMinutiae());

        // Whitespace between the two `>` tokens is not allowed.
        if (hasTrailingMinutiae(firstToken)) {
            doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,
                    DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
        }
        return doubleGTToken;
    }

    /**
     * Parse unsigned right shift token ({@code >>>}).
     * This method should only be called by seeing a `TRIPPLE_GT_TOKEN` or
     * by seeing a `GT_TOKEN` followed by two `GT_TOKEN`s.
     *
     * @return Parsed node
     */
    private STNode parseUnsignedRightShiftToken() {
        STNode firstToken = consume();
        if (firstToken.kind == SyntaxKind.TRIPPLE_GT_TOKEN) {
            return firstToken;
        }

        // Merge three adjacent `>` tokens into a single `>>>` token.
        STNode middleGTToken = consume();
        STNode endLGToken = consume();
        STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
                firstToken.leadingMinutiae(), endLGToken.trailingMinutiae());

        // Whitespace between any of the three `>` tokens is not allowed.
        boolean validOpenGTToken = !hasTrailingMinutiae(firstToken);
        boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);
        if (validOpenGTToken && validMiddleGTToken) {
            return unsignedRightShiftToken;
        }

        unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
        return unsignedRightShiftToken;
    }

    /**
     * Parse wait action.
     *

     * <p>
     * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action</code>
     *
     * @return Wait action node
     */
    private STNode parseWaitAction() {
        STNode waitKeyword = parseWaitKeyword();
        if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
            return parseMultiWaitAction(waitKeyword);
        }

        return parseSingleOrAlternateWaitAction(waitKeyword);
    }

    /**
     * Parse wait keyword.
     *
     * @return wait keyword
     */
    private STNode parseWaitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.WAIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.WAIT_KEYWORD);
            return parseWaitKeyword();
        }
    }

    /**
     * Parse single or alternate wait actions.
     * <p>
     * <code>
     * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
     * <br/>
     * wait-future-expr := expression but not mapping-constructor-expr
     * </code>
     *
     * @param waitKeyword wait keyword
     * @return Single or alternate wait action node
     */
    private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
        STToken nextToken = peek();

        // `wait` with no future expression: synthesize a missing identifier with a diagnostic.
        if (isEndOfWaitFutureExprList(nextToken.kind)) {
            endContext();
            STNode waitFutureExprs = STNodeFactory
                    .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
            waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                    DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
            return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
        }

        // Parse the first wait-future-expr.
        List<STNode> waitFutureExprList = new ArrayList<>();
        STNode waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);

        // Parse any remaining `| wait-future-expr` alternatives.
        nextToken = peek();
        STNode waitFutureExprEnd;
        while (!isEndOfWaitFutureExprList(nextToken.kind)) {
            waitFutureExprEnd = parseWaitFutureExprEnd();
            if (waitFutureExprEnd == null) {
                break;
            }

            waitFutureExprList.add(waitFutureExprEnd);
            waitField = parseWaitFutureExpr();
            waitFutureExprList.add(waitField);
            nextToken = peek();
        }

        // NOTE(review): the full alternate list is parsed, but only the first
        // wait-future-expr is attached to the wait-action node — confirm whether
        // alternate-wait is intentionally unsupported at this point.
        endContext();
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
    }

    // End of a wait-future-expr list. PIPE_TOKEN is listed explicitly to document
    // that a pipe continues the list (falls through to `false`).
    private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case OPEN_BRACE_TOKEN:
                return true;
            case PIPE_TOKEN:
            default:
                return false;
        }
    }

    // Parse one wait-future-expr. Mapping constructors and actions are parsed
    // but flagged with diagnostics, since they are not valid wait-future-exprs.
    private STNode parseWaitFutureExpr() {
        STNode waitFutureExpr = parseActionOrExpression();
        if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
            waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                    DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
        } else if (isAction(waitFutureExpr)) {
            waitFutureExpr =
                    SyntaxErrors.addDiagnostic(waitFutureExpr, DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
        }
        return waitFutureExpr;
    }

    // Parse the `|` separator between wait-future-exprs; null signals end of list.
    private STNode parseWaitFutureExprEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                return parsePipeToken();
            default:
                if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {
                    return null;
                }

                recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
                return parseWaitFutureExprEnd();
        }
    }

    /**
     * Parse multiple wait action.
     *

     * <p>
     * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
     *
     * @param waitKeyword Wait keyword
     * @return Multiple wait action node
     */
    private STNode parseMultiWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
        STNode openBrace = parseOpenBrace();
        STNode waitFields = parseWaitFields();
        STNode closeBrace = parseCloseBrace();
        endContext();

        // An empty wait-field list is invalid; attach a diagnostic to the open brace.
        openBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
        STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);
    }

    // Parse the comma-separated wait-field list, including separator tokens.
    private STNode parseWaitFields() {
        List<STNode> waitFields = new ArrayList<>();
        STToken nextToken = peek();

        // Empty list: `{ }`.
        if (isEndOfWaitFields(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first wait field; it has no leading comma.
        STNode waitField = parseWaitField();
        waitFields.add(waitField);

        // Parse the remaining `, wait-field` pairs.
        nextToken = peek();
        STNode waitFieldEnd;
        while (!isEndOfWaitFields(nextToken.kind)) {
            waitFieldEnd = parseWaitFieldEnd();
            if (waitFieldEnd == null) {
                break;
            }

            waitFields.add(waitFieldEnd);
            waitField = parseWaitField();
            waitFields.add(waitField);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(waitFields);
    }

    // EOF or `}` terminates the wait-field list.
    private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return true;
            default:
                return false;
        }
    }

    // Parse the comma that ends a wait field; null signals the closing brace.
    private STNode parseWaitFieldEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.WAIT_FIELD_END);
                return parseWaitFieldEnd();
        }
    }

    /**
     * Parse wait field.
     * <p>
     * <code>wait-field := variable-name | field-name : wait-future-expr</code>
     *
     * @return Receiver field node
     */
    private STNode parseWaitField() {
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
                identifier = STNodeFactory.createSimpleNameReferenceNode(identifier);
                return createQualifiedWaitField(identifier);
            default:
                recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
                return parseWaitField();
        }
    }

    // Turns `identifier` into `field-name : wait-future-expr` when a colon follows;
    // otherwise returns the identifier unchanged.
    private STNode createQualifiedWaitField(STNode identifier) {
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            return identifier;
        }

        STNode colon = parseColon();
        STNode waitFutureExpr = parseWaitFutureExpr();
        return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);
    }

    /**
     * Parse annot access expression.
     *

     * <p>
     * <code>
     * annot-access-expr := expression .@ annot-tag-reference
     * <br/>
     * annot-tag-reference := qualified-identifier | identifier
     * </code>
     *
     * @param lhsExpr Preceding expression of the annot access access
     * @param isInConditionalExpr Whether this is parsed inside a conditional-expr
     * @return Parsed node
     */
    private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode annotAccessToken = parseAnnotChainingToken();
        STNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);
        return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);
    }

    /**
     * Parse annot-chaining-token ({@code .@}).
     *
     * @return Parsed node
     */
    private STNode parseAnnotChainingToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);
            return parseAnnotChainingToken();
        }
    }

    /**
     * Parse field access identifier.
     * <p>
     * <code>field-access-identifier := qualified-identifier | identifier</code>
     *
     * @param isInConditionalExpr Whether this is parsed inside a conditional-expr
     * @return Parsed node
     */
    private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
        STToken nextToken = peek();
        if (!isPredeclaredIdentifier(nextToken.kind)) {
            // Not a valid identifier start: synthesize a missing identifier with a
            // diagnostic and continue parsing the qualified-identifier from it.
            STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
            return parseQualifiedIdentifier(identifier, isInConditionalExpr);
        }

        return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
    }

    /**
     * Parse query action.
     *

     * <p>
     * <code>
     * query-action := query-pipeline do-clause
     * <br/>
     * do-clause := do block-stmt
     * </code>
     *
     * @param queryConstructType Query construct type. This is only for validation.
     * @param queryPipeline Query pipeline
     * @param selectClause Select clause, if any. This is only for validation.
     * @return Query action node
     */
    private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause) {
        // A construct type (e.g. `stream`/`map`) is invalid in a query action:
        // attach it to the pipeline as invalid minutiae with a diagnostic.
        if (queryConstructType != null) {
            queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                    DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
        }
        // Likewise, a select clause is invalid in a query action.
        if (selectClause != null) {
            queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                    DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
        }

        startContext(ParserRuleContext.DO_CLAUSE);
        STNode doKeyword = parseDoKeyword();
        STNode blockStmt = parseBlockNode();
        endContext();

        return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt);
    }

    /**
     * Parse 'do' keyword.
     *
     * @return do keyword node
     */
    private STNode parseDoKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.DO_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.DO_KEYWORD);
            return parseDoKeyword();
        }
    }

    /**
     * Parse optional field access or xml optional attribute access expression.
     *

     * <p>
     * <code>
     * optional-field-access-expr := expression ?. field-name
     * <br/>
     * xml-optional-attribute-access-expr := expression ?. xml-attribute-name
     * <br/>
     * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
     * <br/>
     * xml-qualified-name := xml-namespace-prefix : identifier
     * <br/>
     * xml-namespace-prefix := identifier
     * </code>
     *
     * @param lhsExpr Preceding expression of the optional access
     * @param isInConditionalExpr Whether this is parsed inside a conditional-expr
     * @return Parsed node
     */
    private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode optionalFieldAccessToken = parseOptionalChainingToken();
        STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);
        return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);
    }

    /**
     * Parse optional chaining token ({@code ?.}).
     *
     * @return parsed node
     */
    private STNode parseOptionalChainingToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
            return parseOptionalChainingToken();
        }
    }

    /**
     * Parse conditional expression.
     *

     * <p>
     * <code>conditional-expr := expression ? expression : expression</code>
     *
     * @param lhsExpr Preceding expression of the question mark
     * @param isInConditionalExpr whether calling from a conditional-expr
     * @return Parsed node
     */
    private STNode parseConditionalExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
        STNode questionMark = parseQuestionMark();
        // Parse the middle expression at low precedence, marking that we are inside
        // a conditional-expr so the `:` can be disambiguated.
        STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);

        // If no `:` follows, the middle expression may have swallowed the `:` as
        // part of a qualified-name-reference (`a ? b : c` parsed as `a ? (b:c)`).
        // Split the qualified name back into `middle : end` parts.
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            if (middleExpr.kind == SyntaxKind.CONDITIONAL_EXPRESSION) {
                STConditionalExpressionNode innerConditionalExpr = (STConditionalExpressionNode) middleExpr;
                STNode innerMiddleExpr = innerConditionalExpr.middleExpression;

                // Try to peel a qualified-name-reference off the right-most end of the
                // inner conditional's middle expression.
                STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, false);
                if (rightMostQNameRef != null) {
                    middleExpr = generateConditionalExprForRightMost(innerConditionalExpr.lhsExpression,
                            innerConditionalExpr.questionMarkToken, innerMiddleExpr, rightMostQNameRef);
                    endContext();
                    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                            innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
                }

                STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, true);
                if (leftMostQNameRef != null) {
                    middleExpr = generateConditionalExprForLeftMost(innerConditionalExpr.lhsExpression,
                            innerConditionalExpr.questionMarkToken, innerMiddleExpr, leftMostQNameRef);
                    endContext();
                    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                            innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
                }
            }

            // Try to peel a qualified-name-reference off the middle expression itself,
            // right-most occurrence first, then left-most.
            STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, false);
            if (rightMostQNameRef != null) {
                endContext();
                return generateConditionalExprForRightMost(lhsExpr, questionMark, middleExpr, rightMostQNameRef);
            }

            STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, true);
            if (leftMostQNameRef != null) {
                endContext();
                return generateConditionalExprForLeftMost(lhsExpr, questionMark, middleExpr, leftMostQNameRef);
            }
        }

        return parseConditionalExprRhs(lhsExpr, questionMark, middleExpr, isInConditionalExpr);
    }

    // Split `... prefix:identifier` at the right-most qualified name: the identifier
    // becomes the end expression, and the prefix replaces the qualified name in the middle.
    private STNode generateConditionalExprForRightMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                       STNode rightMostQualifiedNameRef) {
        STQualifiedNameReferenceNode qualifiedNameRef =
                (STQualifiedNameReferenceNode) rightMostQualifiedNameRef;
        STNode endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);

        STNode simpleNameRef =
                ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix);
        middleExpr = middleExpr.replace(rightMostQualifiedNameRef, simpleNameRef);
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, qualifiedNameRef.colon,
                endExpr);
    }

    // Split `prefix:identifier ...` at the left-most qualified name: the prefix
    // becomes the middle expression, and the identifier replaces the qualified name
    // in the end expression.
    private STNode generateConditionalExprForLeftMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                      STNode leftMostQualifiedNameRef) {
        STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) leftMostQualifiedNameRef;
        STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
        STNode endExpr = middleExpr.replace(leftMostQualifiedNameRef, simpleNameRef);
        middleExpr = ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix);
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, qualifiedNameRef.colon,
                endExpr);
    }

    // Parse the `: expression` tail of a conditional expression.
    private STNode parseConditionalExprRhs(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                           boolean isInConditionalExpr) {
        STNode colon = parseColon();
        endContext();
        // The end expression is parsed at the same low precedence as the middle one.
        STNode endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false,
                isInConditionalExpr);
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);
    }

    /**
     * Parse enum declaration.
     *

     * <p>
     * <code>
     * module-enum-decl :=
     * metadata
     * [public] enum identifier { enum-member (, enum-member)* }
     * <br/>
     * enum-member := metadata identifier [= const-expr]
     * </code>
     *
     * @param metadata Metadata (documentation and annotations) preceding the declaration
     * @param qualifier Visibility qualifier (e.g. `public`), or empty node
     * @return Parsed enum node.
     */
    private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
        STNode enumKeywordToken = parseEnumKeyword();
        STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
        STNode openBraceToken = parseOpenBrace();
        STNode enumMemberList = parseEnumMemberList();
        STNode closeBraceToken = parseCloseBrace();

        endContext();
        // An empty member list is invalid; attach a diagnostic to the open brace.
        openBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,
                DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
        return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,
                openBraceToken, enumMemberList, closeBraceToken);
    }

    /**
     * Parse 'enum' keyword.
     *
     * @return enum keyword node
     */
    private STNode parseEnumKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ENUM_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ENUM_KEYWORD);
            return parseEnumKeyword();
        }
    }

    /**
     * Parse enum member list.
     *

     * <p>
     * <code>enum-member := metadata identifier [= const-expr]</code>
     *
     * @return enum member list node.
     */
    private STNode parseEnumMemberList() {
        startContext(ParserRuleContext.ENUM_MEMBER_LIST);

        // Empty member list: `enum Foo { }`.
        // NOTE(review): this early return does not end the ENUM_MEMBER_LIST
        // context started above — confirm this is intentional.
        if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first enum member; it has no leading comma.
        List<STNode> enumMemberList = new ArrayList<>();
        STNode enumMember = parseEnumMember();

        // Parse the remaining `, enum-member` pairs.
        STNode enumMemberRhs;
        while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
            enumMemberRhs = parseEnumMemberEnd();
            if (enumMemberRhs == null) {
                break;
            }
            enumMemberList.add(enumMember);
            enumMemberList.add(enumMemberRhs);
            enumMember = parseEnumMember();
        }

        enumMemberList.add(enumMember);

        endContext();
        return STNodeFactory.createNodeList(enumMemberList);
    }

    /**
     * Parse enum member.
     *

     * <p>
     * <code>enum-member := metadata identifier [= const-expr]</code>
     *
     * @return Parsed enum member node.
     */
    private STNode parseEnumMember() {
        // Optional metadata (documentation string and/or annotations).
        STNode metadata;
        switch (peek().kind) {
            case DOCUMENTATION_STRING:
            case AT_TOKEN:
                metadata = parseMetaData();
                break;
            default:
                metadata = STNodeFactory.createEmptyNode();
        }

        STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
        return parseEnumMemberRhs(metadata, identifierNode);
    }

    // Parse the optional `= const-expr` tail of an enum member.
    private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
        STNode equalToken, constExprNode;
        switch (peek().kind) {
            case EQUAL_TOKEN:
                equalToken = parseAssignOp();
                constExprNode = parseExpression();
                break;
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
                // No explicit value: `= const-expr` is absent.
                equalToken = STNodeFactory.createEmptyNode();
                constExprNode = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS);
                return parseEnumMemberRhs(metadata, identifierNode);
        }

        return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);
    }

    // Parse the comma that ends an enum member; null signals the closing brace.
    private STNode parseEnumMemberEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.ENUM_MEMBER_END);
                return parseEnumMemberEnd();
        }
    }

    // Disambiguate between a transaction statement (`transaction { ... }`) and a
    // var-decl whose type starts with the predeclared prefix `transaction:`.
    private STNode parseTransactionStmtOrVarDecl(STNode annots, List<STNode> qualifiers, STToken transactionKeyword) {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                reportInvalidStatementAnnots(annots, qualifiers);
                reportInvalidQualifierList(qualifiers);
                return parseTransactionStatement(transactionKeyword);
            case COLON_TOKEN:
                if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                // fall through to recovery
            default:
                Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);

                // Recovery decided this is a type reference: treat `transaction` as a
                // qualified type prefix and continue as a var-decl.
                if (solution.action == Action.KEEP ||
                        (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }

                return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);
        }
    }

    /**
     * Parse transaction statement.
     *

     * <p>
     * <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code>
     *
     * @param transactionKeyword Already-consumed `transaction` keyword
     * @return Transaction statement node
     */
    private STNode parseTransactionStatement(STNode transactionKeyword) {
        startContext(ParserRuleContext.TRANSACTION_STMT);
        STNode blockStmt = parseBlockNode();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);
    }

    /**
     * Parse commit action.
     *

     * <p>
     * <code>commit-action := "commit"</code>
     *
     * @return Commit action node
     */
    private STNode parseCommitAction() {
        STNode commitKeyword = parseCommitKeyword();
        return STNodeFactory.createCommitActionNode(commitKeyword);
    }

    /**
     * Parse commit keyword.
     *
     * @return parsed node
     */
    private STNode parseCommitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.COMMIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.COMMIT_KEYWORD);
            return parseCommitKeyword();
        }
    }

    /**
     * Parse retry statement.
     *

     * <p>
     * <code>
     * retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
     * <br/>
     * retry-spec := [type-parameter] [ `(` arg-list `)` ]
     * </code>
     *
     * @return Retry statement node
     */
    private STNode parseRetryStatement() {
        startContext(ParserRuleContext.RETRY_STMT);
        STNode retryKeyword = parseRetryKeyword();
        // The RETRY_STMT context is ended inside parseRetryTypeParamRhs, after the body.
        STNode retryStmt = parseRetryKeywordRhs(retryKeyword);
        return retryStmt;
    }

    // Parse the optional `<type-parameter>` part of the retry-spec.
    private STNode parseRetryKeywordRhs(STNode retryKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case LT_TOKEN:
                STNode typeParam = parseTypeParameter();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            case OPEN_PAREN_TOKEN:
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                // No type parameter present.
                typeParam = STNodeFactory.createEmptyNode();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            default:
                recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS);
                return parseRetryKeywordRhs(retryKeyword);
        }
    }

    // Parse the optional `( arg-list )` part of the retry-spec, then the body.
    private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
        STNode args;
        switch (peek().kind) {
            case OPEN_PAREN_TOKEN:
                args = parseParenthesizedArgList();
                break;
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                args = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS);
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
        }

        STNode blockStmt = parseRetryBody();
        endContext(); // ends the RETRY_STMT context started in parseRetryStatement
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
    }

    // Retry body is either a block statement or a nested transaction statement.
    private STNode parseRetryBody() {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                return parseBlockNode();
            case TRANSACTION_KEYWORD:
                return parseTransactionStatement(consume());
            default:
                recover(peek(), ParserRuleContext.RETRY_BODY);
                return parseRetryBody();
        }
    }

    /**
     * Parse optional on fail clause.
     *
     * @return On-fail clause node, or an empty node when absent
     */
    private STNode parseOptionalOnFailClause() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
            return parseOnFailClause();
        }

        // Anything that can follow a compound statement means there is no on-fail clause.
        if (isEndOfRegularCompoundStmt(nextToken.kind)) {
            return STNodeFactory.createEmptyNode();
        }

        recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
        return parseOptionalOnFailClause();
    }

    // Tokens that can legally follow a compound statement (and therefore end it).
    private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case CLOSE_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case AT_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return isStatementStartingToken(nodeKind);
        }
    }

    // Whether the given token kind can start a statement.
    private boolean isStatementStartingToken(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case FINAL_KEYWORD:

            // Statements that begin with a dedicated keyword/token.
            case IF_KEYWORD:
            case WHILE_KEYWORD:
            case DO_KEYWORD:
            case PANIC_KEYWORD:
            case CONTINUE_KEYWORD:
            case BREAK_KEYWORD:
            case RETURN_KEYWORD:
            case LOCK_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case FORK_KEYWORD:
            case FOREACH_KEYWORD:
            case XMLNS_KEYWORD:
            case TRANSACTION_KEYWORD:
            case RETRY_KEYWORD:
            case ROLLBACK_KEYWORD:
            case MATCH_KEYWORD:
            case FAIL_KEYWORD:

            // Expression-statement / action-statement starting tokens.
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case TRAP_KEYWORD:
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case WAIT_KEYWORD:
            case COMMIT_KEYWORD:

            // NOTE(review): WORKER/TYPE/CONST are accepted as statement starts here —
            // presumably to terminate the enclosing compound statement during
            // recovery; confirm against the callers.
            case WORKER_KEYWORD:
            case TYPE_KEYWORD:
            case CONST_KEYWORD:
                return true;
            default:
                // A type descriptor can start a local var-decl statement.
                if (isTypeStartingToken(nodeKind)) {
                    return true;
                }

                // An expression can start an expression/assignment statement.
                if (isValidExpressionStart(nodeKind, 1)) {
                    return true;
                }

                return false;
        }
    }

    /**
     * Parse on fail clause.
     *

     * <p>
     * <code>
     * on-fail-clause := on fail typed-binding-pattern statement-block
     * </code>
     *
     * @return On fail clause node
     */
    private STNode parseOnFailClause() {
        startContext(ParserRuleContext.ON_FAIL_CLAUSE);
        STNode onKeyword = parseOnKeyword();
        STNode failKeyword = parseFailKeyword();
        STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false,
                TypePrecedence.DEFAULT);
        STNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_NAME);
        STNode blockStatement = parseBlockNode();
        endContext();
        return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDescriptor, identifier,
                blockStatement);
    }

    /**
     * Parse retry keyword.
     *
     * @return parsed node
     */
    private STNode parseRetryKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RETRY_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.RETRY_KEYWORD);
            return parseRetryKeyword();
        }
    }

    /**
     * Parse rollback statement.
     *

     * <p>
     * <code>rollback-stmt := "rollback" [expression] ";"</code>
     *
     * @return Rollback statement node
     */
    private STNode parseRollbackStatement() {
        startContext(ParserRuleContext.ROLLBACK_STMT);
        STNode rollbackKeyword = parseRollbackKeyword();
        // The expression is optional: `rollback;` is valid.
        STNode expression;
        if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {
            expression = STNodeFactory.createEmptyNode();
        } else {
            expression = parseExpression();
        }

        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);
    }

    /**
     * Parse rollback keyword.
     *
     * @return Rollback keyword node
     */
    private STNode parseRollbackKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ROLLBACK_KEYWORD);
            return parseRollbackKeyword();
        }
    }

    /**
     * Parse transactional expression.
     *

     * <p>
     * <code>transactional-expr := "transactional"</code>
     *
     * @return Transactional expression node
     */
    private STNode parseTransactionalExpression() {
        STNode transactionalKeyword = parseTransactionalKeyword();
        return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);
    }

    /**
     * Parse transactional keyword.
     *
     * @return Transactional keyword node
     */
    private STNode parseTransactionalKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD);
            return parseTransactionalKeyword();
        }
    }

    /**
     * Parse byte array literal.
     *

     * <p>
     * <code>
     * byte-array-literal := Base16Literal | Base64Literal
     * <br/>
     * Base16Literal := base16 WS ` HexGroup* WS `
     * <br/>
     * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
     * </code>
     *
     * @return parsed node
     */
    private STNode parseByteArrayLiteral() {
        // `base16` or `base64` keyword.
        STNode type;
        if (peek().kind == SyntaxKind.BASE16_KEYWORD) {
            type = parseBase16Keyword();
        } else {
            type = parseBase64Keyword();
        }

        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        if (startingBackTick.isMissing()) {
            // No backtick at all: synthesize an empty literal with a diagnostic.
            startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
            STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
            STNode content = STNodeFactory.createEmptyNode();
            STNode byteArrayLiteral =
                    STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);
            byteArrayLiteral =
                    SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);
            return byteArrayLiteral;
        }

        STNode content = parseByteArrayContent();
        return parseByteArrayLiteral(type, startingBackTick, content);
    }

    /**
     * Validate the parsed content and assemble the byte array literal node.
     * Invalid content is attached to the starting backtick as invalid minutiae
     * with an appropriate diagnostic.
     *
     * @param typeKeyword keyword token, possible values are `base16` and `base64`
     * @param startingBackTick starting backtick token
     * @param byteArrayContent byte array literal content to be validated
     * @return parsed byte array literal node
     */
    private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
        STNode content = STNodeFactory.createEmptyNode();
        STNode newStartingBackTick = startingBackTick;
        STNodeList items = (STNodeList) byteArrayContent;
        if (items.size() == 1) {
            STNode item = items.get(0);
            if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else {
                content = item;
            }
        } else if (items.size() > 1) {
            // More than one content item is invalid: fold every item into the
            // starting backtick as invalid minutiae, then add a single diagnostic.
            STNode clonedStartingBackTick = startingBackTick;
            for (int index = 0; index < items.size(); index++) {
                STNode item = items.get(index);
                clonedStartingBackTick =
                        SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
            }
            newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        }

        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
    }

    /**
     * Parse base16 keyword.
     *
     * @return base16 keyword node
     */
    private STNode parseBase16Keyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.BASE16_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.BASE16_KEYWORD);
            return parseBase16Keyword();
        }
    }

    /**
     * Parse base64 keyword.
     *
     * @return base64 keyword node
     */
    private STNode parseBase64Keyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.BASE64_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.BASE64_KEYWORD);
            return parseBase64Keyword();
        }
    }

    /**
     * Validate and parse byte array literal content.
     * An error is reported, if the content is invalid.
     *
     * @return parsed node
     */
    private STNode parseByteArrayContent() {
        STToken nextToken = peek();

        List<STNode> items = new ArrayList<>();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode content = parseTemplateItem();
            items.add(content);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(items);
    }

    /**
     * Validate base16 literal content.
     *

\n * \n * Base16Literal := base16 WS ` HexGroup* WS `\n *
\n * HexGroup := WS HexDigit WS HexDigit\n *
\n * WS := WhiteSpaceChar*\n *
\n * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n *
\n *\n * @param content the string surrounded by the backticks\n * @return true, if the string content is valid. false otherwise.\n */\n static boolean isValidBase16LiteralContent(String content) {\n char[] charArray = content.toCharArray();\n int hexDigitCount = 0;\n\n for (char c : charArray) {\n switch (c) {\n case LexerTerminals.TAB:\n case LexerTerminals.NEWLINE:\n case LexerTerminals.CARRIAGE_RETURN:\n case LexerTerminals.SPACE:\n break;\n default:\n if (isHexDigit(c)) {\n hexDigitCount++;\n } else {\n return false;\n }\n break;\n }\n }\n\n return hexDigitCount % 2 == 0;\n }\n\n /**\n * Validate base64 literal content.\n *

\n * \n * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `\n *
\n * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char\n *
\n * PaddedBase64Group :=\n * WS Base64Char WS Base64Char WS Base64Char WS PaddingChar\n * | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar\n *
\n * Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n *
\n * PaddingChar := =\n *
\n * WS := WhiteSpaceChar*\n *
\n * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20\n *
\n *\n * @param content the string surrounded by the backticks\n * @return true, if the string content is valid. false otherwise.\n */\n static boolean isValidBase64LiteralContent(String content) {\n char[] charArray = content.toCharArray();\n int base64CharCount = 0;\n int paddingCharCount = 0;\n\n for (char c : charArray) {\n switch (c) {\n case LexerTerminals.TAB:\n case LexerTerminals.NEWLINE:\n case LexerTerminals.CARRIAGE_RETURN:\n case LexerTerminals.SPACE:\n break;\n case LexerTerminals.EQUAL:\n paddingCharCount++;\n break;\n default:\n if (isBase64Char(c)) {\n if (paddingCharCount == 0) {\n base64CharCount++;\n } else {\n return false;\n }\n } else {\n return false;\n }\n break;\n }\n }\n\n if (paddingCharCount > 2) {\n return false;\n } else if (paddingCharCount == 0) {\n return base64CharCount % 4 == 0;\n } else {\n return base64CharCount % 4 == 4 - paddingCharCount;\n }\n }\n\n /**\n *

\n * Check whether a given char is a base64 char.\n *

\n * Base64Char := A .. Z | a .. z | 0 .. 9 | + | /\n *\n * @param c character to check\n * @return true, if the character represents a base64 char. false otherwise.\n */\n static boolean isBase64Char(int c) {\n if ('a' <= c && c <= 'z') {\n return true;\n }\n if ('A' <= c && c <= 'Z') {\n return true;\n }\n if (c == '+' || c == '/') {\n return true;\n }\n return isDigit(c);\n }\n\n static boolean isHexDigit(int c) {\n if ('a' <= c && c <= 'f') {\n return true;\n }\n if ('A' <= c && c <= 'F') {\n return true;\n }\n return isDigit(c);\n }\n\n static boolean isDigit(int c) {\n return ('0' <= c && c <= '9');\n }\n\n /**\n * Parse xml filter expression.\n *

     * xml-filter-expr := expression .< xml-name-pattern >
     *
     * @param lhsExpr Preceding expression of .< token
     * @return Parsed node
     */
    private STNode parseXMLFilterExpression(STNode lhsExpr) {
        STNode xmlNamePatternChain = parseXMLFilterExpressionRhs();
        return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);
    }

    /**
     * Parse xml filter expression rhs.
     *
     * filter-expression-rhs := .< xml-name-pattern >
     *
     * @return Parsed node
     */
    private STNode parseXMLFilterExpressionRhs() {
        STNode dotLTToken = parseDotLTToken();
        return parseXMLNamePatternChain(dotLTToken);
    }

    /**
     * Parse xml name pattern chain.
     *

     *
     * xml-name-pattern-chain := filter-expression-rhs | xml-element-children-step | xml-element-descendants-step
     *
     * filter-expression-rhs := .< xml-name-pattern >
     *
     * xml-element-children-step := /< xml-name-pattern >
     *
     * xml-element-descendants-step := /**\/< xml-name-pattern >
     *
     *
     * @param startToken Preceding token of xml name pattern
     * @return Parsed node
     */
    private STNode parseXMLNamePatternChain(STNode startToken) {
        startContext(ParserRuleContext.XML_NAME_PATTERN);
        STNode xmlNamePattern = parseXMLNamePattern();
        STNode gtToken = parseGTToken();
        endContext();

        // An empty name pattern list is invalid: the diagnostic is attached to
        // the start token rather than to the empty list node.
        startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,
                DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
        return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);
    }

    /**
     * Parse .< token.
     *
     * @return Parsed node
     */
    private STNode parseDotLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {
            return consume();
        } else {
            // Not a `.<` token: run error recovery and retry.
            recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);
            return parseDotLTToken();
        }
    }

    /**
     * Parse xml name pattern.
     *

     * xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*
     *
     * @return Parsed node
     */
    private STNode parseXMLNamePattern() {
        List xmlAtomicNamePatternList = new ArrayList<>();
        STToken nextToken = peek();

        // Return an empty list if there are no atomic name patterns.
        if (isEndOfXMLNamePattern(nextToken.kind)) {
            return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
        }

        // Parse the first atomic name pattern; it has no leading separator.
        STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();
        xmlAtomicNamePatternList.add(xmlAtomicNamePattern);

        // Parse the remaining `| atomic-name-pattern` pairs.
        STNode separator;
        while (!isEndOfXMLNamePattern(peek().kind)) {
            separator = parseXMLNamePatternSeparator();
            if (separator == null) {
                break;
            }
            xmlAtomicNamePatternList.add(separator);

            xmlAtomicNamePattern = parseXMLAtomicNamePattern();
            xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
        }

        return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
    }

    private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case GT_TOKEN:
            case EOF_TOKEN:
                return true;
            case IDENTIFIER_TOKEN:
            case ASTERISK_TOKEN:
            case COLON_TOKEN:
            default:
                // Identifier, `*` and `:` can start or continue a name pattern.
                return false;
        }
    }

    private STNode parseXMLNamePatternSeparator() {
        STToken token = peek();
        switch (token.kind) {
            case PIPE_TOKEN:
                return consume();
            case GT_TOKEN:
            case EOF_TOKEN:
                // End of the pattern: no separator to consume.
                return null;
            default:
                recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
                return parseXMLNamePatternSeparator();
        }
    }

    /**
     * Parse xml atomic name pattern.
     *

     *
     * xml-atomic-name-pattern :=
     *    *
     *    | identifier
     *    | xml-namespace-prefix : identifier
     *    | xml-namespace-prefix : *
     *
     *
     * @return Parsed node
     */
    private STNode parseXMLAtomicNamePattern() {
        startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
        STNode atomicNamePattern = parseXMLAtomicNamePatternBody();
        endContext();
        return atomicNamePattern;
    }

    private STNode parseXMLAtomicNamePatternBody() {
        STToken token = peek();
        STNode identifier;
        switch (token.kind) {
            case ASTERISK_TOKEN:
                // Bare `*` matches any name; nothing may follow it.
                return consume();
            case IDENTIFIER_TOKEN:
                identifier = consume();
                break;
            default:
                recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
                return parseXMLAtomicNamePatternBody();
        }

        return parseXMLAtomicNameIdentifier(identifier);
    }

    private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
        STToken token = peek();
        if (token.kind == SyntaxKind.COLON_TOKEN) {
            STNode colon = consume();
            STToken nextToken = peek();
            if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
                // Qualified form: `prefix : identifier` or `prefix : *`.
                STToken endToken = consume();
                return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);
            }
            // NOTE(review): the consumed colon token is not attached to the
            // returned node in this path — confirm whether it is intentionally dropped.
        }
        return STNodeFactory.createSimpleNameReferenceNode(identifier);
    }

    /**
     * Parse xml step expression.
     *

     * xml-step-expr := expression xml-step-start
     *
     * @param lhsExpr Preceding expression of /*, /<, or /**\/< token
     * @return Parsed node
     */
    private STNode parseXMLStepExpression(STNode lhsExpr) {
        STNode xmlStepStart = parseXMLStepStart();
        return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);
    }

    /**
     * Parse xml step start.
     *
     * xml-step-start :=
     *    xml-all-children-step
     *    | xml-element-children-step
     *    | xml-element-descendants-step
     *
     * xml-all-children-step := /*
     *
     * @return Parsed node
     */
    private STNode parseXMLStepStart() {
        STToken token = peek();
        STNode startToken;

        switch (token.kind) {
            case SLASH_ASTERISK_TOKEN:
                // `/*` takes no name pattern; return the token as-is.
                return consume();
            case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
                startToken = parseDoubleSlashDoubleAsteriskLTToken();
                break;
            case SLASH_LT_TOKEN:
            default:
                // Default to `/<` and let recovery deal with anything else.
                startToken = parseSlashLTToken();
                break;
        }
        return parseXMLNamePatternChain(startToken);
    }

    /**
     * Parse /< token.
     *
     * @return Parsed node
     */
    private STNode parseSlashLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);
            return parseSlashLTToken();
        }
    }

    /**
     * Parse /**\/< token.
     *
     * @return Parsed node
     */
    private STNode parseDoubleSlashDoubleAsteriskLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
            return parseDoubleSlashDoubleAsteriskLTToken();
        }
    }

    /**
     * Parse match statement.
     *

     * match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]
     *
     * @return Match statement
     */
    private STNode parseMatchStatement() {
        startContext(ParserRuleContext.MATCH_STMT);
        STNode matchKeyword = parseMatchKeyword();
        STNode actionOrExpr = parseActionOrExpression();
        startContext(ParserRuleContext.MATCH_BODY);
        STNode openBrace = parseOpenBrace();

        // Parse match clauses until the closing brace (or an end-of-statement token).
        List matchClausesList = new ArrayList<>();
        while (!isEndOfMatchClauses(peek().kind)) {
            STNode clause = parseMatchClause();
            matchClausesList.add(clause);
        }
        STNode matchClauses = STNodeFactory.createNodeList(matchClausesList);

        // The grammar requires at least one clause; report on the open brace if none.
        if (isNodeListEmpty(matchClauses)) {
            openBrace = SyntaxErrors.addDiagnostic(openBrace,
                    DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);
        }

        STNode closeBrace = parseCloseBrace();
        endContext(); // MATCH_BODY
        endContext(); // MATCH_STMT
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace,
                onFailClause);
    }

    /**
     * Parse match keyword.
     *
     * @return Match keyword node
     */
    private STNode parseMatchKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.MATCH_KEYWORD);
            return parseMatchKeyword();
        }
    }

    private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case TYPE_KEYWORD:
                return true;
            default:
                return isEndOfStatements();
        }
    }

    /**
     * Parse a single match clause.
     *

     *
     * match-clause := match-pattern-list [match-guard] => block-stmt
     *
     * match-guard := if expression
     *
     *
     * @return A match clause
     */
    private STNode parseMatchClause() {
        STNode matchPatterns = parseMatchPatternList();
        STNode matchGuard = parseMatchGuard();
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode blockStmt = parseBlockNode();

        // No patterns parsed: substitute a missing-identifier pattern so the clause
        // node stays well-formed, attaching the diagnostic to the guard when present,
        // otherwise to the `=>` token.
        if (isNodeListEmpty(matchPatterns)) {
            STToken identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            STNode constantPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);
            matchPatterns = STNodeFactory.createNodeList(constantPattern);

            DiagnosticErrorCode errorCode = DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN;
            if (matchGuard != null) {
                matchGuard = SyntaxErrors.addDiagnostic(matchGuard, errorCode);
            } else {
                rightDoubleArrow = SyntaxErrors.addDiagnostic(rightDoubleArrow, errorCode);
            }
        }

        return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
    }

    /**
     * Parse match guard.
     *
     * match-guard := if expression
     *
     * @return Match guard
     */
    private STNode parseMatchGuard() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IF_KEYWORD:
                STNode ifKeyword = parseIfKeyword();
                STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
                return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
            case RIGHT_DOUBLE_ARROW_TOKEN:
                // No guard present.
                return STNodeFactory.createEmptyNode();
            default:
                recover(nextToken, ParserRuleContext.OPTIONAL_MATCH_GUARD);
                return parseMatchGuard();
        }
    }

    /**
     * Parse match patterns list.
     *

     * match-pattern-list := match-pattern (| match-pattern)*
     *
     * @return Match patterns list
     */
    private STNode parseMatchPatternList() {
        startContext(ParserRuleContext.MATCH_PATTERN);
        List matchClauses = new ArrayList<>();
        while (!isEndOfMatchPattern(peek().kind)) {
            STNode clause = parseMatchPattern();
            if (clause == null) {
                break;
            }
            matchClauses.add(clause);

            // A null separator means the list has ended (guard or `=>` reached).
            STNode seperator = parseMatchPatternListMemberRhs();
            if (seperator == null) {
                break;
            }
            matchClauses.add(seperator);
        }

        endContext();
        return STNodeFactory.createNodeList(matchClauses);
    }

    private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case PIPE_TOKEN:
            case IF_KEYWORD:
            case RIGHT_DOUBLE_ARROW_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse match pattern.
     *

     *
     * match-pattern := var binding-pattern
     *                  | wildcard-match-pattern
     *                  | const-pattern
     *                  | list-match-pattern
     *                  | mapping-match-pattern
     *                  | error-match-pattern
     *
     * @return Match pattern
     */
    private STNode parseMatchPattern() {
        STToken nextToken = peek();
        if (isPredeclaredIdentifier(nextToken.kind)) {
            // Identifier start: either a const reference pattern, or the type
            // reference of an error match pattern missing its `error` keyword.
            STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
        }

        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
                return parseSimpleConstExpr();
            case VAR_KEYWORD:
                return parseVarTypedBindingPattern();
            case OPEN_BRACKET_TOKEN:
                return parseListMatchPattern();
            case OPEN_BRACE_TOKEN:
                return parseMappingMatchPattern();
            case ERROR_KEYWORD:
                return parseErrorMatchPattern();
            default:
                recover(nextToken, ParserRuleContext.MATCH_PATTERN_START);
                return parseMatchPattern();
        }
    }

    private STNode parseMatchPatternListMemberRhs() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                return parsePipeToken();
            case IF_KEYWORD:
            case RIGHT_DOUBLE_ARROW_TOKEN:
                // End of the match pattern list.
                return null;
            default:
                recover(nextToken, ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
                return parseMatchPatternListMemberRhs();
        }
    }

    /**
     * Parse var typed binding pattern.
     *

     *
     * var binding-pattern
     *
     *
     * @return Parsed typed binding pattern node
     */
    private STNode parseVarTypedBindingPattern() {
        STNode varKeyword = parseVarKeyword();
        // The `var` keyword itself acts as the type descriptor of the binding pattern.
        STNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);
        STNode bindingPattern = parseBindingPattern();
        return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);
    }

    /**
     * Parse var keyword.
     *
     * @return Var keyword node
     */
    private STNode parseVarKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.VAR_KEYWORD) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.VAR_KEYWORD);
            return parseVarKeyword();
        }
    }

    /**
     * Parse list match pattern.
     *

     *
     * list-match-pattern := [ list-member-match-patterns ]
     * list-member-match-patterns :=
     *    match-pattern (, match-pattern)* [, rest-match-pattern]
     *    | [ rest-match-pattern ]
     *
     *
     * @return Parsed list match pattern node
     */
    private STNode parseListMatchPattern() {
        startContext(ParserRuleContext.LIST_MATCH_PATTERN);
        STNode openBracketToken = parseOpenBracket();
        List matchPatternList = new ArrayList<>();
        STNode listMatchPatternMemberRhs = null;
        boolean isEndOfFields = false;

        while (!isEndOfListMatchPattern()) {
            STNode listMatchPatternMember = parseListMatchPatternMember();
            matchPatternList.add(listMatchPatternMember);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();

            // A rest match pattern must be the last member of the list.
            if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                isEndOfFields = true;
                break;
            }

            if (listMatchPatternMemberRhs != null) {
                matchPatternList.add(listMatchPatternMemberRhs);
            } else {
                break;
            }
        }

        // Members following a rest match pattern are invalid: consume each one
        // (plus its separator) and attach it to the previous node as invalid minutiae.
        while (isEndOfFields && listMatchPatternMemberRhs != null) {
            updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);

            if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
                break;
            }

            STNode invalidField = parseListMatchPatternMember();
            updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
        }

        STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
        STNode closeBracketToken = parseCloseBracket();
        endContext();

        return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
    }

    public boolean isEndOfListMatchPattern() {
        switch (peek().kind) {
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseListMatchPatternMember() {
        STNode nextToken = peek();
        switch (nextToken.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            default:
                // Any other member is an ordinary match pattern.
                return parseMatchPattern();
        }
    }

    /**
     * Parse rest match pattern.
     *

     *
     * rest-match-pattern := ... var variable-name
     *
     *
     * @return Parsed rest match pattern node
     */
    private STNode parseRestMatchPattern() {
        startContext(ParserRuleContext.REST_MATCH_PATTERN);
        STNode ellipsisToken = parseEllipsis();
        STNode varKeywordToken = parseVarKeyword();
        STNode variableName = parseVariableName();
        endContext();

        STSimpleNameReferenceNode simpleNameReferenceNode =
                (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
        return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);
    }

    private STNode parseListMatchPatternMemberRhs() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                // End of the list: no separator.
                return null;
            default:
                recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
                return parseListMatchPatternMemberRhs();
        }
    }

    /**
     * Parse mapping match pattern.
     *

     * mapping-match-pattern := { field-match-patterns }
     *
     * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]
     *                         | [ rest-match-pattern ]
     *
     * field-match-pattern := field-name : match-pattern
     *
     * rest-match-pattern := ... var variable-name
     *
     *
     * @return Parsed Node.
     */
    private STNode parseMappingMatchPattern() {
        startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);
        STNode openBraceToken = parseOpenBrace();
        STNode fieldMatchPatterns = parseFieldMatchPatternList();
        STNode closeBraceToken = parseCloseBrace();
        endContext();
        return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken);
    }

    // Parses the (possibly empty) field match pattern list of a mapping match pattern.
    private STNode parseFieldMatchPatternList() {
        List fieldMatchPatterns = new ArrayList<>();

        STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
        if (fieldMatchPatternMember == null) {
            return STNodeFactory.createEmptyNodeList();
        }

        fieldMatchPatterns.add(fieldMatchPatternMember);
        if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            // A rest match pattern terminates the list; anything after it is invalid.
            invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
            return STNodeFactory.createNodeList(fieldMatchPatterns);
        }

        return parseFieldMatchPatternList(fieldMatchPatterns);
    }

    // Parses the remaining `, member` pairs after the first field match pattern.
    private STNode parseFieldMatchPatternList(List fieldMatchPatterns) {
        while (!isEndOfMappingMatchPattern()) {
            STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
            if (fieldMatchPatternRhs == null) {
                break;
            }

            fieldMatchPatterns.add(fieldMatchPatternRhs);
            STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
            if (fieldMatchPatternMember == null) {
                // Trailing separator without a member: substitute a missing member.
                fieldMatchPatternMember = createMissingFieldMatchPattern();
            }

            fieldMatchPatterns.add(fieldMatchPatternMember);
            if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
                break;
            }
        }

        return STNodeFactory.createNodeList(fieldMatchPatterns);
    }

    // Creates a `field-name : match-pattern` node built entirely from missing
    // tokens, carrying a missing-member diagnostic.
    private STNode createMissingFieldMatchPattern() {
        STNode fieldName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        STNode colon = SyntaxErrors.createMissingToken(SyntaxKind.COLON_TOKEN);
        STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        STNode matchPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);
        STNode fieldMatchPatternMember = STNodeFactory.createFieldMatchPatternNode(fieldName, colon, matchPattern);
        fieldMatchPatternMember = SyntaxErrors.addDiagnostic(fieldMatchPatternMember,
                DiagnosticErrorCode.ERROR_MISSING_FIELD_MATCH_PATTERN_MEMBER);
        return fieldMatchPatternMember;
    }

    /**
     * Parse and invalidate all field match pattern members after a rest-match-pattern.
     *
     * @param fieldMatchPatterns field-match-patterns list
     */
    private void invalidateExtraFieldMatchPatterns(List fieldMatchPatterns) {
        while (!isEndOfMappingMatchPattern()) {
            STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
            if (fieldMatchPatternRhs == null) {
                break;
            }

            STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
            if (fieldMatchPatternMember == null) {
                // Only a dangling separator: attach it as an invalid token.
                updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs,
                        DiagnosticErrorCode.ERROR_INVALID_TOKEN, ((STToken) fieldMatchPatternRhs).text());
            } else {
                updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs, null);
                updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternMember,
                        DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            }
        }
    }

    private STNode parseFieldMatchPatternMember() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                return parseFieldMatchPattern();
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            case CLOSE_BRACE_TOKEN:
            case EOF_TOKEN:
                // End of the mapping match pattern.
                return null;
            default:
                recover(nextToken, ParserRuleContext.FIELD_MATCH_PATTERNS_START);
                return parseFieldMatchPatternMember();
        }
    }

    /**
     * Parse field match pattern.
     *

     *
     * field-match-pattern := field-name : match-pattern
     *
     *
     * @return Parsed field match pattern node
     */
    public STNode parseFieldMatchPattern() {
        STNode fieldNameNode = parseVariableName();
        STNode colonToken = parseColon();
        STNode matchPattern = parseMatchPattern();
        return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);
    }

    public boolean isEndOfMappingMatchPattern() {
        switch (peek().kind) {
            case CLOSE_BRACE_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseFieldMatchPatternRhs() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
            case EOF_TOKEN:
                // End of the field match patterns: no separator.
                return null;
            default:
                recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
                return parseFieldMatchPatternRhs();
        }
    }

    // Disambiguates an identifier-started pattern: `foo(...)` is treated as an
    // error match pattern missing its `error` keyword; otherwise the expression
    // is a const reference pattern.
    private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                        ParserRuleContext.ERROR_KEYWORD);
                startContext(ParserRuleContext.ERROR_MATCH_PATTERN); // ended inside parseErrorMatchPattern
                return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);
            default:
                if (isMatchPatternEnd(peek().kind)) {
                    return typeRefOrConstExpr;
                }
                recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN);
                return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
        }
    }

    private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case RIGHT_DOUBLE_ARROW_TOKEN:
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case PIPE_TOKEN:
            case IF_KEYWORD:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse error match pattern.
     *

     * error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )
     * error-arg-list-match-pattern :=
     *    error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]
     *    | [error-field-match-patterns]
     * error-message-match-pattern := simple-match-pattern
     * error-cause-match-pattern := simple-match-pattern | error-match-pattern
     * simple-match-pattern :=
     *    wildcard-match-pattern
     *    | const-pattern
     *    | var variable-name
     * error-field-match-patterns :=
     *    named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
     *    | rest-match-pattern
     * named-arg-match-pattern := arg-name = match-pattern
     *
     *
     * @return Parsed error match pattern node.
     */
    private STNode parseErrorMatchPattern() {
        startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
        STNode errorKeyword = consume();
        return parseErrorMatchPattern(errorKeyword);
    }

    // Parses the optional error-type-reference following the `error` keyword.
    private STNode parseErrorMatchPattern(STNode errorKeyword) {
        STToken nextToken = peek();
        STNode typeRef;
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                typeRef = STNodeFactory.createEmptyNode();
                break;
            default:
                if (isPredeclaredIdentifier(nextToken.kind)) {
                    typeRef = parseTypeReference();
                    break;
                }
                recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);
                return parseErrorMatchPattern(errorKeyword);
        }
        return parseErrorMatchPattern(errorKeyword, typeRef);
    }

    // Parses the parenthesized arg list and assembles the error match pattern node.
    // Also ends the ERROR_MATCH_PATTERN context opened by the callers.
    private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {
        STNode openParenthesisToken = parseOpenParenthesis();
        STNode argListMatchPatternNode = parseErrorArgListMatchPatterns();
        STNode closeParenthesisToken = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParenthesisToken,
                argListMatchPatternNode, closeParenthesisToken);
    }

    private STNode parseErrorArgListMatchPatterns() {
        List argListMatchPatterns = new ArrayList<>();

        if (isEndOfErrorFieldMatchPatterns()) {
            return STNodeFactory.createNodeList(argListMatchPatterns);
        }
        startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);
        STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);
        endContext();

        if (isSimpleMatchPattern(firstArg.kind)) {
            // First arg is the error-message match pattern; an optional second arg
            // (cause/named/rest) may follow after a comma.
            argListMatchPatterns.add(firstArg);
            STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);
            if (argEnd != null) {
                STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);
                if (isValidSecondArgMatchPattern(secondArg.kind)) {
                    argListMatchPatterns.add(argEnd);
                    argListMatchPatterns.add(secondArg);
                } else {
                    // Invalid second arg: fold both the separator and the arg into
                    // the previous node as invalid minutiae.
                    updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                    updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,
                            DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
                }
            }
        } else {
            if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
                    firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {
                addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
            } else {
                argListMatchPatterns.add(firstArg);
            }
        }

        parseErrorFieldMatchPatterns(argListMatchPatterns);
        return STNodeFactory.createNodeList(argListMatchPatterns);
    }

    private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {
        switch (matchPatternKind) {
            case IDENTIFIER_TOKEN:
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
            case NUMERIC_LITERAL:
            case STRING_LITERAL:
            case NULL_LITERAL:
            case NIL_LITERAL:
            case BOOLEAN_LITERAL:
            case TYPED_BINDING_PATTERN:
            case UNARY_EXPRESSION:
                return true;
            default:
                return false;
        }
    }

    private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {
        switch (syntaxKind) {
            case ERROR_MATCH_PATTERN:
            case NAMED_ARG_MATCH_PATTERN:
            case REST_MATCH_PATTERN:
                return true;
            default:
                if (isSimpleMatchPattern(syntaxKind)) {
                    return true;
                }
                return false;
        }
    }

    /**
     * Parse error field match patterns.
     * error-field-match-patterns :=
     *    named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
     *    | rest-match-pattern
     * named-arg-match-pattern := arg-name = match-pattern
     *
     * @param argListMatchPatterns arg list to append the parsed members to
     */
    private void parseErrorFieldMatchPatterns(List argListMatchPatterns) {
        SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;
        while (!isEndOfErrorFieldMatchPatterns()) {
            STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);
            if (argEnd == null) {
                // No more args.
                break;
            }
            STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);
            DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);
            if (errorCode == null) {
                argListMatchPatterns.add(argEnd);
                argListMatchPatterns.add(currentArg);
                lastValidArgKind = currentArg.kind;
            } else if (argListMatchPatterns.size() == 0) {
                // Nothing valid parsed yet: attach the invalid nodes to the next token.
                addInvalidNodeToNextToken(argEnd, null);
                addInvalidNodeToNextToken(currentArg, errorCode);
            } else {
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);
            }
        }
    }

    private boolean isEndOfErrorFieldMatchPatterns() {
        // Same end tokens as error field binding patterns.
        return isEndOfErrorFieldBindingPatterns();
    }

    private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return consume();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                recover(peek(), currentCtx);
                return parseErrorArgListMatchPatternEnd(currentCtx);
        }
    }

    private STNode parseErrorArgListMatchPattern(ParserRuleContext context) {
        STToken nextToken = peek();
        if (isPredeclaredIdentifier(nextToken.kind)) {
            return parseNamedArgOrSimpleMatchPattern();
        }

        switch (nextToken.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            case OPEN_PAREN_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case OPEN_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseMatchPattern();
            case VAR_KEYWORD:
                // `var name` — a typed binding pattern with `var` as the type.
                STNode varType = createBuiltinSimpleNameReference(consume());
                STNode variableName = createCaptureOrWildcardBP(parseVariableName());
                return STNodeFactory.createTypedBindingPatternNode(varType, variableName);
            case CLOSE_PAREN_TOKEN:
                // Empty arg slot (e.g. a trailing comma): substitute a missing pattern.
                return SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN);
            default:
                recover(nextToken, context);
                return parseErrorArgListMatchPattern(context);
        }
    }

    private STNode parseNamedArgOrSimpleMatchPattern() {
        STNode constRefExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
        if (constRefExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || peek().kind != SyntaxKind.EQUAL_TOKEN) {
            return constRefExpr;
        }

        // `name =` starts a named-arg match pattern.
        return parseNamedArgMatchPattern(((STSimpleNameReferenceNode) constRefExpr).name);
    }

    /**
     * Parses the next named arg match pattern.
     *
\n * named-arg-match-pattern := arg-name = match-pattern\n *
\n *
\n *\n * @return arg match pattern list node added the new arg match pattern\n */\n private STNode parseNamedArgMatchPattern(STNode identifier) {\n startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);\n STNode equalToken = parseAssignOp();\n STNode matchPattern = parseMatchPattern();\n endContext();\n return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);\n }\n\n private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\n switch (currentArgKind) {\n case NAMED_ARG_MATCH_PATTERN:\n case REST_MATCH_PATTERN:\n \n if (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {\n return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;\n }\n return null;\n default:\n return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;\n }\n }\n\n /**\n * Parse markdown documentation.\n *\n * @return markdown documentation node\n */\n private STNode parseMarkdownDocumentation() {\n List markdownDocLineList = new ArrayList<>();\n\n \n \n \n \n \n STToken nextToken = peek();\n while (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {\n STToken documentationString = consume();\n STNode parsedDocLines = parseDocumentationString(documentationString);\n appendParsedDocumentationLines(markdownDocLineList, parsedDocLines);\n nextToken = peek();\n }\n\n STNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);\n return STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);\n }\n\n /**\n * Parse documentation string.\n *\n * @return markdown documentation line list node\n */\n private STNode parseDocumentationString(STToken documentationStringToken) {\n List leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());\n Collection diagnostics = new ArrayList<>((documentationStringToken.diagnostics()));\n\n CharReader charReader = CharReader.from(documentationStringToken.text());\n DocumentationLexer documentationLexer = new 
DocumentationLexer(charReader, leadingTriviaList, diagnostics);\n AbstractTokenReader tokenReader = new TokenReader(documentationLexer);\n DocumentationParser documentationParser = new DocumentationParser(tokenReader);\n return documentationParser.parse();\n }\n\n private List getLeadingTriviaList(STNode leadingMinutiaeNode) {\n List leadingTriviaList = new ArrayList<>();\n int bucketCount = leadingMinutiaeNode.bucketCount();\n for (int i = 0; i < bucketCount; i++) {\n leadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));\n }\n\n return leadingTriviaList;\n }\n\n private void appendParsedDocumentationLines(List markdownDocLineList, STNode parsedDocLines) {\n int bucketCount = parsedDocLines.bucketCount();\n for (int i = 0; i < bucketCount; i++) {\n STNode markdownDocLine = parsedDocLines.childInBucket(i);\n markdownDocLineList.add(markdownDocLine);\n }\n }\n\n \n\n /**\n * Parse any statement that starts with a token that has ambiguity between being\n * a type-desc or an expression.\n *\n * @param annots Annotations\n * @param qualifiers\n * @return Statement node\n */\n private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List qualifiers) {\n startContext(ParserRuleContext.AMBIGUOUS_STMT);\n STNode typeOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true);\n return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);\n }\n\n private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {\n if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\n List varDeclQualifiers = new ArrayList<>();\n switchContext(ParserRuleContext.VAR_DECL_STMT);\n return parseVarDeclRhs(annots, varDeclQualifiers, typedBindingPatternOrExpr, false);\n }\n\n STNode expr = getExpression(typedBindingPatternOrExpr);\n expr = getExpression(parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true));\n return parseStatementStartWithExprRhs(expr);\n }\n\n private STNode parseTypedBindingPatternOrExpr(boolean 
allowAssignment) {\n List typeDescQualifiers = new ArrayList<>();\n return parseTypedBindingPatternOrExpr(typeDescQualifiers, allowAssignment);\n }\n\n private STNode parseTypedBindingPatternOrExpr(List qualifiers, boolean allowAssignment) {\n parseTypeDescQualifiers(qualifiers);\n STToken nextToken = peek();\n STNode typeOrExpr;\n if (isPredeclaredIdentifier(nextToken.kind)) {\n reportInvalidQualifierList(qualifiers);\n typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\n return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\n }\n \n switch (nextToken.kind) {\n case OPEN_PAREN_TOKEN:\n reportInvalidQualifierList(qualifiers);\n return parseTypedBPOrExprStartsWithOpenParenthesis();\n case FUNCTION_KEYWORD:\n return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);\n case OPEN_BRACKET_TOKEN:\n reportInvalidQualifierList(qualifiers);\n typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());\n return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\n \n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case STRING_LITERAL_TOKEN:\n case NULL_KEYWORD:\n case TRUE_KEYWORD:\n case FALSE_KEYWORD:\n case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\n case HEX_FLOATING_POINT_LITERAL_TOKEN:\n reportInvalidQualifierList(qualifiers);\n STNode basicLiteral = parseBasicLiteral();\n return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);\n default:\n if (isValidExpressionStart(nextToken.kind, 1)) {\n reportInvalidQualifierList(qualifiers);\n return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());\n }\n\n return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);\n }\n }\n\n /**\n * Parse the component after the ambiguous starting node. Ambiguous node could be either an expr\n * or a type-desc. 
The component followed by this ambiguous node could be the binding-pattern or\n * the expression-rhs.\n *\n * @param typeOrExpr Type desc or the expression\n * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a\n * valid lvalue expression\n * @return Typed-binding-pattern node or an expression node\n */\n private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case PIPE_TOKEN:\n case BITWISE_AND_TOKEN:\n STToken nextNextToken = peek(2);\n if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\n return typeOrExpr;\n }\n\n STNode pipeOrAndToken = parseBinaryOperator();\n STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\n if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\n STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\n typeOrExpr = getTypeDescFromExpr(typeOrExpr);\n\n STNode newTypeDesc = mergeTypes(typeOrExpr, pipeOrAndToken, typedBP.typeDescriptor);\n return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n }\n\n \n if (peek().kind == SyntaxKind.EQUAL_TOKEN) {\n return createCaptureBPWithMissingVarName(typeOrExpr, pipeOrAndToken, rhsTypedBPOrExpr);\n }\n\n return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,\n pipeOrAndToken, rhsTypedBPOrExpr);\n case SEMICOLON_TOKEN:\n if (isDefiniteExpr(typeOrExpr.kind)) {\n return typeOrExpr;\n }\n\n if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {\n \n STNode typeDesc = getTypeDescFromExpr(typeOrExpr);\n return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n }\n\n return typeOrExpr;\n case IDENTIFIER_TOKEN:\n case QUESTION_MARK_TOKEN:\n if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {\n \n STNode typeDesc = getTypeDescFromExpr(typeOrExpr);\n return 
parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n }\n\n return typeOrExpr;\n case EQUAL_TOKEN:\n return typeOrExpr;\n case OPEN_BRACKET_TOKEN:\n return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,\n ParserRuleContext.AMBIGUOUS_STMT);\n case OPEN_BRACE_TOKEN: \n case ERROR_KEYWORD: \n STNode typeDesc = getTypeDescFromExpr(typeOrExpr);\n return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n default:\n \n if (isCompoundAssignment(nextToken.kind)) {\n return typeOrExpr;\n }\n\n \n \n if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {\n return typeOrExpr;\n }\n\n STToken token = peek();\n SyntaxKind typeOrExprKind = typeOrExpr.kind;\n if (typeOrExprKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||\n typeOrExprKind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\n recover(token, ParserRuleContext.BINDING_PATTERN_OR_VAR_REF_RHS);\n } else {\n recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS);\n }\n\n return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\n }\n }\n\n private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) {\n lhsType = getTypeDescFromExpr(lhsType);\n rhsType = getTypeDescFromExpr(rhsType);\n\n STNode newTypeDesc = mergeTypes(lhsType, separatorToken, rhsType);\n\n STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\n ParserRuleContext.VARIABLE_NAME);\n STNode captureBP = STNodeFactory.createCaptureBindingPatternNode(identifier);\n\n return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, captureBP);\n }\n\n private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {\n \n\n \n \n typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n return parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\n }\n\n private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {\n STNode exprOrTypeDesc 
= parseTypedDescOrExprStartsWithOpenParenthesis();\n if (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {\n return parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);\n }\n\n return parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);\n }\n\n private boolean isDefiniteTypeDesc(SyntaxKind kind) {\n return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.FUTURE_TYPE_DESC) <= 0;\n }\n\n private boolean isDefiniteExpr(SyntaxKind kind) {\n if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\n return false;\n }\n\n return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 && kind.compareTo(SyntaxKind.ERROR_CONSTRUCTOR) <= 0;\n }\n\n private boolean isDefiniteAction(SyntaxKind kind) {\n return kind.compareTo(SyntaxKind.REMOTE_METHOD_CALL_ACTION) >= 0 && \n kind.compareTo(SyntaxKind.COMMIT_ACTION) <= 0;\n }\n\n /**\n * Parse type or expression that starts with open parenthesis. Possible options are:\n * 1) () - nil type-desc or nil-literal\n * 2) (T) - Parenthesized type-desc\n * 3) (expr) - Parenthesized expression\n * 4) (param, param, ..) 
- Anon function params\n *\n * @return Type-desc or expression node\n */\n private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {\n STNode openParen = parseOpenParenthesis();\n STToken nextToken = peek();\n\n if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\n STNode closeParen = parseCloseParenthesis();\n return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);\n }\n\n STNode typeOrExpr = parseTypeDescOrExpr();\n if (isAction(typeOrExpr)) {\n STNode closeParen = parseCloseParenthesis();\n return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,\n closeParen);\n }\n\n if (isExpression(typeOrExpr.kind)) {\n startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\n return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);\n }\n\n STNode typeDescNode = getTypeDescFromExpr(typeOrExpr);\n typeDescNode = parseComplexTypeDescriptor(typeDescNode, ParserRuleContext.TYPE_DESC_IN_PARENTHESIS, false);\n\n STNode closeParen = parseCloseParenthesis();\n return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeDescNode, closeParen);\n }\n\n /**\n * Parse type-desc or expression. 
This method does not handle binding patterns.\n *\n * @return Type-desc node or expression node\n */\n private STNode parseTypeDescOrExpr() {\n List typeDescQualifiers = new ArrayList<>();\n return parseTypeDescOrExpr(typeDescQualifiers);\n }\n\n private STNode parseTypeDescOrExpr(List qualifiers) {\n parseTypeDescQualifiers(qualifiers);\n STToken nextToken = peek();\n STNode typeOrExpr;\n switch (nextToken.kind) {\n case OPEN_PAREN_TOKEN:\n reportInvalidQualifierList(qualifiers);\n typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();\n break;\n case FUNCTION_KEYWORD:\n typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);\n break;\n case IDENTIFIER_TOKEN:\n reportInvalidQualifierList(qualifiers);\n typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\n return parseTypeDescOrExprRhs(typeOrExpr);\n case OPEN_BRACKET_TOKEN:\n reportInvalidQualifierList(qualifiers);\n typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());\n break;\n \n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case STRING_LITERAL_TOKEN:\n case NULL_KEYWORD:\n case TRUE_KEYWORD:\n case FALSE_KEYWORD:\n case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\n case HEX_FLOATING_POINT_LITERAL_TOKEN:\n reportInvalidQualifierList(qualifiers);\n STNode basicLiteral = parseBasicLiteral();\n return parseTypeDescOrExprRhs(basicLiteral);\n default:\n if (isValidExpressionStart(nextToken.kind, 1)) {\n reportInvalidQualifierList(qualifiers);\n return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());\n }\n return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\n }\n\n if (isDefiniteTypeDesc(typeOrExpr.kind)) {\n return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n }\n\n return parseTypeDescOrExprRhs(typeOrExpr);\n }\n\n private boolean isExpression(SyntaxKind kind) {\n switch (kind) {\n case NUMERIC_LITERAL:\n case 
STRING_LITERAL_TOKEN:\n case NIL_LITERAL:\n case NULL_LITERAL:\n case BOOLEAN_LITERAL:\n return true;\n default:\n return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\n kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n }\n }\n\n /**\n * Parse statement that starts with an empty parenthesis. Empty parenthesis can be\n * 1) Nil literal\n * 2) Nil type-desc\n * 3) Anon-function params\n *\n * @param openParen Open parenthesis\n * @param closeParen Close parenthesis\n * @return Parsed node\n */\n private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case RIGHT_DOUBLE_ARROW_TOKEN:\n STNode params = STNodeFactory.createEmptyNodeList();\n STNode anonFuncParam =\n STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\n return parseImplicitAnonFunc(anonFuncParam, false);\n default:\n return STNodeFactory.createNilLiteralNode(openParen, closeParen);\n }\n }\n\n private STNode parseAnonFuncExprOrTypedBPWithFuncType(List qualifiers) {\n STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers);\n if (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {\n return exprOrTypeDesc;\n }\n\n return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);\n }\n\n /**\n * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.\n *\n * @param qualifiers Preceding qualifiers\n * @return Anon-func-expr or function-type-desc\n */\n private STNode parseAnonFuncExprOrFuncTypeDesc(List qualifiers) {\n startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);\n STNode qualifierList;\n STNode functionKeyword = parseFunctionKeyword();\n STNode funcSignature;\n\n if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {\n funcSignature = parseFuncSignature(true);\n\n \n STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, true);\n qualifierList = nodes[0];\n 
functionKeyword = nodes[1];\n\n endContext();\n return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature);\n }\n\n funcSignature = STNodeFactory.createEmptyNode();\n\n \n STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, false);\n qualifierList = nodes[0];\n functionKeyword = nodes[1];\n\n STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,\n funcSignature);\n if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) {\n switchContext(ParserRuleContext.VAR_DECL_STMT);\n return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n }\n return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n }\n\n private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) {\n ParserRuleContext currentCtx = getCurrentContext();\n switch (peek().kind) {\n case OPEN_BRACE_TOKEN:\n case RIGHT_DOUBLE_ARROW_TOKEN:\n if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {\n switchContext(ParserRuleContext.EXPRESSION_STATEMENT);\n }\n startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\n \n funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\n\n STNode funcBody = parseAnonFuncBody(false);\n STNode annots = STNodeFactory.createEmptyNodeList();\n STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList,\n functionKeyword, funcSignature, funcBody);\n return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);\n case IDENTIFIER_TOKEN:\n default:\n STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,\n funcSignature);\n if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {\n switchContext(ParserRuleContext.VAR_DECL_STMT);\n return parseComplexTypeDescriptor(funcTypeDesc, 
ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\n true);\n }\n return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n }\n }\n\n private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {\n STToken nextToken = peek();\n STNode typeDesc;\n switch (nextToken.kind) {\n case PIPE_TOKEN:\n case BITWISE_AND_TOKEN:\n STToken nextNextToken = peek(2);\n if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\n return typeOrExpr;\n }\n\n STNode pipeOrAndToken = parseBinaryOperator();\n STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();\n if (isExpression(rhsTypeDescOrExpr.kind)) {\n return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,\n pipeOrAndToken, rhsTypeDescOrExpr);\n }\n\n typeDesc = getTypeDescFromExpr(typeOrExpr);\n rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);\n return mergeTypes(typeDesc, pipeOrAndToken, rhsTypeDescOrExpr);\n case IDENTIFIER_TOKEN:\n case QUESTION_MARK_TOKEN:\n \n \n \n typeDesc = parseComplexTypeDescriptor(getTypeDescFromExpr(typeOrExpr), \n ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);\n return typeDesc;\n case SEMICOLON_TOKEN:\n return getTypeDescFromExpr(typeOrExpr);\n case EQUAL_TOKEN:\n case CLOSE_PAREN_TOKEN:\n case CLOSE_BRACE_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n case EOF_TOKEN:\n case COMMA_TOKEN:\n return typeOrExpr;\n case OPEN_BRACKET_TOKEN:\n return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,\n ParserRuleContext.AMBIGUOUS_STMT);\n case ELLIPSIS_TOKEN:\n STNode ellipsis = parseEllipsis();\n typeOrExpr = getTypeDescFromExpr(typeOrExpr);\n return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);\n default:\n \n if (isCompoundAssignment(nextToken.kind)) {\n return typeOrExpr;\n }\n\n \n \n if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {\n return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);\n }\n\n recover(peek(), 
ParserRuleContext.TYPE_DESC_OR_EXPR_RHS);\n return parseTypeDescOrExprRhs(typeOrExpr);\n }\n }\n\n private boolean isAmbiguous(STNode node) {\n switch (node.kind) {\n case SIMPLE_NAME_REFERENCE:\n case QUALIFIED_NAME_REFERENCE:\n case NIL_LITERAL:\n case NULL_LITERAL:\n case NUMERIC_LITERAL:\n case STRING_LITERAL:\n case BOOLEAN_LITERAL:\n case BRACKETED_LIST:\n return true;\n case BINARY_EXPRESSION:\n STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\n if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\n binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\n return false;\n }\n return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\n case BRACED_EXPRESSION:\n return isAmbiguous(((STBracedExpressionNode) node).expression);\n case INDEXED_EXPRESSION:\n STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;\n if (!isAmbiguous(indexExpr.containerExpression)) {\n return false;\n }\n\n STNode keys = indexExpr.keyExpression;\n for (int i = 0; i < keys.bucketCount(); i++) {\n STNode item = keys.childInBucket(i);\n if (item.kind == SyntaxKind.COMMA_TOKEN) {\n continue;\n }\n\n if (!isAmbiguous(item)) {\n return false;\n }\n }\n return true;\n default:\n return false;\n }\n }\n\n private boolean isAllBasicLiterals(STNode node) {\n switch (node.kind) {\n case NIL_LITERAL:\n case NULL_LITERAL:\n case NUMERIC_LITERAL:\n case STRING_LITERAL:\n case BOOLEAN_LITERAL:\n return true;\n case BINARY_EXPRESSION:\n STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\n if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\n binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\n return false;\n }\n return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\n case BRACED_EXPRESSION:\n return isAmbiguous(((STBracedExpressionNode) node).expression);\n case BRACKETED_LIST:\n STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;\n for (STNode member : list.members) {\n if 
(member.kind == SyntaxKind.COMMA_TOKEN) {\n continue;\n }\n\n if (!isAllBasicLiterals(member)) {\n return false;\n }\n }\n\n return true;\n case UNARY_EXPRESSION:\n STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;\n if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&\n unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {\n return false;\n }\n\n return isNumericLiteral(unaryExpr.expression);\n default:\n return false;\n }\n }\n\n private boolean isNumericLiteral(STNode node) {\n switch (node.kind) {\n case NUMERIC_LITERAL:\n return true;\n default:\n return false;\n }\n }\n\n \n\n /**\n * Parse binding-patterns.\n *

\n * \n * binding-pattern := capture-binding-pattern\n * | wildcard-binding-pattern\n * | list-binding-pattern\n * | mapping-binding-pattern\n * | functional-binding-pattern\n *

\n *

\n * capture-binding-pattern := variable-name\n * variable-name := identifier\n *

\n *

\n * wildcard-binding-pattern := _\n * list-binding-pattern := [ list-member-binding-patterns ]\n *
\n * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n * | [ rest-binding-pattern ]\n *

\n *

\n * mapping-binding-pattern := { field-binding-patterns }\n * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n * | [ rest-binding-pattern ]\n *
\n * field-binding-pattern := field-name : binding-pattern | variable-name\n *
\n * rest-binding-pattern := ... variable-name\n *

\n *

\n * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )\n *
\n * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]\n * | other-arg-binding-patterns\n *
\n * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*\n *
\n * positional-arg-binding-pattern := binding-pattern\n *
\n * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]\n * | [rest-binding-pattern]\n *
\n * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*\n *
\n * named-arg-binding-pattern := arg-name = binding-pattern\n *
\n *\n * @return binding-pattern node\n */\n private STNode parseBindingPattern() {\n switch (peek().kind) {\n case OPEN_BRACKET_TOKEN:\n return parseListBindingPattern();\n case IDENTIFIER_TOKEN:\n return parseBindingPatternStartsWithIdentifier();\n case OPEN_BRACE_TOKEN:\n return parseMappingBindingPattern();\n case ERROR_KEYWORD:\n return parseErrorBindingPattern();\n default:\n recover(peek(), ParserRuleContext.BINDING_PATTERN);\n return parseBindingPattern();\n }\n }\n\n private STNode parseBindingPatternStartsWithIdentifier() {\n STNode argNameOrBindingPattern =\n parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\n STToken secondToken = peek();\n if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\n startContext(ParserRuleContext.ERROR_BINDING_PATTERN);\n STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,\n ParserRuleContext.ERROR_KEYWORD);\n return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);\n }\n\n if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {\n STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\n ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\n identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);\n return createCaptureOrWildcardBP(identifier);\n }\n\n return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);\n }\n\n private STNode createCaptureOrWildcardBP(STNode varName) {\n STNode bindingPattern;\n if (isWildcardBP(varName)) {\n bindingPattern = getWildcardBindingPattern(varName);\n } else {\n bindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);\n }\n return bindingPattern;\n }\n\n /**\n * Parse list-binding-patterns.\n *

\n * \n * list-binding-pattern := [ list-member-binding-patterns ]\n *
\n * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n * | [ rest-binding-pattern ]\n *
\n *\n * @return list-binding-pattern node\n */\n private STNode parseListBindingPattern() {\n startContext(ParserRuleContext.LIST_BINDING_PATTERN);\n STNode openBracket = parseOpenBracket();\n List bindingPatternsList = new ArrayList<>();\n STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);\n endContext();\n return listBindingPattern;\n }\n\n private STNode parseListBindingPattern(STNode openBracket, List bindingPatternsList) {\n if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) {\n \n STNode closeBracket = parseCloseBracket();\n STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);\n return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);\n }\n STNode listBindingPatternMember = parseListBindingPatternMember();\n bindingPatternsList.add(listBindingPatternMember);\n STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);\n return listBindingPattern;\n }\n\n private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List bindingPatterns) {\n STNode member = firstMember;\n \n STToken token = peek(); \n STNode listBindingPatternRhs = null;\n while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {\n listBindingPatternRhs = parseListBindingPatternMemberRhs();\n if (listBindingPatternRhs == null) {\n break;\n }\n\n bindingPatterns.add(listBindingPatternRhs);\n member = parseListBindingPatternMember();\n bindingPatterns.add(member);\n token = peek();\n }\n\n STNode closeBracket = parseCloseBracket();\n STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\n return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);\n }\n\n private STNode parseListBindingPatternMemberRhs() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case 
CLOSE_BRACKET_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);\n return parseListBindingPatternMemberRhs();\n }\n }\n\n private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {\n switch (nextTokenKind) {\n case CLOSE_BRACKET_TOKEN:\n case EOF_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse list-binding-pattern member.\n *

\n * \n * list-binding-pattern := [ list-member-binding-patterns ]\n *
\n * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n * | [ rest-binding-pattern ]\n *
\n *\n * @return List binding pattern member\n */\n private STNode parseListBindingPatternMember() {\n switch (peek().kind) {\n case ELLIPSIS_TOKEN:\n return parseRestBindingPattern();\n case OPEN_BRACKET_TOKEN:\n case IDENTIFIER_TOKEN:\n case OPEN_BRACE_TOKEN:\n case ERROR_KEYWORD:\n return parseBindingPattern();\n default:\n recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);\n return parseListBindingPatternMember();\n }\n }\n\n /**\n * Parse rest binding pattern.\n *

\n * \n * rest-binding-pattern := ... variable-name\n * \n *\n * @return Rest binding pattern node\n */\n private STNode parseRestBindingPattern() {\n startContext(ParserRuleContext.REST_BINDING_PATTERN);\n STNode ellipsis = parseEllipsis();\n STNode varName = parseVariableName();\n endContext();\n\n STSimpleNameReferenceNode simpleNameReferenceNode =\n (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);\n return STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);\n }\n\n /**\n * Parse Typed-binding-pattern.\n *

\n * \n * typed-binding-pattern := inferable-type-descriptor binding-pattern\n *

\n * inferable-type-descriptor := type-descriptor | var\n *
\n *\n * @return Typed binding pattern node\n */\n private STNode parseTypedBindingPattern(ParserRuleContext context) {\n List typeDescQualifiers = new ArrayList<>();\n return parseTypedBindingPattern(typeDescQualifiers, context);\n }\n\n private STNode parseTypedBindingPattern(List qualifiers, ParserRuleContext context) {\n STNode typeDesc = parseTypeDescriptor(qualifiers,\n ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false, TypePrecedence.DEFAULT);\n STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);\n return typeBindingPattern;\n }\n\n /**\n * Parse mapping-binding-patterns.\n *

\n * \n * mapping-binding-pattern := { field-binding-patterns }\n *

     * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
     *                         | [ rest-binding-pattern ]
     *
     * field-binding-pattern := field-name : binding-pattern | variable-name
     *
     * @return mapping-binding-pattern node
     */
    private STNode parseMappingBindingPattern() {
        startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
        STNode openBrace = parseOpenBrace();

        STToken token = peek();
        if (isEndOfMappingBindingPattern(token.kind)) {
            // Empty mapping binding pattern: `{ }`.
            STNode closeBrace = parseCloseBrace();
            STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();
            endContext();
            return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
        }

        List bindingPatterns = new ArrayList<>();
        STNode prevMember = parseMappingBindingPatternMember();
        if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
            // A rest-binding-pattern is added later (it must be the trailing member).
            bindingPatterns.add(prevMember);
        }
        return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);
    }

    /**
     * Parse the remaining members of a mapping-binding-pattern, given the already-parsed
     * open brace and first member, and assemble the final node.
     *
     * @param openBrace       Already-consumed open brace token
     * @param bindingPatterns Accumulated members (separators included)
     * @param prevMember      Most recently parsed member
     * @return mapping-binding-pattern node
     */
    private STNode parseMappingBindingPattern(STNode openBrace, List bindingPatterns, STNode prevMember) {
        STToken token = peek();
        STNode mappingBindingPatternRhs = null;
        // Keep consuming `, member` pairs; a rest-binding-pattern terminates the member list.
        while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
            mappingBindingPatternRhs = parseMappingBindingPatternEnd();
            if (mappingBindingPatternRhs == null) {
                // Reached the close brace.
                break;
            }

            bindingPatterns.add(mappingBindingPatternRhs);
            prevMember = parseMappingBindingPatternMember();
            if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
                break;
            }
            bindingPatterns.add(prevMember);
            token = peek();
        }

        if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
            // Rest member is appended exactly once, as the last entry.
            bindingPatterns.add(prevMember);
        }

        STNode closeBrace = parseCloseBrace();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
    }

    /**
     * Parse mapping-binding-pattern entry.
     *
     * mapping-binding-pattern := { field-binding-patterns }
     *
     * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
     *                         | [ rest-binding-pattern ]
     *
     * field-binding-pattern := field-name : binding-pattern
     *                        | variable-name
     *
     * @return mapping-binding-pattern node
     */
    private STNode parseMappingBindingPatternMember() {
        STToken token = peek();
        switch (token.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            default:
                return parseFieldBindingPattern();
        }
    }

    /**
     * Parse the separator after a mapping-binding-pattern member.
     *
     * @return The comma token, or {@code null} when the close brace is reached
     */
    private STNode parseMappingBindingPatternEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(nextToken, ParserRuleContext.MAPPING_BINDING_PATTERN_END);
                return parseMappingBindingPatternEnd();
        }
    }

    /**
     * Parse field-binding-pattern.
     * field-binding-pattern := field-name : binding-pattern | varname
     *
     * @return field-binding-pattern node
     */
    private STNode parseFieldBindingPattern() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
                STNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);
                return parseFieldBindingPattern(simpleNameReference);
            default:
                recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
                return parseFieldBindingPattern();
        }
    }

    /**
     * Parse the rest of a field-binding-pattern, after the field name.
     * A following colon makes it the `field-name : binding-pattern` form;
     * a comma or close brace makes it the bare varname form.
     *
     * @param simpleNameReference Already-parsed field name reference
     * @return field-binding-pattern node
     */
    private STNode parseFieldBindingPattern(STNode simpleNameReference) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);
            case COLON_TOKEN:
                STNode colon = parseColon();
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);
            default:
                recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_END);
                return parseFieldBindingPattern(simpleNameReference);
        }
    }

    // True when the next token terminates a mapping-binding-pattern (close brace or
    // a token that ends the enclosing module-level node).
    private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
        return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || isEndOfModuleLevelNode(1);
    }

    /**
     * Disambiguate between an error-type-desc var-decl and an error-binding-pattern
     * statement, by looking ahead past the `error` keyword.
     *
     * @param annots Annotations attached to the statement
     * @return Parsed statement node
     */
    private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
        STToken nextNextToken = peek(2);
        switch (nextNextToken.kind) {
            case OPEN_PAREN_TOKEN:
                // `error(` -- must be a binding pattern.
                return parseAsErrorBindingPattern();
            case LT_TOKEN:
                // `error<` -- type parameter, must be a type-desc.
                return parseAsErrorTypeDesc(annots);
            case IDENTIFIER_TOKEN:
                // `error id:` or `error id(` -- a (possibly qualified) type-ref followed
                // by the binding-pattern arg list.
                SyntaxKind nextNextNextTokenKind = peek(3).kind;
                if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||
                        nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                    return parseAsErrorBindingPattern();
                }
                // fall through
            default:
                return parseAsErrorTypeDesc(annots);
        }
    }

    // Parse the statement as an assignment whose LHS is an error-binding-pattern.
    private STNode parseAsErrorBindingPattern() {
        startContext(ParserRuleContext.ASSIGNMENT_STMT);
        return parseAssignmentStmtRhs(parseErrorBindingPattern());
    }

    // Parse the statement as a variable declaration starting with an error type-desc.
    private STNode parseAsErrorTypeDesc(STNode annots) {
        STNode finalKeyword = STNodeFactory.createEmptyNode();
        return parseVariableDecl(getAnnotations(annots), finalKeyword);
    }

    /**
     * Parse error binding pattern node.
     *
     * error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )
     *
     * error-arg-list-binding-pattern :=
     *     error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
     *   | [error-field-binding-patterns]
     *
     * error-message-binding-pattern := simple-binding-pattern
     *
     * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
     *
     * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
     *
     * error-field-binding-patterns :=
     *     named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
     *   | rest-binding-pattern
     *
     * named-arg-binding-pattern := arg-name = binding-pattern
     *
     * @return Error binding pattern node.
     */
    private STNode parseErrorBindingPattern() {
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode errorKeyword = parseErrorKeyword();
        return parseErrorBindingPattern(errorKeyword);
    }

    /**
     * Parse the optional error-type-reference that may follow the `error` keyword,
     * then continue with the parenthesized arg list.
     *
     * @param errorKeyword Already-consumed `error` keyword
     * @return Error binding pattern node
     */
    private STNode parseErrorBindingPattern(STNode errorKeyword) {
        STToken nextToken = peek();
        STNode typeRef;
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                // No type reference: `error(...)`.
                typeRef = STNodeFactory.createEmptyNode();
                break;
            default:
                if (isPredeclaredIdentifier(nextToken.kind)) {
                    typeRef = parseTypeReference();
                    break;
                }
                recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);
                return parseErrorBindingPattern(errorKeyword);
        }
        return parseErrorBindingPattern(errorKeyword, typeRef);
    }

    /**
     * Parse the parenthesized arg list of an error-binding-pattern and assemble the node.
     *
     * @param errorKeyword Already-consumed `error` keyword
     * @param typeRef      Optional error-type-reference (may be an empty node)
     * @return Error binding pattern node
     */
    private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {
        STNode openParenthesis = parseOpenParenthesis();
        STNode argListBindingPatterns = parseErrorArgListBindingPatterns();
        STNode closeParenthesis = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis,
                argListBindingPatterns, closeParenthesis);
    }

    /**
     * Parse error arg list binding pattern.
     *
     * error-arg-list-binding-pattern :=
     *     error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
     *   | [error-field-binding-patterns]
     *
     * error-message-binding-pattern := simple-binding-pattern
     *
     * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
     *
     * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
     *
     * error-field-binding-patterns :=
     *     named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
     *   | rest-binding-pattern
     *
     * named-arg-binding-pattern := arg-name = binding-pattern
     *
     * @return Error arg list binding patterns.
     */
    private STNode parseErrorArgListBindingPatterns() {
        List argListBindingPatterns = new ArrayList<>();
        if (isEndOfErrorFieldBindingPatterns()) {
            // Empty arg list: `error(...)` with nothing inside the parens.
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }
        return parseErrorArgListBindingPatterns(argListBindingPatterns);
    }

    /**
     * Parse the first argument of the error arg list and dispatch based on its kind:
     * a simple BP is the error-message; an error BP implies a missing error-message
     * (a diagnostic is attached); rest/named-arg BPs start the field section.
     *
     * @param argListBindingPatterns Accumulator for parsed args and separators
     * @return Error arg list binding patterns node list
     */
    private STNode parseErrorArgListBindingPatterns(List argListBindingPatterns) {
        STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);
        if (firstArg == null) {
            // Reached close paren without an arg.
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }

        switch (firstArg.kind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                // First arg is the error-message-binding-pattern.
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
            case ERROR_BINDING_PATTERN:
                // An error BP can only be the cause; synthesize the missing message BP
                // and the comma, with diagnostics, so the tree stays well-formed.
                STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);
                missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,
                        DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);
                STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);
                argListBindingPatterns.add(missingErrorMsgBP);
                argListBindingPatterns.add(missingComma);
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
            case REST_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
                // Arg list starts directly with the field section.
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
            default:
                // Not a valid arg kind here; attach it to the next token as invalid
                // minutiae and retry.
                addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                return parseErrorArgListBindingPatterns(argListBindingPatterns);
        }
    }

    /**
     * Parse the args that follow the error-message-binding-pattern (the optional
     * cause and the field section).
     *
     * @param argListBindingPatterns Accumulator, already holding the message BP
     * @return Error arg list binding patterns node list
     */
    private STNode parseErrorArgListBPWithoutErrorMsg(List argListBindingPatterns) {
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
        if (argEnd == null) {
            // Close paren reached; only the message BP was present.
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }

        STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);
        assert secondArg != null;
        switch (secondArg.kind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(secondArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);
            default:
                // Invalid second arg: fold the separator and the arg into the previous
                // node as invalid minutiae, then retry from the same position.
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                        DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
        }
    }

    /**
     * Parse the remaining error-field-binding-patterns, validating ordering
     * (e.g. nothing may follow a rest-binding-pattern).
     *
     * @param argListBindingPatterns Accumulator for parsed args and separators
     * @param lastValidArgKind       Kind of the last arg accepted as valid
     * @return Error arg list binding patterns node list
     */
    private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List argListBindingPatterns,
                                                              SyntaxKind lastValidArgKind) {
        while (!isEndOfErrorFieldBindingPatterns()) {
            STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
            if (argEnd == null) {
                // Close paren reached.
                break;
            }
            STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);
            assert currentArg != null;
            DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
            if (errorCode == null) {
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(currentArg);
                lastValidArgKind = currentArg.kind;
            } else if (argListBindingPatterns.size() == 0) {
                // Nothing valid collected yet: attach invalid nodes to the next token.
                addInvalidNodeToNextToken(argEnd, null);
                addInvalidNodeToNextToken(currentArg, errorCode);
            } else {
                // Fold invalid separator/arg into the last valid node.
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
            }
        }

        return STNodeFactory.createNodeList(argListBindingPatterns);
    }

    // True when the next token terminates the error arg list (close paren or EOF).
    private boolean isEndOfErrorFieldBindingPatterns() {
        SyntaxKind nextTokenKind = peek().kind;
        switch (nextTokenKind) {
            case CLOSE_PAREN_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the separator after an error arg.
     *
     * @param currentCtx Context to recover in, on unexpected tokens
     * @return The comma token, or {@code null} when the close paren is reached
     */
    private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return consume();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                recover(peek(), currentCtx);
                return parseErrorArgsBindingPatternEnd(currentCtx);
        }
    }

    /**
     * Parse a single member of an error arg list: a rest BP, a named-arg or simple BP
     * starting with an identifier, or a nested list/mapping/error BP.
     *
     * @param context    Context to recover in, on unexpected tokens
     * @param isFirstArg Whether this is the first arg (close paren then yields null)
     * @return Parsed arg node, or {@code null} for an empty first arg position
     */
    private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {
        switch (peek().kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            case IDENTIFIER_TOKEN:
                // Could be a named-arg name or a capture/wildcard BP; decided by the
                // token after the identifier.
                STNode argNameOrSimpleBindingPattern = consume();
                return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);
            case OPEN_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseBindingPattern();
            case CLOSE_PAREN_TOKEN:
                if (isFirstArg) {
                    // Empty arg list.
                    return null;
                }
                // fall through
            default:
                recover(peek(), context);
                return parseErrorArgListBindingPattern(context, isFirstArg);
        }
    }

    /**
     * Given a consumed identifier, parse either a named-arg binding pattern
     * (`name = binding-pattern`) or a capture/wildcard binding pattern.
     *
     * @param argNameOrSimpleBindingPattern The consumed identifier token
     * @return named-arg BP or capture/wildcard BP node
     */
    private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {
        STToken secondToken = peek();
        switch (secondToken.kind) {
            case EQUAL_TOKEN:
                STNode equal = consume();
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern,
                        equal, bindingPattern);
            case COMMA_TOKEN:
            case CLOSE_PAREN_TOKEN:
            default:
                return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
        }
    }

    /**
     * Validate the ordering of error-field-binding-patterns.
     *
     * @param prevArgKind    Kind of the previous (valid) arg
     * @param currentArgKind Kind of the arg just parsed
     * @return {@code null} when the ordering is valid, otherwise the diagnostic to attach
     */
    private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                      SyntaxKind currentArgKind) {
        switch (currentArgKind) {
            case NAMED_ARG_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
                // Nothing may follow a rest-binding-pattern.
                if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
                    return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
                }
                return null;
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            default:
                // Only named-arg / rest BPs are allowed in the field section.
                return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
        }
    }

    /*
     * This parses Typed binding patterns and deals with ambiguity between types,
     * and binding patterns. An example is 'T[a]'.
     * The ambiguity lies in between:
     * 1) Array Type
     * 2) List binding pattern
     * 3) Member access expression.
     */

    /**
     * Parse the component after the type-desc, of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @return Typed-binding pattern
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
        return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
    }

    /**
     * Parse the component after the type-desc, of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @param context  Parsing context
     * @param isRoot   Whether this is the outermost invocation; when false, a
     *                 terminating token returns the bare type-desc instead of recovering
     * @return Typed-binding pattern (or the type-desc, when not root)
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                // capture/mapping/error binding pattern follows the type.
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case OPEN_BRACKET_TOKEN:
                // Ambiguous `T[...` -- resolved by parseTypedBindingPatternOrMemberAccess.
                STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
                assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
                return typedBindingPattern;
            case CLOSE_PAREN_TOKEN:
            case COMMA_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_BRACE_TOKEN:
                if (!isRoot) {
                    return typeDesc;
                }
                // fall through
            default:
                recover(nextToken, ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS);
                return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
        }
    }

    /**
     * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
     *
     * @param typeDescOrExpr        Type desc or the expression at the start
     * @param isTypedBindingPattern Is this is a typed-binding-pattern. If this is `false`, then it's still ambiguous
     * @param allowAssignment       Whether an assignment LHS interpretation is allowed
     * @param context               Parsing context
     * @return Parsed node
     */
    private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                          boolean allowAssignment, ParserRuleContext context) {
        startContext(ParserRuleContext.BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();

        // `T[]` -- must be an array type desc.
        if (isBracketedListEnd(peek().kind)) {
            return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
        }

        // Parse the first member and classify the bracketed list by its kind.
        STNode member = parseBracketedListMember(isTypedBindingPattern);
        SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);
        switch (currentNodeType) {
            case ARRAY_TYPE_DESC:
                STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
                return typedBindingPattern;
            case LIST_BINDING_PATTERN:
                // Treat everything so far as a list binding pattern.
                STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case INDEXED_EXPRESSION:
                return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
            case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS:
                // Still ambiguous; fall to the RHS-based resolution below.
                break;
            case NONE:
            default:
                // Member alone didn't decide; a following comma forces a list BP.
                STNode memberEnd = parseBracketedListMemberEnd();
                if (memberEnd != null) {
                    List memberList = new ArrayList<>();
                    memberList.add(getBindingPattern(member));
                    memberList.add(memberEnd);
                    bindingPattern = parseAsListBindingPattern(openBracket, memberList);
                    typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                    return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
                }
        }

        // Single ambiguous member with no comma: decide by what follows `]`.
        STNode closeBracket = parseCloseBracket();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }

    /**
     * Finish parsing as a member-access expression, using the already-parsed
     * container, open bracket, and key member.
     */
    private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
        member = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
        STNode closeBracket = parseCloseBracket();
        endContext();
        STNode keyExpr = STNodeFactory.createNodeList(member);
        STNode memberAccessExpr =
                STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);
        return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);
    }

    // True when the next token terminates a bracketed list (close bracket or EOF).
    private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse a member of an ambiguous bracketed list. This member could be:
     * 1) Array length
     * 2) Key expression of a member-access-expr
     * 3) A member-binding pattern of a list-binding-pattern.
     *
     * @param isTypedBindingPattern Is this in a definite typed-binding pattern
     * @return Parsed member node
     */
    private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case ASTERISK_TOKEN:
            case STRING_LITERAL_TOKEN:
                return parseBasicLiteral();
            case CLOSE_BRACKET_TOKEN:
                return STNodeFactory.createEmptyNode();
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
            case ELLIPSIS_TOKEN:
            case OPEN_BRACKET_TOKEN:
                return parseStatementStartBracketedListMember();
            case IDENTIFIER_TOKEN:
                if (isTypedBindingPattern) {
                    return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                }
                break;
            default:
                if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||
                        isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {
                    break;
                }

                ParserRuleContext recoverContext =
                        isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                                : ParserRuleContext.BRACKETED_LIST_MEMBER;
                recover(peek(), recoverContext);
                return parseBracketedListMember(isTypedBindingPattern);
        }

        STNode expr = parseExpression();
        if (isWildcardBP(expr)) {
            return getWildcardBindingPattern(expr);
        }

        // Could still be a binding pattern or an expression; leave as-is.
        return expr;
    }

    /**
     * Treat the current node as an array, and parse the remainder of the binding pattern.
     *
     * @param typeDesc    Type-desc
     * @param openBracket Open bracket
     * @param member      Member
     * @param context     Parsing context
     * @return Parsed node
     */
    private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {
        // The member (array length) has already been parsed; re-contextualize as an
        // array type desc and consume the close bracket.
        typeDesc = getTypeDescFromExpr(typeDesc);
        switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
        startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
        STNode closeBracket = parseCloseBracket();
        endContext();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,
                context);
    }

    /**
     * Parse the separator after a bracketed-list member.
     *
     * @return The comma token, or {@code null} when the close bracket is reached
     */
    private STNode parseBracketedListMemberEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);
                return parseBracketedListMemberEnd();
        }
    }

    /**
     * We reach here to break ambiguity of T[a]. This could be:
     * 1) Array Type Desc
     * 2) Member access on LHS
     * 3) Typed-binding-pattern
     *
     * @param typeDescOrExpr        Type name or the expr that precede the open-bracket.
     * @param openBracket           Open bracket
     * @param member                Member
     * @param closeBracket          Open bracket
     * @param isTypedBindingPattern Is this is a typed-binding-pattern.
     * @param allowAssignment       Whether an assignment LHS interpretation is allowed
     * @param context               Parsing context
     * @return Specific node that matches to T[a], after solving ambiguity.
     */
    private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                             STNode closeBracket, boolean isTypedBindingPattern,
                                                             boolean allowAssignment, ParserRuleContext context) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                // A binding pattern follows: T[a] was an array type desc.
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            case OPEN_BRACKET_TOKEN:
                if (isTypedBindingPattern) {
                    typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                    arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                    return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
                }

                // Still ambiguous: treat T[a] as member access and continue with `[`.
                STNode keyExpr = getKeyExpr(member);
                STNode expr =
                        STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
                return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
            case QUESTION_MARK_TOKEN:
                // Optional type: T[a]? -- array type desc wrapped in optional.
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
                        ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
                return parseTypedBindingPatternTypeRhs(typeDesc, context);
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
                // Union/intersection: defer to the complex-type resolution.
                return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket,
                        isTypedBindingPattern);
            case IN_KEYWORD:
                // `in` is only valid in foreach/from/join clauses.
                if (context != ParserRuleContext.FOREACH_STMT &&
                        context != ParserRuleContext.FROM_CLAUSE &&
                        context != ParserRuleContext.JOIN_CLAUSE) {
                    break;
                }
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            case EQUAL_TOKEN:
                if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                    // `=` is not valid here; recover below.
                    break;
                }

                if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
                    return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
                }

                keyExpr = getKeyExpr(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
            case SEMICOLON_TOKEN:
                if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                    // `;` is not valid here; recover below.
                    break;
                }

                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            case CLOSE_BRACE_TOKEN:
            case COMMA_TOKEN:
                if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                    keyExpr = getKeyExpr(member);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }
                // fall through
            default:
                if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                    // Expression continues: T[a] was a member access.
                    keyExpr = getKeyExpr(member);
                    typeDescOrExpr = getExpression(typeDescOrExpr);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }

                break;
        }

        ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;
        if (isTypedBindingPattern) {
            recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;
        }

        recover(peek(), recoveryCtx);
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }

    /**
     * Wrap the member as a key-expression node list, synthesizing a missing
     * identifier (with diagnostic) when the member is absent.
     */
    private STNode getKeyExpr(STNode member) {
        if (member == null) {
            STToken keyIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
            STNode missingVarRef = STNodeFactory.createSimpleNameReferenceNode(keyIdentifier);

            return STNodeFactory.createNodeList(missingVarRef);
        }
        return STNodeFactory.createNodeList(member);
    }

    /**
     * Build a typed-binding-pattern out of T[a]: the member becomes either an array
     * length (numeric/asterisk literal, with a synthesized missing variable name) or
     * the sole member of a list-binding-pattern.
     *
     * @param typeDescOrExpr Type desc or expression preceding the brackets
     * @param openBracket    Open bracket
     * @param member         Bracketed member (may be empty)
     * @param closeBracket   Close bracket
     * @return typed-binding-pattern node
     */
    private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                             STNode closeBracket) {
        STNode bindingPatterns = STNodeFactory.createEmptyNodeList();
        if (!isEmpty(member)) {
            SyntaxKind memberKind = member.kind;
            if (memberKind == SyntaxKind.NUMERIC_LITERAL || memberKind == SyntaxKind.ASTERISK_LITERAL) {
                // Member is an array length: `T[5]` -- the variable name is missing.
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                STToken identifierToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);
                STNode variableName = STNodeFactory.createCaptureBindingPatternNode(identifierToken);
                return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, variableName);
            }
            if (member.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                // Field binding patterns are not allowed inside a list BP; attach as
                // invalid minutiae on the open bracket.
                openBracket = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracket, member,
                        DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);
            } else {
                STNode bindingPattern = getBindingPattern(member);
                bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
            }
        }

        STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, closeBracket);
        STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
        return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
    }

    /**
     * Parse a union or intersection type-desc/binary-expression that involves ambiguous
     * bracketed list in lhs.
     *
     * e.g: (T[a] & R..) or (T[a] | R.. )
     *
     * Complexity occurs in scenarios such as T[a] |/& R[b]. If the token after this
     * is another binding-pattern, then (T[a] |/& R[b]) becomes the type-desc. However,
     * if the token follows this is an equal or semicolon, then (T[a] |/& R) becomes
     * the type-desc, and [b] becomes the binding pattern.
     *
     * @param typeDescOrExpr        Type desc or the expression
     * @param openBracket           Open bracket
     * @param member                Member
     * @param closeBracket          Close bracket
     * @param isTypedBindingPattern Whether the lhs is already known to be a typed-BP
     * @return Parsed node
     */
    private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                          STNode closeBracket, boolean isTypedBindingPattern) {
        STNode pipeOrAndToken = parseUnionOrIntersectionToken();
        STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);

        if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
            // RHS resolved to a typed-BP: lhs brackets were an array type desc; merge
            // the two type descs around the |/& token.
            STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
            lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);

            STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;
            STNode rhsTypeDesc = rhsTypedBindingPattern.typeDescriptor;

            STNode newTypeDesc = mergeTypes(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
            return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
        }

        if (isTypedBindingPattern) {
            // Known typed-BP but rhs is an expression: the variable name is missing.
            STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
            lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
            return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);
        }

        // Both sides are expressions: lhs is a member access, whole thing a binary expr.
        STNode keyExpr = getExpression(member);
        STNode containerExpr = getExpression(typeDescOrExpr);
        STNode lhsExpr =
                STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
        return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
                typedBindingPatternOrExpr);

    }

    /**
     * Merges two types separated by | or & into one type, while taking precedence
     * and associativity into account.
     *
     * @param lhsTypeDesc    lhs type
     * @param pipeOrAndToken pipe or bitwise-and token
     * @param rhsTypeDesc    rhs type
     * @return a TypeDescriptorNode
     */
    private STNode mergeTypes(STNode lhsTypeDesc, STNode pipeOrAndToken, STNode rhsTypeDesc) {
        if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
            return mergeTypesWithUnion(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
        } else {
            return mergeTypesWithIntersection(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
        }
    }

    /**
     * Merges two types separated by | into one type, while taking precedence
     * and associativity into account.
     *
     * @param lhsTypeDesc lhs type
     * @param pipeToken   pipe token
     * @param rhsTypeDesc rhs type
     * @return a TypeDescriptorNode
     */
    private STNode mergeTypesWithUnion(STNode lhsTypeDesc, STNode pipeToken, STNode rhsTypeDesc) {
        if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            // Union is left-associative: splice lhs into the left-most position of
            // the rhs union rather than nesting it on the right.
            STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
            return replaceLeftMostUnionWithAUnion(lhsTypeDesc, pipeToken, rhsUnionTypeDesc);
        } else {
            return createUnionTypeDesc(lhsTypeDesc, pipeToken, rhsTypeDesc);
        }
    }

    /**
     * Merges two types separated by & into one type, while taking precedence
     * and associativity into account.
     *
     * @param lhsTypeDesc     lhs type
     * @param bitwiseAndToken bitwise-and token
     * @param rhsTypeDesc     rhs type
     * @return a TypeDescriptorNode
     */
    private STNode mergeTypesWithIntersection(STNode lhsTypeDesc, STNode bitwiseAndToken, STNode rhsTypeDesc) {
        if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            // & binds tighter than |: the intersection applies only to the lhs
            // union's right-most operand.
            STUnionTypeDescriptorNode lhsUnionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
            if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
                rhsTypeDesc = replaceLeftMostIntersectionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                        bitwiseAndToken, (STIntersectionTypeDescriptorNode) rhsTypeDesc);
                return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);
            } else if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
                rhsTypeDesc = replaceLeftMostUnionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                        bitwiseAndToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
                return replaceLeftMostUnionWithAUnion(lhsUnionTypeDesc.leftTypeDesc,
                        lhsUnionTypeDesc.pipeToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
            } else {
                rhsTypeDesc = createIntersectionTypeDesc(lhsUnionTypeDesc.rightTypeDesc, bitwiseAndToken, rhsTypeDesc);
                return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);
            }
        }

        if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            // The intersection applies only to the rhs union's left-most operand.
            STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
            return replaceLeftMostUnionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsUnionTypeDesc);
        } else if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            // Intersection is left-associative: splice lhs into the left-most position.
            STIntersectionTypeDescriptorNode rhsIntSecTypeDesc = (STIntersectionTypeDescriptorNode) rhsTypeDesc;
            return replaceLeftMostIntersectionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsIntSecTypeDesc);
        } else {
            return createIntersectionTypeDesc(lhsTypeDesc, bitwiseAndToken, rhsTypeDesc);
        }
    }

    /**
     * Recursively descend to the left-most operand of a union and join the given
     * type to it with a new union, preserving left-associativity.
     */
    private STNode replaceLeftMostUnionWithAUnion(STNode typeDesc, STNode pipeToken,
                                                  STUnionTypeDescriptorNode unionTypeDesc) {
        STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;

        // Keep descending while the left operand is itself a union.
        if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostUnionWithAUnion(typeDesc, pipeToken, (STUnionTypeDescriptorNode) leftTypeDesc));
        }

        // Left-most operand found: join with a new union.
        leftTypeDesc = createUnionTypeDesc(typeDesc, pipeToken, leftTypeDesc);
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
    }

    /**
     * Recursively descend to the left-most operand of a union and join the given
     * type to it with an intersection (which binds tighter than the union).
     */
    private STNode replaceLeftMostUnionWithAIntersection(STNode typeDesc, STNode bitwiseAndToken,
                                                         STUnionTypeDescriptorNode unionTypeDesc) {
        STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;

        // Keep descending while the left operand is itself a union.
        if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostUnionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STUnionTypeDescriptorNode) leftTypeDesc));
        }

        // If the left operand is an intersection, splice into its left-most position.
        if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STIntersectionTypeDescriptorNode) leftTypeDesc));
        }

        // Left-most operand found: join with a new intersection.
        leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
    }

    /**
     * Recursively descend to the left-most operand of an intersection and join the
     * given type to it with a new intersection, preserving left-associativity.
     */
    private STNode replaceLeftMostIntersectionWithAIntersection(STNode typeDesc,
                                                                STNode bitwiseAndToken,
                                                                STIntersectionTypeDescriptorNode intersectionTypeDesc) {
        STNode leftTypeDesc = intersectionTypeDesc.leftTypeDesc;

        // Keep descending while the left operand is itself an intersection.
        if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc,
                    replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STIntersectionTypeDescriptorNode) leftTypeDesc));
        }

        // Left-most operand found: join with a new intersection.
        leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
        return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc, leftTypeDesc);
    }

    /**
     * Apply the bracketed array dimension to the right-most simple operand of the
     * lhs type (array binds tighter than | and &), rebuilding unions/intersections
     * around it.
     */
    private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
        if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);
            lhsTypeDesc = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
        } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc =
                    getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
            lhsTypeDesc = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                    intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
        } else {
            lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
        }

        return lhsTypeDesc;
    }

    /**
     * Parse union (|) or intersection (&) type operator.
     *
     * @return pipe or bitwise and token
     */
    private STNode parseUnionOrIntersectionToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
            return parseUnionOrIntersectionToken();
        }
    }

    /**
     * Infer the type of the ambiguous bracketed list, based on the type of the member.
     *
     * @param memberNode            Member node
     * @param isTypedBindingPattern Whether the context is a definite typed-BP
     * @return Inferred type of the bracketed list
     */
    private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {
        if (isEmpty(memberNode)) {
            // `[]` alone does not decide the node type here.
            return SyntaxKind.NONE;
        }

        if (isDefiniteTypeDesc(memberNode.kind)) {
            return SyntaxKind.TUPLE_TYPE_DESC;
        }

        switch (memberNode.kind) {
            case ASTERISK_LITERAL:
                return SyntaxKind.ARRAY_TYPE_DESC;
            case CAPTURE_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                return SyntaxKind.LIST_BINDING_PATTERN;
            case QUALIFIED_NAME_REFERENCE:
            case REST_TYPE:
                return SyntaxKind.TUPLE_TYPE_DESC;
            case NUMERIC_LITERAL:
                // A number is an array length in a typed-BP; otherwise could also be
                // a member-access key.
                if (isTypedBindingPattern) {
                    return SyntaxKind.ARRAY_TYPE_DESC;
                }
                return SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS;
            case SIMPLE_NAME_REFERENCE:
            case BRACKETED_LIST:
            case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
                return SyntaxKind.NONE;
            case ERROR_CONSTRUCTOR:
                if (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {
                    return SyntaxKind.NONE;
                }
                return SyntaxKind.INDEXED_EXPRESSION;
            default:
                if (isTypedBindingPattern) {
                    return SyntaxKind.NONE;
                }
                return SyntaxKind.INDEXED_EXPRESSION;
        }
    }

    /*
     * This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
     * The ambiguity lies in between:
     * 1) Assignment that starts with list binding pattern
     * 2) Var-decl statement that starts with tuple type
     * 3) Statement that starts with list constructor, such as sync-send, etc.
     */

    /**
     * Parse any statement that starts with an open-bracket.
     *
     * @param annots               Annotations attached to the statement.
     * @param possibleMappingField Whether a mapping-field interpretation is possible
     * @return Parsed node
     */
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
        startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
        return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);
    }

    // Parse a bracketed list occurring as a (non-root) member, with no annotations.
    private STNode parseMemberBracketedList() {
        STNode annots = STNodeFactory.createEmptyNodeList();
        return parseStatementStartsWithOpenBracket(annots, false, false);
    }

    /**
     * The bracketed list at the start of a statement can be one of the following.
     * 1) List binding pattern
     * 2) Tuple type
     * 3) List constructor
     *
     * @param isRoot Is this the root of the list
     * @return Parsed node
     */
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {
        startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        List memberList = new ArrayList<>();
        while (!isBracketedListEnd(peek().kind)) {
            STNode member = parseStatementStartBracketedListMember();
            SyntaxKind currentNodeType = getStmtStartBracketedListType(member);

            switch 
(currentNodeType) {\n case TUPLE_TYPE_DESC:\n \n \n member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);\n return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\n case MEMBER_TYPE_DESC:\n return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\n case LIST_BINDING_PATTERN:\n \n \n return parseAsListBindingPattern(openBracket, memberList, member, isRoot);\n case LIST_CONSTRUCTOR:\n \n \n return parseAsListConstructor(openBracket, memberList, member, isRoot);\n case LIST_BP_OR_LIST_CONSTRUCTOR:\n return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);\n case TUPLE_TYPE_DESC_OR_LIST_CONST:\n return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);\n case NONE:\n default:\n memberList.add(member);\n break;\n }\n\n \n STNode memberEnd = parseBracketedListMemberEnd();\n if (memberEnd == null) {\n break;\n }\n memberList.add(memberEnd);\n }\n\n \n STNode closeBracket = parseCloseBracket();\n STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList, closeBracket,\n isRoot, possibleMappingField);\n return bracketedList;\n }\n\n /**\n * Parse a member of a list-binding-pattern, tuple-type-desc, or\n * list-constructor-expr, when the parent is ambiguous.\n *\n * @return Parsed node\n */\n private STNode parseStatementStartBracketedListMember() {\n List typeDescQualifiers = new ArrayList<>();\n return parseStatementStartBracketedListMember(typeDescQualifiers);\n }\n\n private STNode parseStatementStartBracketedListMember(List qualifiers) {\n parseTypeDescQualifiers(qualifiers);\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case OPEN_BRACKET_TOKEN:\n reportInvalidQualifierList(qualifiers);\n return parseMemberBracketedList();\n case IDENTIFIER_TOKEN:\n reportInvalidQualifierList(qualifiers);\n STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\n if 
(isWildcardBP(identifier)) {\n STNode varName = ((STSimpleNameReferenceNode) identifier).name;\n return getWildcardBindingPattern(varName);\n }\n\n nextToken = peek();\n if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {\n STNode ellipsis = parseEllipsis();\n return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n }\n\n if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {\n \n return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n }\n\n return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);\n case OPEN_BRACE_TOKEN:\n \n reportInvalidQualifierList(qualifiers);\n return parseMappingBindingPatterOrMappingConstructor();\n case ERROR_KEYWORD:\n reportInvalidQualifierList(qualifiers);\n STToken nextNextToken = getNextNextToken();\n if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||\n nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\n return parseErrorBindingPatternOrErrorConstructor();\n }\n \n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n case ELLIPSIS_TOKEN:\n \n reportInvalidQualifierList(qualifiers);\n return parseRestBindingOrSpreadMember();\n case XML_KEYWORD:\n case STRING_KEYWORD:\n reportInvalidQualifierList(qualifiers);\n if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {\n return parseExpression(false);\n }\n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n case TABLE_KEYWORD:\n case STREAM_KEYWORD:\n reportInvalidQualifierList(qualifiers);\n if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {\n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n }\n return parseExpression(false);\n case OPEN_PAREN_TOKEN:\n return parseTypeDescOrExpr(qualifiers);\n case FUNCTION_KEYWORD:\n return parseAnonFuncExprOrFuncTypeDesc(qualifiers);\n case AT_TOKEN:\n return parseMemberDescriptor();\n default:\n if (isValidExpressionStart(nextToken.kind, 1)) {\n 
reportInvalidQualifierList(qualifiers);\n return parseExpression(false);\n }\n\n if (isTypeStartingToken(nextToken.kind)) {\n return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);\n }\n\n recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);\n return parseStatementStartBracketedListMember(qualifiers);\n }\n }\n\n private STNode parseRestBindingOrSpreadMember() {\n STNode ellipsis = parseEllipsis();\n STNode expr = parseExpression();\n if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\n return STNodeFactory.createRestBindingPatternNode(ellipsis, expr);\n } else {\n return STNodeFactory.createSpreadMemberNode(ellipsis, expr);\n }\n }\n\n private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List memberList,\n STNode member, boolean isRoot) {\n memberList.add(member);\n STNode memberEnd = parseBracketedListMemberEnd();\n\n STNode tupleTypeDescOrListCons;\n if (memberEnd == null) {\n \n STNode closeBracket = parseCloseBracket();\n tupleTypeDescOrListCons =\n parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);\n } else {\n memberList.add(memberEnd);\n tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);\n }\n\n return tupleTypeDescOrListCons;\n }\n\n /**\n * Parse tuple type desc or list constructor.\n *\n * @return Parsed node\n */\n private STNode parseTupleTypeDescOrListConstructor(STNode annots) {\n startContext(ParserRuleContext.BRACKETED_LIST);\n STNode openBracket = parseOpenBracket();\n List memberList = new ArrayList<>();\n return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);\n }\n\n private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List memberList,\n boolean isRoot) {\n \n STToken nextToken = peek();\n while (!isBracketedListEnd(nextToken.kind)) {\n \n STNode member = parseTupleTypeDescOrListConstructorMember(annots);\n SyntaxKind 
currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);\n\n switch (currentNodeType) {\n case LIST_CONSTRUCTOR:\n \n \n return parseAsListConstructor(openBracket, memberList, member, isRoot);\n case TUPLE_TYPE_DESC:\n \n \n member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);\n return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\n case MEMBER_TYPE_DESC:\n return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\n case TUPLE_TYPE_DESC_OR_LIST_CONST:\n default:\n memberList.add(member);\n break;\n }\n\n \n STNode memberEnd = parseBracketedListMemberEnd();\n if (memberEnd == null) {\n break;\n }\n memberList.add(memberEnd);\n nextToken = peek();\n }\n\n \n STNode closeBracket = parseCloseBracket();\n return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);\n }\n\n private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case OPEN_BRACKET_TOKEN:\n \n return parseTupleTypeDescOrListConstructor(annots);\n case IDENTIFIER_TOKEN:\n STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\n \n if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {\n STNode ellipsis = parseEllipsis();\n return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n }\n return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);\n case OPEN_BRACE_TOKEN:\n \n return parseMappingConstructorExpr();\n case ERROR_KEYWORD:\n STToken nextNextToken = getNextNextToken();\n if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||\n nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\n return parseErrorConstructorExpr(false);\n }\n \n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n case XML_KEYWORD:\n case STRING_KEYWORD:\n if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {\n return parseExpression(false);\n }\n return 
parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n case TABLE_KEYWORD:\n case STREAM_KEYWORD:\n if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {\n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n }\n return parseExpression(false);\n case OPEN_PAREN_TOKEN:\n return parseTypeDescOrExpr();\n case AT_TOKEN:\n return parseMemberDescriptor();\n default:\n if (isValidExpressionStart(nextToken.kind, 1)) {\n return parseExpression(false);\n }\n\n if (isTypeStartingToken(nextToken.kind)) {\n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n }\n\n recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER);\n return parseTupleTypeDescOrListConstructorMember(annots);\n }\n }\n\n private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {\n \n return getStmtStartBracketedListType(memberNode);\n }\n\n private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List members, STNode closeBracket,\n boolean isRoot) {\n STNode tupleTypeOrListConst;\n switch (peek().kind) {\n case COMMA_TOKEN: \n case CLOSE_BRACE_TOKEN: \n case CLOSE_BRACKET_TOKEN: \n case PIPE_TOKEN: \n case BITWISE_AND_TOKEN: \n if (!isRoot) {\n endContext();\n return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members,\n closeBracket);\n }\n \n default:\n if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||\n (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {\n members = getExpressionList(members, false);\n STNode memberExpressions = STNodeFactory.createNodeList(members);\n tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,\n memberExpressions, closeBracket);\n break;\n }\n\n \n STNode memberTypeDescs = STNodeFactory.createNodeList(getTupleMemberList(members));\n STNode tupleTypeDesc =\n STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\n tupleTypeOrListConst =\n 
parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n }\n\n endContext();\n\n if (!isRoot) {\n return tupleTypeOrListConst;\n }\n\n STNode annots = STNodeFactory.createEmptyNodeList();\n return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);\n\n }", "target_code": "return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);", "method_body_after": "private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {\n \n switch (peek(lookahead + 1).kind) {\n case IDENTIFIER_TOKEN:\n SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;\n switch (tokenAfterIdentifier) {\n case ON_KEYWORD: \n case OPEN_BRACE_TOKEN: \n return true;\n case EQUAL_TOKEN: \n case SEMICOLON_TOKEN: \n case QUESTION_MARK_TOKEN: \n return false;\n default:\n \n return false;\n }\n case ON_KEYWORD:\n \n \n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse listener declaration, given the qualifier.\n *

\n * \n * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;\n * \n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the listener declaration\n * @return Parsed node\n */\n private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {\n startContext(ParserRuleContext.LISTENER_DECL);\n STNode listenerKeyword = parseListenerKeyword();\n\n if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {\n STNode listenerDecl =\n parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);\n endContext();\n return listenerDecl;\n }\n\n STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\n STNode variableName = parseVariableName();\n STNode equalsToken = parseAssignOp();\n STNode initializer = parseExpression();\n STNode semicolonToken = parseSemicolon();\n endContext();\n return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,\n equalsToken, initializer, semicolonToken);\n }\n\n /**\n * Parse listener keyword.\n *\n * @return Parsed node\n */\n private STNode parseListenerKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.LISTENER_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.LISTENER_KEYWORD);\n return parseListenerKeyword();\n }\n }\n\n /**\n * Parse constant declaration, given the qualifier.\n *

\n * module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;\n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the listener declaration\n * @return Parsed node\n */\n private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {\n startContext(ParserRuleContext.CONSTANT_DECL);\n STNode constKeyword = parseConstantKeyword();\n \n return parseConstDecl(metadata, qualifier, constKeyword);\n }\n\n /**\n * Parse the components that follows after the const keyword of a constant declaration.\n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the constant decl\n * @param constKeyword Const keyword\n * @return Parsed node\n */\n private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case ANNOTATION_KEYWORD:\n endContext();\n return parseAnnotationDeclaration(metadata, qualifier, constKeyword);\n case IDENTIFIER_TOKEN:\n STNode constantDecl =\n parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);\n endContext();\n return constantDecl;\n default:\n if (isTypeStartingToken(nextToken.kind)) {\n break;\n }\n\n recover(peek(), ParserRuleContext.CONST_DECL_TYPE);\n return parseConstDecl(metadata, qualifier, constKeyword);\n }\n\n STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);\n STNode variableName = parseVariableName();\n STNode equalsToken = parseAssignOp();\n STNode initializer = parseExpression();\n STNode semicolonToken = parseSemicolon();\n endContext();\n return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,\n equalsToken, initializer, semicolonToken);\n }\n\n private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\n boolean isListener) {\n STNode varNameOrTypeName = 
parseStatementStartIdentifier();\n return parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);\n }\n\n /**\n * Parse the component that follows the first identifier in a const decl. The identifier\n * can be either the type-name (a user defined type) or the var-name there the type-name\n * is not present.\n *\n * @param qualifier Qualifier that precedes the constant decl\n * @param keyword Keyword\n * @param typeOrVarName Identifier that follows the const-keywoord\n * @return Parsed node\n */\n private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,\n STNode typeOrVarName, boolean isListener) {\n if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\n STNode type = typeOrVarName;\n STNode variableName = parseVariableName();\n return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n }\n\n STNode type;\n STNode variableName;\n switch (peek().kind) {\n case IDENTIFIER_TOKEN:\n type = typeOrVarName;\n variableName = parseVariableName();\n break;\n case EQUAL_TOKEN:\n variableName = ((STSimpleNameReferenceNode) typeOrVarName).name; \n type = STNodeFactory.createEmptyNode();\n break;\n default:\n recover(peek(), ParserRuleContext.CONST_DECL_RHS);\n return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);\n }\n\n return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);\n }\n\n private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,\n STNode type, STNode variableName) {\n STNode equalsToken = parseAssignOp();\n STNode initializer = parseExpression();\n STNode semicolonToken = parseSemicolon();\n\n if (isListener) {\n return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,\n equalsToken, initializer, semicolonToken);\n }\n\n return 
STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,\n equalsToken, initializer, semicolonToken);\n }\n\n /**\n * Parse const keyword.\n *\n * @return Parsed node\n */\n private STNode parseConstantKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.CONST_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.CONST_KEYWORD);\n return parseConstantKeyword();\n }\n }\n\n /**\n * Parse typeof expression.\n *

\n * \n * typeof-expr := typeof expression\n * \n *\n * @param isRhsExpr\n * @return Typeof expression node\n */\n private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\n STNode typeofKeyword = parseTypeofKeyword();\n\n \n \n STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\n return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);\n }\n\n /**\n * Parse typeof-keyword.\n *\n * @return Typeof-keyword node\n */\n private STNode parseTypeofKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.TYPEOF_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.TYPEOF_KEYWORD);\n return parseTypeofKeyword();\n }\n }\n\n /**\n * Parse optional type descriptor given the type.\n *

\n * optional-type-descriptor := type-descriptor `?`\n *

\n *\n * @param typeDescriptorNode Preceding type descriptor\n * @return Parsed node\n */\n private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {\n startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);\n STNode questionMarkToken = parseQuestionMark();\n endContext();\n return createOptionalTypeDesc(typeDescriptorNode, questionMarkToken);\n }\n\n private STNode createOptionalTypeDesc(STNode typeDescNode, STNode questionMarkToken) {\n if (typeDescNode.kind == SyntaxKind.UNION_TYPE_DESC) {\n STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDescNode;\n STNode middleTypeDesc = createOptionalTypeDesc(unionTypeDesc.rightTypeDesc, questionMarkToken);\n typeDescNode = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);\n } else if (typeDescNode.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\n STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDescNode;\n STNode middleTypeDesc = createOptionalTypeDesc(intersectionTypeDesc.rightTypeDesc, questionMarkToken);\n typeDescNode = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,\n intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);\n } else {\n typeDescNode = validateForUsageOfVar(typeDescNode);\n typeDescNode = STNodeFactory.createOptionalTypeDescriptorNode(typeDescNode, questionMarkToken);\n }\n\n return typeDescNode;\n }\n\n /**\n * Parse unary expression.\n *

\n * \n * unary-expr := + expression | - expression | ~ expression | ! expression\n * \n *\n * @param isRhsExpr\n * @return Unary expression node\n */\n private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\n STNode unaryOperator = parseUnaryOperator();\n\n \n \n STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);\n return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);\n }\n\n /**\n * Parse unary operator.\n * UnaryOperator := + | - | ~ | !\n *\n * @return Parsed node\n */\n private STNode parseUnaryOperator() {\n STToken token = peek();\n if (isUnaryOperator(token.kind)) {\n return consume();\n } else {\n recover(token, ParserRuleContext.UNARY_OPERATOR);\n return parseUnaryOperator();\n }\n }\n\n /**\n * Check whether the given token kind is a unary operator.\n *\n * @param kind STToken kind\n * @return true if the token kind refers to a unary operator. false otherwise\n */\n private boolean isUnaryOperator(SyntaxKind kind) {\n switch (kind) {\n case PLUS_TOKEN:\n case MINUS_TOKEN:\n case NEGATION_TOKEN:\n case EXCLAMATION_MARK_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse array type descriptor.\n *

\n * \n * array-type-descriptor := array-member-type-descriptor [ [ array-length ] ]\n * array-member-type-descriptor := type-descriptor\n * array-length :=\n * int-literal\n * | constant-reference-expr\n * | inferred-array-length\n * inferred-array-length := *\n * \n *

\n *\n * @param memberTypeDesc\n * @return Parsed Node\n */\n private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {\n startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);\n STNode openBracketToken = parseOpenBracket();\n STNode arrayLengthNode = parseArrayLength();\n STNode closeBracketToken = parseCloseBracket();\n endContext();\n return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken);\n }\n\n private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode,\n STNode closeBracketToken) {\n memberTypeDesc = validateForUsageOfVar(memberTypeDesc);\n if (arrayLengthNode != null) {\n switch (arrayLengthNode.kind) {\n case ASTERISK_LITERAL:\n case SIMPLE_NAME_REFERENCE:\n case QUALIFIED_NAME_REFERENCE:\n break;\n case NUMERIC_LITERAL:\n SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind;\n if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||\n numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {\n break;\n }\n \n default:\n openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken,\n arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\n arrayLengthNode = STNodeFactory.createEmptyNode();\n }\n }\n \n \n List arrayDimensions = new ArrayList();\n if (memberTypeDesc.kind == SyntaxKind.ARRAY_TYPE_DESC) {\n STArrayTypeDescriptorNode innerArrayType = (STArrayTypeDescriptorNode) memberTypeDesc;\n STNode innerArrayDimensions = innerArrayType.dimensions;\n int dimensionCount = innerArrayDimensions.bucketCount();\n \n for (int i = 0; i < dimensionCount; i++) {\n arrayDimensions.add(innerArrayDimensions.childInBucket(i));\n }\n memberTypeDesc = innerArrayType.memberTypeDesc;\n }\n \n STNode arrayDimension = STNodeFactory.createArrayDimensionNode(openBracketToken, arrayLengthNode, \n closeBracketToken);\n arrayDimensions.add(arrayDimension);\n STNode arrayDimensionNodeList = 
STNodeFactory.createNodeList(arrayDimensions);\n \n return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, arrayDimensionNodeList);\n }\n\n /**\n * Parse array length.\n *

\n * \n * array-length :=\n * int-literal\n * | constant-reference-expr\n * | inferred-array-length\n * constant-reference-expr := variable-reference-expr\n * \n *

\n *\n * @return Parsed array length\n */\n private STNode parseArrayLength() {\n STToken token = peek();\n switch (token.kind) {\n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case ASTERISK_TOKEN:\n return parseBasicLiteral();\n case CLOSE_BRACKET_TOKEN:\n return STNodeFactory.createEmptyNode();\n \n case IDENTIFIER_TOKEN:\n return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);\n default:\n recover(token, ParserRuleContext.ARRAY_LENGTH);\n return parseArrayLength();\n }\n }\n\n /**\n * Parse annotations.\n *

\n * annots := annotation*\n *\n * @return Parsed node\n */\n private STNode parseOptionalAnnotations() {\n startContext(ParserRuleContext.ANNOTATIONS);\n List annotList = new ArrayList<>();\n STToken nextToken = peek();\n while (nextToken.kind == SyntaxKind.AT_TOKEN) {\n annotList.add(parseAnnotation());\n nextToken = peek();\n }\n\n endContext();\n return STNodeFactory.createNodeList(annotList);\n }\n\n /**\n * Parse annotation list with at least one annotation.\n *\n * @return Annotation list\n */\n private STNode parseAnnotations() {\n startContext(ParserRuleContext.ANNOTATIONS);\n List annotList = new ArrayList<>();\n annotList.add(parseAnnotation());\n while (peek().kind == SyntaxKind.AT_TOKEN) {\n annotList.add(parseAnnotation());\n }\n\n endContext();\n return STNodeFactory.createNodeList(annotList);\n }\n\n /**\n * Parse annotation attachment.\n *

\n * annotation := @ annot-tag-reference annot-value\n *\n * @return Parsed node\n */\n private STNode parseAnnotation() {\n STNode atToken = parseAtToken();\n STNode annotReference;\n if (isPredeclaredIdentifier(peek().kind)) {\n annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);\n } else {\n annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n annotReference = STNodeFactory.createSimpleNameReferenceNode(annotReference);\n }\n\n STNode annotValue;\n if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {\n annotValue = parseMappingConstructorExpr();\n } else {\n annotValue = STNodeFactory.createEmptyNode();\n }\n return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);\n }\n\n /**\n * Parse '@' token.\n *\n * @return Parsed node\n */\n private STNode parseAtToken() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.AT_TOKEN) {\n return consume();\n } else {\n recover(nextToken, ParserRuleContext.AT);\n return parseAtToken();\n }\n }\n\n /**\n * Parse metadata. Meta data consist of optional doc string and\n * an annotations list.\n *

\n * metadata := [DocumentationString] annots\n *\n * @return Parse node\n */\n private STNode parseMetaData() {\n STNode docString;\n STNode annotations;\n switch (peek().kind) {\n case DOCUMENTATION_STRING:\n docString = parseMarkdownDocumentation();\n annotations = parseOptionalAnnotations();\n break;\n case AT_TOKEN:\n docString = STNodeFactory.createEmptyNode();\n annotations = parseOptionalAnnotations();\n break;\n default:\n return STNodeFactory.createEmptyNode();\n }\n\n return createMetadata(docString, annotations);\n }\n\n /**\n * Create metadata node.\n *\n * @return A metadata node\n */\n private STNode createMetadata(STNode docString, STNode annotations) {\n if (annotations == null && docString == null) {\n return STNodeFactory.createEmptyNode();\n } else {\n return STNodeFactory.createMetadataNode(docString, annotations);\n }\n }\n\n /**\n * Parse type test expression.\n * \n * type-test-expr := expression (is | !is) type-descriptor\n * \n *\n * @param lhsExpr Preceding expression of the is expression\n * @return Is expression node\n */\n private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {\n STNode isOrNotIsKeyword = parseIsOrNotIsKeyword();\n STNode typeDescriptor = parseTypeDescriptorInExpression(isInConditionalExpr);\n return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isOrNotIsKeyword, typeDescriptor);\n }\n\n /**\n * Parse `is` keyword or `!is` keyword.\n *\n * @return is-keyword or not-is-keyword node\n */\n private STNode parseIsOrNotIsKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.IS_KEYWORD ||\n token.kind == SyntaxKind.NOT_IS_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.IS_KEYWORD);\n return parseIsOrNotIsKeyword();\n }\n }\n\n /**\n * Parse local type definition statement statement.\n * ocal-type-defn-stmt := [annots] type identifier type-descriptor ;\n *\n * @return local type definition statement statement\n */\n private STNode 
parseLocalTypeDefinitionStatement(STNode annots) {\n startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);\n STNode typeKeyword = parseTypeKeyword();\n STNode typeName = parseTypeName();\n STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);\n STNode semicolon = parseSemicolon();\n endContext();\n return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,\n semicolon);\n }\n\n /**\n * Parse statement which is only consists of an action or expression.\n *\n * @param annots Annotations\n * @return Statement node\n */\n private STNode parseExpressionStatement(STNode annots) {\n startContext(ParserRuleContext.EXPRESSION_STATEMENT);\n STNode expression = parseActionOrExpressionInLhs(annots);\n return getExpressionAsStatement(expression);\n }\n\n /**\n * Parse statements that starts with an expression.\n *\n * @return Statement node\n */\n private STNode parseStatementStartWithExpr(STNode annots) {\n startContext(ParserRuleContext.AMBIGUOUS_STMT);\n STNode expr = parseActionOrExpressionInLhs(annots);\n return parseStatementStartWithExprRhs(expr);\n }\n\n /**\n * Parse the component followed by the expression, at the beginning of a statement.\n *\n * @param expression Action or expression in LHS\n * @return Statement node\n */\n private STNode parseStatementStartWithExprRhs(STNode expression) {\n SyntaxKind nextTokenKind = peek().kind;\n if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) {\n return getExpressionAsStatement(expression);\n }\n\n switch (nextTokenKind) {\n case EQUAL_TOKEN:\n switchContext(ParserRuleContext.ASSIGNMENT_STMT);\n return parseAssignmentStmtRhs(expression);\n case IDENTIFIER_TOKEN:\n default:\n \n if (isCompoundAssignment(nextTokenKind)) {\n return parseCompoundAssignmentStmtRhs(expression);\n }\n\n ParserRuleContext context;\n if (isPossibleExpressionStatement(expression)) {\n context = ParserRuleContext.EXPR_STMT_RHS;\n } else {\n 
context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;\n }\n\n recover(peek(), context);\n return parseStatementStartWithExprRhs(expression);\n }\n }\n\n private boolean isPossibleExpressionStatement(STNode expression) {\n switch (expression.kind) {\n case METHOD_CALL:\n case FUNCTION_CALL:\n case CHECK_EXPRESSION:\n case REMOTE_METHOD_CALL_ACTION:\n case CHECK_ACTION:\n case BRACED_ACTION:\n case START_ACTION:\n case TRAP_ACTION:\n case FLUSH_ACTION:\n case ASYNC_SEND_ACTION:\n case SYNC_SEND_ACTION:\n case RECEIVE_ACTION:\n case WAIT_ACTION:\n case QUERY_ACTION:\n case COMMIT_ACTION:\n return true;\n default:\n return false;\n }\n }\n\n private STNode getExpressionAsStatement(STNode expression) {\n switch (expression.kind) {\n case METHOD_CALL:\n case FUNCTION_CALL:\n return parseCallStatement(expression);\n case CHECK_EXPRESSION:\n return parseCheckStatement(expression);\n case REMOTE_METHOD_CALL_ACTION:\n case CHECK_ACTION:\n case BRACED_ACTION:\n case START_ACTION:\n case TRAP_ACTION:\n case FLUSH_ACTION:\n case ASYNC_SEND_ACTION:\n case SYNC_SEND_ACTION:\n case RECEIVE_ACTION:\n case WAIT_ACTION:\n case QUERY_ACTION:\n case COMMIT_ACTION:\n return parseActionStatement(expression);\n default:\n \n STNode semicolon = parseSemicolon();\n endContext();\n expression = getExpression(expression);\n STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,\n expression, semicolon);\n exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);\n return exprStmt;\n }\n }\n\n private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {\n STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);\n STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;\n if (lengthExprs.isEmpty()) {\n return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),\n indexedExpr.closeBracket);\n }\n\n \n STNode 
lengthExpr = lengthExprs.get(0);\n switch (lengthExpr.kind) {\n case SIMPLE_NAME_REFERENCE:\n STSimpleNameReferenceNode nameRef = (STSimpleNameReferenceNode) lengthExpr;\n if (nameRef.name.isMissing()) {\n return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),\n indexedExpr.closeBracket);\n }\n break;\n case ASTERISK_LITERAL:\n case QUALIFIED_NAME_REFERENCE:\n break;\n case NUMERIC_LITERAL:\n SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;\n if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||\n innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {\n break;\n }\n \n default:\n STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(\n indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);\n indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);\n lengthExpr = STNodeFactory.createEmptyNode();\n }\n\n return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);\n }\n\n /**\n *

\n * Parse call statement, given the call expression.\n *

\n * \n * call-stmt := call-expr ;\n *
\n * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr\n *
\n *\n * @param expression Call expression associated with the call statement\n * @return Call statement node\n */\n private STNode parseCallStatement(STNode expression) {\n return parseCallStatementOrCheckStatement(expression);\n }\n\n /**\n *

\n * Parse checking statement.\n *

\n * \n * checking-stmt := checking-expr ;\n *
\n * checking-expr := checking-keyword expr ;\n *
\n *\n * @param expression Checking expression associated with the checking statement\n * @return Checking statement node\n */\n private STNode parseCheckStatement(STNode expression) {\n return parseCallStatementOrCheckStatement(expression);\n }\n\n private STNode parseCallStatementOrCheckStatement(STNode expression) {\n STNode semicolon = parseSemicolon();\n endContext();\n return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);\n }\n\n private STNode parseActionStatement(STNode action) {\n STNode semicolon = parseSemicolon();\n endContext();\n return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);\n }\n\n /**\n * Parse remote method call action, given the starting expression.\n *

\n * \n * remote-method-call-action := expression -> method-name ( arg-list )\n *
\n * async-send-action := expression -> peer-worker ;\n *
\n *\n * @param isRhsExpr Is this an RHS action\n * @param expression LHS expression\n * @return\n */\n private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {\n STNode rightArrow = parseRightArrow();\n return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n }\n\n private STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {\n STNode name;\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case FUNCTION_KEYWORD:\n STNode functionKeyword = consume();\n name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword);\n return parseAsyncSendAction(expression, rightArrow, name);\n case IDENTIFIER_TOKEN:\n name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());\n break;\n case CONTINUE_KEYWORD:\n case COMMIT_KEYWORD:\n name = getKeywordAsSimpleNameRef();\n break;\n default:\n STToken token = peek();\n recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS);\n return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);\n }\n\n return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);\n }\n\n private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case OPEN_PAREN_TOKEN:\n return parseRemoteMethodCallAction(expression, rightArrow, name);\n case SEMICOLON_TOKEN:\n return parseAsyncSendAction(expression, rightArrow, name);\n default:\n recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END);\n return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);\n }\n }\n\n private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {\n return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);\n }\n\n private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {\n STNode openParenToken = 
parseArgListOpenParenthesis();\n STNode arguments = parseArgsList();\n STNode closeParenToken = parseArgListCloseParenthesis();\n return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,\n closeParenToken);\n }\n\n /**\n * Parse right arrow (->) token.\n *\n * @return Parsed node\n */\n private STNode parseRightArrow() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {\n return consume();\n } else {\n recover(nextToken, ParserRuleContext.RIGHT_ARROW);\n return parseRightArrow();\n }\n }\n\n /**\n * Parse map type descriptor.\n * map-type-descriptor := `map` type-parameter\n *\n * @return Parsed node\n */\n private STNode parseMapTypeDescriptor(STNode mapKeyword) {\n STNode typeParameter = parseTypeParameter();\n return STNodeFactory.createMapTypeDescriptorNode(mapKeyword, typeParameter);\n }\n\n /**\n * Parse parameterized type descriptor.\n * parameterized-type-descriptor := `typedesc` [type-parameter]\n *
 | `future` [type-parameter]\n *
 | `xml` [type-parameter]\n *
 | `error` [type-parameter]\n *\n * @return Parsed node\n */\n private STNode parseParameterizedTypeDescriptor(STNode keywordToken) {\n STNode typeParamNode;\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.LT_TOKEN) {\n typeParamNode = parseTypeParameter();\n } else {\n typeParamNode = STNodeFactory.createEmptyNode();\n }\n\n SyntaxKind parameterizedTypeDescKind = getParameterizedTypeDescKind(keywordToken);\n return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeDescKind, keywordToken,\n typeParamNode);\n }\n\n private SyntaxKind getParameterizedTypeDescKind(STNode keywordToken) {\n switch (keywordToken.kind) {\n case TYPEDESC_KEYWORD:\n return SyntaxKind.TYPEDESC_TYPE_DESC;\n case FUTURE_KEYWORD:\n return SyntaxKind.FUTURE_TYPE_DESC;\n case XML_KEYWORD:\n return SyntaxKind.XML_TYPE_DESC;\n case ERROR_KEYWORD:\n default:\n return SyntaxKind.ERROR_TYPE_DESC;\n }\n }\n \n /**\n * Parse < token.\n *\n * @return Parsed node\n */\n private STNode parseGTToken() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.GT_TOKEN) {\n return consume();\n } else {\n recover(nextToken, ParserRuleContext.GT);\n return parseGTToken();\n }\n }\n\n /**\n * Parse > token.\n *\n * @return Parsed node\n */\n private STNode parseLTToken() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.LT_TOKEN) {\n return consume();\n } else {\n recover(nextToken, ParserRuleContext.LT);\n return parseLTToken();\n }\n }\n\n /**\n * Parse nil literal. 
Here nil literal is only referred to ( ).\n *\n * @return Parsed node\n */\n private STNode parseNilLiteral() {\n startContext(ParserRuleContext.NIL_LITERAL);\n STNode openParenthesisToken = parseOpenParenthesis();\n STNode closeParenthesisToken = parseCloseParenthesis();\n endContext();\n return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);\n }\n\n /**\n * Parse annotation declaration, given the qualifier.\n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the listener declaration\n * @param constKeyword Const keyword\n * @return Parsed node\n */\n private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {\n startContext(ParserRuleContext.ANNOTATION_DECL);\n STNode annotationKeyword = parseAnnotationKeyword();\n STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\n endContext();\n return annotDecl;\n }\n\n /**\n * Parse annotation keyword.\n *\n * @return Parsed node\n */\n private STNode parseAnnotationKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.ANNOTATION_KEYWORD);\n return parseAnnotationKeyword();\n }\n }\n\n /**\n * Parse the components that follows after the annotation keyword of a annotation declaration.\n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the constant decl\n * @param constKeyword Const keyword\n * @param annotationKeyword\n * @return Parsed node\n */\n private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,\n STNode annotationKeyword) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case IDENTIFIER_TOKEN:\n return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);\n default:\n if (isTypeStartingToken(nextToken.kind)) {\n break;\n }\n\n recover(peek(), 
ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);\n return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);\n }\n\n STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);\n STNode annotTag = parseAnnotationTag();\n return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\n annotTag);\n }\n\n /**\n * Parse annotation tag.\n *

\n * annot-tag := identifier\n *\n * @return\n */\n private STNode parseAnnotationTag() {\n STToken token = peek();\n if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {\n return consume();\n } else {\n recover(peek(), ParserRuleContext.ANNOTATION_TAG);\n return parseAnnotationTag();\n }\n }\n\n private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,\n STNode annotationKeyword) {\n \n \n STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);\n if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\n STNode annotTag = parseAnnotationTag();\n return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,\n typeDescOrAnnotTag, annotTag);\n }\n\n \n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {\n STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,\n ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);\n STNode annotTag = parseAnnotationTag();\n return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\n annotTag);\n }\n\n STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;\n return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);\n }\n\n /**\n * Parse the component that follows the first identifier in an annotation decl. 
The identifier\n * can be either the type-name (a user defined type) or the annot-tag, where the type-name\n * is not present.\n *\n * @param metadata Metadata\n * @param qualifier Qualifier that precedes the annotation decl\n * @param constKeyword Const keyword\n * @param annotationKeyword Annotation keyword\n * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword\n * @return Parsed node\n */\n private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,\n STNode annotationKeyword, STNode typeDescOrAnnotTag) {\n STToken nextToken = peek();\n STNode typeDesc;\n STNode annotTag;\n switch (nextToken.kind) {\n case IDENTIFIER_TOKEN:\n typeDesc = typeDescOrAnnotTag;\n annotTag = parseAnnotationTag();\n break;\n case SEMICOLON_TOKEN:\n case ON_KEYWORD:\n typeDesc = STNodeFactory.createEmptyNode();\n annotTag = typeDescOrAnnotTag;\n break;\n default:\n recover(peek(), ParserRuleContext.ANNOT_DECL_RHS);\n return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);\n }\n\n return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,\n annotTag);\n }\n\n private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,\n STNode annotationKeyword, STNode typeDesc, STNode annotTag) {\n STNode onKeyword;\n STNode attachPoints;\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case SEMICOLON_TOKEN:\n onKeyword = STNodeFactory.createEmptyNode();\n attachPoints = STNodeFactory.createEmptyNodeList();\n break;\n case ON_KEYWORD:\n onKeyword = parseOnKeyword();\n attachPoints = parseAnnotationAttachPoints();\n onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,\n DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\n break;\n default:\n recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS);\n return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, 
annotationKeyword, typeDesc,\n annotTag);\n }\n\n STNode semicolonToken = parseSemicolon();\n return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,\n typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);\n }\n\n /**\n * Parse annotation attach points.\n *

\n * \n * annot-attach-points := annot-attach-point (, annot-attach-point)*\n *

\n * annot-attach-point := dual-attach-point | source-only-attach-point\n *

\n * dual-attach-point := [source] dual-attach-point-ident\n *

\n * dual-attach-point-ident :=\n * type\n * | class\n * | [object|service remote] function\n * | parameter\n * | return\n * | service\n * | [object|record] field\n *

\n * source-only-attach-point := source source-only-attach-point-ident\n *

\n * source-only-attach-point-ident :=\n * annotation\n * | external\n * | var\n * | const\n * | listener\n * | worker\n *
\n *\n * @return Parsed node\n */\n private STNode parseAnnotationAttachPoints() {\n startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);\n List attachPoints = new ArrayList<>();\n\n STToken nextToken = peek();\n if (isEndAnnotAttachPointList(nextToken.kind)) {\n endContext();\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n STNode attachPoint = parseAnnotationAttachPoint();\n attachPoints.add(attachPoint);\n\n \n nextToken = peek();\n STNode leadingComma;\n while (!isEndAnnotAttachPointList(nextToken.kind)) {\n leadingComma = parseAttachPointEnd();\n if (leadingComma == null) {\n break;\n }\n attachPoints.add(leadingComma);\n\n \n attachPoint = parseAnnotationAttachPoint();\n if (attachPoint == null) {\n STToken missingAttachPointIdent = SyntaxErrors.createMissingToken(SyntaxKind.TYPE_KEYWORD);\n STNode identList = STNodeFactory.createNodeList(missingAttachPointIdent);\n attachPoint = STNodeFactory.createAnnotationAttachPointNode(STNodeFactory.createEmptyNode(), identList);\n attachPoint = SyntaxErrors.addDiagnostic(attachPoint,\n DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);\n attachPoints.add(attachPoint);\n break;\n }\n\n attachPoints.add(attachPoint);\n nextToken = peek();\n }\n \n if (attachPoint.lastToken().isMissing() && this.tokenReader.peek().kind == SyntaxKind.IDENTIFIER_TOKEN &&\n !this.tokenReader.head().hasTrailingNewline()) {\n \n \n STToken nextNonVirtualToken = this.tokenReader.read();\n updateLastNodeInListWithInvalidNode(attachPoints, nextNonVirtualToken,\n DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextNonVirtualToken.text());\n }\n\n endContext();\n return STNodeFactory.createNodeList(attachPoints);\n }\n\n /**\n * Parse annotation attach point end.\n *\n * @return Parsed node\n */\n private STNode parseAttachPointEnd() {\n switch (peek().kind) {\n case SEMICOLON_TOKEN:\n \n return null;\n case COMMA_TOKEN:\n return consume();\n default:\n recover(peek(), ParserRuleContext.ATTACH_POINT_END);\n return 
parseAttachPointEnd();\n }\n }\n\n private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case EOF_TOKEN:\n case SEMICOLON_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse annotation attach point.\n *\n * @return Parsed node\n */\n private STNode parseAnnotationAttachPoint() {\n switch (peek().kind) {\n case EOF_TOKEN:\n return null;\n\n \n case ANNOTATION_KEYWORD:\n case EXTERNAL_KEYWORD:\n case VAR_KEYWORD:\n case CONST_KEYWORD:\n case LISTENER_KEYWORD:\n case WORKER_KEYWORD:\n \n\n case SOURCE_KEYWORD:\n STNode sourceKeyword = parseSourceKeyword();\n return parseAttachPointIdent(sourceKeyword);\n\n \n case OBJECT_KEYWORD:\n case TYPE_KEYWORD:\n case FUNCTION_KEYWORD:\n case PARAMETER_KEYWORD:\n case RETURN_KEYWORD:\n case SERVICE_KEYWORD:\n case FIELD_KEYWORD:\n case RECORD_KEYWORD:\n case CLASS_KEYWORD:\n sourceKeyword = STNodeFactory.createEmptyNode();\n STNode firstIdent = consume();\n return parseDualAttachPointIdent(sourceKeyword, firstIdent);\n default:\n recover(peek(), ParserRuleContext.ATTACH_POINT);\n return parseAnnotationAttachPoint();\n }\n }\n\n /**\n * Parse source keyword.\n *\n * @return Parsed node\n */\n private STNode parseSourceKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.SOURCE_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.SOURCE_KEYWORD);\n return parseSourceKeyword();\n }\n }\n\n /**\n * Parse attach point ident gievn.\n *

\n * \n * source-only-attach-point-ident := annotation | external | var | const | listener | worker\n *

\n * dual-attach-point-ident := type | class | [object|service remote] function | parameter\n * | return | service | [object|record] field\n *
\n *\n * @param sourceKeyword Source keyword\n * @return Parsed node\n */\n private STNode parseAttachPointIdent(STNode sourceKeyword) {\n switch (peek().kind) {\n case ANNOTATION_KEYWORD:\n case EXTERNAL_KEYWORD:\n case VAR_KEYWORD:\n case CONST_KEYWORD:\n case LISTENER_KEYWORD:\n case WORKER_KEYWORD:\n STNode firstIdent = consume();\n STNode identList = STNodeFactory.createNodeList(firstIdent);\n return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n case OBJECT_KEYWORD:\n case RESOURCE_KEYWORD:\n case RECORD_KEYWORD:\n case TYPE_KEYWORD:\n case FUNCTION_KEYWORD:\n case PARAMETER_KEYWORD:\n case RETURN_KEYWORD:\n case SERVICE_KEYWORD:\n case FIELD_KEYWORD:\n case CLASS_KEYWORD:\n firstIdent = consume();\n return parseDualAttachPointIdent(sourceKeyword, firstIdent);\n default:\n recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT);\n return parseAttachPointIdent(sourceKeyword);\n }\n }\n\n /**\n * Parse dual-attach-point ident.\n *\n * @param sourceKeyword Source keyword\n * @param firstIdent first part of the dual attach-point\n * @return Parsed node\n */\n private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {\n STNode secondIdent;\n switch (firstIdent.kind) {\n case OBJECT_KEYWORD:\n secondIdent = parseIdentAfterObjectIdent();\n break;\n case RESOURCE_KEYWORD:\n secondIdent = parseFunctionIdent();\n break;\n case RECORD_KEYWORD:\n secondIdent = parseFieldIdent();\n break;\n case SERVICE_KEYWORD:\n return parseServiceAttachPoint(sourceKeyword, firstIdent);\n case TYPE_KEYWORD:\n case FUNCTION_KEYWORD:\n case PARAMETER_KEYWORD:\n case RETURN_KEYWORD:\n case FIELD_KEYWORD:\n case CLASS_KEYWORD:\n default: \n STNode identList = STNodeFactory.createNodeList(firstIdent);\n return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n }\n\n STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);\n return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, 
identList);\n }\n\n /**\n * Parse remote ident.\n *\n * @return Parsed node\n */\n private STNode parseRemoteIdent() {\n STToken token = peek();\n if (token.kind == SyntaxKind.REMOTE_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.REMOTE_IDENT);\n return parseRemoteIdent();\n }\n }\n\n /**\n * Parse service attach point.\n * service-attach-point := service | service remote function\n *\n * @return Parsed node\n */\n private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {\n STNode identList;\n STToken token = peek();\n switch (token.kind) {\n case REMOTE_KEYWORD:\n STNode secondIdent = parseRemoteIdent();\n STNode thirdIdent = parseFunctionIdent();\n identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);\n return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n case COMMA_TOKEN:\n case SEMICOLON_TOKEN:\n identList = STNodeFactory.createNodeList(firstIdent);\n return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);\n default:\n recover(token, ParserRuleContext.SERVICE_IDENT_RHS);\n return parseServiceAttachPoint(sourceKeyword, firstIdent);\n }\n }\n\n /**\n * Parse the idents that are supported after object-ident.\n *\n * @return Parsed node\n */\n private STNode parseIdentAfterObjectIdent() {\n STToken token = peek();\n switch (token.kind) {\n case FUNCTION_KEYWORD:\n case FIELD_KEYWORD:\n return consume();\n default:\n recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);\n return parseIdentAfterObjectIdent();\n }\n }\n\n /**\n * Parse function ident.\n *\n * @return Parsed node\n */\n private STNode parseFunctionIdent() {\n STToken token = peek();\n if (token.kind == SyntaxKind.FUNCTION_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FUNCTION_IDENT);\n return parseFunctionIdent();\n }\n }\n\n /**\n * Parse field ident.\n *\n * @return Parsed node\n */\n private STNode parseFieldIdent() {\n STToken token 
= peek();\n if (token.kind == SyntaxKind.FIELD_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FIELD_IDENT);\n return parseFieldIdent();\n }\n }\n\n /**\n * Parse XML namespace declaration.\n *

\n * xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;\n *
\n * xml-namespace-uri := simple-const-expr\n *
\n * xml-namespace-prefix := identifier\n *
\n *\n * @return\n */\n private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {\n startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);\n STNode xmlnsKeyword = parseXMLNSKeyword();\n\n STNode namespaceUri = parseSimpleConstExpr();\n while (!isValidXMLNameSpaceURI(namespaceUri)) {\n xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,\n DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);\n namespaceUri = parseSimpleConstExpr();\n }\n\n STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);\n endContext();\n return xmlnsDecl;\n }\n\n /**\n * Parse xmlns keyword.\n *\n * @return Parsed node\n */\n private STNode parseXMLNSKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.XMLNS_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.XMLNS_KEYWORD);\n return parseXMLNSKeyword();\n }\n }\n\n private boolean isValidXMLNameSpaceURI(STNode expr) {\n switch (expr.kind) {\n case STRING_LITERAL:\n case QUALIFIED_NAME_REFERENCE:\n case SIMPLE_NAME_REFERENCE:\n return true;\n case IDENTIFIER_TOKEN:\n default:\n return false;\n }\n }\n\n private STNode parseSimpleConstExpr() {\n startContext(ParserRuleContext.CONSTANT_EXPRESSION);\n STNode expr = parseSimpleConstExprInternal();\n endContext();\n return expr;\n }\n\n /**\n * Parse simple constants expr.\n *\n * @return Parsed node\n */\n private STNode parseSimpleConstExprInternal() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case STRING_LITERAL_TOKEN:\n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\n case HEX_FLOATING_POINT_LITERAL_TOKEN:\n case TRUE_KEYWORD:\n case FALSE_KEYWORD:\n case NULL_KEYWORD:\n return parseBasicLiteral();\n case PLUS_TOKEN:\n case MINUS_TOKEN:\n return parseSignedIntOrFloat();\n case OPEN_PAREN_TOKEN:\n return parseNilLiteral();\n default:\n if (isPredeclaredIdentifier(nextToken.kind)) {\n return 
parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\n }\n \n recover(nextToken, ParserRuleContext.CONSTANT_EXPRESSION_START);\n return parseSimpleConstExprInternal();\n }\n }\n\n /**\n * Parse the portion after the namsepsace-uri of an XML declaration.\n *\n * @param xmlnsKeyword XMLNS keyword\n * @param namespaceUri Namespace URI\n * @return Parsed node\n */\n private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {\n STNode asKeyword = STNodeFactory.createEmptyNode();\n STNode namespacePrefix = STNodeFactory.createEmptyNode();\n\n switch (peek().kind) {\n case AS_KEYWORD:\n asKeyword = parseAsKeyword();\n namespacePrefix = parseNamespacePrefix();\n break;\n case SEMICOLON_TOKEN:\n break;\n default:\n recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL);\n return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);\n }\n\n STNode semicolon = parseSemicolon();\n if (isModuleVar) {\n return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,\n namespacePrefix, semicolon);\n }\n return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,\n semicolon);\n }\n\n /**\n * Parse import prefix.\n *\n * @return Parsed node\n */\n private STNode parseNamespacePrefix() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\n return consume();\n } else {\n recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);\n return parseNamespacePrefix();\n }\n }\n\n /**\n * Parse named worker declaration.\n *

\n * named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }\n * \n *\n * @param annots Annotations attached to the worker decl\n * @param qualifiers Preceding transactional keyword in a list\n * @return Parsed node\n */\n private STNode parseNamedWorkerDeclaration(STNode annots, List qualifiers) {\n startContext(ParserRuleContext.NAMED_WORKER_DECL);\n STNode transactionalKeyword = getTransactionalKeyword(qualifiers);\n STNode workerKeyword = parseWorkerKeyword();\n STNode workerName = parseWorkerName();\n STNode returnTypeDesc = parseReturnTypeDescriptor();\n STNode workerBody = parseBlockNode();\n endContext();\n return STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword, workerName,\n returnTypeDesc, workerBody);\n }\n\n private STNode getTransactionalKeyword(List qualifierList) {\n \n List validatedList = new ArrayList<>();\n\n for (int i = 0; i < qualifierList.size(); i++) {\n STNode qualifier = qualifierList.get(i);\n int nextIndex = i + 1;\n\n if (isSyntaxKindInList(validatedList, qualifier.kind)) {\n updateLastNodeInListWithInvalidNode(validatedList, qualifier,\n DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());\n } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {\n validatedList.add(qualifier);\n } else if (qualifierList.size() == nextIndex) {\n addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,\n ((STToken) qualifier).text());\n } else {\n updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,\n DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());\n }\n }\n\n STNode transactionalKeyword;\n if (validatedList.isEmpty()) {\n transactionalKeyword = STNodeFactory.createEmptyNode();\n } else {\n transactionalKeyword = validatedList.get(0);\n }\n return transactionalKeyword;\n }\n\n private STNode parseReturnTypeDescriptor() {\n \n STToken token = 
peek();\n if (token.kind != SyntaxKind.RETURNS_KEYWORD) {\n return STNodeFactory.createEmptyNode();\n }\n\n STNode returnsKeyword = consume();\n STNode annot = parseOptionalAnnotations();\n STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);\n return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);\n }\n\n /**\n * Parse worker keyword.\n *\n * @return Parsed node\n */\n private STNode parseWorkerKeyword() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {\n return consume();\n } else {\n recover(peek(), ParserRuleContext.WORKER_KEYWORD);\n return parseWorkerKeyword();\n }\n }\n\n /**\n * Parse worker name.\n *

\n * worker-name := identifier\n *\n * @return Parsed node\n */\n private STNode parseWorkerName() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\n return consume();\n } else {\n recover(peek(), ParserRuleContext.WORKER_NAME);\n return parseWorkerName();\n }\n }\n\n /**\n * Parse lock statement.\n * lock-stmt := lock block-stmt [on-fail-clause]\n *\n * @return Lock statement\n */\n private STNode parseLockStatement() {\n startContext(ParserRuleContext.LOCK_STMT);\n STNode lockKeyword = parseLockKeyword();\n STNode blockStatement = parseBlockNode();\n endContext();\n STNode onFailClause = parseOptionalOnFailClause();\n return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause);\n }\n\n /**\n * Parse lock-keyword.\n *\n * @return lock-keyword node\n */\n private STNode parseLockKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.LOCK_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.LOCK_KEYWORD);\n return parseLockKeyword();\n }\n }\n\n /**\n * Parse union type descriptor.\n * union-type-descriptor := type-descriptor | type-descriptor\n *\n * @param leftTypeDesc Type desc in the LHS os the union type desc.\n * @param context Current context.\n * @return parsed union type desc node\n */\n private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\n boolean isTypedBindingPattern) {\n \n STNode pipeToken = consume();\n STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,\n TypePrecedence.UNION);\n return mergeTypesWithUnion(leftTypeDesc, pipeToken, rightTypeDesc);\n }\n\n /**\n * Creates a union type descriptor after validating lhs and rhs types.\n *

\n * Note: Since type precedence and associativity are not taken into account here,\n * this method should not be called directly when types are unknown.\n *
\n * Call {@link \n *\n * @param leftTypeDesc lhs type\n * @param pipeToken pipe token\n * @param rightTypeDesc rhs type\n * @return a UnionTypeDescriptorNode\n */\n private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {\n leftTypeDesc = validateForUsageOfVar(leftTypeDesc);\n rightTypeDesc = validateForUsageOfVar(rightTypeDesc);\n return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);\n }\n\n /**\n * Parse pipe token.\n *\n * @return parsed pipe token node\n */\n private STNode parsePipeToken() {\n STToken token = peek();\n if (token.kind == SyntaxKind.PIPE_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.PIPE);\n return parsePipeToken();\n }\n }\n\n private boolean isTypeStartingToken(SyntaxKind nodeKind) {\n return isTypeStartingToken(nodeKind, getNextNextToken());\n }\n \n private static boolean isTypeStartingToken(SyntaxKind nextTokenKind, STToken nextNextToken) {\n switch (nextTokenKind) {\n case IDENTIFIER_TOKEN:\n case SERVICE_KEYWORD:\n case RECORD_KEYWORD:\n case OBJECT_KEYWORD:\n case ABSTRACT_KEYWORD:\n case CLIENT_KEYWORD:\n case OPEN_PAREN_TOKEN: \n case MAP_KEYWORD: \n case STREAM_KEYWORD: \n case TABLE_KEYWORD: \n case FUNCTION_KEYWORD:\n case OPEN_BRACKET_TOKEN:\n case DISTINCT_KEYWORD:\n case ISOLATED_KEYWORD:\n case TRANSACTIONAL_KEYWORD:\n case TRANSACTION_KEYWORD:\n return true;\n default:\n if (isParameterizedTypeToken(nextTokenKind)) {\n return true;\n }\n \n if (isSingletonTypeDescStart(nextTokenKind, nextNextToken)) {\n return true;\n }\n return isSimpleType(nextTokenKind);\n }\n }\n\n /**\n * Check if the token kind is a type descriptor in terminal expression.\n *

\n * simple-type-in-expr :=\n * boolean | int | byte | float | decimal | string | handle | json | anydata | any | never\n *\n * @param nodeKind token kind to check\n * @return true for simple type token in expression. false otherwise.\n */\n private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {\n switch (nodeKind) {\n case VAR_KEYWORD:\n case READONLY_KEYWORD:\n return false;\n default:\n return isSimpleType(nodeKind);\n }\n }\n\n static boolean isSimpleType(SyntaxKind nodeKind) {\n switch (nodeKind) {\n case INT_KEYWORD:\n case FLOAT_KEYWORD:\n case DECIMAL_KEYWORD:\n case BOOLEAN_KEYWORD:\n case STRING_KEYWORD:\n case BYTE_KEYWORD:\n case JSON_KEYWORD:\n case HANDLE_KEYWORD:\n case ANY_KEYWORD:\n case ANYDATA_KEYWORD:\n case NEVER_KEYWORD:\n case VAR_KEYWORD:\n case READONLY_KEYWORD:\n return true;\n default:\n return false;\n }\n }\n\n static boolean isPredeclaredPrefix(SyntaxKind nodeKind) {\n switch (nodeKind) {\n case BOOLEAN_KEYWORD:\n case DECIMAL_KEYWORD:\n case ERROR_KEYWORD:\n case FLOAT_KEYWORD:\n case FUTURE_KEYWORD:\n case INT_KEYWORD:\n case MAP_KEYWORD:\n case OBJECT_KEYWORD:\n case STREAM_KEYWORD:\n case STRING_KEYWORD:\n case TABLE_KEYWORD:\n case TRANSACTION_KEYWORD:\n case TYPEDESC_KEYWORD:\n case XML_KEYWORD:\n return true;\n default:\n return false;\n }\n }\n\n private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {\n return isPredeclaredPrefix(nodeKind) && getNextNextToken().kind == SyntaxKind.COLON_TOKEN;\n }\n\n private static SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) {\n switch (typeKeyword) {\n case INT_KEYWORD:\n return SyntaxKind.INT_TYPE_DESC;\n case FLOAT_KEYWORD:\n return SyntaxKind.FLOAT_TYPE_DESC;\n case DECIMAL_KEYWORD:\n return SyntaxKind.DECIMAL_TYPE_DESC;\n case BOOLEAN_KEYWORD:\n return SyntaxKind.BOOLEAN_TYPE_DESC;\n case STRING_KEYWORD:\n return SyntaxKind.STRING_TYPE_DESC;\n case BYTE_KEYWORD:\n return SyntaxKind.BYTE_TYPE_DESC;\n case JSON_KEYWORD:\n return 
SyntaxKind.JSON_TYPE_DESC;\n case HANDLE_KEYWORD:\n return SyntaxKind.HANDLE_TYPE_DESC;\n case ANY_KEYWORD:\n return SyntaxKind.ANY_TYPE_DESC;\n case ANYDATA_KEYWORD:\n return SyntaxKind.ANYDATA_TYPE_DESC;\n case NEVER_KEYWORD:\n return SyntaxKind.NEVER_TYPE_DESC;\n case VAR_KEYWORD:\n return SyntaxKind.VAR_TYPE_DESC;\n case READONLY_KEYWORD:\n return SyntaxKind.READONLY_TYPE_DESC;\n default:\n assert false : typeKeyword + \" is not a built-in type\";\n return SyntaxKind.TYPE_REFERENCE;\n }\n }\n\n /**\n * Parse fork-keyword.\n *\n * @return Fork-keyword node\n */\n private STNode parseForkKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.FORK_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FORK_KEYWORD);\n return parseForkKeyword();\n }\n }\n\n /**\n * Parse fork statement.\n * fork-stmt := fork { named-worker-decl+ }\n *\n * @return Fork statement\n */\n private STNode parseForkStatement() {\n startContext(ParserRuleContext.FORK_STMT);\n STNode forkKeyword = parseForkKeyword();\n STNode openBrace = parseOpenBrace();\n\n \n ArrayList workers = new ArrayList<>();\n while (!isEndOfStatements()) {\n STNode stmt = parseStatement();\n if (stmt == null) {\n break;\n }\n\n if (validateStatement(stmt)) {\n continue;\n }\n\n switch (stmt.kind) {\n case NAMED_WORKER_DECLARATION:\n workers.add(stmt);\n break;\n default:\n \n \n \n if (workers.isEmpty()) {\n openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,\n DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n } else {\n updateLastNodeInListWithInvalidNode(workers, stmt,\n DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);\n }\n }\n }\n\n STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);\n STNode closeBrace = parseCloseBrace();\n endContext();\n\n STNode forkStmt =\n STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);\n if 
(isNodeListEmpty(namedWorkerDeclarations)) {\n return SyntaxErrors.addDiagnostic(forkStmt,\n DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);\n }\n\n return forkStmt;\n }\n\n /**\n * Parse trap expression.\n *

\n * \n * trap-expr := trap expression\n * \n *\n * @param allowActions Allow actions\n * @param isRhsExpr Whether this is a RHS expression or not\n * @return Trap expression node\n */\n private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\n STNode trapKeyword = parseTrapKeyword();\n STNode expr = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr);\n if (isAction(expr)) {\n return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);\n }\n\n return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);\n }\n\n /**\n * Parse trap-keyword.\n *\n * @return Trap-keyword node\n */\n private STNode parseTrapKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.TRAP_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.TRAP_KEYWORD);\n return parseTrapKeyword();\n }\n }\n\n /**\n * Parse list constructor expression.\n *

\n * \n * list-constructor-expr := [ [ list-members ] ]\n *
\n * list-members := list-member (, list-member)*\n *
\n * list-member := expression | spread-member\n *
\n * spread-member := ... expression\n *
\n *\n * @return Parsed node\n */\n private STNode parseListConstructorExpr() {\n startContext(ParserRuleContext.LIST_CONSTRUCTOR);\n STNode openBracket = parseOpenBracket();\n STNode listMembers = parseListMembers();\n STNode closeBracket = parseCloseBracket();\n endContext();\n return STNodeFactory.createListConstructorExpressionNode(openBracket, listMembers, closeBracket);\n }\n\n /**\n * Parse optional list member list.\n *\n * @return Parsed node\n */\n private STNode parseListMembers() {\n List listMembers = new ArrayList<>();\n if (isEndOfListConstructor(peek().kind)) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n STNode listMember = parseListMember();\n listMembers.add(listMember);\n return parseListMembers(listMembers);\n }\n\n private STNode parseListMembers(List listMembers) {\n \n STNode listConstructorMemberEnd;\n while (!isEndOfListConstructor(peek().kind)) {\n listConstructorMemberEnd = parseListConstructorMemberEnd();\n if (listConstructorMemberEnd == null) {\n break;\n }\n listMembers.add(listConstructorMemberEnd);\n\n STNode listMember = parseListMember();\n listMembers.add(listMember);\n }\n\n return STNodeFactory.createNodeList(listMembers);\n }\n\n /**\n * Parse list member.\n *

\n * \n * list-member := expression | spread-member\n * \n *\n * @return Parsed node\n */\n private STNode parseListMember() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {\n return parseSpreadMember();\n } else {\n return parseExpression();\n }\n }\n\n /**\n * Parse spread member.\n *

\n * \n * spread-member := ... expression\n * \n *\n * @return Parsed node\n */\n private STNode parseSpreadMember() {\n STNode ellipsis = parseEllipsis();\n STNode expr = parseExpression();\n return STNodeFactory.createSpreadMemberNode(ellipsis, expr);\n }\n\n private boolean isEndOfListConstructor(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case EOF_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n private STNode parseListConstructorMemberEnd() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case COMMA_TOKEN:\n return consume();\n case CLOSE_BRACKET_TOKEN:\n return null;\n default:\n recover(nextToken, ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);\n return parseListConstructorMemberEnd();\n }\n }\n\n /**\n * Parse foreach statement.\n * foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]\n *\n * @return foreach statement\n */\n private STNode parseForEachStatement() {\n startContext(ParserRuleContext.FOREACH_STMT);\n STNode forEachKeyword = parseForEachKeyword();\n STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);\n STNode inKeyword = parseInKeyword();\n STNode actionOrExpr = parseActionOrExpression();\n STNode blockStatement = parseBlockNode();\n endContext();\n STNode onFailClause = parseOptionalOnFailClause();\n return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,\n blockStatement, onFailClause);\n }\n\n /**\n * Parse foreach-keyword.\n *\n * @return ForEach-keyword node\n */\n private STNode parseForEachKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.FOREACH_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FOREACH_KEYWORD);\n return parseForEachKeyword();\n }\n }\n\n /**\n * Parse in-keyword.\n *\n * @return In-keyword node\n */\n private STNode parseInKeyword() {\n STToken token = peek();\n if (token.kind == 
SyntaxKind.IN_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.IN_KEYWORD);\n return parseInKeyword();\n }\n }\n\n /**\n * Parse type cast expression.\n *

\n * \n * type-cast-expr := < type-cast-param > expression\n *
\n * type-cast-param := [annots] type-descriptor | annots\n *
\n *\n * @return Parsed node\n */\n private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {\n startContext(ParserRuleContext.TYPE_CAST);\n STNode ltToken = parseLTToken();\n return parseTypeCastExpr(ltToken, isRhsExpr, allowActions, isInConditionalExpr);\n }\n\n private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions,\n boolean isInConditionalExpr) {\n STNode typeCastParam = parseTypeCastParam();\n STNode gtToken = parseGTToken();\n endContext();\n\n \n \n STNode expression =\n parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);\n return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);\n }\n\n private STNode parseTypeCastParam() {\n STNode annot;\n STNode type;\n STToken token = peek();\n\n switch (token.kind) {\n case AT_TOKEN:\n annot = parseOptionalAnnotations();\n token = peek();\n if (isTypeStartingToken(token.kind)) {\n type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n } else {\n type = STNodeFactory.createEmptyNode();\n }\n break;\n default:\n annot = STNodeFactory.createEmptyNode();\n type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n break;\n }\n\n return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);\n }\n\n /**\n * Parse table constructor expression.\n *

\n * \n * table-constructor-expr-rhs := [ [row-list] ]\n * \n *\n * @param tableKeyword tableKeyword that precedes this rhs\n * @param keySpecifier keySpecifier that precedes this rhs\n * @return Parsed node\n */\n private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {\n switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);\n STNode openBracket = parseOpenBracket();\n STNode rowList = parseRowList();\n STNode closeBracket = parseCloseBracket();\n return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,\n closeBracket);\n }\n\n /**\n * Parse table-keyword.\n *\n * @return Table-keyword node\n */\n private STNode parseTableKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.TABLE_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.TABLE_KEYWORD);\n return parseTableKeyword();\n }\n }\n\n /**\n * Parse table rows.\n *

\n * row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]\n *\n * @return Parsed node\n */\n private STNode parseRowList() {\n STToken nextToken = peek();\n \n if (isEndOfTableRowList(nextToken.kind)) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n List mappings = new ArrayList<>();\n STNode mapExpr = parseMappingConstructorExpr();\n mappings.add(mapExpr);\n\n \n nextToken = peek();\n STNode rowEnd;\n while (!isEndOfTableRowList(nextToken.kind)) {\n rowEnd = parseTableRowEnd();\n if (rowEnd == null) {\n break;\n }\n\n mappings.add(rowEnd);\n mapExpr = parseMappingConstructorExpr();\n mappings.add(mapExpr);\n nextToken = peek();\n }\n\n return STNodeFactory.createNodeList(mappings);\n }\n\n private boolean isEndOfTableRowList(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case EOF_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n return true;\n case COMMA_TOKEN:\n case OPEN_BRACE_TOKEN:\n return false;\n default:\n return isEndOfMappingConstructor(tokenKind);\n }\n }\n\n private STNode parseTableRowEnd() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_BRACKET_TOKEN:\n case EOF_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.TABLE_ROW_END);\n return parseTableRowEnd();\n }\n }\n\n /**\n * Parse key specifier.\n *

\n * key-specifier := key ( [ field-name (, field-name)* ] )\n *\n * @return Parsed node\n */\n private STNode parseKeySpecifier() {\n startContext(ParserRuleContext.KEY_SPECIFIER);\n STNode keyKeyword = parseKeyKeyword();\n STNode openParen = parseOpenParenthesis();\n STNode fieldNames = parseFieldNames();\n STNode closeParen = parseCloseParenthesis();\n endContext();\n return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);\n }\n\n /**\n * Parse key-keyword.\n *\n * @return Key-keyword node\n */\n private STNode parseKeyKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.KEY_KEYWORD) {\n return consume();\n }\n\n if (isKeyKeyword(token)) {\n \n return getKeyKeyword(consume());\n }\n\n recover(token, ParserRuleContext.KEY_KEYWORD);\n return parseKeyKeyword();\n }\n\n static boolean isKeyKeyword(STToken token) {\n return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());\n }\n\n private STNode getKeyKeyword(STToken token) {\n return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),\n token.diagnostics());\n }\n\n private STToken getUnderscoreKeyword(STToken token) {\n return STNodeFactory.createToken(SyntaxKind.UNDERSCORE_KEYWORD, token.leadingMinutiae(),\n token.trailingMinutiae(), token.diagnostics());\n }\n\n /**\n * Parse field names.\n *

\n * field-name-list := [ field-name (, field-name)* ]\n *\n * @return Parsed node\n */\n private STNode parseFieldNames() {\n STToken nextToken = peek();\n \n if (isEndOfFieldNamesList(nextToken.kind)) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n List fieldNames = new ArrayList<>();\n STNode fieldName = parseVariableName();\n fieldNames.add(fieldName);\n\n \n nextToken = peek();\n STNode leadingComma;\n while (!isEndOfFieldNamesList(nextToken.kind)) {\n leadingComma = parseComma();\n fieldNames.add(leadingComma);\n fieldName = parseVariableName();\n fieldNames.add(fieldName);\n nextToken = peek();\n }\n\n return STNodeFactory.createNodeList(fieldNames);\n }\n\n private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case COMMA_TOKEN:\n case IDENTIFIER_TOKEN:\n return false;\n default:\n return true;\n }\n }\n\n /**\n * Parse error-keyword.\n *\n * @return Parsed error-keyword node\n */\n private STNode parseErrorKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.ERROR_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.ERROR_KEYWORD);\n return parseErrorKeyword();\n }\n }\n\n /**\n * Parse stream type descriptor.\n *

\n * stream-type-descriptor := stream [stream-type-parameters]\n *
\n * stream-type-parameters := < type-descriptor [, type-descriptor]>\n *

\n *\n * @return Parsed stream type descriptor node\n */\n private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {\n STNode streamTypeParamsNode;\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.LT_TOKEN) {\n streamTypeParamsNode = parseStreamTypeParamsNode();\n } else {\n streamTypeParamsNode = STNodeFactory.createEmptyNode();\n }\n return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);\n }\n\n /**\n * Parse stream type params node.\n *

\n * stream-type-parameters := < type-descriptor [, type-descriptor]>\n *

\n *\n * @return Parsed stream type params node\n */\n private STNode parseStreamTypeParamsNode() {\n STNode ltToken = parseLTToken();\n startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\n STNode leftTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\n STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\n endContext();\n return streamTypedesc;\n }\n\n private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {\n STNode commaToken, rightTypeDescNode, gtToken;\n switch (peek().kind) {\n case COMMA_TOKEN:\n commaToken = parseComma();\n rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);\n break;\n case GT_TOKEN:\n commaToken = STNodeFactory.createEmptyNode();\n rightTypeDescNode = STNodeFactory.createEmptyNode();\n break;\n default:\n recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS);\n return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);\n }\n\n gtToken = parseGTToken();\n return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,\n gtToken);\n }\n\n /**\n * Parse stream-keyword.\n *\n * @return Parsed stream-keyword node\n */\n private STNode parseStreamKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.STREAM_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.STREAM_KEYWORD);\n return parseStreamKeyword();\n }\n }\n\n /**\n * Parse let expression.\n *

\n * \n * let-expr := let let-var-decl [, let-var-decl]* in expression\n * \n *\n * @return Parsed node\n */\n private STNode parseLetExpression(boolean isRhsExpr, boolean isInConditionalExpr) {\n STNode letKeyword = parseLetKeyword();\n STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);\n STNode inKeyword = parseInKeyword();\n\n \n letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\n DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false, isInConditionalExpr);\n return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);\n }\n\n /**\n * Parse let-keyword.\n *\n * @return Let-keyword node\n */\n private STNode parseLetKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.LET_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.LET_KEYWORD);\n return parseLetKeyword();\n }\n }\n\n /**\n * Parse let variable declarations.\n *

\n * let-var-decl-list := let-var-decl [, let-var-decl]*\n *\n * @return Parsed node\n */\n private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {\n startContext(context);\n List varDecls = new ArrayList<>();\n STToken nextToken = peek();\n\n if (isEndOfLetVarDeclarations(nextToken.kind, getNextNextToken())) {\n endContext();\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n STNode varDec = parseLetVarDecl(isRhsExpr);\n varDecls.add(varDec);\n\n \n nextToken = peek();\n STNode leadingComma;\n while (!isEndOfLetVarDeclarations(nextToken.kind, getNextNextToken())) {\n leadingComma = parseComma();\n varDecls.add(leadingComma);\n varDec = parseLetVarDecl(isRhsExpr);\n varDecls.add(varDec);\n nextToken = peek();\n }\n\n endContext();\n return STNodeFactory.createNodeList(varDecls);\n }\n\n static boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind, STToken nextNextToken) {\n switch (tokenKind) {\n case COMMA_TOKEN:\n case AT_TOKEN:\n return false;\n case IN_KEYWORD:\n return true;\n default:\n return !isTypeStartingToken(tokenKind, nextNextToken);\n }\n }\n\n /**\n * Parse let variable declaration.\n *

\n * let-var-decl := [annots] typed-binding-pattern = expression\n *\n * @return Parsed node\n */\n private STNode parseLetVarDecl(boolean isRhsExpr) {\n STNode annot = parseOptionalAnnotations();\n STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);\n STNode assign = parseAssignOp();\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);\n return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);\n }\n\n /**\n * Parse raw backtick string template expression.\n *

\n * BacktickString := `expression`\n *\n * @return Template expression node\n */\n private STNode parseTemplateExpression() {\n STNode type = STNodeFactory.createEmptyNode();\n STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\n STNode content = parseTemplateContent();\n STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\n return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,\n content, endingBackTick);\n }\n\n private STNode parseTemplateContent() {\n List items = new ArrayList<>();\n STToken nextToken = peek();\n while (!isEndOfBacktickContent(nextToken.kind)) {\n STNode contentItem = parseTemplateItem();\n items.add(contentItem);\n nextToken = peek();\n }\n return STNodeFactory.createNodeList(items);\n }\n\n private boolean isEndOfBacktickContent(SyntaxKind kind) {\n switch (kind) {\n case EOF_TOKEN:\n case BACKTICK_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n private STNode parseTemplateItem() {\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\n return parseInterpolation();\n }\n\n \n return consume();\n }\n\n /**\n * Parse string template expression.\n *

\n * string-template-expr := string ` expression `\n *\n * @return String template expression node\n */\n private STNode parseStringTemplateExpression() {\n STNode type = parseStringKeyword();\n STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\n STNode content = parseTemplateContent();\n STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\n return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,\n content, endingBackTick);\n }\n\n /**\n * Parse string keyword.\n *\n * @return string keyword node\n */\n private STNode parseStringKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.STRING_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.STRING_KEYWORD);\n return parseStringKeyword();\n }\n }\n\n /**\n * Parse XML template expression.\n *

\n * xml-template-expr := xml BacktickString\n *\n * @return XML template expression\n */\n private STNode parseXMLTemplateExpression() {\n STNode xmlKeyword = parseXMLKeyword();\n STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);\n\n STNode content;\n STNode endingBackTick;\n if (startingBackTick.isMissing()) {\n \n startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);\n endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);\n content = STNodeFactory.createEmptyNodeList();\n STNode templateExpr = STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION,\n xmlKeyword, startingBackTick, content, endingBackTick);\n templateExpr = SyntaxErrors.addDiagnostic(templateExpr, DiagnosticErrorCode.ERROR_MISSING_BACKTICK_STRING);\n return templateExpr;\n }\n\n content = parseTemplateContentAsXML();\n endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);\n return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,\n startingBackTick, content, endingBackTick);\n }\n\n /**\n * Parse xml keyword.\n *\n * @return xml keyword node\n */\n private STNode parseXMLKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.XML_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.XML_KEYWORD);\n return parseXMLKeyword();\n }\n }\n\n /**\n * Parse the content of the template string as XML. This method first read the\n * input in the same way as the raw-backtick-template (BacktickString). 
Then\n * it parses the content as XML.\n *\n * @return XML node\n */\n private STNode parseTemplateContentAsXML() {\n \n \n \n ArrayDeque expressions = new ArrayDeque<>();\n StringBuilder xmlStringBuilder = new StringBuilder();\n STToken nextToken = peek();\n while (!isEndOfBacktickContent(nextToken.kind)) {\n STNode contentItem = parseTemplateItem();\n if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {\n xmlStringBuilder.append(((STToken) contentItem).text());\n } else {\n xmlStringBuilder.append(\"${}\");\n expressions.add(contentItem);\n }\n nextToken = peek();\n }\n\n CharReader charReader = CharReader.from(xmlStringBuilder.toString());\n AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));\n XMLParser xmlParser = new XMLParser(tokenReader, expressions);\n return xmlParser.parse();\n }\n\n /**\n * Parse interpolation of a back-tick string.\n *

\n * \n * interpolation := ${ expression }\n * \n *\n * @return Interpolation node\n */\n private STNode parseInterpolation() {\n startContext(ParserRuleContext.INTERPOLATION);\n STNode interpolStart = parseInterpolationStart();\n STNode expr = parseExpression();\n\n \n while (!isEndOfInterpolation()) {\n STToken nextToken = consume();\n expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,\n DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());\n }\n\n STNode closeBrace = parseCloseBrace();\n endContext();\n return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);\n }\n\n private boolean isEndOfInterpolation() {\n SyntaxKind nextTokenKind = peek().kind;\n switch (nextTokenKind) {\n case EOF_TOKEN:\n case BACKTICK_TOKEN:\n return true;\n default:\n \n ParserMode currentLexerMode = this.tokenReader.getCurrentMode();\n return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN && currentLexerMode != ParserMode.INTERPOLATION &&\n currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;\n }\n }\n\n /**\n * Parse interpolation start token.\n *

\n * interpolation-start := ${\n *\n * @return Interpolation start token\n */\n private STNode parseInterpolationStart() {\n STToken token = peek();\n if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);\n return parseInterpolationStart();\n }\n }\n\n /**\n * Parse back-tick token.\n *\n * @return Back-tick token\n */\n private STNode parseBacktickToken(ParserRuleContext ctx) {\n STToken token = peek();\n if (token.kind == SyntaxKind.BACKTICK_TOKEN) {\n return consume();\n } else {\n recover(token, ctx);\n return parseBacktickToken(ctx);\n }\n }\n\n /**\n * Parse table type descriptor.\n *

\n * table-type-descriptor := table row-type-parameter [key-constraint]\n * row-type-parameter := type-parameter\n * key-constraint := key-specifier | key-type-constraint\n * key-specifier := key ( [ field-name (, field-name)* ] )\n * key-type-constraint := key type-parameter\n *

\n *\n * @return Parsed table type desc node.\n */\n private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {\n STNode rowTypeParameterNode = parseRowTypeParameter();\n STNode keyConstraintNode;\n STToken nextToken = peek();\n if (isKeyKeyword(nextToken)) {\n STNode keyKeywordToken = getKeyKeyword(consume());\n keyConstraintNode = parseKeyConstraint(keyKeywordToken);\n } else {\n keyConstraintNode = STNodeFactory.createEmptyNode();\n }\n return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);\n }\n\n /**\n * Parse row type parameter node.\n *

\n * row-type-parameter := type-parameter\n *

\n *\n * @return Parsed node.\n */\n private STNode parseRowTypeParameter() {\n startContext(ParserRuleContext.ROW_TYPE_PARAM);\n STNode rowTypeParameterNode = parseTypeParameter();\n endContext();\n return rowTypeParameterNode;\n }\n\n /**\n * Parse type parameter node.\n *

\n * type-parameter := < type-descriptor >\n *

\n *\n * @return Parsed node\n */\n private STNode parseTypeParameter() {\n STNode ltToken = parseLTToken();\n STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);\n STNode gtToken = parseGTToken();\n return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);\n }\n\n /**\n * Parse key constraint.\n *

\n * key-constraint := key-specifier | key-type-constraint\n *

\n *\n * @return Parsed node.\n */\n private STNode parseKeyConstraint(STNode keyKeywordToken) {\n switch (peek().kind) {\n case OPEN_PAREN_TOKEN:\n return parseKeySpecifier(keyKeywordToken);\n case LT_TOKEN:\n return parseKeyTypeConstraint(keyKeywordToken);\n default:\n recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS);\n return parseKeyConstraint(keyKeywordToken);\n }\n }\n\n /**\n * Parse key specifier given parsed key keyword token.\n *

\n * key-specifier := key ( [ field-name (, field-name)* ] )\n *\n * @return Parsed node\n */\n private STNode parseKeySpecifier(STNode keyKeywordToken) {\n startContext(ParserRuleContext.KEY_SPECIFIER);\n STNode openParenToken = parseOpenParenthesis();\n STNode fieldNamesNode = parseFieldNames();\n STNode closeParenToken = parseCloseParenthesis();\n endContext();\n return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);\n }\n\n /**\n * Parse key type constraint.\n *

\n * key-type-constraint := key type-parameter\n *

\n *\n * @return Parsed node\n */\n private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {\n STNode typeParameterNode = parseTypeParameter();\n return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);\n }\n\n /**\n * Parse function type descriptor.\n *

\n * \n * function-type-descriptor := function-quals function function-signature \n *
 | [isolated] function\n *
\n * function-quals := (transactional | isolated)*\n *
\n *\n * @param qualifiers Preceding type descriptor qualifiers\n * @return Function type descriptor node\n */\n private STNode parseFunctionTypeDesc(List qualifiers) {\n startContext(ParserRuleContext.FUNC_TYPE_DESC);\n STNode functionKeyword = parseFunctionKeyword();\n\n boolean hasFuncSignature = false;\n STNode signature = STNodeFactory.createEmptyNode();\n if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN ||\n isSyntaxKindInList(qualifiers, SyntaxKind.TRANSACTIONAL_KEYWORD)) {\n signature = parseFuncSignature(true);\n hasFuncSignature = true;\n }\n\n \n STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, hasFuncSignature);\n STNode qualifierList = nodes[0];\n functionKeyword = nodes[1];\n\n endContext();\n return STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);\n }\n \n private STNode getLastNodeInList(List nodeList) {\n return nodeList.get(nodeList.size() - 1);\n }\n\n private STNode[] createFuncTypeQualNodeList(List qualifierList, STNode functionKeyword,\n boolean hasFuncSignature) {\n \n List validatedList = new ArrayList<>();\n\n for (int i = 0; i < qualifierList.size(); i++) {\n STNode qualifier = qualifierList.get(i);\n int nextIndex = i + 1;\n\n if (isSyntaxKindInList(validatedList, qualifier.kind)) {\n updateLastNodeInListWithInvalidNode(validatedList, qualifier,\n DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());\n } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {\n validatedList.add(qualifier);\n } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {\n validatedList.add(qualifier);\n } else if (qualifierList.size() == nextIndex) {\n functionKeyword = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(functionKeyword, qualifier,\n DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());\n } else {\n updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,\n 
DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());\n }\n }\n\n STNode nodeList = STNodeFactory.createNodeList(validatedList);\n return new STNode[]{ nodeList, functionKeyword };\n }\n\n private boolean isRegularFuncQual(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case ISOLATED_KEYWORD:\n case TRANSACTIONAL_KEYWORD:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse explicit anonymous function expression.\n *

\n * explicit-anonymous-function-expr :=\n * [annots] (isolated| transactional) function function-signature anon-func-body\n *\n * @param annots Annotations.\n * @param qualifiers Function qualifiers\n * @param isRhsExpr Is expression in rhs context\n * @return Anonymous function expression node\n */\n private STNode parseExplicitFunctionExpression(STNode annots, List qualifiers, boolean isRhsExpr) {\n startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\n STNode funcKeyword = parseFunctionKeyword();\n\n \n STNode[] nodes = createFuncTypeQualNodeList(qualifiers, funcKeyword, true);\n STNode qualifierList = nodes[0];\n funcKeyword = nodes[1];\n\n STNode funcSignature = parseFuncSignature(false);\n \n STNode funcBody = parseAnonFuncBody(isRhsExpr);\n return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword,\n funcSignature, funcBody);\n }\n\n /**\n * Parse anonymous function body.\n *

\n * anon-func-body := block-function-body | expr-function-body\n *\n * @param isRhsExpr Is expression in rhs context\n * @return Anon function body node\n */\n private STNode parseAnonFuncBody(boolean isRhsExpr) {\n switch (peek().kind) {\n case OPEN_BRACE_TOKEN:\n case EOF_TOKEN:\n STNode body = parseFunctionBodyBlock(true);\n endContext();\n return body;\n case RIGHT_DOUBLE_ARROW_TOKEN:\n \n \n endContext();\n return parseExpressionFuncBody(true, isRhsExpr);\n default:\n recover(peek(), ParserRuleContext.ANON_FUNC_BODY);\n return parseAnonFuncBody(isRhsExpr);\n }\n }\n\n /**\n * Parse expression function body.\n *

\n * expr-function-body := => expression\n *\n * @param isAnon Is anonymous function.\n * @param isRhsExpr Is expression in rhs context\n * @return Expression function body node\n */\n private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {\n STNode rightDoubleArrow = parseDoubleRightArrow();\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n\n STNode semiColon;\n if (isAnon) {\n semiColon = STNodeFactory.createEmptyNode();\n } else {\n semiColon = parseSemicolon();\n }\n return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);\n }\n\n /**\n * Parse '=>' token.\n *\n * @return Double right arrow token\n */\n private STNode parseDoubleRightArrow() {\n STToken token = peek();\n if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);\n return parseDoubleRightArrow();\n }\n }\n\n private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {\n switch (params.kind) {\n case SIMPLE_NAME_REFERENCE:\n case INFER_PARAM_LIST:\n break;\n case BRACED_EXPRESSION:\n params = getAnonFuncParam((STBracedExpressionNode) params);\n break;\n case NIL_LITERAL:\n STNilLiteralNode nilLiteralNode = (STNilLiteralNode) params;\n params = STNodeFactory.createImplicitAnonymousFunctionParameters(nilLiteralNode.openParenToken,\n STNodeFactory.createNodeList(new ArrayList<>()), nilLiteralNode.closeParenToken);\n break;\n default:\n STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,\n DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);\n params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);\n }\n\n STNode rightDoubleArrow = parseDoubleRightArrow();\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, 
false);\n return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);\n }\n\n /**\n * Create a new anon-func-param node from a braced expression.\n *\n * @param bracedExpression Braced expression\n * @return Anon-func param node\n */\n private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) {\n List paramList = new ArrayList<>();\n STNode innerExpression = bracedExpression.expression;\n STNode openParen = bracedExpression.openParen;\n if (innerExpression.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\n paramList.add(innerExpression);\n } else {\n openParen = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParen, innerExpression,\n DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);\n }\n return STNodeFactory.createImplicitAnonymousFunctionParameters(openParen,\n STNodeFactory.createNodeList(paramList), bracedExpression.closeParen);\n }\n\n /**\n * Parse implicit anon function expression.\n *\n * @param openParen Open parenthesis token\n * @param firstParam First parameter\n * @param isRhsExpr Is expression in rhs context\n * @return Implicit anon function expression node\n */\n private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {\n List paramList = new ArrayList<>();\n paramList.add(firstParam);\n\n \n STToken nextToken = peek();\n STNode paramEnd;\n STNode param;\n while (!isEndOfAnonFuncParametersList(nextToken.kind)) {\n paramEnd = parseImplicitAnonFuncParamEnd();\n if (paramEnd == null) {\n break;\n }\n\n paramList.add(paramEnd);\n param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);\n param = STNodeFactory.createSimpleNameReferenceNode(param);\n paramList.add(param);\n nextToken = peek();\n }\n\n STNode params = STNodeFactory.createNodeList(paramList);\n STNode closeParen = parseCloseParenthesis();\n endContext(); \n\n STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, 
params, closeParen);\n return parseImplicitAnonFunc(inferedParams, isRhsExpr);\n }\n\n private STNode parseImplicitAnonFuncParamEnd() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_PAREN_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);\n return parseImplicitAnonFuncParamEnd();\n }\n }\n\n private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case EOF_TOKEN:\n case CLOSE_BRACE_TOKEN:\n case CLOSE_PAREN_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n case SEMICOLON_TOKEN:\n case RETURNS_KEYWORD:\n case TYPE_KEYWORD:\n case LISTENER_KEYWORD:\n case IF_KEYWORD:\n case WHILE_KEYWORD:\n case DO_KEYWORD:\n case OPEN_BRACE_TOKEN:\n case RIGHT_DOUBLE_ARROW_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse tuple type descriptor.\n *

\n * tuple-type-descriptor := [ tuple-member-type-descriptors ]\n *

\n * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]\n * | [ tuple-rest-descriptor ]\n *

\n * member-type-descriptor := [annots] type-descriptor\n * tuple-rest-descriptor := type-descriptor ...\n *
\n *\n * @return\n */\n private STNode parseTupleTypeDesc() {\n STNode openBracket = parseOpenBracket();\n startContext(ParserRuleContext.TUPLE_MEMBERS);\n STNode memberTypeDesc = parseTupleMemberTypeDescList();\n STNode closeBracket = parseCloseBracket();\n endContext();\n\n \n openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,\n DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);\n\n return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);\n }\n\n /**\n * Parse tuple member type descriptors.\n *\n * @return Parsed node\n */\n private STNode parseTupleMemberTypeDescList() {\n List typeDescList = new ArrayList<>();\n STToken nextToken = peek();\n\n \n if (isEndOfTypeList(nextToken.kind)) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n STNode typeDesc = parseTupleMember();\n return parseTupleTypeMembers(typeDesc, typeDescList);\n }\n\n private STNode parseTupleTypeMembers(STNode firstMember, List memberList) {\n STNode tupleMemberRhs;\n \n while (!isEndOfTypeList(peek().kind)) {\n if (firstMember.kind == SyntaxKind.REST_TYPE) {\n firstMember = invalidateTypeDescAfterRestDesc(firstMember);\n break;\n }\n\n tupleMemberRhs = parseTupleMemberRhs();\n if (tupleMemberRhs == null) {\n break;\n }\n memberList.add(firstMember);\n memberList.add(tupleMemberRhs);\n firstMember = parseTupleMember();\n }\n\n memberList.add(firstMember);\n return STNodeFactory.createNodeList(memberList);\n }\n\n private STNode parseTupleMember() {\n STNode annot = parseOptionalAnnotations();\n STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n return createMemberOrRestNode(annot, typeDesc);\n }\n\n private STNode createMemberOrRestNode(STNode annot, STNode typeDesc) {\n STNode tupleMemberRhs = parseTypeDescInTupleRhs();\n if (tupleMemberRhs != null) {\n if (!((STNodeList) annot).isEmpty()) {\n typeDesc = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(typeDesc, annot,\n 
DiagnosticErrorCode.ERROR_ANNOTATIONS_NOT_ALLOWED_FOR_TUPLE_REST_DESCRIPTOR);\n }\n return STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);\n }\n\n return STNodeFactory.createMemberTypeDescriptorNode(annot, typeDesc);\n }\n\n private STNode invalidateTypeDescAfterRestDesc(STNode restDescriptor) {\n while (!isEndOfTypeList(peek().kind)) {\n STNode tupleMemberRhs = parseTupleMemberRhs();\n if (tupleMemberRhs == null) {\n break;\n }\n\n restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, tupleMemberRhs, null);\n restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, parseTupleMember(),\n DiagnosticErrorCode.ERROR_TYPE_DESC_AFTER_REST_DESCRIPTOR);\n }\n\n return restDescriptor;\n }\n\n private STNode parseTupleMemberRhs() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_BRACKET_TOKEN:\n return null;\n default:\n recover(nextToken, ParserRuleContext.TUPLE_TYPE_MEMBER_RHS);\n return parseTupleMemberRhs();\n }\n }\n\n private STNode parseTypeDescInTupleRhs() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case COMMA_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n return null;\n case ELLIPSIS_TOKEN:\n return parseEllipsis();\n default:\n recover(nextToken, ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);\n return parseTypeDescInTupleRhs();\n }\n }\n\n private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {\n switch (nextTokenKind) {\n case CLOSE_BRACKET_TOKEN:\n case CLOSE_BRACE_TOKEN:\n case CLOSE_PAREN_TOKEN:\n case EOF_TOKEN:\n case EQUAL_TOKEN:\n case SEMICOLON_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse table constructor or query expression.\n *

\n * \n * table-constructor-or-query-expr := table-constructor-expr | query-expr\n *
\n * table-constructor-expr := table [key-specifier] [ [row-list] ]\n *
\n * query-expr := [query-construct-type] query-pipeline select-clause\n * [query-construct-type] query-pipeline select-clause on-conflict-clause?\n *
\n * query-construct-type := table key-specifier | stream\n *
\n *\n * @return Parsed node\n */\n private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {\n startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);\n STNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr);\n endContext();\n return tableOrQueryExpr;\n }\n\n private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) {\n STNode queryConstructType;\n switch (peek().kind) {\n case FROM_KEYWORD:\n queryConstructType = STNodeFactory.createEmptyNode();\n return parseQueryExprRhs(queryConstructType, isRhsExpr);\n case STREAM_KEYWORD:\n queryConstructType = parseQueryConstructType(parseStreamKeyword(), null);\n return parseQueryExprRhs(queryConstructType, isRhsExpr);\n case TABLE_KEYWORD:\n STNode tableKeyword = parseTableKeyword();\n return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);\n default:\n recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START);\n return parseTableConstructorOrQueryInternal(isRhsExpr);\n }\n\n }\n\n private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {\n STNode keySpecifier;\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case OPEN_BRACKET_TOKEN:\n keySpecifier = STNodeFactory.createEmptyNode();\n return parseTableConstructorExprRhs(tableKeyword, keySpecifier);\n case KEY_KEYWORD:\n keySpecifier = parseKeySpecifier();\n return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\n case IDENTIFIER_TOKEN:\n if (isKeyKeyword(nextToken)) {\n keySpecifier = parseKeySpecifier();\n return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\n }\n break;\n default:\n break;\n }\n\n recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS);\n return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);\n }\n\n private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {\n switch (peek().kind) {\n case FROM_KEYWORD:\n return 
parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);\n case OPEN_BRACKET_TOKEN:\n return parseTableConstructorExprRhs(tableKeyword, keySpecifier);\n default:\n recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS);\n return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);\n }\n }\n\n /**\n * Parse query construct type.\n *

\n * query-construct-type := table key-specifier | stream\n *\n * @return Parsed node\n */\n private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {\n return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);\n }\n\n /**\n * Parse query action or expression.\n *

\n * \n * query-expr-rhs := query-pipeline select-clause\n * query-pipeline select-clause on-conflict-clause?\n *
\n * query-pipeline := from-clause intermediate-clause*\n *
\n *\n * @param queryConstructType queryConstructType that precedes this rhs\n * @return Parsed node\n */\n private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {\n\n switchContext(ParserRuleContext.QUERY_EXPRESSION);\n STNode fromClause = parseFromClause(isRhsExpr);\n\n List clauses = new ArrayList<>();\n STNode intermediateClause;\n STNode selectClause = null;\n while (!isEndOfIntermediateClause(peek().kind)) {\n intermediateClause = parseIntermediateClause(isRhsExpr);\n if (intermediateClause == null) {\n break;\n }\n\n \n if (selectClause != null) {\n selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,\n DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);\n continue;\n }\n\n if (intermediateClause.kind != SyntaxKind.SELECT_CLAUSE) {\n clauses.add(intermediateClause);\n continue;\n }\n\n selectClause = intermediateClause;\n\n if (isNestedQueryExpr() || !isValidIntermediateQueryStart(peek().kind)) {\n \n \n \n break;\n }\n }\n\n if (peek().kind == SyntaxKind.DO_KEYWORD) {\n STNode intermediateClauses = STNodeFactory.createNodeList(clauses);\n STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\n return parseQueryAction(queryConstructType, queryPipeline, selectClause);\n }\n\n if (selectClause == null) {\n STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);\n STNode expr = STNodeFactory\n .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\n selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);\n\n \n \n if (clauses.isEmpty()) {\n fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\n } else {\n int lastIndex = clauses.size() - 1;\n STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),\n DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);\n clauses.set(lastIndex, 
intClauseWithDiagnostic);\n }\n }\n\n STNode intermediateClauses = STNodeFactory.createNodeList(clauses);\n STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);\n STNode onConflictClause = parseOnConflictClause(isRhsExpr);\n\n\n return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,\n onConflictClause);\n }\n\n /**\n * Check whether currently parsing query expr is a nested query expression.\n *\n * @return true if currently parsing query-expr is a nested query-expr. false otherwise.\n */\n private boolean isNestedQueryExpr() {\n return Collections.frequency(this.errorHandler.getContextStack(), ParserRuleContext.QUERY_EXPRESSION) > 1;\n }\n\n private boolean isValidIntermediateQueryStart(SyntaxKind syntaxKind) {\n switch (syntaxKind) {\n case FROM_KEYWORD:\n case WHERE_KEYWORD:\n case LET_KEYWORD:\n case SELECT_KEYWORD:\n case JOIN_KEYWORD:\n case OUTER_KEYWORD:\n case ORDER_KEYWORD:\n case BY_KEYWORD:\n case ASCENDING_KEYWORD:\n case DESCENDING_KEYWORD:\n case LIMIT_KEYWORD:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse an intermediate clause.\n *

\n * \n * intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause\n * \n *\n * @return Parsed node\n */\n private STNode parseIntermediateClause(boolean isRhsExpr) {\n switch (peek().kind) {\n case FROM_KEYWORD:\n return parseFromClause(isRhsExpr);\n case WHERE_KEYWORD:\n return parseWhereClause(isRhsExpr);\n case LET_KEYWORD:\n return parseLetClause(isRhsExpr);\n case SELECT_KEYWORD:\n return parseSelectClause(isRhsExpr);\n case JOIN_KEYWORD:\n case OUTER_KEYWORD:\n return parseJoinClause(isRhsExpr);\n case ORDER_KEYWORD:\n case BY_KEYWORD:\n case ASCENDING_KEYWORD:\n case DESCENDING_KEYWORD:\n return parseOrderByClause(isRhsExpr);\n case LIMIT_KEYWORD:\n return parseLimitClause(isRhsExpr);\n case DO_KEYWORD:\n case SEMICOLON_TOKEN:\n case ON_KEYWORD:\n case CONFLICT_KEYWORD:\n return null;\n default:\n recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS);\n return parseIntermediateClause(isRhsExpr);\n }\n }\n\n /**\n * Parse join-keyword.\n *\n * @return Join-keyword node\n */\n private STNode parseJoinKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.JOIN_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.JOIN_KEYWORD);\n return parseJoinKeyword();\n }\n }\n\n /**\n * Parse equals keyword.\n *\n * @return Parsed node\n */\n private STNode parseEqualsKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.EQUALS_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.EQUALS_KEYWORD);\n return parseEqualsKeyword();\n }\n }\n\n private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case CLOSE_BRACE_TOKEN:\n case CLOSE_PAREN_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n case OPEN_BRACE_TOKEN:\n case SEMICOLON_TOKEN:\n case PUBLIC_KEYWORD:\n case FUNCTION_KEYWORD:\n case EOF_TOKEN:\n case RESOURCE_KEYWORD:\n case LISTENER_KEYWORD:\n case DOCUMENTATION_STRING:\n case PRIVATE_KEYWORD:\n case RETURNS_KEYWORD:\n 
case SERVICE_KEYWORD:\n case TYPE_KEYWORD:\n case CONST_KEYWORD:\n case FINAL_KEYWORD:\n case DO_KEYWORD:\n case ON_KEYWORD:\n case CONFLICT_KEYWORD:\n return true;\n default:\n return isValidExprRhsStart(tokenKind, SyntaxKind.NONE);\n }\n }\n\n /**\n * Parse from clause.\n *

\n * from-clause := from typed-binding-pattern in expression\n *\n * @return Parsed node\n */\n private STNode parseFromClause(boolean isRhsExpr) {\n STNode fromKeyword = parseFromKeyword();\n STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);\n STNode inKeyword = parseInKeyword();\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);\n }\n\n /**\n * Parse from-keyword.\n *\n * @return From-keyword node\n */\n private STNode parseFromKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.FROM_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FROM_KEYWORD);\n return parseFromKeyword();\n }\n }\n\n /**\n * Parse where clause.\n *

\n * where-clause := where expression\n *\n * @return Parsed node\n */\n private STNode parseWhereClause(boolean isRhsExpr) {\n STNode whereKeyword = parseWhereKeyword();\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n return STNodeFactory.createWhereClauseNode(whereKeyword, expression);\n }\n\n /**\n * Parse where-keyword.\n *\n * @return Where-keyword node\n */\n private STNode parseWhereKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.WHERE_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.WHERE_KEYWORD);\n return parseWhereKeyword();\n }\n }\n\n /**\n * Parse limit-keyword.\n *\n * @return limit-keyword node\n */\n private STNode parseLimitKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.LIMIT_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.LIMIT_KEYWORD);\n return parseLimitKeyword();\n }\n }\n\n /**\n * Parse let clause.\n *

\n * let-clause := let let-var-decl [, let-var-decl]* \n *\n * @return Parsed node\n */\n private STNode parseLetClause(boolean isRhsExpr) {\n STNode letKeyword = parseLetKeyword();\n STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);\n\n \n letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,\n DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);\n\n return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);\n }\n\n /**\n * Parse order-keyword.\n *\n * @return Order-keyword node\n */\n private STNode parseOrderKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.ORDER_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.ORDER_KEYWORD);\n return parseOrderKeyword();\n }\n }\n\n /**\n * Parse by-keyword.\n *\n * @return By-keyword node\n */\n private STNode parseByKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.BY_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.BY_KEYWORD);\n return parseByKeyword();\n }\n }\n\n /**\n * Parse order by clause.\n *

\n * order-by-clause := order by order-key-list\n * \n *\n * @return Parsed node\n */\n private STNode parseOrderByClause(boolean isRhsExpr) {\n STNode orderKeyword = parseOrderKeyword();\n STNode byKeyword = parseByKeyword();\n STNode orderKeys = parseOrderKeyList(isRhsExpr);\n byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);\n return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys);\n }\n\n /**\n * Parse order key.\n *

\n * order-key-list := order-key [, order-key]*\n *\n * @return Parsed node\n */\n private STNode parseOrderKeyList(boolean isRhsExpr) {\n startContext(ParserRuleContext.ORDER_KEY_LIST);\n List orderKeys = new ArrayList<>();\n STToken nextToken = peek();\n\n if (isEndOfOrderKeys(nextToken.kind)) {\n endContext();\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n STNode orderKey = parseOrderKey(isRhsExpr);\n orderKeys.add(orderKey);\n\n \n nextToken = peek();\n STNode orderKeyListMemberEnd;\n while (!isEndOfOrderKeys(nextToken.kind)) {\n orderKeyListMemberEnd = parseOrderKeyListMemberEnd();\n if (orderKeyListMemberEnd == null) {\n break;\n }\n orderKeys.add(orderKeyListMemberEnd);\n orderKey = parseOrderKey(isRhsExpr);\n orderKeys.add(orderKey);\n nextToken = peek();\n }\n\n endContext();\n return STNodeFactory.createNodeList(orderKeys);\n }\n\n private boolean isEndOfOrderKeys(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case COMMA_TOKEN:\n case ASCENDING_KEYWORD:\n case DESCENDING_KEYWORD:\n return false;\n case SEMICOLON_TOKEN:\n case EOF_TOKEN:\n return true;\n default:\n return isQueryClauseStartToken(tokenKind);\n }\n }\n\n private boolean isQueryClauseStartToken(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case SELECT_KEYWORD:\n case LET_KEYWORD:\n case WHERE_KEYWORD:\n case OUTER_KEYWORD:\n case JOIN_KEYWORD:\n case ORDER_KEYWORD:\n case DO_KEYWORD:\n case FROM_KEYWORD:\n case LIMIT_KEYWORD:\n return true;\n default:\n return false;\n }\n }\n\n private STNode parseOrderKeyListMemberEnd() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case COMMA_TOKEN:\n return parseComma();\n case EOF_TOKEN:\n return null;\n default:\n if (isQueryClauseStartToken(nextToken.kind)) {\n \n return null;\n }\n\n recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END);\n return parseOrderKeyListMemberEnd();\n }\n }\n\n /**\n * Parse order key.\n *

\n * order-key := expression (ascending | descending)?\n *\n * @return Parsed node\n */\n private STNode parseOrderKey(boolean isRhsExpr) {\n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n\n STNode orderDirection;\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case ASCENDING_KEYWORD:\n case DESCENDING_KEYWORD:\n orderDirection = consume();\n break;\n default:\n orderDirection = STNodeFactory.createEmptyNode();\n }\n\n return STNodeFactory.createOrderKeyNode(expression, orderDirection);\n }\n\n /**\n * Parse select clause.\n *

\n * select-clause := select expression\n *\n * @return Parsed node\n */\n private STNode parseSelectClause(boolean isRhsExpr) {\n startContext(ParserRuleContext.SELECT_CLAUSE);\n STNode selectKeyword = parseSelectKeyword();\n\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n endContext();\n return STNodeFactory.createSelectClauseNode(selectKeyword, expression);\n }\n\n /**\n * Parse select-keyword.\n *\n * @return Select-keyword node\n */\n private STNode parseSelectKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.SELECT_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.SELECT_KEYWORD);\n return parseSelectKeyword();\n }\n }\n\n /**\n * Parse on-conflict clause.\n *

\n * \n * onConflictClause := on conflict expression\n * \n *\n * @return On conflict clause node\n */\n private STNode parseOnConflictClause(boolean isRhsExpr) {\n STToken nextToken = peek();\n if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {\n return STNodeFactory.createEmptyNode();\n }\n\n startContext(ParserRuleContext.ON_CONFLICT_CLAUSE);\n STNode onKeyword = parseOnKeyword();\n STNode conflictKeyword = parseConflictKeyword();\n endContext();\n STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr);\n }\n\n /**\n * Parse conflict keyword.\n *\n * @return Conflict keyword node\n */\n private STNode parseConflictKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.CONFLICT_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.CONFLICT_KEYWORD);\n return parseConflictKeyword();\n }\n }\n\n /**\n * Parse limit clause.\n *

\n * limitClause := limit expression\n *\n * @return Limit expression node\n */\n private STNode parseLimitClause(boolean isRhsExpr) {\n STNode limitKeyword = parseLimitKeyword();\n \n \n STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n return STNodeFactory.createLimitClauseNode(limitKeyword, expr);\n }\n\n /**\n * Parse join clause.\n *

\n * \n * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause\n *
\n * join-var-decl := join (typeName | var) bindingPattern\n *
\n * outer-join-var-decl := outer join var binding-pattern\n *
\n *\n * @return Join clause\n */\n private STNode parseJoinClause(boolean isRhsExpr) {\n startContext(ParserRuleContext.JOIN_CLAUSE);\n STNode outerKeyword;\n STToken nextToken = peek();\n if (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {\n outerKeyword = consume();\n } else {\n outerKeyword = STNodeFactory.createEmptyNode();\n }\n\n STNode joinKeyword = parseJoinKeyword();\n STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);\n STNode inKeyword = parseInKeyword();\n \n \n STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);\n endContext();\n STNode onCondition = parseOnClause(isRhsExpr);\n return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression,\n onCondition);\n }\n\n /**\n * Parse on clause.\n *

 *
 * on clause := `on` expression `equals` expression
 *
 * @param isRhsExpr Whether the enclosing construct is being parsed as an rhs expression
 * @return On clause node
 */
private STNode parseOnClause(boolean isRhsExpr) {
    STToken nextToken = peek();
    // If the next token already starts another query clause, the on-clause is
    // absent altogether: synthesize one that carries "missing" diagnostics.
    if (isQueryClauseStartToken(nextToken.kind)) {
        return createMissingOnClauseNode();
    }

    startContext(ParserRuleContext.ON_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    STNode equalsKeyword = parseEqualsKeyword();
    endContext();
    // The rhs expression is deliberately parsed after the ON_CLAUSE context is
    // popped — NOTE(review): presumably so recovery inside it does not treat
    // on-clause tokens as insertable; confirm against the recovery tables.
    STNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
}

/**
 * Create an on-clause node whose keyword, `equals` token, and operand
 * identifiers are all missing tokens annotated with the corresponding
 * "missing X" diagnostic. The same missing identifier token backs both the
 * lhs and the rhs name-reference expressions.
 *
 * @return Synthesized on-clause node
 */
private STNode createMissingOnClauseNode() {
    STNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);
    STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
    STNode equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);

    STNode lhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);
    STNode rhsExpression = STNodeFactory.createSimpleNameReferenceNode(identifier);
    return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
}

/**
 * Parse start action.
 *
\n * start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)\n *\n * @return Start action node\n */\n private STNode parseStartAction(STNode annots) {\n STNode startKeyword = parseStartKeyword();\n STNode expr = parseActionOrExpression();\n\n \n switch (expr.kind) {\n case FUNCTION_CALL:\n case METHOD_CALL:\n case REMOTE_METHOD_CALL_ACTION:\n break;\n case SIMPLE_NAME_REFERENCE:\n case QUALIFIED_NAME_REFERENCE:\n case FIELD_ACCESS:\n case ASYNC_SEND_ACTION:\n expr = generateValidExprForStartAction(expr);\n break;\n default:\n startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,\n DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);\n STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);\n STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);\n STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);\n expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken,\n STNodeFactory.createEmptyNodeList(), closeParenToken);\n break;\n }\n\n return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);\n }\n\n private STNode generateValidExprForStartAction(STNode expr) {\n STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,\n DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);\n STNode arguments = STNodeFactory.createEmptyNodeList();\n STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,\n DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);\n\n switch (expr.kind) {\n case FIELD_ACCESS:\n STFieldAccessExpressionNode fieldAccessExpr = (STFieldAccessExpressionNode) expr;\n return STNodeFactory.createMethodCallExpressionNode(fieldAccessExpr.expression,\n fieldAccessExpr.dotToken, 
fieldAccessExpr.fieldName, openParenToken, arguments,\n closeParenToken);\n case ASYNC_SEND_ACTION:\n STAsyncSendActionNode asyncSendAction = (STAsyncSendActionNode) expr;\n return STNodeFactory.createRemoteMethodCallActionNode(asyncSendAction.expression,\n asyncSendAction.rightArrowToken, asyncSendAction.peerWorker, openParenToken, arguments,\n closeParenToken);\n default: \n return STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);\n }\n }\n\n /**\n * Parse start keyword.\n *\n * @return Start keyword node\n */\n private STNode parseStartKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.START_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.START_KEYWORD);\n return parseStartKeyword();\n }\n }\n\n /**\n * Parse flush action.\n *

\n * flush-action := flush [peer-worker]\n *\n * @return flush action node\n */\n private STNode parseFlushAction() {\n STNode flushKeyword = parseFlushKeyword();\n STNode peerWorker = parseOptionalPeerWorkerName();\n return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);\n }\n\n /**\n * Parse flush keyword.\n *\n * @return flush keyword node\n */\n private STNode parseFlushKeyword() {\n STToken token = peek();\n if (token.kind == SyntaxKind.FLUSH_KEYWORD) {\n return consume();\n } else {\n recover(token, ParserRuleContext.FLUSH_KEYWORD);\n return parseFlushKeyword();\n }\n }\n\n /**\n * Parse peer worker.\n *

\n * peer-worker := worker-name | function\n *\n * @return peer worker name node\n */\n private STNode parseOptionalPeerWorkerName() {\n STToken token = peek();\n switch (token.kind) {\n case IDENTIFIER_TOKEN:\n case FUNCTION_KEYWORD:\n return STNodeFactory.createSimpleNameReferenceNode(consume());\n default:\n return STNodeFactory.createEmptyNode();\n }\n }\n\n /**\n * Parse intersection type descriptor.\n *

\n * intersection-type-descriptor := type-descriptor & type-descriptor\n *

\n *\n * @return Parsed node\n */\n private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,\n boolean isTypedBindingPattern) {\n \n STNode bitwiseAndToken = consume();\n STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,\n TypePrecedence.INTERSECTION);\n return mergeTypesWithIntersection(leftTypeDesc, bitwiseAndToken, rightTypeDesc);\n }\n\n /**\n * Creates an intersection type descriptor after validating lhs and rhs types.\n *

\n * Note: Since type precedence and associativity are not taken into account here,\n * this method should not be called directly when types are unknown.\n *
\n * Call {@link \n *\n * @param leftTypeDesc lhs type\n * @param bitwiseAndToken bitwise-and token\n * @param rightTypeDesc rhs type\n * @return an IntersectionTypeDescriptorNode\n */\n private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {\n leftTypeDesc = validateForUsageOfVar(leftTypeDesc);\n rightTypeDesc = validateForUsageOfVar(rightTypeDesc);\n return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);\n }\n\n /**\n * Parse singleton type descriptor.\n *

\n * singleton-type-descriptor := simple-const-expr\n * simple-const-expr :=\n * nil-literal\n * | boolean-literal\n * | [Sign] int-literal\n * | [Sign] floating-point-literal\n * | string-literal\n * | constant-reference-expr\n *

\n */\n private STNode parseSingletonTypeDesc() {\n STNode simpleContExpr = parseSimpleConstExpr();\n return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);\n }\n\n \n private STNode parseSignedIntOrFloat() {\n STNode operator = parseUnaryOperator();\n STNode literal;\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case HEX_INTEGER_LITERAL_TOKEN:\n case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\n case HEX_FLOATING_POINT_LITERAL_TOKEN:\n literal = parseBasicLiteral();\n break;\n default: \n literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);\n literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);\n }\n return STNodeFactory.createUnaryExpressionNode(operator, literal);\n }\n\n private static boolean isSingletonTypeDescStart(SyntaxKind tokenKind, STToken nextNextToken) {\n switch (tokenKind) {\n case STRING_LITERAL_TOKEN:\n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\n case HEX_FLOATING_POINT_LITERAL_TOKEN:\n case TRUE_KEYWORD:\n case FALSE_KEYWORD:\n case NULL_KEYWORD:\n return true;\n case PLUS_TOKEN:\n case MINUS_TOKEN:\n return isIntOrFloat(nextNextToken);\n default:\n return false;\n }\n }\n\n static boolean isIntOrFloat(STToken token) {\n switch (token.kind) {\n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\n case HEX_FLOATING_POINT_LITERAL_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Check whether the parser reached to a valid expression start.\n *\n * @param nextTokenKind Kind of the next immediate token.\n * @param nextTokenIndex Index to the next token.\n * @return true if this is a start of a valid expression. 
false otherwise\n */\n private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {\n nextTokenIndex++;\n switch (nextTokenKind) {\n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case STRING_LITERAL_TOKEN:\n case NULL_KEYWORD:\n case TRUE_KEYWORD:\n case FALSE_KEYWORD:\n case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\n case HEX_FLOATING_POINT_LITERAL_TOKEN:\n SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;\n if (nextNextTokenKind == SyntaxKind.PIPE_TOKEN || nextNextTokenKind == SyntaxKind.BITWISE_AND_TOKEN) {\n \n nextTokenIndex++;\n return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);\n }\n\n return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN ||\n nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||\n nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||\n isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);\n case IDENTIFIER_TOKEN:\n return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);\n case OPEN_PAREN_TOKEN:\n case CHECK_KEYWORD:\n case CHECKPANIC_KEYWORD:\n case OPEN_BRACE_TOKEN:\n case TYPEOF_KEYWORD:\n case NEGATION_TOKEN:\n case EXCLAMATION_MARK_TOKEN:\n case TRAP_KEYWORD:\n case OPEN_BRACKET_TOKEN:\n case LT_TOKEN:\n case FROM_KEYWORD:\n case LET_KEYWORD:\n case BACKTICK_TOKEN:\n case NEW_KEYWORD:\n case LEFT_ARROW_TOKEN:\n case FUNCTION_KEYWORD:\n case TRANSACTIONAL_KEYWORD:\n case ISOLATED_KEYWORD:\n case BASE16_KEYWORD:\n case BASE64_KEYWORD:\n return true;\n case PLUS_TOKEN:\n case MINUS_TOKEN:\n return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);\n case TABLE_KEYWORD:\n return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;\n case STREAM_KEYWORD:\n STToken nextNextToken = peek(nextTokenIndex);\n return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||\n nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||\n nextNextToken.kind == SyntaxKind.FROM_KEYWORD;\n case ERROR_KEYWORD:\n return 
peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;\n case XML_KEYWORD:\n case STRING_KEYWORD:\n return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;\n\n \n case START_KEYWORD:\n case FLUSH_KEYWORD:\n case WAIT_KEYWORD:\n default:\n return false;\n }\n }\n\n /**\n * Parse sync send action.\n *

\n * sync-send-action := expression ->> peer-worker\n *\n * @param expression LHS expression of the sync send action\n * @return Sync send action node\n */\n private STNode parseSyncSendAction(STNode expression) {\n STNode syncSendToken = parseSyncSendToken();\n STNode peerWorker = parsePeerWorkerName();\n return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);\n }\n\n /**\n * Parse peer worker.\n *

\n * peer-worker := worker-name | function\n *\n * @return peer worker name node\n */\n private STNode parsePeerWorkerName() {\n STToken token = peek();\n switch (token.kind) {\n case IDENTIFIER_TOKEN:\n case FUNCTION_KEYWORD:\n return STNodeFactory.createSimpleNameReferenceNode(consume());\n default:\n recover(token, ParserRuleContext.PEER_WORKER_NAME);\n return parsePeerWorkerName();\n }\n }\n\n /**\n * Parse sync send token.\n *

\n * sync-send-token := ->> \n *\n * @return sync send token\n */\n private STNode parseSyncSendToken() {\n STToken token = peek();\n if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.SYNC_SEND_TOKEN);\n return parseSyncSendToken();\n }\n }\n\n /**\n * Parse receive action.\n *

 *
 * receive-action := single-receive-action | multiple-receive-action
 *
 * @return Receive action
 */
private STNode parseReceiveAction() {
    STNode leftArrow = parseLeftArrowToken();
    STNode receiveWorkers = parseReceiveWorkers();
    return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);
}

/**
 * Parse the worker part of a receive action: either a single peer worker
 * name (identifier or `function`) or, when an open brace follows, a
 * multiple-receive worker list. Recovers until one of those starts.
 *
 * @return Receive workers node
 */
private STNode parseReceiveWorkers() {
    switch (peek().kind) {
        case FUNCTION_KEYWORD:
        case IDENTIFIER_TOKEN:
            // single-receive-action: a lone peer worker name
            return parsePeerWorkerName();
        case OPEN_BRACE_TOKEN:
            // multiple-receive-action: { receive-field (, receive-field)* }
            return parseMultipleReceiveWorkers();
        default:
            recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
            return parseReceiveWorkers();
    }
}

/**
 * Parse multiple worker receivers.
 *
\n * { receive-field (, receive-field)* }\n *\n * @return Multiple worker receiver node\n */\n private STNode parseMultipleReceiveWorkers() {\n startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);\n STNode openBrace = parseOpenBrace();\n STNode receiveFields = parseReceiveFields();\n STNode closeBrace = parseCloseBrace();\n endContext();\n\n openBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,\n DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);\n return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);\n }\n\n private STNode parseReceiveFields() {\n List receiveFields = new ArrayList<>();\n STToken nextToken = peek();\n\n \n if (isEndOfReceiveFields(nextToken.kind)) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n STNode receiveField = parseReceiveField();\n receiveFields.add(receiveField);\n\n \n nextToken = peek();\n STNode recieveFieldEnd;\n while (!isEndOfReceiveFields(nextToken.kind)) {\n recieveFieldEnd = parseReceiveFieldEnd();\n if (recieveFieldEnd == null) {\n break;\n }\n\n receiveFields.add(recieveFieldEnd);\n receiveField = parseReceiveField();\n receiveFields.add(receiveField);\n nextToken = peek();\n }\n\n return STNodeFactory.createNodeList(receiveFields);\n }\n\n private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {\n switch (nextTokenKind) {\n case EOF_TOKEN:\n case CLOSE_BRACE_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n private STNode parseReceiveFieldEnd() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_BRACE_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);\n return parseReceiveFieldEnd();\n }\n }\n\n /**\n * Parse receive field.\n *

\n * receive-field := peer-worker | field-name : peer-worker\n *\n * @return Receiver field node\n */\n private STNode parseReceiveField() {\n switch (peek().kind) {\n case FUNCTION_KEYWORD:\n STNode functionKeyword = consume();\n return STNodeFactory.createSimpleNameReferenceNode(functionKeyword);\n case IDENTIFIER_TOKEN:\n STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);\n return createQualifiedReceiveField(identifier);\n default:\n recover(peek(), ParserRuleContext.RECEIVE_FIELD);\n return parseReceiveField();\n }\n }\n\n private STNode createQualifiedReceiveField(STNode identifier) {\n if (peek().kind != SyntaxKind.COLON_TOKEN) {\n return identifier;\n }\n\n STNode colon = parseColon();\n STNode peerWorker = parsePeerWorkerName();\n return createQualifiedNameReferenceNode(identifier, colon, peerWorker);\n }\n\n /**\n * Parse left arrow (<-) token.\n *\n * @return left arrow token\n */\n private STNode parseLeftArrowToken() {\n STToken token = peek();\n if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);\n return parseLeftArrowToken();\n }\n }\n\n /**\n * Parse signed right shift token (>>).\n * This method should only be called by seeing a `DOUBLE_GT_TOKEN` or\n * by seeing a `GT_TOKEN` followed by a `GT_TOKEN`\n *\n * @return Parsed node\n */\n private STNode parseSignedRightShiftToken() {\n STNode firstToken = consume();\n if (firstToken.kind == SyntaxKind.DOUBLE_GT_TOKEN) {\n return firstToken;\n }\n\n STToken endLGToken = consume();\n STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, firstToken.leadingMinutiae(),\n endLGToken.trailingMinutiae());\n\n if (hasTrailingMinutiae(firstToken)) {\n doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,\n DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);\n }\n return doubleGTToken;\n }\n\n /**\n * Parse unsigned right shift token (>>>).\n * This method should 
only be called by seeing a `TRIPPLE_GT_TOKEN` or\n * by seeing a `GT_TOKEN` followed by two `GT_TOKEN`s\n *\n * @return Parsed node\n */\n private STNode parseUnsignedRightShiftToken() {\n STNode firstToken = consume();\n if (firstToken.kind == SyntaxKind.TRIPPLE_GT_TOKEN) {\n return firstToken;\n }\n\n STNode middleGTToken = consume();\n STNode endLGToken = consume();\n STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,\n firstToken.leadingMinutiae(), endLGToken.trailingMinutiae());\n\n boolean validOpenGTToken = !hasTrailingMinutiae(firstToken);\n boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);\n if (validOpenGTToken && validMiddleGTToken) {\n return unsignedRightShiftToken;\n }\n\n unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,\n DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);\n return unsignedRightShiftToken;\n }\n\n /**\n * Parse wait action.\n *

 *
 * wait-action := single-wait-action | multiple-wait-action | alternate-wait-action
 *
 * @return Wait action node
 */
private STNode parseWaitAction() {
    STNode waitKeyword = parseWaitKeyword();
    // An open brace right after `wait` selects the multiple-wait form;
    // everything else is handled as a single or alternate wait action.
    if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseMultiWaitAction(waitKeyword);
    }

    return parseSingleOrAlternateWaitAction(waitKeyword);
}

/**
 * Parse wait keyword, recovering until one is found.
 *
 * @return wait keyword
 */
private STNode parseWaitKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.WAIT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.WAIT_KEYWORD);
        return parseWaitKeyword();
    }
}

/**
 * Parse single or alternate wait actions.
 *
\n * \n * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+\n *
\n * wait-future-expr := expression but not mapping-constructor-expr\n *
\n *\n * @param waitKeyword wait keyword\n * @return Single or alternate wait action node\n */\n private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {\n startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);\n STToken nextToken = peek();\n\n \n if (isEndOfWaitFutureExprList(nextToken.kind)) {\n endContext();\n STNode waitFutureExprs = STNodeFactory\n .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));\n waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,\n DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);\n return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);\n }\n\n \n List waitFutureExprList = new ArrayList<>();\n STNode waitField = parseWaitFutureExpr();\n waitFutureExprList.add(waitField);\n\n \n nextToken = peek();\n STNode waitFutureExprEnd;\n while (!isEndOfWaitFutureExprList(nextToken.kind)) {\n waitFutureExprEnd = parseWaitFutureExprEnd();\n if (waitFutureExprEnd == null) {\n break;\n }\n\n waitFutureExprList.add(waitFutureExprEnd);\n waitField = parseWaitFutureExpr();\n waitFutureExprList.add(waitField);\n nextToken = peek();\n }\n\n \n \n endContext();\n return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));\n }\n\n private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {\n switch (nextTokenKind) {\n case EOF_TOKEN:\n case CLOSE_BRACE_TOKEN:\n case SEMICOLON_TOKEN:\n case OPEN_BRACE_TOKEN:\n return true;\n case PIPE_TOKEN:\n default:\n return false;\n }\n }\n\n private STNode parseWaitFutureExpr() {\n STNode waitFutureExpr = parseActionOrExpression();\n if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {\n waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,\n DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);\n } else if (isAction(waitFutureExpr)) {\n waitFutureExpr =\n SyntaxErrors.addDiagnostic(waitFutureExpr, DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);\n }\n return 
waitFutureExpr;\n }\n\n private STNode parseWaitFutureExprEnd() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case PIPE_TOKEN:\n return parsePipeToken();\n default:\n if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {\n return null;\n }\n\n recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);\n return parseWaitFutureExprEnd();\n }\n }\n\n /**\n * Parse multiple wait action.\n *

\n * multiple-wait-action := wait { wait-field (, wait-field)* }\n *\n * @param waitKeyword Wait keyword\n * @return Multiple wait action node\n */\n private STNode parseMultiWaitAction(STNode waitKeyword) {\n startContext(ParserRuleContext.MULTI_WAIT_FIELDS);\n STNode openBrace = parseOpenBrace();\n STNode waitFields = parseWaitFields();\n STNode closeBrace = parseCloseBrace();\n endContext();\n\n openBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,\n DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);\n STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);\n return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);\n }\n\n private STNode parseWaitFields() {\n List waitFields = new ArrayList<>();\n STToken nextToken = peek();\n\n \n if (isEndOfWaitFields(nextToken.kind)) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n STNode waitField = parseWaitField();\n waitFields.add(waitField);\n\n \n nextToken = peek();\n STNode waitFieldEnd;\n while (!isEndOfWaitFields(nextToken.kind)) {\n waitFieldEnd = parseWaitFieldEnd();\n if (waitFieldEnd == null) {\n break;\n }\n\n waitFields.add(waitFieldEnd);\n waitField = parseWaitField();\n waitFields.add(waitField);\n nextToken = peek();\n }\n\n return STNodeFactory.createNodeList(waitFields);\n }\n\n private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {\n switch (nextTokenKind) {\n case EOF_TOKEN:\n case CLOSE_BRACE_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n private STNode parseWaitFieldEnd() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_BRACE_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.WAIT_FIELD_END);\n return parseWaitFieldEnd();\n }\n }\n\n /**\n * Parse wait field.\n *

 *
 * wait-field := variable-name | field-name : wait-future-expr
 *
 * @return Wait field node
 */
private STNode parseWaitField() {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
            identifier = STNodeFactory.createSimpleNameReferenceNode(identifier);
            // A following colon turns the plain variable name into a
            // `field-name : wait-future-expr` field.
            return createQualifiedWaitField(identifier);
        default:
            recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
            return parseWaitField();
    }
}

/**
 * Complete a wait field given its leading name reference: if no colon
 * follows, the name itself is the field; otherwise parse
 * `: wait-future-expr` and build a qualified wait-field node.
 *
 * @param identifier Simple name reference already parsed for the field
 * @return The identifier as-is, or a wait-field node
 */
private STNode createQualifiedWaitField(STNode identifier) {
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }

    STNode colon = parseColon();
    STNode waitFutureExpr = parseWaitFutureExpr();
    return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);
}

/**
 * Parse annot access expression.
 *
\n * \n * annot-access-expr := expression .@ annot-tag-reference\n *
\n * annot-tag-reference := qualified-identifier | identifier\n *
\n *\n * @param lhsExpr Preceding expression of the annot access access\n * @return Parsed node\n */\n private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\n STNode annotAccessToken = parseAnnotChainingToken();\n STNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);\n return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);\n }\n\n /**\n * Parse annot-chaining-token.\n *\n * @return Parsed node\n */\n private STNode parseAnnotChainingToken() {\n STToken token = peek();\n if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);\n return parseAnnotChainingToken();\n }\n }\n\n /**\n * Parse field access identifier.\n *

 *
 * field-access-identifier := qualified-identifier | identifier
 *
 * @param isInConditionalExpr Whether this is parsed inside a conditional expression
 * @return Parsed node
 */
private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
    STToken nextToken = peek();
    if (!isPredeclaredIdentifier(nextToken.kind)) {
        // No usable identifier follows: substitute a missing identifier token
        // (with a diagnostic) and continue as a qualified identifier so the
        // rest of the access chain can still be parsed.
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
        return parseQualifiedIdentifier(identifier, isInConditionalExpr);
    }

    return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
}

/**
 * Parse query action.
 *
 *
 * query-action := query-pipeline do-clause
 *
 * do-clause := do block-stmt
 *
 * @param queryConstructType Query construct type; not allowed in a query action,
 *        passed only so it can be reported and attached as invalid minutiae
 * @param queryPipeline Query pipeline
 * @param selectClause Select clause, if any; likewise only for validation
 * @return Query action node
 */
private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause) {
    // A construct type or a select clause is invalid in a query *action*:
    // fold each into the pipeline as invalid-node minutiae with a diagnostic
    // rather than dropping it.
    if (queryConstructType != null) {
        queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
    }
    if (selectClause != null) {
        queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
    }

    startContext(ParserRuleContext.DO_CLAUSE);
    STNode doKeyword = parseDoKeyword();
    STNode blockStmt = parseBlockNode();
    endContext();

    return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt);
}

/**
 * Parse 'do' keyword, recovering until one is found.
 *
 * @return do keyword node
 */
private STNode parseDoKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.DO_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.DO_KEYWORD);
        return parseDoKeyword();
    }
}

/**
 * Parse optional field access or xml optional attribute access expression.
 *
\n * \n * optional-field-access-expr := expression ?. field-name\n *
\n * xml-optional-attribute-access-expr := expression ?. xml-attribute-name\n *
\n * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier\n *
\n * xml-qualified-name := xml-namespace-prefix : identifier\n *
\n * xml-namespace-prefix := identifier\n *
\n *\n * @param lhsExpr Preceding expression of the optional access\n * @return Parsed node\n */\n private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {\n STNode optionalFieldAccessToken = parseOptionalChainingToken();\n STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);\n return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);\n }\n\n /**\n * Parse optional chaining token.\n *\n * @return parsed node\n */\n private STNode parseOptionalChainingToken() {\n STToken token = peek();\n if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);\n return parseOptionalChainingToken();\n }\n }\n\n /**\n * Parse conditional expression.\n *

\n * conditional-expr := expression ? expression : expression\n *\n * @param lhsExpr Preceding expression of the question mark\n * @param isInConditionalExpr whether calling from a conditional-expr\n * @return Parsed node\n */\n private STNode parseConditionalExpression(STNode lhsExpr, boolean isInConditionalExpr) {\n startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);\n STNode questionMark = parseQuestionMark();\n \n \n STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);\n\n \n \n if (peek().kind != SyntaxKind.COLON_TOKEN) {\n if (middleExpr.kind == SyntaxKind.CONDITIONAL_EXPRESSION) {\n STConditionalExpressionNode innerConditionalExpr = (STConditionalExpressionNode) middleExpr;\n STNode innerMiddleExpr = innerConditionalExpr.middleExpression;\n\n \n \n \n STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, false);\n if (rightMostQNameRef != null) {\n middleExpr = generateConditionalExprForRightMost(innerConditionalExpr.lhsExpression,\n innerConditionalExpr.questionMarkToken, innerMiddleExpr, rightMostQNameRef);\n endContext();\n return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,\n innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);\n }\n\n STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, true);\n if (leftMostQNameRef != null) {\n middleExpr = generateConditionalExprForLeftMost(innerConditionalExpr.lhsExpression,\n innerConditionalExpr.questionMarkToken, innerMiddleExpr, leftMostQNameRef);\n endContext();\n return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,\n innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);\n }\n }\n\n \n \n \n STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, false);\n if (rightMostQNameRef != null) {\n endContext();\n return generateConditionalExprForRightMost(lhsExpr, 
questionMark, middleExpr, rightMostQNameRef);\n }\n\n STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, true);\n if (leftMostQNameRef != null) {\n endContext();\n return generateConditionalExprForLeftMost(lhsExpr, questionMark, middleExpr, leftMostQNameRef);\n }\n }\n\n return parseConditionalExprRhs(lhsExpr, questionMark, middleExpr, isInConditionalExpr);\n }\n\n private STNode generateConditionalExprForRightMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,\n STNode rightMostQualifiedNameRef) {\n STQualifiedNameReferenceNode qualifiedNameRef =\n (STQualifiedNameReferenceNode) rightMostQualifiedNameRef;\n STNode endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);\n\n STNode simpleNameRef =\n ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix);\n middleExpr = middleExpr.replace(rightMostQualifiedNameRef, simpleNameRef);\n return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, qualifiedNameRef.colon,\n endExpr);\n }\n\n private STNode generateConditionalExprForLeftMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,\n STNode leftMostQualifiedNameRef) {\n STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) leftMostQualifiedNameRef;\n STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);\n STNode endExpr = middleExpr.replace(leftMostQualifiedNameRef, simpleNameRef);\n middleExpr = ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix);\n return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, qualifiedNameRef.colon,\n endExpr);\n }\n\n private STNode parseConditionalExprRhs(STNode lhsExpr, STNode questionMark, STNode middleExpr,\n boolean isInConditionalExpr) {\n STNode colon = parseColon();\n endContext();\n \n \n STNode endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false,\n 
isInConditionalExpr);\n return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);\n }\n\n /**\n * Parse enum declaration.\n *

 *
 * module-enum-decl :=
 * metadata
 * [public] enum identifier { enum-member (, enum-member)* }
 * enum-member := metadata identifier [= const-expr]
 *
 * @param metadata Metadata preceding the declaration (assumed already parsed
 *        by the caller; may be an empty node — TODO confirm)
 * @param qualifier Visibility qualifier, e.g. `public` (may be an empty node —
 *        TODO confirm)
 * @return Parsed enum node.
 */
private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
    STNode enumKeywordToken = parseEnumKeyword();
    STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
    STNode openBraceToken = parseOpenBrace();
    STNode enumMemberList = parseEnumMemberList();
    STNode closeBraceToken = parseCloseBrace();

    endContext();
    // An enum must declare at least one member: when the member list is empty,
    // attach the missing-member diagnostic to the open brace.
    openBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,
            DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
    return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,
            openBraceToken, enumMemberList, closeBraceToken);
}

/**
 * Parse 'enum' keyword, recovering until one is found.
 *
 * @return enum keyword node
 */
private STNode parseEnumKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ENUM_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ENUM_KEYWORD);
        return parseEnumKeyword();
    }
}

/**
 * Parse enum member list.
 *
\n * enum-member := metadata identifier [= const-expr]\n *

\n *\n * @return enum member list node.\n */\n private STNode parseEnumMemberList() {\n startContext(ParserRuleContext.ENUM_MEMBER_LIST);\n\n \n if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n \n List enumMemberList = new ArrayList<>();\n STNode enumMember = parseEnumMember();\n\n \n STNode enumMemberRhs;\n while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {\n enumMemberRhs = parseEnumMemberEnd();\n if (enumMemberRhs == null) {\n break;\n }\n enumMemberList.add(enumMember);\n enumMemberList.add(enumMemberRhs);\n enumMember = parseEnumMember();\n }\n\n enumMemberList.add(enumMember);\n\n endContext();\n return STNodeFactory.createNodeList(enumMemberList);\n }\n\n /**\n * Parse enum member.\n *

     * enum-member := metadata identifier [= const-expr]
     *
     * @return Parsed enum member node.
     */
    private STNode parseEnumMember() {
        // Member metadata (doc string and/or annotations) is optional.
        STNode metadata;
        switch (peek().kind) {
            case DOCUMENTATION_STRING:
            case AT_TOKEN:
                metadata = parseMetaData();
                break;
            default:
                metadata = STNodeFactory.createEmptyNode();
        }

        STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
        return parseEnumMemberRhs(metadata, identifierNode);
    }

    /**
     * Parse the optional {@code = const-expr} tail of an enum member.
     *
     * @param metadata Metadata of the member (possibly empty)
     * @param identifierNode Member name
     * @return Enum member node
     */
    private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
        STNode equalToken, constExprNode;
        switch (peek().kind) {
            case EQUAL_TOKEN:
                equalToken = parseAssignOp();
                constExprNode = parseExpression();
                break;
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
                // No explicit value: both parts stay empty.
                equalToken = STNodeFactory.createEmptyNode();
                constExprNode = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS);
                return parseEnumMemberRhs(metadata, identifierNode);
        }

        return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);
    }

    /**
     * Parse the token that ends an enum member: a comma separator, or
     * {@code null} when the close brace follows.
     *
     * @return Comma token, or null at the end of the member list
     */
    private STNode parseEnumMemberEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.ENUM_MEMBER_END);
                return parseEnumMemberEnd();
        }
    }

    /**
     * Disambiguate between a transaction statement and a variable declaration
     * whose type starts with the predeclared prefix {@code transaction:}.
     *
     * @param annots Annotations preceding the statement
     * @param qualifiers Qualifiers preceding the statement
     * @param transactionKeyword Already-consumed {@code transaction} keyword
     * @return Transaction statement or variable declaration node
     */
    private STNode parseTransactionStmtOrVarDecl(STNode annots, List qualifiers, STToken transactionKeyword) {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                reportInvalidStatementAnnots(annots, qualifiers);
                reportInvalidQualifierList(qualifiers);
                return parseTransactionStatement(transactionKeyword);
            case COLON_TOKEN:
                if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                // Else fall through to recovery.
            default:
                Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);

                if (solution.action == Action.KEEP ||
                        (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {
                    // Recovery decided this is a qualified type reference.
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }

                return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);
        }
    }

    /**
     * Parse transaction statement.
     *

     * transaction-stmt := `transaction` block-stmt [on-fail-clause]
     *
     * @param transactionKeyword Already-consumed `transaction` keyword
     * @return Transaction statement node
     */
    private STNode parseTransactionStatement(STNode transactionKeyword) {
        startContext(ParserRuleContext.TRANSACTION_STMT);
        STNode blockStmt = parseBlockNode();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);
    }

    /**
     * Parse commit action.
     *
     * commit-action := "commit"
     *
     * @return Commit action node
     */
    private STNode parseCommitAction() {
        STNode commitKeyword = parseCommitKeyword();
        return STNodeFactory.createCommitActionNode(commitKeyword);
    }

    /**
     * Parse commit keyword.
     *
     * @return parsed node
     */
    private STNode parseCommitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.COMMIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.COMMIT_KEYWORD);
            return parseCommitKeyword();
        }
    }

    /**
     * Parse retry statement.
     *
     * retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
     * retry-spec := [type-parameter] [ `(` arg-list `)` ]
     *
     * @return Retry statement node
     */
    private STNode parseRetryStatement() {
        startContext(ParserRuleContext.RETRY_STMT);
        STNode retryKeyword = parseRetryKeyword();
        // NOTE: the RETRY_STMT context is ended inside parseRetryTypeParamRhs,
        // after the retry body has been parsed.
        STNode retryStmt = parseRetryKeywordRhs(retryKeyword);
        return retryStmt;
    }

    // Parses what follows the `retry` keyword: an optional type parameter,
    // then the retry-spec arguments and body.
    private STNode parseRetryKeywordRhs(STNode retryKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case LT_TOKEN:
                STNode typeParam = parseTypeParameter();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            case OPEN_PAREN_TOKEN:
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                // No type parameter present.
                typeParam = STNodeFactory.createEmptyNode();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            default:
                recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS);
                return parseRetryKeywordRhs(retryKeyword);
        }
    }

    // Parses the optional parenthesized arg list and the retry body, then
    // assembles the retry statement node.
    private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
        STNode args;
        switch (peek().kind) {
            case OPEN_PAREN_TOKEN:
                args = parseParenthesizedArgList();
                break;
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                args = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS);
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
        }

        STNode blockStmt = parseRetryBody();
        endContext(); // end RETRY_STMT context opened in parseRetryStatement
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
    }

    // A retry body is either a plain block or a whole transaction statement
    // (`retry transaction { ... }`).
    private STNode parseRetryBody() {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                return parseBlockNode();
            case TRANSACTION_KEYWORD:
                return parseTransactionStatement(consume());
            default:
                recover(peek(), ParserRuleContext.RETRY_BODY);
                return parseRetryBody();
        }
    }

    /**
     * Parse optional on fail clause.
     *
     * @return Parsed node
     */
    private STNode parseOptionalOnFailClause() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
            return parseOnFailClause();
        }

        // Anything that ends a compound statement means there is no clause.
        if (isEndOfRegularCompoundStmt(nextToken.kind)) {
            return STNodeFactory.createEmptyNode();
        }

        recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
        return parseOptionalOnFailClause();
    }

    // True when the token can follow a compound statement, i.e. no on-fail
    // clause should be expected.
    private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case CLOSE_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case AT_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return isStatementStartingToken(nodeKind);
        }
    }

    // True when the token can begin a statement.
    private boolean isStatementStartingToken(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case FINAL_KEYWORD:

            // Statement-starting keywords and tokens.
            case IF_KEYWORD:
            case WHILE_KEYWORD:
            case DO_KEYWORD:
            case PANIC_KEYWORD:
            case CONTINUE_KEYWORD:
            case BREAK_KEYWORD:
            case RETURN_KEYWORD:
            case LOCK_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case FORK_KEYWORD:
            case FOREACH_KEYWORD:
            case XMLNS_KEYWORD:
            case TRANSACTION_KEYWORD:
            case RETRY_KEYWORD:
            case ROLLBACK_KEYWORD:
            case MATCH_KEYWORD:
            case FAIL_KEYWORD:

            // Expression-statement starting tokens.
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case TRAP_KEYWORD:
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case WAIT_KEYWORD:
            case COMMIT_KEYWORD:

            // NOTE(review): TYPE/CONST/WORKER are module-level constructs;
            // presumably listed here so recovery treats a misplaced declaration
            // as a statement boundary — confirm against the recovery logic.
            case WORKER_KEYWORD:
            case TYPE_KEYWORD:
            case CONST_KEYWORD:
                return true;
            default:
                // A type-descriptor start can begin a local var-decl statement.
                if (isTypeStartingToken(nodeKind)) {
                    return true;
                }

                // An expression start can begin an expression statement.
                if (isValidExpressionStart(nodeKind, 1)) {
                    return true;
                }

                return false;
        }
    }

    /**
     * Parse on fail clause.
     *

     * on-fail-clause := on fail typed-binding-pattern statement-block
     *
     * @return On fail clause node
     */
    private STNode parseOnFailClause() {
        startContext(ParserRuleContext.ON_FAIL_CLAUSE);
        STNode onKeyword = parseOnKeyword();
        STNode failKeyword = parseFailKeyword();
        STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false,
                TypePrecedence.DEFAULT);
        STNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_NAME);
        STNode blockStatement = parseBlockNode();
        endContext();
        return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDescriptor, identifier,
                blockStatement);
    }

    /**
     * Parse retry keyword.
     *
     * @return parsed node
     */
    private STNode parseRetryKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RETRY_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.RETRY_KEYWORD);
            return parseRetryKeyword();
        }
    }

    /**
     * Parse rollback statement.
     *
     * rollback-stmt := "rollback" [expression] ";"
     *
     * @return Rollback statement node
     */
    private STNode parseRollbackStatement() {
        startContext(ParserRuleContext.ROLLBACK_STMT);
        STNode rollbackKeyword = parseRollbackKeyword();
        // The expression is optional: a semicolon right after `rollback` means
        // there is none.
        STNode expression;
        if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {
            expression = STNodeFactory.createEmptyNode();
        } else {
            expression = parseExpression();
        }

        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);
    }

    /**
     * Parse rollback keyword.
     *
     * @return Rollback keyword node
     */
    private STNode parseRollbackKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ROLLBACK_KEYWORD);
            return parseRollbackKeyword();
        }
    }

    /**
     * Parse transactional expression.
     *
     * transactional-expr := "transactional"
     *
     * @return Transactional expression node
     */
    private STNode parseTransactionalExpression() {
        STNode transactionalKeyword = parseTransactionalKeyword();
        return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);
    }

    /**
     * Parse transactional keyword.
     *
     * @return Transactional keyword node
     */
    private STNode parseTransactionalKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD);
            return parseTransactionalKeyword();
        }
    }

    /**
     * Parse byte array literal.
     *

     * byte-array-literal := Base16Literal | Base64Literal
     * Base16Literal := base16 WS ` HexGroup* WS `
     * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
     *
     * @return parsed node
     */
    private STNode parseByteArrayLiteral() {
        STNode type;
        if (peek().kind == SyntaxKind.BASE16_KEYWORD) {
            type = parseBase16Keyword();
        } else {
            type = parseBase64Keyword();
        }

        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        if (startingBackTick.isMissing()) {
            // No backtick at all: synthesize both backticks with empty content
            // and report a single missing-content diagnostic.
            startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
            STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
            STNode content = STNodeFactory.createEmptyNode();
            STNode byteArrayLiteral =
                    STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);
            byteArrayLiteral =
                    SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);
            return byteArrayLiteral;
        }

        STNode content = parseByteArrayContent();
        return parseByteArrayLiteral(type, startingBackTick, content);
    }

    /**
     * Parse byte array literal.
     *
     * @param typeKeyword keyword token, possible values are `base16` and `base64`
     * @param startingBackTick starting backtick token
     * @param byteArrayContent byte array literal content to be validated
     * @return parsed byte array literal node
     */
    private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
        STNode content = STNodeFactory.createEmptyNode();
        STNode newStartingBackTick = startingBackTick;
        STNodeList items = (STNodeList) byteArrayContent;
        if (items.size() == 1) {
            // Exactly one content item: validate it against the keyword's
            // alphabet; invalid content is attached to the starting backtick
            // as invalid-node minutiae.
            STNode item = items.get(0);
            if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else {
                content = item;
            }
        } else if (items.size() > 1) {
            // Multiple items (e.g. interpolations) are never valid byte array
            // content: fold them all into the starting backtick as invalid
            // minutiae and report a single diagnostic.
            STNode clonedStartingBackTick = startingBackTick;
            for (int index = 0; index < items.size(); index++) {
                STNode item = items.get(index);
                clonedStartingBackTick =
                        SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
            }
            newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        }

        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
    }

    /**
     * Parse base16 keyword.
     *
     * @return base16 keyword node
     */
    private STNode parseBase16Keyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.BASE16_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.BASE16_KEYWORD);
            return parseBase16Keyword();
        }
    }

    /**
     * Parse base64 keyword.
     *
     * @return base64 keyword node
     */
    private STNode parseBase64Keyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.BASE64_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.BASE64_KEYWORD);
            return parseBase64Keyword();
        }
    }

    /**
     * Validate and parse byte array literal content.
     * An error is reported, if the content is invalid.
     *
     * @return parsed node
     */
    private STNode parseByteArrayContent() {
        STToken nextToken = peek();

        List items = new ArrayList<>();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode content = parseTemplateItem();
            items.add(content);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(items);
    }

    /**
     * Validate base16 literal content.
     *

     * Base16Literal := base16 WS ` HexGroup* WS `
     * HexGroup := WS HexDigit WS HexDigit
     * WS := WhiteSpaceChar*
     * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
     *
     * @param content the string surrounded by the backticks
     * @return true, if the string content is valid. false otherwise.
     */
    static boolean isValidBase16LiteralContent(String content) {
        char[] charArray = content.toCharArray();
        int hexDigitCount = 0;

        for (char c : charArray) {
            switch (c) {
                // Whitespace is allowed anywhere and ignored.
                case LexerTerminals.TAB:
                case LexerTerminals.NEWLINE:
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.SPACE:
                    break;
                default:
                    if (isHexDigit(c)) {
                        hexDigitCount++;
                    } else {
                        // Any non-whitespace, non-hex character is invalid.
                        return false;
                    }
                    break;
            }
        }

        // Hex digits must pair up into whole bytes (HexGroup is two digits).
        return hexDigitCount % 2 == 0;
    }

    /**
     * Validate base64 literal content.
     *

     * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
     * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char
     * PaddedBase64Group :=
     *    WS Base64Char WS Base64Char WS Base64Char WS PaddingChar
     *    | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar
     * Base64Char := A .. Z | a .. z | 0 .. 9 | + | /
     * PaddingChar := =
     * WS := WhiteSpaceChar*
     * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
     *
     * @param content the string surrounded by the backticks
     * @return true, if the string content is valid. false otherwise.
     */
    static boolean isValidBase64LiteralContent(String content) {
        char[] charArray = content.toCharArray();
        int base64CharCount = 0;
        int paddingCharCount = 0;

        for (char c : charArray) {
            switch (c) {
                // Whitespace is allowed anywhere and ignored.
                case LexerTerminals.TAB:
                case LexerTerminals.NEWLINE:
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.SPACE:
                    break;
                case LexerTerminals.EQUAL:
                    paddingCharCount++;
                    break;
                default:
                    if (isBase64Char(c)) {
                        if (paddingCharCount == 0) {
                            base64CharCount++;
                        } else {
                            // A base64 char after padding is invalid.
                            return false;
                        }
                    } else {
                        return false;
                    }
                    break;
            }
        }

        if (paddingCharCount > 2) {
            // At most two '=' padding chars are allowed.
            return false;
        } else if (paddingCharCount == 0) {
            // Unpadded content must form complete 4-char groups.
            return base64CharCount % 4 == 0;
        } else {
            // With padding, the final group has (4 - padding) base64 chars.
            return base64CharCount % 4 == 4 - paddingCharCount;
        }
    }

    /**
     *

     * Check whether a given char is a base64 char.
     *
     * Base64Char := A .. Z | a .. z | 0 .. 9 | + | /
     *
     * @param c character to check
     * @return true, if the character represents a base64 char. false otherwise.
     */
    static boolean isBase64Char(int c) {
        if ('a' <= c && c <= 'z') {
            return true;
        }
        if ('A' <= c && c <= 'Z') {
            return true;
        }
        if (c == '+' || c == '/') {
            return true;
        }
        return isDigit(c);
    }

    /**
     * Check whether a given char is an ASCII hex digit (0-9, a-f, A-F).
     *
     * @param c character to check
     * @return true, if the character is a hex digit. false otherwise.
     */
    static boolean isHexDigit(int c) {
        if ('a' <= c && c <= 'f') {
            return true;
        }
        if ('A' <= c && c <= 'F') {
            return true;
        }
        return isDigit(c);
    }

    /**
     * Check whether a given char is an ASCII decimal digit (0-9).
     *
     * @param c character to check
     * @return true, if the character is a digit. false otherwise.
     */
    static boolean isDigit(int c) {
        return ('0' <= c && c <= '9');
    }

    /**
     * Parse xml filter expression.
     *

     * xml-filter-expr := expression .&lt; xml-name-pattern &gt;
     *
     * @param lhsExpr Preceding expression of .&lt; token
     * @return Parsed node
     */
    private STNode parseXMLFilterExpression(STNode lhsExpr) {
        STNode xmlNamePatternChain = parseXMLFilterExpressionRhs();
        return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);
    }

    /**
     * Parse xml filter expression rhs.
     *
     * filter-expression-rhs := .&lt; xml-name-pattern &gt;
     *
     * @return Parsed node
     */
    private STNode parseXMLFilterExpressionRhs() {
        STNode dotLTToken = parseDotLTToken();
        return parseXMLNamePatternChain(dotLTToken);
    }

    /**
     * Parse xml name pattern chain.
     *
     * xml-name-pattern-chain := filter-expression-rhs | xml-element-children-step | xml-element-descendants-step
     * filter-expression-rhs := .&lt; xml-name-pattern &gt;
     * xml-element-children-step := /&lt; xml-name-pattern &gt;
     * xml-element-descendants-step := /**\/
     *
     * @param startToken Preceding token of xml name pattern
     * @return Parsed node
     */
    private STNode parseXMLNamePatternChain(STNode startToken) {
        startContext(ParserRuleContext.XML_NAME_PATTERN);
        STNode xmlNamePattern = parseXMLNamePattern();
        STNode gtToken = parseGTToken();
        endContext();

        // An empty name pattern is an error; attach the diagnostic to the
        // chain's start token.
        startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,
                DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
        return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);
    }

    /**
     * Parse .&lt; token.
     *
     * @return Parsed node
     */
    private STNode parseDotLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);
            return parseDotLTToken();
        }
    }

    /**
     * Parse xml name pattern.
     *
     * xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*
     *
     * @return Parsed node
     */
    private STNode parseXMLNamePattern() {
        List xmlAtomicNamePatternList = new ArrayList<>();
        STToken nextToken = peek();

        // Empty pattern: return an empty list (caller reports the diagnostic).
        if (isEndOfXMLNamePattern(nextToken.kind)) {
            return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
        }

        // First atomic name pattern has no leading separator.
        STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();
        xmlAtomicNamePatternList.add(xmlAtomicNamePattern);

        // Remaining (| atomic-name-pattern) pairs.
        STNode separator;
        while (!isEndOfXMLNamePattern(peek().kind)) {
            separator = parseXMLNamePatternSeparator();
            if (separator == null) {
                break;
            }
            xmlAtomicNamePatternList.add(separator);

            xmlAtomicNamePattern = parseXMLAtomicNamePattern();
            xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
        }

        return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
    }

    private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case GT_TOKEN:
            case EOF_TOKEN:
                return true;
            case IDENTIFIER_TOKEN:
            case ASTERISK_TOKEN:
            case COLON_TOKEN:
            default:
                return false;
        }
    }

    // Parses the separator between atomic name patterns: a pipe, or null at
    // the end of the pattern.
    private STNode parseXMLNamePatternSeparator() {
        STToken token = peek();
        switch (token.kind) {
            case PIPE_TOKEN:
                return consume();
            case GT_TOKEN:
            case EOF_TOKEN:
                return null;
            default:
                recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
                return parseXMLNamePatternSeparator();
        }
    }

    /**
     * Parse xml atomic name pattern.
     *

     * xml-atomic-name-pattern :=
     *    *
     *    | identifier
     *    | xml-namespace-prefix : identifier
     *    | xml-namespace-prefix : *
     *
     * @return Parsed node
     */
    private STNode parseXMLAtomicNamePattern() {
        startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
        STNode atomicNamePattern = parseXMLAtomicNamePatternBody();
        endContext();
        return atomicNamePattern;
    }

    private STNode parseXMLAtomicNamePatternBody() {
        STToken token = peek();
        STNode identifier;
        switch (token.kind) {
            case ASTERISK_TOKEN:
                // A bare `*` is a complete atomic name pattern.
                return consume();
            case IDENTIFIER_TOKEN:
                identifier = consume();
                break;
            default:
                recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
                return parseXMLAtomicNamePatternBody();
        }

        return parseXMLAtomicNameIdentifier(identifier);
    }

    private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
        STToken token = peek();
        if (token.kind == SyntaxKind.COLON_TOKEN) {
            STNode colon = consume();
            STToken nextToken = peek();
            if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
                STToken endToken = consume();
                return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);
            }
            // NOTE(review): when the colon is not followed by an identifier or
            // `*`, the consumed colon token is dropped and only a simple name
            // reference is returned — confirm this is the intended recovery.
        }
        return STNodeFactory.createSimpleNameReferenceNode(identifier);
    }

    /**
     * Parse xml step expression.
     *
     * xml-step-expr := expression xml-step-start
     *
     * @param lhsExpr Preceding expression of /*, /&lt;, or /**\/&lt; token
     * @return Parsed node
     */
    private STNode parseXMLStepExpression(STNode lhsExpr) {
        STNode xmlStepStart = parseXMLStepStart();
        return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);
    }

    /**
     * Parse xml step start.
     *
     * xml-step-start :=
     *    xml-all-children-step
     *    | xml-element-children-step
     *    | xml-element-descendants-step
     * xml-all-children-step := /*
     *
     * @return Parsed node
     */
    private STNode parseXMLStepStart() {
        STToken token = peek();
        STNode startToken;

        switch (token.kind) {
            case SLASH_ASTERISK_TOKEN:
                // `/*` takes no name-pattern chain.
                return consume();
            case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
                startToken = parseDoubleSlashDoubleAsteriskLTToken();
                break;
            case SLASH_LT_TOKEN:
            default:
                startToken = parseSlashLTToken();
                break;
        }
        return parseXMLNamePatternChain(startToken);
    }

    /**
     * Parse /&lt; token.
     *
     * @return Parsed node
     */
    private STNode parseSlashLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);
            return parseSlashLTToken();
        }
    }

    /**
     * Parse /**\/&lt; token.
     *
     * @return Parsed node
     */
    private STNode parseDoubleSlashDoubleAsteriskLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
            return parseDoubleSlashDoubleAsteriskLTToken();
        }
    }

    /**
     * Parse match statement.
     *

     * match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]
     *
     * @return Match statement
     */
    private STNode parseMatchStatement() {
        startContext(ParserRuleContext.MATCH_STMT);
        STNode matchKeyword = parseMatchKeyword();
        STNode actionOrExpr = parseActionOrExpression();
        startContext(ParserRuleContext.MATCH_BODY);
        STNode openBrace = parseOpenBrace();
        // Collect clauses until the close brace (or another end-of-clauses token).
        List matchClausesList = new ArrayList<>();
        while (!isEndOfMatchClauses(peek().kind)) {
            STNode clause = parseMatchClause();
            matchClausesList.add(clause);
        }
        STNode matchClauses = STNodeFactory.createNodeList(matchClausesList);
        // A match statement must have at least one clause.
        if (isNodeListEmpty(matchClauses)) {
            openBrace = SyntaxErrors.addDiagnostic(openBrace,
                    DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);
        }

        STNode closeBrace = parseCloseBrace();
        endContext();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace,
                onFailClause);
    }

    /**
     * Parse match keyword.
     *
     * @return Match keyword node
     */
    private STNode parseMatchKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.MATCH_KEYWORD);
            return parseMatchKeyword();
        }
    }

    private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case TYPE_KEYWORD:
                return true;
            default:
                return isEndOfStatements();
        }
    }

    /**
     * Parse a single match clause.
     *
     * match-clause := match-pattern-list [match-guard] =&gt; block-stmt
     * match-guard := if expression
     *
     * @return A match clause
     */
    private STNode parseMatchClause() {
        STNode matchPatterns = parseMatchPatternList();
        STNode matchGuard = parseMatchGuard();
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode blockStmt = parseBlockNode();
        // If no pattern could be parsed, substitute a missing-identifier
        // constant pattern and attach the missing-pattern diagnostic to the
        // nearest following node (guard if present, else the arrow).
        if (isNodeListEmpty(matchPatterns)) {
            STToken identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            STNode constantPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);
            matchPatterns = STNodeFactory.createNodeList(constantPattern);

            DiagnosticErrorCode errorCode = DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN;
            if (matchGuard != null) {
                matchGuard = SyntaxErrors.addDiagnostic(matchGuard, errorCode);
            } else {
                rightDoubleArrow = SyntaxErrors.addDiagnostic(rightDoubleArrow, errorCode);
            }
        }

        return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
    }

    /**
     * Parse match guard.
     *

     * match-guard := if expression
     *
     * @return Match guard
     */
    private STNode parseMatchGuard() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IF_KEYWORD:
                STNode ifKeyword = parseIfKeyword();
                STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
                return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
            case RIGHT_DOUBLE_ARROW_TOKEN:
                // No guard: the clause goes straight to `=>`.
                return STNodeFactory.createEmptyNode();
            default:
                recover(nextToken, ParserRuleContext.OPTIONAL_MATCH_GUARD);
                return parseMatchGuard();
        }
    }

    /**
     * Parse match patterns list.
     *
     * match-pattern-list := match-pattern (| match-pattern)*
     *
     * @return Match patterns list
     */
    private STNode parseMatchPatternList() {
        startContext(ParserRuleContext.MATCH_PATTERN);
        List matchClauses = new ArrayList<>();
        while (!isEndOfMatchPattern(peek().kind)) {
            STNode clause = parseMatchPattern();
            if (clause == null) {
                break;
            }
            matchClauses.add(clause);

            STNode seperator = parseMatchPatternListMemberRhs();
            if (seperator == null) {
                break;
            }
            matchClauses.add(seperator);
        }

        endContext();
        return STNodeFactory.createNodeList(matchClauses);
    }

    private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case PIPE_TOKEN:
            case IF_KEYWORD:
            case RIGHT_DOUBLE_ARROW_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse match pattern.
     *
     * match-pattern := var binding-pattern
     *    | wildcard-match-pattern
     *    | const-pattern
     *    | list-match-pattern
     *    | mapping-match-pattern
     *    | error-match-pattern
     *
     * @return Match pattern
     */
    private STNode parseMatchPattern() {
        STToken nextToken = peek();
        if (isPredeclaredIdentifier(nextToken.kind)) {
            // Could be an error match pattern or a qualified const reference;
            // disambiguate after reading the qualified identifier.
            STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
        }

        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
                return parseSimpleConstExpr();
            case VAR_KEYWORD:
                return parseVarTypedBindingPattern();
            case OPEN_BRACKET_TOKEN:
                return parseListMatchPattern();
            case OPEN_BRACE_TOKEN:
                return parseMappingMatchPattern();
            case ERROR_KEYWORD:
                return parseErrorMatchPattern();
            default:
                recover(nextToken, ParserRuleContext.MATCH_PATTERN_START);
                return parseMatchPattern();
        }
    }

    private STNode parseMatchPatternListMemberRhs() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                return parsePipeToken();
            case IF_KEYWORD:
            case RIGHT_DOUBLE_ARROW_TOKEN:
                // End of the pattern list: a guard or the arrow follows.
                return null;
            default:
                recover(nextToken, ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
                return parseMatchPatternListMemberRhs();
        }
    }

    /**
     * Parse var typed binding pattern.
     *

     * var binding-pattern
     *
     * @return Parsed typed binding pattern node
     */
    private STNode parseVarTypedBindingPattern() {
        STNode varKeyword = parseVarKeyword();
        STNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);
        STNode bindingPattern = parseBindingPattern();
        return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);
    }

    /**
     * Parse var keyword.
     *
     * @return Var keyword node
     */
    private STNode parseVarKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.VAR_KEYWORD) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.VAR_KEYWORD);
            return parseVarKeyword();
        }
    }

    /**
     * Parse list match pattern.
     *
     * list-match-pattern := [ list-member-match-patterns ]
     * list-member-match-patterns :=
     *    match-pattern (, match-pattern)* [, rest-match-pattern]
     *    | [ rest-match-pattern ]
     *
     * @return Parsed list match pattern node
     */
    private STNode parseListMatchPattern() {
        startContext(ParserRuleContext.LIST_MATCH_PATTERN);
        STNode openBracketToken = parseOpenBracket();
        List matchPatternList = new ArrayList<>();
        STNode listMatchPatternMemberRhs = null;
        boolean isEndOfFields = false;

        while (!isEndOfListMatchPattern()) {
            STNode listMatchPatternMember = parseListMatchPatternMember();
            matchPatternList.add(listMatchPatternMember);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();

            if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                // rest-match-pattern must be the last member; everything after
                // it is handled as invalid below.
                isEndOfFields = true;
                break;
            }

            if (listMatchPatternMemberRhs != null) {
                matchPatternList.add(listMatchPatternMemberRhs);
            } else {
                break;
            }
        }

        // Members (and separators) found after a rest-match-pattern are
        // attached to the previous node as invalid-node minutiae.
        while (isEndOfFields && listMatchPatternMemberRhs != null) {
            updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);

            if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
                break;
            }

            STNode invalidField = parseListMatchPatternMember();
            updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
        }

        STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
        STNode closeBracketToken = parseCloseBracket();
        endContext();

        return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
    }

    public boolean isEndOfListMatchPattern() {
        switch (peek().kind) {
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseListMatchPatternMember() {
        STNode nextToken = peek();
        switch (nextToken.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            default:
                // Any other member is an ordinary match pattern.
                return parseMatchPattern();
        }
    }

    /**
     * Parse rest match pattern.
     *

\n * \n * rest-match-pattern := ... var variable-name\n * \n *

\n *\n * @return Parsed rest match pattern node\n */\n private STNode parseRestMatchPattern() {\n startContext(ParserRuleContext.REST_MATCH_PATTERN);\n STNode ellipsisToken = parseEllipsis();\n STNode varKeywordToken = parseVarKeyword();\n STNode variableName = parseVariableName();\n endContext();\n\n STSimpleNameReferenceNode simpleNameReferenceNode =\n (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);\n return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);\n }\n\n private STNode parseListMatchPatternMemberRhs() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_BRACKET_TOKEN:\n case EOF_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);\n return parseListMatchPatternMemberRhs();\n }\n }\n\n /**\n * Parse mapping match pattern.\n *

\n * mapping-match-pattern := { field-match-patterns }\n *
\n * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]\n * | [ rest-match-pattern ]\n *
\n * field-match-pattern := field-name : match-pattern\n *
\n * rest-match-pattern := ... var variable-name\n *

\n *\n * @return Parsed Node.\n */\n private STNode parseMappingMatchPattern() {\n startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);\n STNode openBraceToken = parseOpenBrace();\n STNode fieldMatchPatterns = parseFieldMatchPatternList();\n STNode closeBraceToken = parseCloseBrace();\n endContext();\n return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken);\n }\n\n private STNode parseFieldMatchPatternList() {\n List fieldMatchPatterns = new ArrayList<>();\n\n STNode fieldMatchPatternMember = parseFieldMatchPatternMember();\n if (fieldMatchPatternMember == null) {\n return STNodeFactory.createEmptyNodeList();\n }\n\n fieldMatchPatterns.add(fieldMatchPatternMember);\n if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {\n invalidateExtraFieldMatchPatterns(fieldMatchPatterns);\n return STNodeFactory.createNodeList(fieldMatchPatterns);\n }\n\n return parseFieldMatchPatternList(fieldMatchPatterns);\n }\n\n private STNode parseFieldMatchPatternList(List fieldMatchPatterns) {\n while (!isEndOfMappingMatchPattern()) {\n STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();\n if (fieldMatchPatternRhs == null) {\n break;\n }\n\n fieldMatchPatterns.add(fieldMatchPatternRhs);\n STNode fieldMatchPatternMember = parseFieldMatchPatternMember();\n if (fieldMatchPatternMember == null) {\n fieldMatchPatternMember = createMissingFieldMatchPattern();\n }\n\n fieldMatchPatterns.add(fieldMatchPatternMember);\n if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {\n invalidateExtraFieldMatchPatterns(fieldMatchPatterns);\n break;\n }\n }\n\n return STNodeFactory.createNodeList(fieldMatchPatterns);\n }\n\n private STNode createMissingFieldMatchPattern() {\n STNode fieldName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n STNode colon = SyntaxErrors.createMissingToken(SyntaxKind.COLON_TOKEN);\n STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n STNode 
matchPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);\n STNode fieldMatchPatternMember = STNodeFactory.createFieldMatchPatternNode(fieldName, colon, matchPattern);\n fieldMatchPatternMember = SyntaxErrors.addDiagnostic(fieldMatchPatternMember,\n DiagnosticErrorCode.ERROR_MISSING_FIELD_MATCH_PATTERN_MEMBER);\n return fieldMatchPatternMember;\n }\n\n /**\n * Parse and invalidate all field match pattern members after a rest-match-pattern.\n *\n * @param fieldMatchPatterns field-match-patterns list\n */\n private void invalidateExtraFieldMatchPatterns(List fieldMatchPatterns) {\n while (!isEndOfMappingMatchPattern()) {\n STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();\n if (fieldMatchPatternRhs == null) {\n break;\n }\n\n STNode fieldMatchPatternMember = parseFieldMatchPatternMember();\n if (fieldMatchPatternMember == null) {\n updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs,\n DiagnosticErrorCode.ERROR_INVALID_TOKEN, ((STToken) fieldMatchPatternRhs).text());\n } else {\n updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs, null);\n updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternMember,\n DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);\n }\n }\n }\n\n private STNode parseFieldMatchPatternMember() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case IDENTIFIER_TOKEN:\n return parseFieldMatchPattern();\n case ELLIPSIS_TOKEN:\n return parseRestMatchPattern();\n case CLOSE_BRACE_TOKEN:\n case EOF_TOKEN:\n \n return null;\n default:\n recover(nextToken, ParserRuleContext.FIELD_MATCH_PATTERNS_START);\n return parseFieldMatchPatternMember();\n }\n }\n\n /**\n * Parse filed match pattern.\n *

\n * field-match-pattern := field-name : match-pattern\n *

\n *\n * @return Parsed field match pattern node\n */\n public STNode parseFieldMatchPattern() {\n STNode fieldNameNode = parseVariableName();\n STNode colonToken = parseColon();\n STNode matchPattern = parseMatchPattern();\n return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);\n }\n\n public boolean isEndOfMappingMatchPattern() {\n switch (peek().kind) {\n case CLOSE_BRACE_TOKEN:\n case EOF_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n private STNode parseFieldMatchPatternRhs() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_BRACE_TOKEN:\n case EOF_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);\n return parseFieldMatchPatternRhs();\n }\n }\n\n private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case OPEN_PAREN_TOKEN:\n STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,\n ParserRuleContext.ERROR_KEYWORD);\n startContext(ParserRuleContext.ERROR_MATCH_PATTERN); \n return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);\n default:\n if (isMatchPatternEnd(peek().kind)) {\n return typeRefOrConstExpr;\n }\n recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN);\n return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);\n }\n }\n\n private boolean isMatchPatternEnd(SyntaxKind tokenKind) {\n switch (tokenKind) {\n case RIGHT_DOUBLE_ARROW_TOKEN:\n case COMMA_TOKEN:\n case CLOSE_BRACE_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n case CLOSE_PAREN_TOKEN:\n case PIPE_TOKEN:\n case IF_KEYWORD:\n case EOF_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse functional match pattern.\n *

\n * error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )\n * error-arg-list-match-pattern :=\n * error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]\n * | [error-field-match-patterns]\n * error-message-match-pattern := simple-match-pattern\n * error-cause-match-pattern := simple-match-pattern | error-match-pattern\n * simple-match-pattern :=\n * wildcard-match-pattern\n * | const-pattern\n * | var variable-name\n * error-field-match-patterns :=\n * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]\n * | rest-match-pattern\n * named-arg-match-pattern := arg-name = match-pattern\n *

\n *\n * @return Parsed functional match pattern node.\n */\n private STNode parseErrorMatchPattern() {\n startContext(ParserRuleContext.ERROR_MATCH_PATTERN);\n STNode errorKeyword = consume();\n return parseErrorMatchPattern(errorKeyword);\n }\n\n private STNode parseErrorMatchPattern(STNode errorKeyword) {\n STToken nextToken = peek();\n STNode typeRef;\n switch (nextToken.kind) {\n case OPEN_PAREN_TOKEN:\n typeRef = STNodeFactory.createEmptyNode();\n break;\n default:\n if (isPredeclaredIdentifier(nextToken.kind)) {\n typeRef = parseTypeReference();\n break;\n }\n recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);\n return parseErrorMatchPattern(errorKeyword);\n }\n return parseErrorMatchPattern(errorKeyword, typeRef);\n }\n\n private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {\n STNode openParenthesisToken = parseOpenParenthesis();\n STNode argListMatchPatternNode = parseErrorArgListMatchPatterns();\n STNode closeParenthesisToken = parseCloseParenthesis();\n endContext();\n return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParenthesisToken,\n argListMatchPatternNode, closeParenthesisToken);\n }\n\n private STNode parseErrorArgListMatchPatterns() {\n List argListMatchPatterns = new ArrayList<>();\n\n if (isEndOfErrorFieldMatchPatterns()) {\n return STNodeFactory.createNodeList(argListMatchPatterns);\n }\n startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);\n STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);\n endContext();\n\n if (isSimpleMatchPattern(firstArg.kind)) {\n\n argListMatchPatterns.add(firstArg);\n STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);\n if (argEnd != null) {\n \n STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);\n if (isValidSecondArgMatchPattern(secondArg.kind)) {\n 
argListMatchPatterns.add(argEnd);\n argListMatchPatterns.add(secondArg);\n } else {\n updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);\n updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,\n DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);\n }\n }\n } else {\n if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&\n firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {\n addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);\n } else {\n argListMatchPatterns.add(firstArg);\n }\n }\n\n parseErrorFieldMatchPatterns(argListMatchPatterns);\n return STNodeFactory.createNodeList(argListMatchPatterns);\n }\n\n private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {\n switch (matchPatternKind) {\n case IDENTIFIER_TOKEN:\n case SIMPLE_NAME_REFERENCE:\n case QUALIFIED_NAME_REFERENCE:\n case NUMERIC_LITERAL:\n case STRING_LITERAL:\n case NULL_LITERAL:\n case NIL_LITERAL:\n case BOOLEAN_LITERAL:\n case TYPED_BINDING_PATTERN:\n case UNARY_EXPRESSION:\n return true;\n default:\n return false;\n }\n }\n\n private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {\n switch (syntaxKind) {\n case ERROR_MATCH_PATTERN:\n case NAMED_ARG_MATCH_PATTERN:\n case REST_MATCH_PATTERN:\n return true;\n default:\n if (isSimpleMatchPattern(syntaxKind)) {\n return true;\n }\n return false;\n }\n }\n\n /**\n * Parse error field match patterns.\n * error-field-match-patterns :=\n * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]\n * | rest-match-pattern\n * named-arg-match-pattern := arg-name = match-pattern\n * @param argListMatchPatterns\n */\n private void parseErrorFieldMatchPatterns(List argListMatchPatterns) {\n SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;\n while (!isEndOfErrorFieldMatchPatterns()) {\n STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);\n if (argEnd == null) {\n \n break;\n }\n 
STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);\n DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);\n if (errorCode == null) {\n argListMatchPatterns.add(argEnd);\n argListMatchPatterns.add(currentArg);\n lastValidArgKind = currentArg.kind;\n } else if (argListMatchPatterns.size() == 0) {\n addInvalidNodeToNextToken(argEnd, null);\n addInvalidNodeToNextToken(currentArg, errorCode);\n } else {\n updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);\n updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);\n }\n }\n }\n\n private boolean isEndOfErrorFieldMatchPatterns() {\n \n return isEndOfErrorFieldBindingPatterns();\n }\n\n private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return consume();\n case CLOSE_PAREN_TOKEN:\n return null;\n default:\n recover(peek(), currentCtx);\n return parseErrorArgListMatchPatternEnd(currentCtx);\n }\n }\n\n private STNode parseErrorArgListMatchPattern(ParserRuleContext context) {\n STToken nextToken = peek();\n if (isPredeclaredIdentifier(nextToken.kind)) {\n return parseNamedArgOrSimpleMatchPattern();\n }\n\n switch (nextToken.kind) {\n case ELLIPSIS_TOKEN:\n return parseRestMatchPattern();\n case OPEN_PAREN_TOKEN:\n case NULL_KEYWORD:\n case TRUE_KEYWORD:\n case FALSE_KEYWORD:\n case PLUS_TOKEN:\n case MINUS_TOKEN:\n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\n case HEX_FLOATING_POINT_LITERAL_TOKEN:\n case STRING_LITERAL_TOKEN:\n case OPEN_BRACKET_TOKEN:\n case OPEN_BRACE_TOKEN:\n case ERROR_KEYWORD:\n return parseMatchPattern();\n case VAR_KEYWORD:\n STNode varType = createBuiltinSimpleNameReference(consume());\n STNode variableName = createCaptureOrWildcardBP(parseVariableName());\n return 
STNodeFactory.createTypedBindingPatternNode(varType, variableName);\n case CLOSE_PAREN_TOKEN:\n return SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\n DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN);\n default:\n recover(nextToken, context);\n return parseErrorArgListMatchPattern(context);\n }\n }\n\n private STNode parseNamedArgOrSimpleMatchPattern() {\n STNode constRefExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);\n if (constRefExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || peek().kind != SyntaxKind.EQUAL_TOKEN) {\n return constRefExpr;\n }\n\n \n return parseNamedArgMatchPattern(((STSimpleNameReferenceNode) constRefExpr).name);\n }\n\n /**\n * Parses the next named arg match pattern.\n *
\n * named-arg-match-pattern := arg-name = match-pattern\n *
\n *
\n *\n * @return arg match pattern list node added the new arg match pattern\n */\n private STNode parseNamedArgMatchPattern(STNode identifier) {\n startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);\n STNode equalToken = parseAssignOp();\n STNode matchPattern = parseMatchPattern();\n endContext();\n return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);\n }\n\n private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {\n switch (currentArgKind) {\n case NAMED_ARG_MATCH_PATTERN:\n case REST_MATCH_PATTERN:\n \n if (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {\n return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;\n }\n return null;\n default:\n return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;\n }\n }\n\n /**\n * Parse markdown documentation.\n *\n * @return markdown documentation node\n */\n private STNode parseMarkdownDocumentation() {\n List markdownDocLineList = new ArrayList<>();\n\n \n \n \n \n \n STToken nextToken = peek();\n while (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {\n STToken documentationString = consume();\n STNode parsedDocLines = parseDocumentationString(documentationString);\n appendParsedDocumentationLines(markdownDocLineList, parsedDocLines);\n nextToken = peek();\n }\n\n STNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);\n return STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);\n }\n\n /**\n * Parse documentation string.\n *\n * @return markdown documentation line list node\n */\n private STNode parseDocumentationString(STToken documentationStringToken) {\n List leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());\n Collection diagnostics = new ArrayList<>((documentationStringToken.diagnostics()));\n\n CharReader charReader = CharReader.from(documentationStringToken.text());\n DocumentationLexer documentationLexer = new 
DocumentationLexer(charReader, leadingTriviaList, diagnostics);\n AbstractTokenReader tokenReader = new TokenReader(documentationLexer);\n DocumentationParser documentationParser = new DocumentationParser(tokenReader);\n return documentationParser.parse();\n }\n\n private List getLeadingTriviaList(STNode leadingMinutiaeNode) {\n List leadingTriviaList = new ArrayList<>();\n int bucketCount = leadingMinutiaeNode.bucketCount();\n for (int i = 0; i < bucketCount; i++) {\n leadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));\n }\n\n return leadingTriviaList;\n }\n\n private void appendParsedDocumentationLines(List markdownDocLineList, STNode parsedDocLines) {\n int bucketCount = parsedDocLines.bucketCount();\n for (int i = 0; i < bucketCount; i++) {\n STNode markdownDocLine = parsedDocLines.childInBucket(i);\n markdownDocLineList.add(markdownDocLine);\n }\n }\n\n \n\n /**\n * Parse any statement that starts with a token that has ambiguity between being\n * a type-desc or an expression.\n *\n * @param annots Annotations\n * @param qualifiers\n * @return Statement node\n */\n private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List qualifiers) {\n startContext(ParserRuleContext.AMBIGUOUS_STMT);\n STNode typeOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true);\n return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);\n }\n\n private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {\n if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\n List varDeclQualifiers = new ArrayList<>();\n switchContext(ParserRuleContext.VAR_DECL_STMT);\n return parseVarDeclRhs(annots, varDeclQualifiers, typedBindingPatternOrExpr, false);\n }\n\n STNode expr = getExpression(typedBindingPatternOrExpr);\n expr = getExpression(parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true));\n return parseStatementStartWithExprRhs(expr);\n }\n\n private STNode parseTypedBindingPatternOrExpr(boolean 
allowAssignment) {\n List typeDescQualifiers = new ArrayList<>();\n return parseTypedBindingPatternOrExpr(typeDescQualifiers, allowAssignment);\n }\n\n private STNode parseTypedBindingPatternOrExpr(List qualifiers, boolean allowAssignment) {\n parseTypeDescQualifiers(qualifiers);\n STToken nextToken = peek();\n STNode typeOrExpr;\n if (isPredeclaredIdentifier(nextToken.kind)) {\n reportInvalidQualifierList(qualifiers);\n typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\n return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\n }\n \n switch (nextToken.kind) {\n case OPEN_PAREN_TOKEN:\n reportInvalidQualifierList(qualifiers);\n return parseTypedBPOrExprStartsWithOpenParenthesis();\n case FUNCTION_KEYWORD:\n return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);\n case OPEN_BRACKET_TOKEN:\n reportInvalidQualifierList(qualifiers);\n typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());\n return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\n \n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case STRING_LITERAL_TOKEN:\n case NULL_KEYWORD:\n case TRUE_KEYWORD:\n case FALSE_KEYWORD:\n case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\n case HEX_FLOATING_POINT_LITERAL_TOKEN:\n reportInvalidQualifierList(qualifiers);\n STNode basicLiteral = parseBasicLiteral();\n return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);\n default:\n if (isValidExpressionStart(nextToken.kind, 1)) {\n reportInvalidQualifierList(qualifiers);\n return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());\n }\n\n return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);\n }\n }\n\n /**\n * Parse the component after the ambiguous starting node. Ambiguous node could be either an expr\n * or a type-desc. 
The component followed by this ambiguous node could be the binding-pattern or\n * the expression-rhs.\n *\n * @param typeOrExpr Type desc or the expression\n * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a\n * valid lvalue expression\n * @return Typed-binding-pattern node or an expression node\n */\n private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case PIPE_TOKEN:\n case BITWISE_AND_TOKEN:\n STToken nextNextToken = peek(2);\n if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\n return typeOrExpr;\n }\n\n STNode pipeOrAndToken = parseBinaryOperator();\n STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);\n if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\n STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;\n typeOrExpr = getTypeDescFromExpr(typeOrExpr);\n\n STNode newTypeDesc = mergeTypes(typeOrExpr, pipeOrAndToken, typedBP.typeDescriptor);\n return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);\n }\n\n \n if (peek().kind == SyntaxKind.EQUAL_TOKEN) {\n return createCaptureBPWithMissingVarName(typeOrExpr, pipeOrAndToken, rhsTypedBPOrExpr);\n }\n\n return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,\n pipeOrAndToken, rhsTypedBPOrExpr);\n case SEMICOLON_TOKEN:\n if (isDefiniteExpr(typeOrExpr.kind)) {\n return typeOrExpr;\n }\n\n if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {\n \n STNode typeDesc = getTypeDescFromExpr(typeOrExpr);\n return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n }\n\n return typeOrExpr;\n case IDENTIFIER_TOKEN:\n case QUESTION_MARK_TOKEN:\n if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {\n \n STNode typeDesc = getTypeDescFromExpr(typeOrExpr);\n return 
parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n }\n\n return typeOrExpr;\n case EQUAL_TOKEN:\n return typeOrExpr;\n case OPEN_BRACKET_TOKEN:\n return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,\n ParserRuleContext.AMBIGUOUS_STMT);\n case OPEN_BRACE_TOKEN: \n case ERROR_KEYWORD: \n STNode typeDesc = getTypeDescFromExpr(typeOrExpr);\n return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);\n default:\n \n if (isCompoundAssignment(nextToken.kind)) {\n return typeOrExpr;\n }\n\n \n \n if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {\n return typeOrExpr;\n }\n\n STToken token = peek();\n SyntaxKind typeOrExprKind = typeOrExpr.kind;\n if (typeOrExprKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||\n typeOrExprKind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\n recover(token, ParserRuleContext.BINDING_PATTERN_OR_VAR_REF_RHS);\n } else {\n recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS);\n }\n\n return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);\n }\n }\n\n private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) {\n lhsType = getTypeDescFromExpr(lhsType);\n rhsType = getTypeDescFromExpr(rhsType);\n\n STNode newTypeDesc = mergeTypes(lhsType, separatorToken, rhsType);\n\n STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\n ParserRuleContext.VARIABLE_NAME);\n STNode captureBP = STNodeFactory.createCaptureBindingPatternNode(identifier);\n\n return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, captureBP);\n }\n\n private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {\n \n\n \n \n typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n return parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);\n }\n\n private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {\n STNode exprOrTypeDesc 
= parseTypedDescOrExprStartsWithOpenParenthesis();\n if (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {\n return parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);\n }\n\n return parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);\n }\n\n private boolean isDefiniteTypeDesc(SyntaxKind kind) {\n return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.FUTURE_TYPE_DESC) <= 0;\n }\n\n private boolean isDefiniteExpr(SyntaxKind kind) {\n if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\n return false;\n }\n\n return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 && kind.compareTo(SyntaxKind.ERROR_CONSTRUCTOR) <= 0;\n }\n\n private boolean isDefiniteAction(SyntaxKind kind) {\n return kind.compareTo(SyntaxKind.REMOTE_METHOD_CALL_ACTION) >= 0 && \n kind.compareTo(SyntaxKind.COMMIT_ACTION) <= 0;\n }\n\n /**\n * Parse type or expression that starts with open parenthesis. Possible options are:\n * 1) () - nil type-desc or nil-literal\n * 2) (T) - Parenthesized type-desc\n * 3) (expr) - Parenthesized expression\n * 4) (param, param, ..) 
- Anon function params\n *\n * @return Type-desc or expression node\n */\n private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {\n STNode openParen = parseOpenParenthesis();\n STToken nextToken = peek();\n\n if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {\n STNode closeParen = parseCloseParenthesis();\n return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);\n }\n\n STNode typeOrExpr = parseTypeDescOrExpr();\n if (isAction(typeOrExpr)) {\n STNode closeParen = parseCloseParenthesis();\n return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,\n closeParen);\n }\n\n if (isExpression(typeOrExpr.kind)) {\n startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);\n return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);\n }\n\n STNode typeDescNode = getTypeDescFromExpr(typeOrExpr);\n typeDescNode = parseComplexTypeDescriptor(typeDescNode, ParserRuleContext.TYPE_DESC_IN_PARENTHESIS, false);\n\n STNode closeParen = parseCloseParenthesis();\n return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeDescNode, closeParen);\n }\n\n /**\n * Parse type-desc or expression. 
This method does not handle binding patterns.\n *\n * @return Type-desc node or expression node\n */\n private STNode parseTypeDescOrExpr() {\n List typeDescQualifiers = new ArrayList<>();\n return parseTypeDescOrExpr(typeDescQualifiers);\n }\n\n private STNode parseTypeDescOrExpr(List qualifiers) {\n parseTypeDescQualifiers(qualifiers);\n STToken nextToken = peek();\n STNode typeOrExpr;\n switch (nextToken.kind) {\n case OPEN_PAREN_TOKEN:\n reportInvalidQualifierList(qualifiers);\n typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();\n break;\n case FUNCTION_KEYWORD:\n typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);\n break;\n case IDENTIFIER_TOKEN:\n reportInvalidQualifierList(qualifiers);\n typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);\n return parseTypeDescOrExprRhs(typeOrExpr);\n case OPEN_BRACKET_TOKEN:\n reportInvalidQualifierList(qualifiers);\n typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());\n break;\n \n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case STRING_LITERAL_TOKEN:\n case NULL_KEYWORD:\n case TRUE_KEYWORD:\n case FALSE_KEYWORD:\n case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:\n case HEX_FLOATING_POINT_LITERAL_TOKEN:\n reportInvalidQualifierList(qualifiers);\n STNode basicLiteral = parseBasicLiteral();\n return parseTypeDescOrExprRhs(basicLiteral);\n default:\n if (isValidExpressionStart(nextToken.kind, 1)) {\n reportInvalidQualifierList(qualifiers);\n return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());\n }\n return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\n }\n\n if (isDefiniteTypeDesc(typeOrExpr.kind)) {\n return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n }\n\n return parseTypeDescOrExprRhs(typeOrExpr);\n }\n\n private boolean isExpression(SyntaxKind kind) {\n switch (kind) {\n case NUMERIC_LITERAL:\n case 
STRING_LITERAL_TOKEN:\n case NIL_LITERAL:\n case NULL_LITERAL:\n case BOOLEAN_LITERAL:\n return true;\n default:\n return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&\n kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;\n }\n }\n\n /**\n * Parse statement that starts with an empty parenthesis. Empty parenthesis can be\n * 1) Nil literal\n * 2) Nil type-desc\n * 3) Anon-function params\n *\n * @param openParen Open parenthesis\n * @param closeParen Close parenthesis\n * @return Parsed node\n */\n private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case RIGHT_DOUBLE_ARROW_TOKEN:\n STNode params = STNodeFactory.createEmptyNodeList();\n STNode anonFuncParam =\n STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);\n return parseImplicitAnonFunc(anonFuncParam, false);\n default:\n return STNodeFactory.createNilLiteralNode(openParen, closeParen);\n }\n }\n\n private STNode parseAnonFuncExprOrTypedBPWithFuncType(List qualifiers) {\n STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers);\n if (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {\n return exprOrTypeDesc;\n }\n\n return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);\n }\n\n /**\n * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.\n *\n * @param qualifiers Preceding qualifiers\n * @return Anon-func-expr or function-type-desc\n */\n private STNode parseAnonFuncExprOrFuncTypeDesc(List qualifiers) {\n startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);\n STNode qualifierList;\n STNode functionKeyword = parseFunctionKeyword();\n STNode funcSignature;\n\n if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {\n funcSignature = parseFuncSignature(true);\n\n \n STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, true);\n qualifierList = nodes[0];\n 
functionKeyword = nodes[1];\n\n endContext();\n return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature);\n }\n\n funcSignature = STNodeFactory.createEmptyNode();\n\n \n STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, false);\n qualifierList = nodes[0];\n functionKeyword = nodes[1];\n\n STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,\n funcSignature);\n if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) {\n switchContext(ParserRuleContext.VAR_DECL_STMT);\n return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n }\n return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n }\n\n private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) {\n ParserRuleContext currentCtx = getCurrentContext();\n switch (peek().kind) {\n case OPEN_BRACE_TOKEN:\n case RIGHT_DOUBLE_ARROW_TOKEN:\n if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {\n switchContext(ParserRuleContext.EXPRESSION_STATEMENT);\n }\n startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);\n \n funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);\n\n STNode funcBody = parseAnonFuncBody(false);\n STNode annots = STNodeFactory.createEmptyNodeList();\n STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList,\n functionKeyword, funcSignature, funcBody);\n return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);\n case IDENTIFIER_TOKEN:\n default:\n STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,\n funcSignature);\n if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {\n switchContext(ParserRuleContext.VAR_DECL_STMT);\n return parseComplexTypeDescriptor(funcTypeDesc, 
ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,\n true);\n }\n return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n }\n }\n\n private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {\n STToken nextToken = peek();\n STNode typeDesc;\n switch (nextToken.kind) {\n case PIPE_TOKEN:\n case BITWISE_AND_TOKEN:\n STToken nextNextToken = peek(2);\n if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {\n return typeOrExpr;\n }\n\n STNode pipeOrAndToken = parseBinaryOperator();\n STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();\n if (isExpression(rhsTypeDescOrExpr.kind)) {\n return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,\n pipeOrAndToken, rhsTypeDescOrExpr);\n }\n\n typeDesc = getTypeDescFromExpr(typeOrExpr);\n rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);\n return mergeTypes(typeDesc, pipeOrAndToken, rhsTypeDescOrExpr);\n case IDENTIFIER_TOKEN:\n case QUESTION_MARK_TOKEN:\n \n \n \n typeDesc = parseComplexTypeDescriptor(getTypeDescFromExpr(typeOrExpr), \n ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);\n return typeDesc;\n case SEMICOLON_TOKEN:\n return getTypeDescFromExpr(typeOrExpr);\n case EQUAL_TOKEN:\n case CLOSE_PAREN_TOKEN:\n case CLOSE_BRACE_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n case EOF_TOKEN:\n case COMMA_TOKEN:\n return typeOrExpr;\n case OPEN_BRACKET_TOKEN:\n return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,\n ParserRuleContext.AMBIGUOUS_STMT);\n case ELLIPSIS_TOKEN:\n STNode ellipsis = parseEllipsis();\n typeOrExpr = getTypeDescFromExpr(typeOrExpr);\n return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);\n default:\n \n if (isCompoundAssignment(nextToken.kind)) {\n return typeOrExpr;\n }\n\n \n \n if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {\n return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);\n }\n\n recover(peek(), 
ParserRuleContext.TYPE_DESC_OR_EXPR_RHS);\n return parseTypeDescOrExprRhs(typeOrExpr);\n }\n }\n\n private boolean isAmbiguous(STNode node) {\n switch (node.kind) {\n case SIMPLE_NAME_REFERENCE:\n case QUALIFIED_NAME_REFERENCE:\n case NIL_LITERAL:\n case NULL_LITERAL:\n case NUMERIC_LITERAL:\n case STRING_LITERAL:\n case BOOLEAN_LITERAL:\n case BRACKETED_LIST:\n return true;\n case BINARY_EXPRESSION:\n STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\n if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\n binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\n return false;\n }\n return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\n case BRACED_EXPRESSION:\n return isAmbiguous(((STBracedExpressionNode) node).expression);\n case INDEXED_EXPRESSION:\n STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;\n if (!isAmbiguous(indexExpr.containerExpression)) {\n return false;\n }\n\n STNode keys = indexExpr.keyExpression;\n for (int i = 0; i < keys.bucketCount(); i++) {\n STNode item = keys.childInBucket(i);\n if (item.kind == SyntaxKind.COMMA_TOKEN) {\n continue;\n }\n\n if (!isAmbiguous(item)) {\n return false;\n }\n }\n return true;\n default:\n return false;\n }\n }\n\n private boolean isAllBasicLiterals(STNode node) {\n switch (node.kind) {\n case NIL_LITERAL:\n case NULL_LITERAL:\n case NUMERIC_LITERAL:\n case STRING_LITERAL:\n case BOOLEAN_LITERAL:\n return true;\n case BINARY_EXPRESSION:\n STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;\n if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||\n binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {\n return false;\n }\n return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);\n case BRACED_EXPRESSION:\n return isAmbiguous(((STBracedExpressionNode) node).expression);\n case BRACKETED_LIST:\n STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;\n for (STNode member : list.members) {\n if 
(member.kind == SyntaxKind.COMMA_TOKEN) {\n continue;\n }\n\n if (!isAllBasicLiterals(member)) {\n return false;\n }\n }\n\n return true;\n case UNARY_EXPRESSION:\n STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;\n if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&\n unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {\n return false;\n }\n\n return isNumericLiteral(unaryExpr.expression);\n default:\n return false;\n }\n }\n\n private boolean isNumericLiteral(STNode node) {\n switch (node.kind) {\n case NUMERIC_LITERAL:\n return true;\n default:\n return false;\n }\n }\n\n \n\n /**\n * Parse binding-patterns.\n *

\n * \n * binding-pattern := capture-binding-pattern\n * | wildcard-binding-pattern\n * | list-binding-pattern\n * | mapping-binding-pattern\n * | functional-binding-pattern\n *

\n *

\n * capture-binding-pattern := variable-name\n * variable-name := identifier\n *

\n *

\n * wildcard-binding-pattern := _\n * list-binding-pattern := [ list-member-binding-patterns ]\n *
\n * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n * | [ rest-binding-pattern ]\n *

\n *

\n * mapping-binding-pattern := { field-binding-patterns }\n * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n * | [ rest-binding-pattern ]\n *
\n * field-binding-pattern := field-name : binding-pattern | variable-name\n *
\n * rest-binding-pattern := ... variable-name\n *

\n *

\n * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )\n *
\n * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]\n * | other-arg-binding-patterns\n *
\n * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*\n *
\n * positional-arg-binding-pattern := binding-pattern\n *
\n * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]\n * | [rest-binding-pattern]\n *
\n * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*\n *
\n * named-arg-binding-pattern := arg-name = binding-pattern\n *
\n *\n * @return binding-pattern node\n */\n private STNode parseBindingPattern() {\n switch (peek().kind) {\n case OPEN_BRACKET_TOKEN:\n return parseListBindingPattern();\n case IDENTIFIER_TOKEN:\n return parseBindingPatternStartsWithIdentifier();\n case OPEN_BRACE_TOKEN:\n return parseMappingBindingPattern();\n case ERROR_KEYWORD:\n return parseErrorBindingPattern();\n default:\n recover(peek(), ParserRuleContext.BINDING_PATTERN);\n return parseBindingPattern();\n }\n }\n\n private STNode parseBindingPatternStartsWithIdentifier() {\n STNode argNameOrBindingPattern =\n parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\n STToken secondToken = peek();\n if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {\n startContext(ParserRuleContext.ERROR_BINDING_PATTERN);\n STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,\n ParserRuleContext.ERROR_KEYWORD);\n return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);\n }\n\n if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {\n STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\n ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);\n identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);\n return createCaptureOrWildcardBP(identifier);\n }\n\n return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);\n }\n\n private STNode createCaptureOrWildcardBP(STNode varName) {\n STNode bindingPattern;\n if (isWildcardBP(varName)) {\n bindingPattern = getWildcardBindingPattern(varName);\n } else {\n bindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);\n }\n return bindingPattern;\n }\n\n /**\n * Parse list-binding-patterns.\n *

\n * \n * list-binding-pattern := [ list-member-binding-patterns ]\n *
\n * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n * | [ rest-binding-pattern ]\n *
\n *\n * @return list-binding-pattern node\n */\n private STNode parseListBindingPattern() {\n startContext(ParserRuleContext.LIST_BINDING_PATTERN);\n STNode openBracket = parseOpenBracket();\n List bindingPatternsList = new ArrayList<>();\n STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);\n endContext();\n return listBindingPattern;\n }\n\n private STNode parseListBindingPattern(STNode openBracket, List bindingPatternsList) {\n if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) {\n \n STNode closeBracket = parseCloseBracket();\n STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);\n return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);\n }\n STNode listBindingPatternMember = parseListBindingPatternMember();\n bindingPatternsList.add(listBindingPatternMember);\n STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);\n return listBindingPattern;\n }\n\n private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List bindingPatterns) {\n STNode member = firstMember;\n \n STToken token = peek(); \n STNode listBindingPatternRhs = null;\n while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {\n listBindingPatternRhs = parseListBindingPatternMemberRhs();\n if (listBindingPatternRhs == null) {\n break;\n }\n\n bindingPatterns.add(listBindingPatternRhs);\n member = parseListBindingPatternMember();\n bindingPatterns.add(member);\n token = peek();\n }\n\n STNode closeBracket = parseCloseBracket();\n STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\n return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);\n }\n\n private STNode parseListBindingPatternMemberRhs() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case 
CLOSE_BRACKET_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);\n return parseListBindingPatternMemberRhs();\n }\n }\n\n private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {\n switch (nextTokenKind) {\n case CLOSE_BRACKET_TOKEN:\n case EOF_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse list-binding-pattern member.\n *

\n * \n * list-binding-pattern := [ list-member-binding-patterns ]\n *
\n * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]\n * | [ rest-binding-pattern ]\n *
\n *\n * @return List binding pattern member\n */\n private STNode parseListBindingPatternMember() {\n switch (peek().kind) {\n case ELLIPSIS_TOKEN:\n return parseRestBindingPattern();\n case OPEN_BRACKET_TOKEN:\n case IDENTIFIER_TOKEN:\n case OPEN_BRACE_TOKEN:\n case ERROR_KEYWORD:\n return parseBindingPattern();\n default:\n recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);\n return parseListBindingPatternMember();\n }\n }\n\n /**\n * Parse rest binding pattern.\n *

\n * \n * rest-binding-pattern := ... variable-name\n * \n *\n * @return Rest binding pattern node\n */\n private STNode parseRestBindingPattern() {\n startContext(ParserRuleContext.REST_BINDING_PATTERN);\n STNode ellipsis = parseEllipsis();\n STNode varName = parseVariableName();\n endContext();\n\n STSimpleNameReferenceNode simpleNameReferenceNode =\n (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);\n return STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);\n }\n\n /**\n * Parse Typed-binding-pattern.\n *

\n * \n * typed-binding-pattern := inferable-type-descriptor binding-pattern\n *

\n * inferable-type-descriptor := type-descriptor | var\n *
\n *\n * @return Typed binding pattern node\n */\n private STNode parseTypedBindingPattern(ParserRuleContext context) {\n List typeDescQualifiers = new ArrayList<>();\n return parseTypedBindingPattern(typeDescQualifiers, context);\n }\n\n private STNode parseTypedBindingPattern(List qualifiers, ParserRuleContext context) {\n STNode typeDesc = parseTypeDescriptor(qualifiers,\n ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false, TypePrecedence.DEFAULT);\n STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);\n return typeBindingPattern;\n }\n\n /**\n * Parse mapping-binding-patterns.\n *

\n * \n * mapping-binding-pattern := { field-binding-patterns }\n *

\n * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n * | [ rest-binding-pattern ]\n *

\n * field-binding-pattern := field-name : binding-pattern | variable-name\n *
\n *\n * @return mapping-binding-pattern node\n */\n private STNode parseMappingBindingPattern() {\n startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);\n STNode openBrace = parseOpenBrace();\n\n STToken token = peek();\n if (isEndOfMappingBindingPattern(token.kind)) {\n STNode closeBrace = parseCloseBrace();\n STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();\n endContext();\n return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);\n }\n\n List bindingPatterns = new ArrayList<>();\n STNode prevMember = parseMappingBindingPatternMember();\n if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {\n bindingPatterns.add(prevMember);\n }\n return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);\n }\n\n private STNode parseMappingBindingPattern(STNode openBrace, List bindingPatterns, STNode prevMember) {\n STToken token = peek(); \n STNode mappingBindingPatternRhs = null;\n while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {\n mappingBindingPatternRhs = parseMappingBindingPatternEnd();\n if (mappingBindingPatternRhs == null) {\n break;\n }\n\n bindingPatterns.add(mappingBindingPatternRhs);\n prevMember = parseMappingBindingPatternMember();\n if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {\n break;\n }\n bindingPatterns.add(prevMember);\n token = peek();\n }\n\n if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {\n bindingPatterns.add(prevMember);\n }\n\n STNode closeBrace = parseCloseBrace();\n STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);\n endContext();\n return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);\n }\n\n /**\n * Parse mapping-binding-pattern entry.\n *

\n * \n * mapping-binding-pattern := { field-binding-patterns }\n *

\n * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]\n * | [ rest-binding-pattern ]\n *

\n * field-binding-pattern := field-name : binding-pattern\n * | variable-name\n *
\n *\n * @return mapping-binding-pattern node\n */\n private STNode parseMappingBindingPatternMember() {\n STToken token = peek();\n switch (token.kind) {\n case ELLIPSIS_TOKEN:\n return parseRestBindingPattern();\n default:\n return parseFieldBindingPattern();\n }\n }\n\n private STNode parseMappingBindingPatternEnd() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_BRACE_TOKEN:\n return null;\n default:\n recover(nextToken, ParserRuleContext.MAPPING_BINDING_PATTERN_END);\n return parseMappingBindingPatternEnd();\n }\n }\n\n /**\n * Parse field-binding-pattern.\n * field-binding-pattern := field-name : binding-pattern | varname\n *\n * @return field-binding-pattern node\n */\n private STNode parseFieldBindingPattern() {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case IDENTIFIER_TOKEN:\n STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\n STNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);\n return parseFieldBindingPattern(simpleNameReference);\n default:\n recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_NAME);\n return parseFieldBindingPattern();\n }\n }\n\n private STNode parseFieldBindingPattern(STNode simpleNameReference) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case COMMA_TOKEN:\n case CLOSE_BRACE_TOKEN:\n return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);\n case COLON_TOKEN:\n STNode colon = parseColon();\n STNode bindingPattern = parseBindingPattern();\n return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);\n default:\n recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_END);\n return parseFieldBindingPattern(simpleNameReference);\n }\n }\n\n private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {\n return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || 
isEndOfModuleLevelNode(1);\n }\n\n private STNode parseErrorTypeDescOrErrorBP(STNode annots) {\n STToken nextNextToken = peek(2);\n switch (nextNextToken.kind) {\n case OPEN_PAREN_TOKEN:\n return parseAsErrorBindingPattern();\n case LT_TOKEN:\n return parseAsErrorTypeDesc(annots);\n case IDENTIFIER_TOKEN:\n \n SyntaxKind nextNextNextTokenKind = peek(3).kind;\n if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||\n nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {\n return parseAsErrorBindingPattern();\n }\n \n default:\n return parseAsErrorTypeDesc(annots);\n }\n }\n\n private STNode parseAsErrorBindingPattern() {\n startContext(ParserRuleContext.ASSIGNMENT_STMT);\n return parseAssignmentStmtRhs(parseErrorBindingPattern());\n }\n\n private STNode parseAsErrorTypeDesc(STNode annots) {\n STNode finalKeyword = STNodeFactory.createEmptyNode();\n return parseVariableDecl(getAnnotations(annots), finalKeyword);\n }\n\n /**\n * Parse error binding pattern node.\n *

\n * error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )\n *

\n * error-arg-list-binding-pattern :=\n * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]\n * | [error-field-binding-patterns]\n *

\n * error-message-binding-pattern := simple-binding-pattern\n *

\n * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern\n *

\n * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern\n *

\n * error-field-binding-patterns :=\n * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]\n * | rest-binding-pattern\n *

\n * named-arg-binding-pattern := arg-name = binding-pattern\n *\n * @return Error binding pattern node.\n */\n private STNode parseErrorBindingPattern() {\n startContext(ParserRuleContext.ERROR_BINDING_PATTERN);\n STNode errorKeyword = parseErrorKeyword();\n return parseErrorBindingPattern(errorKeyword);\n }\n\n private STNode parseErrorBindingPattern(STNode errorKeyword) {\n STToken nextToken = peek();\n STNode typeRef;\n switch (nextToken.kind) {\n case OPEN_PAREN_TOKEN:\n typeRef = STNodeFactory.createEmptyNode();\n break;\n default:\n if (isPredeclaredIdentifier(nextToken.kind)) {\n typeRef = parseTypeReference();\n break;\n }\n recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);\n return parseErrorBindingPattern(errorKeyword);\n }\n return parseErrorBindingPattern(errorKeyword, typeRef);\n }\n\n private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {\n STNode openParenthesis = parseOpenParenthesis();\n STNode argListBindingPatterns = parseErrorArgListBindingPatterns();\n STNode closeParenthesis = parseCloseParenthesis();\n endContext();\n return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis,\n argListBindingPatterns, closeParenthesis);\n }\n\n /**\n * Parse error arg list binding pattern.\n *

\n * \n * error-arg-list-binding-pattern :=\n * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]\n * | [error-field-binding-patterns]\n *

\n *

\n * error-message-binding-pattern := simple-binding-pattern\n *

\n *

\n * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern\n *

\n *

\n * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern\n *

\n *

\n * error-field-binding-patterns :=\n * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]\n * | rest-binding-pattern\n *

\n *

\n * named-arg-binding-pattern := arg-name = binding-pattern\n * \n *\n * @return Error arg list binding patterns.\n */\n private STNode parseErrorArgListBindingPatterns() {\n List argListBindingPatterns = new ArrayList<>();\n if (isEndOfErrorFieldBindingPatterns()) {\n return STNodeFactory.createNodeList(argListBindingPatterns);\n }\n return parseErrorArgListBindingPatterns(argListBindingPatterns);\n }\n\n private STNode parseErrorArgListBindingPatterns(List argListBindingPatterns) {\n STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);\n if (firstArg == null) {\n \n return STNodeFactory.createNodeList(argListBindingPatterns);\n }\n\n switch (firstArg.kind) {\n case CAPTURE_BINDING_PATTERN:\n case WILDCARD_BINDING_PATTERN:\n argListBindingPatterns.add(firstArg);\n return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);\n case ERROR_BINDING_PATTERN:\n STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);\n STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);\n missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,\n DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);\n STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,\n DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);\n argListBindingPatterns.add(missingErrorMsgBP);\n argListBindingPatterns.add(missingComma);\n argListBindingPatterns.add(firstArg);\n return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);\n case REST_BINDING_PATTERN:\n case NAMED_ARG_BINDING_PATTERN:\n argListBindingPatterns.add(firstArg);\n return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);\n default:\n \n \n addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);\n return 
parseErrorArgListBindingPatterns(argListBindingPatterns);\n }\n }\n\n private STNode parseErrorArgListBPWithoutErrorMsg(List argListBindingPatterns) {\n STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);\n if (argEnd == null) {\n \n return STNodeFactory.createNodeList(argListBindingPatterns);\n }\n\n STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);\n assert secondArg != null; \n switch (secondArg.kind) {\n case CAPTURE_BINDING_PATTERN:\n case WILDCARD_BINDING_PATTERN:\n case ERROR_BINDING_PATTERN:\n case REST_BINDING_PATTERN:\n case NAMED_ARG_BINDING_PATTERN:\n argListBindingPatterns.add(argEnd);\n argListBindingPatterns.add(secondArg);\n return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);\n default:\n \n \n updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);\n updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,\n DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);\n return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);\n }\n }\n\n private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List argListBindingPatterns,\n SyntaxKind lastValidArgKind) {\n while (!isEndOfErrorFieldBindingPatterns()) {\n STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);\n if (argEnd == null) {\n \n break;\n }\n STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);\n assert currentArg != null; \n DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);\n if (errorCode == null) {\n argListBindingPatterns.add(argEnd);\n argListBindingPatterns.add(currentArg);\n lastValidArgKind = currentArg.kind;\n } else if (argListBindingPatterns.size() == 0) {\n addInvalidNodeToNextToken(argEnd, null);\n addInvalidNodeToNextToken(currentArg, 
errorCode);\n } else {\n updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);\n updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);\n }\n }\n\n return STNodeFactory.createNodeList(argListBindingPatterns);\n }\n\n private boolean isEndOfErrorFieldBindingPatterns() {\n SyntaxKind nextTokenKind = peek().kind;\n switch (nextTokenKind) {\n case CLOSE_PAREN_TOKEN:\n case EOF_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return consume();\n case CLOSE_PAREN_TOKEN:\n return null;\n default:\n recover(peek(), currentCtx);\n return parseErrorArgsBindingPatternEnd(currentCtx);\n }\n }\n\n private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {\n switch (peek().kind) {\n case ELLIPSIS_TOKEN:\n return parseRestBindingPattern();\n case IDENTIFIER_TOKEN:\n \n STNode argNameOrSimpleBindingPattern = consume();\n return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);\n case OPEN_BRACKET_TOKEN:\n case OPEN_BRACE_TOKEN:\n case ERROR_KEYWORD:\n return parseBindingPattern();\n case CLOSE_PAREN_TOKEN:\n if (isFirstArg) {\n \n return null;\n }\n \n default:\n recover(peek(), context);\n return parseErrorArgListBindingPattern(context, isFirstArg);\n }\n }\n\n private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {\n STToken secondToken = peek();\n switch (secondToken.kind) {\n case EQUAL_TOKEN:\n STNode equal = consume();\n STNode bindingPattern = parseBindingPattern();\n return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern,\n equal, bindingPattern);\n case COMMA_TOKEN:\n case CLOSE_PAREN_TOKEN:\n default:\n return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);\n }\n }\n\n private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,\n 
SyntaxKind currentArgKind) {\n switch (currentArgKind) {\n case NAMED_ARG_BINDING_PATTERN:\n case REST_BINDING_PATTERN:\n \n if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {\n return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;\n }\n return null;\n case CAPTURE_BINDING_PATTERN:\n case WILDCARD_BINDING_PATTERN:\n case ERROR_BINDING_PATTERN:\n case LIST_BINDING_PATTERN:\n case MAPPING_BINDING_PATTERN:\n default:\n return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;\n }\n }\n\n \n\n /*\n * This parses Typed binding patterns and deals with ambiguity between types,\n * and binding patterns. An example is 'T[a]'.\n * The ambiguity lies in between:\n * 1) Array Type\n * 2) List binding pattern\n * 3) Member access expression.\n */\n\n /**\n * Parse the component after the type-desc, of a typed-binding-pattern.\n *\n * @param typeDesc Starting type-desc of the typed-binding-pattern\n * @return Typed-binding pattern\n */\n private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {\n return parseTypedBindingPatternTypeRhs(typeDesc, context, true);\n }\n\n private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case IDENTIFIER_TOKEN: \n case OPEN_BRACE_TOKEN: \n case ERROR_KEYWORD: \n STNode bindingPattern = parseBindingPattern();\n return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n case OPEN_BRACKET_TOKEN:\n \n STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);\n assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;\n return typedBindingPattern;\n case CLOSE_PAREN_TOKEN:\n case COMMA_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n case CLOSE_BRACE_TOKEN:\n if (!isRoot) {\n return typeDesc;\n }\n \n default:\n recover(nextToken, ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS);\n return 
parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);\n }\n }\n\n /**\n * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.\n *\n * @param typeDescOrExpr Type desc or the expression at the start\n * @param isTypedBindingPattern Is this is a typed-binding-pattern. If this is `false`, then it's still ambiguous\n * @return Parsed node\n */\n private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,\n boolean allowAssignment, ParserRuleContext context) {\n startContext(ParserRuleContext.BRACKETED_LIST);\n STNode openBracket = parseOpenBracket();\n\n \n if (isBracketedListEnd(peek().kind)) {\n return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);\n }\n\n \n STNode member = parseBracketedListMember(isTypedBindingPattern);\n SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);\n switch (currentNodeType) {\n case ARRAY_TYPE_DESC:\n STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);\n return typedBindingPattern;\n case LIST_BINDING_PATTERN:\n \n \n STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);\n STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\n return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n case INDEXED_EXPRESSION:\n return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);\n case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS:\n break;\n case NONE:\n default:\n \n \n\n \n STNode memberEnd = parseBracketedListMemberEnd();\n if (memberEnd != null) {\n \n List memberList = new ArrayList<>();\n memberList.add(getBindingPattern(member));\n memberList.add(memberEnd);\n bindingPattern = parseAsListBindingPattern(openBracket, memberList);\n typeDesc = getTypeDescFromExpr(typeDescOrExpr);\n return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n }\n }\n\n \n \n \n 
\n \n STNode closeBracket = parseCloseBracket();\n endContext();\n return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,\n isTypedBindingPattern, allowAssignment, context);\n }\n\n private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {\n member = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);\n STNode closeBracket = parseCloseBracket();\n endContext();\n STNode keyExpr = STNodeFactory.createNodeList(member);\n STNode memberAccessExpr =\n STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);\n return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);\n }\n\n private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {\n switch (nextTokenKind) {\n case EOF_TOKEN:\n case CLOSE_BRACKET_TOKEN:\n return true;\n default:\n return false;\n }\n }\n\n /**\n * Parse a member of an ambiguous bracketed list. This member could be:\n * 1) Array length\n * 2) Key expression of a member-access-expr\n * 3) A member-binding pattern of a list-binding-pattern.\n *\n * @param isTypedBindingPattern Is this in a definite typed-binding pattern\n * @return Parsed member node\n */\n private STNode parseBracketedListMember(boolean isTypedBindingPattern) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case DECIMAL_INTEGER_LITERAL_TOKEN:\n case HEX_INTEGER_LITERAL_TOKEN:\n case ASTERISK_TOKEN:\n case STRING_LITERAL_TOKEN:\n return parseBasicLiteral();\n case CLOSE_BRACKET_TOKEN:\n return STNodeFactory.createEmptyNode();\n case OPEN_BRACE_TOKEN:\n case ERROR_KEYWORD: \n case ELLIPSIS_TOKEN: \n case OPEN_BRACKET_TOKEN: \n return parseStatementStartBracketedListMember();\n case IDENTIFIER_TOKEN:\n if (isTypedBindingPattern) {\n return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\n }\n break;\n default:\n if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||\n 
isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {\n break;\n }\n\n ParserRuleContext recoverContext =\n isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH\n : ParserRuleContext.BRACKETED_LIST_MEMBER;\n recover(peek(), recoverContext);\n return parseBracketedListMember(isTypedBindingPattern);\n }\n\n STNode expr = parseExpression();\n if (isWildcardBP(expr)) {\n return getWildcardBindingPattern(expr);\n }\n\n \n return expr;\n }\n\n /**\n * Treat the current node as an array, and parse the remainder of the binding pattern.\n *\n * @param typeDesc Type-desc\n * @param openBracket Open bracket\n * @param member Member\n * @return Parsed node\n */\n private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {\n \n \n typeDesc = getTypeDescFromExpr(typeDesc);\n switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);\n startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);\n STNode closeBracket = parseCloseBracket();\n endContext();\n endContext();\n return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,\n context);\n }\n\n private STNode parseBracketedListMemberEnd() {\n switch (peek().kind) {\n case COMMA_TOKEN:\n return parseComma();\n case CLOSE_BRACKET_TOKEN:\n return null;\n default:\n recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);\n return parseBracketedListMemberEnd();\n }\n }\n\n /**\n * We reach here to break ambiguity of T[a]. 
This could be:\n * 1) Array Type Desc\n * 2) Member access on LHS\n * 3) Typed-binding-pattern\n *\n * @param typeDescOrExpr Type name or the expr that precede the open-bracket.\n * @param openBracket Open bracket\n * @param member Member\n * @param closeBracket Open bracket\n * @param isTypedBindingPattern Is this is a typed-binding-pattern.\n * @return Specific node that matches to T[a], after solving ambiguity.\n */\n private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,\n STNode closeBracket, boolean isTypedBindingPattern,\n boolean allowAssignment, ParserRuleContext context) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case IDENTIFIER_TOKEN: \n case OPEN_BRACE_TOKEN: \n case ERROR_KEYWORD: \n \n STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\n STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\n return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\n case OPEN_BRACKET_TOKEN: \n if (isTypedBindingPattern) {\n typeDesc = getTypeDescFromExpr(typeDescOrExpr);\n arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\n return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);\n }\n\n \n STNode keyExpr = getKeyExpr(member);\n STNode expr =\n STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\n return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);\n case QUESTION_MARK_TOKEN:\n \n typeDesc = getTypeDescFromExpr(typeDescOrExpr);\n arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\n typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,\n ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);\n return parseTypedBindingPatternTypeRhs(typeDesc, context);\n case PIPE_TOKEN:\n case BITWISE_AND_TOKEN:\n \n return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket,\n 
isTypedBindingPattern);\n case IN_KEYWORD:\n \n if (context != ParserRuleContext.FOREACH_STMT &&\n context != ParserRuleContext.FROM_CLAUSE &&\n context != ParserRuleContext.JOIN_CLAUSE) {\n break;\n }\n return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\n case EQUAL_TOKEN: \n if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\n \n \n break;\n }\n\n \n if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {\n return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\n }\n\n keyExpr = getKeyExpr(member);\n typeDescOrExpr = getExpression(typeDescOrExpr);\n return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);\n case SEMICOLON_TOKEN: \n if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {\n \n \n break;\n }\n\n return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);\n case CLOSE_BRACE_TOKEN: \n case COMMA_TOKEN:\n if (context == ParserRuleContext.AMBIGUOUS_STMT) {\n keyExpr = getKeyExpr(member);\n return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\n closeBracket);\n }\n \n default:\n if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {\n \n keyExpr = getKeyExpr(member);\n typeDescOrExpr = getExpression(typeDescOrExpr);\n return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,\n closeBracket);\n }\n\n break;\n }\n\n ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;\n if (isTypedBindingPattern) {\n recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;\n }\n\n recover(peek(), recoveryCtx);\n return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,\n isTypedBindingPattern, allowAssignment, context);\n }\n\n private STNode getKeyExpr(STNode member) {\n if (member == 
null) {\n STToken keyIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\n DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);\n STNode missingVarRef = STNodeFactory.createSimpleNameReferenceNode(keyIdentifier);\n\n return STNodeFactory.createNodeList(missingVarRef);\n }\n return STNodeFactory.createNodeList(member);\n }\n\n private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,\n STNode closeBracket) {\n STNode bindingPatterns = STNodeFactory.createEmptyNodeList();\n if (!isEmpty(member)) {\n SyntaxKind memberKind = member.kind;\n if (memberKind == SyntaxKind.NUMERIC_LITERAL || memberKind == SyntaxKind.ASTERISK_LITERAL) {\n STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\n STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);\n STToken identifierToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,\n DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);\n STNode variableName = STNodeFactory.createCaptureBindingPatternNode(identifierToken);\n return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, variableName);\n }\n \n if (member.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {\n \n openBracket = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracket, member,\n DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);\n } else {\n STNode bindingPattern = getBindingPattern(member);\n bindingPatterns = STNodeFactory.createNodeList(bindingPattern);\n }\n }\n\n STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, closeBracket);\n STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);\n return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);\n }\n\n /**\n * Parse a union or intersection type-desc/binary-expression that involves ambiguous\n * bracketed list in lhs.\n *

\n * e.g: (T[a] & R..) or (T[a] | R.. )\n *

\n * Complexity occurs in scenarios such as T[a] |/& R[b]. If the token after this\n * is another binding-pattern, then (T[a] |/& R[b]) becomes the type-desc. However,\n * if the token follows this is an equal or semicolon, then (T[a] |/& R) becomes\n * the type-desc, and [b] becomes the binding pattern.\n *\n * @param typeDescOrExpr Type desc or the expression\n * @param openBracket Open bracket\n * @param member Member\n * @param closeBracket Close bracket\n * @return Parsed node\n */\n private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,\n STNode closeBracket, boolean isTypedBindingPattern) {\n STNode pipeOrAndToken = parseUnionOrIntersectionToken();\n STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);\n\n if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {\n \n \n STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);\n lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);\n\n STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;\n STNode rhsTypeDesc = rhsTypedBindingPattern.typeDescriptor;\n\n STNode newTypeDesc = mergeTypes(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);\n return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);\n }\n\n if (isTypedBindingPattern) {\n \n STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);\n lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);\n return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);\n }\n\n STNode keyExpr = getExpression(member);\n STNode containerExpr = getExpression(typeDescOrExpr);\n STNode lhsExpr =\n STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);\n return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,\n 
typedBindingPatternOrExpr);\n\n }\n\n /**\n * Merges two types separated by | or & into one type, while taking precedence\n * and associativity into account.\n *\n * @param lhsTypeDesc lhs type\n * @param pipeOrAndToken pipe or bitwise-and token\n * @param rhsTypeDesc rhs type\n * @return a TypeDescriptorNode\n */\n private STNode mergeTypes(STNode lhsTypeDesc, STNode pipeOrAndToken, STNode rhsTypeDesc) {\n if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {\n return mergeTypesWithUnion(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);\n } else {\n return mergeTypesWithIntersection(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);\n }\n }\n\n /**\n * Merges two types separated by | into one type, while taking precedence\n * and associativity into account.\n *\n * @param lhsTypeDesc lhs type\n * @param pipeToken pipe token\n * @param rhsTypeDesc rhs type\n * @return a TypeDescriptorNode\n */\n private STNode mergeTypesWithUnion(STNode lhsTypeDesc, STNode pipeToken, STNode rhsTypeDesc) {\n if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\n \n \n \n\n STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;\n return replaceLeftMostUnionWithAUnion(lhsTypeDesc, pipeToken, rhsUnionTypeDesc);\n } else {\n return createUnionTypeDesc(lhsTypeDesc, pipeToken, rhsTypeDesc);\n }\n }\n\n /**\n * Merges two types separated by & into one type, while taking precedence\n * and associativity into account.\n *\n * @param lhsTypeDesc lhs type\n * @param bitwiseAndToken bitwise-and token\n * @param rhsTypeDesc rhs type\n * @return a TypeDescriptorNode\n */\n private STNode mergeTypesWithIntersection(STNode lhsTypeDesc, STNode bitwiseAndToken, STNode rhsTypeDesc) {\n if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\n \n STUnionTypeDescriptorNode lhsUnionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;\n if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\n rhsTypeDesc = replaceLeftMostIntersectionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,\n 
bitwiseAndToken, (STIntersectionTypeDescriptorNode) rhsTypeDesc);\n return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);\n } else if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\n rhsTypeDesc = replaceLeftMostUnionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,\n bitwiseAndToken, (STUnionTypeDescriptorNode) rhsTypeDesc);\n return replaceLeftMostUnionWithAUnion(lhsUnionTypeDesc.leftTypeDesc,\n lhsUnionTypeDesc.pipeToken, (STUnionTypeDescriptorNode) rhsTypeDesc);\n } else {\n rhsTypeDesc = createIntersectionTypeDesc(lhsUnionTypeDesc.rightTypeDesc, bitwiseAndToken, rhsTypeDesc);\n return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);\n }\n }\n\n if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\n \n \n \n\n STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;\n return replaceLeftMostUnionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsUnionTypeDesc);\n } else if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\n \n \n \n\n STIntersectionTypeDescriptorNode rhsIntSecTypeDesc = (STIntersectionTypeDescriptorNode) rhsTypeDesc;\n return replaceLeftMostIntersectionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsIntSecTypeDesc);\n } else {\n return createIntersectionTypeDesc(lhsTypeDesc, bitwiseAndToken, rhsTypeDesc);\n }\n }\n\n private STNode replaceLeftMostUnionWithAUnion(STNode typeDesc, STNode pipeToken,\n STUnionTypeDescriptorNode unionTypeDesc) {\n STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;\n\n \n \n if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\n return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,\n replaceLeftMostUnionWithAUnion(typeDesc, pipeToken, (STUnionTypeDescriptorNode) leftTypeDesc));\n }\n\n \n \n leftTypeDesc = createUnionTypeDesc(typeDesc, pipeToken, leftTypeDesc);\n return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);\n }\n \n private STNode 
replaceLeftMostUnionWithAIntersection(STNode typeDesc, STNode bitwiseAndToken,\n STUnionTypeDescriptorNode unionTypeDesc) {\n STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;\n \n \n \n if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\n return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,\n replaceLeftMostUnionWithAIntersection(typeDesc, bitwiseAndToken,\n (STUnionTypeDescriptorNode) leftTypeDesc));\n }\n\n \n \n if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\n return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, \n replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,\n (STIntersectionTypeDescriptorNode) leftTypeDesc));\n }\n\n \n \n leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);\n return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);\n }\n\n private STNode replaceLeftMostIntersectionWithAIntersection(STNode typeDesc,\n STNode bitwiseAndToken,\n STIntersectionTypeDescriptorNode intersectionTypeDesc) {\n STNode leftTypeDesc = intersectionTypeDesc.leftTypeDesc;\n\n \n \n if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\n return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc,\n replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,\n (STIntersectionTypeDescriptorNode) leftTypeDesc));\n }\n\n \n \n leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);\n return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc, leftTypeDesc);\n }\n \n private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {\n if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {\n STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;\n STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);\n lhsTypeDesc = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, 
middleTypeDesc);\n } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {\n STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;\n STNode middleTypeDesc =\n getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);\n lhsTypeDesc = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,\n intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);\n } else {\n lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);\n }\n\n return lhsTypeDesc;\n }\n\n /**\n * Parse union (|) or intersection (&) type operator.\n *\n * @return pipe or bitwise and token\n */\n private STNode parseUnionOrIntersectionToken() {\n STToken token = peek();\n if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {\n return consume();\n } else {\n recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);\n return parseUnionOrIntersectionToken();\n }\n }\n\n /**\n * Infer the type of the ambiguous bracketed list, based on the type of the member.\n *\n * @param memberNode Member node\n * @return Inferred type of the bracketed list\n */\n private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {\n if (isEmpty(memberNode)) {\n \n return SyntaxKind.NONE;\n }\n\n if (isDefiniteTypeDesc(memberNode.kind)) {\n return SyntaxKind.TUPLE_TYPE_DESC;\n }\n\n switch (memberNode.kind) {\n case ASTERISK_LITERAL:\n return SyntaxKind.ARRAY_TYPE_DESC;\n case CAPTURE_BINDING_PATTERN:\n case LIST_BINDING_PATTERN:\n case REST_BINDING_PATTERN:\n case MAPPING_BINDING_PATTERN:\n case WILDCARD_BINDING_PATTERN:\n return SyntaxKind.LIST_BINDING_PATTERN;\n case QUALIFIED_NAME_REFERENCE: \n case REST_TYPE:\n return SyntaxKind.TUPLE_TYPE_DESC;\n case NUMERIC_LITERAL: \n if (isTypedBindingPattern) {\n return SyntaxKind.ARRAY_TYPE_DESC;\n }\n return SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS;\n case SIMPLE_NAME_REFERENCE: \n case 
BRACKETED_LIST: \n case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:\n return SyntaxKind.NONE;\n case ERROR_CONSTRUCTOR:\n if (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {\n return SyntaxKind.NONE;\n }\n return SyntaxKind.INDEXED_EXPRESSION;\n default:\n if (isTypedBindingPattern) {\n return SyntaxKind.NONE;\n }\n return SyntaxKind.INDEXED_EXPRESSION;\n }\n }\n\n \n\n /*\n * This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.\n * The ambiguity lies in between:\n * 1) Assignment that starts with list binding pattern\n * 2) Var-decl statement that starts with tuple type\n * 3) Statement that starts with list constructor, such as sync-send, etc.\n */\n\n /**\n * Parse any statement that starts with an open-bracket.\n *\n * @param annots Annotations attached to the statement.\n * @return Parsed node\n */\n private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {\n startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);\n return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);\n }\n\n private STNode parseMemberBracketedList() {\n STNode annots = STNodeFactory.createEmptyNodeList();\n return parseStatementStartsWithOpenBracket(annots, false, false);\n }\n\n /**\n * The bracketed list at the start of a statement can be one of the following.\n * 1) List binding pattern\n * 2) Tuple type\n * 3) List constructor\n *\n * @param isRoot Is this the root of the list\n * @return Parsed node\n */\n private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {\n startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);\n STNode openBracket = parseOpenBracket();\n List memberList = new ArrayList<>();\n while (!isBracketedListEnd(peek().kind)) {\n \n STNode member = parseStatementStartBracketedListMember();\n SyntaxKind currentNodeType = getStmtStartBracketedListType(member);\n\n switch 
(currentNodeType) {\n case TUPLE_TYPE_DESC:\n member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);\n \n \n return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\n case MEMBER_TYPE_DESC:\n case REST_TYPE:\n return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\n case LIST_BINDING_PATTERN:\n \n \n return parseAsListBindingPattern(openBracket, memberList, member, isRoot);\n case LIST_CONSTRUCTOR:\n \n \n return parseAsListConstructor(openBracket, memberList, member, isRoot);\n case LIST_BP_OR_LIST_CONSTRUCTOR:\n return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);\n case TUPLE_TYPE_DESC_OR_LIST_CONST:\n return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);\n case NONE:\n default:\n memberList.add(member);\n break;\n }\n\n \n STNode memberEnd = parseBracketedListMemberEnd();\n if (memberEnd == null) {\n break;\n }\n memberList.add(memberEnd);\n }\n\n \n STNode closeBracket = parseCloseBracket();\n STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList, closeBracket,\n isRoot, possibleMappingField);\n return bracketedList;\n }\n\n /**\n * Parse a member of a list-binding-pattern, tuple-type-desc, or\n * list-constructor-expr, when the parent is ambiguous.\n *\n * @return Parsed node\n */\n private STNode parseStatementStartBracketedListMember() {\n List typeDescQualifiers = new ArrayList<>();\n return parseStatementStartBracketedListMember(typeDescQualifiers);\n }\n\n private STNode parseStatementStartBracketedListMember(List qualifiers) {\n parseTypeDescQualifiers(qualifiers);\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case OPEN_BRACKET_TOKEN:\n reportInvalidQualifierList(qualifiers);\n return parseMemberBracketedList();\n case IDENTIFIER_TOKEN:\n 
reportInvalidQualifierList(qualifiers);\n STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\n if (isWildcardBP(identifier)) {\n STNode varName = ((STSimpleNameReferenceNode) identifier).name;\n return getWildcardBindingPattern(varName);\n }\n\n nextToken = peek();\n if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {\n STNode ellipsis = parseEllipsis();\n return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n }\n\n if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {\n \n return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n }\n\n return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);\n case OPEN_BRACE_TOKEN:\n \n reportInvalidQualifierList(qualifiers);\n return parseMappingBindingPatterOrMappingConstructor();\n case ERROR_KEYWORD:\n reportInvalidQualifierList(qualifiers);\n STToken nextNextToken = getNextNextToken();\n if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||\n nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\n return parseErrorBindingPatternOrErrorConstructor();\n }\n \n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n case ELLIPSIS_TOKEN:\n \n reportInvalidQualifierList(qualifiers);\n return parseRestBindingOrSpreadMember();\n case XML_KEYWORD:\n case STRING_KEYWORD:\n reportInvalidQualifierList(qualifiers);\n if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {\n return parseExpression(false);\n }\n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n case TABLE_KEYWORD:\n case STREAM_KEYWORD:\n reportInvalidQualifierList(qualifiers);\n if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {\n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n }\n return parseExpression(false);\n case OPEN_PAREN_TOKEN:\n return parseTypeDescOrExpr(qualifiers);\n case FUNCTION_KEYWORD:\n return parseAnonFuncExprOrFuncTypeDesc(qualifiers);\n case AT_TOKEN:\n 
return parseTupleMember();\n default:\n if (isValidExpressionStart(nextToken.kind, 1)) {\n reportInvalidQualifierList(qualifiers);\n return parseExpression(false);\n }\n\n if (isTypeStartingToken(nextToken.kind)) {\n return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);\n }\n\n recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);\n return parseStatementStartBracketedListMember(qualifiers);\n }\n }\n\n private STNode parseRestBindingOrSpreadMember() {\n STNode ellipsis = parseEllipsis();\n STNode expr = parseExpression();\n if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {\n return STNodeFactory.createRestBindingPatternNode(ellipsis, expr);\n } else {\n return STNodeFactory.createSpreadMemberNode(ellipsis, expr);\n }\n }\n\n private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List memberList,\n STNode member, boolean isRoot) {\n memberList.add(member);\n STNode memberEnd = parseBracketedListMemberEnd();\n\n STNode tupleTypeDescOrListCons;\n if (memberEnd == null) {\n \n STNode closeBracket = parseCloseBracket();\n tupleTypeDescOrListCons =\n parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);\n } else {\n memberList.add(memberEnd);\n tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);\n }\n\n return tupleTypeDescOrListCons;\n }\n\n /**\n * Parse tuple type desc or list constructor.\n *\n * @return Parsed node\n */\n private STNode parseTupleTypeDescOrListConstructor(STNode annots) {\n startContext(ParserRuleContext.BRACKETED_LIST);\n STNode openBracket = parseOpenBracket();\n List memberList = new ArrayList<>();\n return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);\n }\n\n private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List memberList,\n boolean isRoot) {\n \n STToken nextToken = peek();\n while (!isBracketedListEnd(nextToken.kind)) 
{\n \n STNode member = parseTupleTypeDescOrListConstructorMember(annots);\n SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);\n\n switch (currentNodeType) {\n case LIST_CONSTRUCTOR:\n \n \n return parseAsListConstructor(openBracket, memberList, member, isRoot);\n case REST_TYPE:\n case MEMBER_TYPE_DESC:\n return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\n case TUPLE_TYPE_DESC:\n member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);\n \n \n return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);\n case TUPLE_TYPE_DESC_OR_LIST_CONST:\n default:\n memberList.add(member);\n break;\n }\n\n \n STNode memberEnd = parseBracketedListMemberEnd();\n if (memberEnd == null) {\n break;\n }\n memberList.add(memberEnd);\n nextToken = peek();\n }\n\n \n STNode closeBracket = parseCloseBracket();\n return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);\n }\n\n private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case OPEN_BRACKET_TOKEN:\n \n return parseTupleTypeDescOrListConstructor(annots);\n case IDENTIFIER_TOKEN:\n STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);\n \n if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {\n STNode ellipsis = parseEllipsis();\n return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);\n }\n return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);\n case OPEN_BRACE_TOKEN:\n \n return parseMappingConstructorExpr();\n case ERROR_KEYWORD:\n STToken nextNextToken = getNextNextToken();\n if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||\n nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {\n return parseErrorConstructorExpr(false);\n }\n \n return 
parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n case XML_KEYWORD:\n case STRING_KEYWORD:\n if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {\n return parseExpression(false);\n }\n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n case TABLE_KEYWORD:\n case STREAM_KEYWORD:\n if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {\n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n }\n return parseExpression(false);\n case OPEN_PAREN_TOKEN:\n return parseTypeDescOrExpr();\n case AT_TOKEN:\n return parseTupleMember();\n default:\n if (isValidExpressionStart(nextToken.kind, 1)) {\n return parseExpression(false);\n }\n\n if (isTypeStartingToken(nextToken.kind)) {\n return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);\n }\n\n recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER);\n return parseTupleTypeDescOrListConstructorMember(annots);\n }\n }\n\n private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {\n \n return getStmtStartBracketedListType(memberNode);\n }\n\n private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List members, STNode closeBracket,\n boolean isRoot) {\n STNode tupleTypeOrListConst;\n switch (peek().kind) {\n case COMMA_TOKEN: \n case CLOSE_BRACE_TOKEN: \n case CLOSE_BRACKET_TOKEN: \n case PIPE_TOKEN: \n case BITWISE_AND_TOKEN: \n if (!isRoot) {\n endContext();\n return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members,\n closeBracket);\n }\n \n default:\n if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||\n (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {\n members = getExpressionList(members, false);\n STNode memberExpressions = STNodeFactory.createNodeList(members);\n tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,\n memberExpressions, closeBracket);\n break;\n }\n\n \n STNode memberTypeDescs = 
STNodeFactory.createNodeList(getTupleMemberList(members));\n STNode tupleTypeDesc =\n STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);\n tupleTypeOrListConst =\n parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);\n }\n\n endContext();\n\n if (!isRoot) {\n return tupleTypeOrListConst;\n }\n\n STNode annots = STNodeFactory.createEmptyNodeList();\n return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);\n\n }", "context_before": "class member, object member or object member descriptor.\n *

\n * \n * class-member := object-field | method-defn | object-type-inclusion\n *
\n * object-member := object-field | method-defn\n *
\n * object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion\n *
\n *\n * @param context Parsing context of the object member\n * @return Parsed node\n */\n private STNode parseObjectMember(ParserRuleContext context) {\n STNode metadata;\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case EOF_TOKEN:\n case CLOSE_BRACE_TOKEN:\n \n return null;\n case ASTERISK_TOKEN:\n case PUBLIC_KEYWORD:\n case PRIVATE_KEYWORD:\n case FINAL_KEYWORD:\n case REMOTE_KEYWORD:\n case FUNCTION_KEYWORD:\n case TRANSACTIONAL_KEYWORD:\n case ISOLATED_KEYWORD:\n case RESOURCE_KEYWORD:\n metadata = STNodeFactory.createEmptyNode();\n break;\n case DOCUMENTATION_STRING:\n case AT_TOKEN:\n metadata = parseMetaData();\n break;\n default:\n if (isTypeStartingToken(nextToken.kind)) {\n metadata = STNodeFactory.createEmptyNode();\n break;\n }\n\n ParserRuleContext recoveryCtx;\n if (context == ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER) {\n recoveryCtx = ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER_START;\n } else {\n recoveryCtx = ParserRuleContext.CLASS_MEMBER_OR_OBJECT_MEMBER_START;\n }\n\n Solution solution = recover(peek(), recoveryCtx);\n\n if (solution.action == Action.KEEP) {\n metadata = STNodeFactory.createEmptyNode();\n break;\n }\n \n return parseObjectMember(context);\n }\n\n return parseObjectMemberWithoutMeta(metadata, context);\n }", "context_after": "class member, object member or object member descriptor.\n *

\n * \n * class-member := object-field | method-defn | object-type-inclusion\n *
\n * object-member := object-field | method-defn\n *
\n * object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion\n *
\n *\n * @param context Parsing context of the object member\n * @return Parsed node\n */\n private STNode parseObjectMember(ParserRuleContext context) {\n STNode metadata;\n STToken nextToken = peek();\n switch (nextToken.kind) {\n case EOF_TOKEN:\n case CLOSE_BRACE_TOKEN:\n \n return null;\n case ASTERISK_TOKEN:\n case PUBLIC_KEYWORD:\n case PRIVATE_KEYWORD:\n case FINAL_KEYWORD:\n case REMOTE_KEYWORD:\n case FUNCTION_KEYWORD:\n case TRANSACTIONAL_KEYWORD:\n case ISOLATED_KEYWORD:\n case RESOURCE_KEYWORD:\n metadata = STNodeFactory.createEmptyNode();\n break;\n case DOCUMENTATION_STRING:\n case AT_TOKEN:\n metadata = parseMetaData();\n break;\n default:\n if (isTypeStartingToken(nextToken.kind)) {\n metadata = STNodeFactory.createEmptyNode();\n break;\n }\n\n ParserRuleContext recoveryCtx;\n if (context == ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER) {\n recoveryCtx = ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER_START;\n } else {\n recoveryCtx = ParserRuleContext.CLASS_MEMBER_OR_OBJECT_MEMBER_START;\n }\n\n Solution solution = recover(peek(), recoveryCtx);\n\n if (solution.action == Action.KEEP) {\n metadata = STNodeFactory.createEmptyNode();\n break;\n }\n \n return parseObjectMember(context);\n }\n\n return parseObjectMemberWithoutMeta(metadata, context);\n }" }, { "comment": "Shall we add throwable as a second parameter along with the message?", "method_body": "public void onError(Throwable throwable) {\n BMap httpConnectorError = HttpUtil.getError(dataContext.context, throwable);\n if (outboundMsgDataStreamer != null) {\n if (throwable instanceof IOException) {\n this.dataContext.getOutboundRequest().setIoException((IOException) throwable);\n } else {\n this.dataContext.getOutboundRequest().setIoException(new IOException(throwable.getMessage()));\n }\n }\n this.dataContext.notifyOutboundResponseStatus(httpConnectorError);\n }", "target_code": "this.dataContext.getOutboundRequest().setIoException(new IOException(throwable.getMessage()));", "method_body_after": 
"public void onError(Throwable throwable) {\n BMap httpConnectorError = HttpUtil.getError(dataContext.context, throwable);\n if (outboundMsgDataStreamer != null) {\n if (throwable instanceof IOException) {\n this.dataContext.getOutboundRequest().setIoException((IOException) throwable);\n } else {\n this.dataContext.getOutboundRequest()\n .setIoException(new IOException(throwable.getMessage(), throwable));\n }\n }\n this.dataContext.notifyOutboundResponseStatus(httpConnectorError);\n }", "context_before": "class HttpResponseConnectorListener implements HttpConnectorListener {\n\n private final DataContext dataContext;\n private HttpMessageDataStreamer outboundMsgDataStreamer;\n\n HttpResponseConnectorListener(DataContext dataContext) {\n this.dataContext = dataContext;\n }\n\n HttpResponseConnectorListener(DataContext dataContext, HttpMessageDataStreamer outboundMsgDataStreamer) {\n this.dataContext = dataContext;\n this.outboundMsgDataStreamer = outboundMsgDataStreamer;\n }\n\n @Override\n public void onMessage(HttpCarbonMessage httpCarbonMessage) {\n this.dataContext.notifyOutboundResponseStatus(null);\n }\n\n @Override\n \n }", "context_after": "class HttpResponseConnectorListener implements HttpConnectorListener {\n\n private final DataContext dataContext;\n private HttpMessageDataStreamer outboundMsgDataStreamer;\n\n HttpResponseConnectorListener(DataContext dataContext) {\n this.dataContext = dataContext;\n }\n\n HttpResponseConnectorListener(DataContext dataContext, HttpMessageDataStreamer outboundMsgDataStreamer) {\n this.dataContext = dataContext;\n this.outboundMsgDataStreamer = outboundMsgDataStreamer;\n }\n\n @Override\n public void onMessage(HttpCarbonMessage httpCarbonMessage) {\n this.dataContext.notifyOutboundResponseStatus(null);\n }\n\n @Override\n \n }" }, { "comment": "assuming the discriminator is the first, can we avoid buffering? 
It can be done later, for now it would be great to understand how the current API would evolve to allow it.", "method_body": "public static AnimalWithTypeIdContainingDot fromJson(JsonReader jsonReader) {\n return JsonUtils.readObject(jsonReader, reader -> {\n \n String json = JsonUtils.bufferedJsonObject(jsonReader);\n\n \n JsonReader replayReader = DefaultJsonReader.fromString(json);\n\n String discriminatorValue = null;\n\n \n while (replayReader.nextToken() != JsonToken.END_OBJECT) {\n if (\"@odata.type\".equals(replayReader.getFieldName())) {\n replayReader.nextToken();\n discriminatorValue = replayReader.getStringValue();\n break;\n }\n }\n\n \n if (\"\n return DogWithTypeIdContainingDot.fromJson(DefaultJsonReader.fromString(json));\n } else if (\"\n return CatWithTypeIdContainingDot.fromJson(DefaultJsonReader.fromString(json));\n } else if (\"\n return RabbitWithTypeIdContainingDot.fromJson(DefaultJsonReader.fromString(json));\n } else {\n throw new IllegalStateException(\"Discriminator field '@odata.type' was either missing or didn't match \"\n + \"one of the expected values '\n + \"'\n + \"'\n }\n });\n }", "target_code": "String json = JsonUtils.bufferedJsonObject(jsonReader);", "method_body_after": "public static AnimalWithTypeIdContainingDot fromJson(JsonReader jsonReader) {\n return JsonUtils.readObject(jsonReader, reader -> {\n String discriminatorValue = null;\n JsonReader readerToUse = null;\n\n \n jsonReader.nextToken();\n if (\"@odata.type\".equals(jsonReader.getFieldName())) {\n jsonReader.nextToken();\n discriminatorValue = jsonReader.getStringValue();\n readerToUse = jsonReader;\n } else {\n \n \n String json = JsonUtils.bufferJsonObject(jsonReader);\n JsonReader replayReader = DefaultJsonReader.fromString(json);\n while (replayReader.nextToken() != JsonToken.END_OBJECT) {\n String fieldName = replayReader.getFieldName();\n replayReader.nextToken();\n\n if (\"@odata.type\".equals(fieldName)) {\n discriminatorValue = 
replayReader.getStringValue();\n break;\n } else {\n replayReader.skipChildren();\n }\n }\n\n if (discriminatorValue != null) {\n readerToUse = DefaultJsonReader.fromString(json);\n }\n }\n\n \n if (\"\n return DogWithTypeIdContainingDot.fromJson(readerToUse);\n } else if (\"\n return CatWithTypeIdContainingDot.fromJson(readerToUse);\n } else if (\"\n return RabbitWithTypeIdContainingDot.fromJson(readerToUse);\n } else {\n throw new IllegalStateException(\"Discriminator field '@odata.type' was either missing or didn't match \"\n + \"one of the expected values '\n + \"'\n + \"'\n }\n });\n }", "context_before": "class AnimalWithTypeIdContainingDot implements JsonSerializable {\n /**\n * Creates an instance of {@link AnimalWithTypeIdContainingDot} by reading the {@link JsonReader}.\n *\n * @param jsonReader The {@link JsonReader} that will be read.\n * @return An instance of {@link AnimalWithTypeIdContainingDot} if the {@link JsonReader} is pointing to\n * {@link AnimalWithTypeIdContainingDot} JSON content, or null if it is pointing to {@link JsonToken\n * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to the correct {@link JsonToken} when\n * passed.\n */\n \n}", "context_after": "class AnimalWithTypeIdContainingDot implements JsonSerializable {\n /**\n * Creates an instance of {@link AnimalWithTypeIdContainingDot} by reading the {@link JsonReader}.\n *\n * @param jsonReader The {@link JsonReader} that will be read.\n * @return An instance of {@link AnimalWithTypeIdContainingDot} if the {@link JsonReader} is pointing to\n * {@link AnimalWithTypeIdContainingDot} JSON content, or null if it is pointing to {@link JsonToken\n * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to the correct {@link JsonToken} when\n * passed.\n */\n \n}" }, { "comment": "this sleep should be moved into else block", "method_body": "public void modifyTblReplicaCount(Database database, String tblName) {\n if 
(!(Config.min_replication_num_per_tablet < StatisticConstants.STATISTIC_INTERNAL_TABLE_REPLICA_NUM\n && Config.max_replication_num_per_tablet >= StatisticConstants.STATISTIC_INTERNAL_TABLE_REPLICA_NUM)) {\n return;\n }\n while (true) {\n if (Env.getCurrentSystemInfo().aliveBECount() >= StatisticConstants.STATISTIC_INTERNAL_TABLE_REPLICA_NUM) {\n try {\n Map props = new HashMap<>();\n props.put(PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION, \"tag.location.default: 3\");\n TableIf colStatsTbl = StatisticsUtil.findTable(InternalCatalog.INTERNAL_CATALOG_NAME,\n StatisticConstants.DB_NAME, tblName);\n OlapTable olapTable = (OlapTable) colStatsTbl;\n Partition partition = olapTable.getPartition(olapTable.getName());\n if (partition.getReplicaCount() >= StatisticConstants.STATISTIC_INTERNAL_TABLE_REPLICA_NUM) {\n return;\n }\n try {\n colStatsTbl.writeLock();\n Env.getCurrentEnv().modifyTableReplicaAllocation(database, (OlapTable) colStatsTbl, props);\n } finally {\n colStatsTbl.writeUnlock();\n }\n break;\n } catch (Throwable t) {\n LOG.warn(\"Failed to scale replica of stats tbl:{} to 3\", tblName, t);\n }\n try {\n Thread.sleep(5000);\n } catch (InterruptedException t) {\n \n }\n }\n }\n }", "target_code": "Thread.sleep(5000);", "method_body_after": "public void modifyTblReplicaCount(Database database, String tblName) {\n if (!(Config.min_replication_num_per_tablet < StatisticConstants.STATISTIC_INTERNAL_TABLE_REPLICA_NUM\n && Config.max_replication_num_per_tablet >= StatisticConstants.STATISTIC_INTERNAL_TABLE_REPLICA_NUM)) {\n return;\n }\n while (true) {\n if (Env.getCurrentSystemInfo().aliveBECount() >= StatisticConstants.STATISTIC_INTERNAL_TABLE_REPLICA_NUM) {\n try {\n Map props = new HashMap<>();\n props.put(PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION, \"tag.location.default: \"\n + StatisticConstants.STATISTIC_INTERNAL_TABLE_REPLICA_NUM);\n TableIf colStatsTbl = StatisticsUtil.findTable(InternalCatalog.INTERNAL_CATALOG_NAME,\n 
StatisticConstants.DB_NAME, tblName);\n OlapTable olapTable = (OlapTable) colStatsTbl;\n Partition partition = olapTable.getPartition(olapTable.getName());\n if (partition.getReplicaCount() >= StatisticConstants.STATISTIC_INTERNAL_TABLE_REPLICA_NUM) {\n return;\n }\n try {\n colStatsTbl.writeLock();\n Env.getCurrentEnv().modifyTableReplicaAllocation(database, (OlapTable) colStatsTbl, props);\n } finally {\n colStatsTbl.writeUnlock();\n }\n break;\n } catch (Throwable t) {\n LOG.warn(\"Failed to scale replica of stats tbl:{} to 3\", tblName, t);\n }\n }\n try {\n Thread.sleep(5000);\n } catch (InterruptedException t) {\n \n }\n }\n }", "context_before": "class InternalSchemaInitializer extends Thread {\n\n public static final int TABLE_CREATION_RETRY_INTERVAL_IN_SECONDS = 5;\n\n private static final Logger LOG = LogManager.getLogger(InternalSchemaInitializer.class);\n\n public void run() {\n if (!FeConstants.enableInternalSchemaDb) {\n return;\n }\n while (!created()) {\n try {\n FrontendNodeType feType = Env.getCurrentEnv().getFeType();\n if (feType.equals(FrontendNodeType.INIT) || feType.equals(FrontendNodeType.UNKNOWN)) {\n LOG.warn(\"FE is not ready\");\n Thread.sleep(5000);\n continue;\n }\n Thread.currentThread()\n .join(TABLE_CREATION_RETRY_INTERVAL_IN_SECONDS * 1000L);\n createDB();\n createTbl();\n } catch (Throwable e) {\n LOG.warn(\"Statistics storage initiated failed, will try again later\", e);\n }\n }\n LOG.info(\"Internal schema is initialized\");\n Optional op\n = Env.getCurrentEnv().getInternalCatalog().getDb(StatisticConstants.DB_NAME);\n if (!op.isPresent()) {\n LOG.warn(\"Internal DB got deleted!\");\n return;\n }\n Database database = op.get();\n modifyTblReplicaCount(database, StatisticConstants.ANALYSIS_TBL_NAME);\n modifyTblReplicaCount(database, StatisticConstants.STATISTIC_TBL_NAME);\n modifyTblReplicaCount(database, StatisticConstants.HISTOGRAM_TBL_NAME);\n }\n\n \n\n private void createTbl() throws UserException {\n 
Env.getCurrentEnv().getInternalCatalog().createTable(buildAnalysisTblStmt());\n Env.getCurrentEnv().getInternalCatalog().createTable(buildStatisticsTblStmt());\n Env.getCurrentEnv().getInternalCatalog().createTable(buildHistogramTblStmt());\n }\n\n @VisibleForTesting\n public static void createDB() {\n CreateDbStmt createDbStmt = new CreateDbStmt(true,\n ClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER, FeConstants.INTERNAL_DB_NAME),\n null);\n createDbStmt.setClusterName(SystemInfoService.DEFAULT_CLUSTER);\n try {\n Env.getCurrentEnv().createDb(createDbStmt);\n } catch (DdlException e) {\n LOG.warn(\"Failed to create database: {}, will try again later\",\n FeConstants.INTERNAL_DB_NAME, e);\n }\n }\n\n @VisibleForTesting\n public CreateTableStmt buildAnalysisTblStmt() throws UserException {\n TableName tableName = new TableName(\"\",\n FeConstants.INTERNAL_DB_NAME, StatisticConstants.ANALYSIS_TBL_NAME);\n List columnDefs = new ArrayList<>();\n columnDefs.add(new ColumnDef(\"id\", TypeDef.createVarchar(StatisticConstants.ID_LEN)));\n columnDefs.add(new ColumnDef(\"catalog_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"db_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"tbl_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"idx_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n ColumnDef partId = new ColumnDef(\"part_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN));\n partId.setAllowNull(true);\n columnDefs.add(partId);\n columnDefs.add(new ColumnDef(\"count\", TypeDef.create(PrimitiveType.BIGINT)));\n columnDefs.add(new ColumnDef(\"last_analyze_time_in_ms\", TypeDef.create(PrimitiveType.BIGINT)));\n columnDefs.add(new ColumnDef(\"update_time\", TypeDef.create(PrimitiveType.DATETIME)));\n String engineName = \"olap\";\n ArrayList uniqueKeys = Lists.newArrayList(\"id\", 
\"catalog_id\",\n \"db_id\", \"tbl_id\", \"idx_id\", \"part_id\");\n KeysDesc keysDesc = new KeysDesc(KeysType.UNIQUE_KEYS, uniqueKeys);\n DistributionDesc distributionDesc = new HashDistributionDesc(\n StatisticConstants.STATISTIC_TABLE_BUCKET_COUNT, uniqueKeys);\n Map properties = new HashMap() {\n {\n put(\"replication_num\", String.valueOf(\n Math.max(1, Config.min_replication_num_per_tablet)));\n }\n };\n CreateTableStmt createTableStmt = new CreateTableStmt(true, false,\n tableName, columnDefs, engineName, keysDesc, null, distributionDesc,\n properties, null, \"Doris internal statistics table, DO NOT MODIFY IT\", null);\n StatisticsUtil.analyze(createTableStmt);\n return createTableStmt;\n }\n\n @VisibleForTesting\n public CreateTableStmt buildStatisticsTblStmt() throws UserException {\n TableName tableName = new TableName(\"\",\n FeConstants.INTERNAL_DB_NAME, StatisticConstants.STATISTIC_TBL_NAME);\n List columnDefs = new ArrayList<>();\n columnDefs.add(new ColumnDef(\"id\", TypeDef.createVarchar(StatisticConstants.ID_LEN)));\n columnDefs.add(new ColumnDef(\"catalog_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"db_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"tbl_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"idx_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"col_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n ColumnDef partId = new ColumnDef(\"part_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN));\n partId.setAllowNull(true);\n columnDefs.add(partId);\n columnDefs.add(new ColumnDef(\"count\", TypeDef.create(PrimitiveType.BIGINT), true));\n columnDefs.add(new ColumnDef(\"ndv\", TypeDef.create(PrimitiveType.BIGINT), true));\n columnDefs.add(new ColumnDef(\"null_count\", TypeDef.create(PrimitiveType.BIGINT), true));\n 
columnDefs.add(new ColumnDef(\"min\", TypeDef.createVarchar(ScalarType.MAX_VARCHAR_LENGTH), true));\n columnDefs.add(new ColumnDef(\"max\", TypeDef.createVarchar(ScalarType.MAX_VARCHAR_LENGTH), true));\n columnDefs.add(new ColumnDef(\"data_size_in_bytes\", TypeDef.create(PrimitiveType.BIGINT), true));\n columnDefs.add(new ColumnDef(\"update_time\", TypeDef.create(PrimitiveType.DATETIME)));\n String engineName = \"olap\";\n ArrayList uniqueKeys = Lists.newArrayList(\"id\", \"catalog_id\",\n \"db_id\", \"tbl_id\", \"idx_id\", \"col_id\", \"part_id\");\n KeysDesc keysDesc = new KeysDesc(KeysType.UNIQUE_KEYS, uniqueKeys);\n DistributionDesc distributionDesc = new HashDistributionDesc(\n StatisticConstants.STATISTIC_TABLE_BUCKET_COUNT, uniqueKeys);\n Map properties = new HashMap() {\n {\n put(\"replication_num\", String.valueOf(\n Math.max(1, Config.min_replication_num_per_tablet)));\n }\n };\n CreateTableStmt createTableStmt = new CreateTableStmt(true, false,\n tableName, columnDefs, engineName, keysDesc, null, distributionDesc,\n properties, null, \"Doris internal statistics table, DO NOT MODIFY IT\", null);\n \n StatisticsUtil.analyze(createTableStmt);\n return createTableStmt;\n }\n\n @VisibleForTesting\n public CreateTableStmt buildHistogramTblStmt() throws UserException {\n TableName tableName = new TableName(\"\",\n FeConstants.INTERNAL_DB_NAME, StatisticConstants.HISTOGRAM_TBL_NAME);\n List columnDefs = new ArrayList<>();\n columnDefs.add(new ColumnDef(\"id\", TypeDef.createVarchar(StatisticConstants.ID_LEN)));\n columnDefs.add(new ColumnDef(\"catalog_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"db_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"tbl_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"idx_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"col_id\", 
TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"sample_rate\", TypeDef.create(PrimitiveType.DOUBLE)));\n columnDefs.add(new ColumnDef(\"buckets\", TypeDef.createVarchar(ScalarType.MAX_VARCHAR_LENGTH)));\n columnDefs.add(new ColumnDef(\"update_time\", TypeDef.create(PrimitiveType.DATETIME)));\n String engineName = \"olap\";\n ArrayList uniqueKeys = Lists.newArrayList(\"id\", \"catalog_id\",\n \"db_id\", \"tbl_id\", \"idx_id\", \"col_id\");\n KeysDesc keysDesc = new KeysDesc(KeysType.UNIQUE_KEYS, uniqueKeys);\n DistributionDesc distributionDesc = new HashDistributionDesc(\n StatisticConstants.STATISTIC_TABLE_BUCKET_COUNT, uniqueKeys);\n Map properties = new HashMap() {\n {\n put(\"replication_num\", String.valueOf(Math.max(1,\n Config.min_replication_num_per_tablet)));\n }\n };\n CreateTableStmt createTableStmt = new CreateTableStmt(true, false,\n tableName, columnDefs, engineName, keysDesc, null, distributionDesc,\n properties, null, \"Doris internal statistics table, DO NOT MODIFY IT\", null);\n StatisticsUtil.analyze(createTableStmt);\n \n return createTableStmt;\n }\n\n private boolean created() {\n Optional optionalDatabase =\n Env.getCurrentEnv().getInternalCatalog()\n .getDb(SystemInfoService.DEFAULT_CLUSTER + \":\" + FeConstants.INTERNAL_DB_NAME);\n if (!optionalDatabase.isPresent()) {\n return false;\n }\n Database db = optionalDatabase.get();\n return db.getTable(StatisticConstants.ANALYSIS_TBL_NAME).isPresent()\n && db.getTable(StatisticConstants.STATISTIC_TBL_NAME).isPresent()\n && db.getTable(StatisticConstants.HISTOGRAM_TBL_NAME).isPresent();\n }\n\n /**\n * Compare whether the current internal table schema meets expectations,\n * delete and rebuild if it does not meet the table schema.\n * TODO remove this code after the table structure is stable\n */\n private boolean isTableChanged(TableName tableName, List columnDefs) {\n try {\n String catalogName = Env.getCurrentEnv().getInternalCatalog().getName();\n 
String dbName = SystemInfoService.DEFAULT_CLUSTER + \":\" + tableName.getDb();\n TableIf table = StatisticsUtil.findTable(catalogName, dbName, tableName.getTbl());\n List existColumns = table.getBaseSchema(false);\n existColumns.sort(Comparator.comparing(Column::getName));\n List columns = columnDefs.stream()\n .map(ColumnDef::toColumn)\n .sorted(Comparator.comparing(Column::getName))\n .collect(Collectors.toList());\n if (columns.size() != existColumns.size()) {\n return true;\n }\n for (int i = 0; i < columns.size(); i++) {\n Column c1 = columns.get(i);\n Column c2 = existColumns.get(i);\n if (!c1.getName().equals(c2.getName())\n || c1.getDataType() != c2.getDataType()) {\n return true;\n }\n }\n return false;\n } catch (Throwable t) {\n LOG.warn(\"Failed to check table schema\", t);\n return false;\n }\n }\n\n}", "context_after": "class InternalSchemaInitializer extends Thread {\n\n public static final int TABLE_CREATION_RETRY_INTERVAL_IN_SECONDS = 5;\n\n private static final Logger LOG = LogManager.getLogger(InternalSchemaInitializer.class);\n\n public void run() {\n if (!FeConstants.enableInternalSchemaDb) {\n return;\n }\n while (!created()) {\n try {\n FrontendNodeType feType = Env.getCurrentEnv().getFeType();\n if (feType.equals(FrontendNodeType.INIT) || feType.equals(FrontendNodeType.UNKNOWN)) {\n LOG.warn(\"FE is not ready\");\n Thread.sleep(5000);\n continue;\n }\n Thread.currentThread()\n .join(TABLE_CREATION_RETRY_INTERVAL_IN_SECONDS * 1000L);\n createDB();\n createTbl();\n } catch (Throwable e) {\n LOG.warn(\"Statistics storage initiated failed, will try again later\", e);\n }\n }\n LOG.info(\"Internal schema is initialized\");\n Optional op\n = Env.getCurrentEnv().getInternalCatalog().getDb(StatisticConstants.DB_NAME);\n if (!op.isPresent()) {\n LOG.warn(\"Internal DB got deleted!\");\n return;\n }\n Database database = op.get();\n modifyTblReplicaCount(database, StatisticConstants.ANALYSIS_TBL_NAME);\n modifyTblReplicaCount(database, 
StatisticConstants.STATISTIC_TBL_NAME);\n modifyTblReplicaCount(database, StatisticConstants.HISTOGRAM_TBL_NAME);\n }\n\n \n\n private void createTbl() throws UserException {\n Env.getCurrentEnv().getInternalCatalog().createTable(buildAnalysisTblStmt());\n Env.getCurrentEnv().getInternalCatalog().createTable(buildStatisticsTblStmt());\n Env.getCurrentEnv().getInternalCatalog().createTable(buildHistogramTblStmt());\n }\n\n @VisibleForTesting\n public static void createDB() {\n CreateDbStmt createDbStmt = new CreateDbStmt(true,\n ClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER, FeConstants.INTERNAL_DB_NAME),\n null);\n createDbStmt.setClusterName(SystemInfoService.DEFAULT_CLUSTER);\n try {\n Env.getCurrentEnv().createDb(createDbStmt);\n } catch (DdlException e) {\n LOG.warn(\"Failed to create database: {}, will try again later\",\n FeConstants.INTERNAL_DB_NAME, e);\n }\n }\n\n @VisibleForTesting\n public CreateTableStmt buildAnalysisTblStmt() throws UserException {\n TableName tableName = new TableName(\"\",\n FeConstants.INTERNAL_DB_NAME, StatisticConstants.ANALYSIS_TBL_NAME);\n List columnDefs = new ArrayList<>();\n columnDefs.add(new ColumnDef(\"id\", TypeDef.createVarchar(StatisticConstants.ID_LEN)));\n columnDefs.add(new ColumnDef(\"catalog_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"db_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"tbl_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"idx_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n ColumnDef partId = new ColumnDef(\"part_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN));\n partId.setAllowNull(true);\n columnDefs.add(partId);\n columnDefs.add(new ColumnDef(\"count\", TypeDef.create(PrimitiveType.BIGINT)));\n columnDefs.add(new ColumnDef(\"last_analyze_time_in_ms\", TypeDef.create(PrimitiveType.BIGINT)));\n 
columnDefs.add(new ColumnDef(\"update_time\", TypeDef.create(PrimitiveType.DATETIME)));\n String engineName = \"olap\";\n ArrayList uniqueKeys = Lists.newArrayList(\"id\", \"catalog_id\",\n \"db_id\", \"tbl_id\", \"idx_id\", \"part_id\");\n KeysDesc keysDesc = new KeysDesc(KeysType.UNIQUE_KEYS, uniqueKeys);\n DistributionDesc distributionDesc = new HashDistributionDesc(\n StatisticConstants.STATISTIC_TABLE_BUCKET_COUNT, uniqueKeys);\n Map properties = new HashMap() {\n {\n put(\"replication_num\", String.valueOf(\n Math.max(1, Config.min_replication_num_per_tablet)));\n }\n };\n CreateTableStmt createTableStmt = new CreateTableStmt(true, false,\n tableName, columnDefs, engineName, keysDesc, null, distributionDesc,\n properties, null, \"Doris internal statistics table, DO NOT MODIFY IT\", null);\n StatisticsUtil.analyze(createTableStmt);\n return createTableStmt;\n }\n\n @VisibleForTesting\n public CreateTableStmt buildStatisticsTblStmt() throws UserException {\n TableName tableName = new TableName(\"\",\n FeConstants.INTERNAL_DB_NAME, StatisticConstants.STATISTIC_TBL_NAME);\n List columnDefs = new ArrayList<>();\n columnDefs.add(new ColumnDef(\"id\", TypeDef.createVarchar(StatisticConstants.ID_LEN)));\n columnDefs.add(new ColumnDef(\"catalog_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"db_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"tbl_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"idx_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"col_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n ColumnDef partId = new ColumnDef(\"part_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN));\n partId.setAllowNull(true);\n columnDefs.add(partId);\n columnDefs.add(new ColumnDef(\"count\", TypeDef.create(PrimitiveType.BIGINT), true));\n columnDefs.add(new 
ColumnDef(\"ndv\", TypeDef.create(PrimitiveType.BIGINT), true));\n columnDefs.add(new ColumnDef(\"null_count\", TypeDef.create(PrimitiveType.BIGINT), true));\n columnDefs.add(new ColumnDef(\"min\", TypeDef.createVarchar(ScalarType.MAX_VARCHAR_LENGTH), true));\n columnDefs.add(new ColumnDef(\"max\", TypeDef.createVarchar(ScalarType.MAX_VARCHAR_LENGTH), true));\n columnDefs.add(new ColumnDef(\"data_size_in_bytes\", TypeDef.create(PrimitiveType.BIGINT), true));\n columnDefs.add(new ColumnDef(\"update_time\", TypeDef.create(PrimitiveType.DATETIME)));\n String engineName = \"olap\";\n ArrayList uniqueKeys = Lists.newArrayList(\"id\", \"catalog_id\",\n \"db_id\", \"tbl_id\", \"idx_id\", \"col_id\", \"part_id\");\n KeysDesc keysDesc = new KeysDesc(KeysType.UNIQUE_KEYS, uniqueKeys);\n DistributionDesc distributionDesc = new HashDistributionDesc(\n StatisticConstants.STATISTIC_TABLE_BUCKET_COUNT, uniqueKeys);\n Map properties = new HashMap() {\n {\n put(\"replication_num\", String.valueOf(\n Math.max(1, Config.min_replication_num_per_tablet)));\n }\n };\n CreateTableStmt createTableStmt = new CreateTableStmt(true, false,\n tableName, columnDefs, engineName, keysDesc, null, distributionDesc,\n properties, null, \"Doris internal statistics table, DO NOT MODIFY IT\", null);\n \n StatisticsUtil.analyze(createTableStmt);\n return createTableStmt;\n }\n\n @VisibleForTesting\n public CreateTableStmt buildHistogramTblStmt() throws UserException {\n TableName tableName = new TableName(\"\",\n FeConstants.INTERNAL_DB_NAME, StatisticConstants.HISTOGRAM_TBL_NAME);\n List columnDefs = new ArrayList<>();\n columnDefs.add(new ColumnDef(\"id\", TypeDef.createVarchar(StatisticConstants.ID_LEN)));\n columnDefs.add(new ColumnDef(\"catalog_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"db_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"tbl_id\", 
TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"idx_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"col_id\", TypeDef.createVarchar(StatisticConstants.MAX_NAME_LEN)));\n columnDefs.add(new ColumnDef(\"sample_rate\", TypeDef.create(PrimitiveType.DOUBLE)));\n columnDefs.add(new ColumnDef(\"buckets\", TypeDef.createVarchar(ScalarType.MAX_VARCHAR_LENGTH)));\n columnDefs.add(new ColumnDef(\"update_time\", TypeDef.create(PrimitiveType.DATETIME)));\n String engineName = \"olap\";\n ArrayList uniqueKeys = Lists.newArrayList(\"id\", \"catalog_id\",\n \"db_id\", \"tbl_id\", \"idx_id\", \"col_id\");\n KeysDesc keysDesc = new KeysDesc(KeysType.UNIQUE_KEYS, uniqueKeys);\n DistributionDesc distributionDesc = new HashDistributionDesc(\n StatisticConstants.STATISTIC_TABLE_BUCKET_COUNT, uniqueKeys);\n Map properties = new HashMap() {\n {\n put(\"replication_num\", String.valueOf(Math.max(1,\n Config.min_replication_num_per_tablet)));\n }\n };\n CreateTableStmt createTableStmt = new CreateTableStmt(true, false,\n tableName, columnDefs, engineName, keysDesc, null, distributionDesc,\n properties, null, \"Doris internal statistics table, DO NOT MODIFY IT\", null);\n StatisticsUtil.analyze(createTableStmt);\n \n return createTableStmt;\n }\n\n private boolean created() {\n Optional optionalDatabase =\n Env.getCurrentEnv().getInternalCatalog()\n .getDb(SystemInfoService.DEFAULT_CLUSTER + \":\" + FeConstants.INTERNAL_DB_NAME);\n if (!optionalDatabase.isPresent()) {\n return false;\n }\n Database db = optionalDatabase.get();\n return db.getTable(StatisticConstants.ANALYSIS_TBL_NAME).isPresent()\n && db.getTable(StatisticConstants.STATISTIC_TBL_NAME).isPresent()\n && db.getTable(StatisticConstants.HISTOGRAM_TBL_NAME).isPresent();\n }\n\n /**\n * Compare whether the current internal table schema meets expectations,\n * delete and rebuild if it does not meet the table schema.\n * TODO remove this code 
after the table structure is stable\n */\n private boolean isTableChanged(TableName tableName, List columnDefs) {\n try {\n String catalogName = Env.getCurrentEnv().getInternalCatalog().getName();\n String dbName = SystemInfoService.DEFAULT_CLUSTER + \":\" + tableName.getDb();\n TableIf table = StatisticsUtil.findTable(catalogName, dbName, tableName.getTbl());\n List existColumns = table.getBaseSchema(false);\n existColumns.sort(Comparator.comparing(Column::getName));\n List columns = columnDefs.stream()\n .map(ColumnDef::toColumn)\n .sorted(Comparator.comparing(Column::getName))\n .collect(Collectors.toList());\n if (columns.size() != existColumns.size()) {\n return true;\n }\n for (int i = 0; i < columns.size(); i++) {\n Column c1 = columns.get(i);\n Column c2 = existColumns.get(i);\n if (!c1.getName().equals(c2.getName())\n || c1.getDataType() != c2.getDataType()) {\n return true;\n }\n }\n return false;\n } catch (Throwable t) {\n LOG.warn(\"Failed to check table schema\", t);\n return false;\n }\n }\n\n}" }, { "comment": "Should we make matching case-insensitive? 
```java Pattern pattern = Pattern.compile(\"/subscriptions/([\\\\w-]+)/\", Pattern.CASE_INSENSITIVE); ```", "method_body": "public Response intercept(Chain chain) throws IOException {\n Response response = chain.proceed(chain.request());\n if (!response.isSuccessful()) {\n String content = errorBody(response.body());\n RestClient restClient = new RestClient.Builder()\n .withBaseUrl(\"https:\n .withCredentials(credentials)\n .withSerializerAdapter(new AzureJacksonAdapter())\n .withResponseBuilderFactory(new AzureResponseBuilder.Factory())\n .build();\n CloudError cloudError = restClient.serializerAdapter().deserialize(content, CloudError.class);\n if (\"MissingSubscriptionRegistration\".equals(cloudError.code())) {\n Pattern pattern = Pattern.compile(\"/subscriptions/([\\\\w-]+)/\");\n Matcher matcher = pattern.matcher(chain.request().url().toString());\n matcher.find();\n ResourceManager resourceManager = ResourceManager.authenticate(restClient)\n .withSubscription(matcher.group(1));\n pattern = Pattern.compile(\".*'(.*)'\");\n matcher = pattern.matcher(cloudError.message());\n matcher.find();\n Provider provider = registerProvider(matcher.group(1), resourceManager);\n while (provider.registrationState().equals(\"Unregistered\")\n || provider.registrationState().equalsIgnoreCase(\"Registering\")) {\n SdkContext.sleep(5 * 1000);\n provider = resourceManager.providers().getByName(provider.namespace());\n }\n \n response = chain.proceed(chain.request());\n }\n }\n return response;\n }", "target_code": "Pattern pattern = Pattern.compile(\"/subscriptions/([\\\\w-]+)/\");", "method_body_after": "public Response intercept(Chain chain) throws IOException {\n Response response = chain.proceed(chain.request());\n if (!response.isSuccessful()) {\n String content = errorBody(response.body());\n AzureJacksonAdapter jacksonAdapter = new AzureJacksonAdapter();\n CloudError cloudError = jacksonAdapter.deserialize(content, CloudError.class);\n if 
(\"MissingSubscriptionRegistration\".equals(cloudError.code())) {\n Pattern pattern = Pattern.compile(\"/subscriptions/([\\\\w-]+)/\", Pattern.CASE_INSENSITIVE);\n Matcher matcher = pattern.matcher(chain.request().url().toString());\n matcher.find();\n RestClient restClient = new RestClient.Builder()\n .withBaseUrl(\"https:\n .withCredentials(credentials)\n .withSerializerAdapter(jacksonAdapter)\n .withResponseBuilderFactory(new AzureResponseBuilder.Factory())\n .build();\n ResourceManager resourceManager = ResourceManager.authenticate(restClient)\n .withSubscription(matcher.group(1));\n pattern = Pattern.compile(\".*'(.*)'\");\n matcher = pattern.matcher(cloudError.message());\n matcher.find();\n Provider provider = registerProvider(matcher.group(1), resourceManager);\n while (provider.registrationState().equalsIgnoreCase(\"Unregistered\")\n || provider.registrationState().equalsIgnoreCase(\"Registering\")) {\n SdkContext.sleep(5 * 1000);\n provider = resourceManager.providers().getByName(provider.namespace());\n }\n \n response = chain.proceed(chain.request());\n }\n }\n return response;\n }", "context_before": "class ProviderRegistrationInterceptor implements Interceptor {\n private AzureTokenCredentials credentials;\n\n /**\n * Initialize a provider registration interceptor with a credential that's authorized\n * to register the provider.\n * @param credentials the credential for provider registration\n */\n public ProviderRegistrationInterceptor(AzureTokenCredentials credentials) {\n this.credentials = credentials;\n }\n\n @Override\n \n\n private String errorBody(ResponseBody responseBody) throws IOException {\n if (responseBody == null) {\n return null;\n }\n BufferedSource source = responseBody.source();\n source.request(Long.MAX_VALUE); \n Buffer buffer = source.buffer();\n\n return buffer.readUtf8();\n }\n\n private Provider registerProvider(String namespace, ResourceManager resourceManager) {\n return resourceManager.providers().register(namespace);\n 
}\n}", "context_after": "class ProviderRegistrationInterceptor implements Interceptor {\n private final AzureTokenCredentials credentials;\n\n /**\n * Initialize a provider registration interceptor with a credential that's authorized\n * to register the provider.\n * @param credentials the credential for provider registration\n */\n public ProviderRegistrationInterceptor(AzureTokenCredentials credentials) {\n this.credentials = credentials;\n }\n\n @Override\n \n\n private String errorBody(ResponseBody responseBody) throws IOException {\n if (responseBody == null) {\n return null;\n }\n BufferedSource source = responseBody.source();\n source.request(Long.MAX_VALUE); \n Buffer buffer = source.buffer();\n return buffer.readUtf8();\n }\n\n private Provider registerProvider(String namespace, ResourceManager resourceManager) {\n return resourceManager.providers().register(namespace);\n }\n}" }, { "comment": "Shouldn't it be completely removed?", "method_body": "protected List getDependencies() {\n if (dependencies == null) {\n dependencies = projectDepsSupplier.get();\n projectDepsSupplier = null;\n }\n return dependencies;\n }", "target_code": "if (dependencies == null) {", "method_body_after": "protected List getDependencies() {\n if (dependencies == null) {\n dependencies = projectDepsSupplier.get();\n projectDepsSupplier = null;\n }\n return dependencies;\n }", "context_before": "class MavenProjectBuildFile extends BuildFile {\n\n private static final Pattern PROPERTY_PATTERN = Pattern.compile(\"\\\\$\\\\{(.+)}\");\n\n public static QuarkusProject getProject(Path projectDir, MessageWriter log, Supplier defaultQuarkusVersion) {\n final MavenArtifactResolver mvnResolver = getMavenResolver(projectDir);\n final LocalProject currentProject = mvnResolver.getMavenContext().getCurrentProject();\n final Model projectModel;\n final Artifact projectPom;\n if (currentProject != null && isSameFile(projectDir, currentProject.getDir())) {\n projectPom = new 
DefaultArtifact(currentProject.getGroupId(), currentProject.getArtifactId(), null, \"pom\",\n currentProject.getVersion());\n projectModel = currentProject.getRawModel();\n } else {\n projectPom = null;\n projectModel = null;\n }\n return getProject(projectPom, projectModel, projectDir,\n projectModel == null ? new Properties() : projectModel.getProperties(), mvnResolver, log,\n defaultQuarkusVersion);\n }\n\n public static QuarkusProject getProject(Artifact projectPom, Model projectModel, Path projectDir,\n Properties projectProps, MavenArtifactResolver mvnResolver, MessageWriter log,\n Supplier defaultQuarkusVersion) {\n final List managedDeps;\n final Supplier> deps;\n final String quarkusVersion;\n if (projectPom == null) {\n managedDeps = Collections.emptyList();\n deps = () -> Collections.emptyList();\n quarkusVersion = defaultQuarkusVersion.get();\n } else {\n final ArtifactDescriptorResult descriptor = describe(mvnResolver, projectPom);\n managedDeps = toArtifactCoords(descriptor.getManagedDependencies());\n deps = () -> toArtifactCoords(descriptor.getDependencies());\n quarkusVersion = getQuarkusVersion(managedDeps);\n }\n\n final ExtensionCatalog extensionCatalog;\n final ExtensionCatalogResolver catalogResolver = QuarkusProjectHelper.getCatalogResolver(mvnResolver, log);\n if (catalogResolver.hasRegistries()) {\n try {\n extensionCatalog = catalogResolver.resolveExtensionCatalog(quarkusVersion);\n } catch (RegistryResolutionException e) {\n throw new RuntimeException(\"Failed to resolve extension catalog\", e);\n }\n } else {\n final List importedPlatforms = collectPlatformDescriptors(managedDeps, log);\n if (importedPlatforms.isEmpty()) {\n extensionCatalog = ToolsUtils.resolvePlatformDescriptorDirectly(null, null, quarkusVersion, mvnResolver, log);\n } else {\n extensionCatalog = ToolsUtils.mergePlatforms(importedPlatforms, mvnResolver);\n }\n }\n final MavenProjectBuildFile extensionManager = new MavenProjectBuildFile(projectDir, extensionCatalog,\n 
projectModel, deps, managedDeps, projectProps);\n final List codestartResourceLoaders = codestartLoadersBuilder().catalog(extensionCatalog)\n .artifactResolver(mvnResolver).build();\n return QuarkusProject.of(projectDir, extensionCatalog,\n codestartResourceLoaders, log, extensionManager);\n }\n\n private static MavenArtifactResolver getMavenResolver(Path projectDir) {\n final RegistriesConfig toolsConfig = QuarkusProjectHelper.toolsConfig();\n try {\n return MavenArtifactResolver.builder()\n .setArtifactTransferLogging(toolsConfig.isDebug())\n .setCurrentProject(projectDir.toString())\n .setPreferPomsFromWorkspace(true)\n .build();\n } catch (BootstrapMavenException e) {\n throw new RuntimeException(\"Failed to initialize Maven artifact resolver\", e);\n }\n }\n\n private static String getQuarkusVersion(List managedDeps) {\n for (ArtifactCoords a : managedDeps) {\n if (a.getArtifactId().endsWith(\"quarkus-core\") && a.getGroupId().equals(\"io.quarkus\")) {\n return a.getVersion();\n }\n }\n return null;\n }\n\n private static List toArtifactCoords(List deps) {\n final List result = new ArrayList<>(deps.size());\n for (org.eclipse.aether.graph.Dependency dep : deps) {\n org.eclipse.aether.artifact.Artifact a = dep.getArtifact();\n result.add(new ArtifactCoords(a.getGroupId(), a.getArtifactId(), a.getClassifier(),\n a.getExtension(), a.getVersion()));\n }\n return result;\n }\n\n private static ArtifactDescriptorResult describe(MavenArtifactResolver resolver, Artifact projectArtifact) {\n try {\n return resolver.resolveDescriptor(projectArtifact);\n } catch (BootstrapMavenException e) {\n throw new RuntimeException(\"Failed to resolve descriptor for \" + projectArtifact, e);\n }\n }\n\n private static List collectPlatformDescriptors(List managedDeps, MessageWriter log) {\n if (managedDeps.isEmpty()) {\n return Collections.emptyList();\n }\n final List result = new ArrayList<>(4);\n for (ArtifactCoords c : managedDeps) {\n if (PlatformArtifacts.isCatalogArtifact(c)) 
{\n result.add(c);\n }\n }\n return result;\n }\n\n private static boolean isSameFile(Path p1, Path p2) {\n try {\n return Files.isSameFile(p1, p2);\n } catch (IOException e) {\n throw new RuntimeException(\"Failed to compare \" + p1 + \" to \" + p2, e);\n }\n }\n\n private final Model model;\n private final List managedDependencies;\n private final Properties projectProps;\n private Supplier> projectDepsSupplier;\n private List dependencies;\n private List importedPlatforms;\n\n private MavenProjectBuildFile(Path projectDirPath, ExtensionCatalog extensionsCatalog, Model model,\n Supplier> projectDeps,\n List projectManagedDeps,\n Properties projectProps) {\n super(projectDirPath, extensionsCatalog);\n this.model = model;\n this.projectDepsSupplier = projectDeps;\n this.managedDependencies = projectManagedDeps;\n this.projectProps = projectProps;\n }\n\n @Override\n public BuildTool getBuildTool() {\n return BuildTool.MAVEN;\n }\n\n @Override\n protected boolean addDependency(ArtifactCoords coords, boolean managed) {\n final Dependency d = new Dependency();\n d.setGroupId(coords.getGroupId());\n d.setArtifactId(coords.getArtifactId());\n if (!managed) {\n d.setVersion(coords.getVersion());\n }\n \n if (coords.getClassifier() != null && !coords.getClassifier().isEmpty()) {\n d.setClassifier(coords.getClassifier());\n }\n d.setType(coords.getType());\n if (\"pom\".equalsIgnoreCase(coords.getType())) {\n d.setScope(\"import\");\n DependencyManagement dependencyManagement = model().getDependencyManagement();\n if (dependencyManagement == null) {\n dependencyManagement = new DependencyManagement();\n model().setDependencyManagement(dependencyManagement);\n }\n if (dependencyManagement.getDependencies()\n .stream()\n .noneMatch(thisDep -> d.getManagementKey().equals(resolveKey(thisDep)))) {\n dependencyManagement.addDependency(d);\n \n if (!getManagedDependencies().contains(coords)) {\n getManagedDependencies().add(coords);\n }\n return true;\n }\n } else if 
(model().getDependencies()\n .stream()\n .noneMatch(thisDep -> d.getManagementKey().equals(thisDep.getManagementKey()))) {\n model().getDependencies().add(0, d);\n \n if (!getDependencies().contains(coords)) {\n getDependencies().add(0, coords);\n }\n return true;\n }\n return false;\n }\n\n @Override\n protected void removeDependency(ArtifactKey key) throws IOException {\n if (model() != null) {\n final Iterator i = getDependencies().iterator();\n while (i.hasNext()) {\n final ArtifactCoords a = i.next();\n if (a.getKey().equals(key)) {\n i.remove();\n break;\n }\n }\n model().getDependencies().removeIf(d -> Objects.equals(toKey(d), key));\n }\n }\n\n @Override\n \n\n @Override\n public final Collection getInstalledPlatforms() throws IOException {\n if (importedPlatforms == null) {\n final List tmp = new ArrayList<>(4);\n for (ArtifactCoords c : getManagedDependencies()) {\n if (PlatformArtifacts.isCatalogArtifact(c)) {\n tmp.add(PlatformArtifacts.getBomArtifactForCatalog(c));\n }\n }\n importedPlatforms = tmp;\n }\n return importedPlatforms;\n }\n\n protected List getManagedDependencies() {\n return managedDependencies;\n }\n\n @Override\n protected void writeToDisk() throws IOException {\n if (model == null) {\n return;\n }\n try (ByteArrayOutputStream pomOutputStream = new ByteArrayOutputStream()) {\n MojoUtils.write(model(), pomOutputStream);\n writeToProjectFile(BuildTool.MAVEN.getDependenciesFile(), pomOutputStream.toByteArray());\n }\n }\n\n @Override\n protected String getProperty(String propertyName) {\n return projectProps.getProperty(propertyName);\n }\n\n @Override\n protected void refreshData() {\n }\n\n private Model model() {\n return model;\n }\n\n /**\n * Resolves dependencies containing property references in the GAV\n */\n private String resolveKey(Dependency dependency) {\n String resolvedGroupId = toResolvedProperty(dependency.getGroupId());\n String resolvedArtifactId = toResolvedProperty(dependency.getArtifactId());\n String resolvedVersion 
= toResolvedProperty(dependency.getVersion());\n if (!resolvedGroupId.equals(dependency.getGroupId())\n || !resolvedArtifactId.equals(dependency.getArtifactId())\n || !resolvedVersion.equals(dependency.getVersion())) {\n return resolvedGroupId + \":\" + resolvedArtifactId + \":\" + dependency.getType()\n + (dependency.getClassifier() != null ? \":\" + dependency.getClassifier() : \"\");\n }\n return dependency.getManagementKey();\n }\n\n /**\n * Resolves properties as ${quarkus.platform.version}\n */\n private String toResolvedProperty(String value) {\n Matcher matcher = PROPERTY_PATTERN.matcher(value);\n if (matcher.matches()) {\n String property = getProperty(matcher.group(1));\n return property == null ? value : property;\n }\n return value;\n }\n}", "context_after": "class MavenProjectBuildFile extends BuildFile {\n\n private static final Pattern PROPERTY_PATTERN = Pattern.compile(\"\\\\$\\\\{(.+)}\");\n\n public static QuarkusProject getProject(Path projectDir, MessageWriter log, Supplier defaultQuarkusVersion) {\n final MavenArtifactResolver mvnResolver = getMavenResolver(projectDir);\n final LocalProject currentProject = mvnResolver.getMavenContext().getCurrentProject();\n final Model projectModel;\n final Artifact projectPom;\n if (currentProject != null && isSameFile(projectDir, currentProject.getDir())) {\n projectPom = new DefaultArtifact(currentProject.getGroupId(), currentProject.getArtifactId(), null, \"pom\",\n currentProject.getVersion());\n projectModel = currentProject.getRawModel();\n } else {\n projectPom = null;\n projectModel = null;\n }\n return getProject(projectPom, projectModel, projectDir,\n projectModel == null ? 
new Properties() : projectModel.getProperties(), mvnResolver, log,\n defaultQuarkusVersion);\n }\n\n public static QuarkusProject getProject(Artifact projectPom, Model projectModel, Path projectDir,\n Properties projectProps, MavenArtifactResolver mvnResolver, MessageWriter log,\n Supplier defaultQuarkusVersion) {\n final List managedDeps;\n final Supplier> deps;\n final String quarkusVersion;\n if (projectPom == null) {\n managedDeps = Collections.emptyList();\n deps = () -> Collections.emptyList();\n quarkusVersion = defaultQuarkusVersion.get();\n } else {\n final ArtifactDescriptorResult descriptor = describe(mvnResolver, projectPom);\n managedDeps = toArtifactCoords(descriptor.getManagedDependencies());\n deps = () -> toArtifactCoords(descriptor.getDependencies());\n quarkusVersion = getQuarkusVersion(managedDeps);\n }\n\n final ExtensionCatalog extensionCatalog;\n final ExtensionCatalogResolver catalogResolver = QuarkusProjectHelper.getCatalogResolver(mvnResolver, log);\n if (catalogResolver.hasRegistries()) {\n try {\n extensionCatalog = catalogResolver.resolveExtensionCatalog(quarkusVersion);\n } catch (RegistryResolutionException e) {\n throw new RuntimeException(\"Failed to resolve extension catalog\", e);\n }\n } else {\n final List importedPlatforms = collectPlatformDescriptors(managedDeps, log);\n if (importedPlatforms.isEmpty()) {\n extensionCatalog = ToolsUtils.resolvePlatformDescriptorDirectly(null, null, quarkusVersion, mvnResolver, log);\n } else {\n extensionCatalog = ToolsUtils.mergePlatforms(importedPlatforms, mvnResolver);\n }\n }\n final MavenProjectBuildFile extensionManager = new MavenProjectBuildFile(projectDir, extensionCatalog,\n projectModel, deps, managedDeps, projectProps);\n final List codestartResourceLoaders = codestartLoadersBuilder().catalog(extensionCatalog)\n .artifactResolver(mvnResolver).build();\n return QuarkusProject.of(projectDir, extensionCatalog,\n codestartResourceLoaders, log, extensionManager);\n }\n\n private static 
MavenArtifactResolver getMavenResolver(Path projectDir) {\n final RegistriesConfig toolsConfig = QuarkusProjectHelper.toolsConfig();\n try {\n return MavenArtifactResolver.builder()\n .setArtifactTransferLogging(toolsConfig.isDebug())\n .setCurrentProject(projectDir.toString())\n .setPreferPomsFromWorkspace(true)\n .build();\n } catch (BootstrapMavenException e) {\n throw new RuntimeException(\"Failed to initialize Maven artifact resolver\", e);\n }\n }\n\n private static String getQuarkusVersion(List managedDeps) {\n for (ArtifactCoords a : managedDeps) {\n if (a.getArtifactId().endsWith(\"quarkus-core\") && a.getGroupId().equals(\"io.quarkus\")) {\n return a.getVersion();\n }\n }\n return null;\n }\n\n private static List toArtifactCoords(List deps) {\n final List result = new ArrayList<>(deps.size());\n for (org.eclipse.aether.graph.Dependency dep : deps) {\n org.eclipse.aether.artifact.Artifact a = dep.getArtifact();\n result.add(new ArtifactCoords(a.getGroupId(), a.getArtifactId(), a.getClassifier(),\n a.getExtension(), a.getVersion()));\n }\n return result;\n }\n\n private static ArtifactDescriptorResult describe(MavenArtifactResolver resolver, Artifact projectArtifact) {\n try {\n return resolver.resolveDescriptor(projectArtifact);\n } catch (BootstrapMavenException e) {\n throw new RuntimeException(\"Failed to resolve descriptor for \" + projectArtifact, e);\n }\n }\n\n private static List collectPlatformDescriptors(List managedDeps, MessageWriter log) {\n if (managedDeps.isEmpty()) {\n return Collections.emptyList();\n }\n final List result = new ArrayList<>(4);\n for (ArtifactCoords c : managedDeps) {\n if (PlatformArtifacts.isCatalogArtifact(c)) {\n result.add(c);\n }\n }\n return result;\n }\n\n private static boolean isSameFile(Path p1, Path p2) {\n try {\n return Files.isSameFile(p1, p2);\n } catch (IOException e) {\n throw new RuntimeException(\"Failed to compare \" + p1 + \" to \" + p2, e);\n }\n }\n\n private final Model model;\n private final List 
managedDependencies;\n private final Properties projectProps;\n private Supplier> projectDepsSupplier;\n private List dependencies;\n private List importedPlatforms;\n\n private MavenProjectBuildFile(Path projectDirPath, ExtensionCatalog extensionsCatalog, Model model,\n Supplier> projectDeps,\n List projectManagedDeps,\n Properties projectProps) {\n super(projectDirPath, extensionsCatalog);\n this.model = model;\n this.projectDepsSupplier = projectDeps;\n this.managedDependencies = projectManagedDeps;\n this.projectProps = projectProps;\n }\n\n @Override\n public BuildTool getBuildTool() {\n return BuildTool.MAVEN;\n }\n\n @Override\n protected boolean addDependency(ArtifactCoords coords, boolean managed) {\n final Dependency d = new Dependency();\n d.setGroupId(coords.getGroupId());\n d.setArtifactId(coords.getArtifactId());\n if (!managed) {\n d.setVersion(coords.getVersion());\n }\n \n if (coords.getClassifier() != null && !coords.getClassifier().isEmpty()) {\n d.setClassifier(coords.getClassifier());\n }\n d.setType(coords.getType());\n if (\"pom\".equalsIgnoreCase(coords.getType())) {\n d.setScope(\"import\");\n DependencyManagement dependencyManagement = model().getDependencyManagement();\n if (dependencyManagement == null) {\n dependencyManagement = new DependencyManagement();\n model().setDependencyManagement(dependencyManagement);\n }\n if (dependencyManagement.getDependencies()\n .stream()\n .noneMatch(thisDep -> d.getManagementKey().equals(resolveKey(thisDep)))) {\n dependencyManagement.addDependency(d);\n \n if (!getManagedDependencies().contains(coords)) {\n getManagedDependencies().add(coords);\n }\n return true;\n }\n } else if (model().getDependencies()\n .stream()\n .noneMatch(thisDep -> d.getManagementKey().equals(thisDep.getManagementKey()))) {\n final int index = getIndexToAddExtension();\n if (index >= 0) {\n model().getDependencies().add(index, d);\n } else {\n model().getDependencies().add(d);\n }\n\n \n if 
(!getDependencies().contains(coords)) {\n getDependencies().add(coords);\n }\n return true;\n }\n return false;\n }\n\n @Override\n protected void removeDependency(ArtifactKey key) throws IOException {\n if (model() != null) {\n final Iterator i = getDependencies().iterator();\n while (i.hasNext()) {\n final ArtifactCoords a = i.next();\n if (a.getKey().equals(key)) {\n i.remove();\n break;\n }\n }\n model().getDependencies().removeIf(d -> Objects.equals(toKey(d), key));\n }\n }\n\n @Override\n \n\n @Override\n public final Collection getInstalledPlatforms() throws IOException {\n if (importedPlatforms == null) {\n final List tmp = new ArrayList<>(4);\n for (ArtifactCoords c : getManagedDependencies()) {\n if (PlatformArtifacts.isCatalogArtifact(c)) {\n tmp.add(PlatformArtifacts.getBomArtifactForCatalog(c));\n }\n }\n importedPlatforms = tmp;\n }\n return importedPlatforms;\n }\n\n protected List getManagedDependencies() {\n return managedDependencies;\n }\n\n @Override\n protected void writeToDisk() throws IOException {\n if (model == null) {\n return;\n }\n try (ByteArrayOutputStream pomOutputStream = new ByteArrayOutputStream()) {\n MojoUtils.write(model(), pomOutputStream);\n writeToProjectFile(BuildTool.MAVEN.getDependenciesFile(), pomOutputStream.toByteArray());\n }\n }\n\n @Override\n protected String getProperty(String propertyName) {\n return projectProps.getProperty(propertyName);\n }\n\n @Override\n protected void refreshData() {\n }\n\n private int getIndexToAddExtension() {\n final List dependencies = model().getDependencies();\n for (int i = 0; i < dependencies.size(); i++) {\n if (\"test\".equals(dependencies.get(i).getScope())) {\n return i;\n }\n }\n return -1;\n }\n\n private Model model() {\n return model;\n }\n\n /**\n * Resolves dependencies containing property references in the GAV\n */\n private String resolveKey(Dependency dependency) {\n String resolvedGroupId = toResolvedProperty(dependency.getGroupId());\n String resolvedArtifactId = 
toResolvedProperty(dependency.getArtifactId());\n String resolvedVersion = toResolvedProperty(dependency.getVersion());\n if (!resolvedGroupId.equals(dependency.getGroupId())\n || !resolvedArtifactId.equals(dependency.getArtifactId())\n || !resolvedVersion.equals(dependency.getVersion())) {\n return resolvedGroupId + \":\" + resolvedArtifactId + \":\" + dependency.getType()\n + (dependency.getClassifier() != null ? \":\" + dependency.getClassifier() : \"\");\n }\n return dependency.getManagementKey();\n }\n\n /**\n * Resolves properties as ${quarkus.platform.version}\n */\n private String toResolvedProperty(String value) {\n Matcher matcher = PROPERTY_PATTERN.matcher(value);\n if (matcher.matches()) {\n String property = getProperty(matcher.group(1));\n return property == null ? value : property;\n }\n return value;\n }\n}" }, { "comment": "nit: the tabbing needs to be fixed here", "method_body": "public void createRelationship() {\n \n BasicRelationship buildingToFloorBasicRelationship = new BasicRelationship(\n \"myRelationshipId\",\n \"mySourceDigitalTwinId\",\n \"myTargetDigitalTwinId\",\n \"contains\")\n .addCustomProperty(\"Prop1\", \"Prop1 value\")\n .addCustomProperty(\"Prop2\", 6);\n\n digitalTwinsAsyncClient.createRelationship(\n \"mySourceDigitalTwinId\",\n \"myRelationshipId\",\n buildingToFloorBasicRelationship,\n BasicRelationship.class)\n .subscribe(createdRelationship -> System.out.println(\n \"Created relationship with Id: \" +\n createdRelationship.getId() +\n \" from: \" + createdRelationship.getSourceId() +\n \" to: \" + createdRelationship.getTargetId()));\n \n\n \n String relationshipPayload = getRelationshipPayload();\n\n digitalTwinsAsyncClient.createRelationship(\n \"mySourceDigitalTwinId\",\n \"myRelationshipId\",\n relationshipPayload,\n String.class)\n .subscribe(createRelationshipString ->\n System.out.println(\"Created relationship: \" + createRelationshipString));\n \n }", "target_code": "\"myRelationshipId\",", "method_body_after": 
"public void createRelationship() {\n \n BasicRelationship buildingToFloorBasicRelationship = new BasicRelationship(\n \"myRelationshipId\",\n \"mySourceDigitalTwinId\",\n \"myTargetDigitalTwinId\",\n \"contains\")\n .addCustomProperty(\"Prop1\", \"Prop1 value\")\n .addCustomProperty(\"Prop2\", 6);\n\n digitalTwinsAsyncClient.createRelationship(\n \"mySourceDigitalTwinId\",\n \"myRelationshipId\",\n buildingToFloorBasicRelationship,\n BasicRelationship.class)\n .subscribe(createdRelationship -> System.out.println(\n \"Created relationship with Id: \" +\n createdRelationship.getId() +\n \" from: \" + createdRelationship.getSourceId() +\n \" to: \" + createdRelationship.getTargetId()));\n \n\n \n String relationshipPayload = getRelationshipPayload();\n\n digitalTwinsAsyncClient.createRelationship(\n \"mySourceDigitalTwinId\",\n \"myRelationshipId\",\n relationshipPayload,\n String.class)\n .subscribe(createRelationshipString ->\n System.out.println(\"Created relationship: \" + createRelationshipString));\n \n }", "context_before": "class DigitalTwinsAsyncClientJavaDocCodeSnippets extends CodeSnippetBase {\n\n private final DigitalTwinsAsyncClient digitalTwinsAsyncClient;\n\n DigitalTwinsAsyncClientJavaDocCodeSnippets(){\n digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n }\n\n public DigitalTwinsAsyncClient createDigitalTwinsAsyncClient() {\n\n String tenantId = getTenenatId();\n String clientId = getClientId();\n String clientSecret = getClientSecret();\n String digitalTwinsEndpointUrl = getEndpointUrl();\n\n \n DigitalTwinsAsyncClient digitalTwinsAsyncClient = new DigitalTwinsClientBuilder()\n .credential(\n new ClientSecretCredentialBuilder()\n .tenantId(tenantId)\n .clientId(clientId)\n .clientSecret(clientSecret)\n .build())\n .endpoint(digitalTwinsEndpointUrl)\n .buildAsyncClient();\n \n\n return digitalTwinsAsyncClient;\n }\n\n \n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void 
createDigitalTwin() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n \n String modelId = \"dtmi:samples:Building;1\";\n\n BasicDigitalTwin basicTwin = new BasicDigitalTwin(\"myDigitalTwinId\")\n .setMetadata(\n new DigitalTwinMetadata()\n .setModelId(modelId)\n );\n\n digitalTwinsAsyncClient.createDigitalTwin(basicTwin.getId(), basicTwin, BasicDigitalTwin.class)\n .subscribe(response -> System.out.println(\"Created digital twin Id: \" + response.getId()));\n \n\n String digitalTwinStringPayload = getDigitalTwinPayload();\n\n \n digitalTwinsAsyncClient.createDigitalTwin(\"myDigitalTwinId\", digitalTwinStringPayload, String.class)\n .subscribe(stringResponse -> System.out.println(\"Created digital twin: \" + stringResponse));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void createDigitalTwinWithResponse(){\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n String modelId = \"dtmi:samples:Building;1\";\n\n BasicDigitalTwin basicDigitalTwin = new BasicDigitalTwin(\"myDigitalTwinId\")\n .setMetadata(\n new DigitalTwinMetadata()\n .setModelId(modelId)\n );\n\n digitalTwinsAsyncClient.createDigitalTwinWithResponse(\n basicDigitalTwin.getId(),\n basicDigitalTwin,\n BasicDigitalTwin.class,\n new CreateDigitalTwinOptions())\n .subscribe(resultWithResponse ->\n System.out.println(\n \"Response http status: \" +\n resultWithResponse.getStatusCode() +\n \" created digital twin Id: \" +\n resultWithResponse.getValue().getId()));\n \n\n String stringPayload = getDigitalTwinPayload();\n\n \n digitalTwinsAsyncClient.createDigitalTwinWithResponse(\n basicDigitalTwin.getId(),\n stringPayload,\n String.class,\n new CreateDigitalTwinOptions())\n .subscribe(stringWithResponse ->\n System.out.println(\n \"Response http status: \" +\n stringWithResponse.getStatusCode() +\n \" created digital twin: \" +\n stringWithResponse.getValue()));\n \n 
}\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n public void getDigitalTwin() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n digitalTwinsAsyncClient.getDigitalTwin(\"myDigitalTwinId\", BasicDigitalTwin.class)\n .subscribe(\n basicDigitalTwin -> System.out.println(\"Retrieved digital twin with Id: \" + basicDigitalTwin.getId()));\n \n\n \n digitalTwinsAsyncClient.getDigitalTwin(\"myDigitalTwinId\", String.class)\n .subscribe(stringResult -> System.out.println(\"Retrieved digital twin: \" + stringResult));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void getDigitalTwinWithResponse() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n digitalTwinsAsyncClient.getDigitalTwinWithResponse(\n \"myDigitalTwinId\",\n BasicDigitalTwin.class,\n new GetDigitalTwinOptions())\n .subscribe(\n basicDigitalTwinWithResponse -> System.out.println(\n \"Retrieved digital twin with Id: \" + basicDigitalTwinWithResponse.getValue().getId() +\n \" Http Status Code: \" + basicDigitalTwinWithResponse.getStatusCode()));\n \n\n \n digitalTwinsAsyncClient.getDigitalTwinWithResponse(\n \"myDigitalTwinId\",\n String.class,\n new GetDigitalTwinOptions())\n .subscribe(\n basicDigitalTwinWithResponse -> System.out.println(\n \"Retrieved digital twin: \" + basicDigitalTwinWithResponse.getValue() +\n \" Http Status Code: \" + basicDigitalTwinWithResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void updateDigitalTwin() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n UpdateOperationUtility updateOperationUtility = new UpdateOperationUtility();\n updateOperationUtility.appendReplaceOperation(\"Prop1\", \"newValue\");\n\n digitalTwinsAsyncClient.updateDigitalTwin(\n 
\"myDigitalTwinId\",\n updateOperationUtility.getUpdateOperations())\n .subscribe();\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void updateDigitalTwinWithResponse() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n UpdateOperationUtility updateOperationUtility = new UpdateOperationUtility();\n updateOperationUtility.appendReplaceOperation(\"Prop1\", \"newValue\");\n\n digitalTwinsAsyncClient.updateDigitalTwinWithResponse(\n \"myDigitalTwinId\",\n updateOperationUtility.getUpdateOperations(),\n new UpdateDigitalTwinOptions())\n .subscribe(updateResponse ->\n System.out.println(\"Update completed with HTTP status code: \" + updateResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void deleteDigitalTwin() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n digitalTwinsAsyncClient.deleteDigitalTwin(\"myDigitalTwinId\")\n .subscribe();\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void deleteDigitalTwinWithResponse() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n digitalTwinsAsyncClient.deleteDigitalTwinWithResponse(\n \"myDigitalTwinId\",\n new DeleteDigitalTwinOptions())\n .subscribe(deleteResponse ->\n System.out.println(\"Deleted digital twin. 
HTTP response status code: \" + deleteResponse.getStatusCode()));\n \n }\n\n \n\n \n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n \n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void createRelationshipWithResponse() {\n \n BasicRelationship buildingToFloorBasicRelationship = new BasicRelationship(\n \"myRelationshipId\",\n \"mySourceDigitalTwinId\",\n \"myTargetDigitalTwinId\",\n \"contains\")\n .addCustomProperty(\"Prop1\", \"Prop1 value\")\n .addCustomProperty(\"Prop2\", 6);\n\n digitalTwinsAsyncClient.createRelationshipWithResponse(\n \"mySourceDigitalTwinId\",\n \"myRelationshipId\",\n buildingToFloorBasicRelationship,\n BasicRelationship.class,\n new CreateRelationshipOptions())\n .subscribe(createdRelationshipWithResponse -> System.out.println(\n \"Created relationship with Id: \" +\n createdRelationshipWithResponse.getValue().getId() +\n \" from: \" + createdRelationshipWithResponse.getValue().getSourceId() +\n \" to: \" + createdRelationshipWithResponse.getValue().getTargetId() +\n \" Http status code: \" +\n createdRelationshipWithResponse.getStatusCode()));\n \n\n \n String relationshipPayload = getRelationshipPayload();\n\n digitalTwinsAsyncClient.createRelationshipWithResponse(\n \"mySourceDigitalTwinId\",\n \"myRelationshipId\",\n relationshipPayload,\n String.class,\n new CreateRelationshipOptions())\n .subscribe(createdRelationshipStringWithResponse -> System.out.println(\n \"Created relationship: \" +\n createdRelationshipStringWithResponse +\n \" With HTTP status code: \" +\n createdRelationshipStringWithResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void getRelationship() {\n \n digitalTwinsAsyncClient.getRelationship(\n \"myDigitalTwinId\",\n \"myRelationshipName\",\n BasicRelationship.class)\n .subscribe(retrievedRelationship -> 
System.out.println(\n \"Retrieved relationship with Id: \"\n + retrievedRelationship.getId() +\n \" from: \" +\n retrievedRelationship.getSourceId() +\n \" to: \" + retrievedRelationship.getTargetId()));\n \n\n \n digitalTwinsAsyncClient.getRelationship(\n \"myDigitalTwinId\",\n \"myRelationshipName\",\n String.class)\n .subscribe(retrievedRelationshipString ->\n System.out.println(\"Retrieved relationship: \" + retrievedRelationshipString));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void getRelationshipWithResponse() {\n \n digitalTwinsAsyncClient.getRelationshipWithResponse(\n \"myDigitalTwinId\",\n \"myRelationshipName\",\n BasicRelationship.class,\n new GetRelationshipOptions())\n .subscribe(retrievedRelationshipWithResponse -> System.out.println(\n \"Retrieved relationship with Id: \"\n + retrievedRelationshipWithResponse.getValue().getId() +\n \" from: \" +\n retrievedRelationshipWithResponse.getValue().getSourceId() +\n \" to: \" + retrievedRelationshipWithResponse.getValue().getTargetId() +\n \"HTTP status code: \" + retrievedRelationshipWithResponse.getStatusCode()));\n \n\n \n digitalTwinsAsyncClient.getRelationshipWithResponse(\n \"myDigitalTwinId\",\n \"myRelationshipName\",\n String.class,\n new GetRelationshipOptions())\n .subscribe(retrievedRelationshipStringWithResponse -> System.out.println(\n \"Retrieved relationship: \" +\n retrievedRelationshipStringWithResponse +\n \" HTTP status code: \" +\n retrievedRelationshipStringWithResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void updateRelationship() {\n \n UpdateOperationUtility updateOperationUtility = new UpdateOperationUtility();\n updateOperationUtility.appendReplaceOperation(\"/relationshipProperty1\", \"new property value\");\n\n digitalTwinsAsyncClient.updateRelationship(\n \"myDigitalTwinId\",\n \"myRelationshipId\",\n 
updateOperationUtility.getUpdateOperations())\n .subscribe();\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void updateRelationshipWithResponse() {\n \n UpdateOperationUtility updateOperationUtility = new UpdateOperationUtility();\n updateOperationUtility.appendReplaceOperation(\"/relationshipProperty1\", \"new property value\");\n\n digitalTwinsAsyncClient.updateRelationshipWithResponse(\n \"myDigitalTwinId\",\n \"myRelationshipId\",\n updateOperationUtility.getUpdateOperations(),\n new UpdateRelationshipOptions())\n .subscribe(updateResponse ->\n System.out.println(\n \"Relationship updated with status code: \" +\n updateResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void deleteRelationship() {\n \n digitalTwinsAsyncClient.deleteRelationship(\"myDigitalTwinId\", \"myRelationshipId\")\n .subscribe();\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void deleteRelationshipWithResponse() {\n \n digitalTwinsAsyncClient.deleteRelationshipWithResponse(\n \"myDigitalTwinId\",\n \"myRelationshipId\",\n new DeleteRelationshipOptions())\n .subscribe(deleteResponse ->\n System.out.println(\n \"Deleted relationship with HTTP status code: \" +\n deleteResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n * and {@link DigitalTwinsAsyncClient\n */\n @Override\n public void listRelationships() {\n \n digitalTwinsAsyncClient.listRelationships(\"myDigitalTwinId\", BasicRelationship.class)\n .doOnNext(basicRel -> System.out.println(\"Retrieved relationship with Id: \" + basicRel.getId()));\n \n\n \n digitalTwinsAsyncClient.listRelationships(\"myDigitalTwinId\", String.class)\n .doOnNext(rel -> System.out.println(\"Retrieved relationship: \" + rel));\n \n\n \n digitalTwinsAsyncClient.listRelationships(\n 
\"myDigitalTwinId\",\n \"myRelationshipName\",\n BasicRelationship.class,\n new ListRelationshipsOptions())\n .doOnNext(rel -> System.out.println(\"Retrieved relationship with Id: \" + rel.getId()));\n \n\n \n digitalTwinsAsyncClient.listRelationships(\n \"myDigitalTwinId\",\n \"myRelationshipId\",\n String.class,\n new ListRelationshipsOptions())\n .doOnNext(rel -> System.out.println(\"Retrieved relationship: \" + rel));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void listIncomingRelationships() {\n \n digitalTwinsAsyncClient.listIncomingRelationships(\"myDigitalTwinId\")\n .doOnNext(incomingRel -> System.out.println(\n \"Retrieved relationship with Id: \" +\n incomingRel.getRelationshipId() +\n \" from: \" + incomingRel.getSourceId() +\n \" to: myDigitalTwinId\"))\n .subscribe();\n \n\n \n digitalTwinsAsyncClient.listIncomingRelationships(\n \"myDigitalTwinId\",\n new ListIncomingRelationshipsOptions())\n .doOnNext(incomingRel -> System.out.println(\n \"Retrieved relationship with Id: \" +\n incomingRel.getRelationshipId() +\n \" from: \" + incomingRel.getSourceId() +\n \" to: myDigitalTwinId\"))\n .subscribe();\n \n }\n\n \n\n \n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void createModels() {\n String model1 = loadModelFromFile(\"model1\");\n String model2 = loadModelFromFile(\"model2\");\n String model3 = loadModelFromFile(\"model3\");\n\n \n digitalTwinsAsyncClient.createModels(Arrays.asList(model1, model2, model3))\n .subscribe(createdModels -> createdModels.forEach(model ->\n System.out.println(\"Retrieved model with Id: \" + model.getId())));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void createModelsWithResponse() {\n String model1 = loadModelFromFile(\"model1\");\n String model2 = loadModelFromFile(\"model2\");\n String 
model3 = loadModelFromFile(\"model3\");\n\n \n digitalTwinsAsyncClient.createModelsWithResponse(\n Arrays.asList(model1, model2, model3),\n new CreateModelsOptions())\n .subscribe(createdModels -> {\n System.out.println(\"Received a response with HTTP status code: \" + createdModels.getStatusCode());\n createdModels.getValue().forEach(\n model -> System.out.println(\"Retrieved model with Id: \" + model.getId()));\n });\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void getModel() {\n \n digitalTwinsAsyncClient.getModel(\"dtmi:samples:Building;1\")\n .subscribe(model -> System.out.println(\"Retrieved model with Id: \" + model.getId()));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void getModelWithResponse() {\n \n digitalTwinsAsyncClient.getModelWithResponse(\n \"dtmi:samples:Building;1\",\n new GetModelOptions())\n .subscribe(modelWithResponse -> {\n System.out.println(\"Received HTTP response with status code: \" + modelWithResponse.getStatusCode());\n System.out.println(\"Retrieved model with Id: \" + modelWithResponse.getValue().getId());\n });\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n * {@link DigitalTwinsAsyncClient", "context_after": "class DigitalTwinsAsyncClientJavaDocCodeSnippets extends CodeSnippetBase {\n\n private final DigitalTwinsAsyncClient digitalTwinsAsyncClient;\n\n DigitalTwinsAsyncClientJavaDocCodeSnippets(){\n digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n }\n\n public DigitalTwinsAsyncClient createDigitalTwinsAsyncClient() {\n\n String tenantId = getTenenatId();\n String clientId = getClientId();\n String clientSecret = getClientSecret();\n String digitalTwinsEndpointUrl = getEndpointUrl();\n\n \n DigitalTwinsAsyncClient digitalTwinsAsyncClient = new DigitalTwinsClientBuilder()\n .credential(\n new ClientSecretCredentialBuilder()\n .tenantId(tenantId)\n 
.clientId(clientId)\n .clientSecret(clientSecret)\n .build())\n .endpoint(digitalTwinsEndpointUrl)\n .buildAsyncClient();\n \n\n return digitalTwinsAsyncClient;\n }\n\n \n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void createDigitalTwin() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n \n String modelId = \"dtmi:samples:Building;1\";\n\n BasicDigitalTwin basicTwin = new BasicDigitalTwin(\"myDigitalTwinId\")\n .setMetadata(\n new DigitalTwinMetadata()\n .setModelId(modelId)\n );\n\n digitalTwinsAsyncClient.createDigitalTwin(basicTwin.getId(), basicTwin, BasicDigitalTwin.class)\n .subscribe(response -> System.out.println(\"Created digital twin Id: \" + response.getId()));\n \n\n String digitalTwinStringPayload = getDigitalTwinPayload();\n\n \n digitalTwinsAsyncClient.createDigitalTwin(\"myDigitalTwinId\", digitalTwinStringPayload, String.class)\n .subscribe(stringResponse -> System.out.println(\"Created digital twin: \" + stringResponse));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void createDigitalTwinWithResponse(){\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n String modelId = \"dtmi:samples:Building;1\";\n\n BasicDigitalTwin basicDigitalTwin = new BasicDigitalTwin(\"myDigitalTwinId\")\n .setMetadata(\n new DigitalTwinMetadata()\n .setModelId(modelId)\n );\n\n digitalTwinsAsyncClient.createDigitalTwinWithResponse(\n basicDigitalTwin.getId(),\n basicDigitalTwin,\n BasicDigitalTwin.class,\n new CreateDigitalTwinOptions())\n .subscribe(resultWithResponse ->\n System.out.println(\n \"Response http status: \" +\n resultWithResponse.getStatusCode() +\n \" created digital twin Id: \" +\n resultWithResponse.getValue().getId()));\n \n\n String stringPayload = getDigitalTwinPayload();\n\n \n digitalTwinsAsyncClient.createDigitalTwinWithResponse(\n 
basicDigitalTwin.getId(),\n stringPayload,\n String.class,\n new CreateDigitalTwinOptions())\n .subscribe(stringWithResponse ->\n System.out.println(\n \"Response http status: \" +\n stringWithResponse.getStatusCode() +\n \" created digital twin: \" +\n stringWithResponse.getValue()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n public void getDigitalTwin() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n digitalTwinsAsyncClient.getDigitalTwin(\"myDigitalTwinId\", BasicDigitalTwin.class)\n .subscribe(\n basicDigitalTwin -> System.out.println(\"Retrieved digital twin with Id: \" + basicDigitalTwin.getId()));\n \n\n \n digitalTwinsAsyncClient.getDigitalTwin(\"myDigitalTwinId\", String.class)\n .subscribe(stringResult -> System.out.println(\"Retrieved digital twin: \" + stringResult));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void getDigitalTwinWithResponse() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n digitalTwinsAsyncClient.getDigitalTwinWithResponse(\n \"myDigitalTwinId\",\n BasicDigitalTwin.class,\n new GetDigitalTwinOptions())\n .subscribe(\n basicDigitalTwinWithResponse -> System.out.println(\n \"Retrieved digital twin with Id: \" + basicDigitalTwinWithResponse.getValue().getId() +\n \" Http Status Code: \" + basicDigitalTwinWithResponse.getStatusCode()));\n \n\n \n digitalTwinsAsyncClient.getDigitalTwinWithResponse(\n \"myDigitalTwinId\",\n String.class,\n new GetDigitalTwinOptions())\n .subscribe(\n basicDigitalTwinWithResponse -> System.out.println(\n \"Retrieved digital twin: \" + basicDigitalTwinWithResponse.getValue() +\n \" Http Status Code: \" + basicDigitalTwinWithResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void updateDigitalTwin() {\n 
DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n UpdateOperationUtility updateOperationUtility = new UpdateOperationUtility();\n updateOperationUtility.appendReplaceOperation(\"Prop1\", \"newValue\");\n\n digitalTwinsAsyncClient.updateDigitalTwin(\n \"myDigitalTwinId\",\n updateOperationUtility.getUpdateOperations())\n .subscribe();\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void updateDigitalTwinWithResponse() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n UpdateOperationUtility updateOperationUtility = new UpdateOperationUtility();\n updateOperationUtility.appendReplaceOperation(\"Prop1\", \"newValue\");\n\n digitalTwinsAsyncClient.updateDigitalTwinWithResponse(\n \"myDigitalTwinId\",\n updateOperationUtility.getUpdateOperations(),\n new UpdateDigitalTwinOptions())\n .subscribe(updateResponse ->\n System.out.println(\"Update completed with HTTP status code: \" + updateResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void deleteDigitalTwin() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n digitalTwinsAsyncClient.deleteDigitalTwin(\"myDigitalTwinId\")\n .subscribe();\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void deleteDigitalTwinWithResponse() {\n DigitalTwinsAsyncClient digitalTwinsAsyncClient = createDigitalTwinsAsyncClient();\n\n \n digitalTwinsAsyncClient.deleteDigitalTwinWithResponse(\n \"myDigitalTwinId\",\n new DeleteDigitalTwinOptions())\n .subscribe(deleteResponse ->\n System.out.println(\"Deleted digital twin. 
HTTP response status code: \" + deleteResponse.getStatusCode()));\n \n }\n\n \n\n \n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n \n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void createRelationshipWithResponse() {\n \n BasicRelationship buildingToFloorBasicRelationship = new BasicRelationship(\n \"myRelationshipId\",\n \"mySourceDigitalTwinId\",\n \"myTargetDigitalTwinId\",\n \"contains\")\n .addCustomProperty(\"Prop1\", \"Prop1 value\")\n .addCustomProperty(\"Prop2\", 6);\n\n digitalTwinsAsyncClient.createRelationshipWithResponse(\n \"mySourceDigitalTwinId\",\n \"myRelationshipId\",\n buildingToFloorBasicRelationship,\n BasicRelationship.class,\n new CreateRelationshipOptions())\n .subscribe(createdRelationshipWithResponse -> System.out.println(\n \"Created relationship with Id: \" +\n createdRelationshipWithResponse.getValue().getId() +\n \" from: \" + createdRelationshipWithResponse.getValue().getSourceId() +\n \" to: \" + createdRelationshipWithResponse.getValue().getTargetId() +\n \" Http status code: \" +\n createdRelationshipWithResponse.getStatusCode()));\n \n\n \n String relationshipPayload = getRelationshipPayload();\n\n digitalTwinsAsyncClient.createRelationshipWithResponse(\n \"mySourceDigitalTwinId\",\n \"myRelationshipId\",\n relationshipPayload,\n String.class,\n new CreateRelationshipOptions())\n .subscribe(createdRelationshipStringWithResponse -> System.out.println(\n \"Created relationship: \" +\n createdRelationshipStringWithResponse +\n \" With HTTP status code: \" +\n createdRelationshipStringWithResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void getRelationship() {\n \n digitalTwinsAsyncClient.getRelationship(\n \"myDigitalTwinId\",\n \"myRelationshipName\",\n BasicRelationship.class)\n .subscribe(retrievedRelationship -> 
System.out.println(\n \"Retrieved relationship with Id: \"\n + retrievedRelationship.getId() +\n \" from: \" +\n retrievedRelationship.getSourceId() +\n \" to: \" + retrievedRelationship.getTargetId()));\n \n\n \n digitalTwinsAsyncClient.getRelationship(\n \"myDigitalTwinId\",\n \"myRelationshipName\",\n String.class)\n .subscribe(retrievedRelationshipString ->\n System.out.println(\"Retrieved relationship: \" + retrievedRelationshipString));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void getRelationshipWithResponse() {\n \n digitalTwinsAsyncClient.getRelationshipWithResponse(\n \"myDigitalTwinId\",\n \"myRelationshipName\",\n BasicRelationship.class,\n new GetRelationshipOptions())\n .subscribe(retrievedRelationshipWithResponse -> System.out.println(\n \"Retrieved relationship with Id: \"\n + retrievedRelationshipWithResponse.getValue().getId() +\n \" from: \" +\n retrievedRelationshipWithResponse.getValue().getSourceId() +\n \" to: \" + retrievedRelationshipWithResponse.getValue().getTargetId() +\n \"HTTP status code: \" + retrievedRelationshipWithResponse.getStatusCode()));\n \n\n \n digitalTwinsAsyncClient.getRelationshipWithResponse(\n \"myDigitalTwinId\",\n \"myRelationshipName\",\n String.class,\n new GetRelationshipOptions())\n .subscribe(retrievedRelationshipStringWithResponse -> System.out.println(\n \"Retrieved relationship: \" +\n retrievedRelationshipStringWithResponse +\n \" HTTP status code: \" +\n retrievedRelationshipStringWithResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void updateRelationship() {\n \n UpdateOperationUtility updateOperationUtility = new UpdateOperationUtility();\n updateOperationUtility.appendReplaceOperation(\"/relationshipProperty1\", \"new property value\");\n\n digitalTwinsAsyncClient.updateRelationship(\n \"myDigitalTwinId\",\n \"myRelationshipId\",\n 
updateOperationUtility.getUpdateOperations())\n .subscribe();\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void updateRelationshipWithResponse() {\n \n UpdateOperationUtility updateOperationUtility = new UpdateOperationUtility();\n updateOperationUtility.appendReplaceOperation(\"/relationshipProperty1\", \"new property value\");\n\n digitalTwinsAsyncClient.updateRelationshipWithResponse(\n \"myDigitalTwinId\",\n \"myRelationshipId\",\n updateOperationUtility.getUpdateOperations(),\n new UpdateRelationshipOptions())\n .subscribe(updateResponse ->\n System.out.println(\n \"Relationship updated with status code: \" +\n updateResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void deleteRelationship() {\n \n digitalTwinsAsyncClient.deleteRelationship(\"myDigitalTwinId\", \"myRelationshipId\")\n .subscribe();\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void deleteRelationshipWithResponse() {\n \n digitalTwinsAsyncClient.deleteRelationshipWithResponse(\n \"myDigitalTwinId\",\n \"myRelationshipId\",\n new DeleteRelationshipOptions())\n .subscribe(deleteResponse ->\n System.out.println(\n \"Deleted relationship with HTTP status code: \" +\n deleteResponse.getStatusCode()));\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n * and {@link DigitalTwinsAsyncClient\n */\n @Override\n public void listRelationships() {\n \n digitalTwinsAsyncClient.listRelationships(\"myDigitalTwinId\", BasicRelationship.class)\n .doOnNext(basicRel -> System.out.println(\"Retrieved relationship with Id: \" + basicRel.getId()));\n \n\n \n digitalTwinsAsyncClient.listRelationships(\"myDigitalTwinId\", String.class)\n .doOnNext(rel -> System.out.println(\"Retrieved relationship: \" + rel));\n \n\n \n digitalTwinsAsyncClient.listRelationships(\n 
\"myDigitalTwinId\",\n \"myRelationshipName\",\n BasicRelationship.class,\n new ListRelationshipsOptions())\n .doOnNext(rel -> System.out.println(\"Retrieved relationship with Id: \" + rel.getId()));\n \n\n \n digitalTwinsAsyncClient.listRelationships(\n \"myDigitalTwinId\",\n \"myRelationshipId\",\n String.class,\n new ListRelationshipsOptions())\n .doOnNext(rel -> System.out.println(\"Retrieved relationship: \" + rel));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void listIncomingRelationships() {\n \n digitalTwinsAsyncClient.listIncomingRelationships(\"myDigitalTwinId\")\n .doOnNext(incomingRel -> System.out.println(\n \"Retrieved relationship with Id: \" +\n incomingRel.getRelationshipId() +\n \" from: \" + incomingRel.getSourceId() +\n \" to: myDigitalTwinId\"))\n .subscribe();\n \n\n \n digitalTwinsAsyncClient.listIncomingRelationships(\n \"myDigitalTwinId\",\n new ListIncomingRelationshipsOptions())\n .doOnNext(incomingRel -> System.out.println(\n \"Retrieved relationship with Id: \" +\n incomingRel.getRelationshipId() +\n \" from: \" + incomingRel.getSourceId() +\n \" to: myDigitalTwinId\"))\n .subscribe();\n \n }\n\n \n\n \n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void createModels() {\n String model1 = loadModelFromFile(\"model1\");\n String model2 = loadModelFromFile(\"model2\");\n String model3 = loadModelFromFile(\"model3\");\n\n \n digitalTwinsAsyncClient.createModels(Arrays.asList(model1, model2, model3))\n .subscribe(createdModels -> createdModels.forEach(model ->\n System.out.println(\"Retrieved model with Id: \" + model.getId())));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void createModelsWithResponse() {\n String model1 = loadModelFromFile(\"model1\");\n String model2 = loadModelFromFile(\"model2\");\n String 
model3 = loadModelFromFile(\"model3\");\n\n \n digitalTwinsAsyncClient.createModelsWithResponse(\n Arrays.asList(model1, model2, model3),\n new CreateModelsOptions())\n .subscribe(createdModels -> {\n System.out.println(\"Received a response with HTTP status code: \" + createdModels.getStatusCode());\n createdModels.getValue().forEach(\n model -> System.out.println(\"Retrieved model with Id: \" + model.getId()));\n });\n \n }\n\n /**\n * Generates code samples for using {@link DigitalTwinsAsyncClient\n */\n @Override\n public void getModel() {\n \n digitalTwinsAsyncClient.getModel(\"dtmi:samples:Building;1\")\n .subscribe(model -> System.out.println(\"Retrieved model with Id: \" + model.getId()));\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n */\n @Override\n public void getModelWithResponse() {\n \n digitalTwinsAsyncClient.getModelWithResponse(\n \"dtmi:samples:Building;1\",\n new GetModelOptions())\n .subscribe(modelWithResponse -> {\n System.out.println(\"Received HTTP response with status code: \" + modelWithResponse.getStatusCode());\n System.out.println(\"Retrieved model with Id: \" + modelWithResponse.getValue().getId());\n });\n \n }\n\n /**\n * Generates code samples for using\n * {@link DigitalTwinsAsyncClient\n * {@link DigitalTwinsAsyncClient" }, { "comment": "```suggestion throw Utils.createNatsError(\"Timeout error occurred on graceful stop\"); ```", "method_body": "public static void gracefulStop(Strand strand, ObjectValue listenerObject) {\n ObjectValue connectionObject = (ObjectValue) listenerObject.get(Constants.CONNECTION_OBJ);\n if (connectionObject == null) {\n LOG.debug(\"Connection object reference does not exist. Possibly the connection is already closed.\");\n return;\n }\n Connection natsConnection =\n (Connection) connectionObject.getNativeData(Constants.NATS_CONNECTION);\n if (natsConnection == null) {\n LOG.debug(\"NATS connection does not exist. 
Possibly the connection is already closed.\");\n listenerObject.set(Constants.CONNECTION_OBJ, null);\n return;\n }\n @SuppressWarnings(\"unchecked\")\n List dispatcherList = (List) listenerObject.getNativeData(DISPATCHER_LIST);\n dispatcherList.forEach(natsConnection::closeDispatcher);\n\n int clientsCount =\n ((AtomicInteger) connectionObject.getNativeData(Constants.CONNECTED_CLIENTS)).decrementAndGet();\n\n if (clientsCount == 0) {\n try {\n \n natsConnection.drain(Duration.ZERO);\n } catch (InterruptedException e) {\n Thread.currentThread().interrupt();\n throw Utils.createNatsError(\"Listener interrupted while closing NATS connection\");\n } catch (TimeoutException e) {\n throw Utils.createNatsError(\"Timeout error occurred, initial flush timed out\");\n }\n }\n }", "target_code": "throw Utils.createNatsError(\"Timeout error occurred, initial flush timed out\");", "method_body_after": "public static void gracefulStop(Strand strand, ObjectValue listenerObject) {\n ObjectValue connectionObject = (ObjectValue) listenerObject.get(Constants.CONNECTION_OBJ);\n if (connectionObject == null) {\n LOG.debug(\"Connection object reference does not exist. Possibly the connection is already closed.\");\n return;\n }\n Connection natsConnection =\n (Connection) connectionObject.getNativeData(Constants.NATS_CONNECTION);\n if (natsConnection == null) {\n LOG.debug(\"NATS connection does not exist. 
Possibly the connection is already closed.\");\n return;\n }\n @SuppressWarnings(\"unchecked\")\n List dispatcherList = (List) listenerObject.getNativeData(DISPATCHER_LIST);\n dispatcherList.forEach(natsConnection::closeDispatcher);\n\n int clientsCount =\n ((AtomicInteger) connectionObject.getNativeData(Constants.CONNECTED_CLIENTS)).decrementAndGet();\n\n if (clientsCount == 0) {\n try {\n \n natsConnection.drain(Duration.ZERO);\n } catch (InterruptedException e) {\n Thread.currentThread().interrupt();\n throw Utils.createNatsError(\"Listener interrupted on graceful stop.\");\n } catch (TimeoutException e) {\n throw Utils.createNatsError(\"Timeout error occurred, on graceful stop.\");\n } catch (IllegalStateException e) {\n throw Utils.createNatsError(\"Connection is already closed.\");\n }\n }\n }", "context_before": "class GracefulStop {\n\n private static final Logger LOG = LoggerFactory.getLogger(GracefulStop.class);\n\n \n}", "context_after": "class GracefulStop {\n\n private static final Logger LOG = LoggerFactory.getLogger(GracefulStop.class);\n\n \n}" }, { "comment": "If these variables aren't defined by user, we would use default config values in of FE", "method_body": "public void initFuzzyModeVariables() {\n Random random = new Random(System.currentTimeMillis());\n this.parallelExecInstanceNum = random.nextInt(8) + 1;\n this.parallelPipelineTaskNum = random.nextInt(8);\n this.enableCommonExprPushdown = random.nextBoolean();\n this.enableLocalExchange = random.nextBoolean();\n \n \n this.disableStreamPreaggregations = random.nextBoolean();\n this.partitionedHashJoinRowsThreshold = random.nextBoolean() ? 8 : 1048576;\n this.partitionedHashAggRowsThreshold = random.nextBoolean() ? 
8 : 1048576;\n this.enableShareHashTableForBroadcastJoin = random.nextBoolean();\n \n int randomInt = random.nextInt(4);\n if (randomInt % 2 == 0) {\n this.rewriteOrToInPredicateThreshold = 100000;\n this.enableFunctionPushdown = false;\n this.enableDeleteSubPredicateV2 = false;\n } else {\n this.rewriteOrToInPredicateThreshold = 2;\n this.enableFunctionPushdown = true;\n this.enableDeleteSubPredicateV2 = true;\n }\n this.runtimeFilterType = 1 << randomInt;\n /*\n switch (randomInt) {\n case 0:\n this.externalSortBytesThreshold = 0;\n this.externalAggBytesThreshold = 0;\n break;\n case 1:\n this.externalSortBytesThreshold = 1;\n this.externalAggBytesThreshold = 1;\n this.externalAggPartitionBits = 6;\n break;\n case 2:\n this.externalSortBytesThreshold = 1024 * 1024;\n this.externalAggBytesThreshold = 1024 * 1024;\n this.externalAggPartitionBits = 8;\n break;\n default:\n this.externalSortBytesThreshold = 100 * 1024 * 1024 * 1024;\n this.externalAggBytesThreshold = 100 * 1024 * 1024 * 1024;\n this.externalAggPartitionBits = 4;\n break;\n }\n */\n \n if (Config.pull_request_id > 0) {\n this.enablePipelineEngine = true;\n this.enableNereidsPlanner = true;\n\n switch (Config.pull_request_id % 4) {\n case 0:\n this.runtimeFilterType |= TRuntimeFilterType.BITMAP.getValue();\n break;\n case 1:\n this.runtimeFilterType |= TRuntimeFilterType.BITMAP.getValue();\n break;\n case 2:\n this.runtimeFilterType &= ~TRuntimeFilterType.BITMAP.getValue();\n break;\n case 3:\n this.runtimeFilterType &= ~TRuntimeFilterType.BITMAP.getValue();\n break;\n default:\n break;\n }\n }\n\n if (Config.fuzzy_test_type.equals(\"p0\")) {\n if (Config.pull_request_id > 0) {\n if (Config.pull_request_id % 2 == 1) {\n this.batchSize = 4064;\n } else {\n this.batchSize = 50;\n }\n }\n }\n\n \n \n \n \n this.topnOptLimitThreshold = 0;\n }", "target_code": "this.rewriteOrToInPredicateThreshold = 100000;", "method_body_after": "public void initFuzzyModeVariables() {\n Random random = new 
Random(System.currentTimeMillis());\n this.parallelExecInstanceNum = random.nextInt(8) + 1;\n this.parallelPipelineTaskNum = random.nextInt(8);\n this.enableCommonExprPushdown = random.nextBoolean();\n this.enableLocalExchange = random.nextBoolean();\n \n \n this.disableStreamPreaggregations = random.nextBoolean();\n this.partitionedHashJoinRowsThreshold = random.nextBoolean() ? 8 : 1048576;\n this.partitionedHashAggRowsThreshold = random.nextBoolean() ? 8 : 1048576;\n this.enableShareHashTableForBroadcastJoin = random.nextBoolean();\n \n int randomInt = random.nextInt(4);\n if (randomInt % 2 == 0) {\n this.rewriteOrToInPredicateThreshold = 100000;\n this.enableFunctionPushdown = false;\n this.enableDeleteSubPredicateV2 = false;\n } else {\n this.rewriteOrToInPredicateThreshold = 2;\n this.enableFunctionPushdown = true;\n this.enableDeleteSubPredicateV2 = true;\n }\n this.runtimeFilterType = 1 << randomInt;\n /*\n switch (randomInt) {\n case 0:\n this.externalSortBytesThreshold = 0;\n this.externalAggBytesThreshold = 0;\n break;\n case 1:\n this.externalSortBytesThreshold = 1;\n this.externalAggBytesThreshold = 1;\n this.externalAggPartitionBits = 6;\n break;\n case 2:\n this.externalSortBytesThreshold = 1024 * 1024;\n this.externalAggBytesThreshold = 1024 * 1024;\n this.externalAggPartitionBits = 8;\n break;\n default:\n this.externalSortBytesThreshold = 100 * 1024 * 1024 * 1024;\n this.externalAggBytesThreshold = 100 * 1024 * 1024 * 1024;\n this.externalAggPartitionBits = 4;\n break;\n }\n */\n \n if (Config.pull_request_id > 0) {\n this.enablePipelineEngine = true;\n this.enableNereidsPlanner = true;\n\n switch (Config.pull_request_id % 4) {\n case 0:\n this.runtimeFilterType |= TRuntimeFilterType.BITMAP.getValue();\n break;\n case 1:\n this.runtimeFilterType |= TRuntimeFilterType.BITMAP.getValue();\n break;\n case 2:\n this.runtimeFilterType &= ~TRuntimeFilterType.BITMAP.getValue();\n break;\n case 3:\n this.runtimeFilterType &= 
~TRuntimeFilterType.BITMAP.getValue();\n break;\n default:\n break;\n }\n }\n\n if (Config.fuzzy_test_type.equals(\"p0\")) {\n if (Config.pull_request_id > 0) {\n if (Config.pull_request_id % 2 == 1) {\n this.batchSize = 4064;\n } else {\n this.batchSize = 50;\n }\n }\n }\n\n \n \n \n \n this.topnOptLimitThreshold = 0;\n }", "context_before": "class SessionVariable implements Serializable, Writable {\n public static final Logger LOG = LogManager.getLogger(SessionVariable.class);\n\n public static final String EXEC_MEM_LIMIT = \"exec_mem_limit\";\n public static final String SCAN_QUEUE_MEM_LIMIT = \"scan_queue_mem_limit\";\n public static final String QUERY_TIMEOUT = \"query_timeout\";\n\n public static final String MAX_EXECUTION_TIME = \"max_execution_time\";\n public static final String INSERT_TIMEOUT = \"insert_timeout\";\n public static final String ENABLE_PROFILE = \"enable_profile\";\n public static final String SQL_MODE = \"sql_mode\";\n public static final String WORKLOAD_VARIABLE = \"workload_group\";\n public static final String RESOURCE_VARIABLE = \"resource_group\";\n public static final String AUTO_COMMIT = \"autocommit\";\n public static final String TX_ISOLATION = \"tx_isolation\";\n public static final String TX_READ_ONLY = \"tx_read_only\";\n public static final String TRANSACTION_READ_ONLY = \"transaction_read_only\";\n public static final String TRANSACTION_ISOLATION = \"transaction_isolation\";\n public static final String CHARACTER_SET_CLIENT = \"character_set_client\";\n public static final String CHARACTER_SET_CONNNECTION = \"character_set_connection\";\n public static final String CHARACTER_SET_RESULTS = \"character_set_results\";\n public static final String CHARACTER_SET_SERVER = \"character_set_server\";\n public static final String COLLATION_CONNECTION = \"collation_connection\";\n public static final String COLLATION_DATABASE = \"collation_database\";\n public static final String COLLATION_SERVER = \"collation_server\";\n public static 
final String SQL_AUTO_IS_NULL = \"SQL_AUTO_IS_NULL\";\n public static final String SQL_SELECT_LIMIT = \"sql_select_limit\";\n public static final String MAX_ALLOWED_PACKET = \"max_allowed_packet\";\n public static final String AUTO_INCREMENT_INCREMENT = \"auto_increment_increment\";\n public static final String QUERY_CACHE_TYPE = \"query_cache_type\";\n public static final String INTERACTIVE_TIMTOUT = \"interactive_timeout\";\n public static final String WAIT_TIMEOUT = \"wait_timeout\";\n public static final String NET_WRITE_TIMEOUT = \"net_write_timeout\";\n public static final String NET_READ_TIMEOUT = \"net_read_timeout\";\n public static final String TIME_ZONE = \"time_zone\";\n public static final String SQL_SAFE_UPDATES = \"sql_safe_updates\";\n public static final String NET_BUFFER_LENGTH = \"net_buffer_length\";\n public static final String CODEGEN_LEVEL = \"codegen_level\";\n public static final String HAVE_QUERY_CACHE = \"have_query_cache\";\n \n public static final int MIN_EXEC_MEM_LIMIT = 2097152;\n public static final String BATCH_SIZE = \"batch_size\";\n public static final String DISABLE_STREAMING_PREAGGREGATIONS = \"disable_streaming_preaggregations\";\n public static final String DISABLE_COLOCATE_PLAN = \"disable_colocate_plan\";\n public static final String ENABLE_COLOCATE_SCAN = \"enable_colocate_scan\";\n public static final String ENABLE_BUCKET_SHUFFLE_JOIN = \"enable_bucket_shuffle_join\";\n public static final String PARALLEL_FRAGMENT_EXEC_INSTANCE_NUM = \"parallel_fragment_exec_instance_num\";\n public static final String PARALLEL_PIPELINE_TASK_NUM = \"parallel_pipeline_task_num\";\n public static final String MAX_INSTANCE_NUM = \"max_instance_num\";\n public static final String ENABLE_INSERT_STRICT = \"enable_insert_strict\";\n public static final String ENABLE_SPILLING = \"enable_spilling\";\n public static final String ENABLE_EXCHANGE_NODE_PARALLEL_MERGE = \"enable_exchange_node_parallel_merge\";\n public static final String 
PREFER_JOIN_METHOD = \"prefer_join_method\";\n\n public static final String ENABLE_FOLD_CONSTANT_BY_BE = \"enable_fold_constant_by_be\";\n public static final String ENABLE_ODBC_TRANSCATION = \"enable_odbc_transcation\";\n public static final String ENABLE_SQL_CACHE = \"enable_sql_cache\";\n public static final String ENABLE_PARTITION_CACHE = \"enable_partition_cache\";\n\n public static final String ENABLE_COST_BASED_JOIN_REORDER = \"enable_cost_based_join_reorder\";\n\n \n public static final String FORWARD_TO_MASTER = \"forward_to_master\";\n \n public static final String PARALLEL_EXCHANGE_INSTANCE_NUM = \"parallel_exchange_instance_num\";\n public static final String SHOW_HIDDEN_COLUMNS = \"show_hidden_columns\";\n public static final String USE_V2_ROLLUP = \"use_v2_rollup\";\n public static final String REWRITE_COUNT_DISTINCT_TO_BITMAP_HLL = \"rewrite_count_distinct_to_bitmap_hll\";\n public static final String EVENT_SCHEDULER = \"event_scheduler\";\n public static final String STORAGE_ENGINE = \"storage_engine\";\n \n public static final String DEFAULT_STORAGE_ENGINE = \"default_storage_engine\";\n public static final String DEFAULT_TMP_STORAGE_ENGINE = \"default_tmp_storage_engine\";\n\n \n public static final String PROFILLING = \"profiling\";\n\n public static final String DIV_PRECISION_INCREMENT = \"div_precision_increment\";\n\n \n public static final String MAX_SCAN_KEY_NUM = \"max_scan_key_num\";\n public static final String MAX_PUSHDOWN_CONDITIONS_PER_COLUMN = \"max_pushdown_conditions_per_column\";\n\n \n public static final String ALLOW_PARTITION_COLUMN_NULLABLE = \"allow_partition_column_nullable\";\n\n \n public static final String RUNTIME_FILTER_MODE = \"runtime_filter_mode\";\n \n \n public static final String RUNTIME_BLOOM_FILTER_SIZE = \"runtime_bloom_filter_size\";\n \n public static final String RUNTIME_BLOOM_FILTER_MIN_SIZE = \"runtime_bloom_filter_min_size\";\n \n public static final String RUNTIME_BLOOM_FILTER_MAX_SIZE = 
\"runtime_bloom_filter_max_size\";\n public static final String USE_RF_DEFAULT = \"use_rf_default\";\n \n public static final String RUNTIME_FILTER_WAIT_TIME_MS = \"runtime_filter_wait_time_ms\";\n \n public static final String RUNTIME_FILTERS_MAX_NUM = \"runtime_filters_max_num\";\n \n public static final String RUNTIME_FILTER_TYPE = \"runtime_filter_type\";\n \n public static final String RUNTIME_FILTER_MAX_IN_NUM = \"runtime_filter_max_in_num\";\n\n public static final String BE_NUMBER_FOR_TEST = \"be_number_for_test\";\n\n \n public static final String INSERT_VISIBLE_TIMEOUT_MS = \"insert_visible_timeout_ms\";\n\n public static final String DELETE_WITHOUT_PARTITION = \"delete_without_partition\";\n\n \n \n \n public static final String SEND_BATCH_PARALLELISM = \"send_batch_parallelism\";\n\n \n public static final String DISABLE_JOIN_REORDER = \"disable_join_reorder\";\n\n public static final String MAX_JOIN_NUMBER_OF_REORDER = \"max_join_number_of_reorder\";\n\n public static final String ENABLE_NEREIDS_DML = \"enable_nereids_dml\";\n public static final String ENABLE_STRICT_CONSISTENCY_DML = \"enable_strict_consistency_dml\";\n\n public static final String ENABLE_BUSHY_TREE = \"enable_bushy_tree\";\n\n public static final String MAX_JOIN_NUMBER_BUSHY_TREE = \"max_join_number_bushy_tree\";\n public static final String ENABLE_PARTITION_TOPN = \"enable_partition_topn\";\n\n public static final String ENABLE_INFER_PREDICATE = \"enable_infer_predicate\";\n\n public static final long DEFAULT_INSERT_VISIBLE_TIMEOUT_MS = 10_000;\n\n public static final String ENABLE_VECTORIZED_ENGINE = \"enable_vectorized_engine\";\n\n public static final String EXTRACT_WIDE_RANGE_EXPR = \"extract_wide_range_expr\";\n\n \n public static final long MIN_INSERT_VISIBLE_TIMEOUT_MS = 1000;\n\n public static final String ENABLE_PIPELINE_ENGINE = \"enable_pipeline_engine\";\n\n public static final String ENABLE_PIPELINE_X_ENGINE = \"enable_pipeline_x_engine\";\n\n public static final String 
ENABLE_AGG_STATE = \"enable_agg_state\";\n\n public static final String ENABLE_RPC_OPT_FOR_PIPELINE = \"enable_rpc_opt_for_pipeline\";\n\n public static final String ENABLE_SINGLE_DISTINCT_COLUMN_OPT = \"enable_single_distinct_column_opt\";\n\n public static final String CPU_RESOURCE_LIMIT = \"cpu_resource_limit\";\n\n public static final String ENABLE_PARALLEL_OUTFILE = \"enable_parallel_outfile\";\n\n public static final String SQL_QUOTE_SHOW_CREATE = \"sql_quote_show_create\";\n\n public static final String RETURN_OBJECT_DATA_AS_BINARY = \"return_object_data_as_binary\";\n\n public static final String BLOCK_ENCRYPTION_MODE = \"block_encryption_mode\";\n\n public static final String AUTO_BROADCAST_JOIN_THRESHOLD = \"auto_broadcast_join_threshold\";\n\n public static final String ENABLE_PROJECTION = \"enable_projection\";\n\n public static final String CHECK_OVERFLOW_FOR_DECIMAL = \"check_overflow_for_decimal\";\n\n public static final String TRIM_TAILING_SPACES_FOR_EXTERNAL_TABLE_QUERY\n = \"trim_tailing_spaces_for_external_table_query\";\n\n public static final String ENABLE_DPHYP_OPTIMIZER = \"enable_dphyp_optimizer\";\n\n public static final String NTH_OPTIMIZED_PLAN = \"nth_optimized_plan\";\n\n public static final String ENABLE_NEREIDS_PLANNER = \"enable_nereids_planner\";\n public static final String DISABLE_NEREIDS_RULES = \"disable_nereids_rules\";\n public static final String ENABLE_NEW_COST_MODEL = \"enable_new_cost_model\";\n public static final String ENABLE_FALLBACK_TO_ORIGINAL_PLANNER = \"enable_fallback_to_original_planner\";\n public static final String ENABLE_NEREIDS_TIMEOUT = \"enable_nereids_timeout\";\n\n public static final String FORBID_UNKNOWN_COLUMN_STATS = \"forbid_unknown_col_stats\";\n public static final String BROADCAST_RIGHT_TABLE_SCALE_FACTOR = \"broadcast_right_table_scale_factor\";\n public static final String BROADCAST_ROW_COUNT_LIMIT = \"broadcast_row_count_limit\";\n\n \n public static final String 
BROADCAST_HASHTABLE_MEM_LIMIT_PERCENTAGE = \"broadcast_hashtable_mem_limit_percentage\";\n\n public static final String REWRITE_OR_TO_IN_PREDICATE_THRESHOLD = \"rewrite_or_to_in_predicate_threshold\";\n\n public static final String NEREIDS_STAR_SCHEMA_SUPPORT = \"nereids_star_schema_support\";\n\n public static final String NEREIDS_CBO_PENALTY_FACTOR = \"nereids_cbo_penalty_factor\";\n public static final String ENABLE_NEREIDS_TRACE = \"enable_nereids_trace\";\n\n public static final String ENABLE_DPHYP_TRACE = \"enable_dphyp_trace\";\n\n public static final String ENABLE_FOLD_NONDETERMINISTIC_FN = \"enable_fold_nondeterministic_fn\";\n\n public static final String ENABLE_RUNTIME_FILTER_PRUNE =\n \"enable_runtime_filter_prune\";\n\n static final String SESSION_CONTEXT = \"session_context\";\n\n public static final String DEFAULT_ORDER_BY_LIMIT = \"default_order_by_limit\";\n\n public static final String ENABLE_SINGLE_REPLICA_INSERT = \"enable_single_replica_insert\";\n\n public static final String ENABLE_FUNCTION_PUSHDOWN = \"enable_function_pushdown\";\n\n public static final String ENABLE_COMMON_EXPR_PUSHDOWN = \"enable_common_expr_pushdown\";\n\n public static final String FRAGMENT_TRANSMISSION_COMPRESSION_CODEC = \"fragment_transmission_compression_codec\";\n\n public static final String ENABLE_LOCAL_EXCHANGE = \"enable_local_exchange\";\n\n public static final String SKIP_STORAGE_ENGINE_MERGE = \"skip_storage_engine_merge\";\n\n public static final String SKIP_DELETE_PREDICATE = \"skip_delete_predicate\";\n\n public static final String SKIP_DELETE_SIGN = \"skip_delete_sign\";\n\n public static final String SKIP_DELETE_BITMAP = \"skip_delete_bitmap\";\n\n public static final String ENABLE_PUSH_DOWN_NO_GROUP_AGG = \"enable_push_down_no_group_agg\";\n\n public static final String ENABLE_CBO_STATISTICS = \"enable_cbo_statistics\";\n\n public static final String ENABLE_SAVE_STATISTICS_SYNC_JOB = \"enable_save_statistics_sync_job\";\n\n public static final String 
ENABLE_ELIMINATE_SORT_NODE = \"enable_eliminate_sort_node\";\n\n public static final String NEREIDS_TRACE_EVENT_MODE = \"nereids_trace_event_mode\";\n\n public static final String INTERNAL_SESSION = \"internal_session\";\n\n public static final String PARTITIONED_HASH_JOIN_ROWS_THRESHOLD = \"partitioned_hash_join_rows_threshold\";\n public static final String PARTITIONED_HASH_AGG_ROWS_THRESHOLD = \"partitioned_hash_agg_rows_threshold\";\n\n public static final String PARTITION_PRUNING_EXPAND_THRESHOLD = \"partition_pruning_expand_threshold\";\n\n public static final String ENABLE_SHARE_HASH_TABLE_FOR_BROADCAST_JOIN\n = \"enable_share_hash_table_for_broadcast_join\";\n\n \n public static final String ENABLE_HASH_JOIN_EARLY_START_PROBE = \"enable_hash_join_early_start_probe\";\n\n \n public static final String ENABLE_UNICODE_NAME_SUPPORT = \"enable_unicode_name_support\";\n\n public static final String REPEAT_MAX_NUM = \"repeat_max_num\";\n\n public static final String GROUP_CONCAT_MAX_LEN = \"group_concat_max_len\";\n\n public static final String EXTERNAL_SORT_BYTES_THRESHOLD = \"external_sort_bytes_threshold\";\n public static final String EXTERNAL_AGG_BYTES_THRESHOLD = \"external_agg_bytes_threshold\";\n public static final String EXTERNAL_AGG_PARTITION_BITS = \"external_agg_partition_bits\";\n\n public static final String ENABLE_TWO_PHASE_READ_OPT = \"enable_two_phase_read_opt\";\n public static final String TOPN_OPT_LIMIT_THRESHOLD = \"topn_opt_limit_threshold\";\n\n public static final String ENABLE_FILE_CACHE = \"enable_file_cache\";\n\n public static final String FILE_CACHE_BASE_PATH = \"file_cache_base_path\";\n\n public static final String ENABLE_INVERTED_INDEX_QUERY = \"enable_inverted_index_query\";\n\n public static final String ENABLE_PUSHDOWN_COUNT_ON_INDEX = \"enable_count_on_index_pushdown\";\n\n public static final String GROUP_BY_AND_HAVING_USE_ALIAS_FIRST = \"group_by_and_having_use_alias_first\";\n public static final String 
DROP_TABLE_IF_CTAS_FAILED = \"drop_table_if_ctas_failed\";\n\n public static final String MAX_TABLE_COUNT_USE_CASCADES_JOIN_REORDER = \"max_table_count_use_cascades_join_reorder\";\n public static final int MIN_JOIN_REORDER_TABLE_COUNT = 2;\n\n public static final String SHOW_USER_DEFAULT_ROLE = \"show_user_default_role\";\n\n public static final String ENABLE_MINIDUMP = \"enable_minidump\";\n\n public static final String MINIDUMP_PATH = \"minidump_path\";\n\n public static final String TRACE_NEREIDS = \"trace_nereids\";\n\n public static final String PLAN_NEREIDS_DUMP = \"plan_nereids_dump\";\n\n public static final String DUMP_NEREIDS_MEMO = \"dump_nereids_memo\";\n\n \n public static final String USE_FIX_REPLICA = \"use_fix_replica\";\n\n public static final String DRY_RUN_QUERY = \"dry_run_query\";\n\n \n public static final String FILE_SPLIT_SIZE = \"file_split_size\";\n\n /**\n * use insert stmt as the unified backend for all loads\n */\n public static final String ENABLE_UNIFIED_LOAD = \"enable_unified_load\";\n\n public static final String ENABLE_PARQUET_LAZY_MAT = \"enable_parquet_lazy_materialization\";\n\n public static final String ENABLE_ORC_LAZY_MAT = \"enable_orc_lazy_materialization\";\n\n public static final String INLINE_CTE_REFERENCED_THRESHOLD = \"inline_cte_referenced_threshold\";\n\n public static final String ENABLE_CTE_MATERIALIZE = \"enable_cte_materialize\";\n\n public static final String ENABLE_SCAN_RUN_SERIAL = \"enable_scan_node_run_serial\";\n\n public static final String ENABLE_ANALYZE_COMPLEX_TYPE_COLUMN = \"enable_analyze_complex_type_column\";\n\n public static final String EXTERNAL_TABLE_ANALYZE_PART_NUM = \"external_table_analyze_part_num\";\n\n public static final String ENABLE_STRONG_CONSISTENCY = \"enable_strong_consistency_read\";\n\n public static final String PARALLEL_SYNC_ANALYZE_TASK_NUM = \"parallel_sync_analyze_task_num\";\n\n public static final String TRUNCATE_CHAR_OR_VARCHAR_COLUMNS = 
\"truncate_char_or_varchar_columns\";\n\n public static final String CBO_CPU_WEIGHT = \"cbo_cpu_weight\";\n\n public static final String CBO_MEM_WEIGHT = \"cbo_mem_weight\";\n\n public static final String CBO_NET_WEIGHT = \"cbo_net_weight\";\n\n public static final String ROUND_PRECISE_DECIMALV2_VALUE = \"round_precise_decimalv2_value\";\n\n public static final String ENABLE_DELETE_SUB_PREDICATE_V2 = \"enable_delete_sub_predicate_v2\";\n\n public static final String JDBC_CLICKHOUSE_QUERY_FINAL = \"jdbc_clickhouse_query_final\";\n\n public static final String ENABLE_MEMTABLE_ON_SINK_NODE =\n \"enable_memtable_on_sink_node\";\n\n \n public static final String FULL_AUTO_ANALYZE_START_TIME = \"full_auto_analyze_start_time\";\n\n \n public static final String FULL_AUTO_ANALYZE_END_TIME = \"full_auto_analyze_end_time\";\n\n public static final List DEBUG_VARIABLES = ImmutableList.of(\n SKIP_DELETE_PREDICATE,\n SKIP_DELETE_BITMAP,\n SKIP_DELETE_SIGN,\n SKIP_STORAGE_ENGINE_MERGE,\n SHOW_HIDDEN_COLUMNS\n );\n\n \n public Map sessionOriginValue = new HashMap();\n \n \n public boolean isSingleSetVar = false;\n\n @VariableMgr.VarAttr(name = JDBC_CLICKHOUSE_QUERY_FINAL)\n public boolean jdbcClickhouseQueryFinal = false;\n\n @VariableMgr.VarAttr(name = ROUND_PRECISE_DECIMALV2_VALUE)\n public boolean roundPreciseDecimalV2Value = false;\n\n @VariableMgr.VarAttr(name = INSERT_VISIBLE_TIMEOUT_MS, needForward = true)\n public long insertVisibleTimeoutMs = DEFAULT_INSERT_VISIBLE_TIMEOUT_MS;\n\n \n @VariableMgr.VarAttr(name = EXEC_MEM_LIMIT)\n public long maxExecMemByte = 2147483648L;\n\n @VariableMgr.VarAttr(name = SCAN_QUEUE_MEM_LIMIT)\n public long maxScanQueueMemByte = 2147483648L / 20;\n\n @VariableMgr.VarAttr(name = ENABLE_SPILLING)\n public boolean enableSpilling = false;\n\n @VariableMgr.VarAttr(name = ENABLE_EXCHANGE_NODE_PARALLEL_MERGE)\n public boolean enableExchangeNodeParallelMerge = false;\n\n \n \n @VariableMgr.VarAttr(name = DEFAULT_ORDER_BY_LIMIT)\n private long 
defaultOrderByLimit = -1;\n\n \n @VariableMgr.VarAttr(name = QUERY_TIMEOUT)\n public int queryTimeoutS = 300;\n\n \n \n \n \n \n @VariableMgr.VarAttr(name = MAX_EXECUTION_TIME, fuzzy = true, setter = \"setMaxExecutionTimeMS\")\n public int maxExecutionTimeMS = -1;\n\n @VariableMgr.VarAttr(name = INSERT_TIMEOUT)\n public int insertTimeoutS = 14400;\n\n \n @VariableMgr.VarAttr(name = ENABLE_PROFILE, needForward = true)\n public boolean enableProfile = false;\n\n \n \n \n @VariableMgr.VarAttr(name = ENABLE_SINGLE_DISTINCT_COLUMN_OPT)\n public boolean enableSingleDistinctColumnOpt = false;\n\n \n @VariableMgr.VarAttr(name = SQL_MODE, needForward = true)\n public long sqlMode = SqlModeHelper.MODE_DEFAULT;\n\n @VariableMgr.VarAttr(name = WORKLOAD_VARIABLE)\n public String workloadGroup = \"\";\n\n @VariableMgr.VarAttr(name = RESOURCE_VARIABLE)\n public String resourceGroup = \"\";\n\n \n @VariableMgr.VarAttr(name = AUTO_COMMIT)\n public boolean autoCommit = true;\n\n \n @VariableMgr.VarAttr(name = TX_ISOLATION)\n public String txIsolation = \"REPEATABLE-READ\";\n\n \n @VariableMgr.VarAttr(name = TX_READ_ONLY)\n public boolean txReadonly = false;\n\n \n @VariableMgr.VarAttr(name = TRANSACTION_READ_ONLY)\n public boolean transactionReadonly = false;\n\n \n @VariableMgr.VarAttr(name = TRANSACTION_ISOLATION)\n public String transactionIsolation = \"REPEATABLE-READ\";\n\n \n @VariableMgr.VarAttr(name = CHARACTER_SET_CLIENT)\n public String charsetClient = \"utf8\";\n @VariableMgr.VarAttr(name = CHARACTER_SET_CONNNECTION)\n public String charsetConnection = \"utf8\";\n @VariableMgr.VarAttr(name = CHARACTER_SET_RESULTS)\n public String charsetResults = \"utf8\";\n @VariableMgr.VarAttr(name = CHARACTER_SET_SERVER)\n public String charsetServer = \"utf8\";\n @VariableMgr.VarAttr(name = COLLATION_CONNECTION)\n public String collationConnection = \"utf8_general_ci\";\n @VariableMgr.VarAttr(name = COLLATION_DATABASE)\n public String collationDatabase = \"utf8_general_ci\";\n\n 
@VariableMgr.VarAttr(name = COLLATION_SERVER)\n public String collationServer = \"utf8_general_ci\";\n\n \n @VariableMgr.VarAttr(name = SQL_AUTO_IS_NULL)\n public boolean sqlAutoIsNull = false;\n\n @VariableMgr.VarAttr(name = SQL_SELECT_LIMIT)\n private long sqlSelectLimit = Long.MAX_VALUE;\n\n \n @VariableMgr.VarAttr(name = MAX_ALLOWED_PACKET)\n public int maxAllowedPacket = 1048576;\n\n @VariableMgr.VarAttr(name = AUTO_INCREMENT_INCREMENT)\n public int autoIncrementIncrement = 1;\n\n \n @VariableMgr.VarAttr(name = QUERY_CACHE_TYPE)\n public int queryCacheType = 0;\n\n \n @VariableMgr.VarAttr(name = INTERACTIVE_TIMTOUT)\n public int interactiveTimeout = 3600;\n\n \n @VariableMgr.VarAttr(name = WAIT_TIMEOUT)\n public int waitTimeoutS = 28800;\n\n \n @VariableMgr.VarAttr(name = NET_WRITE_TIMEOUT)\n public int netWriteTimeout = 60;\n\n \n @VariableMgr.VarAttr(name = NET_READ_TIMEOUT)\n public int netReadTimeout = 60;\n\n \n @VariableMgr.VarAttr(name = TIME_ZONE, needForward = true)\n public String timeZone = TimeUtils.getSystemTimeZone().getID();\n\n @VariableMgr.VarAttr(name = PARALLEL_EXCHANGE_INSTANCE_NUM)\n public int exchangeInstanceParallel = -1;\n\n @VariableMgr.VarAttr(name = SQL_SAFE_UPDATES)\n public int sqlSafeUpdates = 0;\n\n \n @VariableMgr.VarAttr(name = NET_BUFFER_LENGTH, flag = VariableMgr.READ_ONLY)\n public int netBufferLength = 16384;\n\n \n @VariableMgr.VarAttr(name = CODEGEN_LEVEL)\n public int codegenLevel = 0;\n\n @VariableMgr.VarAttr(name = HAVE_QUERY_CACHE, flag = VariableMgr.READ_ONLY)\n public boolean haveQueryCache = false;\n\n \n @VariableMgr.VarAttr(name = BATCH_SIZE, fuzzy = true)\n public int batchSize = 4064;\n\n @VariableMgr.VarAttr(name = DISABLE_STREAMING_PREAGGREGATIONS, fuzzy = true)\n public boolean disableStreamPreaggregations = false;\n\n @VariableMgr.VarAttr(name = DISABLE_COLOCATE_PLAN)\n public boolean disableColocatePlan = false;\n\n @VariableMgr.VarAttr(name = ENABLE_COLOCATE_SCAN)\n public boolean enableColocateScan = 
false;\n\n @VariableMgr.VarAttr(name = ENABLE_BUCKET_SHUFFLE_JOIN, varType = VariableAnnotation.EXPERIMENTAL_ONLINE)\n public boolean enableBucketShuffleJoin = true;\n\n @VariableMgr.VarAttr(name = PREFER_JOIN_METHOD)\n public String preferJoinMethod = \"broadcast\";\n\n @VariableMgr.VarAttr(name = FRAGMENT_TRANSMISSION_COMPRESSION_CODEC)\n public String fragmentTransmissionCompressionCodec = \"lz4\";\n\n /*\n * the parallel exec instance num for one Fragment in one BE\n * 1 means disable this feature\n */\n @VariableMgr.VarAttr(name = PARALLEL_FRAGMENT_EXEC_INSTANCE_NUM, needForward = true, fuzzy = true)\n public int parallelExecInstanceNum = 1;\n\n @VariableMgr.VarAttr(name = PARALLEL_PIPELINE_TASK_NUM, fuzzy = true, needForward = true)\n public int parallelPipelineTaskNum = 0;\n\n @VariableMgr.VarAttr(name = MAX_INSTANCE_NUM)\n public int maxInstanceNum = 64;\n\n @VariableMgr.VarAttr(name = ENABLE_INSERT_STRICT, needForward = true)\n public boolean enableInsertStrict = true;\n\n @VariableMgr.VarAttr(name = ENABLE_ODBC_TRANSCATION)\n public boolean enableOdbcTransaction = false;\n\n @VariableMgr.VarAttr(name = ENABLE_SCAN_RUN_SERIAL, description = {\n \"\u662f\u5426\u5f00\u542fScanNode\u4e32\u884c\u8bfb\uff0c\u4ee5\u907f\u514dlimit\u8f83\u5c0f\u7684\u60c5\u51b5\u4e0b\u7684\u8bfb\u653e\u5927\uff0c\u53ef\u4ee5\u63d0\u9ad8\u67e5\u8be2\u7684\u5e76\u53d1\u80fd\u529b\",\n \"Whether to enable ScanNode serial reading to avoid read amplification in cases of small limits\"\n + \"which can improve query concurrency. 
default is false.\"})\n public boolean enableScanRunSerial = false;\n\n @VariableMgr.VarAttr(name = ENABLE_SQL_CACHE)\n public boolean enableSqlCache = false;\n\n @VariableMgr.VarAttr(name = ENABLE_PARTITION_CACHE)\n public boolean enablePartitionCache = false;\n\n @VariableMgr.VarAttr(name = FORWARD_TO_MASTER)\n public boolean forwardToMaster = true;\n\n @VariableMgr.VarAttr(name = USE_V2_ROLLUP)\n public boolean useV2Rollup = false;\n\n @VariableMgr.VarAttr(name = REWRITE_COUNT_DISTINCT_TO_BITMAP_HLL)\n public boolean rewriteCountDistinct = true;\n\n \n @VariableMgr.VarAttr(name = EVENT_SCHEDULER)\n public String eventScheduler = \"OFF\";\n @VariableMgr.VarAttr(name = STORAGE_ENGINE)\n public String storageEngine = \"olap\";\n @VariableMgr.VarAttr(name = DEFAULT_STORAGE_ENGINE)\n public String defaultStorageEngine = \"olap\";\n @VariableMgr.VarAttr(name = DEFAULT_TMP_STORAGE_ENGINE)\n public String defaultTmpStorageEngine = \"olap\";\n @VariableMgr.VarAttr(name = DIV_PRECISION_INCREMENT)\n public int divPrecisionIncrement = 4;\n\n \n @VariableMgr.VarAttr(name = MAX_SCAN_KEY_NUM)\n public int maxScanKeyNum = -1;\n @VariableMgr.VarAttr(name = MAX_PUSHDOWN_CONDITIONS_PER_COLUMN)\n public int maxPushdownConditionsPerColumn = -1;\n @VariableMgr.VarAttr(name = SHOW_HIDDEN_COLUMNS, flag = VariableMgr.SESSION_ONLY)\n public boolean showHiddenColumns = false;\n\n @VariableMgr.VarAttr(name = ALLOW_PARTITION_COLUMN_NULLABLE)\n public boolean allowPartitionColumnNullable = true;\n\n @VariableMgr.VarAttr(name = DELETE_WITHOUT_PARTITION, needForward = true)\n public boolean deleteWithoutPartition = false;\n\n @VariableMgr.VarAttr(name = SEND_BATCH_PARALLELISM, needForward = true)\n public int sendBatchParallelism = 1;\n\n @VariableMgr.VarAttr(name = EXTRACT_WIDE_RANGE_EXPR, needForward = true)\n public boolean extractWideRangeExpr = true;\n\n @VariableMgr.VarAttr(name = ENABLE_NEREIDS_DML, needForward = true)\n public boolean enableNereidsDML = false;\n\n 
@VariableMgr.VarAttr(name = ENABLE_STRICT_CONSISTENCY_DML, needForward = true)\n public boolean enableStrictConsistencyDml = false;\n\n @VariableMgr.VarAttr(name = ENABLE_VECTORIZED_ENGINE, varType = VariableAnnotation.EXPERIMENTAL_ONLINE)\n public boolean enableVectorizedEngine = true;\n\n @VariableMgr.VarAttr(name = ENABLE_PIPELINE_ENGINE, fuzzy = true, needForward = true,\n varType = VariableAnnotation.EXPERIMENTAL)\n private boolean enablePipelineEngine = true;\n\n @VariableMgr.VarAttr(name = ENABLE_PIPELINE_X_ENGINE, fuzzy = false, varType = VariableAnnotation.EXPERIMENTAL)\n private boolean enablePipelineXEngine = false;\n\n @VariableMgr.VarAttr(name = ENABLE_AGG_STATE, fuzzy = false, varType = VariableAnnotation.EXPERIMENTAL)\n public boolean enableAggState = false;\n\n @VariableMgr.VarAttr(name = ENABLE_PARALLEL_OUTFILE)\n public boolean enableParallelOutfile = false;\n\n @VariableMgr.VarAttr(name = CPU_RESOURCE_LIMIT)\n public int cpuResourceLimit = -1;\n\n @VariableMgr.VarAttr(name = SQL_QUOTE_SHOW_CREATE)\n public boolean sqlQuoteShowCreate = true;\n\n @VariableMgr.VarAttr(name = TRIM_TAILING_SPACES_FOR_EXTERNAL_TABLE_QUERY, needForward = true)\n public boolean trimTailingSpacesForExternalTableQuery = false;\n\n\n \n \n \n @VariableMgr.VarAttr(name = AUTO_BROADCAST_JOIN_THRESHOLD)\n public double autoBroadcastJoinThreshold = 0.8;\n\n @VariableMgr.VarAttr(name = ENABLE_COST_BASED_JOIN_REORDER)\n private boolean enableJoinReorderBasedCost = false;\n\n @VariableMgr.VarAttr(name = ENABLE_FOLD_CONSTANT_BY_BE, fuzzy = true)\n private boolean enableFoldConstantByBe = false;\n\n @VariableMgr.VarAttr(name = RUNTIME_FILTER_MODE, needForward = true)\n private String runtimeFilterMode = \"GLOBAL\";\n\n @VariableMgr.VarAttr(name = RUNTIME_BLOOM_FILTER_SIZE, needForward = true)\n private int runtimeBloomFilterSize = 2097152;\n\n @VariableMgr.VarAttr(name = RUNTIME_BLOOM_FILTER_MIN_SIZE, needForward = true)\n private int runtimeBloomFilterMinSize = 1048576;\n\n 
@VariableMgr.VarAttr(name = RUNTIME_BLOOM_FILTER_MAX_SIZE, needForward = true)\n private int runtimeBloomFilterMaxSize = 16777216;\n\n @VariableMgr.VarAttr(name = RUNTIME_FILTER_WAIT_TIME_MS, needForward = true)\n private int runtimeFilterWaitTimeMs = 1000;\n\n @VariableMgr.VarAttr(name = RUNTIME_FILTERS_MAX_NUM, needForward = true)\n private int runtimeFiltersMaxNum = 10;\n\n \n @VariableMgr.VarAttr(name = RUNTIME_FILTER_TYPE, fuzzy = true, needForward = true)\n private int runtimeFilterType = 8;\n\n @VariableMgr.VarAttr(name = RUNTIME_FILTER_MAX_IN_NUM, needForward = true)\n private int runtimeFilterMaxInNum = 1024;\n\n @VariableMgr.VarAttr(name = USE_RF_DEFAULT)\n public boolean useRuntimeFilterDefaultSize = false;\n\n public int getBeNumberForTest() {\n return beNumberForTest;\n }\n\n @VariableMgr.VarAttr(name = PROFILLING)\n public boolean profiling = false;\n\n public void setBeNumberForTest(int beNumberForTest) {\n this.beNumberForTest = beNumberForTest;\n }\n\n @VariableMgr.VarAttr(name = BE_NUMBER_FOR_TEST)\n private int beNumberForTest = -1;\n\n public double getCboCpuWeight() {\n return cboCpuWeight;\n }\n\n public void setCboCpuWeight(double cboCpuWeight) {\n this.cboCpuWeight = cboCpuWeight;\n }\n\n public double getCboMemWeight() {\n return cboMemWeight;\n }\n\n public void setCboMemWeight(double cboMemWeight) {\n this.cboMemWeight = cboMemWeight;\n }\n\n public double getCboNetWeight() {\n return cboNetWeight;\n }\n\n public void setCboNetWeight(double cboNetWeight) {\n this.cboNetWeight = cboNetWeight;\n }\n\n @VariableMgr.VarAttr(name = CBO_CPU_WEIGHT)\n private double cboCpuWeight = 1.0;\n\n @VariableMgr.VarAttr(name = CBO_MEM_WEIGHT)\n private double cboMemWeight = 1.0;\n\n @VariableMgr.VarAttr(name = CBO_NET_WEIGHT)\n private double cboNetWeight = 1.5;\n\n @VariableMgr.VarAttr(name = DISABLE_JOIN_REORDER)\n private boolean disableJoinReorder = false;\n\n @VariableMgr.VarAttr(name = MAX_JOIN_NUMBER_OF_REORDER)\n private int maxJoinNumberOfReorder 
= 63;\n\n @VariableMgr.VarAttr(name = ENABLE_BUSHY_TREE, needForward = true)\n private boolean enableBushyTree = false;\n\n public int getMaxJoinNumBushyTree() {\n return maxJoinNumBushyTree;\n }\n\n public void setMaxJoinNumBushyTree(int maxJoinNumBushyTree) {\n this.maxJoinNumBushyTree = maxJoinNumBushyTree;\n }\n\n public int getMaxJoinNumberOfReorder() {\n return maxJoinNumberOfReorder;\n }\n\n public void setMaxJoinNumberOfReorder(int maxJoinNumberOfReorder) {\n this.maxJoinNumberOfReorder = maxJoinNumberOfReorder;\n }\n\n\n @VariableMgr.VarAttr(name = MAX_JOIN_NUMBER_BUSHY_TREE)\n private int maxJoinNumBushyTree = 5;\n\n @VariableMgr.VarAttr(name = ENABLE_PARTITION_TOPN)\n private boolean enablePartitionTopN = true;\n\n @VariableMgr.VarAttr(name = ENABLE_INFER_PREDICATE)\n private boolean enableInferPredicate = true;\n\n @VariableMgr.VarAttr(name = RETURN_OBJECT_DATA_AS_BINARY)\n private boolean returnObjectDataAsBinary = false;\n\n @VariableMgr.VarAttr(name = BLOCK_ENCRYPTION_MODE)\n private String blockEncryptionMode = \"\";\n\n @VariableMgr.VarAttr(name = ENABLE_PROJECTION)\n private boolean enableProjection = true;\n\n @VariableMgr.VarAttr(name = CHECK_OVERFLOW_FOR_DECIMAL)\n private boolean checkOverflowForDecimal = false;\n\n @VariableMgr.VarAttr(name = ENABLE_DPHYP_OPTIMIZER)\n public boolean enableDPHypOptimizer = false;\n\n /**\n * This variable is used to select n-th optimized plan in memo.\n * It can allow us select different plans for the same SQL statement\n * and these plans can be used to evaluate the cost model.\n */\n @VariableMgr.VarAttr(name = NTH_OPTIMIZED_PLAN)\n private int nthOptimizedPlan = 1;\n\n /**\n * as the new optimizer is not mature yet, use this var\n * to control whether to use new optimizer, remove it when\n * the new optimizer is fully developed. 
I hope that day\n * would be coming soon.\n */\n @VariableMgr.VarAttr(name = ENABLE_NEREIDS_PLANNER, needForward = true,\n fuzzy = true, varType = VariableAnnotation.EXPERIMENTAL)\n private boolean enableNereidsPlanner = true;\n\n @VariableMgr.VarAttr(name = DISABLE_NEREIDS_RULES, needForward = true)\n private String disableNereidsRules = \"\";\n\n @VariableMgr.VarAttr(name = ENABLE_NEW_COST_MODEL, needForward = true)\n private boolean enableNewCostModel = false;\n\n @VariableMgr.VarAttr(name = NEREIDS_STAR_SCHEMA_SUPPORT)\n private boolean nereidsStarSchemaSupport = true;\n\n @VariableMgr.VarAttr(name = REWRITE_OR_TO_IN_PREDICATE_THRESHOLD, fuzzy = true)\n private int rewriteOrToInPredicateThreshold = 2;\n\n @VariableMgr.VarAttr(name = NEREIDS_CBO_PENALTY_FACTOR, needForward = true)\n private double nereidsCboPenaltyFactor = 0.7;\n\n @VariableMgr.VarAttr(name = ENABLE_NEREIDS_TRACE)\n private boolean enableNereidsTrace = false;\n\n @VariableMgr.VarAttr(name = ENABLE_DPHYP_TRACE, needForward = true)\n public boolean enableDpHypTrace = false;\n\n @VariableMgr.VarAttr(name = BROADCAST_RIGHT_TABLE_SCALE_FACTOR)\n private double broadcastRightTableScaleFactor = 0.0;\n\n @VariableMgr.VarAttr(name = BROADCAST_ROW_COUNT_LIMIT, needForward = true)\n private double broadcastRowCountLimit = 30000000;\n\n @VariableMgr.VarAttr(name = BROADCAST_HASHTABLE_MEM_LIMIT_PERCENTAGE, needForward = true)\n private double broadcastHashtableMemLimitPercentage = 0.2;\n\n @VariableMgr.VarAttr(name = ENABLE_RUNTIME_FILTER_PRUNE, needForward = true)\n public boolean enableRuntimeFilterPrune = false;\n\n /**\n * The client can pass some special information by setting this session variable in the format: \"k1:v1;k2:v2\".\n * For example, trace_id can be passed to trace the query request sent by the user.\n * set session_context=\"trace_id:1234565678\";\n */\n @VariableMgr.VarAttr(name = SESSION_CONTEXT, needForward = true)\n public String sessionContext = \"\";\n\n @VariableMgr.VarAttr(name = 
ENABLE_SINGLE_REPLICA_INSERT,\n needForward = true, varType = VariableAnnotation.EXPERIMENTAL)\n public boolean enableSingleReplicaInsert = false;\n\n @VariableMgr.VarAttr(name = ENABLE_FUNCTION_PUSHDOWN, fuzzy = true)\n public boolean enableFunctionPushdown = false;\n\n @VariableMgr.VarAttr(name = FORBID_UNKNOWN_COLUMN_STATS)\n public boolean forbidUnknownColStats = false;\n\n @VariableMgr.VarAttr(name = ENABLE_COMMON_EXPR_PUSHDOWN, fuzzy = true)\n public boolean enableCommonExprPushdown = true;\n\n @VariableMgr.VarAttr(name = ENABLE_LOCAL_EXCHANGE, fuzzy = true, varType = VariableAnnotation.DEPRECATED)\n public boolean enableLocalExchange = true;\n\n /**\n * For debug purpose, don't merge unique key and agg key when reading data.\n */\n @VariableMgr.VarAttr(name = SKIP_STORAGE_ENGINE_MERGE)\n public boolean skipStorageEngineMerge = false;\n\n /**\n * For debug purpose, skip delete predicate when reading data.\n */\n @VariableMgr.VarAttr(name = SKIP_DELETE_PREDICATE)\n public boolean skipDeletePredicate = false;\n\n /**\n * For debug purpose, skip delete sign when reading data.\n */\n @VariableMgr.VarAttr(name = SKIP_DELETE_SIGN)\n public boolean skipDeleteSign = false;\n\n /**\n * For debug purpose, skip delete bitmap when reading data.\n */\n @VariableMgr.VarAttr(name = SKIP_DELETE_BITMAP)\n public boolean skipDeleteBitmap = false;\n\n \n \n \n @VariableMgr.VarAttr(name = ENABLE_FALLBACK_TO_ORIGINAL_PLANNER, needForward = true)\n public boolean enableFallbackToOriginalPlanner = true;\n\n @VariableMgr.VarAttr(name = ENABLE_NEREIDS_TIMEOUT, needForward = true)\n public boolean enableNereidsTimeout = true;\n\n @VariableMgr.VarAttr(name = ENABLE_PUSH_DOWN_NO_GROUP_AGG)\n public boolean enablePushDownNoGroupAgg = true;\n\n /**\n * The current statistics are only used for CBO test,\n * and are not available to users. 
(work in progress)\n */\n @VariableMgr.VarAttr(name = ENABLE_CBO_STATISTICS)\n public boolean enableCboStatistics = false;\n\n @VariableMgr.VarAttr(name = ENABLE_ELIMINATE_SORT_NODE)\n public boolean enableEliminateSortNode = true;\n\n @VariableMgr.VarAttr(name = INTERNAL_SESSION)\n public boolean internalSession = false;\n\n \n @VariableMgr.VarAttr(name = PARTITIONED_HASH_JOIN_ROWS_THRESHOLD, fuzzy = true)\n public int partitionedHashJoinRowsThreshold = 0;\n\n \n @VariableMgr.VarAttr(name = PARTITIONED_HASH_AGG_ROWS_THRESHOLD, fuzzy = true)\n public int partitionedHashAggRowsThreshold = 0;\n\n @VariableMgr.VarAttr(name = PARTITION_PRUNING_EXPAND_THRESHOLD, fuzzy = true)\n public int partitionPruningExpandThreshold = 10;\n\n @VariableMgr.VarAttr(name = ENABLE_SHARE_HASH_TABLE_FOR_BROADCAST_JOIN, fuzzy = true)\n public boolean enableShareHashTableForBroadcastJoin = true;\n\n @VariableMgr.VarAttr(name = ENABLE_HASH_JOIN_EARLY_START_PROBE, fuzzy = false)\n public boolean enableHashJoinEarlyStartProbe = false;\n\n @VariableMgr.VarAttr(name = ENABLE_UNICODE_NAME_SUPPORT)\n public boolean enableUnicodeNameSupport = false;\n\n @VariableMgr.VarAttr(name = REPEAT_MAX_NUM, needForward = true)\n public int repeatMaxNum = 10000;\n\n @VariableMgr.VarAttr(name = GROUP_CONCAT_MAX_LEN)\n public long groupConcatMaxLen = 2147483646;\n\n \n \n public static final long MIN_EXTERNAL_SORT_BYTES_THRESHOLD = 134217728;\n @VariableMgr.VarAttr(name = EXTERNAL_SORT_BYTES_THRESHOLD,\n checker = \"checkExternalSortBytesThreshold\", fuzzy = true)\n public long externalSortBytesThreshold = 0;\n\n \n public static final long MIN_EXTERNAL_AGG_BYTES_THRESHOLD = 134217728;\n @VariableMgr.VarAttr(name = EXTERNAL_AGG_BYTES_THRESHOLD,\n checker = \"checkExternalAggBytesThreshold\", fuzzy = true)\n public long externalAggBytesThreshold = 0;\n\n public static final int MIN_EXTERNAL_AGG_PARTITION_BITS = 4;\n public static final int MAX_EXTERNAL_AGG_PARTITION_BITS = 8;\n @VariableMgr.VarAttr(name = 
EXTERNAL_AGG_PARTITION_BITS,\n checker = \"checkExternalAggPartitionBits\", fuzzy = true)\n public int externalAggPartitionBits = 8; \n\n \n \n \n @VariableMgr.VarAttr(name = ENABLE_TWO_PHASE_READ_OPT, fuzzy = true)\n public boolean enableTwoPhaseReadOpt = true;\n @VariableMgr.VarAttr(name = TOPN_OPT_LIMIT_THRESHOLD)\n public long topnOptLimitThreshold = 1024;\n\n \n \n @VariableMgr.VarAttr(name = GROUP_BY_AND_HAVING_USE_ALIAS_FIRST)\n public boolean groupByAndHavingUseAliasFirst = false;\n\n \n @VariableMgr.VarAttr(name = ENABLE_FILE_CACHE, needForward = true, description = {\n \"\u662f\u5426\u542f\u7528file cache\u3002\u8be5\u53d8\u91cf\u53ea\u6709\u5728be.conf\u4e2denable_file_cache=true\u65f6\u624d\u6709\u6548\uff0c\"\n + \"\u5982\u679cbe.conf\u4e2denable_file_cache=false\uff0c\u8be5BE\u8282\u70b9\u7684file cache\u5904\u4e8e\u7981\u7528\u72b6\u6001\u3002\",\n \"Set wether to use file cache. This variable takes effect only if the BE config enable_file_cache=true. \"\n + \"The cache is not used when BE config enable_file_cache=false.\"})\n public boolean enableFileCache = false;\n\n \n @VariableMgr.VarAttr(name = FILE_CACHE_BASE_PATH, needForward = true, description = {\n \"\u6307\u5b9ablock file cache\u5728BE\u4e0a\u7684\u5b58\u50a8\u8def\u5f84\uff0c\u9ed8\u8ba4 'random'\uff0c\u968f\u673a\u9009\u62e9BE\u914d\u7f6e\u7684\u5b58\u50a8\u8def\u5f84\u3002\",\n \"Specify the storage path of the block file cache on BE, default 'random', \"\n + \"and randomly select the storage path configured by BE.\"})\n public String fileCacheBasePath = \"random\";\n\n \n @VariableMgr.VarAttr(name = ENABLE_INVERTED_INDEX_QUERY, needForward = true, description = {\n \"\u662f\u5426\u542f\u7528inverted index query\u3002\", \"Set whether to use inverted index query.\"})\n public boolean enableInvertedIndexQuery = true;\n\n \n @VariableMgr.VarAttr(name = ENABLE_PUSHDOWN_COUNT_ON_INDEX, needForward = true, description = {\n \"\u662f\u5426\u542f\u7528count_on_index pushdown\u3002\", \"Set 
whether to pushdown count_on_index.\"})\n public boolean enablePushDownCountOnIndex = true;\n\n \n @VariableMgr.VarAttr(name = DROP_TABLE_IF_CTAS_FAILED, needForward = true)\n public boolean dropTableIfCtasFailed = true;\n\n @VariableMgr.VarAttr(name = MAX_TABLE_COUNT_USE_CASCADES_JOIN_REORDER, needForward = true)\n public int maxTableCountUseCascadesJoinReorder = 10;\n\n \n @VariableMgr.VarAttr(name = SHOW_USER_DEFAULT_ROLE, needForward = true)\n public boolean showUserDefaultRole = false;\n\n \n @VariableMgr.VarAttr(name = USE_FIX_REPLICA)\n public int useFixReplica = -1;\n\n @VariableMgr.VarAttr(name = DUMP_NEREIDS_MEMO)\n public boolean dumpNereidsMemo = false;\n\n @VariableMgr.VarAttr(name = \"memo_max_group_expression_size\")\n public int memoMaxGroupExpressionSize = 10000;\n\n @VariableMgr.VarAttr(name = ENABLE_MINIDUMP)\n public boolean enableMinidump = false;\n\n @VariableMgr.VarAttr(name = ENABLE_FOLD_NONDETERMINISTIC_FN)\n public boolean enableFoldNondeterministicFn = false;\n\n @VariableMgr.VarAttr(name = MINIDUMP_PATH)\n public String minidumpPath = \"\";\n\n @VariableMgr.VarAttr(name = TRACE_NEREIDS)\n public boolean traceNereids = false;\n\n @VariableMgr.VarAttr(name = PLAN_NEREIDS_DUMP)\n public boolean planNereidsDump = false;\n\n \n @VariableMgr.VarAttr(name = DRY_RUN_QUERY, needForward = true)\n public boolean dryRunQuery = false;\n\n @VariableMgr.VarAttr(name = FILE_SPLIT_SIZE, needForward = true)\n public long fileSplitSize = 0;\n\n /**\n * determine should we enable unified load (use insert stmt as the backend for all load)\n */\n @VariableMgr.VarAttr(name = ENABLE_UNIFIED_LOAD, needForward = true)\n public boolean enableUnifiedLoad = false;\n\n @VariableMgr.VarAttr(\n name = ENABLE_PARQUET_LAZY_MAT,\n description = {\"\u63a7\u5236 parquet reader \u662f\u5426\u542f\u7528\u5ef6\u8fdf\u7269\u5316\u6280\u672f\u3002\u9ed8\u8ba4\u4e3a true\u3002\",\n \"Controls whether to use lazy materialization technology in parquet reader. 
\"\n + \"The default value is true.\"},\n needForward = true)\n public boolean enableParquetLazyMat = true;\n\n @VariableMgr.VarAttr(\n name = ENABLE_ORC_LAZY_MAT,\n description = {\"\u63a7\u5236 orc reader \u662f\u5426\u542f\u7528\u5ef6\u8fdf\u7269\u5316\u6280\u672f\u3002\u9ed8\u8ba4\u4e3a true\u3002\",\n \"Controls whether to use lazy materialization technology in orc reader. \"\n + \"The default value is true.\"},\n needForward = true)\n public boolean enableOrcLazyMat = true;\n\n @VariableMgr.VarAttr(\n name = EXTERNAL_TABLE_ANALYZE_PART_NUM,\n description = {\"\u6536\u96c6\u5916\u8868\u7edf\u8ba1\u4fe1\u606f\u884c\u6570\u65f6\u9009\u53d6\u7684\u91c7\u6837\u5206\u533a\u6570\uff0c\u9ed8\u8ba4-1\u8868\u793a\u5168\u90e8\u5206\u533a\",\n \"Number of sample partition for collecting external table line number, \"\n + \"default -1 means all partitions\"},\n needForward = false)\n public int externalTableAnalyzePartNum = -1;\n\n @VariableMgr.VarAttr(name = INLINE_CTE_REFERENCED_THRESHOLD)\n public int inlineCTEReferencedThreshold = 1;\n\n @VariableMgr.VarAttr(name = ENABLE_CTE_MATERIALIZE)\n public boolean enableCTEMaterialize = true;\n\n @VariableMgr.VarAttr(name = ENABLE_ANALYZE_COMPLEX_TYPE_COLUMN)\n public boolean enableAnalyzeComplexTypeColumn = false;\n\n @VariableMgr.VarAttr(name = ENABLE_STRONG_CONSISTENCY, description = {\"\u7528\u4ee5\u5f00\u542f\u5f3a\u4e00\u81f4\u8bfb\u3002Doris \u9ed8\u8ba4\u652f\u6301\u540c\u4e00\u4e2a\u4f1a\u8bdd\u5185\u7684\"\n + \"\u5f3a\u4e00\u81f4\u6027\uff0c\u5373\u540c\u4e00\u4e2a\u4f1a\u8bdd\u5185\u5bf9\u6570\u636e\u7684\u53d8\u66f4\u64cd\u4f5c\u662f\u5b9e\u65f6\u53ef\u89c1\u7684\u3002\u5982\u9700\u8981\u4f1a\u8bdd\u95f4\u7684\u5f3a\u4e00\u81f4\u8bfb\uff0c\u5219\u9700\u5c06\u6b64\u53d8\u91cf\u8bbe\u7f6e\u4e3atrue\u3002\",\n \"Used to enable strong consistent reading. By default, Doris supports strong consistency \"\n + \"within the same session, that is, changes to data within the same session are visible in \"\n + \"real time. 
If you want strong consistent reads between sessions, set this variable to true. \"\n })\n public boolean enableStrongConsistencyRead = false;\n\n @VariableMgr.VarAttr(name = PARALLEL_SYNC_ANALYZE_TASK_NUM)\n public int parallelSyncAnalyzeTaskNum = 2;\n\n @VariableMgr.VarAttr(name = ENABLE_DELETE_SUB_PREDICATE_V2, fuzzy = true, needForward = true)\n public boolean enableDeleteSubPredicateV2 = true;\n\n @VariableMgr.VarAttr(name = TRUNCATE_CHAR_OR_VARCHAR_COLUMNS,\n description = {\"\u662f\u5426\u6309\u7167\u8868\u7684 schema \u6765\u622a\u65ad char \u6216\u8005 varchar \u5217\u3002\u9ed8\u8ba4\u4e3a false\u3002\\n\"\n + \"\u56e0\u4e3a\u5916\u8868\u4f1a\u5b58\u5728\u8868\u7684 schema \u4e2d char \u6216\u8005 varchar \u5217\u7684\u6700\u5927\u957f\u5ea6\u548c\u5e95\u5c42 parquet \u6216\u8005 orc \u6587\u4ef6\u4e2d\u7684 schema \u4e0d\u4e00\u81f4\"\n + \"\u7684\u60c5\u51b5\u3002\u6b64\u65f6\u5f00\u542f\u6539\u9009\u9879\uff0c\u4f1a\u6309\u7167\u8868\u7684 schema \u4e2d\u7684\u6700\u5927\u957f\u5ea6\u8fdb\u884c\u622a\u65ad\u3002\",\n \"Whether to truncate char or varchar columns according to the table's schema. 
\"\n + \"The default is false.\\n\"\n + \"Because the maximum length of the char or varchar column in the schema of the table\"\n + \" is inconsistent with the schema in the underlying parquet or orc file.\"\n + \" At this time, if the option is turned on, it will be truncated according to the maximum length\"\n + \" in the schema of the table.\"},\n needForward = true)\n public boolean truncateCharOrVarcharColumns = false;\n\n @VariableMgr.VarAttr(name = ENABLE_MEMTABLE_ON_SINK_NODE, needForward = true)\n public boolean enableMemtableOnSinkNode = false;\n\n @VariableMgr.VarAttr(name = FULL_AUTO_ANALYZE_START_TIME, needForward = true)\n public String fullAutoAnalyzeStartTime = \"\";\n\n @VariableMgr.VarAttr(name = FULL_AUTO_ANALYZE_END_TIME, needForward = true)\n public String fullAutoAnalyzeEndTime = \"\";\n\n\n\n \n \n \n\n public String printFuzzyVariables() {\n if (!Config.use_fuzzy_session_variable) {\n return \"\";\n }\n List res = Lists.newArrayList();\n for (Field field : SessionVariable.class.getDeclaredFields()) {\n VarAttr attr = field.getAnnotation(VarAttr.class);\n if (attr == null || !attr.fuzzy()) {\n continue;\n }\n field.setAccessible(true);\n try {\n Object val = field.get(this);\n res.add(attr.name() + \"=\" + val.toString());\n } catch (IllegalAccessException e) {\n LOG.warn(\"failed to get fuzzy session variable {}\", attr.name(), e);\n }\n }\n return Joiner.on(\",\").join(res);\n }\n\n /**\n * syntax:\n * all -> use all event\n * all except event_1, event_2, ..., event_n -> use all events excluding the event_1~n\n * event_1, event_2, ..., event_n -> use event_1~n\n */\n @VariableMgr.VarAttr(name = NEREIDS_TRACE_EVENT_MODE, checker = \"checkNereidsTraceEventMode\")\n public String nereidsTraceEventMode = \"all\";\n\n private Set> parsedNereidsEventMode = EventSwitchParser.parse(Lists.newArrayList(\"all\"));\n\n public boolean isInDebugMode() {\n return showHiddenColumns || skipDeleteBitmap || skipDeletePredicate || skipDeleteSign || 
skipStorageEngineMerge;\n }\n\n public void setEnableNereidsTrace(boolean enableNereidsTrace) {\n this.enableNereidsTrace = enableNereidsTrace;\n }\n\n public void setNereidsTraceEventMode(String nereidsTraceEventMode) {\n checkNereidsTraceEventMode(nereidsTraceEventMode);\n this.nereidsTraceEventMode = nereidsTraceEventMode;\n }\n\n public void checkNereidsTraceEventMode(String nereidsTraceEventMode) {\n List strings = EventSwitchParser.checkEventModeStringAndSplit(nereidsTraceEventMode);\n if (strings != null) {\n parsedNereidsEventMode = EventSwitchParser.parse(strings);\n }\n if (parsedNereidsEventMode == null) {\n throw new UnsupportedOperationException(\"nereids_trace_event_mode syntax error, please check\");\n }\n }\n\n public Set> getParsedNereidsEventMode() {\n return parsedNereidsEventMode;\n }\n\n public String getBlockEncryptionMode() {\n return blockEncryptionMode;\n }\n\n public void setBlockEncryptionMode(String blockEncryptionMode) {\n this.blockEncryptionMode = blockEncryptionMode;\n }\n\n public void setRewriteOrToInPredicateThreshold(int threshold) {\n this.rewriteOrToInPredicateThreshold = threshold;\n }\n\n public int getRewriteOrToInPredicateThreshold() {\n return rewriteOrToInPredicateThreshold;\n }\n\n public long getMaxExecMemByte() {\n return maxExecMemByte;\n }\n\n public long getMaxScanQueueExecMemByte() {\n return maxScanQueueMemByte;\n }\n\n public int getQueryTimeoutS() {\n return queryTimeoutS;\n }\n\n public void setEnableTwoPhaseReadOpt(boolean enable) {\n enableTwoPhaseReadOpt = enable;\n }\n\n public int getMaxExecutionTimeMS() {\n return maxExecutionTimeMS;\n }\n\n public int getInsertTimeoutS() {\n return insertTimeoutS;\n }\n\n\n public void setInsertTimeoutS(int insertTimeoutS) {\n this.insertTimeoutS = insertTimeoutS;\n }\n\n public boolean enableProfile() {\n return enableProfile;\n }\n\n public boolean enableSingleDistinctColumnOpt() {\n return enableSingleDistinctColumnOpt;\n }\n\n public int getWaitTimeoutS() {\n return 
waitTimeoutS;\n }\n\n public long getSqlMode() {\n return sqlMode;\n }\n\n public void setSqlMode(long sqlMode) {\n this.sqlMode = sqlMode;\n }\n\n public boolean isEnableJoinReorderBasedCost() {\n return enableJoinReorderBasedCost;\n }\n\n public boolean isAutoCommit() {\n return autoCommit;\n }\n\n public boolean isTxReadonly() {\n return txReadonly;\n }\n\n public boolean isTransactionReadonly() {\n return transactionReadonly;\n }\n\n public String getTransactionIsolation() {\n return transactionIsolation;\n }\n\n public String getTxIsolation() {\n return txIsolation;\n }\n\n public String getCharsetClient() {\n return charsetClient;\n }\n\n public String getCharsetConnection() {\n return charsetConnection;\n }\n\n public String getCharsetResults() {\n return charsetResults;\n }\n\n public String getCharsetServer() {\n return charsetServer;\n }\n\n public String getCollationConnection() {\n return collationConnection;\n }\n\n public String getCollationDatabase() {\n return collationDatabase;\n }\n\n public String getCollationServer() {\n return collationServer;\n }\n\n public boolean isSqlAutoIsNull() {\n return sqlAutoIsNull;\n }\n\n public long getSqlSelectLimit() {\n if (sqlSelectLimit < 0 || sqlSelectLimit >= Long.MAX_VALUE) {\n return -1;\n }\n return sqlSelectLimit;\n }\n\n public long getDefaultOrderByLimit() {\n return defaultOrderByLimit;\n }\n\n public int getMaxAllowedPacket() {\n return maxAllowedPacket;\n }\n\n public int getAutoIncrementIncrement() {\n return autoIncrementIncrement;\n }\n\n public int getQueryCacheType() {\n return queryCacheType;\n }\n\n public int getInteractiveTimeout() {\n return interactiveTimeout;\n }\n\n public int getNetWriteTimeout() {\n return netWriteTimeout;\n }\n\n public int getNetReadTimeout() {\n return netReadTimeout;\n }\n\n public String getTimeZone() {\n return timeZone;\n }\n\n public void setTimeZone(String timeZone) {\n this.timeZone = timeZone;\n }\n\n public int getSqlSafeUpdates() {\n return 
sqlSafeUpdates;\n }\n\n public int getNetBufferLength() {\n return netBufferLength;\n }\n\n public int getCodegenLevel() {\n return codegenLevel;\n }\n\n public boolean getHaveQueryCache() {\n return haveQueryCache;\n }\n\n /**\n * setMaxExecMemByte.\n **/\n public void setMaxExecMemByte(long maxExecMemByte) {\n if (maxExecMemByte < MIN_EXEC_MEM_LIMIT) {\n this.maxExecMemByte = MIN_EXEC_MEM_LIMIT;\n } else {\n this.maxExecMemByte = maxExecMemByte;\n }\n }\n\n public void setMaxScanQueueMemByte(long scanQueueMemByte) {\n this.maxScanQueueMemByte = Math.min(scanQueueMemByte, maxExecMemByte / 20);\n }\n\n public boolean isSqlQuoteShowCreate() {\n return sqlQuoteShowCreate;\n }\n\n public void setSqlQuoteShowCreate(boolean sqlQuoteShowCreate) {\n this.sqlQuoteShowCreate = sqlQuoteShowCreate;\n }\n\n public void setQueryTimeoutS(int queryTimeoutS) {\n this.queryTimeoutS = queryTimeoutS;\n }\n\n public void setMaxExecutionTimeMS(int maxExecutionTimeMS) {\n this.maxExecutionTimeMS = maxExecutionTimeMS;\n this.queryTimeoutS = this.maxExecutionTimeMS / 1000;\n }\n\n public void setMaxExecutionTimeMS(String maxExecutionTimeMS) {\n this.maxExecutionTimeMS = Integer.valueOf(maxExecutionTimeMS);\n this.queryTimeoutS = this.maxExecutionTimeMS / 1000;\n }\n\n public String getWorkloadGroup() {\n return workloadGroup;\n }\n\n public void setWorkloadGroup(String workloadGroup) {\n this.workloadGroup = workloadGroup;\n }\n\n public String getResourceGroup() {\n return resourceGroup;\n }\n\n public void setResourceGroup(String resourceGroup) {\n this.resourceGroup = resourceGroup;\n }\n\n public boolean isDisableColocatePlan() {\n return disableColocatePlan;\n }\n\n public boolean enableColocateScan() {\n return enableColocateScan;\n }\n\n public boolean isEnableBucketShuffleJoin() {\n return enableBucketShuffleJoin;\n }\n\n public boolean isEnableOdbcTransaction() {\n return enableOdbcTransaction;\n }\n\n public String getPreferJoinMethod() {\n return preferJoinMethod;\n }\n\n 
public void setPreferJoinMethod(String preferJoinMethod) {\n this.preferJoinMethod = preferJoinMethod;\n }\n\n public boolean isEnableFoldConstantByBe() {\n return enableFoldConstantByBe;\n }\n\n public boolean isEnableNereidsDML() {\n return enableNereidsDML;\n }\n\n public void setEnableFoldConstantByBe(boolean foldConstantByBe) {\n this.enableFoldConstantByBe = foldConstantByBe;\n }\n\n public int getParallelExecInstanceNum() {\n if (getEnablePipelineEngine() && parallelPipelineTaskNum == 0) {\n int size = Env.getCurrentSystemInfo().getMinPipelineExecutorSize();\n int autoInstance = (size + 1) / 2;\n return Math.min(autoInstance, maxInstanceNum);\n } else if (enablePipelineEngine) {\n return parallelPipelineTaskNum;\n } else {\n return parallelExecInstanceNum;\n }\n }\n\n public int getExchangeInstanceParallel() {\n return exchangeInstanceParallel;\n }\n\n public boolean getEnableInsertStrict() {\n return enableInsertStrict;\n }\n\n public void setEnableInsertStrict(boolean enableInsertStrict) {\n this.enableInsertStrict = enableInsertStrict;\n }\n\n public boolean isEnableSqlCache() {\n return enableSqlCache;\n }\n\n public void setEnableSqlCache(boolean enableSqlCache) {\n this.enableSqlCache = enableSqlCache;\n }\n\n public boolean isEnablePartitionCache() {\n return enablePartitionCache;\n }\n\n public void setEnablePartitionCache(boolean enablePartitionCache) {\n this.enablePartitionCache = enablePartitionCache;\n }\n\n public int getPartitionedHashJoinRowsThreshold() {\n return partitionedHashJoinRowsThreshold;\n }\n\n public void setPartitionedHashJoinRowsThreshold(int threshold) {\n this.partitionedHashJoinRowsThreshold = threshold;\n }\n\n \n public boolean getForwardToMaster() {\n return forwardToMaster;\n }\n\n public boolean isUseV2Rollup() {\n return useV2Rollup;\n }\n\n \n public void setUseV2Rollup(boolean useV2Rollup) {\n this.useV2Rollup = useV2Rollup;\n }\n\n public boolean isRewriteCountDistinct() {\n return rewriteCountDistinct;\n }\n\n 
public void setRewriteCountDistinct(boolean rewriteCountDistinct) {\n this.rewriteCountDistinct = rewriteCountDistinct;\n }\n\n public String getEventScheduler() {\n return eventScheduler;\n }\n\n public void setEventScheduler(String eventScheduler) {\n this.eventScheduler = eventScheduler;\n }\n\n public String getStorageEngine() {\n return storageEngine;\n }\n\n public void setStorageEngine(String storageEngine) {\n this.storageEngine = storageEngine;\n }\n\n public int getDivPrecisionIncrement() {\n return divPrecisionIncrement;\n }\n\n public int getMaxScanKeyNum() {\n return maxScanKeyNum;\n }\n\n public void setMaxScanKeyNum(int maxScanKeyNum) {\n this.maxScanKeyNum = maxScanKeyNum;\n }\n\n public int getMaxPushdownConditionsPerColumn() {\n return maxPushdownConditionsPerColumn;\n }\n\n public void setMaxPushdownConditionsPerColumn(int maxPushdownConditionsPerColumn) {\n this.maxPushdownConditionsPerColumn = maxPushdownConditionsPerColumn;\n }\n\n public double getBroadcastRightTableScaleFactor() {\n return broadcastRightTableScaleFactor;\n }\n\n public void setBroadcastRightTableScaleFactor(double broadcastRightTableScaleFactor) {\n this.broadcastRightTableScaleFactor = broadcastRightTableScaleFactor;\n }\n\n public double getBroadcastRowCountLimit() {\n return broadcastRowCountLimit;\n }\n\n public void setBroadcastRowCountLimit(double broadcastRowCountLimit) {\n this.broadcastRowCountLimit = broadcastRowCountLimit;\n }\n\n public double getBroadcastHashtableMemLimitPercentage() {\n return broadcastHashtableMemLimitPercentage;\n }\n\n public void setBroadcastHashtableMemLimitPercentage(double broadcastHashtableMemLimitPercentage) {\n this.broadcastHashtableMemLimitPercentage = broadcastHashtableMemLimitPercentage;\n }\n\n public boolean showHiddenColumns() {\n return showHiddenColumns;\n }\n\n public void setShowHiddenColumns(boolean showHiddenColumns) {\n this.showHiddenColumns = showHiddenColumns;\n }\n\n public boolean isEnableScanRunSerial() {\n return 
enableScanRunSerial;\n }\n\n public boolean skipStorageEngineMerge() {\n return skipStorageEngineMerge;\n }\n\n public boolean skipDeleteSign() {\n return skipDeleteSign;\n }\n\n public boolean isAllowPartitionColumnNullable() {\n return allowPartitionColumnNullable;\n }\n\n public String getRuntimeFilterMode() {\n return runtimeFilterMode;\n }\n\n public void setRuntimeFilterMode(String runtimeFilterMode) {\n this.runtimeFilterMode = runtimeFilterMode;\n }\n\n public int getRuntimeBloomFilterSize() {\n return runtimeBloomFilterSize;\n }\n\n public void setRuntimeBloomFilterSize(int runtimeBloomFilterSize) {\n this.runtimeBloomFilterSize = runtimeBloomFilterSize;\n }\n\n public int getRuntimeBloomFilterMinSize() {\n return runtimeBloomFilterMinSize;\n }\n\n public void setRuntimeBloomFilterMinSize(int runtimeBloomFilterMinSize) {\n this.runtimeBloomFilterMinSize = runtimeBloomFilterMinSize;\n }\n\n public int getRuntimeBloomFilterMaxSize() {\n return runtimeBloomFilterMaxSize;\n }\n\n public void setRuntimeBloomFilterMaxSize(int runtimeBloomFilterMaxSize) {\n this.runtimeBloomFilterMaxSize = runtimeBloomFilterMaxSize;\n }\n\n public int getRuntimeFilterWaitTimeMs() {\n return runtimeFilterWaitTimeMs;\n }\n\n public void setRuntimeFilterWaitTimeMs(int runtimeFilterWaitTimeMs) {\n this.runtimeFilterWaitTimeMs = runtimeFilterWaitTimeMs;\n }\n\n public int getRuntimeFiltersMaxNum() {\n return runtimeFiltersMaxNum;\n }\n\n public void setRuntimeFiltersMaxNum(int runtimeFiltersMaxNum) {\n this.runtimeFiltersMaxNum = runtimeFiltersMaxNum;\n }\n\n public int getRuntimeFilterType() {\n return runtimeFilterType;\n }\n\n public boolean isRuntimeFilterTypeEnabled(TRuntimeFilterType type) {\n return (runtimeFilterType & type.getValue()) == type.getValue();\n }\n\n public void setRuntimeFilterType(int runtimeFilterType) {\n this.runtimeFilterType = runtimeFilterType;\n }\n\n public int getRuntimeFilterMaxInNum() {\n return runtimeFilterMaxInNum;\n }\n\n public void 
setRuntimeFilterMaxInNum(int runtimeFilterMaxInNum) {\n this.runtimeFilterMaxInNum = runtimeFilterMaxInNum;\n }\n\n public void setEnablePipelineEngine(boolean enablePipelineEngine) {\n this.enablePipelineEngine = enablePipelineEngine;\n }\n\n public void setEnablePipelineXEngine(boolean enablePipelineXEngine) {\n this.enablePipelineXEngine = enablePipelineXEngine;\n }\n\n public boolean enablePushDownNoGroupAgg() {\n return enablePushDownNoGroupAgg;\n }\n\n public boolean getEnableFunctionPushdown() {\n return this.enableFunctionPushdown;\n }\n\n public boolean getForbidUnknownColStats() {\n return forbidUnknownColStats;\n }\n\n public void setForbidUnownColStats(boolean forbid) {\n forbidUnknownColStats = forbid;\n }\n\n public boolean getEnableLocalExchange() {\n return enableLocalExchange;\n }\n\n public boolean getEnableCboStatistics() {\n return enableCboStatistics;\n }\n\n public long getFileSplitSize() {\n return fileSplitSize;\n }\n\n public void setFileSplitSize(long fileSplitSize) {\n this.fileSplitSize = fileSplitSize;\n }\n\n public boolean isEnableParquetLazyMat() {\n return enableParquetLazyMat;\n }\n\n public void setEnableParquetLazyMat(boolean enableParquetLazyMat) {\n this.enableParquetLazyMat = enableParquetLazyMat;\n }\n\n public boolean isEnableOrcLazyMat() {\n return enableOrcLazyMat;\n }\n\n public void setEnableOrcLazyMat(boolean enableOrcLazyMat) {\n this.enableOrcLazyMat = enableOrcLazyMat;\n }\n\n\n /**\n * getInsertVisibleTimeoutMs.\n **/\n public long getInsertVisibleTimeoutMs() {\n if (insertVisibleTimeoutMs < MIN_INSERT_VISIBLE_TIMEOUT_MS) {\n return MIN_INSERT_VISIBLE_TIMEOUT_MS;\n } else {\n return insertVisibleTimeoutMs;\n }\n }\n\n /**\n * setInsertVisibleTimeoutMs.\n **/\n public void setInsertVisibleTimeoutMs(long insertVisibleTimeoutMs) {\n if (insertVisibleTimeoutMs < MIN_INSERT_VISIBLE_TIMEOUT_MS) {\n this.insertVisibleTimeoutMs = MIN_INSERT_VISIBLE_TIMEOUT_MS;\n } else {\n this.insertVisibleTimeoutMs = 
insertVisibleTimeoutMs;\n }\n }\n\n public boolean getIsSingleSetVar() {\n return isSingleSetVar;\n }\n\n public void setIsSingleSetVar(boolean issinglesetvar) {\n this.isSingleSetVar = issinglesetvar;\n }\n\n public Map getSessionOriginValue() {\n return sessionOriginValue;\n }\n\n public void addSessionOriginValue(Field key, String value) {\n if (sessionOriginValue.containsKey(key)) {\n \n return;\n }\n sessionOriginValue.put(key, value);\n }\n\n public void clearSessionOriginValue() {\n sessionOriginValue.clear();\n }\n\n public boolean isDeleteWithoutPartition() {\n return deleteWithoutPartition;\n }\n\n public boolean isExtractWideRangeExpr() {\n return extractWideRangeExpr;\n }\n\n public boolean isGroupByAndHavingUseAliasFirst() {\n return groupByAndHavingUseAliasFirst;\n }\n\n public int getCpuResourceLimit() {\n return cpuResourceLimit;\n }\n\n public int getSendBatchParallelism() {\n return sendBatchParallelism;\n }\n\n public boolean isEnableParallelOutfile() {\n return enableParallelOutfile;\n }\n\n public boolean isDisableJoinReorder() {\n return disableJoinReorder;\n }\n\n public boolean isEnableBushyTree() {\n return enableBushyTree;\n }\n\n public void setEnableBushyTree(boolean enableBushyTree) {\n this.enableBushyTree = enableBushyTree;\n }\n\n public boolean isEnablePartitionTopN() {\n return enablePartitionTopN;\n }\n\n public void setEnablePartitionTopN(boolean enablePartitionTopN) {\n this.enablePartitionTopN = enablePartitionTopN;\n }\n\n public boolean isEnableFoldNondeterministicFn() {\n return enableFoldNondeterministicFn;\n }\n\n public void setEnableFoldNondeterministicFn(boolean enableFoldNondeterministicFn) {\n this.enableFoldNondeterministicFn = enableFoldNondeterministicFn;\n }\n\n public boolean isReturnObjectDataAsBinary() {\n return returnObjectDataAsBinary;\n }\n\n public void setReturnObjectDataAsBinary(boolean returnObjectDataAsBinary) {\n this.returnObjectDataAsBinary = returnObjectDataAsBinary;\n }\n\n public boolean 
isEnableInferPredicate() {\n return enableInferPredicate;\n }\n\n public void setEnableInferPredicate(boolean enableInferPredicate) {\n this.enableInferPredicate = enableInferPredicate;\n }\n\n public boolean isEnableProjection() {\n return enableProjection;\n }\n\n public boolean checkOverflowForDecimal() {\n return checkOverflowForDecimal;\n }\n\n public boolean isTrimTailingSpacesForExternalTableQuery() {\n return trimTailingSpacesForExternalTableQuery;\n }\n\n public void setTrimTailingSpacesForExternalTableQuery(boolean trimTailingSpacesForExternalTableQuery) {\n this.trimTailingSpacesForExternalTableQuery = trimTailingSpacesForExternalTableQuery;\n }\n\n public void setEnableJoinReorderBasedCost(boolean enableJoinReorderBasedCost) {\n this.enableJoinReorderBasedCost = enableJoinReorderBasedCost;\n }\n\n public void setDisableJoinReorder(boolean disableJoinReorder) {\n this.disableJoinReorder = disableJoinReorder;\n }\n\n /**\n * Nereids only support vectorized engine.\n *\n * @return true if both nereids and vectorized engine are enabled\n */\n public boolean isEnableNereidsPlanner() {\n return enableNereidsPlanner;\n }\n\n public void setEnableNereidsPlanner(boolean enableNereidsPlanner) {\n this.enableNereidsPlanner = enableNereidsPlanner;\n }\n\n public int getNthOptimizedPlan() {\n return nthOptimizedPlan;\n }\n\n public Set getDisableNereidsRuleNames() {\n return Arrays.stream(disableNereidsRules.split(\",[\\\\s]*\"))\n .map(rule -> rule.toUpperCase(Locale.ROOT))\n .collect(ImmutableSet.toImmutableSet());\n }\n\n public Set getDisableNereidsRules() {\n return Arrays.stream(disableNereidsRules.split(\",[\\\\s]*\"))\n .filter(rule -> !rule.isEmpty())\n .map(rule -> rule.toUpperCase(Locale.ROOT))\n .map(rule -> RuleType.valueOf(rule).type())\n .collect(ImmutableSet.toImmutableSet());\n }\n\n public void setEnableNewCostModel(boolean enable) {\n this.enableNewCostModel = enable;\n }\n\n public boolean getEnableNewCostModel() {\n return 
this.enableNewCostModel;\n }\n\n public void setDisableNereidsRules(String disableNereidsRules) {\n this.disableNereidsRules = disableNereidsRules;\n }\n\n public double getNereidsCboPenaltyFactor() {\n return nereidsCboPenaltyFactor;\n }\n\n public void setNereidsCboPenaltyFactor(double penaltyFactor) {\n this.nereidsCboPenaltyFactor = penaltyFactor;\n }\n\n public boolean isEnableNereidsTrace() {\n return isEnableNereidsPlanner() && enableNereidsTrace;\n }\n\n public boolean isEnableSingleReplicaInsert() {\n return enableSingleReplicaInsert;\n }\n\n public void setEnableSingleReplicaInsert(boolean enableSingleReplicaInsert) {\n this.enableSingleReplicaInsert = enableSingleReplicaInsert;\n }\n\n public boolean isEnableRuntimeFilterPrune() {\n return enableRuntimeFilterPrune;\n }\n\n public void setEnableRuntimeFilterPrune(boolean enableRuntimeFilterPrune) {\n this.enableRuntimeFilterPrune = enableRuntimeFilterPrune;\n }\n\n public void setFragmentTransmissionCompressionCodec(String codec) {\n this.fragmentTransmissionCompressionCodec = codec;\n }\n\n public boolean isEnableUnicodeNameSupport() {\n return enableUnicodeNameSupport;\n }\n\n public void setEnableUnicodeNameSupport(boolean enableUnicodeNameSupport) {\n this.enableUnicodeNameSupport = enableUnicodeNameSupport;\n }\n\n public boolean isDropTableIfCtasFailed() {\n return dropTableIfCtasFailed;\n }\n\n public void checkExternalSortBytesThreshold(String externalSortBytesThreshold) {\n long value = Long.valueOf(externalSortBytesThreshold);\n if (value > 0 && value < MIN_EXTERNAL_SORT_BYTES_THRESHOLD) {\n LOG.warn(\"external sort bytes threshold: {}, min: {}\", value, MIN_EXTERNAL_SORT_BYTES_THRESHOLD);\n throw new UnsupportedOperationException(\"minimum value is \" + MIN_EXTERNAL_SORT_BYTES_THRESHOLD);\n }\n }\n\n public void checkExternalAggBytesThreshold(String externalAggBytesThreshold) {\n long value = Long.valueOf(externalAggBytesThreshold);\n if (value > 0 && value < MIN_EXTERNAL_AGG_BYTES_THRESHOLD) 
{\n LOG.warn(\"external agg bytes threshold: {}, min: {}\", value, MIN_EXTERNAL_AGG_BYTES_THRESHOLD);\n throw new UnsupportedOperationException(\"minimum value is \" + MIN_EXTERNAL_AGG_BYTES_THRESHOLD);\n }\n }\n\n public void checkExternalAggPartitionBits(String externalAggPartitionBits) {\n int value = Integer.valueOf(externalAggPartitionBits);\n if (value < MIN_EXTERNAL_AGG_PARTITION_BITS || value > MAX_EXTERNAL_AGG_PARTITION_BITS) {\n LOG.warn(\"external agg bytes threshold: {}, min: {}, max: {}\",\n value, MIN_EXTERNAL_AGG_PARTITION_BITS, MAX_EXTERNAL_AGG_PARTITION_BITS);\n throw new UnsupportedOperationException(\"min value is \" + MIN_EXTERNAL_AGG_PARTITION_BITS + \" max value is \"\n + MAX_EXTERNAL_AGG_PARTITION_BITS);\n }\n }\n\n public boolean isEnableFileCache() {\n return enableFileCache;\n }\n\n public void setEnableFileCache(boolean enableFileCache) {\n this.enableFileCache = enableFileCache;\n }\n\n public String getFileCacheBasePath() {\n return fileCacheBasePath;\n }\n\n public void setFileCacheBasePath(String basePath) {\n this.fileCacheBasePath = basePath;\n }\n\n public boolean isEnableInvertedIndexQuery() {\n return enableInvertedIndexQuery;\n }\n\n public void setEnableInvertedIndexQuery(boolean enableInvertedIndexQuery) {\n this.enableInvertedIndexQuery = enableInvertedIndexQuery;\n }\n\n public boolean isEnablePushDownCountOnIndex() {\n return enablePushDownCountOnIndex;\n }\n\n public void setEnablePushDownCountOnIndex(boolean enablePushDownCountOnIndex) {\n this.enablePushDownCountOnIndex = enablePushDownCountOnIndex;\n }\n\n public int getMaxTableCountUseCascadesJoinReorder() {\n return this.maxTableCountUseCascadesJoinReorder;\n }\n\n public void setMaxTableCountUseCascadesJoinReorder(int maxTableCountUseCascadesJoinReorder) {\n this.maxTableCountUseCascadesJoinReorder =\n maxTableCountUseCascadesJoinReorder < MIN_JOIN_REORDER_TABLE_COUNT\n ? 
MIN_JOIN_REORDER_TABLE_COUNT\n : maxTableCountUseCascadesJoinReorder;\n }\n\n public boolean isShowUserDefaultRole() {\n return showUserDefaultRole;\n }\n\n public int getExternalTableAnalyzePartNum() {\n return externalTableAnalyzePartNum;\n }\n\n public boolean isTruncateCharOrVarcharColumns() {\n return truncateCharOrVarcharColumns;\n }\n\n public void setTruncateCharOrVarcharColumns(boolean truncateCharOrVarcharColumns) {\n this.truncateCharOrVarcharColumns = truncateCharOrVarcharColumns;\n }\n\n /**\n * Serialize to thrift object.\n * Used for rest api.\n */\n public TQueryOptions toThrift() {\n TQueryOptions tResult = new TQueryOptions();\n tResult.setMemLimit(maxExecMemByte);\n tResult.setScanQueueMemLimit(Math.min(maxScanQueueMemByte, maxExecMemByte / 20));\n\n \n tResult.setMinReservation(0);\n tResult.setMaxReservation(maxExecMemByte);\n tResult.setInitialReservationTotalClaims(maxExecMemByte);\n tResult.setBufferPoolLimit(maxExecMemByte);\n\n tResult.setQueryTimeout(queryTimeoutS);\n tResult.setIsReportSuccess(enableProfile);\n tResult.setCodegenLevel(codegenLevel);\n tResult.setBeExecVersion(Config.be_exec_version);\n tResult.setEnablePipelineEngine(enablePipelineEngine);\n tResult.setEnablePipelineXEngine(enablePipelineXEngine);\n tResult.setParallelInstance(getParallelExecInstanceNum());\n tResult.setReturnObjectDataAsBinary(returnObjectDataAsBinary);\n tResult.setTrimTailingSpacesForExternalTableQuery(trimTailingSpacesForExternalTableQuery);\n tResult.setEnableShareHashTableForBroadcastJoin(enableShareHashTableForBroadcastJoin);\n tResult.setEnableHashJoinEarlyStartProbe(enableHashJoinEarlyStartProbe);\n\n tResult.setBatchSize(batchSize);\n tResult.setDisableStreamPreaggregations(disableStreamPreaggregations);\n\n if (maxScanKeyNum > -1) {\n tResult.setMaxScanKeyNum(maxScanKeyNum);\n }\n if (maxPushdownConditionsPerColumn > -1) {\n tResult.setMaxPushdownConditionsPerColumn(maxPushdownConditionsPerColumn);\n }\n\n 
tResult.setEnableSpilling(enableSpilling);\n tResult.setEnableEnableExchangeNodeParallelMerge(enableExchangeNodeParallelMerge);\n\n tResult.setRuntimeFilterWaitTimeMs(runtimeFilterWaitTimeMs);\n tResult.setRuntimeFilterMaxInNum(runtimeFilterMaxInNum);\n\n if (cpuResourceLimit > 0) {\n TResourceLimit resourceLimit = new TResourceLimit();\n resourceLimit.setCpuLimit(cpuResourceLimit);\n tResult.setResourceLimit(resourceLimit);\n }\n\n tResult.setEnableFunctionPushdown(enableFunctionPushdown);\n tResult.setEnableCommonExprPushdown(enableCommonExprPushdown);\n tResult.setCheckOverflowForDecimal(checkOverflowForDecimal);\n tResult.setFragmentTransmissionCompressionCodec(fragmentTransmissionCompressionCodec);\n tResult.setEnableLocalExchange(enableLocalExchange);\n\n tResult.setSkipStorageEngineMerge(skipStorageEngineMerge);\n\n tResult.setSkipDeletePredicate(skipDeletePredicate);\n\n tResult.setSkipDeleteBitmap(skipDeleteBitmap);\n\n tResult.setPartitionedHashJoinRowsThreshold(partitionedHashJoinRowsThreshold);\n tResult.setPartitionedHashAggRowsThreshold(partitionedHashAggRowsThreshold);\n\n tResult.setRepeatMaxNum(repeatMaxNum);\n\n tResult.setExternalSortBytesThreshold(externalSortBytesThreshold);\n\n tResult.setExternalAggBytesThreshold(externalAggBytesThreshold);\n\n tResult.setExternalAggPartitionBits(externalAggPartitionBits);\n\n tResult.setEnableFileCache(enableFileCache);\n\n tResult.setFileCacheBasePath(fileCacheBasePath);\n\n tResult.setEnableInvertedIndexQuery(enableInvertedIndexQuery);\n\n if (dryRunQuery) {\n tResult.setDryRunQuery(true);\n }\n\n tResult.setEnableParquetLazyMat(enableParquetLazyMat);\n tResult.setEnableOrcLazyMat(enableOrcLazyMat);\n\n tResult.setEnableDeleteSubPredicateV2(enableDeleteSubPredicateV2);\n tResult.setTruncateCharOrVarcharColumns(truncateCharOrVarcharColumns);\n tResult.setEnableMemtableOnSinkNode(enableMemtableOnSinkNode);\n\n return tResult;\n }\n\n public JSONObject toJson() throws IOException {\n JSONObject root = new 
JSONObject();\n try {\n for (Field field : SessionVariable.class.getDeclaredFields()) {\n VarAttr attr = field.getAnnotation(VarAttr.class);\n if (attr == null) {\n continue;\n }\n switch (field.getType().getSimpleName()) {\n case \"boolean\":\n root.put(attr.name(), (Boolean) field.get(this));\n break;\n case \"int\":\n root.put(attr.name(), (Integer) field.get(this));\n break;\n case \"long\":\n root.put(attr.name(), (Long) field.get(this));\n break;\n case \"float\":\n root.put(attr.name(), (Float) field.get(this));\n break;\n case \"double\":\n root.put(attr.name(), (Double) field.get(this));\n break;\n case \"String\":\n root.put(attr.name(), (String) field.get(this));\n break;\n default:\n \n throw new IOException(\"invalid type: \" + field.getType().getSimpleName());\n }\n }\n } catch (Exception e) {\n throw new IOException(\"failed to write session variable: \" + e.getMessage());\n }\n return root;\n }\n\n @Override\n public void write(DataOutput out) throws IOException {\n JSONObject root = toJson();\n Text.writeString(out, root.toString());\n }\n\n\n public void readFields(DataInput in) throws IOException {\n String json = Text.readString(in);\n readFromJson(json);\n }\n\n public void readFromJson(String json) throws IOException {\n JSONObject root = (JSONObject) JSONValue.parse(json);\n try {\n for (Field field : SessionVariable.class.getDeclaredFields()) {\n VarAttr attr = field.getAnnotation(VarAttr.class);\n if (attr == null) {\n continue;\n }\n\n if (!root.containsKey(attr.name())) {\n continue;\n }\n\n switch (field.getType().getSimpleName()) {\n case \"boolean\":\n field.set(this, root.get(attr.name()));\n break;\n case \"int\":\n \n field.set(this, Integer.valueOf(root.get(attr.name()).toString()));\n break;\n case \"long\":\n field.set(this, (Long) root.get(attr.name()));\n break;\n case \"float\":\n field.set(this, root.get(attr.name()));\n break;\n case \"double\":\n field.set(this, root.get(attr.name()));\n break;\n case \"String\":\n 
field.set(this, root.get(attr.name()));\n break;\n default:\n \n throw new IOException(\"invalid type: \" + field.getType().getSimpleName());\n }\n }\n } catch (Exception e) {\n throw new IOException(\"failed to read session variable: \" + e.getMessage());\n }\n }\n\n /**\n * Get all variables which need to forward along with statement.\n **/\n public Map getForwardVariables() {\n HashMap map = new HashMap();\n try {\n Field[] fields = SessionVariable.class.getDeclaredFields();\n for (Field f : fields) {\n VarAttr varAttr = f.getAnnotation(VarAttr.class);\n if (varAttr == null || !varAttr.needForward()) {\n continue;\n }\n map.put(varAttr.name(), String.valueOf(f.get(this)));\n }\n } catch (IllegalAccessException e) {\n LOG.error(\"failed to get forward variables\", e);\n }\n return map;\n }\n\n /**\n * Set forwardedSessionVariables for variables.\n **/\n public void setForwardedSessionVariables(Map variables) {\n try {\n Field[] fields = SessionVariable.class.getFields();\n for (Field f : fields) {\n VarAttr varAttr = f.getAnnotation(VarAttr.class);\n if (varAttr == null || !varAttr.needForward()) {\n continue;\n }\n String val = variables.get(varAttr.name());\n if (val == null) {\n continue;\n }\n\n LOG.debug(\"set forward variable: {} = {}\", varAttr.name(), val);\n\n \n switch (f.getType().getSimpleName()) {\n case \"short\":\n f.setShort(this, Short.parseShort(val));\n break;\n case \"int\":\n f.setInt(this, Integer.parseInt(val));\n break;\n case \"long\":\n f.setLong(this, Long.parseLong(val));\n break;\n case \"double\":\n f.setDouble(this, Double.parseDouble(val));\n break;\n case \"boolean\":\n f.setBoolean(this, Boolean.parseBoolean(val));\n break;\n case \"String\":\n f.set(this, val);\n break;\n default:\n throw new IllegalArgumentException(\"Unknown field type: \" + f.getType().getSimpleName());\n }\n }\n } catch (IllegalAccessException e) {\n LOG.error(\"failed to set forward variables\", e);\n }\n }\n\n /**\n * Set forwardedSessionVariables for 
queryOptions.\n **/\n public void setForwardedSessionVariables(TQueryOptions queryOptions) {\n if (queryOptions.isSetMemLimit()) {\n setMaxExecMemByte(queryOptions.getMemLimit());\n }\n if (queryOptions.isSetQueryTimeout()) {\n setQueryTimeoutS(queryOptions.getQueryTimeout());\n }\n if (queryOptions.isSetInsertTimeout()) {\n setInsertTimeoutS(queryOptions.getInsertTimeout());\n }\n }\n\n /**\n * Get all variables which need to be set in TQueryOptions.\n **/\n public TQueryOptions getQueryOptionVariables() {\n TQueryOptions queryOptions = new TQueryOptions();\n queryOptions.setMemLimit(maxExecMemByte);\n queryOptions.setScanQueueMemLimit(Math.min(maxScanQueueMemByte, maxExecMemByte / 20));\n queryOptions.setQueryTimeout(queryTimeoutS);\n queryOptions.setInsertTimeout(insertTimeoutS);\n return queryOptions;\n }\n\n /**\n * The sessionContext is as follows:\n * \"k1:v1;k2:v2;...\"\n * Here we want to get value with key named \"trace_id\",\n * Return empty string is not found.\n *\n * @return\n */\n public String getTraceId() {\n if (Strings.isNullOrEmpty(sessionContext)) {\n return \"\";\n }\n String[] parts = sessionContext.split(\";\");\n for (String part : parts) {\n String[] innerParts = part.split(\":\");\n if (innerParts.length != 2) {\n continue;\n }\n if (innerParts[0].equals(\"trace_id\")) {\n return innerParts[1];\n }\n }\n return \"\";\n }\n\n public boolean isEnableMinidump() {\n return enableMinidump;\n }\n\n public void setEnableMinidump(boolean enableMinidump) {\n this.enableMinidump = enableMinidump;\n }\n\n public String getMinidumpPath() {\n return minidumpPath;\n }\n\n public void setMinidumpPath(String minidumpPath) {\n this.minidumpPath = minidumpPath;\n }\n\n public boolean isTraceNereids() {\n return traceNereids;\n }\n\n public void setTraceNereids(boolean traceNereids) {\n this.traceNereids = traceNereids;\n }\n\n public boolean isPlayNereidsDump() {\n return planNereidsDump;\n }\n\n public void setPlanNereidsDump(boolean planNereidsDump) {\n 
this.planNereidsDump = planNereidsDump;\n }\n\n public boolean isDumpNereidsMemo() {\n return dumpNereidsMemo;\n }\n\n public void setDumpNereidsMemo(boolean dumpNereidsMemo) {\n this.dumpNereidsMemo = dumpNereidsMemo;\n }\n\n public void enableFallbackToOriginalPlannerOnce() throws DdlException {\n if (enableFallbackToOriginalPlanner) {\n return;\n }\n setIsSingleSetVar(true);\n VariableMgr.setVar(this,\n new SetVar(SessionVariable.ENABLE_FALLBACK_TO_ORIGINAL_PLANNER, new StringLiteral(\"true\")));\n }\n\n public void disableNereidsPlannerOnce() throws DdlException {\n if (!enableNereidsPlanner) {\n return;\n }\n setIsSingleSetVar(true);\n VariableMgr.setVar(this, new SetVar(SessionVariable.ENABLE_NEREIDS_PLANNER, new StringLiteral(\"false\")));\n }\n\n \n public int getVariableNumByVariableAnnotation(VariableAnnotation type) {\n int num = 0;\n Field[] fields = SessionVariable.class.getDeclaredFields();\n for (Field f : fields) {\n VarAttr varAttr = f.getAnnotation(VarAttr.class);\n if (varAttr == null) {\n continue;\n }\n if (varAttr.varType() == type) {\n ++num;\n }\n }\n return num;\n }\n\n public boolean isEnableUnifiedLoad() {\n return enableUnifiedLoad;\n }\n\n public boolean getEnablePipelineEngine() {\n return enablePipelineEngine || enablePipelineXEngine;\n }\n\n public boolean getEnablePipelineXEngine() {\n return enablePipelineXEngine;\n }\n\n public static boolean enablePipelineEngine() {\n ConnectContext connectContext = ConnectContext.get();\n if (connectContext == null) {\n return false;\n }\n return connectContext.getSessionVariable().enablePipelineEngine\n || connectContext.getSessionVariable().enablePipelineXEngine;\n }\n\n public static boolean enableAggState() {\n ConnectContext connectContext = ConnectContext.get();\n if (connectContext == null) {\n return true;\n }\n return connectContext.getSessionVariable().enableAggState;\n }\n}", "context_after": "class SessionVariable implements Serializable, Writable {\n public static final Logger LOG = 
LogManager.getLogger(SessionVariable.class);\n\n public static final String EXEC_MEM_LIMIT = \"exec_mem_limit\";\n public static final String SCAN_QUEUE_MEM_LIMIT = \"scan_queue_mem_limit\";\n public static final String QUERY_TIMEOUT = \"query_timeout\";\n\n public static final String MAX_EXECUTION_TIME = \"max_execution_time\";\n public static final String INSERT_TIMEOUT = \"insert_timeout\";\n public static final String ENABLE_PROFILE = \"enable_profile\";\n public static final String SQL_MODE = \"sql_mode\";\n public static final String WORKLOAD_VARIABLE = \"workload_group\";\n public static final String RESOURCE_VARIABLE = \"resource_group\";\n public static final String AUTO_COMMIT = \"autocommit\";\n public static final String TX_ISOLATION = \"tx_isolation\";\n public static final String TX_READ_ONLY = \"tx_read_only\";\n public static final String TRANSACTION_READ_ONLY = \"transaction_read_only\";\n public static final String TRANSACTION_ISOLATION = \"transaction_isolation\";\n public static final String CHARACTER_SET_CLIENT = \"character_set_client\";\n public static final String CHARACTER_SET_CONNNECTION = \"character_set_connection\";\n public static final String CHARACTER_SET_RESULTS = \"character_set_results\";\n public static final String CHARACTER_SET_SERVER = \"character_set_server\";\n public static final String COLLATION_CONNECTION = \"collation_connection\";\n public static final String COLLATION_DATABASE = \"collation_database\";\n public static final String COLLATION_SERVER = \"collation_server\";\n public static final String SQL_AUTO_IS_NULL = \"SQL_AUTO_IS_NULL\";\n public static final String SQL_SELECT_LIMIT = \"sql_select_limit\";\n public static final String MAX_ALLOWED_PACKET = \"max_allowed_packet\";\n public static final String AUTO_INCREMENT_INCREMENT = \"auto_increment_increment\";\n public static final String QUERY_CACHE_TYPE = \"query_cache_type\";\n public static final String INTERACTIVE_TIMTOUT = \"interactive_timeout\";\n public 
static final String WAIT_TIMEOUT = \"wait_timeout\";\n public static final String NET_WRITE_TIMEOUT = \"net_write_timeout\";\n public static final String NET_READ_TIMEOUT = \"net_read_timeout\";\n public static final String TIME_ZONE = \"time_zone\";\n public static final String SQL_SAFE_UPDATES = \"sql_safe_updates\";\n public static final String NET_BUFFER_LENGTH = \"net_buffer_length\";\n public static final String CODEGEN_LEVEL = \"codegen_level\";\n public static final String HAVE_QUERY_CACHE = \"have_query_cache\";\n \n public static final int MIN_EXEC_MEM_LIMIT = 2097152;\n public static final String BATCH_SIZE = \"batch_size\";\n public static final String DISABLE_STREAMING_PREAGGREGATIONS = \"disable_streaming_preaggregations\";\n public static final String DISABLE_COLOCATE_PLAN = \"disable_colocate_plan\";\n public static final String ENABLE_COLOCATE_SCAN = \"enable_colocate_scan\";\n public static final String ENABLE_BUCKET_SHUFFLE_JOIN = \"enable_bucket_shuffle_join\";\n public static final String PARALLEL_FRAGMENT_EXEC_INSTANCE_NUM = \"parallel_fragment_exec_instance_num\";\n public static final String PARALLEL_PIPELINE_TASK_NUM = \"parallel_pipeline_task_num\";\n public static final String MAX_INSTANCE_NUM = \"max_instance_num\";\n public static final String ENABLE_INSERT_STRICT = \"enable_insert_strict\";\n public static final String ENABLE_SPILLING = \"enable_spilling\";\n public static final String ENABLE_EXCHANGE_NODE_PARALLEL_MERGE = \"enable_exchange_node_parallel_merge\";\n public static final String PREFER_JOIN_METHOD = \"prefer_join_method\";\n\n public static final String ENABLE_FOLD_CONSTANT_BY_BE = \"enable_fold_constant_by_be\";\n public static final String ENABLE_ODBC_TRANSCATION = \"enable_odbc_transcation\";\n public static final String ENABLE_SQL_CACHE = \"enable_sql_cache\";\n public static final String ENABLE_PARTITION_CACHE = \"enable_partition_cache\";\n\n public static final String ENABLE_COST_BASED_JOIN_REORDER = 
\"enable_cost_based_join_reorder\";\n\n \n public static final String FORWARD_TO_MASTER = \"forward_to_master\";\n \n public static final String PARALLEL_EXCHANGE_INSTANCE_NUM = \"parallel_exchange_instance_num\";\n public static final String SHOW_HIDDEN_COLUMNS = \"show_hidden_columns\";\n public static final String USE_V2_ROLLUP = \"use_v2_rollup\";\n public static final String REWRITE_COUNT_DISTINCT_TO_BITMAP_HLL = \"rewrite_count_distinct_to_bitmap_hll\";\n public static final String EVENT_SCHEDULER = \"event_scheduler\";\n public static final String STORAGE_ENGINE = \"storage_engine\";\n \n public static final String DEFAULT_STORAGE_ENGINE = \"default_storage_engine\";\n public static final String DEFAULT_TMP_STORAGE_ENGINE = \"default_tmp_storage_engine\";\n\n \n public static final String PROFILLING = \"profiling\";\n\n public static final String DIV_PRECISION_INCREMENT = \"div_precision_increment\";\n\n \n public static final String MAX_SCAN_KEY_NUM = \"max_scan_key_num\";\n public static final String MAX_PUSHDOWN_CONDITIONS_PER_COLUMN = \"max_pushdown_conditions_per_column\";\n\n \n public static final String ALLOW_PARTITION_COLUMN_NULLABLE = \"allow_partition_column_nullable\";\n\n \n public static final String RUNTIME_FILTER_MODE = \"runtime_filter_mode\";\n \n \n public static final String RUNTIME_BLOOM_FILTER_SIZE = \"runtime_bloom_filter_size\";\n \n public static final String RUNTIME_BLOOM_FILTER_MIN_SIZE = \"runtime_bloom_filter_min_size\";\n \n public static final String RUNTIME_BLOOM_FILTER_MAX_SIZE = \"runtime_bloom_filter_max_size\";\n public static final String USE_RF_DEFAULT = \"use_rf_default\";\n \n public static final String RUNTIME_FILTER_WAIT_TIME_MS = \"runtime_filter_wait_time_ms\";\n \n public static final String RUNTIME_FILTERS_MAX_NUM = \"runtime_filters_max_num\";\n \n public static final String RUNTIME_FILTER_TYPE = \"runtime_filter_type\";\n \n public static final String RUNTIME_FILTER_MAX_IN_NUM = 
\"runtime_filter_max_in_num\";\n\n public static final String BE_NUMBER_FOR_TEST = \"be_number_for_test\";\n\n \n public static final String INSERT_VISIBLE_TIMEOUT_MS = \"insert_visible_timeout_ms\";\n\n public static final String DELETE_WITHOUT_PARTITION = \"delete_without_partition\";\n\n \n \n \n public static final String SEND_BATCH_PARALLELISM = \"send_batch_parallelism\";\n\n \n public static final String DISABLE_JOIN_REORDER = \"disable_join_reorder\";\n\n public static final String MAX_JOIN_NUMBER_OF_REORDER = \"max_join_number_of_reorder\";\n\n public static final String ENABLE_NEREIDS_DML = \"enable_nereids_dml\";\n public static final String ENABLE_STRICT_CONSISTENCY_DML = \"enable_strict_consistency_dml\";\n\n public static final String ENABLE_BUSHY_TREE = \"enable_bushy_tree\";\n\n public static final String MAX_JOIN_NUMBER_BUSHY_TREE = \"max_join_number_bushy_tree\";\n public static final String ENABLE_PARTITION_TOPN = \"enable_partition_topn\";\n\n public static final String ENABLE_INFER_PREDICATE = \"enable_infer_predicate\";\n\n public static final long DEFAULT_INSERT_VISIBLE_TIMEOUT_MS = 10_000;\n\n public static final String ENABLE_VECTORIZED_ENGINE = \"enable_vectorized_engine\";\n\n public static final String EXTRACT_WIDE_RANGE_EXPR = \"extract_wide_range_expr\";\n\n \n public static final long MIN_INSERT_VISIBLE_TIMEOUT_MS = 1000;\n\n public static final String ENABLE_PIPELINE_ENGINE = \"enable_pipeline_engine\";\n\n public static final String ENABLE_PIPELINE_X_ENGINE = \"enable_pipeline_x_engine\";\n\n public static final String ENABLE_AGG_STATE = \"enable_agg_state\";\n\n public static final String ENABLE_RPC_OPT_FOR_PIPELINE = \"enable_rpc_opt_for_pipeline\";\n\n public static final String ENABLE_SINGLE_DISTINCT_COLUMN_OPT = \"enable_single_distinct_column_opt\";\n\n public static final String CPU_RESOURCE_LIMIT = \"cpu_resource_limit\";\n\n public static final String ENABLE_PARALLEL_OUTFILE = \"enable_parallel_outfile\";\n\n public static 
final String SQL_QUOTE_SHOW_CREATE = \"sql_quote_show_create\";\n\n public static final String RETURN_OBJECT_DATA_AS_BINARY = \"return_object_data_as_binary\";\n\n public static final String BLOCK_ENCRYPTION_MODE = \"block_encryption_mode\";\n\n public static final String AUTO_BROADCAST_JOIN_THRESHOLD = \"auto_broadcast_join_threshold\";\n\n public static final String ENABLE_PROJECTION = \"enable_projection\";\n\n public static final String CHECK_OVERFLOW_FOR_DECIMAL = \"check_overflow_for_decimal\";\n\n public static final String TRIM_TAILING_SPACES_FOR_EXTERNAL_TABLE_QUERY\n = \"trim_tailing_spaces_for_external_table_query\";\n\n public static final String ENABLE_DPHYP_OPTIMIZER = \"enable_dphyp_optimizer\";\n\n public static final String NTH_OPTIMIZED_PLAN = \"nth_optimized_plan\";\n\n public static final String ENABLE_NEREIDS_PLANNER = \"enable_nereids_planner\";\n public static final String DISABLE_NEREIDS_RULES = \"disable_nereids_rules\";\n public static final String ENABLE_NEW_COST_MODEL = \"enable_new_cost_model\";\n public static final String ENABLE_FALLBACK_TO_ORIGINAL_PLANNER = \"enable_fallback_to_original_planner\";\n public static final String ENABLE_NEREIDS_TIMEOUT = \"enable_nereids_timeout\";\n\n public static final String FORBID_UNKNOWN_COLUMN_STATS = \"forbid_unknown_col_stats\";\n public static final String BROADCAST_RIGHT_TABLE_SCALE_FACTOR = \"broadcast_right_table_scale_factor\";\n public static final String BROADCAST_ROW_COUNT_LIMIT = \"broadcast_row_count_limit\";\n\n \n public static final String BROADCAST_HASHTABLE_MEM_LIMIT_PERCENTAGE = \"broadcast_hashtable_mem_limit_percentage\";\n\n public static final String REWRITE_OR_TO_IN_PREDICATE_THRESHOLD = \"rewrite_or_to_in_predicate_threshold\";\n\n public static final String NEREIDS_STAR_SCHEMA_SUPPORT = \"nereids_star_schema_support\";\n\n public static final String NEREIDS_CBO_PENALTY_FACTOR = \"nereids_cbo_penalty_factor\";\n public static final String ENABLE_NEREIDS_TRACE = 
\"enable_nereids_trace\";\n\n public static final String ENABLE_DPHYP_TRACE = \"enable_dphyp_trace\";\n\n public static final String ENABLE_FOLD_NONDETERMINISTIC_FN = \"enable_fold_nondeterministic_fn\";\n\n public static final String ENABLE_RUNTIME_FILTER_PRUNE =\n \"enable_runtime_filter_prune\";\n\n static final String SESSION_CONTEXT = \"session_context\";\n\n public static final String DEFAULT_ORDER_BY_LIMIT = \"default_order_by_limit\";\n\n public static final String ENABLE_SINGLE_REPLICA_INSERT = \"enable_single_replica_insert\";\n\n public static final String ENABLE_FUNCTION_PUSHDOWN = \"enable_function_pushdown\";\n\n public static final String ENABLE_COMMON_EXPR_PUSHDOWN = \"enable_common_expr_pushdown\";\n\n public static final String FRAGMENT_TRANSMISSION_COMPRESSION_CODEC = \"fragment_transmission_compression_codec\";\n\n public static final String ENABLE_LOCAL_EXCHANGE = \"enable_local_exchange\";\n\n public static final String SKIP_STORAGE_ENGINE_MERGE = \"skip_storage_engine_merge\";\n\n public static final String SKIP_DELETE_PREDICATE = \"skip_delete_predicate\";\n\n public static final String SKIP_DELETE_SIGN = \"skip_delete_sign\";\n\n public static final String SKIP_DELETE_BITMAP = \"skip_delete_bitmap\";\n\n public static final String ENABLE_PUSH_DOWN_NO_GROUP_AGG = \"enable_push_down_no_group_agg\";\n\n public static final String ENABLE_CBO_STATISTICS = \"enable_cbo_statistics\";\n\n public static final String ENABLE_SAVE_STATISTICS_SYNC_JOB = \"enable_save_statistics_sync_job\";\n\n public static final String ENABLE_ELIMINATE_SORT_NODE = \"enable_eliminate_sort_node\";\n\n public static final String NEREIDS_TRACE_EVENT_MODE = \"nereids_trace_event_mode\";\n\n public static final String INTERNAL_SESSION = \"internal_session\";\n\n public static final String PARTITIONED_HASH_JOIN_ROWS_THRESHOLD = \"partitioned_hash_join_rows_threshold\";\n public static final String PARTITIONED_HASH_AGG_ROWS_THRESHOLD = 
\"partitioned_hash_agg_rows_threshold\";\n\n public static final String PARTITION_PRUNING_EXPAND_THRESHOLD = \"partition_pruning_expand_threshold\";\n\n public static final String ENABLE_SHARE_HASH_TABLE_FOR_BROADCAST_JOIN\n = \"enable_share_hash_table_for_broadcast_join\";\n\n \n public static final String ENABLE_HASH_JOIN_EARLY_START_PROBE = \"enable_hash_join_early_start_probe\";\n\n \n public static final String ENABLE_UNICODE_NAME_SUPPORT = \"enable_unicode_name_support\";\n\n public static final String REPEAT_MAX_NUM = \"repeat_max_num\";\n\n public static final String GROUP_CONCAT_MAX_LEN = \"group_concat_max_len\";\n\n public static final String EXTERNAL_SORT_BYTES_THRESHOLD = \"external_sort_bytes_threshold\";\n public static final String EXTERNAL_AGG_BYTES_THRESHOLD = \"external_agg_bytes_threshold\";\n public static final String EXTERNAL_AGG_PARTITION_BITS = \"external_agg_partition_bits\";\n\n public static final String ENABLE_TWO_PHASE_READ_OPT = \"enable_two_phase_read_opt\";\n public static final String TOPN_OPT_LIMIT_THRESHOLD = \"topn_opt_limit_threshold\";\n\n public static final String ENABLE_FILE_CACHE = \"enable_file_cache\";\n\n public static final String FILE_CACHE_BASE_PATH = \"file_cache_base_path\";\n\n public static final String ENABLE_INVERTED_INDEX_QUERY = \"enable_inverted_index_query\";\n\n public static final String ENABLE_PUSHDOWN_COUNT_ON_INDEX = \"enable_count_on_index_pushdown\";\n\n public static final String GROUP_BY_AND_HAVING_USE_ALIAS_FIRST = \"group_by_and_having_use_alias_first\";\n public static final String DROP_TABLE_IF_CTAS_FAILED = \"drop_table_if_ctas_failed\";\n\n public static final String MAX_TABLE_COUNT_USE_CASCADES_JOIN_REORDER = \"max_table_count_use_cascades_join_reorder\";\n public static final int MIN_JOIN_REORDER_TABLE_COUNT = 2;\n\n public static final String SHOW_USER_DEFAULT_ROLE = \"show_user_default_role\";\n\n public static final String ENABLE_MINIDUMP = \"enable_minidump\";\n\n public static final 
String MINIDUMP_PATH = \"minidump_path\";\n\n public static final String TRACE_NEREIDS = \"trace_nereids\";\n\n public static final String PLAN_NEREIDS_DUMP = \"plan_nereids_dump\";\n\n public static final String DUMP_NEREIDS_MEMO = \"dump_nereids_memo\";\n\n \n public static final String USE_FIX_REPLICA = \"use_fix_replica\";\n\n public static final String DRY_RUN_QUERY = \"dry_run_query\";\n\n \n public static final String FILE_SPLIT_SIZE = \"file_split_size\";\n\n /**\n * use insert stmt as the unified backend for all loads\n */\n public static final String ENABLE_UNIFIED_LOAD = \"enable_unified_load\";\n\n public static final String ENABLE_PARQUET_LAZY_MAT = \"enable_parquet_lazy_materialization\";\n\n public static final String ENABLE_ORC_LAZY_MAT = \"enable_orc_lazy_materialization\";\n\n public static final String INLINE_CTE_REFERENCED_THRESHOLD = \"inline_cte_referenced_threshold\";\n\n public static final String ENABLE_CTE_MATERIALIZE = \"enable_cte_materialize\";\n\n public static final String ENABLE_SCAN_RUN_SERIAL = \"enable_scan_node_run_serial\";\n\n public static final String ENABLE_ANALYZE_COMPLEX_TYPE_COLUMN = \"enable_analyze_complex_type_column\";\n\n public static final String EXTERNAL_TABLE_ANALYZE_PART_NUM = \"external_table_analyze_part_num\";\n\n public static final String ENABLE_STRONG_CONSISTENCY = \"enable_strong_consistency_read\";\n public static final String ENABLE_INSERT_GROUP_COMMIT = \"enable_insert_group_commit\";\n\n public static final String PARALLEL_SYNC_ANALYZE_TASK_NUM = \"parallel_sync_analyze_task_num\";\n\n public static final String TRUNCATE_CHAR_OR_VARCHAR_COLUMNS = \"truncate_char_or_varchar_columns\";\n\n public static final String CBO_CPU_WEIGHT = \"cbo_cpu_weight\";\n\n public static final String CBO_MEM_WEIGHT = \"cbo_mem_weight\";\n\n public static final String CBO_NET_WEIGHT = \"cbo_net_weight\";\n\n public static final String ROUND_PRECISE_DECIMALV2_VALUE = \"round_precise_decimalv2_value\";\n\n public static 
final String ENABLE_DELETE_SUB_PREDICATE_V2 = \"enable_delete_sub_predicate_v2\";\n\n public static final String JDBC_CLICKHOUSE_QUERY_FINAL = \"jdbc_clickhouse_query_final\";\n\n public static final String ENABLE_MEMTABLE_ON_SINK_NODE =\n \"enable_memtable_on_sink_node\";\n\n public static final String INVERTED_INDEX_CONJUNCTION_OPT_THRESHOLD = \"inverted_index_conjunction_opt_threshold\";\n\n public static final String FULL_AUTO_ANALYZE_START_TIME = \"full_auto_analyze_start_time\";\n\n public static final String FULL_AUTO_ANALYZE_END_TIME = \"full_auto_analyze_end_time\";\n\n public static final List DEBUG_VARIABLES = ImmutableList.of(\n SKIP_DELETE_PREDICATE,\n SKIP_DELETE_BITMAP,\n SKIP_DELETE_SIGN,\n SKIP_STORAGE_ENGINE_MERGE,\n SHOW_HIDDEN_COLUMNS\n );\n\n \n public Map sessionOriginValue = new HashMap();\n \n \n public boolean isSingleSetVar = false;\n\n @VariableMgr.VarAttr(name = JDBC_CLICKHOUSE_QUERY_FINAL)\n public boolean jdbcClickhouseQueryFinal = false;\n\n @VariableMgr.VarAttr(name = ROUND_PRECISE_DECIMALV2_VALUE)\n public boolean roundPreciseDecimalV2Value = false;\n\n @VariableMgr.VarAttr(name = INSERT_VISIBLE_TIMEOUT_MS, needForward = true)\n public long insertVisibleTimeoutMs = DEFAULT_INSERT_VISIBLE_TIMEOUT_MS;\n\n \n @VariableMgr.VarAttr(name = EXEC_MEM_LIMIT)\n public long maxExecMemByte = 2147483648L;\n\n @VariableMgr.VarAttr(name = SCAN_QUEUE_MEM_LIMIT)\n public long maxScanQueueMemByte = 2147483648L / 20;\n\n @VariableMgr.VarAttr(name = ENABLE_SPILLING)\n public boolean enableSpilling = false;\n\n @VariableMgr.VarAttr(name = ENABLE_EXCHANGE_NODE_PARALLEL_MERGE)\n public boolean enableExchangeNodeParallelMerge = false;\n\n \n \n @VariableMgr.VarAttr(name = DEFAULT_ORDER_BY_LIMIT)\n private long defaultOrderByLimit = -1;\n\n \n @VariableMgr.VarAttr(name = QUERY_TIMEOUT)\n public int queryTimeoutS = 300;\n\n \n \n \n \n \n @VariableMgr.VarAttr(name = MAX_EXECUTION_TIME, fuzzy = true, setter = \"setMaxExecutionTimeMS\")\n public int 
maxExecutionTimeMS = -1;\n\n @VariableMgr.VarAttr(name = INSERT_TIMEOUT)\n public int insertTimeoutS = 14400;\n\n \n @VariableMgr.VarAttr(name = ENABLE_PROFILE, needForward = true)\n public boolean enableProfile = false;\n\n \n \n \n @VariableMgr.VarAttr(name = ENABLE_SINGLE_DISTINCT_COLUMN_OPT)\n public boolean enableSingleDistinctColumnOpt = false;\n\n \n @VariableMgr.VarAttr(name = SQL_MODE, needForward = true)\n public long sqlMode = SqlModeHelper.MODE_DEFAULT;\n\n @VariableMgr.VarAttr(name = WORKLOAD_VARIABLE)\n public String workloadGroup = \"\";\n\n @VariableMgr.VarAttr(name = RESOURCE_VARIABLE)\n public String resourceGroup = \"\";\n\n \n @VariableMgr.VarAttr(name = AUTO_COMMIT)\n public boolean autoCommit = true;\n\n \n @VariableMgr.VarAttr(name = TX_ISOLATION)\n public String txIsolation = \"REPEATABLE-READ\";\n\n \n @VariableMgr.VarAttr(name = TX_READ_ONLY)\n public boolean txReadonly = false;\n\n \n @VariableMgr.VarAttr(name = TRANSACTION_READ_ONLY)\n public boolean transactionReadonly = false;\n\n \n @VariableMgr.VarAttr(name = TRANSACTION_ISOLATION)\n public String transactionIsolation = \"REPEATABLE-READ\";\n\n \n @VariableMgr.VarAttr(name = CHARACTER_SET_CLIENT)\n public String charsetClient = \"utf8\";\n @VariableMgr.VarAttr(name = CHARACTER_SET_CONNNECTION)\n public String charsetConnection = \"utf8\";\n @VariableMgr.VarAttr(name = CHARACTER_SET_RESULTS)\n public String charsetResults = \"utf8\";\n @VariableMgr.VarAttr(name = CHARACTER_SET_SERVER)\n public String charsetServer = \"utf8\";\n @VariableMgr.VarAttr(name = COLLATION_CONNECTION)\n public String collationConnection = \"utf8_general_ci\";\n @VariableMgr.VarAttr(name = COLLATION_DATABASE)\n public String collationDatabase = \"utf8_general_ci\";\n\n @VariableMgr.VarAttr(name = COLLATION_SERVER)\n public String collationServer = \"utf8_general_ci\";\n\n \n @VariableMgr.VarAttr(name = SQL_AUTO_IS_NULL)\n public boolean sqlAutoIsNull = false;\n\n @VariableMgr.VarAttr(name = SQL_SELECT_LIMIT)\n 
private long sqlSelectLimit = Long.MAX_VALUE;\n\n \n @VariableMgr.VarAttr(name = MAX_ALLOWED_PACKET)\n public int maxAllowedPacket = 1048576;\n\n @VariableMgr.VarAttr(name = AUTO_INCREMENT_INCREMENT)\n public int autoIncrementIncrement = 1;\n\n \n @VariableMgr.VarAttr(name = QUERY_CACHE_TYPE)\n public int queryCacheType = 0;\n\n \n @VariableMgr.VarAttr(name = INTERACTIVE_TIMTOUT)\n public int interactiveTimeout = 3600;\n\n \n @VariableMgr.VarAttr(name = WAIT_TIMEOUT)\n public int waitTimeoutS = 28800;\n\n \n @VariableMgr.VarAttr(name = NET_WRITE_TIMEOUT)\n public int netWriteTimeout = 60;\n\n \n @VariableMgr.VarAttr(name = NET_READ_TIMEOUT)\n public int netReadTimeout = 60;\n\n \n @VariableMgr.VarAttr(name = TIME_ZONE, needForward = true)\n public String timeZone = TimeUtils.getSystemTimeZone().getID();\n\n @VariableMgr.VarAttr(name = PARALLEL_EXCHANGE_INSTANCE_NUM)\n public int exchangeInstanceParallel = -1;\n\n @VariableMgr.VarAttr(name = SQL_SAFE_UPDATES)\n public int sqlSafeUpdates = 0;\n\n \n @VariableMgr.VarAttr(name = NET_BUFFER_LENGTH, flag = VariableMgr.READ_ONLY)\n public int netBufferLength = 16384;\n\n \n @VariableMgr.VarAttr(name = CODEGEN_LEVEL)\n public int codegenLevel = 0;\n\n @VariableMgr.VarAttr(name = HAVE_QUERY_CACHE, flag = VariableMgr.READ_ONLY)\n public boolean haveQueryCache = false;\n\n \n @VariableMgr.VarAttr(name = BATCH_SIZE, fuzzy = true)\n public int batchSize = 4064;\n\n @VariableMgr.VarAttr(name = DISABLE_STREAMING_PREAGGREGATIONS, fuzzy = true)\n public boolean disableStreamPreaggregations = false;\n\n @VariableMgr.VarAttr(name = DISABLE_COLOCATE_PLAN)\n public boolean disableColocatePlan = false;\n\n @VariableMgr.VarAttr(name = ENABLE_COLOCATE_SCAN)\n public boolean enableColocateScan = false;\n\n @VariableMgr.VarAttr(name = ENABLE_BUCKET_SHUFFLE_JOIN, varType = VariableAnnotation.EXPERIMENTAL_ONLINE)\n public boolean enableBucketShuffleJoin = true;\n\n @VariableMgr.VarAttr(name = PREFER_JOIN_METHOD)\n public String 
preferJoinMethod = \"broadcast\";\n\n @VariableMgr.VarAttr(name = FRAGMENT_TRANSMISSION_COMPRESSION_CODEC)\n public String fragmentTransmissionCompressionCodec = \"lz4\";\n\n /*\n * the parallel exec instance num for one Fragment in one BE\n * 1 means disable this feature\n */\n @VariableMgr.VarAttr(name = PARALLEL_FRAGMENT_EXEC_INSTANCE_NUM, needForward = true, fuzzy = true)\n public int parallelExecInstanceNum = 1;\n\n @VariableMgr.VarAttr(name = PARALLEL_PIPELINE_TASK_NUM, fuzzy = true, needForward = true)\n public int parallelPipelineTaskNum = 0;\n\n @VariableMgr.VarAttr(name = MAX_INSTANCE_NUM)\n public int maxInstanceNum = 64;\n\n @VariableMgr.VarAttr(name = ENABLE_INSERT_STRICT, needForward = true)\n public boolean enableInsertStrict = true;\n\n @VariableMgr.VarAttr(name = ENABLE_ODBC_TRANSCATION)\n public boolean enableOdbcTransaction = false;\n\n @VariableMgr.VarAttr(name = ENABLE_SCAN_RUN_SERIAL, description = {\n \"\u662f\u5426\u5f00\u542fScanNode\u4e32\u884c\u8bfb\uff0c\u4ee5\u907f\u514dlimit\u8f83\u5c0f\u7684\u60c5\u51b5\u4e0b\u7684\u8bfb\u653e\u5927\uff0c\u53ef\u4ee5\u63d0\u9ad8\u67e5\u8be2\u7684\u5e76\u53d1\u80fd\u529b\",\n \"Whether to enable ScanNode serial reading to avoid read amplification in cases of small limits\"\n + \"which can improve query concurrency. 
default is false.\"})\n public boolean enableScanRunSerial = false;\n\n @VariableMgr.VarAttr(name = ENABLE_SQL_CACHE)\n public boolean enableSqlCache = false;\n\n @VariableMgr.VarAttr(name = ENABLE_PARTITION_CACHE)\n public boolean enablePartitionCache = false;\n\n @VariableMgr.VarAttr(name = FORWARD_TO_MASTER)\n public boolean forwardToMaster = true;\n\n @VariableMgr.VarAttr(name = USE_V2_ROLLUP)\n public boolean useV2Rollup = false;\n\n @VariableMgr.VarAttr(name = REWRITE_COUNT_DISTINCT_TO_BITMAP_HLL)\n public boolean rewriteCountDistinct = true;\n\n \n @VariableMgr.VarAttr(name = EVENT_SCHEDULER)\n public String eventScheduler = \"OFF\";\n @VariableMgr.VarAttr(name = STORAGE_ENGINE)\n public String storageEngine = \"olap\";\n @VariableMgr.VarAttr(name = DEFAULT_STORAGE_ENGINE)\n public String defaultStorageEngine = \"olap\";\n @VariableMgr.VarAttr(name = DEFAULT_TMP_STORAGE_ENGINE)\n public String defaultTmpStorageEngine = \"olap\";\n @VariableMgr.VarAttr(name = DIV_PRECISION_INCREMENT)\n public int divPrecisionIncrement = 4;\n\n \n @VariableMgr.VarAttr(name = MAX_SCAN_KEY_NUM)\n public int maxScanKeyNum = -1;\n @VariableMgr.VarAttr(name = MAX_PUSHDOWN_CONDITIONS_PER_COLUMN)\n public int maxPushdownConditionsPerColumn = -1;\n @VariableMgr.VarAttr(name = SHOW_HIDDEN_COLUMNS, flag = VariableMgr.SESSION_ONLY)\n public boolean showHiddenColumns = false;\n\n @VariableMgr.VarAttr(name = ALLOW_PARTITION_COLUMN_NULLABLE)\n public boolean allowPartitionColumnNullable = true;\n\n @VariableMgr.VarAttr(name = DELETE_WITHOUT_PARTITION, needForward = true)\n public boolean deleteWithoutPartition = false;\n\n @VariableMgr.VarAttr(name = SEND_BATCH_PARALLELISM, needForward = true)\n public int sendBatchParallelism = 1;\n\n @VariableMgr.VarAttr(name = EXTRACT_WIDE_RANGE_EXPR, needForward = true)\n public boolean extractWideRangeExpr = true;\n\n @VariableMgr.VarAttr(name = ENABLE_NEREIDS_DML, needForward = true)\n public boolean enableNereidsDML = false;\n\n 
@VariableMgr.VarAttr(name = ENABLE_STRICT_CONSISTENCY_DML, needForward = true)\n public boolean enableStrictConsistencyDml = false;\n\n @VariableMgr.VarAttr(name = ENABLE_VECTORIZED_ENGINE, varType = VariableAnnotation.EXPERIMENTAL_ONLINE)\n public boolean enableVectorizedEngine = true;\n\n @VariableMgr.VarAttr(name = ENABLE_PIPELINE_ENGINE, fuzzy = true, needForward = true,\n varType = VariableAnnotation.EXPERIMENTAL)\n private boolean enablePipelineEngine = true;\n\n @VariableMgr.VarAttr(name = ENABLE_PIPELINE_X_ENGINE, fuzzy = false, varType = VariableAnnotation.EXPERIMENTAL)\n private boolean enablePipelineXEngine = false;\n\n @VariableMgr.VarAttr(name = ENABLE_AGG_STATE, fuzzy = false, varType = VariableAnnotation.EXPERIMENTAL)\n public boolean enableAggState = false;\n\n @VariableMgr.VarAttr(name = ENABLE_PARALLEL_OUTFILE)\n public boolean enableParallelOutfile = false;\n\n @VariableMgr.VarAttr(name = CPU_RESOURCE_LIMIT)\n public int cpuResourceLimit = -1;\n\n @VariableMgr.VarAttr(name = SQL_QUOTE_SHOW_CREATE)\n public boolean sqlQuoteShowCreate = true;\n\n @VariableMgr.VarAttr(name = TRIM_TAILING_SPACES_FOR_EXTERNAL_TABLE_QUERY, needForward = true)\n public boolean trimTailingSpacesForExternalTableQuery = false;\n\n\n \n \n \n @VariableMgr.VarAttr(name = AUTO_BROADCAST_JOIN_THRESHOLD)\n public double autoBroadcastJoinThreshold = 0.8;\n\n @VariableMgr.VarAttr(name = ENABLE_COST_BASED_JOIN_REORDER)\n private boolean enableJoinReorderBasedCost = false;\n\n @VariableMgr.VarAttr(name = ENABLE_FOLD_CONSTANT_BY_BE, fuzzy = true)\n private boolean enableFoldConstantByBe = false;\n\n @VariableMgr.VarAttr(name = RUNTIME_FILTER_MODE, needForward = true)\n private String runtimeFilterMode = \"GLOBAL\";\n\n @VariableMgr.VarAttr(name = RUNTIME_BLOOM_FILTER_SIZE, needForward = true)\n private int runtimeBloomFilterSize = 2097152;\n\n @VariableMgr.VarAttr(name = RUNTIME_BLOOM_FILTER_MIN_SIZE, needForward = true)\n private int runtimeBloomFilterMinSize = 1048576;\n\n 
@VariableMgr.VarAttr(name = RUNTIME_BLOOM_FILTER_MAX_SIZE, needForward = true)\n private int runtimeBloomFilterMaxSize = 16777216;\n\n @VariableMgr.VarAttr(name = RUNTIME_FILTER_WAIT_TIME_MS, needForward = true)\n private int runtimeFilterWaitTimeMs = 1000;\n\n @VariableMgr.VarAttr(name = RUNTIME_FILTERS_MAX_NUM, needForward = true)\n private int runtimeFiltersMaxNum = 10;\n\n \n @VariableMgr.VarAttr(name = RUNTIME_FILTER_TYPE, fuzzy = true, needForward = true)\n private int runtimeFilterType = 8;\n\n @VariableMgr.VarAttr(name = RUNTIME_FILTER_MAX_IN_NUM, needForward = true)\n private int runtimeFilterMaxInNum = 1024;\n\n @VariableMgr.VarAttr(name = USE_RF_DEFAULT)\n public boolean useRuntimeFilterDefaultSize = false;\n\n public int getBeNumberForTest() {\n return beNumberForTest;\n }\n\n @VariableMgr.VarAttr(name = PROFILLING)\n public boolean profiling = false;\n\n public void setBeNumberForTest(int beNumberForTest) {\n this.beNumberForTest = beNumberForTest;\n }\n\n @VariableMgr.VarAttr(name = BE_NUMBER_FOR_TEST)\n private int beNumberForTest = -1;\n\n public double getCboCpuWeight() {\n return cboCpuWeight;\n }\n\n public void setCboCpuWeight(double cboCpuWeight) {\n this.cboCpuWeight = cboCpuWeight;\n }\n\n public double getCboMemWeight() {\n return cboMemWeight;\n }\n\n public void setCboMemWeight(double cboMemWeight) {\n this.cboMemWeight = cboMemWeight;\n }\n\n public double getCboNetWeight() {\n return cboNetWeight;\n }\n\n public void setCboNetWeight(double cboNetWeight) {\n this.cboNetWeight = cboNetWeight;\n }\n\n @VariableMgr.VarAttr(name = CBO_CPU_WEIGHT)\n private double cboCpuWeight = 1.0;\n\n @VariableMgr.VarAttr(name = CBO_MEM_WEIGHT)\n private double cboMemWeight = 1.0;\n\n @VariableMgr.VarAttr(name = CBO_NET_WEIGHT)\n private double cboNetWeight = 1.5;\n\n @VariableMgr.VarAttr(name = DISABLE_JOIN_REORDER)\n private boolean disableJoinReorder = false;\n\n @VariableMgr.VarAttr(name = MAX_JOIN_NUMBER_OF_REORDER)\n private int maxJoinNumberOfReorder 
= 63;\n\n @VariableMgr.VarAttr(name = ENABLE_BUSHY_TREE, needForward = true)\n private boolean enableBushyTree = false;\n\n public int getMaxJoinNumBushyTree() {\n return maxJoinNumBushyTree;\n }\n\n public void setMaxJoinNumBushyTree(int maxJoinNumBushyTree) {\n this.maxJoinNumBushyTree = maxJoinNumBushyTree;\n }\n\n public int getMaxJoinNumberOfReorder() {\n return maxJoinNumberOfReorder;\n }\n\n public void setMaxJoinNumberOfReorder(int maxJoinNumberOfReorder) {\n this.maxJoinNumberOfReorder = maxJoinNumberOfReorder;\n }\n\n\n @VariableMgr.VarAttr(name = MAX_JOIN_NUMBER_BUSHY_TREE)\n private int maxJoinNumBushyTree = 5;\n\n @VariableMgr.VarAttr(name = ENABLE_PARTITION_TOPN)\n private boolean enablePartitionTopN = true;\n\n @VariableMgr.VarAttr(name = ENABLE_INFER_PREDICATE)\n private boolean enableInferPredicate = true;\n\n @VariableMgr.VarAttr(name = RETURN_OBJECT_DATA_AS_BINARY)\n private boolean returnObjectDataAsBinary = false;\n\n @VariableMgr.VarAttr(name = BLOCK_ENCRYPTION_MODE)\n private String blockEncryptionMode = \"\";\n\n @VariableMgr.VarAttr(name = ENABLE_PROJECTION)\n private boolean enableProjection = true;\n\n @VariableMgr.VarAttr(name = CHECK_OVERFLOW_FOR_DECIMAL)\n private boolean checkOverflowForDecimal = false;\n\n @VariableMgr.VarAttr(name = ENABLE_DPHYP_OPTIMIZER)\n public boolean enableDPHypOptimizer = false;\n\n /**\n * This variable is used to select n-th optimized plan in memo.\n * It can allow us select different plans for the same SQL statement\n * and these plans can be used to evaluate the cost model.\n */\n @VariableMgr.VarAttr(name = NTH_OPTIMIZED_PLAN)\n private int nthOptimizedPlan = 1;\n\n /**\n * as the new optimizer is not mature yet, use this var\n * to control whether to use new optimizer, remove it when\n * the new optimizer is fully developed. 
I hope that day\n * would be coming soon.\n */\n @VariableMgr.VarAttr(name = ENABLE_NEREIDS_PLANNER, needForward = true,\n fuzzy = true, varType = VariableAnnotation.EXPERIMENTAL)\n private boolean enableNereidsPlanner = true;\n\n @VariableMgr.VarAttr(name = DISABLE_NEREIDS_RULES, needForward = true)\n private String disableNereidsRules = \"\";\n\n @VariableMgr.VarAttr(name = ENABLE_NEW_COST_MODEL, needForward = true)\n private boolean enableNewCostModel = false;\n\n @VariableMgr.VarAttr(name = NEREIDS_STAR_SCHEMA_SUPPORT)\n private boolean nereidsStarSchemaSupport = true;\n\n @VariableMgr.VarAttr(name = REWRITE_OR_TO_IN_PREDICATE_THRESHOLD, fuzzy = true)\n private int rewriteOrToInPredicateThreshold = 2;\n\n @VariableMgr.VarAttr(name = NEREIDS_CBO_PENALTY_FACTOR, needForward = true)\n private double nereidsCboPenaltyFactor = 0.7;\n\n @VariableMgr.VarAttr(name = ENABLE_NEREIDS_TRACE)\n private boolean enableNereidsTrace = false;\n\n @VariableMgr.VarAttr(name = ENABLE_DPHYP_TRACE, needForward = true)\n public boolean enableDpHypTrace = false;\n\n @VariableMgr.VarAttr(name = BROADCAST_RIGHT_TABLE_SCALE_FACTOR)\n private double broadcastRightTableScaleFactor = 0.0;\n\n @VariableMgr.VarAttr(name = BROADCAST_ROW_COUNT_LIMIT, needForward = true)\n private double broadcastRowCountLimit = 30000000;\n\n @VariableMgr.VarAttr(name = BROADCAST_HASHTABLE_MEM_LIMIT_PERCENTAGE, needForward = true)\n private double broadcastHashtableMemLimitPercentage = 0.2;\n\n @VariableMgr.VarAttr(name = ENABLE_RUNTIME_FILTER_PRUNE, needForward = true)\n public boolean enableRuntimeFilterPrune = false;\n\n /**\n * The client can pass some special information by setting this session variable in the format: \"k1:v1;k2:v2\".\n * For example, trace_id can be passed to trace the query request sent by the user.\n * set session_context=\"trace_id:1234565678\";\n */\n @VariableMgr.VarAttr(name = SESSION_CONTEXT, needForward = true)\n public String sessionContext = \"\";\n\n @VariableMgr.VarAttr(name = 
ENABLE_SINGLE_REPLICA_INSERT,\n needForward = true, varType = VariableAnnotation.EXPERIMENTAL)\n public boolean enableSingleReplicaInsert = false;\n\n @VariableMgr.VarAttr(name = ENABLE_FUNCTION_PUSHDOWN, fuzzy = true)\n public boolean enableFunctionPushdown = false;\n\n @VariableMgr.VarAttr(name = FORBID_UNKNOWN_COLUMN_STATS)\n public boolean forbidUnknownColStats = false;\n\n @VariableMgr.VarAttr(name = ENABLE_COMMON_EXPR_PUSHDOWN, fuzzy = true)\n public boolean enableCommonExprPushdown = true;\n\n @VariableMgr.VarAttr(name = ENABLE_LOCAL_EXCHANGE, fuzzy = true, varType = VariableAnnotation.DEPRECATED)\n public boolean enableLocalExchange = true;\n\n /**\n * For debug purpose, don't merge unique key and agg key when reading data.\n */\n @VariableMgr.VarAttr(name = SKIP_STORAGE_ENGINE_MERGE)\n public boolean skipStorageEngineMerge = false;\n\n /**\n * For debug purpose, skip delete predicate when reading data.\n */\n @VariableMgr.VarAttr(name = SKIP_DELETE_PREDICATE)\n public boolean skipDeletePredicate = false;\n\n /**\n * For debug purpose, skip delete sign when reading data.\n */\n @VariableMgr.VarAttr(name = SKIP_DELETE_SIGN)\n public boolean skipDeleteSign = false;\n\n /**\n * For debug purpose, skip delete bitmap when reading data.\n */\n @VariableMgr.VarAttr(name = SKIP_DELETE_BITMAP)\n public boolean skipDeleteBitmap = false;\n\n \n \n \n @VariableMgr.VarAttr(name = ENABLE_FALLBACK_TO_ORIGINAL_PLANNER, needForward = true)\n public boolean enableFallbackToOriginalPlanner = true;\n\n @VariableMgr.VarAttr(name = ENABLE_NEREIDS_TIMEOUT, needForward = true)\n public boolean enableNereidsTimeout = true;\n\n @VariableMgr.VarAttr(name = ENABLE_PUSH_DOWN_NO_GROUP_AGG)\n public boolean enablePushDownNoGroupAgg = true;\n\n /**\n * The current statistics are only used for CBO test,\n * and are not available to users. 
(work in progress)\n */\n @VariableMgr.VarAttr(name = ENABLE_CBO_STATISTICS)\n public boolean enableCboStatistics = false;\n\n @VariableMgr.VarAttr(name = ENABLE_ELIMINATE_SORT_NODE)\n public boolean enableEliminateSortNode = true;\n\n @VariableMgr.VarAttr(name = INTERNAL_SESSION)\n public boolean internalSession = false;\n\n \n @VariableMgr.VarAttr(name = PARTITIONED_HASH_JOIN_ROWS_THRESHOLD, fuzzy = true)\n public int partitionedHashJoinRowsThreshold = 0;\n\n \n @VariableMgr.VarAttr(name = PARTITIONED_HASH_AGG_ROWS_THRESHOLD, fuzzy = true)\n public int partitionedHashAggRowsThreshold = 0;\n\n @VariableMgr.VarAttr(name = PARTITION_PRUNING_EXPAND_THRESHOLD, fuzzy = true)\n public int partitionPruningExpandThreshold = 10;\n\n @VariableMgr.VarAttr(name = ENABLE_SHARE_HASH_TABLE_FOR_BROADCAST_JOIN, fuzzy = true)\n public boolean enableShareHashTableForBroadcastJoin = true;\n\n @VariableMgr.VarAttr(name = ENABLE_HASH_JOIN_EARLY_START_PROBE, fuzzy = false)\n public boolean enableHashJoinEarlyStartProbe = false;\n\n @VariableMgr.VarAttr(name = ENABLE_UNICODE_NAME_SUPPORT)\n public boolean enableUnicodeNameSupport = false;\n\n @VariableMgr.VarAttr(name = REPEAT_MAX_NUM, needForward = true)\n public int repeatMaxNum = 10000;\n\n @VariableMgr.VarAttr(name = GROUP_CONCAT_MAX_LEN)\n public long groupConcatMaxLen = 2147483646;\n\n \n \n public static final long MIN_EXTERNAL_SORT_BYTES_THRESHOLD = 134217728;\n @VariableMgr.VarAttr(name = EXTERNAL_SORT_BYTES_THRESHOLD,\n checker = \"checkExternalSortBytesThreshold\", fuzzy = true)\n public long externalSortBytesThreshold = 0;\n\n \n public static final long MIN_EXTERNAL_AGG_BYTES_THRESHOLD = 134217728;\n @VariableMgr.VarAttr(name = EXTERNAL_AGG_BYTES_THRESHOLD,\n checker = \"checkExternalAggBytesThreshold\", fuzzy = true)\n public long externalAggBytesThreshold = 0;\n\n public static final int MIN_EXTERNAL_AGG_PARTITION_BITS = 4;\n public static final int MAX_EXTERNAL_AGG_PARTITION_BITS = 8;\n @VariableMgr.VarAttr(name = 
EXTERNAL_AGG_PARTITION_BITS,\n checker = \"checkExternalAggPartitionBits\", fuzzy = true)\n public int externalAggPartitionBits = 8; \n\n \n \n \n @VariableMgr.VarAttr(name = ENABLE_TWO_PHASE_READ_OPT, fuzzy = true)\n public boolean enableTwoPhaseReadOpt = true;\n @VariableMgr.VarAttr(name = TOPN_OPT_LIMIT_THRESHOLD)\n public long topnOptLimitThreshold = 1024;\n\n \n \n @VariableMgr.VarAttr(name = GROUP_BY_AND_HAVING_USE_ALIAS_FIRST)\n public boolean groupByAndHavingUseAliasFirst = false;\n\n \n @VariableMgr.VarAttr(name = ENABLE_FILE_CACHE, needForward = true, description = {\n \"\u662f\u5426\u542f\u7528file cache\u3002\u8be5\u53d8\u91cf\u53ea\u6709\u5728be.conf\u4e2denable_file_cache=true\u65f6\u624d\u6709\u6548\uff0c\"\n + \"\u5982\u679cbe.conf\u4e2denable_file_cache=false\uff0c\u8be5BE\u8282\u70b9\u7684file cache\u5904\u4e8e\u7981\u7528\u72b6\u6001\u3002\",\n \"Set wether to use file cache. This variable takes effect only if the BE config enable_file_cache=true. \"\n + \"The cache is not used when BE config enable_file_cache=false.\"})\n public boolean enableFileCache = false;\n\n \n @VariableMgr.VarAttr(name = FILE_CACHE_BASE_PATH, needForward = true, description = {\n \"\u6307\u5b9ablock file cache\u5728BE\u4e0a\u7684\u5b58\u50a8\u8def\u5f84\uff0c\u9ed8\u8ba4 'random'\uff0c\u968f\u673a\u9009\u62e9BE\u914d\u7f6e\u7684\u5b58\u50a8\u8def\u5f84\u3002\",\n \"Specify the storage path of the block file cache on BE, default 'random', \"\n + \"and randomly select the storage path configured by BE.\"})\n public String fileCacheBasePath = \"random\";\n\n \n @VariableMgr.VarAttr(name = ENABLE_INVERTED_INDEX_QUERY, needForward = true, description = {\n \"\u662f\u5426\u542f\u7528inverted index query\u3002\", \"Set whether to use inverted index query.\"})\n public boolean enableInvertedIndexQuery = true;\n\n \n @VariableMgr.VarAttr(name = ENABLE_PUSHDOWN_COUNT_ON_INDEX, needForward = true, description = {\n \"\u662f\u5426\u542f\u7528count_on_index pushdown\u3002\", \"Set 
whether to pushdown count_on_index.\"})\n public boolean enablePushDownCountOnIndex = true;\n\n \n @VariableMgr.VarAttr(name = DROP_TABLE_IF_CTAS_FAILED, needForward = true)\n public boolean dropTableIfCtasFailed = true;\n\n @VariableMgr.VarAttr(name = MAX_TABLE_COUNT_USE_CASCADES_JOIN_REORDER, needForward = true)\n public int maxTableCountUseCascadesJoinReorder = 10;\n\n \n @VariableMgr.VarAttr(name = SHOW_USER_DEFAULT_ROLE, needForward = true)\n public boolean showUserDefaultRole = false;\n\n \n @VariableMgr.VarAttr(name = USE_FIX_REPLICA)\n public int useFixReplica = -1;\n\n @VariableMgr.VarAttr(name = DUMP_NEREIDS_MEMO)\n public boolean dumpNereidsMemo = false;\n\n @VariableMgr.VarAttr(name = \"memo_max_group_expression_size\")\n public int memoMaxGroupExpressionSize = 10000;\n\n @VariableMgr.VarAttr(name = ENABLE_MINIDUMP)\n public boolean enableMinidump = false;\n\n @VariableMgr.VarAttr(name = ENABLE_FOLD_NONDETERMINISTIC_FN)\n public boolean enableFoldNondeterministicFn = false;\n\n @VariableMgr.VarAttr(name = MINIDUMP_PATH)\n public String minidumpPath = \"\";\n\n @VariableMgr.VarAttr(name = TRACE_NEREIDS)\n public boolean traceNereids = false;\n\n @VariableMgr.VarAttr(name = PLAN_NEREIDS_DUMP)\n public boolean planNereidsDump = false;\n\n \n @VariableMgr.VarAttr(name = DRY_RUN_QUERY, needForward = true)\n public boolean dryRunQuery = false;\n\n @VariableMgr.VarAttr(name = FILE_SPLIT_SIZE, needForward = true)\n public long fileSplitSize = 0;\n\n /**\n * determine should we enable unified load (use insert stmt as the backend for all load)\n */\n @VariableMgr.VarAttr(name = ENABLE_UNIFIED_LOAD, needForward = true)\n public boolean enableUnifiedLoad = false;\n\n @VariableMgr.VarAttr(\n name = ENABLE_PARQUET_LAZY_MAT,\n description = {\"\u63a7\u5236 parquet reader \u662f\u5426\u542f\u7528\u5ef6\u8fdf\u7269\u5316\u6280\u672f\u3002\u9ed8\u8ba4\u4e3a true\u3002\",\n \"Controls whether to use lazy materialization technology in parquet reader. 
\"\n + \"The default value is true.\"},\n needForward = true)\n public boolean enableParquetLazyMat = true;\n\n @VariableMgr.VarAttr(\n name = ENABLE_ORC_LAZY_MAT,\n description = {\"\u63a7\u5236 orc reader \u662f\u5426\u542f\u7528\u5ef6\u8fdf\u7269\u5316\u6280\u672f\u3002\u9ed8\u8ba4\u4e3a true\u3002\",\n \"Controls whether to use lazy materialization technology in orc reader. \"\n + \"The default value is true.\"},\n needForward = true)\n public boolean enableOrcLazyMat = true;\n\n @VariableMgr.VarAttr(\n name = EXTERNAL_TABLE_ANALYZE_PART_NUM,\n description = {\"\u6536\u96c6\u5916\u8868\u7edf\u8ba1\u4fe1\u606f\u884c\u6570\u65f6\u9009\u53d6\u7684\u91c7\u6837\u5206\u533a\u6570\uff0c\u9ed8\u8ba4-1\u8868\u793a\u5168\u90e8\u5206\u533a\",\n \"Number of sample partition for collecting external table line number, \"\n + \"default -1 means all partitions\"},\n needForward = false)\n public int externalTableAnalyzePartNum = -1;\n\n @VariableMgr.VarAttr(name = INLINE_CTE_REFERENCED_THRESHOLD)\n public int inlineCTEReferencedThreshold = 1;\n\n @VariableMgr.VarAttr(name = ENABLE_CTE_MATERIALIZE)\n public boolean enableCTEMaterialize = true;\n\n @VariableMgr.VarAttr(name = ENABLE_ANALYZE_COMPLEX_TYPE_COLUMN)\n public boolean enableAnalyzeComplexTypeColumn = false;\n\n @VariableMgr.VarAttr(name = ENABLE_STRONG_CONSISTENCY, description = {\"\u7528\u4ee5\u5f00\u542f\u5f3a\u4e00\u81f4\u8bfb\u3002Doris \u9ed8\u8ba4\u652f\u6301\u540c\u4e00\u4e2a\u4f1a\u8bdd\u5185\u7684\"\n + \"\u5f3a\u4e00\u81f4\u6027\uff0c\u5373\u540c\u4e00\u4e2a\u4f1a\u8bdd\u5185\u5bf9\u6570\u636e\u7684\u53d8\u66f4\u64cd\u4f5c\u662f\u5b9e\u65f6\u53ef\u89c1\u7684\u3002\u5982\u9700\u8981\u4f1a\u8bdd\u95f4\u7684\u5f3a\u4e00\u81f4\u8bfb\uff0c\u5219\u9700\u5c06\u6b64\u53d8\u91cf\u8bbe\u7f6e\u4e3atrue\u3002\",\n \"Used to enable strong consistent reading. By default, Doris supports strong consistency \"\n + \"within the same session, that is, changes to data within the same session are visible in \"\n + \"real time. 
If you want strong consistent reads between sessions, set this variable to true. \"\n })\n public boolean enableStrongConsistencyRead = false;\n\n @VariableMgr.VarAttr(name = PARALLEL_SYNC_ANALYZE_TASK_NUM)\n public int parallelSyncAnalyzeTaskNum = 2;\n\n @VariableMgr.VarAttr(name = ENABLE_DELETE_SUB_PREDICATE_V2, fuzzy = true, needForward = true)\n public boolean enableDeleteSubPredicateV2 = true;\n\n @VariableMgr.VarAttr(name = TRUNCATE_CHAR_OR_VARCHAR_COLUMNS,\n description = {\"\u662f\u5426\u6309\u7167\u8868\u7684 schema \u6765\u622a\u65ad char \u6216\u8005 varchar \u5217\u3002\u9ed8\u8ba4\u4e3a false\u3002\\n\"\n + \"\u56e0\u4e3a\u5916\u8868\u4f1a\u5b58\u5728\u8868\u7684 schema \u4e2d char \u6216\u8005 varchar \u5217\u7684\u6700\u5927\u957f\u5ea6\u548c\u5e95\u5c42 parquet \u6216\u8005 orc \u6587\u4ef6\u4e2d\u7684 schema \u4e0d\u4e00\u81f4\"\n + \"\u7684\u60c5\u51b5\u3002\u6b64\u65f6\u5f00\u542f\u6539\u9009\u9879\uff0c\u4f1a\u6309\u7167\u8868\u7684 schema \u4e2d\u7684\u6700\u5927\u957f\u5ea6\u8fdb\u884c\u622a\u65ad\u3002\",\n \"Whether to truncate char or varchar columns according to the table's schema. 
\"\n + \"The default is false.\\n\"\n + \"Because the maximum length of the char or varchar column in the schema of the table\"\n + \" is inconsistent with the schema in the underlying parquet or orc file.\"\n + \" At this time, if the option is turned on, it will be truncated according to the maximum length\"\n + \" in the schema of the table.\"},\n needForward = true)\n public boolean truncateCharOrVarcharColumns = false;\n\n @VariableMgr.VarAttr(name = ENABLE_MEMTABLE_ON_SINK_NODE, needForward = true)\n public boolean enableMemtableOnSinkNode = false;\n\n @VariableMgr.VarAttr(name = ENABLE_INSERT_GROUP_COMMIT)\n public boolean enableInsertGroupCommit = false;\n\n @VariableMgr.VarAttr(name = INVERTED_INDEX_CONJUNCTION_OPT_THRESHOLD,\n description = {\"\u5728match_all\u4e2d\u6c42\u53d6\u591a\u4e2a\u5012\u6392\u7d22\u5f15\u7684\u4ea4\u96c6\u65f6,\u5982\u679c\u6700\u5927\u7684\u5012\u6392\u7d22\u5f15\u4e2d\u7684\u603b\u6570\u662f\u6700\u5c0f\u5012\u6392\u7d22\u5f15\u4e2d\u7684\u603b\u6570\u7684\u6574\u6570\u500d,\"\n + \"\u5219\u4f7f\u7528\u8df3\u8868\u6765\u4f18\u5316\u4ea4\u96c6\u64cd\u4f5c\u3002\",\n \"When intersecting multiple inverted indexes in match_all,\"\n + \" if the maximum total count of the largest inverted index\"\n + \" is a multiple of the minimum total count of the smallest inverted index,\"\n + \" use a skiplist to optimize the intersection.\"})\n public int invertedIndexConjunctionOptThreshold = 1000;\n\n @VariableMgr.VarAttr(name = FULL_AUTO_ANALYZE_START_TIME, needForward = true, checker = \"checkAnalyzeTimeFormat\",\n description = {\"\u8be5\u53c2\u6570\u5b9a\u4e49\u81ea\u52a8ANALYZE\u4f8b\u7a0b\u7684\u5f00\u59cb\u65f6\u95f4\",\n \"This parameter defines the start time for the automatic ANALYZE routine.\"},\n flag = VariableMgr.GLOBAL)\n public String fullAutoAnalyzeStartTime = \"\";\n\n @VariableMgr.VarAttr(name = FULL_AUTO_ANALYZE_END_TIME, needForward = true, checker = \"checkAnalyzeTimeFormat\",\n description = 
{\"\u8be5\u53c2\u6570\u5b9a\u4e49\u81ea\u52a8ANALYZE\u4f8b\u7a0b\u7684\u7ed3\u675f\u65f6\u95f4\",\n \"This parameter defines the end time for the automatic ANALYZE routine.\"},\n flag = VariableMgr.GLOBAL)\n public String fullAutoAnalyzeEndTime = \"\";\n\n \n \n \n\n public String printFuzzyVariables() {\n if (!Config.use_fuzzy_session_variable) {\n return \"\";\n }\n List res = Lists.newArrayList();\n for (Field field : SessionVariable.class.getDeclaredFields()) {\n VarAttr attr = field.getAnnotation(VarAttr.class);\n if (attr == null || !attr.fuzzy()) {\n continue;\n }\n field.setAccessible(true);\n try {\n Object val = field.get(this);\n res.add(attr.name() + \"=\" + val.toString());\n } catch (IllegalAccessException e) {\n LOG.warn(\"failed to get fuzzy session variable {}\", attr.name(), e);\n }\n }\n return Joiner.on(\",\").join(res);\n }\n\n /**\n * syntax:\n * all -> use all event\n * all except event_1, event_2, ..., event_n -> use all events excluding the event_1~n\n * event_1, event_2, ..., event_n -> use event_1~n\n */\n @VariableMgr.VarAttr(name = NEREIDS_TRACE_EVENT_MODE, checker = \"checkNereidsTraceEventMode\")\n public String nereidsTraceEventMode = \"all\";\n\n private Set> parsedNereidsEventMode = EventSwitchParser.parse(Lists.newArrayList(\"all\"));\n\n public boolean isInDebugMode() {\n return showHiddenColumns || skipDeleteBitmap || skipDeletePredicate || skipDeleteSign || skipStorageEngineMerge;\n }\n\n public void setEnableNereidsTrace(boolean enableNereidsTrace) {\n this.enableNereidsTrace = enableNereidsTrace;\n }\n\n public void setNereidsTraceEventMode(String nereidsTraceEventMode) {\n checkNereidsTraceEventMode(nereidsTraceEventMode);\n this.nereidsTraceEventMode = nereidsTraceEventMode;\n }\n\n public void checkNereidsTraceEventMode(String nereidsTraceEventMode) {\n List strings = EventSwitchParser.checkEventModeStringAndSplit(nereidsTraceEventMode);\n if (strings != null) {\n parsedNereidsEventMode = 
EventSwitchParser.parse(strings);\n }\n if (parsedNereidsEventMode == null) {\n throw new UnsupportedOperationException(\"nereids_trace_event_mode syntax error, please check\");\n }\n }\n\n public Set> getParsedNereidsEventMode() {\n return parsedNereidsEventMode;\n }\n\n public String getBlockEncryptionMode() {\n return blockEncryptionMode;\n }\n\n public void setBlockEncryptionMode(String blockEncryptionMode) {\n this.blockEncryptionMode = blockEncryptionMode;\n }\n\n public void setRewriteOrToInPredicateThreshold(int threshold) {\n this.rewriteOrToInPredicateThreshold = threshold;\n }\n\n public int getRewriteOrToInPredicateThreshold() {\n return rewriteOrToInPredicateThreshold;\n }\n\n public long getMaxExecMemByte() {\n return maxExecMemByte;\n }\n\n public long getMaxScanQueueExecMemByte() {\n return maxScanQueueMemByte;\n }\n\n public int getQueryTimeoutS() {\n return queryTimeoutS;\n }\n\n public void setEnableTwoPhaseReadOpt(boolean enable) {\n enableTwoPhaseReadOpt = enable;\n }\n\n public int getMaxExecutionTimeMS() {\n return maxExecutionTimeMS;\n }\n\n public int getInsertTimeoutS() {\n return insertTimeoutS;\n }\n\n\n public void setInsertTimeoutS(int insertTimeoutS) {\n this.insertTimeoutS = insertTimeoutS;\n }\n\n public boolean enableProfile() {\n return enableProfile;\n }\n\n public boolean enableSingleDistinctColumnOpt() {\n return enableSingleDistinctColumnOpt;\n }\n\n public int getWaitTimeoutS() {\n return waitTimeoutS;\n }\n\n public long getSqlMode() {\n return sqlMode;\n }\n\n public void setSqlMode(long sqlMode) {\n this.sqlMode = sqlMode;\n }\n\n public boolean isEnableJoinReorderBasedCost() {\n return enableJoinReorderBasedCost;\n }\n\n public boolean isAutoCommit() {\n return autoCommit;\n }\n\n public boolean isTxReadonly() {\n return txReadonly;\n }\n\n public boolean isTransactionReadonly() {\n return transactionReadonly;\n }\n\n public String getTransactionIsolation() {\n return transactionIsolation;\n }\n\n public String 
getTxIsolation() {\n return txIsolation;\n }\n\n public String getCharsetClient() {\n return charsetClient;\n }\n\n public String getCharsetConnection() {\n return charsetConnection;\n }\n\n public String getCharsetResults() {\n return charsetResults;\n }\n\n public String getCharsetServer() {\n return charsetServer;\n }\n\n public String getCollationConnection() {\n return collationConnection;\n }\n\n public String getCollationDatabase() {\n return collationDatabase;\n }\n\n public String getCollationServer() {\n return collationServer;\n }\n\n public boolean isSqlAutoIsNull() {\n return sqlAutoIsNull;\n }\n\n public long getSqlSelectLimit() {\n if (sqlSelectLimit < 0 || sqlSelectLimit >= Long.MAX_VALUE) {\n return -1;\n }\n return sqlSelectLimit;\n }\n\n public long getDefaultOrderByLimit() {\n return defaultOrderByLimit;\n }\n\n public int getMaxAllowedPacket() {\n return maxAllowedPacket;\n }\n\n public int getAutoIncrementIncrement() {\n return autoIncrementIncrement;\n }\n\n public int getQueryCacheType() {\n return queryCacheType;\n }\n\n public int getInteractiveTimeout() {\n return interactiveTimeout;\n }\n\n public int getNetWriteTimeout() {\n return netWriteTimeout;\n }\n\n public int getNetReadTimeout() {\n return netReadTimeout;\n }\n\n public String getTimeZone() {\n return timeZone;\n }\n\n public void setTimeZone(String timeZone) {\n this.timeZone = timeZone;\n }\n\n public int getSqlSafeUpdates() {\n return sqlSafeUpdates;\n }\n\n public int getNetBufferLength() {\n return netBufferLength;\n }\n\n public int getCodegenLevel() {\n return codegenLevel;\n }\n\n public boolean getHaveQueryCache() {\n return haveQueryCache;\n }\n\n /**\n * setMaxExecMemByte.\n **/\n public void setMaxExecMemByte(long maxExecMemByte) {\n if (maxExecMemByte < MIN_EXEC_MEM_LIMIT) {\n this.maxExecMemByte = MIN_EXEC_MEM_LIMIT;\n } else {\n this.maxExecMemByte = maxExecMemByte;\n }\n }\n\n public void setMaxScanQueueMemByte(long scanQueueMemByte) {\n this.maxScanQueueMemByte 
= Math.min(scanQueueMemByte, maxExecMemByte / 20);\n }\n\n public boolean isSqlQuoteShowCreate() {\n return sqlQuoteShowCreate;\n }\n\n public void setSqlQuoteShowCreate(boolean sqlQuoteShowCreate) {\n this.sqlQuoteShowCreate = sqlQuoteShowCreate;\n }\n\n public void setQueryTimeoutS(int queryTimeoutS) {\n this.queryTimeoutS = queryTimeoutS;\n }\n\n public void setMaxExecutionTimeMS(int maxExecutionTimeMS) {\n this.maxExecutionTimeMS = maxExecutionTimeMS;\n this.queryTimeoutS = this.maxExecutionTimeMS / 1000;\n }\n\n public void setMaxExecutionTimeMS(String maxExecutionTimeMS) {\n this.maxExecutionTimeMS = Integer.valueOf(maxExecutionTimeMS);\n this.queryTimeoutS = this.maxExecutionTimeMS / 1000;\n }\n\n public String getWorkloadGroup() {\n return workloadGroup;\n }\n\n public void setWorkloadGroup(String workloadGroup) {\n this.workloadGroup = workloadGroup;\n }\n\n public String getResourceGroup() {\n return resourceGroup;\n }\n\n public void setResourceGroup(String resourceGroup) {\n this.resourceGroup = resourceGroup;\n }\n\n public boolean isDisableColocatePlan() {\n return disableColocatePlan;\n }\n\n public boolean enableColocateScan() {\n return enableColocateScan;\n }\n\n public boolean isEnableBucketShuffleJoin() {\n return enableBucketShuffleJoin;\n }\n\n public boolean isEnableOdbcTransaction() {\n return enableOdbcTransaction;\n }\n\n public String getPreferJoinMethod() {\n return preferJoinMethod;\n }\n\n public void setPreferJoinMethod(String preferJoinMethod) {\n this.preferJoinMethod = preferJoinMethod;\n }\n\n public boolean isEnableFoldConstantByBe() {\n return enableFoldConstantByBe;\n }\n\n public boolean isEnableNereidsDML() {\n return enableNereidsDML;\n }\n\n public void setEnableFoldConstantByBe(boolean foldConstantByBe) {\n this.enableFoldConstantByBe = foldConstantByBe;\n }\n\n public int getParallelExecInstanceNum() {\n if (getEnablePipelineEngine() && parallelPipelineTaskNum == 0) {\n int size = 
Env.getCurrentSystemInfo().getMinPipelineExecutorSize();\n int autoInstance = (size + 1) / 2;\n return Math.min(autoInstance, maxInstanceNum);\n } else if (enablePipelineEngine) {\n return parallelPipelineTaskNum;\n } else {\n return parallelExecInstanceNum;\n }\n }\n\n public int getExchangeInstanceParallel() {\n return exchangeInstanceParallel;\n }\n\n public boolean getEnableInsertStrict() {\n return enableInsertStrict;\n }\n\n public void setEnableInsertStrict(boolean enableInsertStrict) {\n this.enableInsertStrict = enableInsertStrict;\n }\n\n public boolean isEnableSqlCache() {\n return enableSqlCache;\n }\n\n public void setEnableSqlCache(boolean enableSqlCache) {\n this.enableSqlCache = enableSqlCache;\n }\n\n public boolean isEnablePartitionCache() {\n return enablePartitionCache;\n }\n\n public void setEnablePartitionCache(boolean enablePartitionCache) {\n this.enablePartitionCache = enablePartitionCache;\n }\n\n public int getPartitionedHashJoinRowsThreshold() {\n return partitionedHashJoinRowsThreshold;\n }\n\n public void setPartitionedHashJoinRowsThreshold(int threshold) {\n this.partitionedHashJoinRowsThreshold = threshold;\n }\n\n \n public boolean getForwardToMaster() {\n return forwardToMaster;\n }\n\n public boolean isUseV2Rollup() {\n return useV2Rollup;\n }\n\n \n public void setUseV2Rollup(boolean useV2Rollup) {\n this.useV2Rollup = useV2Rollup;\n }\n\n public boolean isRewriteCountDistinct() {\n return rewriteCountDistinct;\n }\n\n public void setRewriteCountDistinct(boolean rewriteCountDistinct) {\n this.rewriteCountDistinct = rewriteCountDistinct;\n }\n\n public String getEventScheduler() {\n return eventScheduler;\n }\n\n public void setEventScheduler(String eventScheduler) {\n this.eventScheduler = eventScheduler;\n }\n\n public String getStorageEngine() {\n return storageEngine;\n }\n\n public void setStorageEngine(String storageEngine) {\n this.storageEngine = storageEngine;\n }\n\n public int getDivPrecisionIncrement() {\n return 
divPrecisionIncrement;\n }\n\n public int getMaxScanKeyNum() {\n return maxScanKeyNum;\n }\n\n public void setMaxScanKeyNum(int maxScanKeyNum) {\n this.maxScanKeyNum = maxScanKeyNum;\n }\n\n public int getMaxPushdownConditionsPerColumn() {\n return maxPushdownConditionsPerColumn;\n }\n\n public void setMaxPushdownConditionsPerColumn(int maxPushdownConditionsPerColumn) {\n this.maxPushdownConditionsPerColumn = maxPushdownConditionsPerColumn;\n }\n\n public double getBroadcastRightTableScaleFactor() {\n return broadcastRightTableScaleFactor;\n }\n\n public void setBroadcastRightTableScaleFactor(double broadcastRightTableScaleFactor) {\n this.broadcastRightTableScaleFactor = broadcastRightTableScaleFactor;\n }\n\n public double getBroadcastRowCountLimit() {\n return broadcastRowCountLimit;\n }\n\n public void setBroadcastRowCountLimit(double broadcastRowCountLimit) {\n this.broadcastRowCountLimit = broadcastRowCountLimit;\n }\n\n public double getBroadcastHashtableMemLimitPercentage() {\n return broadcastHashtableMemLimitPercentage;\n }\n\n public void setBroadcastHashtableMemLimitPercentage(double broadcastHashtableMemLimitPercentage) {\n this.broadcastHashtableMemLimitPercentage = broadcastHashtableMemLimitPercentage;\n }\n\n public boolean showHiddenColumns() {\n return showHiddenColumns;\n }\n\n public void setShowHiddenColumns(boolean showHiddenColumns) {\n this.showHiddenColumns = showHiddenColumns;\n }\n\n public boolean isEnableScanRunSerial() {\n return enableScanRunSerial;\n }\n\n public boolean skipStorageEngineMerge() {\n return skipStorageEngineMerge;\n }\n\n public boolean skipDeleteSign() {\n return skipDeleteSign;\n }\n\n public boolean isAllowPartitionColumnNullable() {\n return allowPartitionColumnNullable;\n }\n\n public String getRuntimeFilterMode() {\n return runtimeFilterMode;\n }\n\n public void setRuntimeFilterMode(String runtimeFilterMode) {\n this.runtimeFilterMode = runtimeFilterMode;\n }\n\n public int getRuntimeBloomFilterSize() {\n return 
runtimeBloomFilterSize;\n }\n\n public void setRuntimeBloomFilterSize(int runtimeBloomFilterSize) {\n this.runtimeBloomFilterSize = runtimeBloomFilterSize;\n }\n\n public int getRuntimeBloomFilterMinSize() {\n return runtimeBloomFilterMinSize;\n }\n\n public void setRuntimeBloomFilterMinSize(int runtimeBloomFilterMinSize) {\n this.runtimeBloomFilterMinSize = runtimeBloomFilterMinSize;\n }\n\n public int getRuntimeBloomFilterMaxSize() {\n return runtimeBloomFilterMaxSize;\n }\n\n public void setRuntimeBloomFilterMaxSize(int runtimeBloomFilterMaxSize) {\n this.runtimeBloomFilterMaxSize = runtimeBloomFilterMaxSize;\n }\n\n public int getRuntimeFilterWaitTimeMs() {\n return runtimeFilterWaitTimeMs;\n }\n\n public void setRuntimeFilterWaitTimeMs(int runtimeFilterWaitTimeMs) {\n this.runtimeFilterWaitTimeMs = runtimeFilterWaitTimeMs;\n }\n\n public int getRuntimeFiltersMaxNum() {\n return runtimeFiltersMaxNum;\n }\n\n public void setRuntimeFiltersMaxNum(int runtimeFiltersMaxNum) {\n this.runtimeFiltersMaxNum = runtimeFiltersMaxNum;\n }\n\n public int getRuntimeFilterType() {\n return runtimeFilterType;\n }\n\n public boolean isRuntimeFilterTypeEnabled(TRuntimeFilterType type) {\n return (runtimeFilterType & type.getValue()) == type.getValue();\n }\n\n public void setRuntimeFilterType(int runtimeFilterType) {\n this.runtimeFilterType = runtimeFilterType;\n }\n\n public int getRuntimeFilterMaxInNum() {\n return runtimeFilterMaxInNum;\n }\n\n public void setRuntimeFilterMaxInNum(int runtimeFilterMaxInNum) {\n this.runtimeFilterMaxInNum = runtimeFilterMaxInNum;\n }\n\n public void setEnablePipelineEngine(boolean enablePipelineEngine) {\n this.enablePipelineEngine = enablePipelineEngine;\n }\n\n public void setEnablePipelineXEngine(boolean enablePipelineXEngine) {\n this.enablePipelineXEngine = enablePipelineXEngine;\n }\n\n public boolean enablePushDownNoGroupAgg() {\n return enablePushDownNoGroupAgg;\n }\n\n public boolean getEnableFunctionPushdown() {\n return 
this.enableFunctionPushdown;\n }\n\n public boolean getForbidUnknownColStats() {\n return forbidUnknownColStats;\n }\n\n public void setForbidUnownColStats(boolean forbid) {\n forbidUnknownColStats = forbid;\n }\n\n public boolean getEnableLocalExchange() {\n return enableLocalExchange;\n }\n\n public boolean getEnableCboStatistics() {\n return enableCboStatistics;\n }\n\n public long getFileSplitSize() {\n return fileSplitSize;\n }\n\n public void setFileSplitSize(long fileSplitSize) {\n this.fileSplitSize = fileSplitSize;\n }\n\n public boolean isEnableParquetLazyMat() {\n return enableParquetLazyMat;\n }\n\n public void setEnableParquetLazyMat(boolean enableParquetLazyMat) {\n this.enableParquetLazyMat = enableParquetLazyMat;\n }\n\n public boolean isEnableOrcLazyMat() {\n return enableOrcLazyMat;\n }\n\n public void setEnableOrcLazyMat(boolean enableOrcLazyMat) {\n this.enableOrcLazyMat = enableOrcLazyMat;\n }\n\n\n /**\n * getInsertVisibleTimeoutMs.\n **/\n public long getInsertVisibleTimeoutMs() {\n if (insertVisibleTimeoutMs < MIN_INSERT_VISIBLE_TIMEOUT_MS) {\n return MIN_INSERT_VISIBLE_TIMEOUT_MS;\n } else {\n return insertVisibleTimeoutMs;\n }\n }\n\n /**\n * setInsertVisibleTimeoutMs.\n **/\n public void setInsertVisibleTimeoutMs(long insertVisibleTimeoutMs) {\n if (insertVisibleTimeoutMs < MIN_INSERT_VISIBLE_TIMEOUT_MS) {\n this.insertVisibleTimeoutMs = MIN_INSERT_VISIBLE_TIMEOUT_MS;\n } else {\n this.insertVisibleTimeoutMs = insertVisibleTimeoutMs;\n }\n }\n\n public boolean getIsSingleSetVar() {\n return isSingleSetVar;\n }\n\n public void setIsSingleSetVar(boolean issinglesetvar) {\n this.isSingleSetVar = issinglesetvar;\n }\n\n public Map getSessionOriginValue() {\n return sessionOriginValue;\n }\n\n public void addSessionOriginValue(Field key, String value) {\n if (sessionOriginValue.containsKey(key)) {\n \n return;\n }\n sessionOriginValue.put(key, value);\n }\n\n public void clearSessionOriginValue() {\n sessionOriginValue.clear();\n }\n\n public 
boolean isDeleteWithoutPartition() {\n return deleteWithoutPartition;\n }\n\n public boolean isExtractWideRangeExpr() {\n return extractWideRangeExpr;\n }\n\n public boolean isGroupByAndHavingUseAliasFirst() {\n return groupByAndHavingUseAliasFirst;\n }\n\n public int getCpuResourceLimit() {\n return cpuResourceLimit;\n }\n\n public int getSendBatchParallelism() {\n return sendBatchParallelism;\n }\n\n public boolean isEnableParallelOutfile() {\n return enableParallelOutfile;\n }\n\n public boolean isDisableJoinReorder() {\n return disableJoinReorder;\n }\n\n public boolean isEnableBushyTree() {\n return enableBushyTree;\n }\n\n public void setEnableBushyTree(boolean enableBushyTree) {\n this.enableBushyTree = enableBushyTree;\n }\n\n public boolean isEnablePartitionTopN() {\n return enablePartitionTopN;\n }\n\n public void setEnablePartitionTopN(boolean enablePartitionTopN) {\n this.enablePartitionTopN = enablePartitionTopN;\n }\n\n public boolean isEnableFoldNondeterministicFn() {\n return enableFoldNondeterministicFn;\n }\n\n public void setEnableFoldNondeterministicFn(boolean enableFoldNondeterministicFn) {\n this.enableFoldNondeterministicFn = enableFoldNondeterministicFn;\n }\n\n public boolean isReturnObjectDataAsBinary() {\n return returnObjectDataAsBinary;\n }\n\n public void setReturnObjectDataAsBinary(boolean returnObjectDataAsBinary) {\n this.returnObjectDataAsBinary = returnObjectDataAsBinary;\n }\n\n public boolean isEnableInferPredicate() {\n return enableInferPredicate;\n }\n\n public void setEnableInferPredicate(boolean enableInferPredicate) {\n this.enableInferPredicate = enableInferPredicate;\n }\n\n public boolean isEnableProjection() {\n return enableProjection;\n }\n\n public boolean checkOverflowForDecimal() {\n return checkOverflowForDecimal;\n }\n\n public boolean isTrimTailingSpacesForExternalTableQuery() {\n return trimTailingSpacesForExternalTableQuery;\n }\n\n public void setTrimTailingSpacesForExternalTableQuery(boolean 
trimTailingSpacesForExternalTableQuery) {\n this.trimTailingSpacesForExternalTableQuery = trimTailingSpacesForExternalTableQuery;\n }\n\n public void setEnableJoinReorderBasedCost(boolean enableJoinReorderBasedCost) {\n this.enableJoinReorderBasedCost = enableJoinReorderBasedCost;\n }\n\n public void setDisableJoinReorder(boolean disableJoinReorder) {\n this.disableJoinReorder = disableJoinReorder;\n }\n\n /**\n * Nereids only support vectorized engine.\n *\n * @return true if both nereids and vectorized engine are enabled\n */\n public boolean isEnableNereidsPlanner() {\n return enableNereidsPlanner;\n }\n\n public void setEnableNereidsPlanner(boolean enableNereidsPlanner) {\n this.enableNereidsPlanner = enableNereidsPlanner;\n }\n\n public int getNthOptimizedPlan() {\n return nthOptimizedPlan;\n }\n\n public Set getDisableNereidsRuleNames() {\n return Arrays.stream(disableNereidsRules.split(\",[\\\\s]*\"))\n .map(rule -> rule.toUpperCase(Locale.ROOT))\n .collect(ImmutableSet.toImmutableSet());\n }\n\n public Set getDisableNereidsRules() {\n return Arrays.stream(disableNereidsRules.split(\",[\\\\s]*\"))\n .filter(rule -> !rule.isEmpty())\n .map(rule -> rule.toUpperCase(Locale.ROOT))\n .map(rule -> RuleType.valueOf(rule).type())\n .collect(ImmutableSet.toImmutableSet());\n }\n\n public void setEnableNewCostModel(boolean enable) {\n this.enableNewCostModel = enable;\n }\n\n public boolean getEnableNewCostModel() {\n return this.enableNewCostModel;\n }\n\n public void setDisableNereidsRules(String disableNereidsRules) {\n this.disableNereidsRules = disableNereidsRules;\n }\n\n public double getNereidsCboPenaltyFactor() {\n return nereidsCboPenaltyFactor;\n }\n\n public void setNereidsCboPenaltyFactor(double penaltyFactor) {\n this.nereidsCboPenaltyFactor = penaltyFactor;\n }\n\n public boolean isEnableNereidsTrace() {\n return isEnableNereidsPlanner() && enableNereidsTrace;\n }\n\n public boolean isEnableSingleReplicaInsert() {\n return enableSingleReplicaInsert;\n 
}\n\n public void setEnableSingleReplicaInsert(boolean enableSingleReplicaInsert) {\n this.enableSingleReplicaInsert = enableSingleReplicaInsert;\n }\n\n public boolean isEnableRuntimeFilterPrune() {\n return enableRuntimeFilterPrune;\n }\n\n public void setEnableRuntimeFilterPrune(boolean enableRuntimeFilterPrune) {\n this.enableRuntimeFilterPrune = enableRuntimeFilterPrune;\n }\n\n public void setFragmentTransmissionCompressionCodec(String codec) {\n this.fragmentTransmissionCompressionCodec = codec;\n }\n\n public boolean isEnableUnicodeNameSupport() {\n return enableUnicodeNameSupport;\n }\n\n public void setEnableUnicodeNameSupport(boolean enableUnicodeNameSupport) {\n this.enableUnicodeNameSupport = enableUnicodeNameSupport;\n }\n\n public boolean isDropTableIfCtasFailed() {\n return dropTableIfCtasFailed;\n }\n\n public void checkExternalSortBytesThreshold(String externalSortBytesThreshold) {\n long value = Long.valueOf(externalSortBytesThreshold);\n if (value > 0 && value < MIN_EXTERNAL_SORT_BYTES_THRESHOLD) {\n LOG.warn(\"external sort bytes threshold: {}, min: {}\", value, MIN_EXTERNAL_SORT_BYTES_THRESHOLD);\n throw new UnsupportedOperationException(\"minimum value is \" + MIN_EXTERNAL_SORT_BYTES_THRESHOLD);\n }\n }\n\n public void checkExternalAggBytesThreshold(String externalAggBytesThreshold) {\n long value = Long.valueOf(externalAggBytesThreshold);\n if (value > 0 && value < MIN_EXTERNAL_AGG_BYTES_THRESHOLD) {\n LOG.warn(\"external agg bytes threshold: {}, min: {}\", value, MIN_EXTERNAL_AGG_BYTES_THRESHOLD);\n throw new UnsupportedOperationException(\"minimum value is \" + MIN_EXTERNAL_AGG_BYTES_THRESHOLD);\n }\n }\n\n public void checkExternalAggPartitionBits(String externalAggPartitionBits) {\n int value = Integer.valueOf(externalAggPartitionBits);\n if (value < MIN_EXTERNAL_AGG_PARTITION_BITS || value > MAX_EXTERNAL_AGG_PARTITION_BITS) {\n LOG.warn(\"external agg bytes threshold: {}, min: {}, max: {}\",\n value, MIN_EXTERNAL_AGG_PARTITION_BITS, 
MAX_EXTERNAL_AGG_PARTITION_BITS);\n throw new UnsupportedOperationException(\"min value is \" + MIN_EXTERNAL_AGG_PARTITION_BITS + \" max value is \"\n + MAX_EXTERNAL_AGG_PARTITION_BITS);\n }\n }\n\n public boolean isEnableFileCache() {\n return enableFileCache;\n }\n\n public void setEnableFileCache(boolean enableFileCache) {\n this.enableFileCache = enableFileCache;\n }\n\n public String getFileCacheBasePath() {\n return fileCacheBasePath;\n }\n\n public void setFileCacheBasePath(String basePath) {\n this.fileCacheBasePath = basePath;\n }\n\n public boolean isEnableInvertedIndexQuery() {\n return enableInvertedIndexQuery;\n }\n\n public void setEnableInvertedIndexQuery(boolean enableInvertedIndexQuery) {\n this.enableInvertedIndexQuery = enableInvertedIndexQuery;\n }\n\n public boolean isEnablePushDownCountOnIndex() {\n return enablePushDownCountOnIndex;\n }\n\n public void setEnablePushDownCountOnIndex(boolean enablePushDownCountOnIndex) {\n this.enablePushDownCountOnIndex = enablePushDownCountOnIndex;\n }\n\n public int getMaxTableCountUseCascadesJoinReorder() {\n return this.maxTableCountUseCascadesJoinReorder;\n }\n\n public void setMaxTableCountUseCascadesJoinReorder(int maxTableCountUseCascadesJoinReorder) {\n this.maxTableCountUseCascadesJoinReorder =\n maxTableCountUseCascadesJoinReorder < MIN_JOIN_REORDER_TABLE_COUNT\n ? 
MIN_JOIN_REORDER_TABLE_COUNT\n : maxTableCountUseCascadesJoinReorder;\n }\n\n public boolean isShowUserDefaultRole() {\n return showUserDefaultRole;\n }\n\n public int getExternalTableAnalyzePartNum() {\n return externalTableAnalyzePartNum;\n }\n\n public boolean isTruncateCharOrVarcharColumns() {\n return truncateCharOrVarcharColumns;\n }\n\n public void setTruncateCharOrVarcharColumns(boolean truncateCharOrVarcharColumns) {\n this.truncateCharOrVarcharColumns = truncateCharOrVarcharColumns;\n }\n\n /**\n * Serialize to thrift object.\n * Used for rest api.\n */\n public TQueryOptions toThrift() {\n TQueryOptions tResult = new TQueryOptions();\n tResult.setMemLimit(maxExecMemByte);\n tResult.setScanQueueMemLimit(Math.min(maxScanQueueMemByte, maxExecMemByte / 20));\n\n \n tResult.setMinReservation(0);\n tResult.setMaxReservation(maxExecMemByte);\n tResult.setInitialReservationTotalClaims(maxExecMemByte);\n tResult.setBufferPoolLimit(maxExecMemByte);\n\n tResult.setQueryTimeout(queryTimeoutS);\n tResult.setEnableProfile(enableProfile);\n if (enableProfile) {\n \n \n \n tResult.setIsReportSuccess(true);\n }\n tResult.setCodegenLevel(codegenLevel);\n tResult.setBeExecVersion(Config.be_exec_version);\n tResult.setEnablePipelineEngine(enablePipelineEngine);\n tResult.setEnablePipelineXEngine(enablePipelineXEngine);\n tResult.setParallelInstance(getParallelExecInstanceNum());\n tResult.setReturnObjectDataAsBinary(returnObjectDataAsBinary);\n tResult.setTrimTailingSpacesForExternalTableQuery(trimTailingSpacesForExternalTableQuery);\n tResult.setEnableShareHashTableForBroadcastJoin(enableShareHashTableForBroadcastJoin);\n tResult.setEnableHashJoinEarlyStartProbe(enableHashJoinEarlyStartProbe);\n\n tResult.setBatchSize(batchSize);\n tResult.setDisableStreamPreaggregations(disableStreamPreaggregations);\n\n if (maxScanKeyNum > -1) {\n tResult.setMaxScanKeyNum(maxScanKeyNum);\n }\n if (maxPushdownConditionsPerColumn > -1) {\n 
tResult.setMaxPushdownConditionsPerColumn(maxPushdownConditionsPerColumn);\n }\n\n tResult.setEnableSpilling(enableSpilling);\n tResult.setEnableEnableExchangeNodeParallelMerge(enableExchangeNodeParallelMerge);\n\n tResult.setRuntimeFilterWaitTimeMs(runtimeFilterWaitTimeMs);\n tResult.setRuntimeFilterMaxInNum(runtimeFilterMaxInNum);\n\n if (cpuResourceLimit > 0) {\n TResourceLimit resourceLimit = new TResourceLimit();\n resourceLimit.setCpuLimit(cpuResourceLimit);\n tResult.setResourceLimit(resourceLimit);\n }\n\n tResult.setEnableFunctionPushdown(enableFunctionPushdown);\n tResult.setEnableCommonExprPushdown(enableCommonExprPushdown);\n tResult.setCheckOverflowForDecimal(checkOverflowForDecimal);\n tResult.setFragmentTransmissionCompressionCodec(fragmentTransmissionCompressionCodec);\n tResult.setEnableLocalExchange(enableLocalExchange);\n\n tResult.setSkipStorageEngineMerge(skipStorageEngineMerge);\n\n tResult.setSkipDeletePredicate(skipDeletePredicate);\n\n tResult.setSkipDeleteBitmap(skipDeleteBitmap);\n\n tResult.setPartitionedHashJoinRowsThreshold(partitionedHashJoinRowsThreshold);\n tResult.setPartitionedHashAggRowsThreshold(partitionedHashAggRowsThreshold);\n\n tResult.setRepeatMaxNum(repeatMaxNum);\n\n tResult.setExternalSortBytesThreshold(externalSortBytesThreshold);\n\n tResult.setExternalAggBytesThreshold(externalAggBytesThreshold);\n\n tResult.setExternalAggPartitionBits(externalAggPartitionBits);\n\n tResult.setEnableFileCache(enableFileCache);\n\n tResult.setFileCacheBasePath(fileCacheBasePath);\n\n tResult.setEnableInvertedIndexQuery(enableInvertedIndexQuery);\n\n if (dryRunQuery) {\n tResult.setDryRunQuery(true);\n }\n\n tResult.setEnableParquetLazyMat(enableParquetLazyMat);\n tResult.setEnableOrcLazyMat(enableOrcLazyMat);\n\n tResult.setEnableDeleteSubPredicateV2(enableDeleteSubPredicateV2);\n tResult.setTruncateCharOrVarcharColumns(truncateCharOrVarcharColumns);\n tResult.setEnableMemtableOnSinkNode(enableMemtableOnSinkNode);\n\n 
tResult.setInvertedIndexConjunctionOptThreshold(invertedIndexConjunctionOptThreshold);\n\n return tResult;\n }\n\n public JSONObject toJson() throws IOException {\n JSONObject root = new JSONObject();\n try {\n for (Field field : SessionVariable.class.getDeclaredFields()) {\n VarAttr attr = field.getAnnotation(VarAttr.class);\n if (attr == null) {\n continue;\n }\n switch (field.getType().getSimpleName()) {\n case \"boolean\":\n root.put(attr.name(), (Boolean) field.get(this));\n break;\n case \"int\":\n root.put(attr.name(), (Integer) field.get(this));\n break;\n case \"long\":\n root.put(attr.name(), (Long) field.get(this));\n break;\n case \"float\":\n root.put(attr.name(), (Float) field.get(this));\n break;\n case \"double\":\n root.put(attr.name(), (Double) field.get(this));\n break;\n case \"String\":\n root.put(attr.name(), (String) field.get(this));\n break;\n default:\n \n throw new IOException(\"invalid type: \" + field.getType().getSimpleName());\n }\n }\n } catch (Exception e) {\n throw new IOException(\"failed to write session variable: \" + e.getMessage());\n }\n return root;\n }\n\n @Override\n public void write(DataOutput out) throws IOException {\n JSONObject root = toJson();\n Text.writeString(out, root.toString());\n }\n\n\n public void readFields(DataInput in) throws IOException {\n String json = Text.readString(in);\n readFromJson(json);\n }\n\n public void readFromJson(String json) throws IOException {\n JSONObject root = (JSONObject) JSONValue.parse(json);\n try {\n for (Field field : SessionVariable.class.getDeclaredFields()) {\n VarAttr attr = field.getAnnotation(VarAttr.class);\n if (attr == null) {\n continue;\n }\n\n if (!root.containsKey(attr.name())) {\n continue;\n }\n\n switch (field.getType().getSimpleName()) {\n case \"boolean\":\n field.set(this, root.get(attr.name()));\n break;\n case \"int\":\n \n field.set(this, Integer.valueOf(root.get(attr.name()).toString()));\n break;\n case \"long\":\n field.set(this, (Long) 
root.get(attr.name()));\n break;\n case \"float\":\n field.set(this, root.get(attr.name()));\n break;\n case \"double\":\n field.set(this, root.get(attr.name()));\n break;\n case \"String\":\n field.set(this, root.get(attr.name()));\n break;\n default:\n \n throw new IOException(\"invalid type: \" + field.getType().getSimpleName());\n }\n }\n } catch (Exception e) {\n throw new IOException(\"failed to read session variable: \" + e.getMessage());\n }\n }\n\n /**\n * Get all variables which need to forward along with statement.\n **/\n public Map getForwardVariables() {\n HashMap map = new HashMap();\n try {\n Field[] fields = SessionVariable.class.getDeclaredFields();\n for (Field f : fields) {\n VarAttr varAttr = f.getAnnotation(VarAttr.class);\n if (varAttr == null || !varAttr.needForward()) {\n continue;\n }\n map.put(varAttr.name(), String.valueOf(f.get(this)));\n }\n } catch (IllegalAccessException e) {\n LOG.error(\"failed to get forward variables\", e);\n }\n return map;\n }\n\n /**\n * Set forwardedSessionVariables for variables.\n **/\n public void setForwardedSessionVariables(Map variables) {\n try {\n Field[] fields = SessionVariable.class.getFields();\n for (Field f : fields) {\n VarAttr varAttr = f.getAnnotation(VarAttr.class);\n if (varAttr == null || !varAttr.needForward()) {\n continue;\n }\n String val = variables.get(varAttr.name());\n if (val == null) {\n continue;\n }\n\n LOG.debug(\"set forward variable: {} = {}\", varAttr.name(), val);\n\n \n switch (f.getType().getSimpleName()) {\n case \"short\":\n f.setShort(this, Short.parseShort(val));\n break;\n case \"int\":\n f.setInt(this, Integer.parseInt(val));\n break;\n case \"long\":\n f.setLong(this, Long.parseLong(val));\n break;\n case \"double\":\n f.setDouble(this, Double.parseDouble(val));\n break;\n case \"boolean\":\n f.setBoolean(this, Boolean.parseBoolean(val));\n break;\n case \"String\":\n f.set(this, val);\n break;\n default:\n throw new IllegalArgumentException(\"Unknown field type: 
\" + f.getType().getSimpleName());\n }\n }\n } catch (IllegalAccessException e) {\n LOG.error(\"failed to set forward variables\", e);\n }\n }\n\n /**\n * Set forwardedSessionVariables for queryOptions.\n **/\n public void setForwardedSessionVariables(TQueryOptions queryOptions) {\n if (queryOptions.isSetMemLimit()) {\n setMaxExecMemByte(queryOptions.getMemLimit());\n }\n if (queryOptions.isSetQueryTimeout()) {\n setQueryTimeoutS(queryOptions.getQueryTimeout());\n }\n if (queryOptions.isSetInsertTimeout()) {\n setInsertTimeoutS(queryOptions.getInsertTimeout());\n }\n }\n\n /**\n * Get all variables which need to be set in TQueryOptions.\n **/\n public TQueryOptions getQueryOptionVariables() {\n TQueryOptions queryOptions = new TQueryOptions();\n queryOptions.setMemLimit(maxExecMemByte);\n queryOptions.setScanQueueMemLimit(Math.min(maxScanQueueMemByte, maxExecMemByte / 20));\n queryOptions.setQueryTimeout(queryTimeoutS);\n queryOptions.setInsertTimeout(insertTimeoutS);\n return queryOptions;\n }\n\n /**\n * The sessionContext is as follows:\n * \"k1:v1;k2:v2;...\"\n * Here we want to get value with key named \"trace_id\",\n * Return empty string is not found.\n *\n * @return\n */\n public String getTraceId() {\n if (Strings.isNullOrEmpty(sessionContext)) {\n return \"\";\n }\n String[] parts = sessionContext.split(\";\");\n for (String part : parts) {\n String[] innerParts = part.split(\":\");\n if (innerParts.length != 2) {\n continue;\n }\n if (innerParts[0].equals(\"trace_id\")) {\n return innerParts[1];\n }\n }\n return \"\";\n }\n\n public boolean isEnableMinidump() {\n return enableMinidump;\n }\n\n public void setEnableMinidump(boolean enableMinidump) {\n this.enableMinidump = enableMinidump;\n }\n\n public String getMinidumpPath() {\n return minidumpPath;\n }\n\n public void setMinidumpPath(String minidumpPath) {\n this.minidumpPath = minidumpPath;\n }\n\n public boolean isTraceNereids() {\n return traceNereids;\n }\n\n public void setTraceNereids(boolean 
traceNereids) {\n this.traceNereids = traceNereids;\n }\n\n public boolean isPlayNereidsDump() {\n return planNereidsDump;\n }\n\n public void setPlanNereidsDump(boolean planNereidsDump) {\n this.planNereidsDump = planNereidsDump;\n }\n\n public boolean isDumpNereidsMemo() {\n return dumpNereidsMemo;\n }\n\n public void setDumpNereidsMemo(boolean dumpNereidsMemo) {\n this.dumpNereidsMemo = dumpNereidsMemo;\n }\n\n public void enableFallbackToOriginalPlannerOnce() throws DdlException {\n if (enableFallbackToOriginalPlanner) {\n return;\n }\n setIsSingleSetVar(true);\n VariableMgr.setVar(this,\n new SetVar(SessionVariable.ENABLE_FALLBACK_TO_ORIGINAL_PLANNER, new StringLiteral(\"true\")));\n }\n\n public void disableNereidsPlannerOnce() throws DdlException {\n if (!enableNereidsPlanner) {\n return;\n }\n setIsSingleSetVar(true);\n VariableMgr.setVar(this, new SetVar(SessionVariable.ENABLE_NEREIDS_PLANNER, new StringLiteral(\"false\")));\n }\n\n \n public int getVariableNumByVariableAnnotation(VariableAnnotation type) {\n int num = 0;\n Field[] fields = SessionVariable.class.getDeclaredFields();\n for (Field f : fields) {\n VarAttr varAttr = f.getAnnotation(VarAttr.class);\n if (varAttr == null) {\n continue;\n }\n if (varAttr.varType() == type) {\n ++num;\n }\n }\n return num;\n }\n\n public boolean isEnableUnifiedLoad() {\n return enableUnifiedLoad;\n }\n\n public boolean getEnablePipelineEngine() {\n return enablePipelineEngine || enablePipelineXEngine;\n }\n\n public boolean getEnablePipelineXEngine() {\n return enablePipelineXEngine;\n }\n\n public static boolean enablePipelineEngine() {\n ConnectContext connectContext = ConnectContext.get();\n if (connectContext == null) {\n return false;\n }\n return connectContext.getSessionVariable().enablePipelineEngine\n || connectContext.getSessionVariable().enablePipelineXEngine;\n }\n\n public static boolean enableAggState() {\n ConnectContext connectContext = ConnectContext.get();\n if (connectContext == null) {\n return 
true;\n }\n return connectContext.getSessionVariable().enableAggState;\n }\n\n public void checkAnalyzeTimeFormat(String time) {\n try {\n DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern(\"HH:mm:ss\");\n timeFormatter.parse(time);\n } catch (DateTimeParseException e) {\n LOG.warn(\"Parse analyze start/end time format fail\", e);\n throw new UnsupportedOperationException(\"Expect format: HH:mm:ss\");\n }\n }\n}" }, { "comment": "I hardcoded it because some previous versions didn't work (it was impossible to create a stream using kinesis sdk) without any order. It can be changed to \"latest\" but then it's possible that the test will stop working after the localstack image update.", "method_body": "private static void setupLocalstack() throws Exception {\n System.setProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY, \"true\");\n System.setProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY, \"true\");\n\n \n now = Instant.ofEpochMilli(Long.divideUnsigned(Instant.now().getMillis(), 1000));\n\n localstackContainer =\n new LocalStackContainer(\"0.11.3\")\n .withServices(LocalStackContainer.Service.KINESIS)\n .withEnv(\"USE_SSL\", \"true\")\n .withStartupAttempts(3);\n localstackContainer.start();\n\n options.setAwsServiceEndpoint(\n localstackContainer\n .getEndpointConfiguration(LocalStackContainer.Service.KINESIS)\n .getServiceEndpoint()\n .replace(\"http\", \"https\"));\n options.setAwsKinesisRegion(\n localstackContainer\n .getEndpointConfiguration(LocalStackContainer.Service.KINESIS)\n .getSigningRegion());\n options.setAwsAccessKey(\n localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSAccessKeyId());\n options.setAwsSecretKey(\n localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSSecretKey());\n options.setNumberOfRecords(1000);\n options.setNumberOfShards(1);\n options.setAwsKinesisStream(\"beam_kinesis_test\");\n options.setAwsVerifyCertificate(false);\n 
createStream(options.getAwsKinesisStream());\n }", "target_code": "new LocalStackContainer(\"0.11.3\")", "method_body_after": "private static void setupLocalstack() {\n \n now = Instant.ofEpochMilli(Long.divideUnsigned(now.getMillis(), 1000L));\n\n System.setProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY, \"true\");\n System.setProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY, \"true\");\n\n localstackContainer =\n new LocalStackContainer(LOCALSTACK_VERSION)\n .withServices(LocalStackContainer.Service.KINESIS)\n .withEnv(\"USE_SSL\", \"true\")\n .withStartupAttempts(3);\n localstackContainer.start();\n\n options.setAwsServiceEndpoint(\n localstackContainer\n .getEndpointConfiguration(LocalStackContainer.Service.KINESIS)\n .getServiceEndpoint()\n .replace(\"http\", \"https\"));\n options.setAwsKinesisRegion(\n localstackContainer\n .getEndpointConfiguration(LocalStackContainer.Service.KINESIS)\n .getSigningRegion());\n options.setAwsAccessKey(\n localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSAccessKeyId());\n options.setAwsSecretKey(\n localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSSecretKey());\n options.setNumberOfRecords(1000);\n options.setNumberOfShards(1);\n options.setAwsKinesisStream(\"beam_kinesis_test\");\n options.setAwsVerifyCertificate(false);\n }", "context_before": "class KinesisIOIT implements Serializable {\n @Rule public TestPipeline pipelineWrite = TestPipeline.create();\n @Rule public TestPipeline pipelineRead = TestPipeline.create();\n\n private static LocalStackContainer localstackContainer;\n\n private static KinesisTestOptions options;\n private static Instant now = Instant.now();\n\n @BeforeClass\n public static void setup() throws Exception {\n PipelineOptionsFactory.register(KinesisTestOptions.class);\n options = TestPipeline.testingPipelineOptions().as(KinesisTestOptions.class);\n if (doUseLocalstack()) {\n setupLocalstack();\n }\n }\n\n 
@AfterClass\n public static void teardown() {\n if (doUseLocalstack()) {\n System.clearProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY);\n System.clearProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY);\n localstackContainer.stop();\n }\n }\n\n /** Test which write and then read data for a Kinesis stream. */\n @Test\n public void testWriteThenRead() {\n runWrite();\n runRead();\n }\n\n /** Write test dataset into Kinesis stream. */\n private void runWrite() {\n pipelineWrite\n .apply(\"Generate Sequence\", GenerateSequence.from(0).to(options.getNumberOfRecords()))\n .apply(\"Prepare TestRows\", ParDo.of(new TestRow.DeterministicallyConstructTestRowFn()))\n .apply(\"Prepare Kinesis input records\", ParDo.of(new ConvertToBytes()))\n .apply(\n \"Write to Kinesis\",\n KinesisIO.write()\n .withStreamName(options.getAwsKinesisStream())\n .withPartitioner(new RandomPartitioner())\n .withAWSClientsProvider(\n options.getAwsAccessKey(),\n options.getAwsSecretKey(),\n Regions.fromName(options.getAwsKinesisRegion()),\n options.getAwsServiceEndpoint(),\n options.getAwsVerifyCertificate()));\n\n pipelineWrite.run().waitUntilFinish();\n }\n\n /** Read test dataset from Kinesis stream. 
*/\n private void runRead() {\n PCollection output =\n pipelineRead.apply(\n KinesisIO.read()\n .withStreamName(options.getAwsKinesisStream())\n .withAWSClientsProvider(\n options.getAwsAccessKey(),\n options.getAwsSecretKey(),\n Regions.fromName(options.getAwsKinesisRegion()),\n options.getAwsServiceEndpoint(),\n options.getAwsVerifyCertificate())\n .withMaxNumRecords(options.getNumberOfRecords())\n \n .withMaxReadTime(Duration.standardMinutes(10L))\n .withInitialPositionInStream(InitialPositionInStream.AT_TIMESTAMP)\n .withInitialTimestampInStream(now)\n .withRequestRecordsLimit(1000));\n\n PAssert.thatSingleton(output.apply(\"Count All\", Count.globally()))\n .isEqualTo((long) options.getNumberOfRecords());\n\n PCollection consolidatedHashcode =\n output\n .apply(ParDo.of(new ExtractDataValues()))\n .apply(\"Hash row contents\", Combine.globally(new HashingFn()).withoutDefaults());\n\n PAssert.that(consolidatedHashcode)\n .containsInAnyOrder(TestRow.getExpectedHashForRowCount(options.getNumberOfRecords()));\n\n pipelineRead.run().waitUntilFinish();\n }\n\n /** Necessary setup for localstack environment. */\n \n\n private static void createStream(String streamName) throws Exception {\n AmazonKinesisClientBuilder clientBuilder = AmazonKinesisClientBuilder.standard();\n\n clientBuilder.setCredentials(localstackContainer.getDefaultCredentialsProvider());\n clientBuilder.setEndpointConfiguration(\n localstackContainer.getEndpointConfiguration(LocalStackContainer.Service.KINESIS));\n\n AmazonKinesis client = clientBuilder.build();\n\n client.createStream(streamName, 1);\n int repeats = 10;\n for (int i = 0; i <= repeats; ++i) {\n String streamStatus =\n client.describeStream(streamName).getStreamDescription().getStreamStatus();\n if (\"ACTIVE\".equals(streamStatus)) {\n break;\n }\n if (i == repeats) {\n throw new RuntimeException(\"Unable to initialize stream\");\n }\n Thread.sleep(1000L);\n }\n }\n\n /** Check whether pipeline options were provided. 
If not, use localstack container. */\n private static boolean doUseLocalstack() {\n return \"aws-access-key\".equals(options.getAwsAccessKey())\n && \"aws-secret-key\".equals(options.getAwsSecretKey())\n && \"aws-kinesis-stream\".equals(options.getAwsKinesisStream())\n && \"aws-kinesis-region\".equals(options.getAwsKinesisRegion())\n && options.getNumberOfShards() == 2\n && options.getNumberOfRecords() == 1000\n && options.getAwsServiceEndpoint() == null\n && options.getAwsVerifyCertificate();\n }\n\n /** Produces test rows. */\n private static class ConvertToBytes extends DoFn {\n @ProcessElement\n public void processElement(ProcessContext c) {\n c.output(String.valueOf(c.element().name()).getBytes(StandardCharsets.UTF_8));\n }\n }\n\n /** Read rows from Table. */\n private static class ExtractDataValues extends DoFn {\n @ProcessElement\n public void processElement(ProcessContext c) {\n c.output(new String(c.element().getDataAsBytes(), StandardCharsets.UTF_8));\n }\n }\n\n private static final class RandomPartitioner implements KinesisPartitioner {\n @Override\n public String getPartitionKey(byte[] value) {\n Random rand = new Random();\n int n = rand.nextInt(options.getNumberOfShards()) + 1;\n return String.valueOf(n);\n }\n\n @Override\n public String getExplicitHashKey(byte[] value) {\n return null;\n }\n }\n}", "context_after": "class KinesisIOIT implements Serializable {\n private static final String LOCALSTACK_VERSION = \"0.11.3\";\n\n @Rule public TestPipeline pipelineWrite = TestPipeline.create();\n @Rule public TestPipeline pipelineRead = TestPipeline.create();\n\n private static KinesisTestOptions options;\n\n private static AmazonKinesis kinesisClient;\n private static LocalStackContainer localstackContainer;\n private static Instant now = Instant.now();\n\n @BeforeClass\n public static void setup() throws Exception {\n PipelineOptionsFactory.register(KinesisTestOptions.class);\n options = 
TestPipeline.testingPipelineOptions().as(KinesisTestOptions.class);\n if (options.getUseLocalstack()) {\n setupLocalstack();\n kinesisClient = createKinesisClient();\n createStream(options.getAwsKinesisStream());\n }\n }\n\n @AfterClass\n public static void teardown() {\n if (options.getUseLocalstack()) {\n kinesisClient.deleteStream(options.getAwsKinesisStream());\n System.clearProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY);\n System.clearProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY);\n localstackContainer.stop();\n }\n }\n\n /** Test which write and then read data for a Kinesis stream. */\n @Test\n public void testWriteThenRead() {\n runWrite();\n runRead();\n }\n\n /** Write test dataset into Kinesis stream. */\n private void runWrite() {\n pipelineWrite\n .apply(\"Generate Sequence\", GenerateSequence.from(0).to(options.getNumberOfRecords()))\n .apply(\"Prepare TestRows\", ParDo.of(new TestRow.DeterministicallyConstructTestRowFn()))\n .apply(\"Prepare Kinesis input records\", ParDo.of(new ConvertToBytes()))\n .apply(\n \"Write to Kinesis\",\n KinesisIO.write()\n .withStreamName(options.getAwsKinesisStream())\n .withPartitioner(new RandomPartitioner())\n .withAWSClientsProvider(\n options.getAwsAccessKey(),\n options.getAwsSecretKey(),\n Regions.fromName(options.getAwsKinesisRegion()),\n options.getAwsServiceEndpoint(),\n options.getAwsVerifyCertificate()));\n\n pipelineWrite.run().waitUntilFinish();\n }\n\n /** Read test dataset from Kinesis stream. 
*/\n private void runRead() {\n PCollection output =\n pipelineRead.apply(\n KinesisIO.read()\n .withStreamName(options.getAwsKinesisStream())\n .withAWSClientsProvider(\n options.getAwsAccessKey(),\n options.getAwsSecretKey(),\n Regions.fromName(options.getAwsKinesisRegion()),\n options.getAwsServiceEndpoint(),\n options.getAwsVerifyCertificate())\n .withMaxNumRecords(options.getNumberOfRecords())\n \n .withMaxReadTime(Duration.standardMinutes(10L))\n .withInitialPositionInStream(InitialPositionInStream.AT_TIMESTAMP)\n .withInitialTimestampInStream(now)\n .withRequestRecordsLimit(1000));\n\n PAssert.thatSingleton(output.apply(\"Count All\", Count.globally()))\n .isEqualTo((long) options.getNumberOfRecords());\n\n PCollection consolidatedHashcode =\n output\n .apply(ParDo.of(new ExtractDataValues()))\n .apply(\"Hash row contents\", Combine.globally(new HashingFn()).withoutDefaults());\n\n PAssert.that(consolidatedHashcode)\n .containsInAnyOrder(TestRow.getExpectedHashForRowCount(options.getNumberOfRecords()));\n\n pipelineRead.run().waitUntilFinish();\n }\n\n /** Necessary setup for localstack environment. 
*/\n \n\n private static AmazonKinesis createKinesisClient() {\n AmazonKinesisClientBuilder clientBuilder = AmazonKinesisClientBuilder.standard();\n\n AWSCredentialsProvider credentialsProvider =\n new AWSStaticCredentialsProvider(\n new BasicAWSCredentials(options.getAwsAccessKey(), options.getAwsSecretKey()));\n clientBuilder.setCredentials(credentialsProvider);\n\n if (options.getAwsServiceEndpoint() != null) {\n AwsClientBuilder.EndpointConfiguration endpointConfiguration =\n new AwsClientBuilder.EndpointConfiguration(\n options.getAwsServiceEndpoint(), options.getAwsKinesisRegion());\n clientBuilder.setEndpointConfiguration(endpointConfiguration);\n } else {\n clientBuilder.setRegion(options.getAwsKinesisRegion());\n }\n\n return clientBuilder.build();\n }\n\n private static void createStream(String streamName) throws Exception {\n kinesisClient.createStream(streamName, 1);\n int repeats = 10;\n for (int i = 0; i <= repeats; ++i) {\n String streamStatus =\n kinesisClient.describeStream(streamName).getStreamDescription().getStreamStatus();\n if (\"ACTIVE\".equals(streamStatus)) {\n break;\n }\n if (i == repeats) {\n throw new RuntimeException(\"Unable to initialize stream\");\n }\n Thread.sleep(1000L);\n }\n }\n\n /** Produces test rows. */\n private static class ConvertToBytes extends DoFn {\n @ProcessElement\n public void processElement(ProcessContext c) {\n c.output(String.valueOf(c.element().name()).getBytes(StandardCharsets.UTF_8));\n }\n }\n\n /** Read rows from Table. 
*/\n private static class ExtractDataValues extends DoFn {\n @ProcessElement\n public void processElement(ProcessContext c) {\n c.output(new String(c.element().getDataAsBytes(), StandardCharsets.UTF_8));\n }\n }\n\n private static final class RandomPartitioner implements KinesisPartitioner {\n @Override\n public String getPartitionKey(byte[] value) {\n Random rand = new Random();\n int n = rand.nextInt(options.getNumberOfShards()) + 1;\n return String.valueOf(n);\n }\n\n @Override\n public String getExplicitHashKey(byte[] value) {\n return null;\n }\n }\n}" }, { "comment": "Does \"if (index > lastIndex)\" condition not handle capacity?", "method_body": "private void unshiftArray(long index, int unshiftByN, int arrLength) {\n int lastIndex = size() + unshiftByN - 1;\n prepareForConsecutiveMultiAdd(lastIndex, arrLength);\n if (index > lastIndex) {\n throw BLangExceptionHelper.getRuntimeException(\n getModulePrefixedReason(ARRAY_LANG_LIB, INDEX_OUT_OF_RANGE_ERROR_IDENTIFIER),\n RuntimeErrors.INDEX_NUMBER_TOO_LARGE, index);\n }\n int i = (int) index;\n ensureCapacity(this.size + unshiftByN, this.size);\n Object arr = getArrayFromType(elementType.getTag());\n System.arraycopy(arr, i, arr, i + unshiftByN, this.size - i);\n }", "target_code": "ensureCapacity(this.size + unshiftByN, this.size);", "method_body_after": "private void unshiftArray(long index, int unshiftByN, int arrLength) {\n int lastIndex = size() + unshiftByN - 1;\n prepareForConsecutiveMultiAdd(lastIndex, arrLength);\n if (index > lastIndex) {\n throw BLangExceptionHelper.getRuntimeException(\n getModulePrefixedReason(ARRAY_LANG_LIB, INDEX_OUT_OF_RANGE_ERROR_IDENTIFIER),\n RuntimeErrors.INDEX_NUMBER_TOO_LARGE, index);\n }\n int i = (int) index;\n ensureCapacity(this.size + unshiftByN, this.size);\n Object arr = getArrayFromType(elementType.getTag());\n System.arraycopy(arr, i, arr, i + unshiftByN, this.size - i);\n }", "context_before": "class ArrayValueImpl extends AbstractArrayValue {\n\n protected 
ArrayType arrayType;\n protected Type elementType;\n private TypedescValue elementTypedescValue = null;\n\n protected Object[] refValues;\n private long[] intValues;\n private boolean[] booleanValues;\n private byte[] byteValues;\n private double[] floatValues;\n private BString[] bStringValues;\n private BTypedesc typedesc;\n \n\n public ArrayValueImpl(Object[] values, ArrayType type) {\n this.refValues = values;\n this.arrayType = type;\n this.size = values.length;\n if (type.getTag() == TypeTags.ARRAY_TAG) {\n this.elementType = type.getElementType();\n }\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(long[] values, boolean readonly) {\n this.intValues = values;\n this.size = values.length;\n setArrayType(PredefinedTypes.TYPE_INT, readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(boolean[] values, boolean readonly) {\n this.booleanValues = values;\n this.size = values.length;\n setArrayType(PredefinedTypes.TYPE_BOOLEAN, readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(byte[] values, boolean readonly) {\n this.byteValues = values;\n this.size = values.length;\n setArrayType(PredefinedTypes.TYPE_BYTE, readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(double[] values, boolean readonly) {\n this.floatValues = values;\n this.size = values.length;\n setArrayType(PredefinedTypes.TYPE_FLOAT, readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(String[] values, boolean readonly) {\n this.size = values.length;\n bStringValues = new BString[size];\n for (int i = 0; i < size; i++) {\n bStringValues[i] = StringUtils.fromString(values[i]);\n }\n setArrayType(PredefinedTypes.TYPE_STRING, readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(BString[] values, boolean readonly) {\n this.bStringValues = values;\n this.size = 
values.length;\n setArrayType(PredefinedTypes.TYPE_STRING, readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(ArrayType type) {\n this.arrayType = type;\n this.elementType = type.getElementType();\n initArrayValues(elementType);\n if (type.getState() == ArrayState.CLOSED) {\n this.size = maxSize = type.getSize();\n }\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n private void initArrayValues(Type elementType) {\n int initialArraySize = (arrayType.getSize() != -1) ? arrayType.getSize() : DEFAULT_ARRAY_SIZE;\n switch (elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n this.intValues = new long[initialArraySize];\n break;\n case TypeTags.FLOAT_TAG:\n this.floatValues = new double[initialArraySize];\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n this.bStringValues = new BString[initialArraySize];\n break;\n case TypeTags.BOOLEAN_TAG:\n this.booleanValues = new boolean[initialArraySize];\n break;\n case TypeTags.BYTE_TAG:\n this.byteValues = new byte[initialArraySize];\n break;\n default:\n this.refValues = new Object[initialArraySize];\n if (arrayType.getState() == ArrayState.CLOSED) {\n fillerValueCheck(initialArraySize, initialArraySize);\n fillValues(initialArraySize);\n }\n }\n }\n\n @Override\n public BTypedesc getTypedesc() {\n return typedesc;\n }\n\n @Override\n public Object reverse() {\n switch (elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n long temp = intValues[j];\n intValues[j] = intValues[i];\n 
intValues[i] = temp;\n }\n return intValues;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n BString temp = bStringValues[j];\n bStringValues[j] = bStringValues[i];\n bStringValues[i] = temp;\n }\n return bStringValues;\n case TypeTags.FLOAT_TAG:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n double temp = floatValues[j];\n floatValues[j] = floatValues[i];\n floatValues[i] = temp;\n }\n return floatValues;\n case TypeTags.BOOLEAN_TAG:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n boolean temp = booleanValues[j];\n booleanValues[j] = booleanValues[i];\n booleanValues[i] = temp;\n }\n return booleanValues;\n case TypeTags.BYTE_TAG:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n byte temp = byteValues[j];\n byteValues[j] = byteValues[i];\n byteValues[i] = temp;\n }\n return byteValues;\n default:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n Object temp = refValues[j];\n refValues[j] = refValues[i];\n refValues[i] = temp;\n }\n return refValues;\n }\n }\n\n public ArrayValueImpl(ArrayType type, long size) {\n this.arrayType = type;\n this.elementType = type.getElementType();\n initArrayValues(this.elementType);\n if (size != -1) {\n this.size = this.maxSize = (int) size;\n }\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(ArrayType type, long size, BListInitialValueEntry[] initialValues) {\n this(type, size, initialValues, null);\n }\n\n public ArrayValueImpl(ArrayType type, long size, BListInitialValueEntry[] initialValues,\n TypedescValue typedescValue) {\n this.arrayType = type;\n this.elementType = type.getElementType();\n this.elementTypedescValue = typedescValue;\n initArrayValues(this.elementType);\n if (size != -1) {\n this.size = this.maxSize = (int) size;\n }\n for (int index = 0; index < initialValues.length; index++) {\n addRefValue(index, ((ListInitialValueEntry.ExpressionEntry) 
initialValues[index]).value);\n }\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n \n\n /**\n * Get value in the given array index.\n * \n * @param index array index\n * @return array value\n */\n @Override\n public Object get(long index) {\n rangeCheckForGet(index, size);\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n return intValues[(int) index];\n case TypeTags.BOOLEAN_TAG:\n return booleanValues[(int) index];\n case TypeTags.BYTE_TAG:\n return Byte.toUnsignedInt(byteValues[(int) index]);\n case TypeTags.FLOAT_TAG:\n return floatValues[(int) index];\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n return bStringValues[(int) index];\n default:\n return refValues[(int) index];\n }\n }\n\n /**\n * Get ref value in the given index.\n *\n * @param index array index\n * @return array value\n */\n @Override\n public Object getRefValue(long index) {\n rangeCheckForGet(index, size);\n if (refValues != null) {\n return refValues[(int) index];\n }\n return get(index);\n }\n\n @Override\n public Object fillAndGetRefValue(long index) {\n if (refValues != null) {\n \n if (index >= this.size) {\n handleImmutableArrayValue();\n fillRead(index, refValues.length);\n }\n return refValues[(int) index];\n }\n return get(index);\n }\n\n /**\n * Get int value in the given index.\n *\n * @param index array index\n * @return array element\n */\n @Override\n public long getInt(long index) {\n rangeCheckForGet(index, size);\n if (intValues != null) {\n return intValues[(int) index];\n } else if (refValues != null) {\n return (Long) refValues[(int) index];\n }\n return Byte.toUnsignedInt(byteValues[(int) index]);\n }\n\n /**\n * Get boolean value in the given index.\n *\n * @param index array index\n * @return array element\n */\n 
@Override\n public boolean getBoolean(long index) {\n rangeCheckForGet(index, size);\n if (booleanValues != null) {\n return booleanValues[(int) index];\n }\n return (Boolean) refValues[(int) index];\n }\n\n /**\n * Get byte value in the given index.\n *\n * @param index array index\n * @return array element\n */\n @Override\n public byte getByte(long index) {\n rangeCheckForGet(index, size);\n if (byteValues != null) {\n return byteValues[(int) index];\n } else if (intValues != null) {\n return ((Long) intValues[(int) index]).byteValue();\n }\n return ((Long) refValues[(int) index]).byteValue();\n }\n\n /**\n * Get float value in the given index.\n *\n * @param index array index\n * @return array element\n */\n @Override\n public double getFloat(long index) {\n rangeCheckForGet(index, size);\n if (floatValues != null) {\n return floatValues[(int) index];\n }\n return (Double) refValues[(int) index];\n }\n\n /**\n * Get string value in the given index.\n *\n * @param index array index\n * @return array element\n */\n @Override\n @Deprecated\n public String getString(long index) {\n rangeCheckForGet(index, size);\n if (bStringValues != null) {\n return bStringValues[(int) index].getValue();\n }\n return (String) refValues[(int) index];\n }\n\n /**\n * Get string value in the given index.\n *\n * @param index array index\n * @return array element\n */\n @Override\n public BString getBString(long index) {\n rangeCheckForGet(index, size);\n if (bStringValues != null) {\n return bStringValues[(int) index];\n }\n return (BString) refValues[(int) index];\n }\n\n \n\n /**\n * Add ref value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, Object value) {\n handleImmutableArrayValue();\n addRefValue(index, value);\n }\n\n /**\n * Add int value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, 
long value) {\n handleImmutableArrayValue();\n addInt(index, value);\n }\n\n /**\n * Add boolean value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, boolean value) {\n handleImmutableArrayValue();\n addBoolean(index, value);\n }\n\n /**\n * Add byte value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, byte value) {\n handleImmutableArrayValue();\n addByte(index, value);\n }\n\n /**\n * Add double value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, double value) {\n handleImmutableArrayValue();\n addFloat(index, value);\n }\n\n /**\n * Add string value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Deprecated\n @Override\n public void add(long index, String value) {\n handleImmutableArrayValue();\n addString(index, value);\n }\n\n /**\n * Add string value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, BString value) {\n handleImmutableArrayValue();\n addBString(index, value);\n }\n\n public void addRefValue(long index, Object value) {\n Type type = TypeChecker.getType(value);\n switch (this.elementType.getTag()) {\n case TypeTags.BOOLEAN_TAG:\n prepareForAdd(index, value, type, booleanValues.length);\n this.booleanValues[(int) index] = (Boolean) value;\n return;\n case TypeTags.FLOAT_TAG:\n prepareForAdd(index, value, type, floatValues.length);\n this.floatValues[(int) index] = (Double) value;\n return;\n case TypeTags.BYTE_TAG:\n prepareForAdd(index, value, type, byteValues.length);\n this.byteValues[(int) index] = ((Number) value).byteValue();\n return;\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case 
TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n prepareForAdd(index, value, type, intValues.length);\n this.intValues[(int) index] = (Long) value;\n return;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n prepareForAdd(index, value, type, bStringValues.length);\n this.bStringValues[(int) index] = (BString) value;\n return;\n default:\n prepareForAdd(index, value, type, refValues.length);\n this.refValues[(int) index] = value;\n }\n }\n\n public void addInt(long index, long value) {\n if (intValues != null) {\n prepareForAdd(index, value, PredefinedTypes.TYPE_INT, intValues.length);\n intValues[(int) index] = value;\n return;\n }\n\n prepareForAdd(index, value, TypeChecker.getType(value), byteValues.length);\n byteValues[(int) index] = (byte) ((Long) value).intValue();\n }\n\n private void addBoolean(long index, boolean value) {\n prepareForAdd(index, value, PredefinedTypes.TYPE_BOOLEAN, booleanValues.length);\n booleanValues[(int) index] = value;\n }\n\n private void addByte(long index, byte value) {\n prepareForAdd(index, value, PredefinedTypes.TYPE_BYTE, byteValues.length);\n byteValues[(int) index] = value;\n }\n\n private void addFloat(long index, double value) {\n prepareForAdd(index, value, PredefinedTypes.TYPE_FLOAT, floatValues.length);\n floatValues[(int) index] = value;\n }\n\n @Deprecated\n private void addString(long index, String value) {\n addBString(index, StringUtils.fromString(value));\n }\n\n private void addBString(long index, BString value) {\n prepareForAdd(index, value, PredefinedTypes.TYPE_STRING, bStringValues.length);\n bStringValues[(int) index] = value;\n }\n\n \n\n /**\n * Append value to the existing array.\n *\n * @param value value to be appended\n */\n @Override\n public void append(Object value) {\n add(this.size, value);\n }\n\n @Override\n public Object shift(long index) {\n 
handleImmutableArrayValue();\n Object val = get(index);\n shiftArray((int) index, getArrayFromType(this.elementType.getTag()));\n return val;\n }\n\n /**\n * Removes and returns first member of an array.\n *\n * @return the value that was the first member of the array\n */\n @Override\n public Object shift() {\n return shift(0);\n }\n\n @Override\n public void unshift(Object[] values) {\n unshift(0, values);\n }\n\n @Override\n public String stringValue(BLink parent) {\n StringJoiner sj = new StringJoiner(\",\");\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(Long.toString(intValues[i]));\n }\n break;\n case TypeTags.BOOLEAN_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(Boolean.toString(booleanValues[i]));\n }\n break;\n case TypeTags.BYTE_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(Long.toString(Byte.toUnsignedLong(byteValues[i])));\n }\n break;\n case TypeTags.FLOAT_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(Double.toString(floatValues[i]));\n }\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(((BValue) (bStringValues[i])).informalStringValue(parent));\n }\n break;\n default:\n getRefValuesString(parent, sj);\n break;\n }\n return \"[\" + sj + \"]\";\n }\n\n private void getRefValuesString(BLink parent, StringJoiner sj) {\n for (int i = 0; i < size; i++) {\n if (refValues[i] == null) {\n sj.add(\"null\");\n } else {\n Type type = TypeChecker.getType(refValues[i]);\n switch (type.getTag()) {\n case TypeTags.STRING_TAG:\n case TypeTags.XML_TAG:\n case TypeTags.XML_ELEMENT_TAG:\n case TypeTags.XML_ATTRIBUTES_TAG:\n case TypeTags.XML_COMMENT_TAG:\n case TypeTags.XML_PI_TAG:\n case TypeTags.XMLNS_TAG:\n case 
TypeTags.XML_TEXT_TAG:\n sj.add(((BValue) (refValues[i])).informalStringValue(new CycleUtils\n .Node(this, parent)));\n break;\n default:\n sj.add(StringUtils.getStringValue(refValues[i], new CycleUtils.Node(this, parent)));\n break;\n }\n }\n }\n }\n\n @Override\n public String expressionStringValue(BLink parent) {\n StringJoiner sj = new StringJoiner(\",\");\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(StringUtils.getExpressionStringValue(intValues[i],\n new CycleUtils.Node(this, parent)));\n }\n break;\n case TypeTags.BOOLEAN_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(StringUtils.getExpressionStringValue(booleanValues[i],\n new CycleUtils.Node(this, parent)));\n }\n break;\n case TypeTags.BYTE_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(Long.toString(Byte.toUnsignedLong(byteValues[i])));\n }\n break;\n case TypeTags.FLOAT_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(StringUtils.getExpressionStringValue(floatValues[i],\n new CycleUtils.Node(this, parent)));\n }\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(StringUtils.getExpressionStringValue(bStringValues[i],\n new CycleUtils.Node(this, parent)));\n }\n break;\n default:\n for (int i = 0; i < size; i++) {\n sj.add(StringUtils.getExpressionStringValue(refValues[i],\n new CycleUtils.Node(this, parent)));\n }\n break;\n }\n return \"[\" + sj + \"]\";\n }\n\n @Override\n public Type getType() {\n return this.arrayType;\n }\n\n @Override\n public int size() {\n return this.size;\n }\n\n @Override\n public boolean isEmpty() {\n return this.size == 0;\n }\n\n @Override\n public Object copy(Map refs) {\n if (isFrozen()) {\n return this;\n }\n\n if (refs.containsKey(this)) 
{\n return refs.get(this);\n }\n\n ArrayValue valueArray;\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n valueArray = new ArrayValueImpl(Arrays.copyOf(intValues, this.size), arrayType.isReadOnly());\n break;\n case TypeTags.BOOLEAN_TAG:\n valueArray = new ArrayValueImpl(Arrays.copyOf(booleanValues, this.size), arrayType.isReadOnly());\n break;\n case TypeTags.BYTE_TAG:\n valueArray = new ArrayValueImpl(Arrays.copyOf(byteValues, this.size), arrayType.isReadOnly());\n break;\n case TypeTags.FLOAT_TAG:\n valueArray = new ArrayValueImpl(Arrays.copyOf(floatValues, this.size), arrayType.isReadOnly());\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n valueArray = new ArrayValueImpl(Arrays.copyOf(bStringValues, this.size), arrayType.isReadOnly());\n break;\n default:\n Object[] values = new Object[this.size];\n valueArray = new ArrayValueImpl(values, arrayType);\n IntStream.range(0, this.size).forEach(i -> {\n Object value = this.refValues[i];\n if (value instanceof RefValue) {\n values[i] = ((RefValue) value).copy(refs);\n } else {\n values[i] = value;\n }\n });\n break;\n }\n\n refs.put(this, valueArray);\n return valueArray;\n }\n\n @Override\n public Object frozenCopy(Map refs) {\n ArrayValue copy = (ArrayValue) copy(refs);\n if (!copy.isFrozen()) {\n copy.freezeDirect();\n }\n return copy;\n }\n\n /**\n * Return a subarray starting from `startIndex` (inclusive) to `endIndex` (exclusive).\n *\n * @param startIndex index of first member to include in the slice\n * @param endIndex index of first member not to include in the slice\n * @return array slice within specified range\n */\n public ArrayValueImpl slice(long startIndex, long endIndex) {\n ArrayValueImpl slicedArray;\n int slicedSize = (int) (endIndex - startIndex);\n 
switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n slicedArray = new ArrayValueImpl(new long[slicedSize], false);\n System.arraycopy(intValues, (int) startIndex, slicedArray.intValues, 0, slicedSize);\n break;\n case TypeTags.BOOLEAN_TAG:\n slicedArray = new ArrayValueImpl(new boolean[slicedSize], false);\n System.arraycopy(booleanValues, (int) startIndex, slicedArray.booleanValues, 0, slicedSize);\n break;\n case TypeTags.BYTE_TAG:\n slicedArray = new ArrayValueImpl(new byte[slicedSize], false);\n System.arraycopy(byteValues, (int) startIndex, slicedArray.byteValues, 0, slicedSize);\n break;\n case TypeTags.FLOAT_TAG:\n slicedArray = new ArrayValueImpl(new double[slicedSize], false);\n System.arraycopy(floatValues, (int) startIndex, slicedArray.floatValues, 0, slicedSize);\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n slicedArray = new ArrayValueImpl(new BString[slicedSize], false);\n System.arraycopy(bStringValues, (int) startIndex, slicedArray.bStringValues, 0, slicedSize);\n break;\n default:\n slicedArray = new ArrayValueImpl(new Object[slicedSize], new BArrayType(this.elementType));\n System.arraycopy(refValues, (int) startIndex, slicedArray.refValues, 0, slicedSize);\n break;\n }\n return slicedArray;\n }\n\n @Override\n public String toString() {\n return stringValue(null);\n }\n\n /**\n * Get ref values array.\n *\n * @return ref value array\n */\n @Override\n public Object[] getValues() {\n return refValues;\n }\n\n /**\n * Get a copy of byte array.\n *\n * @return byte array\n */\n @Override\n public byte[] getBytes() {\n byte[] bytes = new byte[this.size];\n System.arraycopy(byteValues, 0, bytes, 0, this.size);\n return bytes;\n }\n\n /**\n * Get a copy of string array.\n *\n * @return string array\n */\n 
@Override\n public String[] getStringArray() {\n String[] arr = new String[size];\n for (int i = 0; i < size; i++) {\n arr[i] = bStringValues[i].getValue();\n }\n return arr;\n }\n\n /**\n * Get a copy of int array.\n *\n * @return int array\n */\n @Override\n public long[] getIntArray() {\n return Arrays.copyOf(intValues, size);\n }\n\n @Override\n public boolean[] getBooleanArray() {\n return Arrays.copyOf(booleanValues, size);\n }\n\n @Override\n public byte[] getByteArray() {\n return Arrays.copyOf(byteValues, size);\n }\n\n @Override\n public double[] getFloatArray() {\n return Arrays.copyOf(floatValues, size);\n }\n\n @Override\n public void serialize(OutputStream outputStream) {\n if (this.elementType.getTag() == TypeTags.BYTE_TAG) {\n try {\n for (int i = 0; i < this.size; i++) {\n outputStream.write(this.byteValues[i]);\n }\n } catch (IOException e) {\n throw new BallerinaException(\"error occurred while writing the binary content to the output stream\", e);\n }\n } else {\n try {\n outputStream.write(this.toString().getBytes(Charset.defaultCharset()));\n } catch (IOException e) {\n throw new BallerinaException(\"error occurred while serializing data\", e);\n }\n }\n }\n\n /**\n * {@inheritDoc}\n */\n @Override\n public void freezeDirect() {\n if (arrayType.isReadOnly()) {\n return;\n }\n\n this.arrayType = (ArrayType) ReadOnlyUtils.setImmutableTypeAndGetEffectiveType(this.arrayType);\n if (this.elementType == null || this.elementType.getTag() > TypeTags.BOOLEAN_TAG) {\n for (int i = 0; i < this.size; i++) {\n Object value = this.getRefValue(i);\n if (value instanceof RefValue) {\n ((RefValue) value).freezeDirect();\n }\n }\n }\n this.typedesc = createSingletonTypedesc(this);\n }\n\n /**\n * {@inheritDoc}\n */\n @Override\n public IteratorValue getIterator() {\n return new ArrayIterator(this);\n }\n\n /**\n * Get {@code BType} of the array elements.\n *\n * @return element type\n */\n @Override\n public Type getElementType() {\n return this.elementType;\n 
}\n\n \n\n @Override\n protected void resizeInternalArray(int newLength) {\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n intValues = Arrays.copyOf(intValues, newLength);\n break;\n case TypeTags.BOOLEAN_TAG:\n booleanValues = Arrays.copyOf(booleanValues, newLength);\n break;\n case TypeTags.BYTE_TAG:\n byteValues = Arrays.copyOf(byteValues, newLength);\n break;\n case TypeTags.FLOAT_TAG:\n floatValues = Arrays.copyOf(floatValues, newLength);\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n bStringValues = Arrays.copyOf(bStringValues, newLength);\n break;\n default:\n refValues = Arrays.copyOf(refValues, newLength);\n break;\n }\n }\n\n @Override\n protected void fillValues(int index) {\n if (index <= this.size) {\n return;\n }\n\n switch (this.elementType.getTag()) {\n case TypeTags.STRING_TAG:\n Arrays.fill(bStringValues, size, index, RuntimeConstants.STRING_EMPTY_VALUE);\n return;\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n case TypeTags.BYTE_TAG:\n case TypeTags.FLOAT_TAG:\n case TypeTags.BOOLEAN_TAG:\n return;\n default:\n if (arrayType.hasFillerValue()) {\n if (elementTypedescValue != null) {\n extractRecordFillerValues(index);\n } else {\n extractComplexFillerValues(index);\n }\n }\n }\n }\n\n private void extractComplexFillerValues(int index) {\n for (int i = size; i < index; i++) {\n this.refValues[i] = this.elementType.getZeroValue();\n }\n }\n\n private void extractRecordFillerValues(int index) {\n for (int i = size; i < index; i++) {\n this.refValues[i] = elementTypedescValue.instantiate(Scheduler.getStrand());\n }\n 
}\n\n @Override\n protected void rangeCheckForGet(long index, int size) {\n rangeCheck(index, size);\n if (index >= size) {\n throw BLangExceptionHelper.getRuntimeException(\n getModulePrefixedReason(ARRAY_LANG_LIB, INDEX_OUT_OF_RANGE_ERROR_IDENTIFIER),\n RuntimeErrors.ARRAY_INDEX_OUT_OF_RANGE, index, size);\n }\n }\n\n @Override\n protected void rangeCheck(long index, int size) {\n if (index > Integer.MAX_VALUE || index < Integer.MIN_VALUE) {\n throw BLangExceptionHelper.getRuntimeException(\n getModulePrefixedReason(ARRAY_LANG_LIB, INDEX_OUT_OF_RANGE_ERROR_IDENTIFIER),\n RuntimeErrors.INDEX_NUMBER_TOO_LARGE, index);\n }\n\n if ((int) index < 0 || index >= maxSize) {\n throw BLangExceptionHelper.getRuntimeException(\n getModulePrefixedReason(ARRAY_LANG_LIB, INDEX_OUT_OF_RANGE_ERROR_IDENTIFIER),\n RuntimeErrors.ARRAY_INDEX_OUT_OF_RANGE, index, size);\n }\n }\n\n @Override\n protected void fillerValueCheck(int index, int size) {\n \n \n if (arrayType.hasFillerValue()) {\n return;\n }\n if (index > size) {\n throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.ILLEGAL_LIST_INSERTION_ERROR,\n RuntimeErrors.ILLEGAL_ARRAY_INSERTION, size, index + 1);\n }\n }\n\n @Override\n protected void ensureCapacity(int requestedCapacity, int currentArraySize) {\n if (requestedCapacity <= currentArraySize) {\n return;\n }\n\n if (this.arrayType.getState() != ArrayState.OPEN) {\n return;\n }\n\n \n int newArraySize = currentArraySize + (currentArraySize >> 1);\n\n \n newArraySize = Math.max(newArraySize, requestedCapacity);\n\n \n newArraySize = Math.min(newArraySize, maxSize);\n resizeInternalArray(newArraySize);\n }\n\n @Override\n protected void checkFixedLength(long length) {\n if (this.arrayType.getState() == ArrayState.CLOSED) {\n throw BLangExceptionHelper.getRuntimeException(\n getModulePrefixedReason(ARRAY_LANG_LIB, INHERENT_TYPE_VIOLATION_ERROR_IDENTIFIER),\n RuntimeErrors.ILLEGAL_ARRAY_SIZE, size, length);\n }\n }\n\n @Override\n protected void unshift(long 
index, Object[] vals) {\n handleImmutableArrayValue();\n unshiftArray(index, vals.length, getCurrentArrayLength());\n\n int startIndex = (int) index;\n int endIndex = startIndex + vals.length;\n\n for (int i = startIndex, j = 0; i < endIndex; i++, j++) {\n add(i, vals[j]);\n }\n }\n\n \n\n private void prepareForAdd(long index, Object value, Type sourceType, int currentArraySize) {\n \n if (!TypeChecker.checkIsType(null, value, sourceType, this.elementType)) {\n throw ErrorCreator.createError(getModulePrefixedReason(ARRAY_LANG_LIB,\n INHERENT_TYPE_VIOLATION_ERROR_IDENTIFIER), BLangExceptionHelper.getErrorDetails(\n RuntimeErrors.INCOMPATIBLE_TYPE, this.elementType, sourceType));\n }\n\n int intIndex = (int) index;\n rangeCheck(index, size);\n fillerValueCheck(intIndex, size);\n ensureCapacity(intIndex + 1, currentArraySize);\n fillValues(intIndex);\n resetSize(intIndex);\n }\n\n private void fillRead(long index, int currentArraySize) {\n if (!arrayType.hasFillerValue()) {\n throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.ILLEGAL_LIST_INSERTION_ERROR,\n RuntimeErrors.ILLEGAL_ARRAY_INSERTION, size, index + 1);\n }\n\n int intIndex = (int) index;\n rangeCheck(index, size);\n ensureCapacity(intIndex + 1, currentArraySize);\n\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.BYTE_TAG:\n case TypeTags.FLOAT_TAG:\n case TypeTags.BOOLEAN_TAG:\n break;\n case TypeTags.STRING_TAG:\n Arrays.fill(bStringValues, size, intIndex, RuntimeConstants.STRING_EMPTY_VALUE);\n break;\n default:\n for (int i = size; i <= index; i++) {\n this.refValues[i] = this.elementType.getZeroValue();\n }\n }\n\n resetSize(intIndex);\n }\n\n private void setArrayType(Type elementType, boolean readonly) {\n this.arrayType = new BArrayType(elementType, readonly);\n this.elementType = elementType;\n }\n\n private void resetSize(int index) {\n if (index >= size) {\n size = index + 1;\n }\n }\n\n private void shiftArray(int index, Object arr) {\n int 
nElemsToBeMoved = this.size - 1 - index;\n if (nElemsToBeMoved >= 0) {\n System.arraycopy(arr, index + 1, arr, index, nElemsToBeMoved);\n }\n this.size--;\n }\n\n \n\n private Object getArrayFromType(int typeTag) {\n switch (typeTag) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n return intValues;\n case TypeTags.BOOLEAN_TAG:\n return booleanValues;\n case TypeTags.BYTE_TAG:\n return byteValues;\n case TypeTags.FLOAT_TAG:\n return floatValues;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n return bStringValues;\n default:\n return refValues;\n }\n }\n\n private int getCurrentArrayLength() {\n switch (elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n return intValues.length;\n case TypeTags.BOOLEAN_TAG:\n return booleanValues.length;\n case TypeTags.BYTE_TAG:\n return byteValues.length;\n case TypeTags.FLOAT_TAG:\n return floatValues.length;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n return bStringValues.length;\n default:\n return refValues.length;\n }\n }\n\n @Override\n public int hashCode() {\n int result = Objects.hash(arrayType, elementType);\n result = 31 * result + Arrays.hashCode(refValues);\n result = 31 * result + Arrays.hashCode(intValues);\n result = 31 * result + Arrays.hashCode(booleanValues);\n result = 31 * result + Arrays.hashCode(byteValues);\n result = 31 * result + Arrays.hashCode(floatValues);\n result = 31 * result + Arrays.hashCode(bStringValues);\n return result;\n }\n}", "context_after": "class ArrayValueImpl extends AbstractArrayValue {\n\n protected ArrayType arrayType;\n protected Type elementType;\n private 
TypedescValue elementTypedescValue = null;\n\n protected Object[] refValues;\n private long[] intValues;\n private boolean[] booleanValues;\n private byte[] byteValues;\n private double[] floatValues;\n private BString[] bStringValues;\n private BTypedesc typedesc;\n \n\n public ArrayValueImpl(Object[] values, ArrayType type) {\n this.refValues = values;\n this.arrayType = type;\n this.size = values.length;\n if (type.getTag() == TypeTags.ARRAY_TAG) {\n this.elementType = type.getElementType();\n }\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(long[] values, boolean readonly) {\n this.intValues = values;\n this.size = values.length;\n setArrayType(PredefinedTypes.TYPE_INT, readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(boolean[] values, boolean readonly) {\n this.booleanValues = values;\n this.size = values.length;\n setArrayType(PredefinedTypes.TYPE_BOOLEAN, readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(byte[] values, boolean readonly) {\n this.byteValues = values;\n this.size = values.length;\n setArrayType(PredefinedTypes.TYPE_BYTE, readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(double[] values, boolean readonly) {\n this.floatValues = values;\n this.size = values.length;\n setArrayType(PredefinedTypes.TYPE_FLOAT, readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(String[] values, boolean readonly) {\n this.size = values.length;\n bStringValues = new BString[size];\n for (int i = 0; i < size; i++) {\n bStringValues[i] = StringUtils.fromString(values[i]);\n }\n setArrayType(PredefinedTypes.TYPE_STRING, readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(BString[] values, boolean readonly) {\n this.bStringValues = values;\n this.size = values.length;\n setArrayType(PredefinedTypes.TYPE_STRING, 
readonly);\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(ArrayType type) {\n this.arrayType = type;\n this.elementType = type.getElementType();\n initArrayValues(elementType);\n if (type.getState() == ArrayState.CLOSED) {\n this.size = maxSize = type.getSize();\n }\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n private void initArrayValues(Type elementType) {\n int initialArraySize = (arrayType.getSize() != -1) ? arrayType.getSize() : DEFAULT_ARRAY_SIZE;\n switch (elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n this.intValues = new long[initialArraySize];\n break;\n case TypeTags.FLOAT_TAG:\n this.floatValues = new double[initialArraySize];\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n this.bStringValues = new BString[initialArraySize];\n break;\n case TypeTags.BOOLEAN_TAG:\n this.booleanValues = new boolean[initialArraySize];\n break;\n case TypeTags.BYTE_TAG:\n this.byteValues = new byte[initialArraySize];\n break;\n default:\n this.refValues = new Object[initialArraySize];\n if (arrayType.getState() == ArrayState.CLOSED) {\n fillerValueCheck(initialArraySize, initialArraySize);\n fillValues(initialArraySize);\n }\n }\n }\n\n @Override\n public BTypedesc getTypedesc() {\n return typedesc;\n }\n\n @Override\n public Object reverse() {\n switch (elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n long temp = intValues[j];\n intValues[j] = intValues[i];\n intValues[i] = temp;\n }\n return intValues;\n case 
TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n BString temp = bStringValues[j];\n bStringValues[j] = bStringValues[i];\n bStringValues[i] = temp;\n }\n return bStringValues;\n case TypeTags.FLOAT_TAG:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n double temp = floatValues[j];\n floatValues[j] = floatValues[i];\n floatValues[i] = temp;\n }\n return floatValues;\n case TypeTags.BOOLEAN_TAG:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n boolean temp = booleanValues[j];\n booleanValues[j] = booleanValues[i];\n booleanValues[i] = temp;\n }\n return booleanValues;\n case TypeTags.BYTE_TAG:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n byte temp = byteValues[j];\n byteValues[j] = byteValues[i];\n byteValues[i] = temp;\n }\n return byteValues;\n default:\n for (int i = size - 1, j = 0; j < size / 2; i--, j++) {\n Object temp = refValues[j];\n refValues[j] = refValues[i];\n refValues[i] = temp;\n }\n return refValues;\n }\n }\n\n public ArrayValueImpl(ArrayType type, long size) {\n this.arrayType = type;\n this.elementType = type.getElementType();\n initArrayValues(this.elementType);\n if (size != -1) {\n this.size = this.maxSize = (int) size;\n }\n this.typedesc = getTypedescValue(arrayType, this);\n }\n\n public ArrayValueImpl(ArrayType type, long size, BListInitialValueEntry[] initialValues) {\n this(type, size, initialValues, null);\n }\n\n public ArrayValueImpl(ArrayType type, long size, BListInitialValueEntry[] initialValues,\n TypedescValue typedescValue) {\n this.arrayType = type;\n this.elementType = type.getElementType();\n this.elementTypedescValue = typedescValue;\n initArrayValues(this.elementType);\n if (size != -1) {\n this.size = this.maxSize = (int) size;\n }\n for (int index = 0; index < initialValues.length; index++) {\n addRefValue(index, ((ListInitialValueEntry.ExpressionEntry) initialValues[index]).value);\n }\n this.typedesc = 
getTypedescValue(arrayType, this);\n }\n\n \n\n /**\n * Get value in the given array index.\n * \n * @param index array index\n * @return array value\n */\n @Override\n public Object get(long index) {\n rangeCheckForGet(index, size);\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n return intValues[(int) index];\n case TypeTags.BOOLEAN_TAG:\n return booleanValues[(int) index];\n case TypeTags.BYTE_TAG:\n return Byte.toUnsignedInt(byteValues[(int) index]);\n case TypeTags.FLOAT_TAG:\n return floatValues[(int) index];\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n return bStringValues[(int) index];\n default:\n return refValues[(int) index];\n }\n }\n\n /**\n * Get ref value in the given index.\n *\n * @param index array index\n * @return array value\n */\n @Override\n public Object getRefValue(long index) {\n rangeCheckForGet(index, size);\n if (refValues != null) {\n return refValues[(int) index];\n }\n return get(index);\n }\n\n @Override\n public Object fillAndGetRefValue(long index) {\n if (refValues != null) {\n \n if (index >= this.size) {\n handleImmutableArrayValue();\n fillRead(index, refValues.length);\n }\n return refValues[(int) index];\n }\n return get(index);\n }\n\n /**\n * Get int value in the given index.\n *\n * @param index array index\n * @return array element\n */\n @Override\n public long getInt(long index) {\n rangeCheckForGet(index, size);\n if (intValues != null) {\n return intValues[(int) index];\n } else if (refValues != null) {\n return (Long) refValues[(int) index];\n }\n return Byte.toUnsignedInt(byteValues[(int) index]);\n }\n\n /**\n * Get boolean value in the given index.\n *\n * @param index array index\n * @return array element\n */\n @Override\n public boolean getBoolean(long index) {\n 
rangeCheckForGet(index, size);\n if (booleanValues != null) {\n return booleanValues[(int) index];\n }\n return (Boolean) refValues[(int) index];\n }\n\n /**\n * Get byte value in the given index.\n *\n * @param index array index\n * @return array element\n */\n @Override\n public byte getByte(long index) {\n rangeCheckForGet(index, size);\n if (byteValues != null) {\n return byteValues[(int) index];\n } else if (intValues != null) {\n return ((Long) intValues[(int) index]).byteValue();\n }\n return ((Long) refValues[(int) index]).byteValue();\n }\n\n /**\n * Get float value in the given index.\n *\n * @param index array index\n * @return array element\n */\n @Override\n public double getFloat(long index) {\n rangeCheckForGet(index, size);\n if (floatValues != null) {\n return floatValues[(int) index];\n }\n return (Double) refValues[(int) index];\n }\n\n /**\n * Get string value in the given index.\n *\n * @param index array index\n * @return array element\n */\n @Override\n @Deprecated\n public String getString(long index) {\n rangeCheckForGet(index, size);\n if (bStringValues != null) {\n return bStringValues[(int) index].getValue();\n }\n return (String) refValues[(int) index];\n }\n\n /**\n * Get string value in the given index.\n *\n * @param index array index\n * @return array element\n */\n @Override\n public BString getBString(long index) {\n rangeCheckForGet(index, size);\n if (bStringValues != null) {\n return bStringValues[(int) index];\n }\n return (BString) refValues[(int) index];\n }\n\n \n\n /**\n * Add ref value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, Object value) {\n handleImmutableArrayValue();\n addRefValue(index, value);\n }\n\n /**\n * Add int value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, long value) {\n handleImmutableArrayValue();\n 
addInt(index, value);\n }\n\n /**\n * Add boolean value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, boolean value) {\n handleImmutableArrayValue();\n addBoolean(index, value);\n }\n\n /**\n * Add byte value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, byte value) {\n handleImmutableArrayValue();\n addByte(index, value);\n }\n\n /**\n * Add double value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, double value) {\n handleImmutableArrayValue();\n addFloat(index, value);\n }\n\n /**\n * Add string value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Deprecated\n @Override\n public void add(long index, String value) {\n handleImmutableArrayValue();\n addString(index, value);\n }\n\n /**\n * Add string value to the given array index.\n *\n * @param index array index\n * @param value value to be added\n */\n @Override\n public void add(long index, BString value) {\n handleImmutableArrayValue();\n addBString(index, value);\n }\n\n public void addRefValue(long index, Object value) {\n Type type = TypeChecker.getType(value);\n switch (this.elementType.getTag()) {\n case TypeTags.BOOLEAN_TAG:\n prepareForAdd(index, value, type, booleanValues.length);\n this.booleanValues[(int) index] = (Boolean) value;\n return;\n case TypeTags.FLOAT_TAG:\n prepareForAdd(index, value, type, floatValues.length);\n this.floatValues[(int) index] = (Double) value;\n return;\n case TypeTags.BYTE_TAG:\n prepareForAdd(index, value, type, byteValues.length);\n this.byteValues[(int) index] = ((Number) value).byteValue();\n return;\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case 
TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n prepareForAdd(index, value, type, intValues.length);\n this.intValues[(int) index] = (Long) value;\n return;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n prepareForAdd(index, value, type, bStringValues.length);\n this.bStringValues[(int) index] = (BString) value;\n return;\n default:\n prepareForAdd(index, value, type, refValues.length);\n this.refValues[(int) index] = value;\n }\n }\n\n public void addInt(long index, long value) {\n if (intValues != null) {\n prepareForAdd(index, value, PredefinedTypes.TYPE_INT, intValues.length);\n intValues[(int) index] = value;\n return;\n }\n\n prepareForAdd(index, value, TypeChecker.getType(value), byteValues.length);\n byteValues[(int) index] = (byte) ((Long) value).intValue();\n }\n\n private void addBoolean(long index, boolean value) {\n prepareForAdd(index, value, PredefinedTypes.TYPE_BOOLEAN, booleanValues.length);\n booleanValues[(int) index] = value;\n }\n\n private void addByte(long index, byte value) {\n prepareForAdd(index, value, PredefinedTypes.TYPE_BYTE, byteValues.length);\n byteValues[(int) index] = value;\n }\n\n private void addFloat(long index, double value) {\n prepareForAdd(index, value, PredefinedTypes.TYPE_FLOAT, floatValues.length);\n floatValues[(int) index] = value;\n }\n\n @Deprecated\n private void addString(long index, String value) {\n addBString(index, StringUtils.fromString(value));\n }\n\n private void addBString(long index, BString value) {\n prepareForAdd(index, value, PredefinedTypes.TYPE_STRING, bStringValues.length);\n bStringValues[(int) index] = value;\n }\n\n \n\n /**\n * Append value to the existing array.\n *\n * @param value value to be appended\n */\n @Override\n public void append(Object value) {\n add(this.size, value);\n }\n\n @Override\n public Object shift(long index) {\n handleImmutableArrayValue();\n Object val = get(index);\n shiftArray((int) index, 
getArrayFromType(this.elementType.getTag()));\n return val;\n }\n\n /**\n * Removes and returns first member of an array.\n *\n * @return the value that was the first member of the array\n */\n @Override\n public Object shift() {\n return shift(0);\n }\n\n @Override\n public void unshift(Object[] values) {\n unshift(0, values);\n }\n\n @Override\n public String stringValue(BLink parent) {\n StringJoiner sj = new StringJoiner(\",\");\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(Long.toString(intValues[i]));\n }\n break;\n case TypeTags.BOOLEAN_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(Boolean.toString(booleanValues[i]));\n }\n break;\n case TypeTags.BYTE_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(Long.toString(Byte.toUnsignedLong(byteValues[i])));\n }\n break;\n case TypeTags.FLOAT_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(Double.toString(floatValues[i]));\n }\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(((BValue) (bStringValues[i])).informalStringValue(parent));\n }\n break;\n default:\n getRefValuesString(parent, sj);\n break;\n }\n return \"[\" + sj + \"]\";\n }\n\n private void getRefValuesString(BLink parent, StringJoiner sj) {\n for (int i = 0; i < size; i++) {\n if (refValues[i] == null) {\n sj.add(\"null\");\n } else {\n Type type = TypeChecker.getType(refValues[i]);\n switch (type.getTag()) {\n case TypeTags.STRING_TAG:\n case TypeTags.XML_TAG:\n case TypeTags.XML_ELEMENT_TAG:\n case TypeTags.XML_ATTRIBUTES_TAG:\n case TypeTags.XML_COMMENT_TAG:\n case TypeTags.XML_PI_TAG:\n case TypeTags.XMLNS_TAG:\n case TypeTags.XML_TEXT_TAG:\n sj.add(((BValue) (refValues[i])).informalStringValue(new CycleUtils\n 
.Node(this, parent)));\n break;\n default:\n sj.add(StringUtils.getStringValue(refValues[i], new CycleUtils.Node(this, parent)));\n break;\n }\n }\n }\n }\n\n @Override\n public String expressionStringValue(BLink parent) {\n StringJoiner sj = new StringJoiner(\",\");\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(StringUtils.getExpressionStringValue(intValues[i],\n new CycleUtils.Node(this, parent)));\n }\n break;\n case TypeTags.BOOLEAN_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(StringUtils.getExpressionStringValue(booleanValues[i],\n new CycleUtils.Node(this, parent)));\n }\n break;\n case TypeTags.BYTE_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(Long.toString(Byte.toUnsignedLong(byteValues[i])));\n }\n break;\n case TypeTags.FLOAT_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(StringUtils.getExpressionStringValue(floatValues[i],\n new CycleUtils.Node(this, parent)));\n }\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n for (int i = 0; i < size; i++) {\n sj.add(StringUtils.getExpressionStringValue(bStringValues[i],\n new CycleUtils.Node(this, parent)));\n }\n break;\n default:\n for (int i = 0; i < size; i++) {\n sj.add(StringUtils.getExpressionStringValue(refValues[i],\n new CycleUtils.Node(this, parent)));\n }\n break;\n }\n return \"[\" + sj + \"]\";\n }\n\n @Override\n public Type getType() {\n return this.arrayType;\n }\n\n @Override\n public int size() {\n return this.size;\n }\n\n @Override\n public boolean isEmpty() {\n return this.size == 0;\n }\n\n @Override\n public Object copy(Map refs) {\n if (isFrozen()) {\n return this;\n }\n\n if (refs.containsKey(this)) {\n return refs.get(this);\n }\n\n ArrayValue valueArray;\n switch (this.elementType.getTag()) 
{\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n valueArray = new ArrayValueImpl(Arrays.copyOf(intValues, this.size), arrayType.isReadOnly());\n break;\n case TypeTags.BOOLEAN_TAG:\n valueArray = new ArrayValueImpl(Arrays.copyOf(booleanValues, this.size), arrayType.isReadOnly());\n break;\n case TypeTags.BYTE_TAG:\n valueArray = new ArrayValueImpl(Arrays.copyOf(byteValues, this.size), arrayType.isReadOnly());\n break;\n case TypeTags.FLOAT_TAG:\n valueArray = new ArrayValueImpl(Arrays.copyOf(floatValues, this.size), arrayType.isReadOnly());\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n valueArray = new ArrayValueImpl(Arrays.copyOf(bStringValues, this.size), arrayType.isReadOnly());\n break;\n default:\n Object[] values = new Object[this.size];\n valueArray = new ArrayValueImpl(values, arrayType);\n IntStream.range(0, this.size).forEach(i -> {\n Object value = this.refValues[i];\n if (value instanceof RefValue) {\n values[i] = ((RefValue) value).copy(refs);\n } else {\n values[i] = value;\n }\n });\n break;\n }\n\n refs.put(this, valueArray);\n return valueArray;\n }\n\n @Override\n public Object frozenCopy(Map refs) {\n ArrayValue copy = (ArrayValue) copy(refs);\n if (!copy.isFrozen()) {\n copy.freezeDirect();\n }\n return copy;\n }\n\n /**\n * Return a subarray starting from `startIndex` (inclusive) to `endIndex` (exclusive).\n *\n * @param startIndex index of first member to include in the slice\n * @param endIndex index of first member not to include in the slice\n * @return array slice within specified range\n */\n public ArrayValueImpl slice(long startIndex, long endIndex) {\n ArrayValueImpl slicedArray;\n int slicedSize = (int) (endIndex - startIndex);\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case 
TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n slicedArray = new ArrayValueImpl(new long[slicedSize], false);\n System.arraycopy(intValues, (int) startIndex, slicedArray.intValues, 0, slicedSize);\n break;\n case TypeTags.BOOLEAN_TAG:\n slicedArray = new ArrayValueImpl(new boolean[slicedSize], false);\n System.arraycopy(booleanValues, (int) startIndex, slicedArray.booleanValues, 0, slicedSize);\n break;\n case TypeTags.BYTE_TAG:\n slicedArray = new ArrayValueImpl(new byte[slicedSize], false);\n System.arraycopy(byteValues, (int) startIndex, slicedArray.byteValues, 0, slicedSize);\n break;\n case TypeTags.FLOAT_TAG:\n slicedArray = new ArrayValueImpl(new double[slicedSize], false);\n System.arraycopy(floatValues, (int) startIndex, slicedArray.floatValues, 0, slicedSize);\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n slicedArray = new ArrayValueImpl(new BString[slicedSize], false);\n System.arraycopy(bStringValues, (int) startIndex, slicedArray.bStringValues, 0, slicedSize);\n break;\n default:\n slicedArray = new ArrayValueImpl(new Object[slicedSize], new BArrayType(this.elementType));\n System.arraycopy(refValues, (int) startIndex, slicedArray.refValues, 0, slicedSize);\n break;\n }\n return slicedArray;\n }\n\n @Override\n public String toString() {\n return stringValue(null);\n }\n\n /**\n * Get ref values array.\n *\n * @return ref value array\n */\n @Override\n public Object[] getValues() {\n return refValues;\n }\n\n /**\n * Get a copy of byte array.\n *\n * @return byte array\n */\n @Override\n public byte[] getBytes() {\n byte[] bytes = new byte[this.size];\n System.arraycopy(byteValues, 0, bytes, 0, this.size);\n return bytes;\n }\n\n /**\n * Get a copy of string array.\n *\n * @return string array\n */\n @Override\n public String[] getStringArray() {\n String[] arr = new 
String[size];\n for (int i = 0; i < size; i++) {\n arr[i] = bStringValues[i].getValue();\n }\n return arr;\n }\n\n /**\n * Get a copy of int array.\n *\n * @return int array\n */\n @Override\n public long[] getIntArray() {\n return Arrays.copyOf(intValues, size);\n }\n\n @Override\n public boolean[] getBooleanArray() {\n return Arrays.copyOf(booleanValues, size);\n }\n\n @Override\n public byte[] getByteArray() {\n return Arrays.copyOf(byteValues, size);\n }\n\n @Override\n public double[] getFloatArray() {\n return Arrays.copyOf(floatValues, size);\n }\n\n @Override\n public void serialize(OutputStream outputStream) {\n if (this.elementType.getTag() == TypeTags.BYTE_TAG) {\n try {\n for (int i = 0; i < this.size; i++) {\n outputStream.write(this.byteValues[i]);\n }\n } catch (IOException e) {\n throw new BallerinaException(\"error occurred while writing the binary content to the output stream\", e);\n }\n } else {\n try {\n outputStream.write(this.toString().getBytes(Charset.defaultCharset()));\n } catch (IOException e) {\n throw new BallerinaException(\"error occurred while serializing data\", e);\n }\n }\n }\n\n /**\n * {@inheritDoc}\n */\n @Override\n public void freezeDirect() {\n if (arrayType.isReadOnly()) {\n return;\n }\n\n this.arrayType = (ArrayType) ReadOnlyUtils.setImmutableTypeAndGetEffectiveType(this.arrayType);\n if (this.elementType == null || this.elementType.getTag() > TypeTags.BOOLEAN_TAG) {\n for (int i = 0; i < this.size; i++) {\n Object value = this.getRefValue(i);\n if (value instanceof RefValue) {\n ((RefValue) value).freezeDirect();\n }\n }\n }\n this.typedesc = createSingletonTypedesc(this);\n }\n\n /**\n * {@inheritDoc}\n */\n @Override\n public IteratorValue getIterator() {\n return new ArrayIterator(this);\n }\n\n /**\n * Get {@code BType} of the array elements.\n *\n * @return element type\n */\n @Override\n public Type getElementType() {\n return this.elementType;\n }\n\n \n\n @Override\n protected void resizeInternalArray(int 
newLength) {\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n intValues = Arrays.copyOf(intValues, newLength);\n break;\n case TypeTags.BOOLEAN_TAG:\n booleanValues = Arrays.copyOf(booleanValues, newLength);\n break;\n case TypeTags.BYTE_TAG:\n byteValues = Arrays.copyOf(byteValues, newLength);\n break;\n case TypeTags.FLOAT_TAG:\n floatValues = Arrays.copyOf(floatValues, newLength);\n break;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n bStringValues = Arrays.copyOf(bStringValues, newLength);\n break;\n default:\n refValues = Arrays.copyOf(refValues, newLength);\n break;\n }\n }\n\n @Override\n protected void fillValues(int index) {\n if (index <= this.size) {\n return;\n }\n\n switch (this.elementType.getTag()) {\n case TypeTags.STRING_TAG:\n Arrays.fill(bStringValues, size, index, RuntimeConstants.STRING_EMPTY_VALUE);\n return;\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n case TypeTags.BYTE_TAG:\n case TypeTags.FLOAT_TAG:\n case TypeTags.BOOLEAN_TAG:\n return;\n default:\n if (arrayType.hasFillerValue()) {\n if (elementTypedescValue != null) {\n extractRecordFillerValues(index);\n } else {\n extractComplexFillerValues(index);\n }\n }\n }\n }\n\n private void extractComplexFillerValues(int index) {\n for (int i = size; i < index; i++) {\n this.refValues[i] = this.elementType.getZeroValue();\n }\n }\n\n private void extractRecordFillerValues(int index) {\n for (int i = size; i < index; i++) {\n this.refValues[i] = elementTypedescValue.instantiate(Scheduler.getStrand());\n }\n }\n\n @Override\n protected void rangeCheckForGet(long index, int 
size) {\n rangeCheck(index, size);\n if (index >= size) {\n throw BLangExceptionHelper.getRuntimeException(\n getModulePrefixedReason(ARRAY_LANG_LIB, INDEX_OUT_OF_RANGE_ERROR_IDENTIFIER),\n RuntimeErrors.ARRAY_INDEX_OUT_OF_RANGE, index, size);\n }\n }\n\n @Override\n protected void rangeCheck(long index, int size) {\n if (index > Integer.MAX_VALUE || index < Integer.MIN_VALUE) {\n throw BLangExceptionHelper.getRuntimeException(\n getModulePrefixedReason(ARRAY_LANG_LIB, INDEX_OUT_OF_RANGE_ERROR_IDENTIFIER),\n RuntimeErrors.INDEX_NUMBER_TOO_LARGE, index);\n }\n\n if ((int) index < 0 || index >= maxSize) {\n throw BLangExceptionHelper.getRuntimeException(\n getModulePrefixedReason(ARRAY_LANG_LIB, INDEX_OUT_OF_RANGE_ERROR_IDENTIFIER),\n RuntimeErrors.ARRAY_INDEX_OUT_OF_RANGE, index, size);\n }\n }\n\n @Override\n protected void fillerValueCheck(int index, int size) {\n \n \n if (arrayType.hasFillerValue()) {\n return;\n }\n if (index > size) {\n throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.ILLEGAL_LIST_INSERTION_ERROR,\n RuntimeErrors.ILLEGAL_ARRAY_INSERTION, size, index + 1);\n }\n }\n\n @Override\n protected void ensureCapacity(int requestedCapacity, int currentArraySize) {\n if (requestedCapacity <= currentArraySize) {\n return;\n }\n\n if (this.arrayType.getState() != ArrayState.OPEN) {\n return;\n }\n\n \n int newArraySize = currentArraySize + (currentArraySize >> 1);\n\n \n newArraySize = Math.max(newArraySize, requestedCapacity);\n\n \n newArraySize = Math.min(newArraySize, maxSize);\n resizeInternalArray(newArraySize);\n }\n\n @Override\n protected void checkFixedLength(long length) {\n if (this.arrayType.getState() == ArrayState.CLOSED) {\n throw BLangExceptionHelper.getRuntimeException(\n getModulePrefixedReason(ARRAY_LANG_LIB, INHERENT_TYPE_VIOLATION_ERROR_IDENTIFIER),\n RuntimeErrors.ILLEGAL_ARRAY_SIZE, size, length);\n }\n }\n\n @Override\n protected void unshift(long index, Object[] vals) {\n handleImmutableArrayValue();\n 
unshiftArray(index, vals.length, getCurrentArrayLength());\n\n int startIndex = (int) index;\n int endIndex = startIndex + vals.length;\n\n for (int i = startIndex, j = 0; i < endIndex; i++, j++) {\n add(i, vals[j]);\n }\n }\n\n \n\n private void prepareForAdd(long index, Object value, Type sourceType, int currentArraySize) {\n \n if (!TypeChecker.checkIsType(null, value, sourceType, this.elementType)) {\n throw ErrorCreator.createError(getModulePrefixedReason(ARRAY_LANG_LIB,\n INHERENT_TYPE_VIOLATION_ERROR_IDENTIFIER), BLangExceptionHelper.getErrorDetails(\n RuntimeErrors.INCOMPATIBLE_TYPE, this.elementType, sourceType));\n }\n\n int intIndex = (int) index;\n rangeCheck(index, size);\n fillerValueCheck(intIndex, size);\n ensureCapacity(intIndex + 1, currentArraySize);\n fillValues(intIndex);\n resetSize(intIndex);\n }\n\n private void fillRead(long index, int currentArraySize) {\n if (!arrayType.hasFillerValue()) {\n throw BLangExceptionHelper.getRuntimeException(BallerinaErrorReasons.ILLEGAL_LIST_INSERTION_ERROR,\n RuntimeErrors.ILLEGAL_ARRAY_INSERTION, size, index + 1);\n }\n\n int intIndex = (int) index;\n rangeCheck(index, size);\n ensureCapacity(intIndex + 1, currentArraySize);\n\n switch (this.elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.BYTE_TAG:\n case TypeTags.FLOAT_TAG:\n case TypeTags.BOOLEAN_TAG:\n break;\n case TypeTags.STRING_TAG:\n Arrays.fill(bStringValues, size, intIndex, RuntimeConstants.STRING_EMPTY_VALUE);\n break;\n default:\n for (int i = size; i <= index; i++) {\n this.refValues[i] = this.elementType.getZeroValue();\n }\n }\n\n resetSize(intIndex);\n }\n\n private void setArrayType(Type elementType, boolean readonly) {\n this.arrayType = new BArrayType(elementType, readonly);\n this.elementType = elementType;\n }\n\n private void resetSize(int index) {\n if (index >= size) {\n size = index + 1;\n }\n }\n\n private void shiftArray(int index, Object arr) {\n int nElemsToBeMoved = this.size - 1 - index;\n if (nElemsToBeMoved 
>= 0) {\n System.arraycopy(arr, index + 1, arr, index, nElemsToBeMoved);\n }\n this.size--;\n }\n\n \n\n private Object getArrayFromType(int typeTag) {\n switch (typeTag) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n return intValues;\n case TypeTags.BOOLEAN_TAG:\n return booleanValues;\n case TypeTags.BYTE_TAG:\n return byteValues;\n case TypeTags.FLOAT_TAG:\n return floatValues;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n return bStringValues;\n default:\n return refValues;\n }\n }\n\n private int getCurrentArrayLength() {\n switch (elementType.getTag()) {\n case TypeTags.INT_TAG:\n case TypeTags.SIGNED32_INT_TAG:\n case TypeTags.SIGNED16_INT_TAG:\n case TypeTags.SIGNED8_INT_TAG:\n case TypeTags.UNSIGNED32_INT_TAG:\n case TypeTags.UNSIGNED16_INT_TAG:\n case TypeTags.UNSIGNED8_INT_TAG:\n return intValues.length;\n case TypeTags.BOOLEAN_TAG:\n return booleanValues.length;\n case TypeTags.BYTE_TAG:\n return byteValues.length;\n case TypeTags.FLOAT_TAG:\n return floatValues.length;\n case TypeTags.STRING_TAG:\n case TypeTags.CHAR_STRING_TAG:\n return bStringValues.length;\n default:\n return refValues.length;\n }\n }\n\n @Override\n public int hashCode() {\n int result = Objects.hash(arrayType, elementType);\n result = 31 * result + Arrays.hashCode(refValues);\n result = 31 * result + Arrays.hashCode(intValues);\n result = 31 * result + Arrays.hashCode(booleanValues);\n result = 31 * result + Arrays.hashCode(byteValues);\n result = 31 * result + Arrays.hashCode(floatValues);\n result = 31 * result + Arrays.hashCode(bStringValues);\n return result;\n }\n}" }, { "comment": "OK, I will remove the Parquet protocal part. 
and the Parquet reader should holds a Long or Bytes vector and implement the `TimestmapColumnVector` interface.", "method_body": "public SqlTimestamp getTimestamp(int rowId, int colId, int precision) {\n\t\tif (isNullAt(rowId, colId)) {\n\t\t\treturn null;\n\t\t}\n\n\t\t\n\t\t\n\t\t\n\t\t\n\t\t\n\t\t\n\t\tif (columns[colId] instanceof TimestampColumnVector) {\n\t\t\treturn ((TimestampColumnVector) (columns[colId])).getTimestamp(rowId, precision);\n\t\t} else if (precision <= 3) {\n\t\t\treturn SqlTimestamp.fromEpochMillis(getLong(rowId, colId));\n\t\t} else if (precision <= 6) {\n\t\t\tlong microseconds = getLong(rowId, colId);\n\t\t\treturn SqlTimestamp.fromEpochMillis(\n\t\t\t\tmicroseconds / 1000, (int) (microseconds % 1000) * 1000);\n\t\t} else {\n\t\t\tbyte[] bytes = getBytes(rowId, colId);\n\t\t\tassert bytes.length == 12;\n\t\t\tlong nanoOfDay = 0;\n\t\t\tfor (int i = 0; i < 8; i++) {\n\t\t\t\tnanoOfDay <<= 8;\n\t\t\t\tnanoOfDay |= (bytes[i] & (0xff));\n\t\t\t}\n\t\t\tint julianDay = 0;\n\t\t\tfor (int i = 8; i < 12; i++) {\n\t\t\t\tjulianDay <<= 8;\n\t\t\t\tjulianDay |= (bytes[i] & (0xff));\n\t\t\t}\n\t\t\tlong millisecond =\n\t\t\t\t(julianDay - DateTimeUtils.EPOCH_JULIAN) * DateTimeUtils.MILLIS_PER_DAY + nanoOfDay / 1000000;\n\t\t\tint nanoOfMillisecond = (int) (nanoOfDay % 1000000);\n\t\t\treturn SqlTimestamp.fromEpochMillis(millisecond, nanoOfMillisecond);\n\t\t}\n\t}", "target_code": "if (columns[colId] instanceof TimestampColumnVector) {", "method_body_after": "public SqlTimestamp getTimestamp(int rowId, int colId, int precision) {\n\t\treturn ((TimestampColumnVector) (columns[colId])).getTimestamp(rowId, precision);\n\t}", "context_before": "class VectorizedColumnBatch implements Serializable {\n\tprivate static final long serialVersionUID = 8180323238728166155L;\n\n\t/**\n\t * This number is carefully chosen to minimize overhead and typically allows\n\t * one VectorizedColumnBatch to fit in cache.\n\t */\n\tpublic static final int DEFAULT_SIZE = 
2048;\n\n\tprivate int numRows;\n\tpublic final ColumnVector[] columns;\n\n\tpublic VectorizedColumnBatch(ColumnVector[] vectors) {\n\t\tthis.columns = vectors;\n\t}\n\n\t/**\n\t * Resets the batch for writing.\n\t */\n\tpublic void reset() {\n\t\tfor (ColumnVector column : columns) {\n\t\t\tcolumn.reset();\n\t\t}\n\t\tthis.numRows = 0;\n\t}\n\n\tpublic void setNumRows(int numRows) {\n\t\tthis.numRows = numRows;\n\t}\n\n\tpublic int getNumRows() {\n\t\treturn numRows;\n\t}\n\n\tpublic int getArity() {\n\t\treturn columns.length;\n\t}\n\n\tpublic boolean isNullAt(int rowId, int colId) {\n\t\treturn columns[colId].isNullAt(rowId);\n\t}\n\n\tpublic boolean getBoolean(int rowId, int colId) {\n\t\treturn ((BooleanColumnVector) columns[colId]).getBoolean(rowId);\n\t}\n\n\tpublic byte getByte(int rowId, int colId) {\n\t\treturn ((ByteColumnVector) columns[colId]).getByte(rowId);\n\t}\n\n\tpublic short getShort(int rowId, int colId) {\n\t\treturn ((ShortColumnVector) columns[colId]).getShort(rowId);\n\t}\n\n\tpublic int getInt(int rowId, int colId) {\n\t\treturn ((IntColumnVector) columns[colId]).getInt(rowId);\n\t}\n\n\tpublic long getLong(int rowId, int colId) {\n\t\treturn ((LongColumnVector) columns[colId]).getLong(rowId);\n\t}\n\n\tpublic float getFloat(int rowId, int colId) {\n\t\treturn ((FloatColumnVector) columns[colId]).getFloat(rowId);\n\t}\n\n\tpublic double getDouble(int rowId, int colId) {\n\t\treturn ((DoubleColumnVector) columns[colId]).getDouble(rowId);\n\t}\n\n\tpublic Bytes getByteArray(int rowId, int colId) {\n\t\treturn ((BytesColumnVector) columns[colId]).getBytes(rowId);\n\t}\n\n\tprivate byte[] getBytes(int rowId, int colId) {\n\t\tBytes byteArray = getByteArray(rowId, colId);\n\t\tif (byteArray.len == byteArray.data.length) {\n\t\t\treturn byteArray.data;\n\t\t} else {\n\t\t\treturn byteArray.getBytes();\n\t\t}\n\t}\n\n\tpublic String getString(int rowId, int colId) {\n\t\tBytes byteArray = getByteArray(rowId, colId);\n\t\treturn new 
String(byteArray.data, byteArray.offset, byteArray.len, StandardCharsets.UTF_8);\n\t}\n\n\tpublic Decimal getDecimal(int rowId, int colId, int precision, int scale) {\n\t\tif (isNullAt(rowId, colId)) {\n\t\t\treturn null;\n\t\t}\n\n\t\tif (Decimal.is32BitDecimal(precision)) {\n\t\t\treturn Decimal.fromUnscaledLong(precision, scale, getInt(rowId, colId));\n\t\t} else if (Decimal.is64BitDecimal(precision)) {\n\t\t\treturn Decimal.fromUnscaledLong(precision, scale, getLong(rowId, colId));\n\t\t} else {\n\t\t\tbyte[] bytes = getBytes(rowId, colId);\n\t\t\treturn Decimal.fromUnscaledBytes(precision, scale, bytes);\n\t\t}\n\t}\n\n\t\n}", "context_after": "class VectorizedColumnBatch implements Serializable {\n\tprivate static final long serialVersionUID = 8180323238728166155L;\n\n\t/**\n\t * This number is carefully chosen to minimize overhead and typically allows\n\t * one VectorizedColumnBatch to fit in cache.\n\t */\n\tpublic static final int DEFAULT_SIZE = 2048;\n\n\tprivate int numRows;\n\tpublic final ColumnVector[] columns;\n\n\tpublic VectorizedColumnBatch(ColumnVector[] vectors) {\n\t\tthis.columns = vectors;\n\t}\n\n\t/**\n\t * Resets the batch for writing.\n\t */\n\tpublic void reset() {\n\t\tfor (ColumnVector column : columns) {\n\t\t\tcolumn.reset();\n\t\t}\n\t\tthis.numRows = 0;\n\t}\n\n\tpublic void setNumRows(int numRows) {\n\t\tthis.numRows = numRows;\n\t}\n\n\tpublic int getNumRows() {\n\t\treturn numRows;\n\t}\n\n\tpublic int getArity() {\n\t\treturn columns.length;\n\t}\n\n\tpublic boolean isNullAt(int rowId, int colId) {\n\t\treturn columns[colId].isNullAt(rowId);\n\t}\n\n\tpublic boolean getBoolean(int rowId, int colId) {\n\t\treturn ((BooleanColumnVector) columns[colId]).getBoolean(rowId);\n\t}\n\n\tpublic byte getByte(int rowId, int colId) {\n\t\treturn ((ByteColumnVector) columns[colId]).getByte(rowId);\n\t}\n\n\tpublic short getShort(int rowId, int colId) {\n\t\treturn ((ShortColumnVector) columns[colId]).getShort(rowId);\n\t}\n\n\tpublic int 
getInt(int rowId, int colId) {\n\t\treturn ((IntColumnVector) columns[colId]).getInt(rowId);\n\t}\n\n\tpublic long getLong(int rowId, int colId) {\n\t\treturn ((LongColumnVector) columns[colId]).getLong(rowId);\n\t}\n\n\tpublic float getFloat(int rowId, int colId) {\n\t\treturn ((FloatColumnVector) columns[colId]).getFloat(rowId);\n\t}\n\n\tpublic double getDouble(int rowId, int colId) {\n\t\treturn ((DoubleColumnVector) columns[colId]).getDouble(rowId);\n\t}\n\n\tpublic Bytes getByteArray(int rowId, int colId) {\n\t\treturn ((BytesColumnVector) columns[colId]).getBytes(rowId);\n\t}\n\n\tprivate byte[] getBytes(int rowId, int colId) {\n\t\tBytes byteArray = getByteArray(rowId, colId);\n\t\tif (byteArray.len == byteArray.data.length) {\n\t\t\treturn byteArray.data;\n\t\t} else {\n\t\t\treturn byteArray.getBytes();\n\t\t}\n\t}\n\n\tpublic String getString(int rowId, int colId) {\n\t\tBytes byteArray = getByteArray(rowId, colId);\n\t\treturn new String(byteArray.data, byteArray.offset, byteArray.len, StandardCharsets.UTF_8);\n\t}\n\n\tpublic Decimal getDecimal(int rowId, int colId, int precision, int scale) {\n\t\tif (isNullAt(rowId, colId)) {\n\t\t\treturn null;\n\t\t}\n\n\t\tif (Decimal.is32BitDecimal(precision)) {\n\t\t\treturn Decimal.fromUnscaledLong(precision, scale, getInt(rowId, colId));\n\t\t} else if (Decimal.is64BitDecimal(precision)) {\n\t\t\treturn Decimal.fromUnscaledLong(precision, scale, getLong(rowId, colId));\n\t\t} else {\n\t\t\tbyte[] bytes = getBytes(rowId, colId);\n\t\t\treturn Decimal.fromUnscaledBytes(precision, scale, bytes);\n\t\t}\n\t}\n\n\t\n}" }, { "comment": "I'll need to check whether it's safe to assume the file is always available and exists.", "method_body": "public void execute() throws MojoExecutionException {\n\n if (project.getPackaging().equals(\"pom\")) {\n getLog().info(\"Type of the artifact is POM, skipping build goal\");\n return;\n }\n if (skip) {\n getLog().info(\"Skipping Quarkus build\");\n return;\n }\n\n boolean clear 
= false;\n try {\n\n final Properties projectProperties = project.getProperties();\n final Properties realProperties = new Properties();\n for (String name : projectProperties.stringPropertyNames()) {\n if (name.startsWith(\"quarkus.\")) {\n realProperties.setProperty(name, projectProperties.getProperty(name));\n }\n }\n if (uberJar && System.getProperty(QUARKUS_PACKAGE_UBER_JAR) == null) {\n System.setProperty(QUARKUS_PACKAGE_UBER_JAR, \"true\");\n clear = true;\n }\n realProperties.putIfAbsent(\"quarkus.application.name\", project.getArtifactId());\n realProperties.putIfAbsent(\"quarkus.application.version\", project.getVersion());\n\n MavenArtifactResolver resolver = MavenArtifactResolver.builder()\n .setRepositorySystem(repoSystem)\n .setRepositorySystemSession(repoSession)\n .setRemoteRepositories(repos)\n .build();\n\n final Artifact projectArtifact = project.getArtifact();\n final AppArtifact appArtifact = new AppArtifact(projectArtifact.getGroupId(), projectArtifact.getArtifactId(),\n projectArtifact.getClassifier(), projectArtifact.getArtifactHandler().getExtension(),\n projectArtifact.getVersion());\n appArtifact.setPaths(PathsCollection.of(projectArtifact.getFile().toPath()));\n\n CuratedApplication curatedApplication = QuarkusBootstrap.builder()\n .setAppArtifact(appArtifact)\n .setMavenArtifactResolver(resolver)\n .setBaseClassLoader(BuildMojo.class.getClassLoader())\n .setBuildSystemProperties(realProperties)\n .setLocalProjectDiscovery(false)\n .setBaseName(finalName)\n .setTargetDirectory(buildDir.toPath())\n .build().bootstrap();\n\n AugmentAction action = curatedApplication.createAugmentor();\n AugmentResult result = action.createProductionApplication();\n\n Artifact original = project.getArtifact();\n if (result.getJar() != null) {\n if (result.getJar().isUberJar() && result.getJar().getOriginalArtifact() != null) {\n final Path standardJar = curatedApplication.getAppModel().getAppArtifact().getPaths().getSinglePath();\n if 
(Files.exists(standardJar)) {\n try {\n Files.deleteIfExists(result.getJar().getOriginalArtifact());\n Files.move(standardJar, result.getJar().getOriginalArtifact());\n } catch (IOException e) {\n throw new UncheckedIOException(e);\n }\n original.setFile(result.getJar().getOriginalArtifact().toFile());\n }\n }\n if (result.getJar().isUberJar()) {\n projectHelper.attachArtifact(project, result.getJar().getPath().toFile(), \"runner\");\n }\n }\n\n } catch (Exception e) {\n throw new MojoExecutionException(\"Failed to build quarkus application\", e);\n } finally {\n if (clear) {\n System.clearProperty(QUARKUS_PACKAGE_UBER_JAR);\n }\n }\n }", "target_code": "appArtifact.setPaths(PathsCollection.of(projectArtifact.getFile().toPath()));", "method_body_after": "public void execute() throws MojoExecutionException {\n\n if (project.getPackaging().equals(\"pom\")) {\n getLog().info(\"Type of the artifact is POM, skipping build goal\");\n return;\n }\n if (skip) {\n getLog().info(\"Skipping Quarkus build\");\n return;\n }\n\n boolean clear = false;\n try {\n\n final Properties projectProperties = project.getProperties();\n final Properties realProperties = new Properties();\n for (String name : projectProperties.stringPropertyNames()) {\n if (name.startsWith(\"quarkus.\")) {\n realProperties.setProperty(name, projectProperties.getProperty(name));\n }\n }\n if (uberJar && System.getProperty(QUARKUS_PACKAGE_UBER_JAR) == null) {\n System.setProperty(QUARKUS_PACKAGE_UBER_JAR, \"true\");\n clear = true;\n }\n realProperties.putIfAbsent(\"quarkus.application.name\", project.getArtifactId());\n realProperties.putIfAbsent(\"quarkus.application.version\", project.getVersion());\n\n MavenArtifactResolver resolver = MavenArtifactResolver.builder()\n .setRepositorySystem(repoSystem)\n .setRepositorySystemSession(repoSession)\n .setRemoteRepositories(repos)\n .build();\n\n final Artifact projectArtifact = project.getArtifact();\n final AppArtifact appArtifact = new 
AppArtifact(projectArtifact.getGroupId(), projectArtifact.getArtifactId(),\n projectArtifact.getClassifier(), projectArtifact.getArtifactHandler().getExtension(),\n projectArtifact.getVersion());\n appArtifact.setPaths(PathsCollection.of(projectArtifact.getFile().toPath()));\n\n CuratedApplication curatedApplication = QuarkusBootstrap.builder()\n .setAppArtifact(appArtifact)\n .setMavenArtifactResolver(resolver)\n .setBaseClassLoader(BuildMojo.class.getClassLoader())\n .setBuildSystemProperties(realProperties)\n .setLocalProjectDiscovery(false)\n .setBaseName(finalName)\n .setTargetDirectory(buildDir.toPath())\n .build().bootstrap();\n\n AugmentAction action = curatedApplication.createAugmentor();\n AugmentResult result = action.createProductionApplication();\n\n Artifact original = project.getArtifact();\n if (result.getJar() != null) {\n if (result.getJar().isUberJar() && result.getJar().getOriginalArtifact() != null) {\n final Path standardJar = curatedApplication.getAppModel().getAppArtifact().getPaths().getSinglePath();\n if (Files.exists(standardJar)) {\n try {\n Files.deleteIfExists(result.getJar().getOriginalArtifact());\n Files.move(standardJar, result.getJar().getOriginalArtifact());\n } catch (IOException e) {\n throw new UncheckedIOException(e);\n }\n original.setFile(result.getJar().getOriginalArtifact().toFile());\n }\n }\n if (result.getJar().isUberJar()) {\n projectHelper.attachArtifact(project, result.getJar().getPath().toFile(), \"runner\");\n }\n }\n\n } catch (Exception e) {\n throw new MojoExecutionException(\"Failed to build quarkus application\", e);\n } finally {\n if (clear) {\n System.clearProperty(QUARKUS_PACKAGE_UBER_JAR);\n }\n }\n }", "context_before": "class BuildMojo extends AbstractMojo {\n\n protected static final String QUARKUS_PACKAGE_UBER_JAR = \"quarkus.package.uber-jar\";\n /**\n * The entry point to Aether, i.e. 
the component doing all the work.\n *\n * @component\n */\n @Component\n private RepositorySystem repoSystem;\n\n @Component\n private MavenProjectHelper projectHelper;\n\n /**\n * The current repository/network configuration of Maven.\n *\n * @parameter default-value=\"${repositorySystemSession}\"\n * @readonly\n */\n @Parameter(defaultValue = \"${repositorySystemSession}\", readonly = true)\n private RepositorySystemSession repoSession;\n\n /**\n * The project's remote repositories to use for the resolution of artifacts and their dependencies.\n *\n * @parameter default-value=\"${project.remoteProjectRepositories}\"\n * @readonly\n */\n @Parameter(defaultValue = \"${project.remoteProjectRepositories}\", readonly = true, required = true)\n private List repos;\n\n /**\n * The project's remote repositories to use for the resolution of plugins and their dependencies.\n *\n * @parameter default-value=\"${project.remotePluginRepositories}\"\n * @readonly\n */\n @Parameter(defaultValue = \"${project.remotePluginRepositories}\", readonly = true, required = true)\n private List pluginRepos;\n\n /**\n * The directory for compiled classes.\n */\n @Parameter(readonly = true, required = true, defaultValue = \"${project.build.outputDirectory}\")\n @Deprecated\n private File outputDirectory;\n\n @Parameter(defaultValue = \"${project}\", readonly = true, required = true)\n protected MavenProject project;\n\n /**\n * The directory for generated source files.\n */\n @Parameter(defaultValue = \"${project.build.directory}/generated-sources\")\n private File generatedSourcesDirectory;\n\n @Parameter(defaultValue = \"${project.build.directory}\")\n private File buildDir;\n\n @Parameter(defaultValue = \"${project.build.finalName}\")\n private String finalName;\n\n @Parameter(property = \"uberJar\", defaultValue = \"false\")\n private boolean uberJar;\n\n /**\n * When using the uberJar option, this array specifies entries that should\n * be excluded from the final jar. 
The entries are relative to the root of\n * the file. An example of this configuration could be:\n *
\n     * &\n     *   &\n     *   &\n     *     &\n     *     &\n     *     &\n     *     &\n     *   &\n     * &\n     * 
\n */\n @Parameter(property = \"ignoredEntries\")\n private String[] ignoredEntries;\n\n /** Skip the execution of this mojo */\n @Parameter(defaultValue = \"false\", property = \"quarkus.build.skip\")\n private boolean skip = false;\n\n public BuildMojo() {\n MojoLogger.logSupplier = this::getLog;\n }\n\n @Override\n \n\n}", "context_after": "class BuildMojo extends AbstractMojo {\n\n protected static final String QUARKUS_PACKAGE_UBER_JAR = \"quarkus.package.uber-jar\";\n /**\n * The entry point to Aether, i.e. the component doing all the work.\n *\n * @component\n */\n @Component\n private RepositorySystem repoSystem;\n\n @Component\n private MavenProjectHelper projectHelper;\n\n /**\n * The current repository/network configuration of Maven.\n *\n * @parameter default-value=\"${repositorySystemSession}\"\n * @readonly\n */\n @Parameter(defaultValue = \"${repositorySystemSession}\", readonly = true)\n private RepositorySystemSession repoSession;\n\n /**\n * The project's remote repositories to use for the resolution of artifacts and their dependencies.\n *\n * @parameter default-value=\"${project.remoteProjectRepositories}\"\n * @readonly\n */\n @Parameter(defaultValue = \"${project.remoteProjectRepositories}\", readonly = true, required = true)\n private List repos;\n\n /**\n * The project's remote repositories to use for the resolution of plugins and their dependencies.\n *\n * @parameter default-value=\"${project.remotePluginRepositories}\"\n * @readonly\n */\n @Parameter(defaultValue = \"${project.remotePluginRepositories}\", readonly = true, required = true)\n private List pluginRepos;\n\n /**\n * The directory for compiled classes.\n */\n @Parameter(readonly = true, required = true, defaultValue = \"${project.build.outputDirectory}\")\n @Deprecated\n private File outputDirectory;\n\n @Parameter(defaultValue = \"${project}\", readonly = true, required = true)\n protected MavenProject project;\n\n /**\n * The directory for generated source files.\n */\n 
@Parameter(defaultValue = \"${project.build.directory}/generated-sources\")\n private File generatedSourcesDirectory;\n\n @Parameter(defaultValue = \"${project.build.directory}\")\n private File buildDir;\n\n @Parameter(defaultValue = \"${project.build.finalName}\")\n private String finalName;\n\n @Parameter(property = \"uberJar\", defaultValue = \"false\")\n private boolean uberJar;\n\n /**\n * When using the uberJar option, this array specifies entries that should\n * be excluded from the final jar. The entries are relative to the root of\n * the file. An example of this configuration could be:\n *
\n     * &\n     *   &\n     *   &\n     *     &\n     *     &\n     *     &\n     *     &\n     *   &\n     * &\n     * 
\n */\n @Parameter(property = \"ignoredEntries\")\n private String[] ignoredEntries;\n\n /** Skip the execution of this mojo */\n @Parameter(defaultValue = \"false\", property = \"quarkus.build.skip\")\n private boolean skip = false;\n\n public BuildMojo() {\n MojoLogger.logSupplier = this::getLog;\n }\n\n @Override\n \n\n}" }, { "comment": "It's best if we can ensure the configuration never ends up in a bad state (i.e. where required values are set to null). > As for the NonNull annotations at the level of the builders: they force you to essentially create a method which accepts all mandatory fields totally negating the idea behind the builder in the first place. If you accept that you're filling in the information one \"with\" method at a time, you're going to temporary have fields which are null until all information is provided. Your analysis is correct, but I think we should reconsider the assumption that we must only provide `with` methods as the public API. As far as I can tell, there are three things `with` methods accomplish here: 1. null checks 2. helper methods for StaticValueProviders 3. create a modified copy of a configuration Item 1 is best left to the AutoValue generated code, and 2 is unneeded if we get remove ValueProviders. 3 is the only one that really provides value IMO. If we make the Builder public, we can remove all the `@Nullable` annotations and null checks. 
We can still offer `with` methods to address item 3 if we want, but the builder would be the only way to construct an object initially.", "method_body": "public DriverConfiguration withUrl(ValueProvider url) {\n Preconditions.checkArgument(\n url != null, \"a neo4j connection URL can not be empty or null\", url);\n Preconditions.checkArgument(\n StringUtils.isNotEmpty(url.get()),\n \"a neo4j connection URL can not be empty or null\",\n url);\n return builder().setUrl(url).build();\n }", "target_code": "\"a neo4j connection URL can not be empty or null\",", "method_body_after": "public DriverConfiguration withUrl(ValueProvider url) {\n Preconditions.checkArgument(\n url != null, \"a neo4j connection URL can not be empty or null\", url);\n Preconditions.checkArgument(\n StringUtils.isNotEmpty(url.get()),\n \"a neo4j connection URL can not be empty or null\",\n url);\n return builder().setUrl(url).build();\n }", "context_before": "class DriverConfiguration implements Serializable {\n public static DriverConfiguration create() {\n return new AutoValue_Neo4jIO_DriverConfiguration.Builder().build();\n }\n\n public static DriverConfiguration create(String url, String username, String password) {\n checkArgument(url != null, \"url can not be null\");\n checkArgument(username != null, \"username can not be null\");\n checkArgument(password != null, \"password can not be null\");\n return new AutoValue_Neo4jIO_DriverConfiguration.Builder()\n .build()\n .withUrl(url)\n .withUsername(username)\n .withPassword(password);\n }\n\n abstract @Nullable ValueProvider getUrl();\n\n abstract @Nullable ValueProvider> getUrls();\n\n abstract @Nullable ValueProvider getUsername();\n\n abstract @Nullable ValueProvider getPassword();\n\n abstract @Nullable ValueProvider getEncryption();\n\n abstract @Nullable ValueProvider getConnectionLivenessCheckTimeoutMs();\n\n abstract @Nullable ValueProvider getMaxConnectionLifetimeMs();\n\n abstract @Nullable ValueProvider 
getMaxConnectionPoolSize();\n\n abstract @Nullable ValueProvider getConnectionAcquisitionTimeoutMs();\n\n abstract @Nullable ValueProvider getConnectionTimeoutMs();\n\n abstract @Nullable ValueProvider getMaxTransactionRetryTimeMs();\n\n abstract @Nullable ValueProvider getRouting();\n\n abstract Builder builder();\n\n \n public DriverConfiguration withUrl(String url) {\n return withUrl(ValueProvider.StaticValueProvider.of(url));\n }\n\n \n\n \n public DriverConfiguration withUrls(List urls) {\n return withUrls(ValueProvider.StaticValueProvider.of(urls));\n }\n\n public DriverConfiguration withUrls(ValueProvider> urls) {\n Preconditions.checkArgument(\n urls != null, \"a list of neo4j connection URLs can not be empty or null\", urls);\n Preconditions.checkArgument(\n urls.get() != null && !urls.get().isEmpty(),\n \"a neo4j connection URL can not be empty or null\",\n urls);\n return builder().setUrls(urls).build();\n }\n\n \n public DriverConfiguration withEncryption() {\n return builder().setEncryption(ValueProvider.StaticValueProvider.of(Boolean.TRUE)).build();\n }\n\n public DriverConfiguration withoutEncryption() {\n return builder().setEncryption(ValueProvider.StaticValueProvider.of(Boolean.FALSE)).build();\n }\n\n \n public DriverConfiguration withConnectionLivenessCheckTimeoutMs(\n long connectionLivenessCheckTimeoutMs) {\n return withConnectionLivenessCheckTimeoutMs(\n ValueProvider.StaticValueProvider.of(connectionLivenessCheckTimeoutMs));\n }\n\n public DriverConfiguration withConnectionLivenessCheckTimeoutMs(\n ValueProvider connectionLivenessCheckTimeoutMs) {\n return builder()\n .setConnectionLivenessCheckTimeoutMs(connectionLivenessCheckTimeoutMs)\n .build();\n }\n\n \n public DriverConfiguration withMaxConnectionLifetimeMs(long maxConnectionLifetimeMs) {\n return withMaxConnectionLifetimeMs(\n ValueProvider.StaticValueProvider.of(maxConnectionLifetimeMs));\n }\n\n public DriverConfiguration withMaxConnectionLifetimeMs(\n ValueProvider 
maxConnectionLifetimeMs) {\n return builder().setMaxConnectionLifetimeMs(maxConnectionLifetimeMs).build();\n }\n\n \n public DriverConfiguration withMaxConnectionPoolSize(int maxConnectionPoolSize) {\n return withMaxConnectionPoolSize(ValueProvider.StaticValueProvider.of(maxConnectionPoolSize));\n }\n\n public DriverConfiguration withMaxConnectionPoolSize(\n ValueProvider maxConnectionPoolSize) {\n return builder().setMaxConnectionPoolSize(maxConnectionPoolSize).build();\n }\n\n \n public DriverConfiguration withConnectionAcquisitionTimeoutMs(\n long connectionAcquisitionTimeoutMs) {\n return withConnectionAcquisitionTimeoutMs(\n ValueProvider.StaticValueProvider.of(connectionAcquisitionTimeoutMs));\n }\n\n public DriverConfiguration withConnectionAcquisitionTimeoutMs(\n ValueProvider connectionAcquisitionTimeoutMs) {\n return builder().setConnectionAcquisitionTimeoutMs(connectionAcquisitionTimeoutMs).build();\n }\n\n \n public DriverConfiguration withConnectionTimeoutMs(long connectionTimeoutMs) {\n return withConnectionTimeoutMs(ValueProvider.StaticValueProvider.of(connectionTimeoutMs));\n }\n\n public DriverConfiguration withConnectionTimeoutMs(ValueProvider connectionTimeoutMs) {\n return builder().setConnectionTimeoutMs(connectionTimeoutMs).build();\n }\n\n \n public DriverConfiguration withMaxTransactionRetryTimeMs(long maxTransactionRetryTimeMs) {\n return withMaxTransactionRetryTimeMs(\n ValueProvider.StaticValueProvider.of(maxTransactionRetryTimeMs));\n }\n\n public DriverConfiguration withMaxTransactionRetryTimeMs(\n ValueProvider maxTransactionRetryTimeMs) {\n return builder().setMaxTransactionRetryTimeMs(maxTransactionRetryTimeMs).build();\n }\n\n public DriverConfiguration withUsername(String username) {\n return withUsername(ValueProvider.StaticValueProvider.of(username));\n }\n\n public DriverConfiguration withUsername(ValueProvider username) {\n Preconditions.checkArgument(username != null, \"neo4j username can not be null\", username);\n 
Preconditions.checkArgument(\n username.get() != null, \"neo4j username can not be null\", username);\n return builder().setUsername(username).build();\n }\n\n public DriverConfiguration withPassword(String password) {\n return withPassword(ValueProvider.StaticValueProvider.of(password));\n }\n\n public DriverConfiguration withPassword(ValueProvider password) {\n Preconditions.checkArgument(password != null, \"neo4j password can not be null\", password);\n Preconditions.checkArgument(\n password.get() != null, \"neo4j password can not be null\", password);\n return builder().setPassword(password).build();\n }\n\n \n public DriverConfiguration withRouting() {\n return builder().setRouting(ValueProvider.StaticValueProvider.of(Boolean.TRUE)).build();\n }\n\n public DriverConfiguration withoutRouting() {\n return builder().setRouting(ValueProvider.StaticValueProvider.of(Boolean.FALSE)).build();\n }\n\n void populateDisplayData(DisplayData.Builder builder) {\n builder.addIfNotNull(DisplayData.item(\"neo4j-url\", getUrl()));\n builder.addIfNotNull(DisplayData.item(\"neo4j-username\", getUsername()));\n builder.addIfNotNull(\n DisplayData.item(\n \"neo4j-password\", getPassword() != null ? \"\" : \"\"));\n builder.addIfNotNull(\n DisplayData.item(\n \"neo4j-encryption\", getEncryption() != null ? 
\"\" : \"\"));\n }\n\n Driver buildDriver() {\n \n \n \n Config.ConfigBuilder configBuilder = Config.builder();\n\n if (getEncryption() != null && getEncryption().get() != null) {\n if (getEncryption().get()) {\n configBuilder =\n Config.builder()\n .withEncryption()\n .withTrustStrategy(Config.TrustStrategy.trustAllCertificates());\n } else {\n configBuilder = Config.builder().withoutEncryption();\n }\n }\n\n \n if (getConnectionLivenessCheckTimeoutMs() != null\n && getConnectionLivenessCheckTimeoutMs().get() != null\n && getConnectionLivenessCheckTimeoutMs().get() > 0) {\n configBuilder =\n configBuilder.withConnectionLivenessCheckTimeout(\n getConnectionLivenessCheckTimeoutMs().get(), TimeUnit.MILLISECONDS);\n }\n if (getMaxConnectionLifetimeMs() != null\n && getMaxConnectionLifetimeMs().get() != null\n && getMaxConnectionLifetimeMs().get() > 0) {\n configBuilder =\n configBuilder.withMaxConnectionLifetime(\n getMaxConnectionLifetimeMs().get(), TimeUnit.MILLISECONDS);\n }\n if (getMaxConnectionPoolSize() != null && getMaxConnectionPoolSize().get() > 0) {\n configBuilder = configBuilder.withMaxConnectionPoolSize(getMaxConnectionPoolSize().get());\n }\n if (getConnectionAcquisitionTimeoutMs() != null\n && getConnectionAcquisitionTimeoutMs().get() != null\n && getConnectionAcquisitionTimeoutMs().get() > 0) {\n configBuilder =\n configBuilder.withConnectionAcquisitionTimeout(\n getConnectionAcquisitionTimeoutMs().get(), TimeUnit.MILLISECONDS);\n }\n if (getConnectionTimeoutMs() != null\n && getConnectionTimeoutMs().get() != null\n && getConnectionTimeoutMs().get() > 0) {\n configBuilder =\n configBuilder.withConnectionTimeout(\n getConnectionTimeoutMs().get(), TimeUnit.MILLISECONDS);\n }\n if (getMaxTransactionRetryTimeMs() != null\n && getMaxTransactionRetryTimeMs().get() != null\n && getMaxTransactionRetryTimeMs().get() > 0) {\n configBuilder =\n configBuilder.withMaxTransactionRetryTime(\n getMaxTransactionRetryTimeMs().get(), TimeUnit.MILLISECONDS);\n }\n\n \n 
\n configBuilder = configBuilder.withLogging(Logging.javaUtilLogging(Level.WARNING));\n\n \n \n Config config = configBuilder.build();\n\n \n \n List uris = new ArrayList<>();\n if (getUrl() != null && getUrl().get() != null) {\n try {\n uris.add(new URI(getUrl().get()));\n } catch (URISyntaxException e) {\n throw new RuntimeException(\"Error creating URI from URL '\" + getUrl().get() + \"'\", e);\n }\n }\n if (getUrls() != null && getUrls().get() != null) {\n List urls = getUrls().get();\n for (String url : urls) {\n try {\n uris.add(new URI(url));\n } catch (URISyntaxException e) {\n throw new RuntimeException(\n \"Error creating URI '\"\n + getUrl().get()\n + \"' from a list of \"\n + urls.size()\n + \" URLs\",\n e);\n }\n }\n }\n\n checkArgument(\n getUsername() != null && getUsername().get() != null,\n \"please provide a username to connect to Neo4j\");\n checkArgument(\n getPassword() != null && getPassword().get() != null,\n \"please provide a password to connect to Neo4j\");\n\n Driver driver;\n if (getRouting() != null && getRouting().get() != null && getRouting().get()) {\n driver =\n GraphDatabase.routingDriver(\n uris, AuthTokens.basic(getUsername().get(), getPassword().get()), config);\n } else {\n \n driver =\n GraphDatabase.driver(\n uris.get(0), AuthTokens.basic(getUsername().get(), getPassword().get()), config);\n }\n\n \n return driver;\n }\n\n /**\n * The Builder class below is not visible. 
We use it to service the \"with\" methods below the\n * Builder class.\n */\n @AutoValue.Builder\n abstract static class Builder {\n abstract Builder setUrl(ValueProvider url);\n\n abstract Builder setUrls(ValueProvider> url);\n\n abstract Builder setUsername(ValueProvider username);\n\n abstract Builder setPassword(ValueProvider password);\n\n abstract Builder setEncryption(ValueProvider encryption);\n\n abstract Builder setConnectionLivenessCheckTimeoutMs(\n ValueProvider connectionLivenessCheckTimeoutMs);\n\n abstract Builder setMaxConnectionLifetimeMs(ValueProvider maxConnectionLifetimeMs);\n\n abstract Builder setMaxConnectionPoolSize(ValueProvider maxConnectionPoolSize);\n\n abstract Builder setConnectionAcquisitionTimeoutMs(\n ValueProvider connectionAcquisitionTimeoutMs);\n\n abstract Builder setConnectionTimeoutMs(ValueProvider connectionTimeoutMs);\n\n abstract Builder setMaxTransactionRetryTimeMs(ValueProvider maxTransactionRetryTimeMs);\n\n abstract Builder setRouting(ValueProvider routing);\n\n abstract DriverConfiguration build();\n }\n }", "context_after": "class DriverConfiguration implements Serializable {\n public static DriverConfiguration create() {\n return new AutoValue_Neo4jIO_DriverConfiguration.Builder()\n .build()\n .withDefaultConfig(true)\n .withConfig(Config.defaultConfig());\n }\n\n public static DriverConfiguration create(String url, String username, String password) {\n checkArgument(url != null, \"url can not be null\");\n checkArgument(username != null, \"username can not be null\");\n checkArgument(password != null, \"password can not be null\");\n return new AutoValue_Neo4jIO_DriverConfiguration.Builder()\n .build()\n .withDefaultConfig(true)\n .withConfig(Config.defaultConfig())\n .withUrl(url)\n .withUsername(username)\n .withPassword(password);\n }\n\n abstract @Nullable ValueProvider getUrl();\n\n abstract @Nullable ValueProvider> getUrls();\n\n abstract @Nullable ValueProvider getUsername();\n\n abstract @Nullable 
ValueProvider getPassword();\n\n abstract @Nullable Config getConfig();\n\n abstract @Nullable ValueProvider getHasDefaultConfig();\n\n abstract Builder builder();\n\n public DriverConfiguration withUrl(String url) {\n return withUrl(ValueProvider.StaticValueProvider.of(url));\n }\n\n \n\n public DriverConfiguration withUrls(List urls) {\n return withUrls(ValueProvider.StaticValueProvider.of(urls));\n }\n\n public DriverConfiguration withUrls(ValueProvider> urls) {\n Preconditions.checkArgument(\n urls != null, \"a list of neo4j connection URLs can not be empty or null\", urls);\n Preconditions.checkArgument(\n urls.get() != null && !urls.get().isEmpty(),\n \"a neo4j connection URL can not be empty or null\",\n urls);\n return builder().setUrls(urls).build();\n }\n\n public DriverConfiguration withConfig(Config config) {\n return builder().setConfig(config).build();\n }\n\n public DriverConfiguration withUsername(String username) {\n return withUsername(ValueProvider.StaticValueProvider.of(username));\n }\n\n public DriverConfiguration withUsername(ValueProvider username) {\n Preconditions.checkArgument(username != null, \"neo4j username can not be null\", username);\n Preconditions.checkArgument(\n username.get() != null, \"neo4j username can not be null\", username);\n return builder().setUsername(username).build();\n }\n\n public DriverConfiguration withPassword(String password) {\n return withPassword(ValueProvider.StaticValueProvider.of(password));\n }\n\n public DriverConfiguration withPassword(ValueProvider password) {\n Preconditions.checkArgument(password != null, \"neo4j password can not be null\", password);\n Preconditions.checkArgument(\n password.get() != null, \"neo4j password can not be null\", password);\n return builder().setPassword(password).build();\n }\n\n public DriverConfiguration withDefaultConfig(boolean useDefault) {\n return withDefaultConfig(ValueProvider.StaticValueProvider.of(useDefault));\n }\n\n public DriverConfiguration 
withDefaultConfig(ValueProvider useDefault) {\n Preconditions.checkArgument(\n useDefault != null, \"withDefaultConfig parameter useDefault can not be null\", useDefault);\n Preconditions.checkArgument(\n useDefault.get() != null,\n \"withDefaultConfig parameter useDefault can not be null\",\n useDefault);\n return builder().setHasDefaultConfig(useDefault).build();\n }\n\n void populateDisplayData(DisplayData.Builder builder) {\n builder.addIfNotNull(DisplayData.item(\"neo4j-url\", getUrl()));\n builder.addIfNotNull(DisplayData.item(\"neo4j-username\", getUsername()));\n builder.addIfNotNull(\n DisplayData.item(\n \"neo4j-password\", getPassword() != null ? \"\" : \"\"));\n }\n\n Driver buildDriver() {\n \n \n \n Config config = getConfig();\n if (config == null) {\n throw new RuntimeException(\"please provide a neo4j config\");\n }\n \n \n \n \n Boolean hasDefaultConfig = getProvidedValue(getHasDefaultConfig());\n if (hasDefaultConfig != null && hasDefaultConfig) {\n config = Config.defaultConfig();\n }\n\n \n \n List uris = new ArrayList<>();\n String url = getProvidedValue(getUrl());\n if (url != null) {\n try {\n uris.add(new URI(url));\n } catch (URISyntaxException e) {\n throw new RuntimeException(\"Error creating URI from URL '\" + url + \"'\", e);\n }\n }\n List providedUrls = getProvidedValue(getUrls());\n if (providedUrls != null) {\n for (String providedUrl : providedUrls) {\n try {\n uris.add(new URI(providedUrl));\n } catch (URISyntaxException e) {\n throw new RuntimeException(\n \"Error creating URI '\"\n + providedUrl\n + \"' from a list of \"\n + providedUrls.size()\n + \" URLs\",\n e);\n }\n }\n }\n\n \n \n \n \n \n Driver driver;\n AuthToken authTokens =\n getAuthToken(getProvidedValue(getUsername()), getProvidedValue(getPassword()));\n if (uris.size() > 1) {\n driver = GraphDatabase.routingDriver(uris, authTokens, config);\n } else {\n \n driver = GraphDatabase.driver(uris.get(0), authTokens, config);\n }\n\n return driver;\n }\n\n /**\n * 
Certain embedded scenarios and so on actually allow for having no authentication at all.\n *\n * @param username The username if one is needed\n * @param password The password if one is needed\n * @return The AuthToken\n */\n protected AuthToken getAuthToken(String username, String password) {\n if (username != null && password != null) {\n return AuthTokens.basic(username, password);\n } else {\n return AuthTokens.none();\n }\n }\n\n /**\n * The Builder class below is not visible. We use it to service the \"with\" methods below the\n * Builder class.\n */\n @AutoValue.Builder\n abstract static class Builder {\n abstract Builder setUrl(ValueProvider url);\n\n abstract Builder setUrls(ValueProvider> url);\n\n abstract Builder setUsername(ValueProvider username);\n\n abstract Builder setPassword(ValueProvider password);\n\n abstract Builder setConfig(Config config);\n\n abstract Builder setHasDefaultConfig(ValueProvider useDefault);\n\n abstract DriverConfiguration build();\n }\n }" }, { "comment": "I can reproduce the exception after reverting the fixing changes.", "method_body": "public void testCreateCatalogFromUserClassLoader() throws Exception {\n\t\tfinal String className = \"UserCatalogFactory\";\n\t\tURLClassLoader classLoader = ClassLoaderUtils.withRoot(temporaryFolder.newFolder())\n\t\t\t.addResource(\"META-INF/services/org.apache.flink.table.factories.TableFactory\", \"UserCatalogFactory\")\n\t\t\t.addClass(\n\t\t\t\tclassName,\n\t\t\t\t\"import org.apache.flink.table.catalog.GenericInMemoryCatalog;\\n\" +\n\t\t\t\t\t\"import org.apache.flink.table.factories.CatalogFactory;\\n\" +\n\t\t\t\t\t\"import java.util.Collections;\\n\" +\n\t\t\t\t\t\"import org.apache.flink.table.catalog.Catalog;\\n\" +\n\t\t\t\t\t\"import java.util.HashMap;\\n\" +\n\t\t\t\t\t\"import java.util.List;\\n\" +\n\t\t\t\t\t\"import java.util.Map;\\n\" +\n\t\t\t\t\t\"\\tpublic class UserCatalogFactory implements CatalogFactory {\\n\" +\n\t\t\t\t\t\"\\t\\t@Override\\n\" 
+\n\t\t\t\t\t\"\\t\\tpublic Catalog createCatalog(\\n\" +\n\t\t\t\t\t\"\\t\\t\\t\\tString name,\\n\" +\n\t\t\t\t\t\"\\t\\t\\t\\tMap properties) {\\n\" +\n\t\t\t\t\t\"\\t\\t\\treturn new GenericInMemoryCatalog(name);\\n\" +\n\t\t\t\t\t\"\\t\\t}\\n\" +\n\t\t\t\t\t\"\\n\" +\n\t\t\t\t\t\"\\t\\t@Override\\n\" +\n\t\t\t\t\t\"\\t\\tpublic Map requiredContext() {\\n\" +\n\t\t\t\t\t\"\\t\\t\\tHashMap hashMap = new HashMap<>();\\n\" +\n\t\t\t\t\t\"\\t\\t\\thashMap.put(\\\"type\\\", \\\"userCatalog\\\");\\n\" +\n\t\t\t\t\t\"\\t\\t\\treturn hashMap;\\n\" +\n\t\t\t\t\t\"\\t\\t}\\n\" +\n\t\t\t\t\t\"\\n\" +\n\t\t\t\t\t\"\\t\\t@Override\\n\" +\n\t\t\t\t\t\"\\t\\tpublic List supportedProperties() {\\n\" +\n\t\t\t\t\t\"\\t\\t\\treturn Collections.emptyList();\\n\" +\n\t\t\t\t\t\"\\t\\t}\\n\" +\n\t\t\t\t\t\"\\t}\"\n\t\t\t).build();\n\n\t\ttry (TemporaryClassLoaderContext context = TemporaryClassLoaderContext.of(classLoader)) {\n\t\t\tTableEnvironment tableEnvironment = getTableEnvironment();\n\t\t\ttableEnvironment.executeSql(\"CREATE CATALOG cat WITH ('type'='userCatalog')\");\n\n\t\t\tassertTrue(tableEnvironment.getCatalog(\"cat\").isPresent());\n\t\t}\n\t}", "target_code": "try (TemporaryClassLoaderContext context = TemporaryClassLoaderContext.of(classLoader)) {", "method_body_after": "public void testCreateCatalogFromUserClassLoader() throws Exception {\n\t\tfinal String className = \"UserCatalogFactory\";\n\t\tURLClassLoader classLoader = ClassLoaderUtils.withRoot(temporaryFolder.newFolder())\n\t\t\t.addResource(\"META-INF/services/org.apache.flink.table.factories.TableFactory\", \"UserCatalogFactory\")\n\t\t\t.addClass(\n\t\t\t\tclassName,\n\t\t\t\t\"import org.apache.flink.table.catalog.GenericInMemoryCatalog;\\n\" +\n\t\t\t\t\t\"import org.apache.flink.table.factories.CatalogFactory;\\n\" +\n\t\t\t\t\t\"import java.util.Collections;\\n\" +\n\t\t\t\t\t\"import org.apache.flink.table.catalog.Catalog;\\n\" +\n\t\t\t\t\t\"import java.util.HashMap;\\n\" +\n\t\t\t\t\t\"import 
java.util.List;\\n\" +\n\t\t\t\t\t\"import java.util.Map;\\n\" +\n\t\t\t\t\t\"\\tpublic class UserCatalogFactory implements CatalogFactory {\\n\" +\n\t\t\t\t\t\"\\t\\t@Override\\n\" +\n\t\t\t\t\t\"\\t\\tpublic Catalog createCatalog(\\n\" +\n\t\t\t\t\t\"\\t\\t\\t\\tString name,\\n\" +\n\t\t\t\t\t\"\\t\\t\\t\\tMap properties) {\\n\" +\n\t\t\t\t\t\"\\t\\t\\treturn new GenericInMemoryCatalog(name);\\n\" +\n\t\t\t\t\t\"\\t\\t}\\n\" +\n\t\t\t\t\t\"\\n\" +\n\t\t\t\t\t\"\\t\\t@Override\\n\" +\n\t\t\t\t\t\"\\t\\tpublic Map requiredContext() {\\n\" +\n\t\t\t\t\t\"\\t\\t\\tHashMap hashMap = new HashMap<>();\\n\" +\n\t\t\t\t\t\"\\t\\t\\thashMap.put(\\\"type\\\", \\\"userCatalog\\\");\\n\" +\n\t\t\t\t\t\"\\t\\t\\treturn hashMap;\\n\" +\n\t\t\t\t\t\"\\t\\t}\\n\" +\n\t\t\t\t\t\"\\n\" +\n\t\t\t\t\t\"\\t\\t@Override\\n\" +\n\t\t\t\t\t\"\\t\\tpublic List supportedProperties() {\\n\" +\n\t\t\t\t\t\"\\t\\t\\treturn Collections.emptyList();\\n\" +\n\t\t\t\t\t\"\\t\\t}\\n\" +\n\t\t\t\t\t\"\\t}\"\n\t\t\t).build();\n\n\t\ttry (TemporaryClassLoaderContext context = TemporaryClassLoaderContext.of(classLoader)) {\n\t\t\tTableEnvironment tableEnvironment = getTableEnvironment();\n\t\t\ttableEnvironment.executeSql(\"CREATE CATALOG cat WITH ('type'='userCatalog')\");\n\n\t\t\tassertTrue(tableEnvironment.getCatalog(\"cat\").isPresent());\n\t\t}\n\t}", "context_before": "class CatalogITCase {\n\n\t@Rule\n\tpublic TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n\t@Test\n\tpublic void testCreateCatalog() {\n\t\tString name = \"c1\";\n\t\tTableEnvironment tableEnv = getTableEnvironment();\n\t\tString ddl = String.format(\"create catalog %s with('type'='%s')\", name, CATALOG_TYPE_VALUE_GENERIC_IN_MEMORY);\n\n\t\ttableEnv.executeSql(ddl);\n\n\t\tassertTrue(tableEnv.getCatalog(name).isPresent());\n\t\tassertTrue(tableEnv.getCatalog(name).get() instanceof GenericInMemoryCatalog);\n\t}\n\n\t@Test\n\tpublic void testDropCatalog() {\n\t\tString name = \"c1\";\n\t\tTableEnvironment tableEnv = 
getTableEnvironment();\n\n\t\tString ddl = String.format(\"create catalog %s with('type'='%s')\", name, CATALOG_TYPE_VALUE_GENERIC_IN_MEMORY);\n\t\ttableEnv.executeSql(ddl);\n\t\tassertTrue(tableEnv.getCatalog(name).isPresent());\n\n\t\tddl = String.format(\"drop catalog %s\", name);\n\t\ttableEnv.executeSql(ddl);\n\t\tassertFalse(tableEnv.getCatalog(name).isPresent());\n\t}\n\n\t@Test\n\t\n\n\tprivate TableEnvironment getTableEnvironment() {\n\t\tEnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();\n\t\tStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n\t\treturn StreamTableEnvironment.create(env, settings);\n\t}\n\n}", "context_after": "class CatalogITCase {\n\n\t@Rule\n\tpublic TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n\t@Test\n\tpublic void testCreateCatalog() {\n\t\tString name = \"c1\";\n\t\tTableEnvironment tableEnv = getTableEnvironment();\n\t\tString ddl = String.format(\"create catalog %s with('type'='%s')\", name, CATALOG_TYPE_VALUE_GENERIC_IN_MEMORY);\n\n\t\ttableEnv.executeSql(ddl);\n\n\t\tassertTrue(tableEnv.getCatalog(name).isPresent());\n\t\tassertTrue(tableEnv.getCatalog(name).get() instanceof GenericInMemoryCatalog);\n\t}\n\n\t@Test\n\tpublic void testDropCatalog() {\n\t\tString name = \"c1\";\n\t\tTableEnvironment tableEnv = getTableEnvironment();\n\n\t\tString ddl = String.format(\"create catalog %s with('type'='%s')\", name, CATALOG_TYPE_VALUE_GENERIC_IN_MEMORY);\n\t\ttableEnv.executeSql(ddl);\n\t\tassertTrue(tableEnv.getCatalog(name).isPresent());\n\n\t\tddl = String.format(\"drop catalog %s\", name);\n\t\ttableEnv.executeSql(ddl);\n\t\tassertFalse(tableEnv.getCatalog(name).isPresent());\n\t}\n\n\t@Test\n\t\n\n\tprivate TableEnvironment getTableEnvironment() {\n\t\tEnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();\n\t\tStreamExecutionEnvironment env = 
StreamExecutionEnvironment.getExecutionEnvironment();\n\t\treturn StreamTableEnvironment.create(env, settings);\n\t}\n\n}" }, { "comment": "I now explicitly nulled the `currentProducer`. That still leaves room in the future for errors but at least not correctness issues.", "method_body": "public List snapshotState(long checkpointId) throws IOException {\n currentProducer = createProducer(checkpointId);\n return ImmutableList.of(kafkaWriterState);\n }", "target_code": "currentProducer = createProducer(checkpointId);", "method_body_after": "public List snapshotState(long checkpointId) throws IOException {\n if (deliveryGuarantee == DeliveryGuarantee.EXACTLY_ONCE) {\n currentProducer = getTransactionalProducer(checkpointId + 1);\n currentProducer.beginTransaction();\n }\n return ImmutableList.of(kafkaWriterState);\n }", "context_before": "class KafkaWriter implements SinkWriter {\n\n private static final Logger LOG = LoggerFactory.getLogger(KafkaWriter.class);\n private static final String KEY_DISABLE_METRICS = \"flink.disable-metrics\";\n private static final String KAFKA_PRODUCER_METRIC_NAME = \"KafkaProducer\";\n private static final long METRIC_UPDATE_INTERVAL_MILLIS = 500;\n\n private final DeliveryGuarantee deliveryGuarantee;\n private final Properties kafkaProducerConfig;\n private final String transactionalIdPrefix;\n private final KafkaRecordSerializationSchema recordSerializer;\n private final Callback deliveryCallback;\n private final AtomicLong pendingRecords = new AtomicLong();\n private final KafkaRecordSerializationSchema.KafkaSinkContext kafkaSinkContext;\n private final Map previouslyCreatedMetrics = new HashMap<>();\n private final SinkWriterMetricGroup metricGroup;\n private final Counter numBytesOutCounter;\n private final Sink.ProcessingTimeService timeService;\n\n private transient Metric byteOutMetric;\n private FlinkKafkaInternalProducer currentProducer;\n private KafkaWriterState kafkaWriterState;\n private final Closer closer = 
Closer.create();\n @Nullable private volatile Exception producerAsyncException;\n private final boolean disableMetrics;\n private long lastCheckpointId;\n\n private boolean closed = false;\n private long lastSync = System.currentTimeMillis();\n\n /**\n * Constructor creating a kafka writer.\n *\n *

It will throw a {@link RuntimeException} if {@link\n * KafkaRecordSerializationSchema\n * KafkaRecordSerializationSchema.KafkaSinkContext)} fails.\n *\n * @param deliveryGuarantee the Sink's delivery guarantee\n * @param kafkaProducerConfig the properties to configure the {@link FlinkKafkaInternalProducer}\n * @param transactionalIdPrefix used to create the transactionalIds\n * @param sinkInitContext context to provide information about the runtime environment\n * @param recordSerializer serialize to transform the incoming records to {@link ProducerRecord}\n * @param schemaContext context used to initialize the {@link KafkaRecordSerializationSchema}\n * @param recoveredStates state from an previous execution which was covered\n */\n KafkaWriter(\n DeliveryGuarantee deliveryGuarantee,\n Properties kafkaProducerConfig,\n String transactionalIdPrefix,\n Sink.InitContext sinkInitContext,\n KafkaRecordSerializationSchema recordSerializer,\n SerializationSchema.InitializationContext schemaContext,\n List recoveredStates) {\n this.deliveryGuarantee = checkNotNull(deliveryGuarantee, \"deliveryGuarantee\");\n this.kafkaProducerConfig = checkNotNull(kafkaProducerConfig, \"kafkaProducerConfig\");\n this.transactionalIdPrefix = checkNotNull(transactionalIdPrefix, \"transactionalIdPrefix\");\n this.recordSerializer = checkNotNull(recordSerializer, \"recordSerializer\");\n this.deliveryCallback =\n (metadata, exception) -> {\n if (exception != null && producerAsyncException == null) {\n producerAsyncException = exception;\n }\n acknowledgeMessage();\n };\n checkNotNull(sinkInitContext, \"sinkInitContext\");\n this.timeService = sinkInitContext.getProcessingTimeService();\n this.metricGroup = sinkInitContext.metricGroup();\n this.numBytesOutCounter = metricGroup.getIOMetricGroup().getNumBytesOutCounter();\n this.kafkaSinkContext =\n new DefaultKafkaSinkContext(\n sinkInitContext.getSubtaskId(),\n sinkInitContext.getNumberOfParallelSubtasks(),\n kafkaProducerConfig);\n try {\n 
recordSerializer.open(schemaContext, kafkaSinkContext);\n } catch (Exception e) {\n throw new FlinkRuntimeException(\"Cannot initialize schema.\", e);\n }\n lastCheckpointId = sinkInitContext.getRestoredCheckpointId().orElse(-1) + 1;\n abortLingeringTransactions(\n checkNotNull(recoveredStates, \"recoveredStates\"), lastCheckpointId);\n this.kafkaWriterState = new KafkaWriterState(transactionalIdPrefix);\n this.currentProducer = createProducer(lastCheckpointId);\n disableMetrics =\n kafkaProducerConfig.containsKey(KEY_DISABLE_METRICS)\n && Boolean.parseBoolean(\n kafkaProducerConfig.getProperty(KEY_DISABLE_METRICS));\n registerMetricSync();\n }\n\n @Override\n public void write(IN element, Context context) throws IOException {\n checkErroneous();\n final ProducerRecord record =\n recordSerializer.serialize(element, kafkaSinkContext, context.timestamp());\n pendingRecords.incrementAndGet();\n currentProducer.send(record, deliveryCallback);\n }\n\n @Override\n public List prepareCommit(boolean flush) {\n flushRecords(flush);\n return precommit();\n }\n\n @Override\n \n\n @Override\n public void close() throws Exception {\n if (currentProducer.isInTransaction()) {\n currentProducer.abortTransaction();\n }\n closed = true;\n closer.close();\n }\n\n private void abortLingeringTransactions(\n List recoveredStates, long startCheckpointId) {\n List prefixesToAbort = Lists.newArrayList(transactionalIdPrefix);\n\n if (!recoveredStates.isEmpty()) {\n KafkaWriterState lastState = recoveredStates.get(0);\n if (!lastState.getTransactionalIdPrefix().equals(transactionalIdPrefix)) {\n prefixesToAbort.add(lastState.getTransactionalIdPrefix());\n LOG.warn(\n \"Transactional id prefix from previous execution {} has changed to {}.\",\n lastState.getTransactionalIdPrefix(),\n transactionalIdPrefix);\n }\n }\n\n final Properties properties = new Properties();\n properties.putAll(kafkaProducerConfig);\n properties.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, \"dummy\");\n try 
(FlinkKafkaInternalProducer producer =\n new FlinkKafkaInternalProducer<>(properties)) {\n for (String prefix : prefixesToAbort) {\n abortTransactionsWithPrefix(producer, prefix, startCheckpointId);\n }\n }\n }\n\n /**\n * Aborts all transactions that have been created by this subtask in a previous run.\n *\n *

It also aborts transactions from subtasks that may have been removed because of\n * downscaling.\n *\n *

When Flink downscales X subtasks to Y subtasks, then subtask i is responsible for cleaning\n * all subtasks j in [0; X), where j % Y = i. For example, if we downscale to 2, then subtask 0\n * is responsible for all even and subtask 1 for all odd subtasks.\n */\n private void abortTransactionsWithPrefix(\n FlinkKafkaInternalProducer producer,\n String prefix,\n long startCheckpointId) {\n final int p = kafkaSinkContext.getNumberOfParallelInstances();\n for (int subtaskId = kafkaSinkContext.getParallelInstanceId(); ; subtaskId += p) {\n if (abortTransactionOfSubtask(producer, prefix, startCheckpointId, subtaskId) == 0) {\n \n \n break;\n }\n }\n }\n\n /**\n * Aborts all transactions that have been created by a subtask in a previous run after the given\n * checkpoint id.\n *\n *

We assume that transaction ids are consecutively used and thus Flink can stop aborting as\n * soon as Flink notices that a particular transaction id was unused.\n */\n private int abortTransactionOfSubtask(\n FlinkKafkaInternalProducer producer,\n String prefix,\n long startCheckpointId,\n int subtaskId) {\n int numTransactionAborted = 0;\n for (long checkpointId = startCheckpointId + 1; ; checkpointId++, numTransactionAborted++) {\n producer.setTransactionalId(\n TransactionalIdFactory.buildTransactionalId(prefix, subtaskId, checkpointId));\n \n producer.initTransactions();\n \n if (producer.getEpoch() == 0) {\n \n \n \n break;\n }\n }\n return numTransactionAborted;\n }\n\n private void acknowledgeMessage() {\n pendingRecords.decrementAndGet();\n }\n\n private void checkErroneous() {\n Exception e = producerAsyncException;\n if (e != null) {\n \n producerAsyncException = null;\n throw new RuntimeException(\"Failed to send data to Kafka: \" + e.getMessage(), e);\n }\n }\n\n private FlinkKafkaInternalProducer createProducer(long checkpointId) {\n switch (deliveryGuarantee) {\n case EXACTLY_ONCE:\n final FlinkKafkaInternalProducer transactionalProducer =\n createTransactionalProducer(checkpointId);\n initMetrics(transactionalProducer);\n transactionalProducer.beginTransaction();\n closer.register(transactionalProducer);\n return transactionalProducer;\n case AT_LEAST_ONCE:\n case NONE:\n if (currentProducer != null) {\n LOG.debug(\"Reusing existing KafkaProducer\");\n return currentProducer;\n }\n final FlinkKafkaInternalProducer producer =\n new FlinkKafkaInternalProducer<>(kafkaProducerConfig);\n initMetrics(producer);\n closer.register(producer);\n return producer;\n default:\n throw new UnsupportedOperationException(\n \"Unsupported Kafka writer semantic \" + deliveryGuarantee);\n }\n }\n\n private void flushRecords(boolean finalFlush) {\n switch (deliveryGuarantee) {\n case EXACTLY_ONCE:\n case AT_LEAST_ONCE:\n currentProducer.flush();\n final long 
pendingRecordsCount = pendingRecords.get();\n if (pendingRecordsCount != 0) {\n throw new IllegalStateException(\n \"Pending record count must be zero at this point: \"\n + pendingRecordsCount);\n }\n break;\n case NONE:\n if (finalFlush) {\n currentProducer.flush();\n }\n break;\n default:\n throw new UnsupportedOperationException(\n \"Unsupported Kafka writer semantic \" + deliveryGuarantee);\n }\n \n checkErroneous();\n }\n\n private List precommit() {\n final List committables;\n switch (deliveryGuarantee) {\n case EXACTLY_ONCE:\n committables = Collections.singletonList(KafkaCommittable.of(currentProducer));\n break;\n case AT_LEAST_ONCE:\n case NONE:\n committables = Collections.emptyList();\n break;\n default:\n throw new UnsupportedOperationException(\n \"Unsupported Kafka writer semantic \" + deliveryGuarantee);\n }\n LOG.info(\"Committing {} committables.\", committables);\n return committables;\n }\n\n /**\n * For each checkpoint we create new {@link FlinkKafkaInternalProducer} so that new transactions\n * will not clash with transactions created during previous checkpoints ({@code\n * producer.initTransactions()} assures that we obtain new producerId and epoch counters).\n *\n *

Ensures that all transaction ids in between lastCheckpointId and checkpointId are\n * initialized.\n */\n private FlinkKafkaInternalProducer createTransactionalProducer(\n long checkpointId) {\n checkState(\n checkpointId > lastCheckpointId,\n \"Expected %s > %s\",\n checkpointId,\n lastCheckpointId);\n final Properties copiedProducerConfig = new Properties();\n copiedProducerConfig.putAll(kafkaProducerConfig);\n copiedProducerConfig.put(\n ProducerConfig.TRANSACTIONAL_ID_CONFIG,\n TransactionalIdFactory.buildTransactionalId(\n transactionalIdPrefix,\n kafkaSinkContext.getParallelInstanceId(),\n lastCheckpointId + 1));\n final FlinkKafkaInternalProducer producer =\n new FlinkKafkaInternalProducer<>(copiedProducerConfig);\n producer.initTransactions();\n \n \n for (long id = lastCheckpointId + 2; id <= checkpointId; id++) {\n producer.setTransactionalId(\n TransactionalIdFactory.buildTransactionalId(\n transactionalIdPrefix,\n kafkaSinkContext.getParallelInstanceId(),\n lastCheckpointId + 1));\n producer.initTransactions();\n }\n this.lastCheckpointId = checkpointId;\n LOG.info(\"Created new transactional producer {}\", producer.getTransactionalId());\n return producer;\n }\n\n private void initMetrics(FlinkKafkaInternalProducer producer) {\n byteOutMetric =\n MetricUtil.getKafkaMetric(\n producer.metrics(), \"producer-metrics\", \"outgoing-byte-total\");\n metricGroup.setCurrentSendTimeGauge(() -> computeSendTime(producer));\n if (disableMetrics) {\n return;\n }\n final MetricGroup kafkaMetricGroup = metricGroup.addGroup(KAFKA_PRODUCER_METRIC_NAME);\n producer.metrics().entrySet().forEach(initMetric(kafkaMetricGroup));\n }\n\n private Consumer> initMetric(\n MetricGroup kafkaMetricGroup) {\n return (entry) -> {\n final String name = entry.getKey().name();\n final Metric metric = entry.getValue();\n if (previouslyCreatedMetrics.containsKey(name)) {\n final KafkaMetricMutableWrapper wrapper = previouslyCreatedMetrics.get(name);\n wrapper.setKafkaMetric(metric);\n } 
else {\n final KafkaMetricMutableWrapper wrapper = new KafkaMetricMutableWrapper(metric);\n previouslyCreatedMetrics.put(name, wrapper);\n kafkaMetricGroup.gauge(name, wrapper);\n }\n };\n }\n\n private static long computeSendTime(Producer producer) {\n final Metric sendTime =\n MetricUtil.getKafkaMetric(\n producer.metrics(), \"producer-metrics\", \"request-latency-avg\");\n final Metric queueTime =\n MetricUtil.getKafkaMetric(\n producer.metrics(), \"producer-metrics\", \"record-queue-time-avg\");\n return ((Number) sendTime.metricValue()).longValue()\n + ((Number) queueTime.metricValue()).longValue();\n }\n\n private void registerMetricSync() {\n timeService.registerProcessingTimer(\n lastSync + METRIC_UPDATE_INTERVAL_MILLIS,\n (time) -> {\n if (closed) {\n return;\n }\n MetricUtil.sync(byteOutMetric, numBytesOutCounter);\n lastSync = time;\n registerMetricSync();\n });\n }\n}", "context_after": "class KafkaWriter implements SinkWriter {\n\n private static final Logger LOG = LoggerFactory.getLogger(KafkaWriter.class);\n private static final String KAFKA_PRODUCER_METRIC_NAME = \"KafkaProducer\";\n private static final long METRIC_UPDATE_INTERVAL_MILLIS = 500;\n\n private static final String KEY_DISABLE_METRICS = \"flink.disable-metrics\";\n private static final String KEY_REGISTER_METRICS = \"register.producer.metrics\";\n\n private final DeliveryGuarantee deliveryGuarantee;\n private final Properties kafkaProducerConfig;\n private final String transactionalIdPrefix;\n private final KafkaRecordSerializationSchema recordSerializer;\n private final Callback deliveryCallback;\n private final AtomicLong pendingRecords = new AtomicLong();\n private final KafkaRecordSerializationSchema.KafkaSinkContext kafkaSinkContext;\n private final Map previouslyCreatedMetrics = new HashMap<>();\n private final SinkWriterMetricGroup metricGroup;\n private final Counter numBytesOutCounter;\n private final Sink.ProcessingTimeService timeService;\n private final boolean 
disabledMetrics;\n\n private Metric byteOutMetric;\n private FlinkKafkaInternalProducer currentProducer;\n private final KafkaWriterState kafkaWriterState;\n \n private final Deque> producerPool =\n new ArrayDeque<>();\n private final Closer closer = Closer.create();\n @Nullable private volatile Exception producerAsyncException;\n private long lastCheckpointId;\n\n private boolean closed = false;\n private long lastSync = System.currentTimeMillis();\n\n /**\n * Constructor creating a kafka writer.\n *\n *

It will throw a {@link RuntimeException} if {@link\n * KafkaRecordSerializationSchema\n * KafkaRecordSerializationSchema.KafkaSinkContext)} fails.\n *\n * @param deliveryGuarantee the Sink's delivery guarantee\n * @param kafkaProducerConfig the properties to configure the {@link FlinkKafkaInternalProducer}\n * @param transactionalIdPrefix used to create the transactionalIds\n * @param sinkInitContext context to provide information about the runtime environment\n * @param recordSerializer serialize to transform the incoming records to {@link ProducerRecord}\n * @param schemaContext context used to initialize the {@link KafkaRecordSerializationSchema}\n * @param recoveredStates state from an previous execution which was covered\n */\n KafkaWriter(\n DeliveryGuarantee deliveryGuarantee,\n Properties kafkaProducerConfig,\n String transactionalIdPrefix,\n Sink.InitContext sinkInitContext,\n KafkaRecordSerializationSchema recordSerializer,\n SerializationSchema.InitializationContext schemaContext,\n List recoveredStates) {\n this.deliveryGuarantee = checkNotNull(deliveryGuarantee, \"deliveryGuarantee\");\n this.kafkaProducerConfig = checkNotNull(kafkaProducerConfig, \"kafkaProducerConfig\");\n this.transactionalIdPrefix = checkNotNull(transactionalIdPrefix, \"transactionalIdPrefix\");\n this.recordSerializer = checkNotNull(recordSerializer, \"recordSerializer\");\n this.deliveryCallback =\n (metadata, exception) -> {\n if (exception != null && producerAsyncException == null) {\n producerAsyncException = exception;\n }\n acknowledgeMessage();\n };\n this.disabledMetrics =\n kafkaProducerConfig.containsKey(KEY_DISABLE_METRICS)\n && Boolean.parseBoolean(\n kafkaProducerConfig.get(KEY_DISABLE_METRICS).toString())\n || kafkaProducerConfig.containsKey(KEY_REGISTER_METRICS)\n && !Boolean.parseBoolean(\n kafkaProducerConfig.get(KEY_REGISTER_METRICS).toString());\n checkNotNull(sinkInitContext, \"sinkInitContext\");\n this.timeService = 
sinkInitContext.getProcessingTimeService();\n this.metricGroup = sinkInitContext.metricGroup();\n this.numBytesOutCounter = metricGroup.getIOMetricGroup().getNumBytesOutCounter();\n this.kafkaSinkContext =\n new DefaultKafkaSinkContext(\n sinkInitContext.getSubtaskId(),\n sinkInitContext.getNumberOfParallelSubtasks(),\n kafkaProducerConfig);\n try {\n recordSerializer.open(schemaContext, kafkaSinkContext);\n } catch (Exception e) {\n throw new FlinkRuntimeException(\"Cannot initialize schema.\", e);\n }\n\n this.kafkaWriterState = new KafkaWriterState(transactionalIdPrefix);\n this.lastCheckpointId = sinkInitContext.getRestoredCheckpointId().orElse(-1);\n if (deliveryGuarantee == DeliveryGuarantee.EXACTLY_ONCE) {\n abortLingeringTransactions(\n checkNotNull(recoveredStates, \"recoveredStates\"), lastCheckpointId + 1);\n this.currentProducer = getTransactionalProducer(lastCheckpointId + 1);\n this.currentProducer.beginTransaction();\n } else if (deliveryGuarantee == DeliveryGuarantee.AT_LEAST_ONCE\n || deliveryGuarantee == DeliveryGuarantee.NONE) {\n this.currentProducer = new FlinkKafkaInternalProducer<>(this.kafkaProducerConfig, null);\n closer.register(this.currentProducer);\n initMetrics(this.currentProducer);\n } else {\n throw new UnsupportedOperationException(\n \"Unsupported Kafka writer semantic \" + this.deliveryGuarantee);\n }\n registerMetricSync();\n }\n\n @Override\n public void write(IN element, Context context) throws IOException {\n checkErroneous();\n final ProducerRecord record =\n recordSerializer.serialize(element, kafkaSinkContext, context.timestamp());\n pendingRecords.incrementAndGet();\n currentProducer.send(record, deliveryCallback);\n }\n\n @Override\n public List prepareCommit(boolean flush) {\n flushRecords(flush);\n if (deliveryGuarantee == DeliveryGuarantee.EXACTLY_ONCE) {\n final List committables =\n Collections.singletonList(\n KafkaCommittable.of(currentProducer, producerPool::add));\n LOG.info(\"Committing {} committables.\", 
committables);\n return committables;\n }\n return Collections.emptyList();\n }\n\n @Override\n \n\n @Override\n public void close() throws Exception {\n if (currentProducer.isInTransaction()) {\n currentProducer.abortTransaction();\n }\n closed = true;\n closer.close();\n producerPool.clear();\n checkState(currentProducer.isClosed());\n currentProducer = null;\n }\n\n @VisibleForTesting\n Deque> getProducerPool() {\n return producerPool;\n }\n\n @VisibleForTesting\n FlinkKafkaInternalProducer getCurrentProducer() {\n return currentProducer;\n }\n\n void abortLingeringTransactions(\n List recoveredStates, long startCheckpointId) {\n List prefixesToAbort = Lists.newArrayList(transactionalIdPrefix);\n\n if (!recoveredStates.isEmpty()) {\n KafkaWriterState lastState = recoveredStates.get(0);\n if (!lastState.getTransactionalIdPrefix().equals(transactionalIdPrefix)) {\n prefixesToAbort.add(lastState.getTransactionalIdPrefix());\n LOG.warn(\n \"Transactional id prefix from previous execution {} has changed to {}.\",\n lastState.getTransactionalIdPrefix(),\n transactionalIdPrefix);\n }\n }\n\n try (TransactionAborter transactionAborter =\n new TransactionAborter(\n kafkaSinkContext.getParallelInstanceId(),\n kafkaSinkContext.getNumberOfParallelInstances(),\n this::getOrCreateTransactionalProducer,\n producerPool::add)) {\n transactionAborter.abortLingeringTransactions(prefixesToAbort, startCheckpointId);\n }\n }\n\n private void acknowledgeMessage() {\n pendingRecords.decrementAndGet();\n }\n\n private void checkErroneous() {\n Exception e = producerAsyncException;\n if (e != null) {\n \n producerAsyncException = null;\n throw new RuntimeException(\"Failed to send data to Kafka: \" + e.getMessage(), e);\n }\n }\n\n private void flushRecords(boolean finalFlush) {\n switch (deliveryGuarantee) {\n case EXACTLY_ONCE:\n case AT_LEAST_ONCE:\n currentProducer.flush();\n final long pendingRecordsCount = pendingRecords.get();\n if (pendingRecordsCount != 0) {\n throw new 
IllegalStateException(\n \"Pending record count must be zero at this point: \"\n + pendingRecordsCount);\n }\n break;\n case NONE:\n if (finalFlush) {\n currentProducer.flush();\n }\n break;\n default:\n throw new UnsupportedOperationException(\n \"Unsupported Kafka writer semantic \" + deliveryGuarantee);\n }\n \n checkErroneous();\n }\n\n /**\n * For each checkpoint we create new {@link FlinkKafkaInternalProducer} so that new transactions\n * will not clash with transactions created during previous checkpoints ({@code\n * producer.initTransactions()} assures that we obtain new producerId and epoch counters).\n *\n *

Ensures that all transaction ids in between lastCheckpointId and checkpointId are\n * initialized.\n */\n private FlinkKafkaInternalProducer getTransactionalProducer(long checkpointId) {\n checkState(\n checkpointId > lastCheckpointId,\n \"Expected %s > %s\",\n checkpointId,\n lastCheckpointId);\n FlinkKafkaInternalProducer producer = null;\n \n \n for (long id = lastCheckpointId + 1; id <= checkpointId; id++) {\n String transactionalId =\n TransactionalIdFactory.buildTransactionalId(\n transactionalIdPrefix, kafkaSinkContext.getParallelInstanceId(), id);\n producer = getOrCreateTransactionalProducer(transactionalId);\n }\n this.lastCheckpointId = checkpointId;\n assert producer != null;\n LOG.info(\"Created new transactional producer {}\", producer.getTransactionalId());\n return producer;\n }\n\n private FlinkKafkaInternalProducer getOrCreateTransactionalProducer(\n String transactionalId) {\n FlinkKafkaInternalProducer producer = producerPool.poll();\n if (producer == null) {\n producer = new FlinkKafkaInternalProducer<>(kafkaProducerConfig, transactionalId);\n closer.register(producer);\n producer.initTransactions();\n initMetrics(producer);\n } else {\n producer.initTransactionId(transactionalId);\n }\n return producer;\n }\n\n private void initMetrics(FlinkKafkaInternalProducer producer) {\n byteOutMetric =\n MetricUtil.getKafkaMetric(\n producer.metrics(), \"producer-metrics\", \"outgoing-byte-total\");\n metricGroup.setCurrentSendTimeGauge(() -> computeSendTime(producer));\n if (disabledMetrics) {\n return;\n }\n final MetricGroup kafkaMetricGroup = metricGroup.addGroup(KAFKA_PRODUCER_METRIC_NAME);\n producer.metrics().entrySet().forEach(initMetric(kafkaMetricGroup));\n }\n\n private Consumer> initMetric(\n MetricGroup kafkaMetricGroup) {\n return (entry) -> {\n final String name = entry.getKey().name();\n final Metric metric = entry.getValue();\n if (previouslyCreatedMetrics.containsKey(name)) {\n final KafkaMetricMutableWrapper wrapper = 
previouslyCreatedMetrics.get(name);\n wrapper.setKafkaMetric(metric);\n } else {\n final KafkaMetricMutableWrapper wrapper = new KafkaMetricMutableWrapper(metric);\n previouslyCreatedMetrics.put(name, wrapper);\n kafkaMetricGroup.gauge(name, wrapper);\n }\n };\n }\n\n private static long computeSendTime(Producer producer) {\n final Metric sendTime =\n MetricUtil.getKafkaMetric(\n producer.metrics(), \"producer-metrics\", \"request-latency-avg\");\n final Metric queueTime =\n MetricUtil.getKafkaMetric(\n producer.metrics(), \"producer-metrics\", \"record-queue-time-avg\");\n return ((Number) sendTime.metricValue()).longValue()\n + ((Number) queueTime.metricValue()).longValue();\n }\n\n private void registerMetricSync() {\n timeService.registerProcessingTimer(\n lastSync + METRIC_UPDATE_INTERVAL_MILLIS,\n (time) -> {\n if (closed) {\n return;\n }\n MetricUtil.sync(byteOutMetric, numBytesOutCounter);\n lastSync = time;\n registerMetricSync();\n });\n }\n}" }, { "comment": "```suggestion String lastDb = ctx.getLastDBOfCatalog(catalogName); ```", "method_body": "public void changeCatalog(ConnectContext ctx, String catalogName) throws DdlException {\n CatalogIf catalogIf = catalogMgr.getCatalog(catalogName);\n if (catalogIf == null) {\n throw new DdlException(ErrorCode.ERR_UNKNOWN_CATALOG.formatErrorMsg(catalogName),\n ErrorCode.ERR_UNKNOWN_CATALOG);\n }\n\n String currentDB = ctx.getDatabase();\n if (StringUtils.isNotEmpty(currentDB)) {\n \n if (ctx.getCurrentCatalog() != null) {\n ConnectContext.get().addLastDBOfCatalog(ctx.getCurrentCatalog().getName(), currentDB);\n }\n }\n ctx.changeDefaultCatalog(catalogName);\n String lastDb = ConnectContext.get().getLastDBOfCatalog(catalogName);\n if (StringUtils.isNotEmpty(lastDb)) {\n ctx.setDatabase(lastDb);\n }\n if (catalogIf instanceof EsExternalCatalog) {\n ctx.setDatabase(EsExternalCatalog.DEFAULT_DB);\n }\n }", "target_code": "String lastDb = ConnectContext.get().getLastDBOfCatalog(catalogName);", "method_body_after": 
"public void changeCatalog(ConnectContext ctx, String catalogName) throws DdlException {\n CatalogIf catalogIf = catalogMgr.getCatalog(catalogName);\n if (catalogIf == null) {\n throw new DdlException(ErrorCode.ERR_UNKNOWN_CATALOG.formatErrorMsg(catalogName),\n ErrorCode.ERR_UNKNOWN_CATALOG);\n }\n\n String currentDB = ctx.getDatabase();\n if (StringUtils.isNotEmpty(currentDB)) {\n \n if (ctx.getCurrentCatalog() != null) {\n ctx.addLastDBOfCatalog(ctx.getCurrentCatalog().getName(), currentDB);\n }\n }\n ctx.changeDefaultCatalog(catalogName);\n String lastDb = ctx.getLastDBOfCatalog(catalogName);\n if (StringUtils.isNotEmpty(lastDb)) {\n ctx.setDatabase(lastDb);\n }\n if (catalogIf instanceof EsExternalCatalog) {\n ctx.setDatabase(EsExternalCatalog.DEFAULT_DB);\n }\n }", "context_before": "class SingletonHolder {\n private static final Env INSTANCE = EnvFactory.getInstance().createEnv(false);\n }", "context_after": "class SingletonHolder {\n private static final Env INSTANCE = EnvFactory.getInstance().createEnv(false);\n }" }, { "comment": "array is generated for rest binding pattern in list binding pattern. For error and mapping bp it generates a map for rest bp. 
So to be generalized I removed array word", "method_body": "private List desugarGlobalVariables(BLangPackage pkgNode, BLangBlockFunctionBody initFnBody) {\n List globalVars = pkgNode.globalVars;\n List desugaredGlobalVarList = new ArrayList<>();\n SymbolEnv initFunctionEnv =\n SymbolEnv.createFunctionEnv(pkgNode.initFunction, pkgNode.initFunction.symbol.scope, env);\n\n globalVars.forEach(globalVar -> {\n this.env.enclPkg.topLevelNodes.remove(globalVar);\n \n switch (globalVar.getKind()) {\n case TUPLE_VARIABLE:\n case RECORD_VARIABLE:\n case ERROR_VARIABLE:\n BLangNode blockStatementNode = rewrite(globalVar, initFunctionEnv);\n List statements = ((BLangBlockStmt) blockStatementNode).stmts;\n for (int i = 0; i < statements.size(); i++) {\n BLangStatement bLangStatement = statements.get(i);\n \n \n if (bLangStatement.getKind() == NodeKind.BLOCK || i == 0) {\n initFnBody.stmts.add(bLangStatement);\n continue;\n }\n BLangSimpleVariable simpleVar = ((BLangSimpleVariableDef) bLangStatement).var;\n simpleVar.annAttachments = globalVar.getAnnotationAttachments();\n addToInitFunction(simpleVar, initFnBody);\n desugaredGlobalVarList.add(simpleVar);\n }\n break;\n default:\n long globalVarFlags = globalVar.symbol.flags;\n BLangSimpleVariable simpleGlobalVar = (BLangSimpleVariable) globalVar;\n if (Symbols.isFlagOn(globalVarFlags, Flags.CONFIGURABLE)) {\n if (Symbols.isFlagOn(globalVarFlags, Flags.REQUIRED)) {\n \n List args = getConfigurableLangLibInvocationParam(simpleGlobalVar);\n BLangInvocation getValueInvocation = createLangLibInvocationNode(\"getConfigurableValue\",\n args, symTable.anydataType, simpleGlobalVar.pos);\n simpleGlobalVar.expr = getValueInvocation;\n } else {\n \n simpleGlobalVar.expr = createIfElseFromConfigurable(simpleGlobalVar);\n }\n }\n\n \n if (Symbols.isFlagOn(globalVarFlags, Flags.LISTENER)\n && types.containsErrorType(globalVar.expr.type)) {\n globalVar.expr = ASTBuilderUtil.createCheckExpr(globalVar.expr.pos, globalVar.expr,\n globalVar.type);\n 
}\n\n addToInitFunction(simpleGlobalVar, initFnBody);\n desugaredGlobalVarList.add(simpleGlobalVar);\n break;\n }\n });\n this.env.enclPkg.topLevelNodes.addAll(desugaredGlobalVarList);\n return desugaredGlobalVarList;\n }", "target_code": "", "method_body_after": "private List desugarGlobalVariables(BLangPackage pkgNode, BLangBlockFunctionBody initFnBody) {\n List desugaredGlobalVarList = new ArrayList<>();\n SymbolEnv initFunctionEnv =\n SymbolEnv.createFunctionEnv(pkgNode.initFunction, pkgNode.initFunction.symbol.scope, env);\n\n for (BLangVariable globalVar : pkgNode.globalVars) {\n this.env.enclPkg.topLevelNodes.remove(globalVar);\n \n switch (globalVar.getKind()) {\n case TUPLE_VARIABLE:\n BLangNode blockStatementNode = rewrite(globalVar, initFunctionEnv);\n List statements = ((BLangBlockStmt) blockStatementNode).stmts;\n\n int statementSize = statements.size();\n for (int i = 0; i < statementSize - 1; i++) {\n addToGlobalVariableList(statements.get(i), initFnBody, globalVar, desugaredGlobalVarList);\n }\n\n \n BLangStatement bLangStatement = statements.get(statementSize - 1);\n if (bLangStatement.getKind() == NodeKind.BLOCK) {\n initFnBody.stmts.add(bLangStatement);\n } else {\n addToGlobalVariableList(bLangStatement, initFnBody, globalVar, desugaredGlobalVarList);\n }\n break;\n case RECORD_VARIABLE:\n case ERROR_VARIABLE:\n blockStatementNode = rewrite(globalVar, initFunctionEnv);\n for (BLangStatement statement : ((BLangBlockStmt) blockStatementNode).stmts) {\n addToGlobalVariableList(statement, initFnBody, globalVar, desugaredGlobalVarList);\n }\n break;\n default:\n long globalVarFlags = globalVar.symbol.flags;\n BLangSimpleVariable simpleGlobalVar = (BLangSimpleVariable) globalVar;\n if (Symbols.isFlagOn(globalVarFlags, Flags.CONFIGURABLE)) {\n if (Symbols.isFlagOn(globalVarFlags, Flags.REQUIRED)) {\n \n List args = getConfigurableLangLibInvocationParam(simpleGlobalVar);\n BLangInvocation getValueInvocation = 
createLangLibInvocationNode(\"getConfigurableValue\",\n args, symTable.anydataType, simpleGlobalVar.pos);\n simpleGlobalVar.expr = getValueInvocation;\n } else {\n \n simpleGlobalVar.expr = createIfElseFromConfigurable(simpleGlobalVar);\n }\n }\n\n \n if (Symbols.isFlagOn(globalVarFlags, Flags.LISTENER)\n && types.containsErrorType(globalVar.expr.type)) {\n globalVar.expr = ASTBuilderUtil.createCheckExpr(globalVar.expr.pos, globalVar.expr,\n globalVar.type);\n }\n\n addToInitFunction(simpleGlobalVar, initFnBody);\n desugaredGlobalVarList.add(simpleGlobalVar);\n break;\n }\n }\n\n this.env.enclPkg.topLevelNodes.addAll(desugaredGlobalVarList);\n return desugaredGlobalVarList;\n }", "context_before": "class definition node for which the initializer is created\n * @param env The env for the type node\n * @return The generated initializer method\n */\n private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {\n BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);\n if (classDefinition.initFunction == null) {\n return generatedInitFunc;\n }\n\n return wireUpGeneratedInitFunction(generatedInitFunc,\n (BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);\n }", "context_after": "class definition node for which the initializer is created\n * @param env The env for the type node\n * @return The generated initializer method\n */\n private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {\n BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);\n if (classDefinition.initFunction == null) {\n return generatedInitFunc;\n }\n\n return wireUpGeneratedInitFunction(generatedInitFunc,\n (BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);\n }" }, { "comment": "I guess we can check size of array before doing [0] ```suggestion var channelArray = 
propertyName.replace(INCOMING_PREFIX, \"\").split(\"\\\\.\") if( channelArray. length()>0){ var channelName = channelArray[0]; } ```", "method_body": "private List extractChannels(Config configIn) {\n\n var list = new ArrayList();\n\n for (String propertyName : configIn.getPropertyNames()) {\n if (propertyName.startsWith(INCOMING_PREFIX)) {\n var channelName = propertyName.replace(INCOMING_PREFIX, \"\").split(\"\\\\.\")[0];\n list.add(INCOMING_PREFIX + channelName + \".\");\n } else if (propertyName.startsWith(OUTGOING_PREFIX)) {\n var channelName = propertyName.replace(OUTGOING_PREFIX, \"\").split(\"\\\\.\")[0];\n list.add(OUTGOING_PREFIX + channelName + \".\");\n }\n }\n return list;\n }", "target_code": "for (String propertyName : configIn.getPropertyNames()) {", "method_body_after": "private List extractChannels(Config configIn) {\n\n var list = new ArrayList();\n\n for (String propertyName : configIn.getPropertyNames()) {\n if (propertyName.startsWith(INCOMING_PREFIX)) {\n var channelAndProp = StringUtils.substringAfter(propertyName, INCOMING_PREFIX);\n \n var channelName = StringUtils.substringBefore(channelAndProp, \".\");\n if (!StringUtils.isBlank(channelName))\n list.add(INCOMING_PREFIX + channelName + \".\");\n } else if (propertyName.startsWith(OUTGOING_PREFIX)) {\n var channelAndProp = StringUtils.substringAfter(propertyName, OUTGOING_PREFIX);\n \n var channelName = StringUtils.substringBefore(channelAndProp, \".\");\n if (!StringUtils.isBlank(channelName))\n list.add(OUTGOING_PREFIX + channelName + \".\");\n }\n }\n return list;\n }", "context_before": "class ServiceRegistryBindingConverter implements ServiceBindingConverter {\n\n private static Logger LOG = Logger.getLogger(ServiceRegistryBindingConverter.class.getName());\n\n private static final String INCOMING_PREFIX = \"mp.messaging.incoming.\";\n private static final String OUTGOING_PREFIX = \"mp.messaging.outgoing.\";\n\n @Override\n public Optional convert(List serviceBindings) {\n var 
matchingByType = ServiceBinding.singleMatchingByType(\"serviceregistry\", serviceBindings);\n Config config = ConfigProvider.getConfig();\n if (matchingByType.isEmpty()) {\n return Optional.empty();\n }\n\n var binding = matchingByType.get();\n\n List channels = extractChannels(config);\n\n Map properties = new HashMap<>();\n\n String registryUrl = binding.getProperties().get(\"registryUrl\");\n if (registryUrl == null) {\n registryUrl = binding.getProperties().get(\"registryurl\");\n }\n if (registryUrl != null) {\n properties.put(\"mp.messaging.connector.smallrye-kafka.apicurio.registry.url\", registryUrl);\n }\n \n\n for (String channel : channels) {\n\n String prefix = channel;\n\n String oAuthHost = binding.getProperties().get(\"oauthServerUrl\");\n if (oAuthHost == null) {\n oAuthHost = binding.getProperties().get(\"oauthserverurl\");\n }\n if (oAuthHost != null) {\n properties.put(prefix + \"apicurio.auth.service.url\", oAuthHost);\n }\n\n String clientId = binding.getProperties().get(\"clientId\");\n if (clientId == null) {\n clientId = binding.getProperties().get(\"clientid\");\n }\n if (clientId != null) {\n properties.put(prefix + \"apicurio.auth.client.id\", clientId);\n }\n\n String clientSecret = binding.getProperties().get(\"clientSecret\");\n if (clientSecret == null) {\n clientSecret = binding.getProperties().get(\"clientsecret\");\n }\n if (clientSecret != null) {\n properties.put(prefix + \"apicurio.auth.client.secret\", clientSecret);\n }\n\n String realm = binding.getProperties().get(\"oauthRealm\");\n if (realm == null) {\n realm = binding.getProperties().get(\"oauthRealm\");\n }\n if (clientSecret != null) {\n properties.put(prefix + \"apicurio.auth.realm\", realm);\n }\n\n if (registryUrl != null) {\n properties.put(prefix + \"apicurio.registry.url\", registryUrl);\n }\n }\n \n return Optional.of(new ServiceBindingConfigSource(\"serviceregistry-k8s-service-binding-source\", properties));\n }\n\n \n}", "context_after": "class 
ServiceRegistryBindingConverter implements ServiceBindingConverter {\n\n private static Logger LOG = Logger.getLogger(ServiceRegistryBindingConverter.class.getName());\n\n private static final String INCOMING_PREFIX = \"mp.messaging.incoming.\";\n private static final String OUTGOING_PREFIX = \"mp.messaging.outgoing.\";\n\n @Override\n public Optional convert(List serviceBindings) {\n var matchingByType = ServiceBinding.singleMatchingByType(\"serviceregistry\", serviceBindings);\n Config config = ConfigProvider.getConfig();\n if (matchingByType.isEmpty()) {\n return Optional.empty();\n }\n\n var binding = matchingByType.get();\n\n List channels = extractChannels(config);\n\n Map properties = new HashMap<>();\n\n String registryUrl = binding.getProperties().get(\"registryUrl\");\n if (registryUrl == null) {\n registryUrl = binding.getProperties().get(\"registryurl\");\n }\n if (registryUrl != null) {\n properties.put(\"kafka.apicurio.registry.url\", registryUrl);\n }\n\n for (String channel : channels) {\n\n String prefix = channel;\n\n String oauthTokenUrl = binding.getProperties().get(\"oauthTokenUrl\");\n if (oauthTokenUrl == null) {\n oauthTokenUrl = binding.getProperties().get(\"oauthtokenurl\");\n }\n if (oauthTokenUrl != null) {\n properties.put(prefix + \"apicurio.auth.service.token.endpoint\", oauthTokenUrl);\n }\n\n String clientId = binding.getProperties().get(\"clientId\");\n if (clientId == null) {\n clientId = binding.getProperties().get(\"clientid\");\n }\n if (clientId != null) {\n properties.put(prefix + \"apicurio.auth.client.id\", clientId);\n }\n\n String clientSecret = binding.getProperties().get(\"clientSecret\");\n if (clientSecret == null) {\n clientSecret = binding.getProperties().get(\"clientsecret\");\n }\n if (clientSecret != null) {\n properties.put(prefix + \"apicurio.auth.client.secret\", clientSecret);\n }\n if (registryUrl != null) {\n properties.put(prefix + \"apicurio.registry.url\", registryUrl);\n }\n }\n\n return Optional.of(new 
ServiceBindingConfigSource(\"serviceregistry-k8s-service-binding-source\", properties));\n }\n\n \n}" }, { "comment": "In Arrange, Act, and Assert, this line would be \"Act\". And the Line 135 would be \"Assert\"", "method_body": "void createsMessageBatchWithSize() {\n \n int batchSize = 1024;\n\n final CreateBatchOptions options = new CreateBatchOptions().setMaximumSizeInBytes(batchSize);\n ServiceBusMessageBatch batch = new ServiceBusMessageBatch(batchSize, null, null,\n null);\n when(asyncSender.createBatch(options)).thenReturn(Mono.just(batch));\n\n \n ServiceBusMessageBatch messageBatch = sender.createBatch(options);\n\n Assertions.assertEquals(batch, messageBatch);\n }", "target_code": "ServiceBusMessageBatch messageBatch = sender.createBatch(options);", "method_body_after": "void createsMessageBatchWithSize() {\n \n int batchSize = 1024;\n\n final CreateBatchOptions options = new CreateBatchOptions().setMaximumSizeInBytes(batchSize);\n final ServiceBusMessageBatch batch = new ServiceBusMessageBatch(batchSize, null, null,\n null);\n when(asyncSender.createBatch(options)).thenReturn(Mono.just(batch));\n\n \n ServiceBusMessageBatch messageBatch = sender.createBatch(options);\n\n \n Assertions.assertEquals(batch, messageBatch);\n }", "context_before": "class ServiceBusSenderClientTest {\n private static final String NAMESPACE = \"my-namespace\";\n private static final String ENTITY_NAME = \"my-servicebus-entity\";\n\n @Mock\n private ServiceBusSenderAsyncClient asyncSender;\n\n @Captor\n private ArgumentCaptor singleMessageCaptor;\n\n private ServiceBusSenderClient sender;\n\n private static final Duration RETRY_TIMEOUT = Duration.ofSeconds(10);\n private static final String TEST_CONTENTS = \"My message for service bus queue!\";\n\n @BeforeAll\n static void beforeAll() {\n StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));\n }\n\n @AfterAll\n static void afterAll() {\n StepVerifier.resetDefaultTimeout();\n }\n\n @BeforeEach\n void setup() {\n 
MockitoAnnotations.initMocks(this);\n when(asyncSender.getEntityPath()).thenReturn(ENTITY_NAME);\n when(asyncSender.getFullyQualifiedNamespace()).thenReturn(NAMESPACE);\n sender = new ServiceBusSenderClient(asyncSender, RETRY_TIMEOUT);\n }\n\n @AfterEach\n void teardown() {\n sender.close();\n singleMessageCaptor = null;\n Mockito.framework().clearInlineMocks();\n }\n\n @Test\n void verifyProperties() {\n Assertions.assertEquals(ENTITY_NAME, sender.getEntityPath());\n Assertions.assertEquals(NAMESPACE, sender.getFullyQualifiedNamespace());\n }\n\n /**\n * Verifies that an exception is thrown when we create a batch with null options.\n */\n @Test\n void createBatchNull() {\n Assertions.assertThrows(NullPointerException.class, () -> sender.createBatch(null));\n }\n\n /**\n * Verifies that the default batch is the same size as the message link.\n */\n @Test\n void createBatchDefault() {\n \n ServiceBusMessageBatch batch = new ServiceBusMessageBatch(MAX_MESSAGE_LENGTH_BYTES, null, null,\n null);\n when(asyncSender.createBatch()).thenReturn(Mono.just(batch));\n\n \n ServiceBusMessageBatch batchMessage = sender.createBatch();\n\n \n Assertions.assertEquals(MAX_MESSAGE_LENGTH_BYTES, batchMessage.getMaxSizeInBytes());\n Assertions.assertEquals(0, batchMessage.getCount());\n verify(asyncSender).createBatch();\n }\n\n /**\n * Verifies we cannot create a batch if the options size is larger than the link.\n */\n @Test\n void createBatchWhenSizeTooBigThanOnSendLink() {\n \n int maxLinkSize = 1024;\n int batchSize = maxLinkSize + 10;\n\n \n final CreateBatchOptions options = new CreateBatchOptions().setMaximumSizeInBytes(batchSize);\n when(asyncSender.createBatch(options)).thenThrow(new IllegalArgumentException(\"too large size\"));\n\n \n Assertions.assertThrows(IllegalArgumentException.class, () -> sender.createBatch(options));\n verify(asyncSender, times(1)).createBatch(options);\n }\n\n /**\n * Verifies that the producer can create a batch with a given {@link 
CreateBatchOptions\n */\n @Test\n \n\n /**\n * Verifies that sending a single message will result in calling sender.send(Message).\n */\n @Test\n void sendSingleMessage() {\n \n final ServiceBusMessage testData =\n new ServiceBusMessage(TEST_CONTENTS.getBytes(UTF_8));\n\n when(asyncSender.send(testData)).thenReturn(Mono.empty());\n\n \n sender.send(testData);\n\n \n verify(asyncSender, times(1)).send(testData);\n verify(asyncSender).send(singleMessageCaptor.capture());\n\n final ServiceBusMessage message = singleMessageCaptor.getValue();\n Assertions.assertArrayEquals(testData.getBody(), message.getBody());\n }\n}", "context_after": "class ServiceBusSenderClientTest {\n private static final String NAMESPACE = \"my-namespace\";\n private static final String ENTITY_NAME = \"my-servicebus-entity\";\n\n @Mock\n private ServiceBusSenderAsyncClient asyncSender;\n\n @Captor\n private ArgumentCaptor singleMessageCaptor;\n\n private ServiceBusSenderClient sender;\n\n private static final Duration RETRY_TIMEOUT = Duration.ofSeconds(10);\n private static final String TEST_CONTENTS = \"My message for service bus queue!\";\n\n @BeforeAll\n static void beforeAll() {\n StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));\n }\n\n @AfterAll\n static void afterAll() {\n StepVerifier.resetDefaultTimeout();\n }\n\n @BeforeEach\n void setup() {\n MockitoAnnotations.initMocks(this);\n when(asyncSender.getEntityPath()).thenReturn(ENTITY_NAME);\n when(asyncSender.getFullyQualifiedNamespace()).thenReturn(NAMESPACE);\n sender = new ServiceBusSenderClient(asyncSender, RETRY_TIMEOUT);\n }\n\n @AfterEach\n void teardown() {\n sender.close();\n singleMessageCaptor = null;\n Mockito.framework().clearInlineMocks();\n }\n\n @Test\n void verifyProperties() {\n Assertions.assertEquals(ENTITY_NAME, sender.getEntityPath());\n Assertions.assertEquals(NAMESPACE, sender.getFullyQualifiedNamespace());\n }\n\n /**\n * Verifies that an exception is thrown when we create a batch with null options.\n */\n 
@Test\n void createBatchNull() {\n Assertions.assertThrows(NullPointerException.class, () -> sender.createBatch(null));\n }\n\n /**\n * Verifies that the default batch is the same size as the message link.\n */\n @Test\n void createBatchDefault() {\n \n ServiceBusMessageBatch batch = new ServiceBusMessageBatch(MAX_MESSAGE_LENGTH_BYTES, null, null,\n null);\n when(asyncSender.createBatch()).thenReturn(Mono.just(batch));\n\n \n ServiceBusMessageBatch batchMessage = sender.createBatch();\n\n \n Assertions.assertEquals(MAX_MESSAGE_LENGTH_BYTES, batchMessage.getMaxSizeInBytes());\n Assertions.assertEquals(0, batchMessage.getCount());\n verify(asyncSender).createBatch();\n }\n\n /**\n * Verifies we cannot create a batch if the options size is larger than the link.\n */\n @Test\n void createBatchWhenSizeTooBigThanOnSendLink() {\n \n int maxLinkSize = 1024;\n int batchSize = maxLinkSize + 10;\n\n \n final CreateBatchOptions options = new CreateBatchOptions().setMaximumSizeInBytes(batchSize);\n when(asyncSender.createBatch(options)).thenThrow(new IllegalArgumentException(\"too large size\"));\n\n \n Assertions.assertThrows(IllegalArgumentException.class, () -> sender.createBatch(options));\n verify(asyncSender, times(1)).createBatch(options);\n }\n\n /**\n * Verifies that the producer can create a batch with a given {@link CreateBatchOptions\n */\n @Test\n \n\n /**\n * Verifies that sending a single message will result in calling sender.send(Message).\n */\n @Test\n void sendSingleMessage() {\n \n final ServiceBusMessage testData =\n new ServiceBusMessage(TEST_CONTENTS.getBytes(UTF_8));\n\n when(asyncSender.send(testData)).thenReturn(Mono.empty());\n\n \n sender.send(testData);\n\n \n verify(asyncSender, times(1)).send(testData);\n verify(asyncSender).send(singleMessageCaptor.capture());\n\n final ServiceBusMessage message = singleMessageCaptor.getValue();\n Assertions.assertArrayEquals(testData.getBody(), message.getBody());\n }\n}" }, { "comment": "The 
`numPendingPodRequests` would never be 0 from the log, so maybe it is better to make this log after below condition.", "method_body": "public void onAdded(List pods) {\n\t\trunAsync(() -> {\n\t\t\tfor (KubernetesPod pod : pods) {\n\t\t\t\tlog.info(\"Received new TaskExecutor pod: {} - Remaining pending pod requests: {}\",\n\t\t\t\t\tpod.getName(), numPendingPodRequests);\n\n\t\t\t\tif (numPendingPodRequests > 0) {\n\t\t\t\t\tnumPendingPodRequests--;\n\t\t\t\t\tfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(new ResourceID(pod.getName()));\n\t\t\t\t\tworkerNodeMap.putIfAbsent(worker.getResourceID(), worker);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t}", "target_code": "log.info(\"Received new TaskExecutor pod: {} - Remaining pending pod requests: {}\",", "method_body_after": "public void onAdded(List pods) {\n\t\trunAsync(() -> {\n\t\t\tfor (KubernetesPod pod : pods) {\n\t\t\t\tif (numPendingPodRequests > 0) {\n\t\t\t\t\tnumPendingPodRequests--;\n\t\t\t\t\tfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(new ResourceID(pod.getName()));\n\t\t\t\t\tworkerNodes.putIfAbsent(worker.getResourceID(), worker);\n\t\t\t\t}\n\n\t\t\t\tlog.info(\"Received new TaskManager pod: {} - Remaining pending pod requests: {}\",\n\t\t\t\t\tpod.getName(), numPendingPodRequests);\n\t\t\t}\n\t\t});\n\t}", "context_before": "class KubernetesResourceManager extends ResourceManager\n\timplements FlinkKubeClient.PodCallbackHandler {\n\n\tprivate static final Logger LOG = LoggerFactory.getLogger(KubernetesResourceManager.class);\n\n\t/** The taskmanager pod name pattern is {clusterId}-{taskmanager}-{attemptId}-{podIndex}. 
*/\n\tprivate static final String TASK_MANAGER_POD_FORMAT = \"%s-taskmanager-%d-%d\";\n\n\tprivate final ConcurrentMap workerNodeMap;\n\n\tprivate final int numberOfTaskSlots;\n\n\tprivate final TaskExecutorResourceSpec taskExecutorResourceSpec;\n\n\tprivate final int defaultTaskManagerMemoryMB;\n\n\tprivate final double defaultCpus;\n\n\tprivate final Collection slotsPerWorker;\n\n\tprivate final Configuration flinkConfig;\n\n\t/** Flink configuration uploaded by client. */\n\tprivate final Configuration flinkClientConfig;\n\n\t/** When ResourceManager failover, the max attempt should recover. */\n\tprivate final AtomicLong currentMaxAttemptId = new AtomicLong(0);\n\n\tprivate final AtomicLong currentMaxPodId = new AtomicLong(0);\n\n\tprivate final String clusterId;\n\n\tprivate FlinkKubeClient kubeClient;\n\n\t/** The number of pods requested, but not yet granted. */\n\tprivate int numPendingPodRequests;\n\n\tpublic KubernetesResourceManager(\n\t\tRpcService rpcService,\n\t\tString resourceManagerEndpointId,\n\t\tResourceID resourceId,\n\t\tConfiguration flinkConfig,\n\t\tHighAvailabilityServices highAvailabilityServices,\n\t\tHeartbeatServices heartbeatServices,\n\t\tSlotManager slotManager,\n\t\tJobLeaderIdService jobLeaderIdService,\n\t\tClusterInformation clusterInformation,\n\t\tFatalErrorHandler fatalErrorHandler,\n\t\tResourceManagerMetricGroup resourceManagerMetricGroup) {\n\t\tsuper(\n\t\t\trpcService,\n\t\t\tresourceManagerEndpointId,\n\t\t\tresourceId,\n\t\t\thighAvailabilityServices,\n\t\t\theartbeatServices,\n\t\t\tslotManager,\n\t\t\tjobLeaderIdService,\n\t\t\tclusterInformation,\n\t\t\tfatalErrorHandler,\n\t\t\tresourceManagerMetricGroup);\n\n\t\tthis.flinkConfig = flinkConfig;\n\t\tthis.clusterId = flinkConfig.getString(KubernetesConfigOptions.CLUSTER_ID);\n\n\t\tthis.workerNodeMap = new ConcurrentHashMap<>();\n\n\t\tthis.numPendingPodRequests = 0;\n\n\t\tthis.numberOfTaskSlots = 
flinkConfig.getInteger(TaskManagerOptions.NUM_TASK_SLOTS);\n\t\tthis.taskExecutorResourceSpec = TaskExecutorResourceUtils.resourceSpecFromConfig(flinkConfig);\n\t\tthis.defaultTaskManagerMemoryMB = taskExecutorResourceSpec.getTotalProcessMemorySize().getMebiBytes();\n\t\tthis.defaultCpus = flinkConfig.getDouble(KubernetesConfigOptions.TASK_MANAGER_CPU, numberOfTaskSlots);\n\n\t\tthis.slotsPerWorker = createWorkerSlotProfiles(flinkConfig);\n\n\t\t\n\t\tthis.flinkClientConfig = GlobalConfiguration.loadConfiguration();\n\t}\n\n\t@Override\n\tprotected void initialize() throws ResourceManagerException {\n\t\tLOG.info(\"Initializing Kubernetes client.\");\n\t\tLOG.info(\"KubernetesResourceManager.initialize clusterId:{}\", clusterId);\n\n\t\tthis.kubeClient = createFlinkKubeClient();\n\n\t\ttry {\n\t\t\tgetWorkerNodesFromPreviousAttempts();\n\t\t} catch (Exception e) {\n\t\t\tthrow new ResourceManagerException(e);\n\t\t}\n\n\t\tthis.kubeClient.watchPodsAndDoCallback(getTaskManagerLabels(), this);\n\t}\n\n\t@Override\n\tpublic CompletableFuture onStop() {\n\t\t\n\t\tThrowable firstException = null;\n\n\t\tif (kubeClient != null) {\n\t\t\ttry {\n\t\t\t\tkubeClient.close();\n\t\t\t} catch (Throwable t) {\n\t\t\t\tfirstException = t;\n\t\t\t}\n\t\t}\n\n\t\tfinal CompletableFuture terminationFuture = super.onStop();\n\n\t\tif (firstException != null) {\n\t\t\treturn FutureUtils.completedExceptionally(new FlinkException(\n\t\t\t\t\"Error while shutting down Kubernetes resource manager\", firstException));\n\t\t} else {\n\t\t\treturn terminationFuture;\n\t\t}\n\t}\n\n\t@Override\n\tprotected void internalDeregisterApplication(ApplicationStatus finalStatus, @Nullable String optionalDiagnostics) {\n\t\tLOG.info(\"Stopping kubernetes cluster, id: {}\", clusterId);\n\t\tthis.kubeClient.stopAndCleanupCluster(this.clusterId);\n\t}\n\n\t@Override\n\tpublic Collection startNewWorker(ResourceProfile resourceProfile) {\n\t\tLOG.info(\"Starting new worker with resource profile, {}\", 
resourceProfile.toString());\n\t\tif (!slotsPerWorker.iterator().next().isMatching(resourceProfile)) {\n\t\t\treturn Collections.emptyList();\n\t\t}\n\t\trequestKubernetesPod();\n\t\treturn slotsPerWorker;\n\t}\n\n\t@Override\n\tprotected KubernetesWorkerNode workerStarted(ResourceID resourceID) {\n\t\treturn workerNodeMap.get(resourceID);\n\t}\n\n\t@Override\n\tpublic boolean stopWorker(final KubernetesWorkerNode worker) {\n\t\tPreconditions.checkNotNull(this.kubeClient);\n\t\tLOG.info(\"Stopping Worker {}.\", worker.getResourceID().toString());\n\t\ttry {\n\t\t\tthis.kubeClient.stopPod(worker.getResourceID().toString());\n\t\t} catch (Exception e) {\n\t\t\tthis.kubeClient.handleException(e);\n\t\t\treturn false;\n\t\t}\n\t\tworkerNodeMap.remove(worker.getResourceID());\n\t\treturn true;\n\t}\n\n\t@Override\n\t\n\n\t@Override\n\tpublic void onModified(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodIfTerminated));\n\t}\n\n\t@Override\n\tpublic void onDeleted(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodIfTerminated));\n\t}\n\n\t@Override\n\tpublic void onError(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodIfTerminated));\n\t}\n\n\t@VisibleForTesting\n\tConcurrentMap getWorkerNodeMap() {\n\t\treturn workerNodeMap;\n\t}\n\n\tprivate void getWorkerNodesFromPreviousAttempts() throws Exception {\n\t\tfinal List podList = kubeClient.getPodsWithLabels(getTaskManagerLabels());\n\t\tfor (KubernetesPod pod : podList) {\n\t\t\tfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(new ResourceID(pod.getName()));\n\t\t\tworkerNodeMap.put(worker.getResourceID(), worker);\n\t\t\tfinal long attempt = worker.getAttempt();\n\t\t\tif (attempt > currentMaxAttemptId.get()) {\n\t\t\t\tcurrentMaxAttemptId.set(attempt);\n\t\t\t}\n\t\t}\n\n\t\tlog.info(\"Recovered {} pods from previous attempts, current attempt id is {}.\",\n\t\t\tworkerNodeMap.size(),\n\t\t\tcurrentMaxAttemptId.addAndGet(1));\n\t}\n\n\tprivate void requestKubernetesPod() 
{\n\t\tPreconditions.checkNotNull(this.kubeClient);\n\n\t\tnumPendingPodRequests++;\n\n\t\tlog.info(\"Requesting new TaskExecutor pod with <{},{}>. Number pending requests {}.\",\n\t\t\tdefaultTaskManagerMemoryMB,\n\t\t\tdefaultCpus,\n\t\t\tnumPendingPodRequests);\n\n\t\tfinal String podName = String.format(\n\t\t\tTASK_MANAGER_POD_FORMAT,\n\t\t\tclusterId,\n\t\t\tcurrentMaxAttemptId.get(),\n\t\t\tcurrentMaxPodId.addAndGet(1));\n\n\t\ttry {\n\t\t\tfinal HashMap env = new HashMap<>();\n\t\t\tenv.put(Constants.ENV_FLINK_POD_NAME, podName);\n\n\t\t\tfinal TaskManagerPodParameter parameter = new TaskManagerPodParameter(\n\t\t\t\tpodName,\n\t\t\t\tgetTaskManagerStartCommand(podName),\n\t\t\t\tdefaultTaskManagerMemoryMB,\n\t\t\t\tdefaultCpus,\n\t\t\t\tenv);\n\n\t\t\tthis.kubeClient.createTaskManagerPod(parameter);\n\t\t} catch (Exception e) {\n\t\t\tthis.kubeClient.handleException(e);\n\t\t\tthrow new FlinkRuntimeException(\"Could not start new worker\");\n\t\t}\n\t}\n\n\t/**\n\t * Request new pod if pending pods cannot satisfies pending slot requests.\n\t */\n\tprivate void requestKubernetesPodIfRequired() {\n\t\tfinal int requiredTaskManagerSlots = getNumberRequiredTaskManagerSlots();\n\t\tfinal int pendingTaskManagerSlots = numPendingPodRequests * numberOfTaskSlots;\n\n\t\tif (requiredTaskManagerSlots > pendingTaskManagerSlots) {\n\t\t\trequestKubernetesPod();\n\t\t}\n\t}\n\n\tprivate void removePodIfTerminated(KubernetesPod pod) {\n\t\tif (pod.isTerminated()) {\n\t\t\tkubeClient.stopPod(pod.getName());\n\t\t\tfinal KubernetesWorkerNode kubernetesWorkerNode = workerNodeMap.remove(new ResourceID(pod.getName()));\n\t\t\tif (kubernetesWorkerNode != null) {\n\t\t\t\trequestKubernetesPodIfRequired();\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate List getTaskManagerStartCommand(String podName) {\n\t\tfinal ContaineredTaskManagerParameters taskManagerParameters =\n\t\t\tContaineredTaskManagerParameters.create(flinkConfig, taskExecutorResourceSpec, 
numberOfTaskSlots);\n\n\t\tlog.info(\"TaskExecutor {} will be started with {}.\", podName, taskExecutorResourceSpec);\n\n\t\tfinal String confDir = flinkConfig.getString(KubernetesConfigOptions.FLINK_CONF_DIR);\n\t\tfinal boolean hasLogback = new File(confDir, Constants.CONFIG_FILE_LOGBACK_NAME).exists();\n\t\tfinal boolean hasLog4j = new File(confDir, Constants.CONFIG_FILE_LOG4J_NAME).exists();\n\n\t\tfinal String logDir = flinkConfig.getString(KubernetesConfigOptions.FLINK_LOG_DIR);\n\n\t\tfinal String mainClassArgs = \"--\" + CommandLineOptions.CONFIG_DIR_OPTION.getLongOpt() + \" \" +\n\t\t\tflinkConfig.getString(KubernetesConfigOptions.FLINK_CONF_DIR) + \" \" +\n\t\t\tBootstrapTools.getDynamicProperties(flinkClientConfig, flinkConfig);\n\n\t\tfinal String command = KubernetesUtils.getTaskManagerStartCommand(\n\t\t\tflinkConfig,\n\t\t\ttaskManagerParameters,\n\t\t\tconfDir,\n\t\t\tlogDir,\n\t\t\thasLogback,\n\t\t\thasLog4j,\n\t\t\tKubernetesTaskExecutorRunner.class.getCanonicalName(),\n\t\t\tmainClassArgs);\n\n\t\treturn Arrays.asList(\"/bin/bash\", \"-c\", command);\n\t}\n\n\t/**\n\t * Get task manager label for the current flink cluster. 
They could be used to watching the pods status.\n\t * @return Task manager labels.\n\t */\n\tprivate Map getTaskManagerLabels() {\n\t\tfinal Map labels = new HashMap<>();\n\t\tlabels.put(Constants.LABEL_TYPE_KEY, Constants.LABEL_TYPE_NATIVE_TYPE);\n\t\tlabels.put(Constants.LABEL_APP_KEY, clusterId);\n\t\tlabels.put(Constants.LABEL_COMPONENT_KEY, Constants.LABEL_COMPONENT_TASK_MANAGER);\n\t\treturn labels;\n\t}\n\n\tprotected FlinkKubeClient createFlinkKubeClient() {\n\t\treturn KubeClientFactory.fromConfiguration(this.flinkConfig);\n\t}\n}", "context_after": "class KubernetesResourceManager extends ActiveResourceManager\n\timplements FlinkKubeClient.PodCallbackHandler {\n\n\tprivate static final Logger LOG = LoggerFactory.getLogger(KubernetesResourceManager.class);\n\n\t/** The taskmanager pod name pattern is {clusterId}-{taskmanager}-{attemptId}-{podIndex}. */\n\tprivate static final String TASK_MANAGER_POD_FORMAT = \"%s-taskmanager-%d-%d\";\n\n\tprivate final Map workerNodes = new HashMap<>();\n\n\tprivate final double defaultCpus;\n\n\t/** When ResourceManager failover, the max attempt should recover. */\n\tprivate long currentMaxAttemptId = 0;\n\n\t/** Current max pod index. When creating a new pod, it should increase one. */\n\tprivate long currentMaxPodId = 0;\n\n\tprivate final String clusterId;\n\n\tprivate final FlinkKubeClient kubeClient;\n\n\tprivate final ContaineredTaskManagerParameters taskManagerParameters;\n\n\tprivate final List taskManagerStartCommand;\n\n\t/** The number of pods requested, but not yet granted. 
*/\n\tprivate int numPendingPodRequests = 0;\n\n\tpublic KubernetesResourceManager(\n\t\t\tRpcService rpcService,\n\t\t\tString resourceManagerEndpointId,\n\t\t\tResourceID resourceId,\n\t\t\tConfiguration flinkConfig,\n\t\t\tHighAvailabilityServices highAvailabilityServices,\n\t\t\tHeartbeatServices heartbeatServices,\n\t\t\tSlotManager slotManager,\n\t\t\tJobLeaderIdService jobLeaderIdService,\n\t\t\tClusterInformation clusterInformation,\n\t\t\tFatalErrorHandler fatalErrorHandler,\n\t\t\tResourceManagerMetricGroup resourceManagerMetricGroup) {\n\t\tsuper(\n\t\t\tflinkConfig,\n\t\t\tSystem.getenv(),\n\t\t\trpcService,\n\t\t\tresourceManagerEndpointId,\n\t\t\tresourceId,\n\t\t\thighAvailabilityServices,\n\t\t\theartbeatServices,\n\t\t\tslotManager,\n\t\t\tjobLeaderIdService,\n\t\t\tclusterInformation,\n\t\t\tfatalErrorHandler,\n\t\t\tresourceManagerMetricGroup);\n\t\tthis.clusterId = flinkConfig.getString(KubernetesConfigOptions.CLUSTER_ID);\n\t\tthis.defaultCpus = flinkConfig.getDouble(KubernetesConfigOptions.TASK_MANAGER_CPU, numSlotsPerTaskManager);\n\n\t\tthis.kubeClient = createFlinkKubeClient();\n\n\t\tthis.taskManagerParameters =\n\t\t\tContaineredTaskManagerParameters.create(flinkConfig, taskExecutorResourceSpec, numSlotsPerTaskManager);\n\n\t\tthis.taskManagerStartCommand = getTaskManagerStartCommand();\n\t}\n\n\t@Override\n\tprotected Configuration loadClientConfiguration() {\n\t\treturn GlobalConfiguration.loadConfiguration();\n\t}\n\n\t@Override\n\tprotected void initialize() throws ResourceManagerException {\n\t\trecoverWorkerNodesFromPreviousAttempts();\n\n\t\tkubeClient.watchPodsAndDoCallback(getTaskManagerLabels(), this);\n\t}\n\n\t@Override\n\tpublic CompletableFuture onStop() {\n\t\t\n\t\tThrowable exception = null;\n\n\t\ttry {\n\t\t\tkubeClient.close();\n\t\t} catch (Throwable t) {\n\t\t\texception = t;\n\t\t}\n\n\t\treturn getStopTerminationFutureOrCompletedExceptionally(exception);\n\t}\n\n\t@Override\n\tprotected void 
internalDeregisterApplication(ApplicationStatus finalStatus, @Nullable String diagnostics) {\n\t\tLOG.info(\n\t\t\t\"Stopping kubernetes cluster, clusterId: {}, diagnostics: {}\",\n\t\t\tclusterId,\n\t\t\tdiagnostics == null ? \"\" : diagnostics);\n\t\tkubeClient.stopAndCleanupCluster(clusterId);\n\t}\n\n\t@Override\n\tpublic Collection startNewWorker(ResourceProfile resourceProfile) {\n\t\tLOG.info(\"Starting new worker with resource profile, {}\", resourceProfile);\n\t\tif (!resourceProfilesPerWorker.iterator().next().isMatching(resourceProfile)) {\n\t\t\treturn Collections.emptyList();\n\t\t}\n\t\trequestKubernetesPod();\n\t\treturn resourceProfilesPerWorker;\n\t}\n\n\t@Override\n\tprotected KubernetesWorkerNode workerStarted(ResourceID resourceID) {\n\t\treturn workerNodes.get(resourceID);\n\t}\n\n\t@Override\n\tpublic boolean stopWorker(final KubernetesWorkerNode worker) {\n\t\tLOG.info(\"Stopping Worker {}.\", worker.getResourceID());\n\t\tworkerNodes.remove(worker.getResourceID());\n\t\ttry {\n\t\t\tkubeClient.stopPod(worker.getResourceID().toString());\n\t\t} catch (Exception e) {\n\t\t\tkubeClient.handleException(e);\n\t\t\treturn false;\n\t\t}\n\t\treturn true;\n\t}\n\n\t@Override\n\t\n\n\t@Override\n\tpublic void onModified(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodIfTerminated));\n\t}\n\n\t@Override\n\tpublic void onDeleted(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodIfTerminated));\n\t}\n\n\t@Override\n\tpublic void onError(List pods) {\n\t\trunAsync(() -> pods.forEach(this::removePodIfTerminated));\n\t}\n\n\t@VisibleForTesting\n\tMap getWorkerNodes() {\n\t\treturn workerNodes;\n\t}\n\n\tprivate void recoverWorkerNodesFromPreviousAttempts() throws ResourceManagerException {\n\t\tfinal List podList = kubeClient.getPodsWithLabels(getTaskManagerLabels());\n\t\tfor (KubernetesPod pod : podList) {\n\t\t\tfinal KubernetesWorkerNode worker = new KubernetesWorkerNode(new 
ResourceID(pod.getName()));\n\t\t\tworkerNodes.put(worker.getResourceID(), worker);\n\t\t\tfinal long attempt = worker.getAttempt();\n\t\t\tif (attempt > currentMaxAttemptId) {\n\t\t\t\tcurrentMaxAttemptId = attempt;\n\t\t\t}\n\t\t}\n\n\t\tlog.info(\"Recovered {} pods from previous attempts, current attempt id is {}.\",\n\t\t\tworkerNodes.size(),\n\t\t\t++currentMaxAttemptId);\n\t}\n\n\tprivate void requestKubernetesPod() {\n\t\tnumPendingPodRequests++;\n\n\t\tlog.info(\"Requesting new TaskManager pod with <{},{}>. Number pending requests {}.\",\n\t\t\tdefaultMemoryMB,\n\t\t\tdefaultCpus,\n\t\t\tnumPendingPodRequests);\n\n\t\tfinal String podName = String.format(\n\t\t\tTASK_MANAGER_POD_FORMAT,\n\t\t\tclusterId,\n\t\t\tcurrentMaxAttemptId,\n\t\t\t++currentMaxPodId);\n\n\t\tfinal HashMap env = new HashMap<>();\n\t\tenv.put(Constants.ENV_FLINK_POD_NAME, podName);\n\t\tenv.putAll(taskManagerParameters.taskManagerEnv());\n\n\t\tfinal TaskManagerPodParameter parameter = new TaskManagerPodParameter(\n\t\t\tpodName,\n\t\t\ttaskManagerStartCommand,\n\t\t\tdefaultMemoryMB,\n\t\t\tdefaultCpus,\n\t\t\tenv);\n\n\t\tlog.info(\"TaskManager {} will be started with {}.\", podName, taskExecutorResourceSpec);\n\t\tkubeClient.createTaskManagerPod(parameter);\n\t}\n\n\t/**\n\t * Request new pod if pending pods cannot satisfy pending slot requests.\n\t */\n\tprivate void requestKubernetesPodIfRequired() {\n\t\tfinal int requiredTaskManagerSlots = getNumberRequiredTaskManagerSlots();\n\t\tfinal int pendingTaskManagerSlots = numPendingPodRequests * numSlotsPerTaskManager;\n\n\t\tif (requiredTaskManagerSlots > pendingTaskManagerSlots) {\n\t\t\trequestKubernetesPod();\n\t\t}\n\t}\n\n\tprivate void removePodIfTerminated(KubernetesPod pod) {\n\t\tif (pod.isTerminated()) {\n\t\t\tkubeClient.stopPod(pod.getName());\n\t\t\tfinal KubernetesWorkerNode kubernetesWorkerNode = workerNodes.remove(new ResourceID(pod.getName()));\n\t\t\tif (kubernetesWorkerNode != null) 
{\n\t\t\t\trequestKubernetesPodIfRequired();\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate List getTaskManagerStartCommand() {\n\t\tfinal String confDir = flinkConfig.getString(KubernetesConfigOptions.FLINK_CONF_DIR);\n\t\tfinal boolean hasLogback = new File(confDir, Constants.CONFIG_FILE_LOGBACK_NAME).exists();\n\t\tfinal boolean hasLog4j = new File(confDir, Constants.CONFIG_FILE_LOG4J_NAME).exists();\n\n\t\tfinal String logDir = flinkConfig.getString(KubernetesConfigOptions.FLINK_LOG_DIR);\n\n\t\tfinal String mainClassArgs = \"--\" + CommandLineOptions.CONFIG_DIR_OPTION.getLongOpt() + \" \" +\n\t\t\tflinkConfig.getString(KubernetesConfigOptions.FLINK_CONF_DIR) + \" \" +\n\t\t\tBootstrapTools.getDynamicProperties(flinkClientConfig, flinkConfig);\n\n\t\tfinal String command = KubernetesUtils.getTaskManagerStartCommand(\n\t\t\tflinkConfig,\n\t\t\ttaskManagerParameters,\n\t\t\tconfDir,\n\t\t\tlogDir,\n\t\t\thasLogback,\n\t\t\thasLog4j,\n\t\t\tKubernetesTaskExecutorRunner.class.getCanonicalName(),\n\t\t\tmainClassArgs);\n\n\t\treturn Arrays.asList(\"/bin/bash\", \"-c\", command);\n\t}\n\n\t/**\n\t * Get task manager labels for the current Flink cluster. 
They could be used to watch the pods status.\n\t *\n\t * @return Task manager labels.\n\t */\n\tprivate Map getTaskManagerLabels() {\n\t\tfinal Map labels = new HashMap<>();\n\t\tlabels.put(Constants.LABEL_TYPE_KEY, Constants.LABEL_TYPE_NATIVE_TYPE);\n\t\tlabels.put(Constants.LABEL_APP_KEY, clusterId);\n\t\tlabels.put(Constants.LABEL_COMPONENT_KEY, Constants.LABEL_COMPONENT_TASK_MANAGER);\n\t\treturn labels;\n\t}\n\n\tprotected FlinkKubeClient createFlinkKubeClient() {\n\t\treturn KubeClientFactory.fromConfiguration(flinkConfig);\n\t}\n}" }, { "comment": "Minor: ```suggestion The thread number to create hive partition splits ```", "method_body": "public MRSplitsGetter(int threadNum) {\n if (threadNum > 1) {\n executorService = Executors.newFixedThreadPool(threadNum);\n } else if (threadNum == 1) {\n executorService = newDirectExecutorService();\n } else {\n throw new IllegalArgumentException(\n HiveOptions.TABLE_EXEC_HIVE_PARTITION_SPLIT_THREAD_NUM.key()\n + \" cannot be less than 1\");\n }\n LOG.info(\"Open {} threads to create hive partition splits.\", threadNum);\n }", "target_code": "HiveOptions.TABLE_EXEC_HIVE_PARTITION_SPLIT_THREAD_NUM.key()", "method_body_after": "public MRSplitsGetter(int threadNum) {\n if (threadNum > 1) {\n executorService = Executors.newFixedThreadPool(threadNum);\n } else if (threadNum == 1) {\n executorService = newDirectExecutorService();\n } else {\n throw new IllegalArgumentException(\n \"The thread number to create hive partition splits cannot be less than 1\");\n }\n LOG.info(\"Open {} threads to create hive partition splits.\", threadNum);\n }", "context_before": "class MRSplitsGetter implements Closeable {\n private static final Logger LOG = LoggerFactory.getLogger(MRSplitsGetter.class);\n\n private final ExecutorService executorService;\n\n \n\n public List getHiveTablePartitionMRSplits(\n int minNumSplits, List partitions, JobConf jobConf)\n throws IOException {\n LOG.info(\"Begin to create MR splits.\");\n long startTime = 
System.currentTimeMillis();\n\n final List> futures = new ArrayList<>();\n for (HiveTablePartition partition : partitions) {\n futures.add(\n executorService.submit(\n new MRSplitter(minNumSplits, partition, new JobConf(jobConf))));\n }\n\n int splitNum = 0;\n List hiveTablePartitionSplitsList = new ArrayList<>();\n try {\n for (Future future : futures) {\n HiveTablePartitionSplits hiveTablePartitionSplits = future.get();\n splitNum += hiveTablePartitionSplits.getInputSplits().length;\n hiveTablePartitionSplitsList.add(hiveTablePartitionSplits);\n }\n } catch (InterruptedException | ExecutionException e) {\n throw new IOException(\"Fail to create input splits.\", e);\n }\n\n LOG.info(\n \"It took {} seconds to create {} MR splits for {} hive partitions.\",\n (System.currentTimeMillis() - startTime) / 1000,\n splitNum,\n partitions.size());\n\n return hiveTablePartitionSplitsList;\n }\n\n private static class MRSplitter implements Callable {\n private final int minNumSplits;\n private final HiveTablePartition partition;\n private final JobConf jobConf;\n\n public MRSplitter(int minNumSplits, HiveTablePartition partition, JobConf jobConf) {\n this.minNumSplits = minNumSplits;\n this.partition = partition;\n this.jobConf = jobConf;\n }\n\n @Override\n public HiveTablePartitionSplits call() throws Exception {\n StorageDescriptor sd = partition.getStorageDescriptor();\n org.apache.hadoop.fs.Path inputPath = new org.apache.hadoop.fs.Path(sd.getLocation());\n FileSystem fs = inputPath.getFileSystem(jobConf);\n \n if (!fs.exists(inputPath)) {\n return new HiveTablePartitionSplits(partition, jobConf, new InputSplit[0]);\n }\n InputFormat format;\n try {\n format =\n (InputFormat)\n Class.forName(\n sd.getInputFormat(),\n true,\n Thread.currentThread().getContextClassLoader())\n .newInstance();\n } catch (Exception e) {\n throw new FlinkHiveException(\"Unable to instantiate the hadoop input format\", e);\n }\n ReflectionUtils.setConf(format, jobConf);\n \n 
jobConf.set(INPUT_DIR, StringUtils.escapeString(sd.getLocation()));\n \n \n return new HiveTablePartitionSplits(\n partition, jobConf, format.getSplits(jobConf, minNumSplits));\n }\n }\n\n @Override\n public void close() throws IOException {\n executorService.shutdownNow();\n }\n}", "context_after": "class MRSplitsGetter implements Closeable {\n private static final Logger LOG = LoggerFactory.getLogger(MRSplitsGetter.class);\n\n private final ExecutorService executorService;\n\n \n\n public List getHiveTablePartitionMRSplits(\n int minNumSplits, List partitions, JobConf jobConf)\n throws IOException {\n LOG.info(\"Begin to create MR splits.\");\n long startTime = System.currentTimeMillis();\n\n final List> futures = new ArrayList<>();\n for (HiveTablePartition partition : partitions) {\n futures.add(\n executorService.submit(\n new MRSplitter(minNumSplits, partition, new JobConf(jobConf))));\n }\n\n int splitNum = 0;\n List hiveTablePartitionSplitsList = new ArrayList<>();\n try {\n for (Future future : futures) {\n HiveTablePartitionSplits hiveTablePartitionSplits = future.get();\n splitNum += hiveTablePartitionSplits.getInputSplits().length;\n hiveTablePartitionSplitsList.add(hiveTablePartitionSplits);\n }\n } catch (InterruptedException | ExecutionException e) {\n throw new IOException(\"Fail to create input splits.\", e);\n }\n\n LOG.info(\n \"It took {} seconds to create {} MR splits for {} hive partitions.\",\n (System.currentTimeMillis() - startTime) / 1000,\n splitNum,\n partitions.size());\n\n return hiveTablePartitionSplitsList;\n }\n\n private static class MRSplitter implements Callable {\n private final int minNumSplits;\n private final HiveTablePartition partition;\n private final JobConf jobConf;\n\n public MRSplitter(int minNumSplits, HiveTablePartition partition, JobConf jobConf) {\n this.minNumSplits = minNumSplits;\n this.partition = partition;\n this.jobConf = jobConf;\n }\n\n @Override\n public HiveTablePartitionSplits call() throws Exception 
{\n StorageDescriptor sd = partition.getStorageDescriptor();\n org.apache.hadoop.fs.Path inputPath = new org.apache.hadoop.fs.Path(sd.getLocation());\n FileSystem fs = inputPath.getFileSystem(jobConf);\n \n if (!fs.exists(inputPath)) {\n return new HiveTablePartitionSplits(partition, jobConf, new InputSplit[0]);\n }\n InputFormat format;\n try {\n format =\n (InputFormat)\n Class.forName(\n sd.getInputFormat(),\n true,\n Thread.currentThread().getContextClassLoader())\n .newInstance();\n } catch (Exception e) {\n throw new FlinkHiveException(\"Unable to instantiate the hadoop input format\", e);\n }\n ReflectionUtils.setConf(format, jobConf);\n \n jobConf.set(INPUT_DIR, StringUtils.escapeString(sd.getLocation()));\n \n \n return new HiveTablePartitionSplits(\n partition, jobConf, format.getSplits(jobConf, minNumSplits));\n }\n }\n\n @Override\n public void close() throws IOException {\n executorService.shutdownNow();\n }\n}" }, { "comment": "missing \"that\" after value, (more occurrences in other files)", "method_body": "public String getDescription() {\n\t\treturn \"String value specifies the fully qualified name of the entry point class. \" +\n\t\t\t\"Overrides the class defined in the jar file manifest.\";\n\t}", "target_code": "return \"String value specifies the fully qualified name of the entry point class. \" +", "method_body_after": "public String getDescription() {\n\t\treturn \"String value that specifies the fully qualified name of the entry point class. 
\" +\n\t\t\t\"Overrides the class defined in the jar file manifest.\";\n\t}", "context_before": "class EntryClassQueryParameter extends StringQueryParameter {\n\tpublic EntryClassQueryParameter() {\n\t\tsuper(\"entry-class\", MessageParameterRequisiteness.OPTIONAL);\n\t}\n\n\t@Override\n\t\n}", "context_after": "class EntryClassQueryParameter extends StringQueryParameter {\n\tpublic EntryClassQueryParameter() {\n\t\tsuper(\"entry-class\", MessageParameterRequisiteness.OPTIONAL);\n\t}\n\n\t@Override\n\t\n}" }, { "comment": "```suggestion * request body, so this responsibility falls on the handler consuming the body instead. For the * deployment cases, the request body is validated in {@link ApplicationApiHandler.parseDataParts}. ```", "method_body": "private boolean keyVerifies(PublicKey key, DiscFilterRequest request) {\n /* This method only checks that the content hash has been signed by the provided public key, but\n * does not verify the content of the request. jDisc request filters do not allow inspecting the\n * request body, so this responsibility falls on the handler consuming the body instead. For this\n * specific case the request body is validated in {@link ApplicationApiHandler.parseDataParts}.\n */\n return new RequestVerifier(key, controller.clock()).verify(Method.valueOf(request.getMethod()),\n request.getUri(),\n request.getHeader(\"X-Timestamp\"),\n request.getHeader(\"X-Content-Hash\"),\n request.getHeader(\"X-Authorization\"));\n }", "target_code": "* specific case the request body is validated in {@link ApplicationApiHandler.parseDataParts}.", "method_body_after": "private boolean keyVerifies(PublicKey key, DiscFilterRequest request) {\n /* This method only checks that the content hash has been signed by the provided public key, but\n * does not verify the content of the request. jDisc request filters do not allow inspecting the\n * request body, so this responsibility falls on the handler consuming the body instead. 
For the\n * deployment cases, the request body is validated in {@link ApplicationApiHandler.parseDataParts}.\n */\n return new RequestVerifier(key, controller.clock()).verify(Method.valueOf(request.getMethod()),\n request.getUri(),\n request.getHeader(\"X-Timestamp\"),\n request.getHeader(\"X-Content-Hash\"),\n request.getHeader(\"X-Authorization\"));\n }", "context_before": "class SignatureFilter extends JsonSecurityRequestFilterBase {\n\n private static final Logger logger = Logger.getLogger(SignatureFilter.class.getName());\n\n private final Controller controller;\n\n @Inject\n public SignatureFilter(Controller controller) {\n this.controller = controller;\n }\n\n @Override\n protected Optional filter(DiscFilterRequest request) {\n if ( request.getAttribute(SecurityContext.ATTRIBUTE_NAME) == null\n && request.getHeader(\"X-Authorization\") != null)\n try {\n getSecurityContext(request).ifPresent(securityContext -> {\n request.setUserPrincipal(securityContext.principal());\n request.setRemoteUser(securityContext.principal().getName());\n request.setAttribute(SecurityContext.ATTRIBUTE_NAME, securityContext);\n });\n }\n catch (Exception e) {\n logger.log(Level.FINE, () -> \"Exception verifying signed request: \" + Exceptions.toMessageString(e));\n }\n return Optional.empty();\n }\n\n \n\n private Optional getSecurityContext(DiscFilterRequest request) {\n PublicKey key = KeyUtils.fromPemEncodedPublicKey(new String(Base64.getDecoder().decode(request.getHeader(\"X-Key\")), UTF_8));\n if (keyVerifies(key, request)) {\n ApplicationId id = ApplicationId.fromSerializedForm(request.getHeader(\"X-Key-Id\"));\n Optional tenant = controller.tenants().get(id.tenant())\n .filter(CloudTenant.class::isInstance)\n .map(CloudTenant.class::cast);\n if (tenant.isPresent() && tenant.get().developerKeys().containsKey(key))\n return Optional.of(new SecurityContext(tenant.get().developerKeys().get(key),\n Set.of(Role.reader(id.tenant()),\n Role.developer(id.tenant()))));\n\n Optional 
application = controller.applications().getApplication(TenantAndApplicationId.from(id));\n if (application.isPresent() && application.get().deployKeys().contains(key))\n return Optional.of(new SecurityContext(new SimplePrincipal(\"headless@\" + id.tenant() + \".\" + id.application()),\n Set.of(Role.reader(id.tenant()),\n Role.headless(id.tenant(), id.application()))));\n }\n return Optional.empty();\n }\n\n}", "context_after": "class SignatureFilter extends JsonSecurityRequestFilterBase {\n\n private static final Logger logger = Logger.getLogger(SignatureFilter.class.getName());\n\n private final Controller controller;\n\n @Inject\n public SignatureFilter(Controller controller) {\n this.controller = controller;\n }\n\n @Override\n protected Optional filter(DiscFilterRequest request) {\n if ( request.getAttribute(SecurityContext.ATTRIBUTE_NAME) == null\n && request.getHeader(\"X-Authorization\") != null)\n try {\n getSecurityContext(request).ifPresent(securityContext -> {\n request.setUserPrincipal(securityContext.principal());\n request.setRemoteUser(securityContext.principal().getName());\n request.setAttribute(SecurityContext.ATTRIBUTE_NAME, securityContext);\n });\n }\n catch (Exception e) {\n logger.log(Level.FINE, () -> \"Exception verifying signed request: \" + Exceptions.toMessageString(e));\n }\n return Optional.empty();\n }\n\n \n\n private Optional getSecurityContext(DiscFilterRequest request) {\n PublicKey key = KeyUtils.fromPemEncodedPublicKey(new String(Base64.getDecoder().decode(request.getHeader(\"X-Key\")), UTF_8));\n if (keyVerifies(key, request)) {\n ApplicationId id = ApplicationId.fromSerializedForm(request.getHeader(\"X-Key-Id\"));\n Optional tenant = controller.tenants().get(id.tenant())\n .filter(CloudTenant.class::isInstance)\n .map(CloudTenant.class::cast);\n if (tenant.isPresent() && tenant.get().developerKeys().containsKey(key))\n return Optional.of(new SecurityContext(tenant.get().developerKeys().get(key),\n 
Set.of(Role.reader(id.tenant()),\n Role.developer(id.tenant()))));\n\n Optional application = controller.applications().getApplication(TenantAndApplicationId.from(id));\n if (application.isPresent() && application.get().deployKeys().contains(key))\n return Optional.of(new SecurityContext(new SimplePrincipal(\"headless@\" + id.tenant() + \".\" + id.application()),\n Set.of(Role.reader(id.tenant()),\n Role.headless(id.tenant(), id.application()))));\n }\n return Optional.empty();\n }\n\n}" }, { "comment": "> @jingshanglu @tristaZero @dongzl Now, the refactor task of `Statement` just splits different dialect statements, `StatementContext` and `StatementAssert` are not splitted. When `Statement` is used by `StatemenContext` and `StatementAssert`, different dialect statements are processed uniformly through the `StatementHandler` now. > > I was wondering if we need to split `StatemenContext` and `StatementAssert`, since `StatementHandler` seems to handle logic well already. you mean the second way? 
decide whether to assert or not according to DB type in specific function, like some segment only contained by SQLServer.", "method_body": "public static void assertIs(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n assertTable(assertContext, actual, expected);\n assertInsertColumnsClause(assertContext, actual, expected);\n assertInsertValuesClause(assertContext, actual, expected);\n assertSetClause(assertContext, actual, expected);\n assertInsertSelectClause(assertContext, actual, expected);\n assertOnDuplicateKeyColumns(assertContext, actual, expected);\n assertWithClause(assertContext, actual, expected);\n assertOutputClause(assertContext, actual, expected);\n }", "target_code": "assertOutputClause(assertContext, actual, expected);", "method_body_after": "public static void assertIs(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n assertTable(assertContext, actual, expected);\n assertInsertColumnsClause(assertContext, actual, expected);\n assertInsertValuesClause(assertContext, actual, expected);\n assertSetClause(assertContext, actual, expected);\n assertInsertSelectClause(assertContext, actual, expected);\n assertOnDuplicateKeyColumns(assertContext, actual, expected);\n assertWithClause(assertContext, actual, expected);\n assertOutputClause(assertContext, actual, expected);\n }", "context_before": "class InsertStatementAssert {\n \n /**\n * Assert insert statement is correct with expected parser result.\n *\n * @param assertContext assert context\n * @param actual actual insert statement\n * @param expected expected insert statement test case\n */\n \n \n private static void assertTable(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n TableAssert.assertIs(assertContext, actual.getTable(), expected.getTable());\n }\n \n private static void 
assertInsertColumnsClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n if (null != expected.getInsertColumnsClause()) {\n assertTrue(assertContext.getText(\"Actual insert columns segment should exist.\"), actual.getInsertColumns().isPresent());\n InsertColumnsClauseAssert.assertIs(assertContext, actual.getInsertColumns().get(), expected.getInsertColumnsClause()); \n } else {\n assertFalse(assertContext.getText(\"Actual insert columns segment should not exist.\"), actual.getInsertColumns().isPresent());\n }\n }\n \n private static void assertInsertValuesClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n if (null != expected.getInsertValuesClause()) {\n assertFalse(assertContext.getText(\"Actual insert values segment should exist.\"), actual.getValues().isEmpty());\n InsertValuesClauseAssert.assertIs(assertContext, actual.getValues(), expected.getInsertValuesClause());\n } else {\n assertTrue(assertContext.getText(\"Actual insert values segment should not exist.\"), actual.getValues().isEmpty());\n }\n }\n \n private static void assertSetClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n Optional setAssignmentSegment = InsertStatementHandler.getSetAssignmentSegment(actual);\n if (null != expected.getSetClause()) {\n assertTrue(assertContext.getText(\"Actual set assignment segment should exist.\"), setAssignmentSegment.isPresent());\n SetClauseAssert.assertIs(assertContext, setAssignmentSegment.get(), expected.getSetClause());\n } else {\n assertFalse(assertContext.getText(\"Actual set assignment segment should not exist.\"), setAssignmentSegment.isPresent());\n }\n }\n\n private static void assertInsertSelectClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n if (null != 
expected.getSelectTestCase()) {\n assertTrue(assertContext.getText(\"Actual insert select segment should exist.\"), actual.getInsertSelect().isPresent());\n SelectStatementAssert.assertIs(assertContext, actual.getInsertSelect().get().getSelect(), expected.getSelectTestCase());\n } else {\n assertFalse(assertContext.getText(\"Actual insert select segment should not exist.\"), actual.getInsertSelect().isPresent());\n }\n }\n \n private static void assertOnDuplicateKeyColumns(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n Optional onDuplicateKeyColumnsSegment = InsertStatementHandler.getOnDuplicateKeyColumnsSegment(actual);\n if (null != expected.getOnDuplicateKeyColumns()) {\n assertTrue(assertContext.getText(\"Actual on duplicate key columns segment should exist.\"), onDuplicateKeyColumnsSegment.isPresent());\n OnDuplicateKeyColumnsAssert.assertIs(assertContext, onDuplicateKeyColumnsSegment.get(), expected.getOnDuplicateKeyColumns());\n } else {\n assertFalse(assertContext.getText(\"Actual on duplicate key columns segment should not exist.\"), onDuplicateKeyColumnsSegment.isPresent());\n }\n }\n \n private static void assertWithClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n Optional withSegment = InsertStatementHandler.getWithSegment(actual);\n if (null != expected.getWithClause()) {\n assertTrue(assertContext.getText(\"Actual with segment should exist.\"), withSegment.isPresent());\n WithClauseAssert.assertIs(assertContext, withSegment.get(), expected.getWithClause()); \n } else {\n assertFalse(assertContext.getText(\"Actual with segment should not exist.\"), withSegment.isPresent());\n }\n }\n \n private static void assertOutputClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n Optional outputSegment = 
InsertStatementHandler.getOutputSegment(actual);\n if (null != expected.getOutputClause()) {\n assertTrue(assertContext.getText(\"Actual output segment should exist.\"), outputSegment.isPresent());\n OutputClauseAssert.assertIs(assertContext, outputSegment.get(), expected.getOutputClause());\n } else {\n assertFalse(assertContext.getText(\"Actual output segment should not exist.\"), outputSegment.isPresent());\n }\n }\n}", "context_after": "class InsertStatementAssert {\n \n /**\n * Assert insert statement is correct with expected parser result.\n *\n * @param assertContext assert context\n * @param actual actual insert statement\n * @param expected expected insert statement test case\n */\n \n \n private static void assertTable(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n TableAssert.assertIs(assertContext, actual.getTable(), expected.getTable());\n }\n \n private static void assertInsertColumnsClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n if (null != expected.getInsertColumnsClause()) {\n assertTrue(assertContext.getText(\"Actual insert columns segment should exist.\"), actual.getInsertColumns().isPresent());\n InsertColumnsClauseAssert.assertIs(assertContext, actual.getInsertColumns().get(), expected.getInsertColumnsClause()); \n } else {\n assertFalse(assertContext.getText(\"Actual insert columns segment should not exist.\"), actual.getInsertColumns().isPresent());\n }\n }\n \n private static void assertInsertValuesClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n if (null != expected.getInsertValuesClause()) {\n assertFalse(assertContext.getText(\"Actual insert values segment should exist.\"), actual.getValues().isEmpty());\n InsertValuesClauseAssert.assertIs(assertContext, actual.getValues(), expected.getInsertValuesClause());\n } else {\n 
assertTrue(assertContext.getText(\"Actual insert values segment should not exist.\"), actual.getValues().isEmpty());\n }\n }\n \n private static void assertSetClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n Optional setAssignmentSegment = InsertStatementHandler.getSetAssignmentSegment(actual);\n if (null != expected.getSetClause()) {\n assertTrue(assertContext.getText(\"Actual set assignment segment should exist.\"), setAssignmentSegment.isPresent());\n SetClauseAssert.assertIs(assertContext, setAssignmentSegment.get(), expected.getSetClause());\n } else {\n assertFalse(assertContext.getText(\"Actual set assignment segment should not exist.\"), setAssignmentSegment.isPresent());\n }\n }\n\n private static void assertInsertSelectClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n if (null != expected.getSelectTestCase()) {\n assertTrue(assertContext.getText(\"Actual insert select segment should exist.\"), actual.getInsertSelect().isPresent());\n SelectStatementAssert.assertIs(assertContext, actual.getInsertSelect().get().getSelect(), expected.getSelectTestCase());\n } else {\n assertFalse(assertContext.getText(\"Actual insert select segment should not exist.\"), actual.getInsertSelect().isPresent());\n }\n }\n \n private static void assertOnDuplicateKeyColumns(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n Optional onDuplicateKeyColumnsSegment = InsertStatementHandler.getOnDuplicateKeyColumnsSegment(actual);\n if (null != expected.getOnDuplicateKeyColumns()) {\n assertTrue(assertContext.getText(\"Actual on duplicate key columns segment should exist.\"), onDuplicateKeyColumnsSegment.isPresent());\n OnDuplicateKeyColumnsAssert.assertIs(assertContext, onDuplicateKeyColumnsSegment.get(), expected.getOnDuplicateKeyColumns());\n } else {\n 
assertFalse(assertContext.getText(\"Actual on duplicate key columns segment should not exist.\"), onDuplicateKeyColumnsSegment.isPresent());\n }\n }\n \n private static void assertWithClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n Optional withSegment = InsertStatementHandler.getWithSegment(actual);\n if (null != expected.getWithClause()) {\n assertTrue(assertContext.getText(\"Actual with segment should exist.\"), withSegment.isPresent());\n WithClauseAssert.assertIs(assertContext, withSegment.get(), expected.getWithClause()); \n } else {\n assertFalse(assertContext.getText(\"Actual with segment should not exist.\"), withSegment.isPresent());\n }\n }\n \n private static void assertOutputClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) {\n Optional outputSegment = InsertStatementHandler.getOutputSegment(actual);\n if (null != expected.getOutputClause()) {\n assertTrue(assertContext.getText(\"Actual output segment should exist.\"), outputSegment.isPresent());\n OutputClauseAssert.assertIs(assertContext, outputSegment.get(), expected.getOutputClause());\n } else {\n assertFalse(assertContext.getText(\"Actual output segment should not exist.\"), outputSegment.isPresent());\n }\n }\n}" }, { "comment": "Suggestion: What if we populate these two fields also on demand as we have done for the other symbols such as annotations, typeDescriptor, etc.", "method_body": "public BallerinaObjectFieldSymbol(CompilerContext context, BField bField, SymbolKind kind) {\n super(bField.name.value, bField.symbol.pkgID, kind, bField.symbol);\n this.context = context;\n this.bField = bField;\n this.docAttachment = new BallerinaDocumentation(bField.symbol.markdownDocumentation);\n this.deprecated = Symbols.isFlagOn(bField.symbol.flags, Flags.DEPRECATED);\n }", "target_code": "this.deprecated = Symbols.isFlagOn(bField.symbol.flags, 
Flags.DEPRECATED);", "method_body_after": "public BallerinaObjectFieldSymbol(CompilerContext context, BField bField, SymbolKind kind) {\n super(bField.name.value, bField.symbol.pkgID, kind, bField.symbol);\n this.context = context;\n this.bField = bField;\n this.docAttachment = new BallerinaDocumentation(bField.symbol.markdownDocumentation);\n this.deprecated = Symbols.isFlagOn(bField.symbol.flags, Flags.DEPRECATED);\n }", "context_before": "class BallerinaObjectFieldSymbol extends BallerinaSymbol implements ObjectFieldSymbol {\n\n protected final BField bField;\n protected List qualifiers;\n private final Documentation docAttachment;\n private final CompilerContext context;\n private TypeSymbol typeDescriptor;\n private List annots;\n private String signature;\n private boolean deprecated;\n\n \n\n public BallerinaObjectFieldSymbol(CompilerContext context, BField bField) {\n this(context, bField, OBJECT_FIELD);\n }\n\n @Override\n public String name() {\n return this.bField.getName().getValue();\n }\n\n @Override\n public TypeSymbol typeDescriptor() {\n if (this.typeDescriptor == null) {\n TypesFactory typesFactory = TypesFactory.getInstance(this.context);\n this.typeDescriptor = typesFactory.getTypeDescriptor(this.bField.type);\n }\n\n return this.typeDescriptor;\n }\n\n @Override\n public List annotations() {\n if (this.annots != null) {\n return this.annots;\n }\n\n List annots = new ArrayList<>();\n SymbolFactory symbolFactory = SymbolFactory.getInstance(this.context);\n for (org.ballerinalang.model.symbols.AnnotationSymbol annot : bField.symbol.getAnnotations()) {\n annots.add(symbolFactory.createAnnotationSymbol((BAnnotationSymbol) annot));\n }\n\n this.annots = Collections.unmodifiableList(annots);\n return this.annots;\n }\n\n @Override\n public boolean deprecated() {\n return this.deprecated;\n }\n\n @Override\n public Optional documentation() {\n return Optional.ofNullable(docAttachment);\n }\n\n @Override\n public List qualifiers() {\n if 
(this.qualifiers != null) {\n return this.qualifiers;\n }\n\n List quals = new ArrayList<>();\n if (Symbols.isFlagOn(this.bField.symbol.flags, Flags.PUBLIC)) {\n quals.add(PUBLIC);\n }\n\n this.qualifiers = Collections.unmodifiableList(quals);\n return this.qualifiers;\n }\n\n @Override\n public String signature() {\n if (this.signature != null) {\n return this.signature;\n }\n\n StringJoiner joiner = new StringJoiner(\" \");\n\n for (Qualifier qualifier : this.qualifiers()) {\n joiner.add(qualifier.getValue());\n }\n\n this.signature = joiner.add(this.typeDescriptor().signature()).add(this.name()).toString();\n return this.signature;\n }\n}", "context_after": "class BallerinaObjectFieldSymbol extends BallerinaSymbol implements ObjectFieldSymbol {\n\n protected final BField bField;\n protected List qualifiers;\n private final Documentation docAttachment;\n private final CompilerContext context;\n private TypeSymbol typeDescriptor;\n private List annots;\n private String signature;\n private boolean deprecated;\n\n \n\n public BallerinaObjectFieldSymbol(CompilerContext context, BField bField) {\n this(context, bField, OBJECT_FIELD);\n }\n\n @Override\n public String name() {\n return this.bField.getName().getValue();\n }\n\n @Override\n public TypeSymbol typeDescriptor() {\n if (this.typeDescriptor == null) {\n TypesFactory typesFactory = TypesFactory.getInstance(this.context);\n this.typeDescriptor = typesFactory.getTypeDescriptor(this.bField.type);\n }\n\n return this.typeDescriptor;\n }\n\n @Override\n public List annotations() {\n if (this.annots != null) {\n return this.annots;\n }\n\n List annots = new ArrayList<>();\n SymbolFactory symbolFactory = SymbolFactory.getInstance(this.context);\n for (org.ballerinalang.model.symbols.AnnotationSymbol annot : bField.symbol.getAnnotations()) {\n annots.add(symbolFactory.createAnnotationSymbol((BAnnotationSymbol) annot));\n }\n\n this.annots = Collections.unmodifiableList(annots);\n return this.annots;\n }\n\n 
@Override\n public boolean deprecated() {\n return this.deprecated;\n }\n\n @Override\n public Optional documentation() {\n return Optional.ofNullable(docAttachment);\n }\n\n @Override\n public List qualifiers() {\n if (this.qualifiers != null) {\n return this.qualifiers;\n }\n\n List quals = new ArrayList<>();\n if (Symbols.isFlagOn(this.bField.symbol.flags, Flags.PUBLIC)) {\n quals.add(PUBLIC);\n }\n\n this.qualifiers = Collections.unmodifiableList(quals);\n return this.qualifiers;\n }\n\n @Override\n public String signature() {\n if (this.signature != null) {\n return this.signature;\n }\n\n StringJoiner joiner = new StringJoiner(\" \");\n\n for (Qualifier qualifier : this.qualifiers()) {\n joiner.add(qualifier.getValue());\n }\n\n this.signature = joiner.add(this.typeDescriptor().signature()).add(this.name()).toString();\n return this.signature;\n }\n}" }, { "comment": "It's better not put `incrementAndGet` in logging", "method_body": "private void reconnect() {\n log.info(\"reconnect mysql client, retryTimes={}\", reconnectTimes.incrementAndGet());\n closeChannel();\n connect();\n subscribe(lastBinlogEvent.getFileName(), lastBinlogEvent.getPosition());\n }", "target_code": "log.info(\"reconnect mysql client, retryTimes={}\", reconnectTimes.incrementAndGet());", "method_body_after": "private void reconnect() {\n int retryTimes = reconnectTimes.incrementAndGet();\n log.info(\"reconnect MySQL client, retry times={}\", retryTimes);\n closeChannel();\n connect();\n subscribe(lastBinlogEvent.getFileName(), lastBinlogEvent.getPosition());\n }", "context_before": "class MySQLBinlogEventHandler extends ChannelInboundHandlerAdapter {\n \n private AbstractBinlogEvent lastBinlogEvent;\n \n @Override\n public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception {\n if (!running) {\n return;\n }\n if (msg instanceof AbstractBinlogEvent) {\n lastBinlogEvent = (AbstractBinlogEvent) msg;\n blockingEventQueue.put(lastBinlogEvent);\n }\n }\n \n 
@Override\n public void channelInactive(final ChannelHandlerContext ctx) {\n log.warn(\"channel inactive\");\n if (!running) {\n return;\n }\n if (reconnectTimes.get() > 3) {\n log.warn(\"exceeds the maximum number of retry times, lastBinlogEvent={}\", lastBinlogEvent);\n running = false;\n return;\n }\n reconnect();\n }\n \n @Override\n public void exceptionCaught(final ChannelHandlerContext ctx, final Throwable cause) {\n running = false;\n log.error(\"MySQLBinlogEventHandler: protocol resolution error\", cause);\n }\n \n \n }", "context_after": "class MySQLBinlogEventHandler extends ChannelInboundHandlerAdapter {\n \n private AbstractBinlogEvent lastBinlogEvent;\n \n @Override\n public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception {\n if (!running) {\n return;\n }\n if (msg instanceof AbstractBinlogEvent) {\n lastBinlogEvent = (AbstractBinlogEvent) msg;\n blockingEventQueue.put(lastBinlogEvent);\n }\n }\n \n @Override\n public void channelInactive(final ChannelHandlerContext ctx) {\n log.warn(\"channel inactive\");\n if (!running) {\n return;\n }\n if (reconnectTimes.get() > 3) {\n log.warn(\"exceeds the maximum number of retry times, last binlog event:{}\", lastBinlogEvent);\n running = false;\n return;\n }\n reconnect();\n }\n \n @Override\n public void exceptionCaught(final ChannelHandlerContext ctx, final Throwable cause) {\n running = false;\n String fileName = null == lastBinlogEvent ? null : lastBinlogEvent.getFileName();\n Long position = null == lastBinlogEvent ? null : lastBinlogEvent.getPosition();\n log.error(\"MySQLBinlogEventHandler protocol resolution error, file name:{}, position:{}\", fileName, position, cause);\n }\n \n \n }" } ]

\n * Note: In the \n *